Auto merge of #3667 - RalfJung:rustup, r=RalfJung

Rustup
bors 2024-06-11 05:59:19 +00:00
commit 82c34ad095
252 changed files with 3986 additions and 1617 deletions

View File

@ -131,9 +131,9 @@ dependencies = [
[[package]]
name = "annotate-snippets"
version = "0.11.2"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a5a59f105fb9635e9eebdc1e29d53e764fa5795b9cf899a638a53e61567ef61"
checksum = "086b0afab3b349e5691143adbfb26983821e3eec4ba4c51957104d372c2e1b7d"
dependencies = [
"anstyle",
"unicode-width",
@ -189,9 +189,9 @@ dependencies = [
[[package]]
name = "anstyle-query"
version = "1.0.3"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a64c907d4e79225ac72e2a354c9ce84d50ebb4586dee56c82b3ee73004f537f5"
checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391"
dependencies = [
"windows-sys 0.52.0",
]
@ -266,7 +266,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -501,9 +501,9 @@ version = "0.1.0"
[[package]]
name = "cc"
version = "1.0.97"
version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "099a5357d84c4c61eb35fc8eafa9a79a902c2f76911e5747ced4e032edd8d9b4"
checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695"
[[package]]
name = "cfg-if"
@ -568,9 +568,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.4"
version = "4.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0"
checksum = "a9689a29b593160de5bc4aacab7b5d54fb52231de70122626c178e6a368994c7"
dependencies = [
"clap_builder",
"clap_derive",
@ -588,9 +588,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.2"
version = "4.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4"
checksum = "2e5387378c84f6faa26890ebf9f0a92989f8873d4d380467bcd0d8d8620424df"
dependencies = [
"anstream",
"anstyle",
@ -601,30 +601,30 @@ dependencies = [
[[package]]
name = "clap_complete"
version = "4.5.2"
version = "4.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd79504325bf38b10165b02e89b4347300f855f273c4cb30c4a3209e6583275e"
checksum = "d2020fa13af48afc65a9a87335bda648309ab3d154cd03c7ff95b378c7ed39c4"
dependencies = [
"clap",
]
[[package]]
name = "clap_derive"
version = "4.5.4"
version = "4.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64"
checksum = "c780290ccf4fb26629baa7a1081e68ced113f1d3ec302fa5948f1c381ebf06c6"
dependencies = [
"heck 0.5.0",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
name = "clap_lex"
version = "0.7.0"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce"
checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70"
[[package]]
name = "clippy"
@ -644,7 +644,7 @@ dependencies = [
"regex",
"rustc_tools_util",
"serde",
"syn 2.0.64",
"syn 2.0.66",
"tempfile",
"termize",
"tokio",
@ -754,7 +754,7 @@ dependencies = [
"nom",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -878,18 +878,18 @@ dependencies = [
[[package]]
name = "crc32fast"
version = "1.4.0"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa"
checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3"
dependencies = [
"cfg-if",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.12"
version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab3db02a9c5b5121e1e42fbdb1aeb65f5e02624cc58c43f2884c6ccac0b82f95"
checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2"
dependencies = [
"crossbeam-utils",
]
@ -915,9 +915,9 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.19"
version = "0.8.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80"
[[package]]
name = "crypto-common"
@ -990,7 +990,7 @@ dependencies = [
"proc-macro2",
"quote",
"strsim",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1001,7 +1001,7 @@ checksum = "733cabb43482b1a1b53eee8583c2b9e8684d592215ea83efd305dd31bc2f0178"
dependencies = [
"darling_core",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1027,7 +1027,7 @@ version = "0.1.80"
dependencies = [
"itertools 0.12.1",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1068,7 +1068,7 @@ dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1078,7 +1078,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "206868b8242f27cecce124c19fd88157fbd0dd334df2587f36417bafbc85097b"
dependencies = [
"derive_builder_core",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1101,7 +1101,7 @@ dependencies = [
"darling",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1196,7 +1196,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1516,7 +1516,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1588,9 +1588,9 @@ dependencies = [
[[package]]
name = "getrandom"
version = "0.2.14"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94b22e06ecb0110981051723910cbf0b5f5e09a2062dd7663334ee79a9d1286c"
checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7"
dependencies = [
"cfg-if",
"libc",
@ -1757,7 +1757,7 @@ dependencies = [
"markup5ever 0.12.1",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -1800,9 +1800,9 @@ dependencies = [
[[package]]
name = "icu_list"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fe6c04ec71ad1bacdbfb47164d4801f80a0533d9340f94f1a880f521eff59f54"
checksum = "bbfeda1d7775b6548edd4e8b7562304a559a91ed56ab56e18961a053f367c365"
dependencies = [
"displaydoc",
"icu_list_data",
@ -1814,15 +1814,15 @@ dependencies = [
[[package]]
name = "icu_list_data"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f6afcf7a9a7fedece70b7f17d7a7ecdfb8df145d37ae46d0277cd1e3932532"
checksum = "e1825170d2c6679cb20dbd96a589d034e49f698aed9a2ef4fafc9a0101ed298f"
[[package]]
name = "icu_locid"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c0aa2536adc14c07e2a521e95512b75ed8ef832f0fdf9299d4a0a45d2be2a9d"
checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
dependencies = [
"displaydoc",
"litemap",
@ -1833,9 +1833,9 @@ dependencies = [
[[package]]
name = "icu_locid_transform"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c17d8f6524fdca4471101dd71f0a132eb6382b5d6d7f2970441cb25f6f435a"
checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
dependencies = [
"displaydoc",
"icu_locid",
@ -1847,15 +1847,15 @@ dependencies = [
[[package]]
name = "icu_locid_transform_data"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "545c6c3e8bf9580e2dafee8de6f9ec14826aaf359787789c7724f1f85f47d3dc"
checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
[[package]]
name = "icu_provider"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba58e782287eb6950247abbf11719f83f5d4e4a5c1f2cd490d30a334bc47c2f4"
checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
dependencies = [
"displaydoc",
"icu_locid",
@ -1870,9 +1870,9 @@ dependencies = [
[[package]]
name = "icu_provider_adapters"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a229f978260da7c3aabb68cb7dc7316589936680570fe55e50fdd3f97711a4dd"
checksum = "d6324dfd08348a8e0374a447ebd334044d766b1839bb8d5ccf2482a99a77c0bc"
dependencies = [
"icu_locid",
"icu_locid_transform",
@ -1883,13 +1883,13 @@ dependencies = [
[[package]]
name = "icu_provider_macros"
version = "1.4.0"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2abdd3a62551e8337af119c5899e600ca0c88ec8f23a46c60ba216c803dcf1a"
checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -2196,9 +2196,9 @@ dependencies = [
[[package]]
name = "libz-sys"
version = "1.1.16"
version = "1.1.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e143b5e666b2695d28f6bca6497720813f699c9602dd7f5cac91008b8ada7f9"
checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e"
dependencies = [
"cc",
"libc",
@ -2252,9 +2252,9 @@ checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
[[package]]
name = "litemap"
version = "0.7.2"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9d642685b028806386b2b6e75685faadd3eb65a85fff7df711ce18446a422da"
checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704"
[[package]]
name = "lld-wrapper"
@ -2415,7 +2415,7 @@ dependencies = [
"pulldown-cmark-to-cmark",
"serde_json",
"thiserror",
"toml 0.8.13",
"toml 0.8.14",
"xmlparser",
]
@ -2608,9 +2608,9 @@ dependencies = [
[[package]]
name = "nu-ansi-term"
version = "0.49.0"
version = "0.50.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c073d3c1930d0751774acf49e66653acecb416c3a54c6ec095a9b11caddb5a68"
checksum = "dd2800e1520bdc966782168a627aa5d1ad92e33b984bf7c7615d31280c83ff14"
dependencies = [
"windows-sys 0.48.0",
]
@ -2849,9 +2849,9 @@ dependencies = [
[[package]]
name = "parking_lot"
version = "0.12.2"
version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7e4af0ca4f6caed20e900d564c242b8e5d4903fdacf31d3daf527b66fe6f42fb"
checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27"
dependencies = [
"lock_api",
"parking_lot_core",
@ -2931,7 +2931,7 @@ dependencies = [
"pest_meta",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -3142,9 +3142,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068"
[[package]]
name = "proc-macro2"
version = "1.0.82"
version = "1.0.85"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ad3d49ab951a01fbaafe34f2ec74122942fe18a3f9814c3268f1bb72042131b"
checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
dependencies = [
"unicode-ident",
]
@ -3246,9 +3246,9 @@ dependencies = [
[[package]]
name = "r-efi"
version = "4.4.0"
version = "4.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c47196f636c4cc0634b73b0405323d177753c2e15e866952c64ea22902567a34"
checksum = "e9e935efc5854715dfc0a4c9ef18dc69dee0ec3bf9cc3ab740db831c0fdd86a3"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-core",
@ -3627,7 +3627,6 @@ dependencies = [
"rustc_macros",
"rustc_serialize",
"rustc_span",
"smallvec",
]
[[package]]
@ -3708,7 +3707,6 @@ dependencies = [
"icu_locid",
"icu_locid_transform",
"icu_provider",
"icu_provider_adapters",
"zerovec",
]
@ -3889,7 +3887,6 @@ dependencies = [
"portable-atomic",
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
"rustc_arena",
"rustc_graphviz",
"rustc_index",
@ -3930,7 +3927,6 @@ dependencies = [
"rustc_expand",
"rustc_feature",
"rustc_fluent_macro",
"rustc_hir",
"rustc_hir_analysis",
"rustc_hir_pretty",
"rustc_hir_typeck",
@ -4058,7 +4054,7 @@ dependencies = [
"fluent-syntax",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"unic-langid",
]
@ -4192,8 +4188,7 @@ version = "0.0.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"synstructure",
"syn 2.0.66",
]
[[package]]
@ -4341,7 +4336,7 @@ version = "0.0.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"synstructure",
]
@ -4385,7 +4380,6 @@ dependencies = [
"field-offset",
"gsgdt",
"polonius-engine",
"rustc-rayon",
"rustc-rayon-core",
"rustc_apfloat",
"rustc_arena",
@ -4417,7 +4411,6 @@ dependencies = [
name = "rustc_mir_build"
version = "0.0.0"
dependencies = [
"either",
"itertools 0.12.1",
"rustc_apfloat",
"rustc_arena",
@ -4436,7 +4429,6 @@ dependencies = [
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"smallvec",
"tracing",
]
@ -4511,14 +4503,7 @@ dependencies = [
name = "rustc_next_trait_solver"
version = "0.0.0"
dependencies = [
"derivative",
"rustc_ast_ir",
"rustc_data_structures",
"rustc_macros",
"rustc_serialize",
"rustc_type_ir",
"rustc_type_ir_macros",
"tracing",
]
[[package]]
@ -4623,7 +4608,6 @@ version = "0.0.0"
dependencies = [
"field-offset",
"measureme",
"rustc-rayon-core",
"rustc_data_structures",
"rustc_errors",
"rustc_hir",
@ -4920,7 +4904,7 @@ version = "0.0.0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"synstructure",
]
@ -5018,7 +5002,7 @@ dependencies = [
"proc-macro2",
"quote",
"serde",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -5154,22 +5138,22 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.202"
version = "1.0.203"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395"
checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.202"
version = "1.0.203"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838"
checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -5281,9 +5265,9 @@ dependencies = [
[[package]]
name = "spanned"
version = "0.2.0"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccdf4f5590b7e6fbd4f2e80d442789079a6fff7c12ef921a9de358b7b353098e"
checksum = "ed14ba8b4b82241bd5daba2c49185d4a0581a0058355fe96537338f002b8605d"
dependencies = [
"bstr",
"color-eyre",
@ -5329,7 +5313,6 @@ name = "stable_mir"
version = "0.1.0-preview"
dependencies = [
"scoped-tls",
"tracing",
]
[[package]]
@ -5463,9 +5446,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.64"
version = "2.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ad3dee41f36859875573074334c200d1add8e4a87bb37113ebd31d926b7b11f"
checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
dependencies = [
"proc-macro2",
"quote",
@ -5480,7 +5463,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -5540,9 +5523,9 @@ dependencies = [
[[package]]
name = "tar"
version = "0.4.40"
version = "0.4.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb"
checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909"
dependencies = [
"filetime",
"libc",
@ -5625,8 +5608,6 @@ dependencies = [
"core",
"getopts",
"libc",
"panic_abort",
"panic_unwind",
"std",
]
@ -5659,7 +5640,7 @@ checksum = "46c3384250002a6d5af4d114f2845d37b57521033f30d5c3f46c4d70e1197533"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -5744,9 +5725,9 @@ dependencies = [
[[package]]
name = "tinystr"
version = "0.7.5"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83c02bf3c538ab32ba913408224323915f4ef9a6d61c0e85d493f355921c0ece"
checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
dependencies = [
"displaydoc",
"zerovec",
@ -5769,9 +5750,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.37.0"
version = "1.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787"
checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a"
dependencies = [
"backtrace",
"bytes",
@ -5801,14 +5782,14 @@ dependencies = [
[[package]]
name = "toml"
version = "0.8.13"
version = "0.8.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4e43f8cc456c9704c851ae29c67e17ef65d2c30017c17a9765b89c382dc8bba"
checksum = "6f49eb2ab21d2f26bd6db7bf383edc527a7ebaee412d17af4d40fdccd442f335"
dependencies = [
"serde",
"serde_spanned",
"toml_datetime",
"toml_edit 0.22.13",
"toml_edit 0.22.14",
]
[[package]]
@ -5835,15 +5816,15 @@ dependencies = [
[[package]]
name = "toml_edit"
version = "0.22.13"
version = "0.22.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c127785850e8c20836d49732ae6abfa47616e60bf9d9f57c43c250361a9db96c"
checksum = "f21c7aaf97f1bd9ca9d4f9e73b0a6c74bd5afef56f2bc931943a6e1c37e04e38"
dependencies = [
"indexmap",
"serde",
"serde_spanned",
"toml_datetime",
"winnow 0.6.8",
"winnow 0.6.13",
]
[[package]]
@ -5872,7 +5853,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
@ -5927,11 +5908,11 @@ dependencies = [
[[package]]
name = "tracing-tree"
version = "0.3.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65139ecd2c3f6484c3b99bc01c77afe21e95473630747c7aca525e78b0666675"
checksum = "b56c62d2c80033cb36fae448730a2f2ef99410fe3ecbffc916681a32f6807dbe"
dependencies = [
"nu-ansi-term 0.49.0",
"nu-ansi-term 0.50.0",
"tracing-core",
"tracing-log",
"tracing-subscriber",
@ -6011,7 +5992,7 @@ version = "0.23.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29e5f4ffcbab82453958fbf59990e981b8e8a177dcd60c2bd8f9b52c3036a6e1"
dependencies = [
"annotate-snippets 0.11.2",
"annotate-snippets 0.11.3",
"anyhow",
"bstr",
"cargo-platform",
@ -6072,7 +6053,7 @@ checksum = "1ed7f4237ba393424195053097c1516bd4590dc82b84f2f97c5c69e12704555b"
dependencies = [
"proc-macro-hack",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"unic-langid-impl",
]
@ -6143,9 +6124,9 @@ checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202"
[[package]]
name = "unicode-width"
version = "0.1.12"
version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6"
checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
dependencies = [
"compiler_builtins",
"rustc-std-workspace-core",
@ -6222,9 +6203,9 @@ checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3"
[[package]]
name = "utf8parse"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a"
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
[[package]]
name = "uuid"
@ -6304,7 +6285,7 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"wasm-bindgen-shared",
]
@ -6326,7 +6307,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@ -6407,7 +6388,7 @@ dependencies = [
"rayon",
"serde",
"serde_json",
"syn 2.0.64",
"syn 2.0.66",
"windows-metadata",
]
@ -6576,18 +6557,18 @@ dependencies = [
[[package]]
name = "winnow"
version = "0.6.8"
version = "0.6.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3c52e9c97a68071b23e836c9380edae937f17b9c4667bd021973efc689f618d"
checksum = "59b5e5f6c299a3c7890b876a2a587f3115162487e704907d9b6cd29473052ba1"
dependencies = [
"memchr",
]
[[package]]
name = "writeable"
version = "0.5.4"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dad7bb64b8ef9c0aa27b6da38b452b0ee9fd82beaf276a87dd796fb55cbae14e"
checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
[[package]]
name = "xattr"
@ -6635,9 +6616,9 @@ dependencies = [
[[package]]
name = "yoke"
version = "0.7.3"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "65e71b2e4f287f467794c671e2b8f8a5f3716b3c829079a1c44740148eff07e4"
checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5"
dependencies = [
"serde",
"stable_deref_trait",
@ -6647,13 +6628,13 @@ dependencies = [
[[package]]
name = "yoke-derive"
version = "0.7.3"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e6936f0cce458098a201c245a11bef556c6a0181129c7034d10d76d1ec3a2b8"
checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"synstructure",
]
@ -6674,35 +6655,35 @@ checksum = "15e934569e47891f7d9411f1a451d947a60e000ab3bd24fbb970f000387d1b3b"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]
[[package]]
name = "zerofrom"
version = "0.1.3"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "655b0814c5c0b19ade497851070c640773304939a6c0fd5f5fb43da0696d05b7"
checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55"
dependencies = [
"zerofrom-derive",
]
[[package]]
name = "zerofrom-derive"
version = "0.1.3"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6a647510471d372f2e6c2e6b7219e44d8c574d24fdc11c610a61455782f18c3"
checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
"synstructure",
]
[[package]]
name = "zerovec"
version = "0.10.1"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eff4439ae91fb5c72b8abc12f3f2dbf51bd27e6eadb9f8a5bc8898dddb0e27ea"
checksum = "bb2cc8827d6c0994478a15c53f374f46fbd41bea663d809b14744bc42e6b109c"
dependencies = [
"yoke",
"zerofrom",
@ -6711,11 +6692,11 @@ dependencies = [
[[package]]
name = "zerovec-derive"
version = "0.10.1"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4e5997cbf58990550ef1f0e5124a05e47e1ebd33a84af25739be6031a62c20"
checksum = "97cf56601ee5052b4417d90c8755c6683473c926039908196cf35d99f893ebe7"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.64",
"syn 2.0.66",
]

View File

@ -1,3 +1,131 @@
Version 1.79.0 (2024-06-13)
==========================
<a id="1.79.0-Language"></a>
Language
--------
- [Stabilize inline `const {}` expressions.](https://github.com/rust-lang/rust/pull/104087/)
- [Prevent opaque types being instantiated twice with different regions within the same function.](https://github.com/rust-lang/rust/pull/116935/)
- [Stabilize WebAssembly target features that are in phase 4 and 5.](https://github.com/rust-lang/rust/pull/117457/)
- [Add the `redundant_lifetimes` lint to detect lifetimes which are semantically redundant.](https://github.com/rust-lang/rust/pull/118391/)
- [Stabilize the `unnameable_types` lint for public types that can't be named.](https://github.com/rust-lang/rust/pull/120144/)
- [Enable debuginfo in macros, and stabilize `-C collapse-macro-debuginfo` and `#[collapse_debuginfo]`.](https://github.com/rust-lang/rust/pull/120845/)
- [Propagate temporary lifetime extension into `if` and `match` expressions.](https://github.com/rust-lang/rust/pull/121346/)
- [Restrict promotion of `const fn` calls.](https://github.com/rust-lang/rust/pull/121557/)
- [Warn against refining impls of crate-private traits with `refining_impl_trait` lint.](https://github.com/rust-lang/rust/pull/121720/)
- [Stabilize associated type bounds (RFC 2289).](https://github.com/rust-lang/rust/pull/122055/)
- [Stabilize importing `main` from other modules or crates.](https://github.com/rust-lang/rust/pull/122060/)
- [Check return types of function types for well-formedness](https://github.com/rust-lang/rust/pull/115538)
- [Rework `impl Trait` lifetime inference](https://github.com/rust-lang/rust/pull/116891/)
- [Change inductive trait solver cycles to be ambiguous](https://github.com/rust-lang/rust/pull/122791)
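Two of the Language items above lend themselves to a quick illustration. The following is a minimal editorial sketch (not part of this diff; the function and values are made up) showing an inline `const {}` expression and an associated type bound, both stable as of 1.79.0:

```rust
// Editorial sketch, not part of the diff: inline `const {}` expressions and
// associated type bounds (RFC 2289), both stabilized in Rust 1.79.0.
fn byte_lengths<I>(iter: I) -> Vec<usize>
where
    I: IntoIterator<Item: AsRef<str>>, // associated type bound on `Item`
{
    iter.into_iter().map(|s| s.as_ref().len()).collect()
}

fn main() {
    // The const block is evaluated at compile time, even inside a non-const fn.
    let page_size: usize = const { 16 * 1024 / 4 };
    println!("{page_size} {:?}", byte_lengths(["a", "bc"]));
}
```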
<a id="1.79.0-Compiler"></a>
Compiler
--------
- [Define `-C strip` to only affect binaries, not artifacts like `.pdb`.](https://github.com/rust-lang/rust/pull/115120/)
- [Stabilize `-Crelro-level` for controlling runtime link hardening.](https://github.com/rust-lang/rust/pull/121694/)
- [Stabilize checking of `cfg` names and values at compile-time with `--check-cfg`.](https://github.com/rust-lang/rust/pull/123501/)
  *Note that this only stabilizes the compiler part; the Cargo part is still unstable in this release.*
- [Add `aarch64-apple-visionos` and `aarch64-apple-visionos-sim` tier 3 targets.](https://github.com/rust-lang/rust/pull/121419/)
- [Add `riscv32ima-unknown-none-elf` tier 3 target.](https://github.com/rust-lang/rust/pull/122696/)
- [Promote several Windows targets to tier 2](https://github.com/rust-lang/rust/pull/121712): `aarch64-pc-windows-gnullvm`, `i686-pc-windows-gnullvm`, and `x86_64-pc-windows-gnullvm`.
Refer to Rust's [platform support page][platform-support-doc]
for more information on Rust's tiered platform support.
<a id="1.79.0-Libraries"></a>
Libraries
---------
- [Implement `FromIterator` for `(impl Default + Extend, impl Default + Extend)`.](https://github.com/rust-lang/rust/pull/107462/)
- [Implement `{Div,Rem}Assign<NonZero<X>>` on `X`.](https://github.com/rust-lang/rust/pull/121952/)
- [Document overrides of `clone_from()` in core/std.](https://github.com/rust-lang/rust/pull/122201/)
- [Link MSVC default lib in core.](https://github.com/rust-lang/rust/pull/122268/)
- [Caution against using `transmute` between pointers and integers.](https://github.com/rust-lang/rust/pull/122379/)
- [Enable frame pointers for the standard library.](https://github.com/rust-lang/rust/pull/122646/)
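As a small editorial aside (not part of the diff; the numbers are arbitrary), the new `{Div,Rem}Assign<NonZero<X>>` impls let a primitive be divided in place by a `NonZero` value with no zero check at the call site:

```rust
// Editorial sketch: `DivAssign`/`RemAssign` by `NonZero<u32>` directly on `u32` (Rust 1.79.0).
use std::num::NonZero;

fn main() {
    let mut total: u32 = 1_000;
    let divisor = NonZero::new(7u32).expect("7 is non-zero");
    total /= divisor; // DivAssign<NonZero<u32>> for u32: no division-by-zero branch needed
    total %= divisor; // RemAssign<NonZero<u32>> for u32
    println!("{total}");
}
```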
<a id="1.79.0-Stabilized-APIs"></a>
Stabilized APIs
---------------
- [`{integer}::unchecked_add`](https://doc.rust-lang.org/stable/core/primitive.i32.html#method.unchecked_add)
- [`{integer}::unchecked_mul`](https://doc.rust-lang.org/stable/core/primitive.i32.html#method.unchecked_mul)
- [`{integer}::unchecked_sub`](https://doc.rust-lang.org/stable/core/primitive.i32.html#method.unchecked_sub)
- [`<[T]>::split_at_unchecked`](https://doc.rust-lang.org/stable/core/primitive.slice.html#method.split_at_unchecked)
- [`<[T]>::split_at_mut_unchecked`](https://doc.rust-lang.org/stable/core/primitive.slice.html#method.split_at_mut_unchecked)
- [`<[u8]>::utf8_chunks`](https://doc.rust-lang.org/stable/core/primitive.slice.html#method.utf8_chunks)
- [`str::Utf8Chunks`](https://doc.rust-lang.org/stable/core/str/struct.Utf8Chunks.html)
- [`str::Utf8Chunk`](https://doc.rust-lang.org/stable/core/str/struct.Utf8Chunk.html)
- [`<*const T>::is_aligned`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.is_aligned)
- [`<*mut T>::is_aligned`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.is_aligned-1)
- [`NonNull::is_aligned`](https://doc.rust-lang.org/stable/core/ptr/struct.NonNull.html#method.is_aligned)
- [`<*const [T]>::len`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.len)
- [`<*mut [T]>::len`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.len-1)
- [`<*const [T]>::is_empty`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.is_empty)
- [`<*mut [T]>::is_empty`](https://doc.rust-lang.org/stable/core/primitive.pointer.html#method.is_empty-1)
- [`NonNull::<[T]>::is_empty`](https://doc.rust-lang.org/stable/core/ptr/struct.NonNull.html#method.is_empty)
- [`CStr::count_bytes`](https://doc.rust-lang.org/stable/core/ffi/c_str/struct.CStr.html#method.count_bytes)
- [`io::Error::downcast`](https://doc.rust-lang.org/stable/std/io/struct.Error.html#method.downcast)
- [`num::NonZero<T>`](https://doc.rust-lang.org/stable/core/num/struct.NonZero.html)
- [`path::absolute`](https://doc.rust-lang.org/stable/std/path/fn.absolute.html)
- [`proc_macro::Literal::byte_character`](https://doc.rust-lang.org/stable/proc_macro/struct.Literal.html#method.byte_character)
- [`proc_macro::Literal::c_string`](https://doc.rust-lang.org/stable/proc_macro/struct.Literal.html#method.c_string)
These APIs are now stable in const contexts:
- [`Atomic*::into_inner`](https://doc.rust-lang.org/stable/core/sync/atomic/struct.AtomicUsize.html#method.into_inner)
- [`io::Cursor::new`](https://doc.rust-lang.org/stable/std/io/struct.Cursor.html#method.new)
- [`io::Cursor::get_ref`](https://doc.rust-lang.org/stable/std/io/struct.Cursor.html#method.get_ref)
- [`io::Cursor::position`](https://doc.rust-lang.org/stable/std/io/struct.Cursor.html#method.position)
- [`io::empty`](https://doc.rust-lang.org/stable/std/io/fn.empty.html)
- [`io::repeat`](https://doc.rust-lang.org/stable/std/io/fn.repeat.html)
- [`io::sink`](https://doc.rust-lang.org/stable/std/io/fn.sink.html)
- [`panic::Location::caller`](https://doc.rust-lang.org/stable/std/panic/struct.Location.html#method.caller)
- [`panic::Location::file`](https://doc.rust-lang.org/stable/std/panic/struct.Location.html#method.file)
- [`panic::Location::line`](https://doc.rust-lang.org/stable/std/panic/struct.Location.html#method.line)
- [`panic::Location::column`](https://doc.rust-lang.org/stable/std/panic/struct.Location.html#method.column)
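To make the additions above concrete, here is an editorial sketch (not part of the diff; the byte string and constants are arbitrary) exercising `<[u8]>::utf8_chunks`, `{integer}::unchecked_add`, and the now const-stable `io::Cursor::new`:

```rust
// Editorial sketch of a few APIs stabilized (or const-stabilized) in 1.79.0.
use std::io::Cursor;

// `Cursor::new` and `Cursor::position` are now usable in const contexts.
const EMPTY: Cursor<&[u8]> = Cursor::new(b"");

fn main() {
    // Lossily render a byte slice that is not valid UTF-8.
    let bytes = b"hello\xFFworld";
    for chunk in bytes.utf8_chunks() {
        print!("{}", chunk.valid());
        if !chunk.invalid().is_empty() {
            print!("\u{FFFD}"); // one replacement character per invalid sequence
        }
    }
    println!();

    // SAFETY: 2 + 2 cannot overflow an i32.
    let four = unsafe { 2_i32.unchecked_add(2) };
    println!("{four} {}", EMPTY.position());
}
```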
<a id="1.79.0-Cargo"></a>
Cargo
-----
- [Prevent dashes in `lib.name`, always normalizing to `_`.](https://github.com/rust-lang/cargo/pull/12783/)
- [Stabilize MSRV-aware version requirement selection in `cargo add`.](https://github.com/rust-lang/cargo/pull/13608/)
- [Switch to using `gitoxide` by default for listing files.](https://github.com/rust-lang/cargo/pull/13696/)
- [Error on `[project]` in Edition 2024; `cargo fix --edition` will change it to `[package]`.](https://github.com/rust-lang/cargo/pull/13747/)
<a id="1.79.0-Rustdoc"></a>
Rustdoc
-----
- [Always display stability version even if it's the same as the containing item.](https://github.com/rust-lang/rust/pull/118441/)
- [Show a single search result for items with multiple paths.](https://github.com/rust-lang/rust/pull/119912/)
- [Support typing `/` in docs to begin a search.](https://github.com/rust-lang/rust/pull/123355/)
<a id="1.79.0-Misc"></a>
Misc
----
<a id="1.79.0-Compatibility-Notes"></a>
Compatibility Notes
-------------------
- [Update the minimum external LLVM to 17.](https://github.com/rust-lang/rust/pull/122649/)
- [`RustcEncodable` and `RustcDecodable` are soft-destabilized, to be removed
from the prelude in next edition.](https://github.com/rust-lang/rust/pull/116016/)
- [The `wasm_c_abi` future-incompatibility lint will warn about use of the
non-spec-compliant C ABI.](https://github.com/rust-lang/rust/pull/117918/)
Use `wasm-bindgen v0.2.88` to generate forward-compatible bindings.
- [Check return types of function types for well-formedness](https://github.com/rust-lang/rust/pull/115538)
Version 1.78.0 (2024-05-02)
==========================

View File

@ -9,7 +9,6 @@ rustc_data_structures = { path = "../rustc_data_structures", optional = true }
rustc_macros = { path = "../rustc_macros", optional = true }
rustc_serialize = { path = "../rustc_serialize", optional = true }
rustc_span = { path = "../rustc_span", optional = true }
smallvec = { version = "1.8.1" }
# tidy-alphabetical-end
[features]

View File

@ -9,7 +9,6 @@ icu_list = "1.2"
icu_locid = "1.2"
icu_locid_transform = "1.3.2"
icu_provider = "1.2"
icu_provider_adapters = "1.2"
zerovec = "0.10.0"
# tidy-alphabetical-end

View File

@ -832,9 +832,10 @@ fn codegen_stmt<'tcx>(
let val = match null_op {
NullOp::SizeOf => layout.size.bytes(),
NullOp::AlignOf => layout.align.abi.bytes(),
NullOp::OffsetOf(fields) => {
layout.offset_of_subfield(fx, fields.iter()).bytes()
}
NullOp::OffsetOf(fields) => fx
.tcx
.offset_of_subfield(ParamEnv::reveal_all(), layout, fields.iter())
.bytes(),
NullOp::UbChecks => {
let val = fx.tcx.sess.ub_checks();
let val = CValue::by_val(

View File

@ -110,7 +110,7 @@ pub(crate) fn codegen_const_value<'tcx>(
if fx.clif_type(layout.ty).is_some() {
return CValue::const_val(fx, layout, int);
} else {
let raw_val = int.size().truncate(int.assert_bits(int.size()));
let raw_val = int.size().truncate(int.to_bits(int.size()));
let val = match int.size().bytes() {
1 => fx.bcx.ins().iconst(types::I8, raw_val as i64),
2 => fx.bcx.ins().iconst(types::I16, raw_val as i64),
@ -501,12 +501,12 @@ pub(crate) fn mir_operand_get_const_val<'tcx>(
Ordering::Equal => scalar_int,
Ordering::Less => match ty.kind() {
ty::Uint(_) => ScalarInt::try_from_uint(
scalar_int.assert_uint(scalar_int.size()),
scalar_int.to_uint(scalar_int.size()),
fx.layout_of(*ty).size,
)
.unwrap(),
ty::Int(_) => ScalarInt::try_from_int(
scalar_int.assert_int(scalar_int.size()),
scalar_int.to_int(scalar_int.size()),
fx.layout_of(*ty).size,
)
.unwrap(),

View File

@ -902,7 +902,7 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
.span_fatal(span, "Index argument for `_mm_cmpestri` is not a constant");
};
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
let imm8 = imm8.to_u8();
codegen_inline_asm_inner(
fx,
@ -955,7 +955,7 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
.span_fatal(span, "Index argument for `_mm_cmpestrm` is not a constant");
};
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
let imm8 = imm8.to_u8();
codegen_inline_asm_inner(
fx,
@ -1003,7 +1003,7 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
);
};
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
let imm8 = imm8.to_u8();
codegen_inline_asm_inner(
fx,
@ -1040,7 +1040,7 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
);
};
let imm8 = imm8.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", imm8));
let imm8 = imm8.to_u8();
codegen_inline_asm_inner(
fx,
@ -1195,7 +1195,7 @@ pub(crate) fn codegen_x86_llvm_intrinsic_call<'tcx>(
.span_fatal(span, "Func argument for `_mm_sha1rnds4_epu32` is not a constant");
};
let func = func.try_to_u8().unwrap_or_else(|_| panic!("kind not scalar: {:?}", func));
let func = func.to_u8();
codegen_inline_asm_inner(
fx,

View File

@ -147,8 +147,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
let total_len = lane_count * 2;
let indexes =
idx.iter().map(|idx| idx.unwrap_leaf().try_to_u32().unwrap()).collect::<Vec<u32>>();
let indexes = idx.iter().map(|idx| idx.unwrap_leaf().to_u32()).collect::<Vec<u32>>();
for &idx in &indexes {
assert!(u64::from(idx) < total_len, "idx {} out of range 0..{}", idx, total_len);
@ -282,9 +281,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
fx.tcx.dcx().span_fatal(span, "Index argument for `simd_insert` is not a constant");
};
let idx: u32 = idx_const
.try_to_u32()
.unwrap_or_else(|_| panic!("kind not scalar: {:?}", idx_const));
let idx: u32 = idx_const.to_u32();
let (lane_count, _lane_ty) = base.layout().ty.simd_size_and_type(fx.tcx);
if u64::from(idx) >= lane_count {
fx.tcx.dcx().span_fatal(
@ -330,9 +327,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
return;
};
let idx = idx_const
.try_to_u32()
.unwrap_or_else(|_| panic!("kind not scalar: {:?}", idx_const));
let idx = idx_const.to_u32();
let (lane_count, _lane_ty) = v.layout().ty.simd_size_and_type(fx.tcx);
if u64::from(idx) >= lane_count {
fx.tcx.dcx().span_fatal(

View File

@ -327,7 +327,7 @@ impl<'tcx> CValue<'tcx> {
let val = match layout.ty.kind() {
ty::Uint(UintTy::U128) | ty::Int(IntTy::I128) => {
let const_val = const_val.assert_bits(layout.size);
let const_val = const_val.to_bits(layout.size);
let lsb = fx.bcx.ins().iconst(types::I64, const_val as u64 as i64);
let msb = fx.bcx.ins().iconst(types::I64, (const_val >> 64) as u64 as i64);
fx.bcx.ins().iconcat(lsb, msb)
@ -339,7 +339,7 @@ impl<'tcx> CValue<'tcx> {
| ty::Ref(..)
| ty::RawPtr(..)
| ty::FnPtr(..) => {
let raw_val = const_val.size().truncate(const_val.assert_bits(layout.size));
let raw_val = const_val.size().truncate(const_val.to_bits(layout.size));
fx.bcx.ins().iconst(clif_ty, raw_val as i64)
}
ty::Float(FloatTy::F32) => {

View File

@ -166,7 +166,7 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
match cv {
Scalar::Int(int) => {
let data = int.assert_bits(layout.size(self));
let data = int.to_bits(layout.size(self));
// FIXME(antoyo): there's some issues with using the u128 code that follows, so hard-code
// the paths for floating-point values.

View File

@ -244,7 +244,7 @@ impl<'ll, 'tcx> ConstMethods<'tcx> for CodegenCx<'ll, 'tcx> {
let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
match cv {
Scalar::Int(int) => {
let data = int.assert_bits(layout.size(self));
let data = int.to_bits(layout.size(self));
let llval = self.const_uint_big(self.type_ix(bitsize), data);
if matches!(layout.primitive(), Pointer(_)) {
unsafe { llvm::LLVMConstIntToPtr(llval, llty) }

View File

@ -1109,10 +1109,12 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), callee_ty.fn_sig(tcx));
let arg_tys = sig.inputs();
// Vectors must be immediates (non-power-of-2 #[repr(packed)] are not)
for (ty, arg) in arg_tys.iter().zip(args) {
if ty.is_simd() && !matches!(arg.val, OperandValue::Immediate(_)) {
return_error!(InvalidMonomorphization::SimdArgument { span, name, ty: *ty });
// Sanity-check: all vector arguments must be immediates.
if cfg!(debug_assertions) {
for (ty, arg) in arg_tys.iter().zip(args) {
if ty.is_simd() {
assert!(matches!(arg.val, OperandValue::Immediate(_)));
}
}
}
@ -1221,7 +1223,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
.iter()
.enumerate()
.map(|(arg_idx, val)| {
let idx = val.unwrap_leaf().try_to_i32().unwrap();
let idx = val.unwrap_leaf().to_i32();
if idx >= i32::try_from(total_len).unwrap() {
bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds {
span,

View File

@ -163,7 +163,7 @@ pub fn asm_const_to_str<'tcx>(
let mir::ConstValue::Scalar(scalar) = const_value else {
span_bug!(sp, "expected Scalar for promoted asm const, but got {:#?}", const_value)
};
let value = scalar.assert_bits(ty_and_layout.size);
let value = scalar.assert_scalar_int().to_bits(ty_and_layout.size);
match ty_and_layout.ty.kind() {
ty::Uint(_) => value.to_string(),
ty::Int(int_ty) => match int_ty.normalize(tcx.sess.target.pointer_width) {

View File

@ -680,7 +680,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
bx.cx().const_usize(val)
}
mir::NullOp::OffsetOf(fields) => {
let val = layout.offset_of_subfield(bx.cx(), fields.iter()).bytes();
let val = bx
.tcx()
.offset_of_subfield(bx.param_env(), layout, fields.iter())
.bytes();
bx.cx().const_usize(val)
}
mir::NullOp::UbChecks => {

View File

@ -95,10 +95,10 @@ fn const_to_valtree_inner<'tcx>(
}
ty::Bool | ty::Int(_) | ty::Uint(_) | ty::Float(_) | ty::Char => {
let val = ecx.read_immediate(place)?;
let val = val.to_scalar();
let val = val.to_scalar_int().unwrap();
*num_nodes += 1;
Ok(ty::ValTree::Leaf(val.assert_int()))
Ok(ty::ValTree::Leaf(val))
}
ty::Pat(base, ..) => {
@ -125,7 +125,7 @@ fn const_to_valtree_inner<'tcx>(
let val = val.to_scalar();
// We are in the CTFE machine, so ptr-to-int casts will fail.
// This can only be `Ok` if `val` already is an integer.
let Ok(val) = val.try_to_int() else {
let Ok(val) = val.try_to_scalar_int() else {
return Err(ValTreeCreationError::NonSupportedType);
};
// It's just a ScalarInt!
@ -411,7 +411,7 @@ fn valtree_into_mplace<'tcx>(
ty::Adt(def, _) if def.is_enum() => {
// First element of valtree corresponds to variant
let scalar_int = branches[0].unwrap_leaf();
let variant_idx = VariantIdx::from_u32(scalar_int.try_to_u32().unwrap());
let variant_idx = VariantIdx::from_u32(scalar_int.to_u32());
let variant = def.variant(variant_idx);
debug!(?variant);

View File

@ -123,14 +123,14 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// (`tag_bits` itself is only used for error messages below.)
let tag_bits = tag_val
.to_scalar()
.try_to_int()
.try_to_scalar_int()
.map_err(|dbg_val| err_ub!(InvalidTag(dbg_val)))?
.assert_bits(tag_layout.size);
.to_bits(tag_layout.size);
// Cast bits from tag layout to discriminant layout.
// After the checks we did above, this cannot fail, as
// discriminants are int-like.
let discr_val = self.int_to_int_or_float(&tag_val, discr_layout).unwrap();
let discr_bits = discr_val.to_scalar().assert_bits(discr_layout.size);
let discr_bits = discr_val.to_scalar().to_bits(discr_layout.size)?;
// Convert discriminant to variant index, and catch invalid discriminants.
let index = match *ty.kind() {
ty::Adt(adt, _) => {
@ -152,7 +152,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// discriminant (encoded in niche/tag) and variant index are the same.
let variants_start = niche_variants.start().as_u32();
let variants_end = niche_variants.end().as_u32();
let variant = match tag_val.try_to_int() {
let variant = match tag_val.try_to_scalar_int() {
Err(dbg_val) => {
// So this is a pointer then, and casting to an int failed.
// Can only happen during CTFE.
@ -167,7 +167,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
untagged_variant
}
Ok(tag_bits) => {
let tag_bits = tag_bits.assert_bits(tag_layout.size);
let tag_bits = tag_bits.to_bits(tag_layout.size);
// We need to use machine arithmetic to get the relative variant idx:
// variant_index_relative = tag_val - niche_start_val
let tag_val = ImmTy::from_uint(tag_bits, tag_layout);
@ -175,7 +175,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let variant_index_relative_val =
self.binary_op(mir::BinOp::Sub, &tag_val, &niche_start_val)?;
let variant_index_relative =
variant_index_relative_val.to_scalar().assert_bits(tag_val.layout.size);
variant_index_relative_val.to_scalar().to_bits(tag_val.layout.size)?;
// Check if this is in the range that indicates an actual discriminant.
if variant_index_relative <= u128::from(variants_end - variants_start) {
let variant_index_relative = u32::try_from(variant_index_relative)
@ -294,8 +294,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
ImmTy::from_uint(variant_index_relative, tag_layout);
let tag = self
.binary_op(mir::BinOp::Add, &variant_index_relative_val, &niche_start_val)?
.to_scalar()
.assert_int();
.to_scalar_int()?;
Ok(Some((tag, tag_field)))
}
}

View File

@ -519,7 +519,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`.
// First, check x % y != 0 (or if that computation overflows).
let rem = self.binary_op(BinOp::Rem, a, b)?;
if rem.to_scalar().assert_bits(a.layout.size) != 0 {
if rem.to_scalar().to_bits(a.layout.size)? != 0 {
throw_ub_custom!(
fluent::const_eval_exact_div_has_remainder,
a = format!("{a}"),

View File

@ -1344,7 +1344,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
/// Test if this value might be null.
/// If the machine does not support ptr-to-int casts, this is conservative.
pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<'tcx, bool> {
Ok(match scalar.try_to_int() {
Ok(match scalar.try_to_scalar_int() {
Ok(int) => int.is_null(),
Err(_) => {
// Can only happen during CTFE.

View File

@ -87,6 +87,12 @@ impl<Prov: Provenance> Immediate<Prov> {
}
}
#[inline]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn to_scalar_int(self) -> ScalarInt {
self.to_scalar().try_to_scalar_int().unwrap()
}
#[inline]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn to_scalar_pair(self) -> (Scalar<Prov>, Scalar<Prov>) {
@ -219,19 +225,11 @@ impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
Self::from_scalar(Scalar::from(s), layout)
}
#[inline]
pub fn try_from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
Some(Self::from_scalar(Scalar::try_from_uint(i, layout.size)?, layout))
}
#[inline]
pub fn from_uint(i: impl Into<u128>, layout: TyAndLayout<'tcx>) -> Self {
Self::from_scalar(Scalar::from_uint(i, layout.size), layout)
}
#[inline]
pub fn try_from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Option<Self> {
Some(Self::from_scalar(Scalar::try_from_int(i, layout.size)?, layout))
}
#[inline]
pub fn from_int(i: impl Into<i128>, layout: TyAndLayout<'tcx>) -> Self {
Self::from_scalar(Scalar::from_int(i, layout.size), layout)
@ -276,7 +274,8 @@ impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
#[inline]
pub fn to_const_int(self) -> ConstInt {
assert!(self.layout.ty.is_integral());
let int = self.to_scalar().assert_int();
let int = self.imm.to_scalar_int();
assert_eq!(int.size(), self.layout.size);
ConstInt::new(int, self.layout.ty.is_signed(), self.layout.ty.is_ptr_sized_integral())
}

View File

@ -95,10 +95,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
let l = left.to_scalar_int()?;
let r = right.to_scalar_int()?;
// Prepare to convert the values to signed or unsigned form.
let l_signed = || l.assert_int(left.layout.size);
let l_unsigned = || l.assert_uint(left.layout.size);
let r_signed = || r.assert_int(right.layout.size);
let r_unsigned = || r.assert_uint(right.layout.size);
let l_signed = || l.to_int(left.layout.size);
let l_unsigned = || l.to_uint(left.layout.size);
let r_signed = || r.to_int(right.layout.size);
let r_unsigned = || r.to_uint(right.layout.size);
let throw_ub_on_overflow = match bin_op {
AddUnchecked => Some(sym::unchecked_add),

View File

@ -499,13 +499,14 @@ where
&self,
mplace: &MPlaceTy<'tcx, M::Provenance>,
) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::Provenance>, u64)> {
// Basically we just transmute this place into an array following simd_size_and_type.
// (Transmuting is okay since this is an in-memory place. We also double-check the size
// stays the same.)
// Basically we want to transmute this place into an array following simd_size_and_type.
let (len, e_ty) = mplace.layout.ty.simd_size_and_type(*self.tcx);
let array = Ty::new_array(self.tcx.tcx, e_ty, len);
let layout = self.layout_of(array)?;
let mplace = mplace.transmute(layout, self)?;
// Some SIMD types have padding, so `len` many `e_ty` does not cover the entire place.
// Therefore we cannot transmute, and instead we project at offset 0, which side-steps
// the size check.
let array_layout = self.layout_of(Ty::new_array(self.tcx.tcx, e_ty, len))?;
assert!(array_layout.size <= mplace.layout.size);
let mplace = mplace.offset(Size::ZERO, array_layout, self)?;
Ok((mplace, len))
}

View File

@ -81,6 +81,8 @@ pub trait Projectable<'tcx, Prov: Provenance>: Sized + std::fmt::Debug {
ecx: &InterpCx<'tcx, M>,
) -> InterpResult<'tcx, Self> {
assert!(layout.is_sized());
// We sometimes do pointer arithmetic with this function, disregarding the source type.
// So we don't check the sizes here.
self.offset_with_meta(offset, OffsetMode::Inbounds, MemPlaceMeta::None, layout, ecx)
}

View File

@ -253,7 +253,10 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
Scalar::from_target_usize(val, self)
}
mir::NullOp::OffsetOf(fields) => {
let val = layout.offset_of_subfield(self, fields.iter()).bytes();
let val = self
.tcx
.offset_of_subfield(self.param_env, layout, fields.iter())
.bytes();
Scalar::from_target_usize(val, self)
}
mir::NullOp::UbChecks => Scalar::from_bool(self.tcx.sess.ub_checks()),

View File

@ -653,8 +653,8 @@ impl<'rt, 'tcx, M: Machine<'tcx>> ValidityVisitor<'rt, 'tcx, M> {
let WrappingRange { start, end } = valid_range;
let max_value = size.unsigned_int_max();
assert!(end <= max_value);
let bits = match scalar.try_to_int() {
Ok(int) => int.assert_bits(size),
let bits = match scalar.try_to_scalar_int() {
Ok(int) => int.to_bits(size),
Err(_) => {
// So this is a pointer then, and casting to an int failed.
// Can only happen during CTFE.

View File

@ -16,7 +16,6 @@ libc = "0.2"
measureme = "11"
rustc-hash = "1.1.0"
rustc-rayon = { version = "0.5.0", optional = true }
rustc-rayon-core = { version = "0.5.0", optional = true }
rustc_arena = { path = "../rustc_arena" }
rustc_graphviz = { path = "../rustc_graphviz" }
rustc_index = { path = "../rustc_index", package = "rustc_index" }
@ -52,5 +51,5 @@ portable-atomic = "1.5.1"
[features]
# tidy-alphabetical-start
rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rustc-rayon", "rustc-rayon-core"]
rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rustc-rayon"]
# tidy-alphabetical-end

View File

@ -19,7 +19,6 @@ rustc_errors = { path = "../rustc_errors" }
rustc_expand = { path = "../rustc_expand" }
rustc_feature = { path = "../rustc_feature" }
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
rustc_hir = { path = "../rustc_hir" }
rustc_hir_analysis = { path = "../rustc_hir_analysis" }
rustc_hir_pretty = { path = "../rustc_hir_pretty" }
rustc_hir_typeck = { path = "../rustc_hir_typeck" }

View File

@ -30,11 +30,11 @@ impl MutVisitor for Marker {
// it's some advanced case with macro-generated macros. So if we cache the marked version
// of that context once, we'll typically have a 100% cache hit rate after that.
let Marker(expn_id, transparency, ref mut cache) = *self;
let data = span.data();
let marked_ctxt = *cache
.entry(data.ctxt)
.or_insert_with(|| data.ctxt.apply_mark(expn_id.to_expn_id(), transparency));
*span = data.with_ctxt(marked_ctxt);
span.update_ctxt(|ctxt| {
*cache
.entry(ctxt)
.or_insert_with(|| ctxt.apply_mark(expn_id.to_expn_id(), transparency))
});
}
}

View File

@ -559,6 +559,8 @@ declare_features! (
(unstable, offset_of_enum, "1.75.0", Some(120141)),
/// Allows using multiple nested field accesses in offset_of!
(unstable, offset_of_nested, "1.77.0", Some(120140)),
/// Allows using fields with slice type in offset_of!
(unstable, offset_of_slice, "CURRENT_RUSTC_VERSION", Some(126151)),
/// Allows using `#[optimize(X)]`.
(unstable, optimize_attribute, "1.34.0", Some(54882)),
/// Allows postfix match `expr.match { ... }`
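The hunk above registers the new unstable `offset_of_slice` gate (tracked under issue 126151 per the declaration), which the `require_type_has_static_alignment` changes further down support. Below is a rough, nightly-only editorial sketch of the kind of code the gate is intended to accept; the struct and field names are made up and the behavior is subject to the unstable feature:

```rust
// Editorial sketch, nightly-only: taking the offset of a slice-typed tail field,
// gated behind the new `offset_of_slice` feature added in this diff.
#![feature(offset_of_slice)]
use std::mem::offset_of;

#[repr(C)]
struct Packet {
    len: u16,
    payload: [u8], // unsized, but with statically known alignment
}

fn main() {
    // The offset is known at compile time even though `payload` is unsized.
    println!("{}", offset_of!(Packet, payload));
}
```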

View File

@ -827,7 +827,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
) else {
return;
};
let in_scope_methods = self.probe_for_name_many(
let Ok(in_scope_methods) = self.probe_for_name_many(
probe::Mode::MethodCall,
path.ident,
Some(expected),
@ -835,11 +836,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self_ty,
deref.hir_id,
probe::ProbeScope::TraitsInScope,
);
) else {
return;
};
let other_methods_in_scope: Vec<_> =
in_scope_methods.iter().filter(|c| c.item.def_id != pick.item.def_id).collect();
let all_methods = self.probe_for_name_many(
let Ok(all_methods) = self.probe_for_name_many(
probe::Mode::MethodCall,
path.ident,
Some(expected),
@ -847,7 +851,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self_ty,
deref.hir_id,
probe::ProbeScope::AllTraits,
);
) else {
return;
};
let suggestions: Vec<_> = all_methods
.into_iter()
.filter(|c| c.item.def_id != pick.item.def_id)

View File

@ -3363,7 +3363,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let field_ty = self.field_ty(expr.span, field, args);
// FIXME: DSTs with static alignment should be allowed
// Enums are anyway always sized. But just to safeguard against future
// language extensions, let's double-check.
self.require_type_is_sized(field_ty, expr.span, ObligationCauseCode::Misc);
if field.vis.is_accessible_from(sub_def_scope, self.tcx) {
@ -3391,8 +3392,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
let field_ty = self.field_ty(expr.span, field, args);
// FIXME: DSTs with static alignment should be allowed
self.require_type_is_sized(field_ty, expr.span, ObligationCauseCode::Misc);
if self.tcx.features().offset_of_slice {
self.require_type_has_static_alignment(
field_ty,
expr.span,
ObligationCauseCode::Misc,
);
} else {
self.require_type_is_sized(
field_ty,
expr.span,
ObligationCauseCode::Misc,
);
}
if field.vis.is_accessible_from(def_scope, self.tcx) {
self.tcx.check_stability(field.did, Some(expr.hir_id), expr.span, None);
@ -3412,10 +3424,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let Ok(index) = field.as_str().parse::<usize>()
&& field.name == sym::integer(index)
{
for ty in tys.iter().take(index + 1) {
self.require_type_is_sized(ty, expr.span, ObligationCauseCode::Misc);
}
if let Some(&field_ty) = tys.get(index) {
if self.tcx.features().offset_of_slice {
self.require_type_has_static_alignment(
field_ty,
expr.span,
ObligationCauseCode::Misc,
);
} else {
self.require_type_is_sized(
field_ty,
expr.span,
ObligationCauseCode::Misc,
);
}
field_indices.push((FIRST_VARIANT, index.into()));
current_container = field_ty;

View File

@ -386,6 +386,26 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
pub fn require_type_has_static_alignment(
&self,
ty: Ty<'tcx>,
span: Span,
code: traits::ObligationCauseCode<'tcx>,
) {
if !ty.references_error() {
let tail =
self.tcx.struct_tail_with_normalize(ty, |ty| self.normalize(span, ty), || {});
// Sized types have static alignment, and so do slices.
if tail.is_trivially_sized(self.tcx) || matches!(tail.kind(), ty::Slice(..)) {
// Nothing else is required here.
} else {
// We can't be sure, let's require full `Sized`.
let lang_item = self.tcx.require_lang_item(LangItem::Sized, None);
self.require_type_meets(ty, span, code, lang_item);
}
}
}
pub fn register_bound(
&self,
ty: Ty<'tcx>,

View File

@ -306,7 +306,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self_ty: Ty<'tcx>,
scope_expr_id: HirId,
scope: ProbeScope,
) -> Vec<Candidate<'tcx>> {
) -> Result<Vec<Candidate<'tcx>>, MethodError<'tcx>> {
self.probe_op(
item_name.span,
mode,
@ -324,7 +324,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.collect())
},
)
.unwrap()
}
pub(crate) fn probe_op<OP, R>(

View File

@ -1640,10 +1640,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.unwrap_or(Ty::new_misc_error(self.tcx)),
);
// FIXME: `probe_for_name_many` searches for methods in inherent implementations,
// so it may return a candidate that doesn't belong to this `rcvr_ty`. We need to
// check whether the instantiated type matches the received one.
for _matched_method in self.probe_for_name_many(
let Ok(candidates) = self.probe_for_name_many(
Mode::MethodCall,
item_name,
None,
@ -1651,7 +1648,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
rcvr_ty,
source_expr.hir_id,
ProbeScope::TraitsInScope,
) {
) else {
return;
};
// FIXME: `probe_for_name_many` searches for methods in inherent implementations,
// so it may return a candidate that doesn't belong to this `rcvr_ty`. We need to
// check whether the instantiated type matches the received one.
for _matched_method in candidates {
// found a match, push to stack
stack_methods.push(rcvr_ty);
}

View File

@ -2385,11 +2385,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
min_len: u64,
) -> (Option<Ty<'tcx>>, Ty<'tcx>) {
let len = match len.eval(self.tcx, self.param_env, span) {
// FIXME(BoxyUwU): Assert the `Ty` is a `usize`?
Ok((_, val)) => val
.try_to_scalar()
.and_then(|scalar| scalar.try_to_int().ok())
.and_then(|int| int.try_to_target_usize(self.tcx).ok()),
.and_then(|scalar| scalar.try_to_scalar_int().ok())
.map(|int| int.to_target_usize(self.tcx)),
Err(ErrorHandled::Reported(..)) => {
let guar = self.error_scrutinee_unfixed_length(span);
return (Some(Ty::new_error(self.tcx, guar)), arr_ty);

View File

@ -7,11 +7,10 @@ edition = "2021"
proc-macro = true
[dependencies]
synstructure = "0.13.0"
syn = { version = "2.0.9", features = ["full"] }
proc-macro2 = "1"
quote = "1"
[features]
default = ["nightly"]
nightly = []
nightly = []

View File

@ -32,6 +32,15 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
diag.note("no two closures, even if identical, have the same type");
diag.help("consider boxing your closure and/or using it as a trait object");
}
(ty::Coroutine(def_id1, ..), ty::Coroutine(def_id2, ..))
if self.tcx.coroutine_is_async(def_id1)
&& self.tcx.coroutine_is_async(def_id2) =>
{
diag.note("no two async blocks, even if identical, have the same type");
diag.help(
"consider pinning your async block and casting it to a trait object",
);
}
(ty::Alias(ty::Opaque, ..), ty::Alias(ty::Opaque, ..)) => {
// Issue #63167
diag.note("distinct uses of `impl Trait` result in different opaque types");

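A small sketch of the situation the new note and help target: two textually identical async blocks still have distinct opaque types, and the suggested workaround is to pin and box both behind a common trait object. This illustrates the diagnostic's advice; it is not code from the compiler.

use std::future::Future;
use std::pin::Pin;

fn main() {
    // Assigning a second `async { 1 }` directly to `fut` would be a type mismatch,
    // since every async block has its own anonymous type. Coercing both to
    // `Pin<Box<dyn Future<Output = i32>>>` gives them one common type.
    let mut fut: Pin<Box<dyn Future<Output = i32>>> = Box::pin(async { 1 });
    fut = Box::pin(async { 1 });
    let _ = fut;
}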
View File

@ -11,7 +11,6 @@ either = "1.5.0"
field-offset = "0.3.5"
gsgdt = "0.1.2"
polonius-engine = "0.13.0"
rustc-rayon = { version = "0.5.0", optional = true }
rustc-rayon-core = { version = "0.5.0", optional = true }
rustc_apfloat = "0.2.0"
rustc_arena = { path = "../rustc_arena" }
@ -41,5 +40,5 @@ tracing = "0.1"
[features]
# tidy-alphabetical-start
rustc_use_parallel_compiler = ["rustc-rayon", "rustc-rayon-core"]
rustc_use_parallel_compiler = ["rustc-rayon-core"]
# tidy-alphabetical-end

View File

@ -84,11 +84,11 @@ impl<'tcx> ConstValue<'tcx> {
}
pub fn try_to_scalar_int(&self) -> Option<ScalarInt> {
self.try_to_scalar()?.try_to_int().ok()
self.try_to_scalar()?.try_to_scalar_int().ok()
}
pub fn try_to_bits(&self, size: Size) -> Option<u128> {
self.try_to_scalar_int()?.try_to_bits(size).ok()
Some(self.try_to_scalar_int()?.to_bits(size))
}
pub fn try_to_bool(&self) -> Option<bool> {
@ -96,7 +96,7 @@ impl<'tcx> ConstValue<'tcx> {
}
pub fn try_to_target_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
self.try_to_scalar_int()?.try_to_target_usize(tcx).ok()
Some(self.try_to_scalar_int()?.to_target_usize(tcx))
}
pub fn try_to_bits_for_ty(
@ -300,7 +300,7 @@ impl<'tcx> Const<'tcx> {
#[inline]
pub fn try_to_bits(self, size: Size) -> Option<u128> {
self.try_to_scalar_int()?.try_to_bits(size).ok()
Some(self.try_to_scalar_int()?.to_bits(size))
}
#[inline]
@ -367,7 +367,7 @@ impl<'tcx> Const<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<ScalarInt> {
self.try_eval_scalar(tcx, param_env)?.try_to_int().ok()
self.try_eval_scalar(tcx, param_env)?.try_to_scalar_int().ok()
}
#[inline]
@ -375,7 +375,7 @@ impl<'tcx> Const<'tcx> {
let int = self.try_eval_scalar_int(tcx, param_env)?;
let size =
tcx.layout_of(param_env.with_reveal_all_normalized(tcx).and(self.ty())).ok()?.size;
int.try_to_bits(size).ok()
Some(int.to_bits(size))
}
/// Panics if the value cannot be evaluated or doesn't contain a valid integer of the given type.
@ -391,7 +391,7 @@ impl<'tcx> Const<'tcx> {
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
) -> Option<u64> {
self.try_eval_scalar_int(tcx, param_env)?.try_to_target_usize(tcx).ok()
Some(self.try_eval_scalar_int(tcx, param_env)?.to_target_usize(tcx))
}
#[inline]

View File

@ -122,16 +122,12 @@ impl<Prov> Scalar<Prov> {
Scalar::Int(c.into())
}
#[inline]
pub fn try_from_uint(i: impl Into<u128>, size: Size) -> Option<Self> {
ScalarInt::try_from_uint(i, size).map(Scalar::Int)
}
#[inline]
pub fn from_uint(i: impl Into<u128>, size: Size) -> Self {
let i = i.into();
Self::try_from_uint(i, size)
ScalarInt::try_from_uint(i, size)
.unwrap_or_else(|| bug!("Unsigned value {:#x} does not fit in {} bits", i, size.bits()))
.into()
}
#[inline]
@ -164,16 +160,12 @@ impl<Prov> Scalar<Prov> {
Self::from_uint(i, cx.data_layout().pointer_size)
}
#[inline]
pub fn try_from_int(i: impl Into<i128>, size: Size) -> Option<Self> {
ScalarInt::try_from_int(i, size).map(Scalar::Int)
}
#[inline]
pub fn from_int(i: impl Into<i128>, size: Size) -> Self {
let i = i.into();
Self::try_from_int(i, size)
ScalarInt::try_from_int(i, size)
.unwrap_or_else(|| bug!("Signed value {:#x} does not fit in {} bits", i, size.bits()))
.into()
}
#[inline]
@ -227,7 +219,7 @@ impl<Prov> Scalar<Prov> {
}
/// This is almost certainly not the method you want! You should dispatch on the type
/// and use `to_{u8,u16,...}`/`scalar_to_ptr` to perform ptr-to-int / int-to-ptr casts as needed.
/// and use `to_{u8,u16,...}`/`to_pointer` to perform ptr-to-int / int-to-ptr casts as needed.
///
/// This method only exists for the benefit of low-level operations that truly need to treat the
/// scalar in whatever form it is.
@ -289,7 +281,7 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
/// The error type is `AllocId`, not `CtfeProvenance`, since `AllocId` is the "minimal"
/// component all provenance types must have.
#[inline]
pub fn try_to_int(self) -> Result<ScalarInt, Scalar<AllocId>> {
pub fn try_to_scalar_int(self) -> Result<ScalarInt, Scalar<AllocId>> {
match self {
Scalar::Int(int) => Ok(int),
Scalar::Ptr(ptr, sz) => {
@ -307,13 +299,13 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
#[inline(always)]
pub fn to_scalar_int(self) -> InterpResult<'tcx, ScalarInt> {
self.try_to_int().map_err(|_| err_unsup!(ReadPointerAsInt(None)).into())
self.try_to_scalar_int().map_err(|_| err_unsup!(ReadPointerAsInt(None)).into())
}
#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_int(self) -> ScalarInt {
self.try_to_int().unwrap()
pub fn assert_scalar_int(self) -> ScalarInt {
self.try_to_scalar_int().expect("got a pointer where a ScalarInt was expected")
}
/// This throws UB (instead of ICEing) on a size mismatch since size mismatches can arise in
@ -330,13 +322,6 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
})
}
#[inline(always)]
#[cfg_attr(debug_assertions, track_caller)] // only in debug builds due to perf (see #98980)
pub fn assert_bits(self, target_size: Size) -> u128 {
self.to_bits(target_size)
.unwrap_or_else(|_| panic!("assertion failed: {self:?} fits {target_size:?}"))
}
pub fn to_bool(self) -> InterpResult<'tcx, bool> {
let val = self.to_u8()?;
match val {

View File

@ -1033,8 +1033,8 @@ impl<'tcx> PatRangeBoundary<'tcx> {
if let (Some(a), Some(b)) = (a.try_to_scalar_int(), b.try_to_scalar_int()) {
let sz = ty.primitive_size(tcx);
let cmp = match ty.kind() {
ty::Uint(_) | ty::Char => a.assert_uint(sz).cmp(&b.assert_uint(sz)),
ty::Int(_) => a.assert_int(sz).cmp(&b.assert_int(sz)),
ty::Uint(_) | ty::Char => a.to_uint(sz).cmp(&b.to_uint(sz)),
ty::Int(_) => a.to_int(sz).cmp(&b.to_int(sz)),
_ => unreachable!(),
};
return Some(cmp);

View File

@ -376,7 +376,7 @@ impl<'tcx> Const<'tcx> {
param_env: ParamEnv<'tcx>,
) -> Option<(Ty<'tcx>, ScalarInt)> {
let (ty, scalar) = self.try_eval_scalar(tcx, param_env)?;
let val = scalar.try_to_int().ok()?;
let val = scalar.try_to_scalar_int().ok()?;
Some((ty, val))
}
@ -388,7 +388,7 @@ impl<'tcx> Const<'tcx> {
let (ty, scalar) = self.try_eval_scalar_int(tcx, param_env)?;
let size = tcx.layout_of(param_env.with_reveal_all_normalized(tcx).and(ty)).ok()?.size;
// if `ty` does not depend on generic parameters, use an empty param_env
scalar.try_to_bits(size).ok()
Some(scalar.to_bits(size))
}
#[inline]
@ -405,7 +405,7 @@ impl<'tcx> Const<'tcx> {
param_env: ParamEnv<'tcx>,
) -> Option<u64> {
let (_, scalar) = self.try_eval_scalar_int(tcx, param_env)?;
scalar.try_to_target_usize(tcx).ok()
Some(scalar.to_target_usize(tcx))
}
#[inline]

View File

@ -246,6 +246,10 @@ impl ScalarInt {
Self::try_from_uint(i, tcx.data_layout.pointer_size)
}
/// Try to convert this ScalarInt to the raw underlying bits.
/// Fails if the size is wrong. Generally a wrong size should lead to a panic,
/// but Miri sometimes wants to be resilient to size mismatches,
/// so the interpreter will generally use this `try` method.
#[inline]
pub fn try_to_bits(self, target_size: Size) -> Result<u128, Size> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
@ -258,165 +262,149 @@ impl ScalarInt {
}
#[inline]
pub fn assert_bits(self, target_size: Size) -> u128 {
pub fn to_bits(self, target_size: Size) -> u128 {
self.try_to_bits(target_size).unwrap_or_else(|size| {
bug!("expected int of size {}, but got size {}", target_size.bytes(), size.bytes())
})
}
/// Tries to convert the `ScalarInt` to an unsigned integer of the given size.
/// Fails if the size of the `ScalarInt` is not equal to `size` and returns the
/// `ScalarInt`s size in that case.
/// Extracts the bits from the scalar without checking the size.
#[inline]
pub fn try_to_uint(self, size: Size) -> Result<u128, Size> {
self.try_to_bits(size)
pub fn to_bits_unchecked(self) -> u128 {
self.check_data();
self.data
}
/// Converts the `ScalarInt` to an unsigned integer of the given size.
/// Panics if the size of the `ScalarInt` is not equal to `size`.
#[inline]
pub fn to_uint(self, size: Size) -> u128 {
self.to_bits(size)
}
/// Converts the `ScalarInt` to `u8`.
/// Panics if the `size` of the `ScalarInt` is not equal to 1 byte.
#[inline]
pub fn to_u8(self) -> u8 {
self.to_uint(Size::from_bits(8)).try_into().unwrap()
}
/// Converts the `ScalarInt` to `u16`.
/// Panics if the size of the `ScalarInt` is not equal to 2 bytes.
#[inline]
pub fn to_u16(self) -> u16 {
self.to_uint(Size::from_bits(16)).try_into().unwrap()
}
/// Converts the `ScalarInt` to `u32`.
/// Panics if the `size` of the `ScalarInt` is not equal to 4 bytes.
#[inline]
pub fn to_u32(self) -> u32 {
self.to_uint(Size::from_bits(32)).try_into().unwrap()
}
/// Converts the `ScalarInt` to `u64`.
/// Panics if the `size` of the `ScalarInt` is not equal to 8 bytes.
#[inline]
pub fn to_u64(self) -> u64 {
self.to_uint(Size::from_bits(64)).try_into().unwrap()
}
/// Converts the `ScalarInt` to `u128`.
/// Panics if the `size` of the `ScalarInt` is not equal to 16 bytes.
#[inline]
pub fn to_u128(self) -> u128 {
self.to_uint(Size::from_bits(128))
}
#[inline]
pub fn assert_uint(self, size: Size) -> u128 {
self.assert_bits(size)
pub fn to_target_usize(&self, tcx: TyCtxt<'_>) -> u64 {
self.to_uint(tcx.data_layout.pointer_size).try_into().unwrap()
}
// Tries to convert the `ScalarInt` to `u8`. Fails if the `size` of the `ScalarInt`
// is not equal to 1 byte and returns the `size` value of the `ScalarInt` in
// that case.
/// Converts the `ScalarInt` to `bool`.
/// Panics if the `size` of the `ScalarInt` is not equal to 1 byte.
/// Errors if it is not a valid `bool`.
#[inline]
pub fn try_to_u8(self) -> Result<u8, Size> {
self.try_to_uint(Size::from_bits(8)).map(|v| u8::try_from(v).unwrap())
}
/// Tries to convert the `ScalarInt` to `u16`. Fails if the size of the `ScalarInt`
/// is not equal to 2 bytes and returns the `size` value of the `ScalarInt` in
/// that case.
#[inline]
pub fn try_to_u16(self) -> Result<u16, Size> {
self.try_to_uint(Size::from_bits(16)).map(|v| u16::try_from(v).unwrap())
}
/// Tries to convert the `ScalarInt` to `u32`. Fails if the `size` of the `ScalarInt`
/// is not equal to 4 bytes and returns the `size` value of the `ScalarInt` in
/// that case.
#[inline]
pub fn try_to_u32(self) -> Result<u32, Size> {
self.try_to_uint(Size::from_bits(32)).map(|v| u32::try_from(v).unwrap())
}
/// Tries to convert the `ScalarInt` to `u64`. Fails if the `size` of the `ScalarInt`
/// is not equal to 8 bytes and returns the `size` value of the `ScalarInt` in
/// that case.
#[inline]
pub fn try_to_u64(self) -> Result<u64, Size> {
self.try_to_uint(Size::from_bits(64)).map(|v| u64::try_from(v).unwrap())
}
/// Tries to convert the `ScalarInt` to `u128`. Fails if the `size` of the `ScalarInt`
/// is not equal to 16 bytes and returns the `size` value of the `ScalarInt` in
/// that case.
#[inline]
pub fn try_to_u128(self) -> Result<u128, Size> {
self.try_to_uint(Size::from_bits(128))
}
#[inline]
pub fn try_to_target_usize(&self, tcx: TyCtxt<'_>) -> Result<u64, Size> {
self.try_to_uint(tcx.data_layout.pointer_size).map(|v| u64::try_from(v).unwrap())
}
// Tries to convert the `ScalarInt` to `bool`. Fails if the `size` of the `ScalarInt`
// is not equal to 1 byte or if the value is not 0 or 1 and returns the `size`
// value of the `ScalarInt` in that case.
#[inline]
pub fn try_to_bool(self) -> Result<bool, Size> {
match self.try_to_u8()? {
pub fn try_to_bool(self) -> Result<bool, ()> {
match self.to_u8() {
0 => Ok(false),
1 => Ok(true),
_ => Err(self.size()),
_ => Err(()),
}
}
/// Tries to convert the `ScalarInt` to a signed integer of the given size.
/// Fails if the size of the `ScalarInt` is not equal to `size` and returns the
/// `ScalarInt`s size in that case.
/// Converts the `ScalarInt` to a signed integer of the given size.
/// Panics if the size of the `ScalarInt` is not equal to `size`.
#[inline]
pub fn try_to_int(self, size: Size) -> Result<i128, Size> {
let b = self.try_to_bits(size)?;
Ok(size.sign_extend(b) as i128)
}
#[inline]
pub fn assert_int(self, size: Size) -> i128 {
let b = self.assert_bits(size);
pub fn to_int(self, size: Size) -> i128 {
let b = self.to_bits(size);
size.sign_extend(b) as i128
}
/// Tries to convert the `ScalarInt` to i8.
/// Fails if the size of the `ScalarInt` is not equal to 1 byte
/// and returns the `ScalarInt`s size in that case.
pub fn try_to_i8(self) -> Result<i8, Size> {
self.try_to_int(Size::from_bits(8)).map(|v| i8::try_from(v).unwrap())
/// Converts the `ScalarInt` to i8.
/// Panics if the size of the `ScalarInt` is not equal to 1 byte.
pub fn to_i8(self) -> i8 {
self.to_int(Size::from_bits(8)).try_into().unwrap()
}
/// Tries to convert the `ScalarInt` to i16.
/// Fails if the size of the `ScalarInt` is not equal to 2 bytes
/// and returns the `ScalarInt`s size in that case.
pub fn try_to_i16(self) -> Result<i16, Size> {
self.try_to_int(Size::from_bits(16)).map(|v| i16::try_from(v).unwrap())
/// Converts the `ScalarInt` to i16.
/// Panics if the size of the `ScalarInt` is not equal to 2 bytes.
pub fn to_i16(self) -> i16 {
self.to_int(Size::from_bits(16)).try_into().unwrap()
}
/// Tries to convert the `ScalarInt` to i32.
/// Fails if the size of the `ScalarInt` is not equal to 4 bytes
/// and returns the `ScalarInt`s size in that case.
pub fn try_to_i32(self) -> Result<i32, Size> {
self.try_to_int(Size::from_bits(32)).map(|v| i32::try_from(v).unwrap())
/// Converts the `ScalarInt` to i32.
/// Panics if the size of the `ScalarInt` is not equal to 4 bytes.
pub fn to_i32(self) -> i32 {
self.to_int(Size::from_bits(32)).try_into().unwrap()
}
/// Tries to convert the `ScalarInt` to i64.
/// Fails if the size of the `ScalarInt` is not equal to 8 bytes
/// and returns the `ScalarInt`s size in that case.
pub fn try_to_i64(self) -> Result<i64, Size> {
self.try_to_int(Size::from_bits(64)).map(|v| i64::try_from(v).unwrap())
/// Converts the `ScalarInt` to i64.
/// Panics if the size of the `ScalarInt` is not equal to 8 bytes.
pub fn to_i64(self) -> i64 {
self.to_int(Size::from_bits(64)).try_into().unwrap()
}
/// Tries to convert the `ScalarInt` to i128.
/// Fails if the size of the `ScalarInt` is not equal to 16 bytes
/// and returns the `ScalarInt`s size in that case.
pub fn try_to_i128(self) -> Result<i128, Size> {
self.try_to_int(Size::from_bits(128))
/// Converts the `ScalarInt` to i128.
/// Panics if the size of the `ScalarInt` is not equal to 16 bytes.
pub fn to_i128(self) -> i128 {
self.to_int(Size::from_bits(128))
}
#[inline]
pub fn try_to_target_isize(&self, tcx: TyCtxt<'_>) -> Result<i64, Size> {
self.try_to_int(tcx.data_layout.pointer_size).map(|v| i64::try_from(v).unwrap())
pub fn to_target_isize(&self, tcx: TyCtxt<'_>) -> i64 {
self.to_int(tcx.data_layout.pointer_size).try_into().unwrap()
}
#[inline]
pub fn try_to_float<F: Float>(self) -> Result<F, Size> {
pub fn to_float<F: Float>(self) -> F {
// Going through `to_uint` to check size and truncation.
Ok(F::from_bits(self.try_to_bits(Size::from_bits(F::BITS))?))
F::from_bits(self.to_bits(Size::from_bits(F::BITS)))
}
#[inline]
pub fn try_to_f16(self) -> Result<Half, Size> {
self.try_to_float()
pub fn to_f16(self) -> Half {
self.to_float()
}
#[inline]
pub fn try_to_f32(self) -> Result<Single, Size> {
self.try_to_float()
pub fn to_f32(self) -> Single {
self.to_float()
}
#[inline]
pub fn try_to_f64(self) -> Result<Double, Size> {
self.try_to_float()
pub fn to_f64(self) -> Double {
self.to_float()
}
#[inline]
pub fn try_to_f128(self) -> Result<Quad, Size> {
self.try_to_float()
pub fn to_f128(self) -> Quad {
self.to_float()
}
}
macro_rules! from {
macro_rules! from_x_for_scalar_int {
($($ty:ty),*) => {
$(
impl From<$ty> for ScalarInt {
@ -432,30 +420,29 @@ macro_rules! from {
}
}
macro_rules! try_from {
macro_rules! from_scalar_int_for_x {
($($ty:ty),*) => {
$(
impl TryFrom<ScalarInt> for $ty {
type Error = Size;
impl From<ScalarInt> for $ty {
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Size> {
fn from(int: ScalarInt) -> Self {
// The `unwrap` cannot fail because `to_bits` is guaranteed to return
// a value that fits into the target type's size.
int.try_to_bits(Size::from_bytes(std::mem::size_of::<$ty>()))
.map(|u| u.try_into().unwrap())
int.to_bits(Size::from_bytes(std::mem::size_of::<$ty>()))
.try_into().unwrap()
}
}
)*
}
}
from!(u8, u16, u32, u64, u128, bool);
try_from!(u8, u16, u32, u64, u128);
from_x_for_scalar_int!(u8, u16, u32, u64, u128, bool);
from_scalar_int_for_x!(u8, u16, u32, u64, u128);
impl TryFrom<ScalarInt> for bool {
type Error = Size;
type Error = ();
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Size> {
fn try_from(int: ScalarInt) -> Result<Self, ()> {
int.try_to_bool()
}
}
@ -463,7 +450,7 @@ impl TryFrom<ScalarInt> for bool {
impl From<char> for ScalarInt {
#[inline]
fn from(c: char) -> Self {
Self { data: c as u128, size: NonZero::new(std::mem::size_of::<char>() as u8).unwrap() }
(c as u32).into()
}
}
@ -476,10 +463,7 @@ impl TryFrom<ScalarInt> for char {
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Self::Error> {
let Ok(bits) = int.try_to_bits(Size::from_bytes(std::mem::size_of::<char>())) else {
return Err(CharTryFromScalarInt);
};
match char::from_u32(bits.try_into().unwrap()) {
match char::from_u32(int.to_u32()) {
Some(c) => Ok(c),
None => Err(CharTryFromScalarInt),
}
@ -494,11 +478,10 @@ impl From<Half> for ScalarInt {
}
}
impl TryFrom<ScalarInt> for Half {
type Error = Size;
impl From<ScalarInt> for Half {
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Size> {
int.try_to_bits(Size::from_bytes(2)).map(Self::from_bits)
fn from(int: ScalarInt) -> Self {
Self::from_bits(int.to_bits(Size::from_bytes(2)))
}
}
@ -510,11 +493,10 @@ impl From<Single> for ScalarInt {
}
}
impl TryFrom<ScalarInt> for Single {
type Error = Size;
impl From<ScalarInt> for Single {
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Size> {
int.try_to_bits(Size::from_bytes(4)).map(Self::from_bits)
fn from(int: ScalarInt) -> Self {
Self::from_bits(int.to_bits(Size::from_bytes(4)))
}
}
@ -526,11 +508,10 @@ impl From<Double> for ScalarInt {
}
}
impl TryFrom<ScalarInt> for Double {
type Error = Size;
impl From<ScalarInt> for Double {
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Size> {
int.try_to_bits(Size::from_bytes(8)).map(Self::from_bits)
fn from(int: ScalarInt) -> Self {
Self::from_bits(int.to_bits(Size::from_bytes(8)))
}
}
@ -542,11 +523,10 @@ impl From<Quad> for ScalarInt {
}
}
impl TryFrom<ScalarInt> for Quad {
type Error = Size;
impl From<ScalarInt> for Quad {
#[inline]
fn try_from(int: ScalarInt) -> Result<Self, Size> {
int.try_to_bits(Size::from_bytes(16)).map(Self::from_bits)
fn from(int: ScalarInt) -> Self {
Self::from_bits(int.to_bits(Size::from_bytes(16)))
}
}
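The renames above follow one convention: `try_to_*` stays fallible for callers (such as the interpreter) that want to tolerate size mismatches, while the plain `to_*` methods treat a wrong size as a bug and panic. A standalone toy model of that convention, not the real `ScalarInt` type:

#[derive(Copy, Clone, Debug)]
struct ToyScalarInt {
    data: u128,
    size: u8, // size in bytes
}

impl ToyScalarInt {
    // Fallible form: reports the actual size on mismatch, for resilient callers.
    fn try_to_bits(self, target_size: u8) -> Result<u128, u8> {
        if self.size == target_size { Ok(self.data) } else { Err(self.size) }
    }

    // Infallible form: a size mismatch is a compiler bug, so panic loudly.
    fn to_bits(self, target_size: u8) -> u128 {
        self.try_to_bits(target_size)
            .unwrap_or_else(|size| panic!("expected size {target_size}, got {size}"))
    }

    fn to_u32(self) -> u32 {
        self.to_bits(4).try_into().unwrap()
    }
}

fn main() {
    let x = ToyScalarInt { data: 7, size: 4 };
    assert_eq!(x.to_u32(), 7);
    assert_eq!(x.try_to_bits(8), Err(4));
}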

View File

@ -79,7 +79,7 @@ impl<'tcx> ValTree<'tcx> {
}
pub fn try_to_target_usize(self, tcx: TyCtxt<'tcx>) -> Option<u64> {
self.try_to_scalar_int().and_then(|s| s.try_to_target_usize(tcx).ok())
self.try_to_scalar_int().map(|s| s.to_target_usize(tcx))
}
/// Get the values inside the ValTree as a slice of bytes. This only works for
@ -100,8 +100,9 @@ impl<'tcx> ValTree<'tcx> {
_ => return None,
}
Some(tcx.arena.alloc_from_iter(
self.unwrap_branch().into_iter().map(|v| v.unwrap_leaf().try_to_u8().unwrap()),
))
Some(
tcx.arena
.alloc_from_iter(self.unwrap_branch().into_iter().map(|v| v.unwrap_leaf().to_u8())),
)
}
}

View File

@ -1351,3 +1351,37 @@ pub trait FnAbiOf<'tcx>: FnAbiOfHelpers<'tcx> {
}
impl<'tcx, C: FnAbiOfHelpers<'tcx>> FnAbiOf<'tcx> for C {}
impl<'tcx> TyCtxt<'tcx> {
pub fn offset_of_subfield<I>(
self,
param_env: ty::ParamEnv<'tcx>,
mut layout: TyAndLayout<'tcx>,
indices: I,
) -> Size
where
I: Iterator<Item = (VariantIdx, FieldIdx)>,
{
let cx = LayoutCx { tcx: self, param_env };
let mut offset = Size::ZERO;
for (variant, field) in indices {
layout = layout.for_variant(&cx, variant);
let index = field.index();
offset += layout.fields.offset(index);
layout = layout.field(&cx, index);
if !layout.is_sized() {
// If it is not sized, then the tail must still have at least a known static alignment.
let tail = self.struct_tail_erasing_lifetimes(layout.ty, param_env);
if !matches!(tail.kind(), ty::Slice(..)) {
bug!(
"offset of not-statically-aligned field (type {:?}) cannot be computed statically",
layout.ty
);
}
}
}
offset
}
}
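The helper above just walks a chain of (variant, field) steps and adds up the field offsets. On stable Rust the same accumulation can be written by hand for a nested struct; the types below are illustrative.

use std::mem::offset_of;

#[allow(dead_code)]
struct Inner {
    a: u8,
    b: u64,
}

#[allow(dead_code)]
struct Outer {
    flag: bool,
    inner: Inner,
}

fn main() {
    // Accumulate one step at a time, like `offset_of_subfield` does:
    // offset of `inner` inside `Outer`, plus offset of `b` inside `Inner`.
    let nested = offset_of!(Outer, inner) + offset_of!(Inner, b);
    println!("Outer.inner.b is at byte offset {nested}");
}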

View File

@ -1652,7 +1652,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
if let ty::ConstKind::Value(_, ty::ValTree::Leaf(int)) = len.kind() {
match self.tcx().try_get_global_alloc(prov.alloc_id()) {
Some(GlobalAlloc::Memory(alloc)) => {
let len = int.assert_bits(self.tcx().data_layout.pointer_size);
let len = int.to_bits(self.tcx().data_layout.pointer_size);
let range =
AllocRange { start: offset, size: Size::from_bytes(len) };
if let Ok(byte_str) =
@ -1730,7 +1730,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
}
// Pointer types
ty::Ref(..) | ty::RawPtr(_, _) | ty::FnPtr(_) => {
let data = int.assert_bits(self.tcx().data_layout.pointer_size);
let data = int.to_bits(self.tcx().data_layout.pointer_size);
self.typed_value(
|this| {
write!(this, "0x{data:x}")?;

View File

@ -5,7 +5,6 @@ edition = "2021"
[dependencies]
# tidy-alphabetical-start
either = "1"
itertools = "0.12"
rustc_apfloat = "0.2.0"
rustc_arena = { path = "../rustc_arena" }
@ -24,6 +23,5 @@ rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing = "0.1"
# tidy-alphabetical-end

View File

@ -15,11 +15,10 @@ use rustc_index::{Idx, IndexSlice, IndexVec};
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
use rustc_middle::hir::place::PlaceBase as HirPlaceBase;
use rustc_middle::middle::region;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::*;
use rustc_middle::query::TyCtxtAt;
use rustc_middle::thir::{self, ExprId, LintLevel, LocalVarId, Param, ParamId, PatKind, Thir};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeVisitableExt};
use rustc_middle::ty::{self, ScalarInt, Ty, TyCtxt, TypeVisitableExt};
use rustc_middle::{bug, span_bug};
use rustc_span::symbol::sym;
use rustc_span::Span;
@ -1014,14 +1013,14 @@ fn parse_float_into_constval<'tcx>(
float_ty: ty::FloatTy,
neg: bool,
) -> Option<ConstValue<'tcx>> {
parse_float_into_scalar(num, float_ty, neg).map(ConstValue::Scalar)
parse_float_into_scalar(num, float_ty, neg).map(|s| ConstValue::Scalar(s.into()))
}
pub(crate) fn parse_float_into_scalar(
num: Symbol,
float_ty: ty::FloatTy,
neg: bool,
) -> Option<Scalar> {
) -> Option<ScalarInt> {
let num = num.as_str();
match float_ty {
// FIXME(f16_f128): When available, compare to the library parser as with `f32` and `f64`
@ -1030,7 +1029,7 @@ pub(crate) fn parse_float_into_scalar(
if neg {
f = -f;
}
Some(Scalar::from_f16(f))
Some(ScalarInt::from(f))
}
ty::FloatTy::F32 => {
let Ok(rust_f) = num.parse::<f32>() else { return None };
@ -1053,7 +1052,7 @@ pub(crate) fn parse_float_into_scalar(
f = -f;
}
Some(Scalar::from_f32(f))
Some(ScalarInt::from(f))
}
ty::FloatTy::F64 => {
let Ok(rust_f) = num.parse::<f64>() else { return None };
@ -1076,7 +1075,7 @@ pub(crate) fn parse_float_into_scalar(
f = -f;
}
Some(Scalar::from_f64(f))
Some(ScalarInt::from(f))
}
// FIXME(f16_f128): When available, compare to the library parser as with `f32` and `f64`
ty::FloatTy::F128 => {
@ -1084,7 +1083,7 @@ pub(crate) fn parse_float_into_scalar(
if neg {
f = -f;
}
Some(Scalar::from_f128(f))
Some(ScalarInt::from(f))
}
}
}

View File

@ -58,11 +58,9 @@ pub(crate) fn lit_to_const<'tcx>(
}
(ast::LitKind::Bool(b), ty::Bool) => ty::ValTree::from_scalar_int((*b).into()),
(ast::LitKind::Float(n, _), ty::Float(fty)) => {
let bits = parse_float_into_scalar(*n, *fty, neg)
.ok_or_else(|| {
tcx.dcx().bug(format!("couldn't parse float literal: {:?}", lit_input.lit))
})?
.assert_int();
let bits = parse_float_into_scalar(*n, *fty, neg).ok_or_else(|| {
tcx.dcx().bug(format!("couldn't parse float literal: {:?}", lit_input.lit))
})?;
ty::ValTree::from_scalar_int(bits)
}
(ast::LitKind::Char(c), ty::Char) => ty::ValTree::from_scalar_int((*c).into()),

View File

@ -282,8 +282,7 @@ impl<'tcx> ConstToPat<'tcx> {
}
ty::Adt(adt_def, args) if adt_def.is_enum() => {
let (&variant_index, fields) = cv.unwrap_branch().split_first().unwrap();
let variant_index =
VariantIdx::from_u32(variant_index.unwrap_leaf().try_to_u32().ok().unwrap());
let variant_index = VariantIdx::from_u32(variant_index.unwrap_leaf().to_u32());
PatKind::Variant {
adt_def: *adt_def,
args,
@ -371,8 +370,8 @@ impl<'tcx> ConstToPat<'tcx> {
let v = cv.unwrap_leaf();
let is_nan = match flt {
ty::FloatTy::F16 => unimplemented!("f16_f128"),
ty::FloatTy::F32 => v.try_to_f32().unwrap().is_nan(),
ty::FloatTy::F64 => v.try_to_f64().unwrap().is_nan(),
ty::FloatTy::F32 => v.to_f32().is_nan(),
ty::FloatTy::F64 => v.to_f64().is_nan(),
ty::FloatTy::F128 => unimplemented!("f16_f128"),
};
if is_nan {

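Background for the `is_nan` check above: NaN compares unequal to everything, including itself, so a floating-point NaN constant used as a pattern could never match anything.

fn main() {
    // NaN is not even equal to itself, which is why a NaN constant in pattern
    // position is flagged: such an arm would be unreachable.
    assert_ne!(f32::NAN, f32::NAN);
    assert_ne!(f64::NAN, f64::NAN);
}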
View File

@ -1,102 +0,0 @@
//! Finds locals which are assigned once to a const and unused except for debuginfo and converts
//! their debuginfo to use the const directly, allowing the local to be removed.
use rustc_middle::{
mir::{
visit::{PlaceContext, Visitor},
Body, ConstOperand, Local, Location, Operand, Rvalue, StatementKind, VarDebugInfoContents,
},
ty::TyCtxt,
};
use crate::MirPass;
use rustc_index::{bit_set::BitSet, IndexVec};
pub struct ConstDebugInfo;
impl<'tcx> MirPass<'tcx> for ConstDebugInfo {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
sess.mir_opt_level() > 0
}
fn run_pass(&self, _tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
trace!("running ConstDebugInfo on {:?}", body.source);
for (local, constant) in find_optimization_opportunities(body) {
for debuginfo in &mut body.var_debug_info {
if let VarDebugInfoContents::Place(p) = debuginfo.value {
if p.local == local && p.projection.is_empty() {
trace!(
"changing debug info for {:?} from place {:?} to constant {:?}",
debuginfo.name,
p,
constant
);
debuginfo.value = VarDebugInfoContents::Const(constant);
}
}
}
}
}
}
struct LocalUseVisitor {
local_mutating_uses: IndexVec<Local, u8>,
local_assignment_locations: IndexVec<Local, Option<Location>>,
}
fn find_optimization_opportunities<'tcx>(body: &Body<'tcx>) -> Vec<(Local, ConstOperand<'tcx>)> {
let mut visitor = LocalUseVisitor {
local_mutating_uses: IndexVec::from_elem(0, &body.local_decls),
local_assignment_locations: IndexVec::from_elem(None, &body.local_decls),
};
visitor.visit_body(body);
let mut locals_to_debuginfo = BitSet::new_empty(body.local_decls.len());
for debuginfo in &body.var_debug_info {
if let VarDebugInfoContents::Place(p) = debuginfo.value
&& let Some(l) = p.as_local()
{
locals_to_debuginfo.insert(l);
}
}
let mut eligible_locals = Vec::new();
for (local, mutating_uses) in visitor.local_mutating_uses.drain_enumerated(..) {
if mutating_uses != 1 || !locals_to_debuginfo.contains(local) {
continue;
}
if let Some(location) = visitor.local_assignment_locations[local] {
let bb = &body[location.block];
// The value is assigned as the result of a call, not a constant
if bb.statements.len() == location.statement_index {
continue;
}
if let StatementKind::Assign(box (p, Rvalue::Use(Operand::Constant(box c)))) =
&bb.statements[location.statement_index].kind
{
if let Some(local) = p.as_local() {
eligible_locals.push((local, *c));
}
}
}
}
eligible_locals
}
impl Visitor<'_> for LocalUseVisitor {
fn visit_local(&mut self, local: Local, context: PlaceContext, location: Location) {
if context.is_mutating_use() {
self.local_mutating_uses[local] = self.local_mutating_uses[local].saturating_add(1);
if context.is_place_assignment() {
self.local_assignment_locations[local] = Some(location);
}
}
}
}

View File

@ -10,7 +10,7 @@ use rustc_middle::bug;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::layout::{HasParamEnv, LayoutOf};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_mir_dataflow::value_analysis::{
Map, PlaceIndex, State, TrackElem, ValueAnalysis, ValueAnalysisWrapper, ValueOrPlace,
@ -285,9 +285,11 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
let val = match null_op {
NullOp::SizeOf if layout.is_sized() => layout.size.bytes(),
NullOp::AlignOf if layout.is_sized() => layout.align.abi.bytes(),
NullOp::OffsetOf(fields) => {
layout.offset_of_subfield(&self.ecx, fields.iter()).bytes()
}
NullOp::OffsetOf(fields) => self
.ecx
.tcx
.offset_of_subfield(self.ecx.param_env(), layout, fields.iter())
.bytes(),
_ => return ValueOrPlace::Value(FlatSet::Top),
};
FlatSet::Elem(Scalar::from_target_usize(val, &self.tcx))
@ -324,7 +326,7 @@ impl<'tcx> ValueAnalysis<'tcx> for ConstAnalysis<'_, 'tcx> {
// This allows the set of visited edges to grow monotonically with the lattice.
FlatSet::Bottom => TerminatorEdges::None,
FlatSet::Elem(scalar) => {
let choice = scalar.assert_bits(scalar.size());
let choice = scalar.assert_scalar_int().to_bits_unchecked();
TerminatorEdges::Single(targets.target_for_value(choice))
}
FlatSet::Top => TerminatorEdges::SwitchInt { discr, targets },
@ -607,7 +609,7 @@ fn propagatable_scalar(
map: &Map,
) -> Option<Scalar> {
if let FlatSet::Elem(value) = state.get_idx(place, map)
&& value.try_to_int().is_ok()
&& value.try_to_scalar_int().is_ok()
{
// Do not attempt to propagate pointers, as we may fail to preserve their identity.
Some(value)
@ -668,7 +670,7 @@ fn try_write_constant<'tcx>(
let FlatSet::Elem(Scalar::Int(discr)) = state.get_idx(discr, map) else {
throw_machine_stop_str!("discriminant with provenance")
};
let discr_bits = discr.assert_bits(discr.size());
let discr_bits = discr.to_bits(discr.size());
let Some((variant, _)) = def.discriminants(*ecx.tcx).find(|(_, var)| discr_bits == var.val) else {
throw_machine_stop_str!("illegal discriminant for enum")
};

View File

@ -83,8 +83,8 @@
//! that contain `AllocId`s.
use rustc_const_eval::const_eval::DummyMachine;
use rustc_const_eval::interpret::{intern_const_alloc_for_constprop, MemoryKind};
use rustc_const_eval::interpret::{ImmTy, InterpCx, OpTy, Projectable, Scalar};
use rustc_const_eval::interpret::{intern_const_alloc_for_constprop, MemPlaceMeta, MemoryKind};
use rustc_const_eval::interpret::{ImmTy, Immediate, InterpCx, OpTy, Projectable, Scalar};
use rustc_data_structures::fx::FxIndexSet;
use rustc_data_structures::graph::dominators::Dominators;
use rustc_hir::def::DefKind;
@ -95,11 +95,11 @@ use rustc_middle::bug;
use rustc_middle::mir::interpret::GlobalAlloc;
use rustc_middle::mir::visit::*;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::layout::{HasParamEnv, LayoutOf};
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_span::def_id::DefId;
use rustc_span::DUMMY_SP;
use rustc_target::abi::{self, Abi, Size, VariantIdx, FIRST_VARIANT};
use rustc_target::abi::{self, Abi, FieldIdx, Size, VariantIdx, FIRST_VARIANT};
use smallvec::SmallVec;
use std::borrow::Cow;
@ -177,6 +177,12 @@ enum AggregateTy<'tcx> {
Array,
Tuple,
Def(DefId, ty::GenericArgsRef<'tcx>),
RawPtr {
/// Needed for cast propagation.
data_pointer_ty: Ty<'tcx>,
/// The data pointer can be anything thin, so doesn't determine the output.
output_pointer_ty: Ty<'tcx>,
},
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
@ -385,11 +391,22 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
AggregateTy::Def(def_id, args) => {
self.tcx.type_of(def_id).instantiate(self.tcx, args)
}
AggregateTy::RawPtr { output_pointer_ty, .. } => output_pointer_ty,
};
let variant = if ty.is_enum() { Some(variant) } else { None };
let ty = self.ecx.layout_of(ty).ok()?;
if ty.is_zst() {
ImmTy::uninit(ty).into()
} else if matches!(kind, AggregateTy::RawPtr { .. }) {
// Pointers don't have fields, so don't `project_field` them.
let data = self.ecx.read_pointer(fields[0]).ok()?;
let meta = if fields[1].layout.is_zst() {
MemPlaceMeta::None
} else {
MemPlaceMeta::Meta(self.ecx.read_scalar(fields[1]).ok()?)
};
let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
ImmTy::from_immediate(ptr_imm, ty).into()
} else if matches!(ty.abi, Abi::Scalar(..) | Abi::ScalarPair(..)) {
let dest = self.ecx.allocate(ty, MemoryKind::Stack).ok()?;
let variant_dest = if let Some(variant) = variant {
@ -471,7 +488,7 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
let slice = self.evaluated[slice].as_ref()?;
let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
let len = slice.len(&self.ecx).ok()?;
let imm = ImmTy::try_from_uint(len, usize_layout)?;
let imm = ImmTy::from_uint(len, usize_layout);
imm.into()
}
NullaryOp(null_op, ty) => {
@ -484,13 +501,15 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
let val = match null_op {
NullOp::SizeOf => layout.size.bytes(),
NullOp::AlignOf => layout.align.abi.bytes(),
NullOp::OffsetOf(fields) => {
layout.offset_of_subfield(&self.ecx, fields.iter()).bytes()
}
NullOp::OffsetOf(fields) => self
.ecx
.tcx
.offset_of_subfield(self.ecx.param_env(), layout, fields.iter())
.bytes(),
NullOp::UbChecks => return None,
};
let usize_layout = self.ecx.layout_of(self.tcx.types.usize).unwrap();
let imm = ImmTy::try_from_uint(val, usize_layout)?;
let imm = ImmTy::from_uint(val, usize_layout);
imm.into()
}
UnaryOp(un_op, operand) => {
@ -862,10 +881,10 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
rvalue: &mut Rvalue<'tcx>,
location: Location,
) -> Option<VnIndex> {
let Rvalue::Aggregate(box ref kind, ref mut fields) = *rvalue else { bug!() };
let Rvalue::Aggregate(box ref kind, ref mut field_ops) = *rvalue else { bug!() };
let tcx = self.tcx;
if fields.is_empty() {
if field_ops.is_empty() {
let is_zst = match *kind {
AggregateKind::Array(..)
| AggregateKind::Tuple
@ -884,13 +903,13 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
}
}
let (ty, variant_index) = match *kind {
let (mut ty, variant_index) = match *kind {
AggregateKind::Array(..) => {
assert!(!fields.is_empty());
assert!(!field_ops.is_empty());
(AggregateTy::Array, FIRST_VARIANT)
}
AggregateKind::Tuple => {
assert!(!fields.is_empty());
assert!(!field_ops.is_empty());
(AggregateTy::Tuple, FIRST_VARIANT)
}
AggregateKind::Closure(did, args)
@ -901,15 +920,49 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
}
// Do not track unions.
AggregateKind::Adt(_, _, _, _, Some(_)) => return None,
// FIXME: Do the extra work to GVN `from_raw_parts`
AggregateKind::RawPtr(..) => return None,
AggregateKind::RawPtr(pointee_ty, mtbl) => {
assert_eq!(field_ops.len(), 2);
let data_pointer_ty = field_ops[FieldIdx::ZERO].ty(self.local_decls, self.tcx);
let output_pointer_ty = Ty::new_ptr(self.tcx, pointee_ty, mtbl);
(AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty }, FIRST_VARIANT)
}
};
let fields: Option<Vec<_>> = fields
let fields: Option<Vec<_>> = field_ops
.iter_mut()
.map(|op| self.simplify_operand(op, location).or_else(|| self.new_opaque()))
.collect();
let fields = fields?;
let mut fields = fields?;
if let AggregateTy::RawPtr { data_pointer_ty, output_pointer_ty } = &mut ty {
let mut was_updated = false;
// Any thin pointer of matching mutability is fine as the data pointer.
while let Value::Cast {
kind: CastKind::PtrToPtr,
value: cast_value,
from: cast_from,
to: _,
} = self.get(fields[0])
&& let ty::RawPtr(from_pointee_ty, from_mtbl) = cast_from.kind()
&& let ty::RawPtr(_, output_mtbl) = output_pointer_ty.kind()
&& from_mtbl == output_mtbl
&& from_pointee_ty.is_sized(self.tcx, self.param_env)
{
fields[0] = *cast_value;
*data_pointer_ty = *cast_from;
was_updated = true;
}
if was_updated {
if let Some(const_) = self.try_as_constant(fields[0]) {
field_ops[FieldIdx::ZERO] = Operand::Constant(Box::new(const_));
} else if let Some(local) = self.try_as_local(fields[0], location) {
field_ops[FieldIdx::ZERO] = Operand::Copy(Place::from(local));
self.reused_locals.insert(local);
}
}
}
if let AggregateTy::Array = ty
&& fields.len() > 4
@ -941,6 +994,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
(UnOp::Not, Value::BinaryOp(BinOp::Ne, lhs, rhs)) => {
Value::BinaryOp(BinOp::Eq, *lhs, *rhs)
}
(UnOp::PtrMetadata, Value::Aggregate(AggregateTy::RawPtr { .. }, _, fields)) => {
return Some(fields[1]);
}
_ => return None,
};
@ -1092,6 +1148,23 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
return self.new_opaque();
}
let mut was_updated = false;
// If that cast just casts away the metadata again, use the original data pointer instead.
if let PtrToPtr = kind
&& let Value::Aggregate(AggregateTy::RawPtr { data_pointer_ty, .. }, _, fields) =
self.get(value)
&& let ty::RawPtr(to_pointee, _) = to.kind()
&& to_pointee.is_sized(self.tcx, self.param_env)
{
from = *data_pointer_ty;
value = fields[0];
was_updated = true;
if *data_pointer_ty == to {
return Some(fields[0]);
}
}
if let PtrToPtr | PointerCoercion(MutToConstPointer) = kind
&& let Value::Cast { kind: inner_kind, value: inner_value, from: inner_from, to: _ } =
*self.get(value)
@ -1100,9 +1173,13 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
from = inner_from;
value = inner_value;
*kind = PtrToPtr;
was_updated = true;
if inner_from == to {
return Some(inner_value);
}
}
if was_updated {
if let Some(const_) = self.try_as_constant(value) {
*operand = Operand::Constant(Box::new(const_));
} else if let Some(local) = self.try_as_local(value, location) {
@ -1178,7 +1255,7 @@ fn op_to_prop_const<'tcx>(
// If this constant has scalar ABI, return it as a `ConstValue::Scalar`.
if let Abi::Scalar(abi::Scalar::Initialized { .. }) = op.layout.abi
&& let Ok(scalar) = ecx.read_scalar(op)
&& scalar.try_to_int().is_ok()
&& scalar.try_to_scalar_int().is_ok()
{
return Some(ConstValue::Scalar(scalar));
}
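A rough surface-level illustration (ordinary Rust rather than MIR) of the redundancy the new `AggregateTy::RawPtr` handling can look through: an intermediate thin-pointer cast before building a wide pointer does not change the resulting value, so the cast can be peeled off during value numbering.

use std::ptr;

fn main() {
    let xs = [10u8, 20, 30];
    let thin: *const u8 = xs.as_ptr();

    // Building the wide slice pointer directly...
    let direct: *const [u8] = ptr::slice_from_raw_parts(thin, xs.len());
    // ...or after a round-trip thin-pointer cast: the values are identical.
    let via_cast: *const [u8] =
        ptr::slice_from_raw_parts(thin.cast::<()>().cast::<u8>(), xs.len());

    assert_eq!(direct, via_cast);
}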

View File

@ -356,15 +356,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
debug!("check_binary_op: reporting assert for {:?}", location);
let panic = AssertKind::Overflow(
op,
match l {
Some(l) => l.to_const_int(),
// Invent a dummy value, the diagnostic ignores it anyway
None => ConstInt::new(
ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
left_ty.is_signed(),
left_ty.is_ptr_sized_integral(),
),
},
// Invent a dummy value, the diagnostic ignores it anyway
ConstInt::new(
ScalarInt::try_from_uint(1_u8, left_size).unwrap(),
left_ty.is_signed(),
left_ty.is_ptr_sized_integral(),
),
r.to_const_int(),
);
self.report_assert_as_lint(location, AssertLintKind::ArithmeticOverflow, panic);
@ -625,9 +622,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
let val = match null_op {
NullOp::SizeOf => op_layout.size.bytes(),
NullOp::AlignOf => op_layout.align.abi.bytes(),
NullOp::OffsetOf(fields) => {
op_layout.offset_of_subfield(self, fields.iter()).bytes()
}
NullOp::OffsetOf(fields) => self
.tcx
.offset_of_subfield(self.param_env, op_layout, fields.iter())
.bytes(),
NullOp::UbChecks => return None,
};
ImmTy::from_scalar(Scalar::from_target_usize(val, self), layout).into()
@ -786,8 +784,7 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
TerminatorKind::SwitchInt { ref discr, ref targets } => {
if let Some(ref value) = self.eval_operand(discr)
&& let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value))
&& let Ok(constant) = value_const.try_to_int()
&& let Ok(constant) = constant.try_to_bits(constant.size())
&& let Ok(constant) = value_const.to_bits(value_const.size())
{
// We managed to evaluate the discriminant, so we know we only need to visit
// one target.

View File

@ -55,7 +55,6 @@ mod remove_place_mention;
// This pass is public to allow external drivers to perform MIR cleanup
mod add_subtyping_projections;
pub mod cleanup_post_borrowck;
mod const_debuginfo;
mod copy_prop;
mod coroutine;
mod cost_checker;
@ -106,6 +105,7 @@ mod check_alignment;
pub mod simplify;
mod simplify_branches;
mod simplify_comparison_integral;
mod single_use_consts;
mod sroa;
mod unreachable_enum_branching;
mod unreachable_prop;
@ -593,7 +593,7 @@ fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
&gvn::GVN,
&simplify::SimplifyLocals::AfterGVN,
&dataflow_const_prop::DataflowConstProp,
&const_debuginfo::ConstDebugInfo,
&single_use_consts::SingleUseConsts,
&o1(simplify_branches::SimplifyConstCondition::AfterConstProp),
&jump_threading::JumpThreading,
&early_otherwise_branch::EarlyOtherwiseBranch,

View File

@ -372,7 +372,7 @@ impl<'tcx> SimplifyMatch<'tcx> for SimplifyToExp {
}
fn int_equal(l: ScalarInt, r: impl Into<u128>, size: Size) -> bool {
l.assert_int(l.size()) == ScalarInt::try_from_uint(r, size).unwrap().assert_int(size)
l.to_bits_unchecked() == ScalarInt::try_from_uint(r, size).unwrap().to_bits_unchecked()
}
// We first compare the two branches, and then the other branches need to fulfill the same conditions.

View File

@ -500,14 +500,14 @@ impl<'tcx> Validator<'_, 'tcx> {
}
_ => None,
};
match rhs_val.map(|x| x.assert_uint(sz)) {
match rhs_val.map(|x| x.to_uint(sz)) {
// for the zero test, int vs uint does not matter
Some(x) if x != 0 => {} // okay
_ => return Err(Unpromotable), // value not known or 0 -- not okay
}
// Furthermore, for signed division, we also have to exclude `int::MIN / -1`.
if lhs_ty.is_signed() {
match rhs_val.map(|x| x.assert_int(sz)) {
match rhs_val.map(|x| x.to_int(sz)) {
Some(-1) | None => {
// The RHS is -1 or unknown, so we have to be careful.
// But is the LHS int::MIN?
@ -518,7 +518,7 @@ impl<'tcx> Validator<'_, 'tcx> {
_ => None,
};
let lhs_min = sz.signed_int_min();
match lhs_val.map(|x| x.assert_int(sz)) {
match lhs_val.map(|x| x.to_int(sz)) {
Some(x) if x != lhs_min => {} // okay
_ => return Err(Unpromotable), // value not known or int::MIN -- not okay
}
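The edge case being excluded is easy to demonstrate: dividing the minimum signed value by -1 overflows, because the mathematical result is one past the maximum. Promotion therefore has to prove the divisor is non-zero and, for signed types, that the `MIN / -1` combination cannot occur.

fn main() {
    // 2^31 is not representable in i32, so `i32::MIN / -1` overflows.
    assert_eq!(i32::MIN.checked_div(-1), None);
    assert_eq!(i32::MIN.checked_rem(-1), None);
    // Any other known non-zero divisor is fine.
    assert_eq!(i32::MIN.checked_div(2), Some(i32::MIN / 2));
}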

View File

@ -49,7 +49,7 @@ impl<'tcx> MirPass<'tcx> for SimplifyComparisonIntegral {
let layout = tcx
.layout_of(param_env.and(opt.branch_value_ty))
.expect("if we have an evaluated constant we must know the layout");
int.assert_bits(layout.size)
int.to_bits(layout.size)
}
Scalar::Ptr(..) => continue,
};

View File

@ -0,0 +1,199 @@
use rustc_index::{bit_set::BitSet, IndexVec};
use rustc_middle::bug;
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::TyCtxt;
/// Various parts of MIR building introduce temporaries that are commonly not needed.
///
/// Notably, `if CONST` and `match CONST` end up being used-once temporaries, which
/// obfuscates the structure for other passes and codegen, which would like to always
/// be able to just see the constant directly.
///
/// At higher optimization levels fancier passes like GVN will take care of this
/// in a more general fashion, but this pass handles the easy cases so it can run in debug builds.
///
/// This only removes constants with a single-use because re-evaluating constants
/// isn't always an improvement, especially for large ones.
///
/// It also removes *never*-used constants, since it had all the information
/// needed to do that too, including updating the debug info.
pub struct SingleUseConsts;
impl<'tcx> MirPass<'tcx> for SingleUseConsts {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
sess.mir_opt_level() > 0
}
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
let mut finder = SingleUseConstsFinder {
ineligible_locals: BitSet::new_empty(body.local_decls.len()),
locations: IndexVec::from_elem(LocationPair::new(), &body.local_decls),
locals_in_debug_info: BitSet::new_empty(body.local_decls.len()),
};
finder.ineligible_locals.insert_range(..=Local::from_usize(body.arg_count));
finder.visit_body(body);
for (local, locations) in finder.locations.iter_enumerated() {
if finder.ineligible_locals.contains(local) {
continue;
}
let Some(init_loc) = locations.init_loc else {
continue;
};
// We're only changing an operand, not the terminator kinds or successors
let basic_blocks = body.basic_blocks.as_mut_preserves_cfg();
let init_statement =
basic_blocks[init_loc.block].statements[init_loc.statement_index].replace_nop();
let StatementKind::Assign(place_and_rvalue) = init_statement.kind else {
bug!("No longer an assign?");
};
let (place, rvalue) = *place_and_rvalue;
assert_eq!(place.as_local(), Some(local));
let Rvalue::Use(operand) = rvalue else { bug!("No longer a use?") };
let mut replacer = LocalReplacer { tcx, local, operand: Some(operand) };
if finder.locals_in_debug_info.contains(local) {
for var_debug_info in &mut body.var_debug_info {
replacer.visit_var_debug_info(var_debug_info);
}
}
let Some(use_loc) = locations.use_loc else { continue };
let use_block = &mut basic_blocks[use_loc.block];
if let Some(use_statement) = use_block.statements.get_mut(use_loc.statement_index) {
replacer.visit_statement(use_statement, use_loc);
} else {
replacer.visit_terminator(use_block.terminator_mut(), use_loc);
}
if replacer.operand.is_some() {
bug!(
"operand wasn't used replacing local {local:?} with locations {locations:?} in body {body:#?}"
);
}
}
}
}
#[derive(Copy, Clone, Debug)]
struct LocationPair {
init_loc: Option<Location>,
use_loc: Option<Location>,
}
impl LocationPair {
fn new() -> Self {
Self { init_loc: None, use_loc: None }
}
}
struct SingleUseConstsFinder {
ineligible_locals: BitSet<Local>,
locations: IndexVec<Local, LocationPair>,
locals_in_debug_info: BitSet<Local>,
}
impl<'tcx> Visitor<'tcx> for SingleUseConstsFinder {
fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
if let Some(local) = place.as_local()
&& let Rvalue::Use(operand) = rvalue
&& let Operand::Constant(_) = operand
{
let locations = &mut self.locations[local];
if locations.init_loc.is_some() {
self.ineligible_locals.insert(local);
} else {
locations.init_loc = Some(location);
}
} else {
self.super_assign(place, rvalue, location);
}
}
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
if let Some(place) = operand.place()
&& let Some(local) = place.as_local()
{
let locations = &mut self.locations[local];
if locations.use_loc.is_some() {
self.ineligible_locals.insert(local);
} else {
locations.use_loc = Some(location);
}
} else {
self.super_operand(operand, location);
}
}
fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
match &statement.kind {
// Storage markers are irrelevant to this.
StatementKind::StorageLive(_) | StatementKind::StorageDead(_) => {}
_ => self.super_statement(statement, location),
}
}
fn visit_var_debug_info(&mut self, var_debug_info: &VarDebugInfo<'tcx>) {
if let VarDebugInfoContents::Place(place) = &var_debug_info.value
&& let Some(local) = place.as_local()
{
self.locals_in_debug_info.insert(local);
} else {
self.super_var_debug_info(var_debug_info);
}
}
fn visit_local(&mut self, local: Local, _context: PlaceContext, _location: Location) {
// If there's any path that gets here, rather than being understood elsewhere,
// then we'd better not do anything with this local.
self.ineligible_locals.insert(local);
}
}
struct LocalReplacer<'tcx> {
tcx: TyCtxt<'tcx>,
local: Local,
operand: Option<Operand<'tcx>>,
}
impl<'tcx> MutVisitor<'tcx> for LocalReplacer<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_operand(&mut self, operand: &mut Operand<'tcx>, _location: Location) {
if let Operand::Copy(place) | Operand::Move(place) = operand
&& let Some(local) = place.as_local()
&& local == self.local
{
*operand = self.operand.take().unwrap_or_else(|| {
bug!("there was a second use of the operand");
});
}
}
fn visit_var_debug_info(&mut self, var_debug_info: &mut VarDebugInfo<'tcx>) {
if let VarDebugInfoContents::Place(place) = &var_debug_info.value
&& let Some(local) = place.as_local()
&& local == self.local
{
let const_op = self
.operand
.as_ref()
.unwrap_or_else(|| {
bug!("the operand was already stolen");
})
.constant()
.unwrap()
.clone();
var_debug_info.value = VarDebugInfoContents::Const(const_op);
}
}
}
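For reference, the kind of source code the new pass targets: a constant read exactly once as a branch condition is materialized into a single-use temporary during MIR building, and the pass folds it back into the use (and into the debuginfo). This is only an illustration of the input pattern, not of the pass itself.

const VERBOSE: bool = false;

fn log(msg: &str) {
    // MIR building first copies `VERBOSE` into a temporary local and branches on
    // that local; SingleUseConsts replaces the temporary's single use with the
    // constant operand directly.
    if VERBOSE {
        println!("{msg}");
    }
}

fn main() {
    log("hello");
}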

View File

@ -5,12 +5,12 @@ use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_infer::traits::Reveal;
use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::{
self, CoroutineArgsExt, InstanceDef, ParamEnv, Ty, TyCtxt, TypeVisitableExt, Variance,
self, CoroutineArgsExt, InstanceDef, ParamEnv, ScalarInt, Ty, TyCtxt, TypeVisitableExt,
Variance,
};
use rustc_middle::{bug, span_bug};
use rustc_target::abi::{Size, FIRST_VARIANT};
@ -1478,7 +1478,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
});
for (value, _) in targets.iter() {
if Scalar::<()>::try_from_uint(value, size).is_none() {
if ScalarInt::try_from_uint(value, size).is_none() {
self.fail(
location,
format!("the value {value:#x} is not a proper {switch_ty:?}"),

View File

@ -5,22 +5,9 @@ edition = "2021"
[dependencies]
# tidy-alphabetical-start
derivative = "2.2.0"
rustc_ast_ir = { path = "../rustc_ast_ir", default-features = false }
rustc_data_structures = { path = "../rustc_data_structures", optional = true }
rustc_macros = { path = "../rustc_macros", optional = true }
rustc_serialize = { path = "../rustc_serialize", optional = true }
rustc_type_ir = { path = "../rustc_type_ir", default-features = false }
rustc_type_ir_macros = { path = "../rustc_type_ir_macros" }
tracing = "0.1"
# tidy-alphabetical-end
[features]
default = ["nightly"]
nightly = [
"rustc_type_ir/nightly",
"rustc_macros",
"rustc_serialize",
"rustc_data_structures",
"rustc_ast_ir/nightly",
]
nightly = ["rustc_type_ir/nightly"]

View File

@ -6,11 +6,12 @@ use rustc_hir::def_id::DefId;
use rustc_hir::HirId;
use rustc_index::{Idx, IndexVec};
use rustc_middle::middle::stability::EvalResult;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::{self, Const};
use rustc_middle::thir::{self, FieldPat, Pat, PatKind, PatRange, PatRangeBoundary};
use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::{self, FieldDef, OpaqueTypeKey, Ty, TyCtxt, TypeVisitableExt, VariantDef};
use rustc_middle::ty::{
self, FieldDef, OpaqueTypeKey, ScalarInt, Ty, TyCtxt, TypeVisitableExt, VariantDef,
};
use rustc_middle::{bug, span_bug};
use rustc_session::lint;
use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP};
@ -701,9 +702,9 @@ impl<'p, 'tcx: 'p> RustcPatCtxt<'p, 'tcx> {
ty::Int(_) => miint.as_finite_int(size.bits()).unwrap(),
_ => miint.as_finite_uint().unwrap(),
};
match Scalar::try_from_uint(bits, size) {
match ScalarInt::try_from_uint(bits, size) {
Some(scalar) => {
let value = mir::Const::from_scalar(tcx, scalar, ty.inner());
let value = mir::Const::from_scalar(tcx, scalar.into(), ty.inner());
PatRangeBoundary::Finite(value)
}
// The value doesn't fit. Since `x >= 0` and 0 always encodes the minimum value

View File

@ -7,7 +7,6 @@ edition = "2021"
# tidy-alphabetical-start
field-offset = "0.3.5"
measureme = "11"
rustc-rayon-core = { version = "0.5.0", optional = true }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_hir = { path = "../rustc_hir" }
@ -23,5 +22,5 @@ tracing = "0.1"
[features]
# tidy-alphabetical-start
rustc_use_parallel_compiler = ["rustc-rayon-core", "rustc_query_system/rustc_use_parallel_compiler"]
rustc_use_parallel_compiler = ["rustc_query_system/rustc_use_parallel_compiler"]
# tidy-alphabetical-end

View File

@ -521,7 +521,7 @@ impl SpanData {
Span::new(self.lo, hi, self.ctxt, self.parent)
}
#[inline]
pub fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
fn with_ctxt(&self, ctxt: SyntaxContext) -> Span {
Span::new(self.lo, self.hi, ctxt, self.parent)
}
#[inline]
@ -576,8 +576,9 @@ impl Span {
self.data().with_hi(hi)
}
#[inline]
pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span {
self.data_untracked().with_ctxt(ctxt)
pub fn with_ctxt(mut self, ctxt: SyntaxContext) -> Span {
self.update_ctxt(|_| ctxt);
self
}
#[inline]
pub fn parent(self) -> Option<LocalDefId> {
@ -1058,9 +1059,9 @@ impl Span {
}
#[inline]
pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span {
let span = self.data();
span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency))
pub fn apply_mark(mut self, expn_id: ExpnId, transparency: Transparency) -> Span {
self.update_ctxt(|ctxt| ctxt.apply_mark(expn_id, transparency));
self
}
#[inline]
@ -1108,15 +1109,15 @@ impl Span {
}
#[inline]
pub fn normalize_to_macros_2_0(self) -> Span {
let span = self.data();
span.with_ctxt(span.ctxt.normalize_to_macros_2_0())
pub fn normalize_to_macros_2_0(mut self) -> Span {
self.update_ctxt(|ctxt| ctxt.normalize_to_macros_2_0());
self
}
#[inline]
pub fn normalize_to_macro_rules(self) -> Span {
let span = self.data();
span.with_ctxt(span.ctxt.normalize_to_macro_rules())
pub fn normalize_to_macro_rules(mut self) -> Span {
self.update_ctxt(|ctxt| ctxt.normalize_to_macro_rules());
self
}
}

View File

@ -87,6 +87,45 @@ pub struct Span {
ctxt_or_parent_or_marker: u16,
}
impl Span {
#[inline]
fn data_inline_ctxt(self) -> SpanData {
let len = self.len_with_tag_or_marker as u32;
debug_assert!(len <= MAX_LEN);
SpanData {
lo: BytePos(self.lo_or_index),
hi: BytePos(self.lo_or_index.debug_strict_add(len)),
ctxt: SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32),
parent: None,
}
}
#[inline]
fn data_inline_parent(self) -> SpanData {
let len = (self.len_with_tag_or_marker & !PARENT_TAG) as u32;
debug_assert!(len <= MAX_LEN);
let parent = LocalDefId {
local_def_index: DefIndex::from_u32(self.ctxt_or_parent_or_marker as u32),
};
SpanData {
lo: BytePos(self.lo_or_index),
hi: BytePos(self.lo_or_index.debug_strict_add(len)),
ctxt: SyntaxContext::root(),
parent: Some(parent),
}
}
#[inline]
fn data_partially_interned(self) -> SpanData {
SpanData {
ctxt: SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32),
..with_span_interner(|interner| interner.spans[self.lo_or_index as usize])
}
}
#[inline]
fn data_interned(self) -> SpanData {
with_span_interner(|interner| interner.spans[self.lo_or_index as usize])
}
}
// `MAX_LEN` is chosen so that `PARENT_TAG | MAX_LEN` is distinct from
// `BASE_LEN_INTERNED_MARKER`. (If `MAX_LEN` was 1 higher, this wouldn't be true.)
const MAX_LEN: u32 = 0b0111_1111_1111_1110;
@ -111,42 +150,49 @@ impl Span {
std::mem::swap(&mut lo, &mut hi);
}
let (lo2, len, ctxt2) = (lo.0, hi.0 - lo.0, ctxt.as_u32());
// Small len may enable one of fully inline formats (or may not).
let (len, ctxt32) = (hi.0 - lo.0, ctxt.as_u32());
if len <= MAX_LEN {
if ctxt2 <= MAX_CTXT && parent.is_none() {
if ctxt32 <= MAX_CTXT && parent.is_none() {
// Inline-context format.
return Span {
lo_or_index: lo2,
lo_or_index: lo.0,
len_with_tag_or_marker: len as u16,
ctxt_or_parent_or_marker: ctxt2 as u16,
ctxt_or_parent_or_marker: ctxt32 as u16,
};
} else if ctxt2 == SyntaxContext::root().as_u32()
} else if ctxt32 == 0
&& let Some(parent) = parent
&& let parent2 = parent.local_def_index.as_u32()
&& parent2 <= MAX_CTXT
&& let parent32 = parent.local_def_index.as_u32()
&& parent32 <= MAX_CTXT
{
// Inline-parent format.
return Span {
lo_or_index: lo2,
lo_or_index: lo.0,
len_with_tag_or_marker: PARENT_TAG | len as u16,
ctxt_or_parent_or_marker: parent2 as u16,
ctxt_or_parent_or_marker: parent32 as u16,
};
}
}
// Partially-interned or fully-interned format.
let index =
with_span_interner(|interner| interner.intern(&SpanData { lo, hi, ctxt, parent }));
let ctxt_or_parent_or_marker = if ctxt2 <= MAX_CTXT {
ctxt2 as u16 // partially-interned
} else {
CTXT_INTERNED_MARKER // fully-interned
// Otherwise small ctxt may enable the partially inline format.
let index = |ctxt| {
with_span_interner(|interner| interner.intern(&SpanData { lo, hi, ctxt, parent }))
};
Span {
lo_or_index: index,
len_with_tag_or_marker: BASE_LEN_INTERNED_MARKER,
ctxt_or_parent_or_marker,
if ctxt32 <= MAX_CTXT {
// Partially-interned format.
Span {
// Interned ctxt should never be read, so it can use any value.
lo_or_index: index(SyntaxContext::from_u32(u32::MAX)),
len_with_tag_or_marker: BASE_LEN_INTERNED_MARKER,
ctxt_or_parent_or_marker: ctxt32 as u16,
}
} else {
// Interned format.
Span {
lo_or_index: index(ctxt),
len_with_tag_or_marker: BASE_LEN_INTERNED_MARKER,
ctxt_or_parent_or_marker: CTXT_INTERNED_MARKER,
}
}
}
@ -166,34 +212,17 @@ impl Span {
if self.len_with_tag_or_marker != BASE_LEN_INTERNED_MARKER {
if self.len_with_tag_or_marker & PARENT_TAG == 0 {
// Inline-context format.
let len = self.len_with_tag_or_marker as u32;
debug_assert!(len <= MAX_LEN);
SpanData {
lo: BytePos(self.lo_or_index),
hi: BytePos(self.lo_or_index.debug_strict_add(len)),
ctxt: SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32),
parent: None,
}
self.data_inline_ctxt()
} else {
// Inline-parent format.
let len = (self.len_with_tag_or_marker & !PARENT_TAG) as u32;
debug_assert!(len <= MAX_LEN);
let parent = LocalDefId {
local_def_index: DefIndex::from_u32(self.ctxt_or_parent_or_marker as u32),
};
SpanData {
lo: BytePos(self.lo_or_index),
hi: BytePos(self.lo_or_index.debug_strict_add(len)),
ctxt: SyntaxContext::root(),
parent: Some(parent),
}
self.data_inline_parent()
}
} else if self.ctxt_or_parent_or_marker != CTXT_INTERNED_MARKER {
// Partially-interned format.
self.data_partially_interned()
} else {
// Fully-interned or partially-interned format. In either case,
// the interned value contains all the data, so we don't need to
// distinguish them.
let index = self.lo_or_index;
with_span_interner(|interner| interner.spans[index as usize])
// Interned format.
self.data_interned()
}
}
@ -214,27 +243,73 @@ impl Span {
}
}
// For optimization we are interested in the cases in which the context is inline and the
// context update doesn't change the format. All non-inline or format-changing scenarios
// require accessing the interner and can fall back to `Span::new`.
#[inline]
pub fn update_ctxt(&mut self, update: impl FnOnce(SyntaxContext) -> SyntaxContext) {
let (updated_ctxt32, data);
if self.len_with_tag_or_marker != BASE_LEN_INTERNED_MARKER {
if self.len_with_tag_or_marker & PARENT_TAG == 0 {
// Inline-context format.
updated_ctxt32 =
update(SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32)).as_u32();
// Any small new context, including zero, preserves the format.
if updated_ctxt32 <= MAX_CTXT {
self.ctxt_or_parent_or_marker = updated_ctxt32 as u16;
return;
}
data = self.data_inline_ctxt();
} else {
// Inline-parent format.
updated_ctxt32 = update(SyntaxContext::root()).as_u32();
// The format is preserved only if the new context is zero.
if updated_ctxt32 == 0 {
// Do nothing.
return;
}
data = self.data_inline_parent();
}
} else if self.ctxt_or_parent_or_marker != CTXT_INTERNED_MARKER {
// Partially-interned format.
updated_ctxt32 =
update(SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32)).as_u32();
// Any small non-zero new context preserves the format.
// Zero may change the format to `InlineParent` if the parent and len are small enough.
if updated_ctxt32 <= MAX_CTXT && updated_ctxt32 != 0 {
self.ctxt_or_parent_or_marker = updated_ctxt32 as u16;
return;
}
data = self.data_partially_interned();
} else {
// Interned format.
data = self.data_interned();
updated_ctxt32 = update(data.ctxt).as_u32();
}
// We could not keep the span in the same inline format, so fall back to the complete logic.
*self = data.with_ctxt(SyntaxContext::from_u32(updated_ctxt32));
}
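// Hypothetical caller sketch for `update_ctxt` (the helper below is illustrative, not
// from this diff): the closure receives the current context and returns the new one,
// and the span falls back to `Span::new` only when the current format cannot simply
// keep the new context.
fn reset_to_root_ctxt(span: &mut Span) {
    // For the inline formats this stays on the in-place fast path above.
    span.update_ctxt(|_| SyntaxContext::root());
}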
// Returns either the syntax context, if it can be retrieved without taking the interner lock,
// or an index into the interner if it cannot.
#[inline]
fn inline_ctxt(self) -> Result<SyntaxContext, usize> {
Ok(if self.len_with_tag_or_marker != BASE_LEN_INTERNED_MARKER {
if self.len_with_tag_or_marker != BASE_LEN_INTERNED_MARKER {
if self.len_with_tag_or_marker & PARENT_TAG == 0 {
// Inline-context format.
SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32)
Ok(SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32))
} else {
// Inline-parent format. We know that the SyntaxContext is root.
SyntaxContext::root()
// Inline-parent format.
Ok(SyntaxContext::root())
}
} else if self.ctxt_or_parent_or_marker != CTXT_INTERNED_MARKER {
// Partially-interned format. This path avoids looking up the
// interned value, and is the whole point of the
// partially-interned format.
SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32)
// Partially-interned format.
Ok(SyntaxContext::from_u32(self.ctxt_or_parent_or_marker as u32))
} else {
// Fully-interned format.
return Err(self.lo_or_index as usize);
})
// Interned format.
Err(self.lo_or_index as usize)
}
}
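// A minimal sketch of how a fast-path caller can use `inline_ctxt`: take the inline
// context when it is available and touch the interner only otherwise. The helper name
// below is an assumption, not part of this diff; `with_span_interner` is the accessor
// already used above.
fn ctxt_fast(span: Span) -> SyntaxContext {
    match span.inline_ctxt() {
        Ok(ctxt) => ctxt,
        Err(index) => with_span_interner(|interner| interner.spans[index].ctxt),
    }
}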
/// This function is used as a fast path when decoding the full `SpanData` is not necessary.

View File

@ -1304,6 +1304,7 @@ symbols! {
offset_of,
offset_of_enum,
offset_of_nested,
offset_of_slice,
ok_or_else,
omit_gdb_pretty_printer_section,
on,

View File

@ -256,29 +256,6 @@ impl<'a, Ty> TyAndLayout<'a, Ty> {
Ty::is_transparent(self)
}
pub fn offset_of_subfield<C, I>(self, cx: &C, indices: I) -> Size
where
Ty: TyAbiInterface<'a, C>,
I: Iterator<Item = (VariantIdx, FieldIdx)>,
{
let mut layout = self;
let mut offset = Size::ZERO;
for (variant, field) in indices {
layout = layout.for_variant(cx, variant);
let index = field.index();
offset += layout.fields.offset(index);
layout = layout.field(cx, index);
assert!(
layout.is_sized(),
"offset of unsized field (type {:?}) cannot be computed statically",
layout.ty
);
}
offset
}
/// Finds the one field that is not a 1-ZST.
/// Returns `None` if there are multiple non-1-ZST fields or only 1-ZST-fields.
pub fn non_1zst_field<C>(&self, cx: &C) -> Option<(usize, Self)>

View File

@ -420,7 +420,7 @@ pub(crate) mod rustc {
fn from_tag(tag: ScalarInt, tcx: TyCtxt<'tcx>) -> Self {
use rustc_target::abi::Endian;
let size = tag.size();
let bits = tag.assert_bits(size);
let bits = tag.to_bits(size);
let bytes: [u8; 16];
let bytes = match tcx.data_layout.endian {
Endian::Little => {

View File

@ -47,7 +47,7 @@ fn destructure_const<'tcx>(
ty::Adt(def, args) => {
let (variant_idx, branches) = if def.is_enum() {
let (head, rest) = branches.split_first().unwrap();
(VariantIdx::from_u32(head.unwrap_leaf().try_to_u32().unwrap()), rest)
(VariantIdx::from_u32(head.unwrap_leaf().to_u32()), rest)
} else {
(FIRST_VARIANT, branches)
};

View File

@ -4,5 +4,4 @@ version = "0.1.0-preview"
edition = "2021"
[dependencies]
tracing = "0.1"
scoped-tls = "1.0"

View File

@ -68,6 +68,13 @@ pub mod consts {
pub const FRAC_1_SQRT_PI: f128 =
0.564189583547756286948079451560772585844050629328998856844086_f128;
/// 1/sqrt(2π)
#[doc(alias = "FRAC_1_SQRT_TAU")]
#[unstable(feature = "f128", issue = "116909")]
// Also, #[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_2PI: f128 =
0.398942280401432677939946059934381868475858631164934657665926_f128;
/// 2/π
#[unstable(feature = "f128", issue = "116909")]
pub const FRAC_2_PI: f128 = 0.636619772367581343075535053490057448137838582961825794990669_f128;

View File

@ -67,6 +67,12 @@ pub mod consts {
// Also, #[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_PI: f16 = 0.564189583547756286948079451560772586_f16;
/// 1/sqrt(2π)
#[doc(alias = "FRAC_1_SQRT_TAU")]
#[unstable(feature = "f16", issue = "116909")]
// Also, #[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_2PI: f16 = 0.398942280401432677939946059934381868_f16;
/// 2/π
#[unstable(feature = "f16", issue = "116909")]
pub const FRAC_2_PI: f16 = 0.636619772367581343075535053490057448_f16;

View File

@ -327,6 +327,11 @@ pub mod consts {
#[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_PI: f32 = 0.564189583547756286948079451560772586_f32;
/// 1/sqrt(2π)
#[doc(alias = "FRAC_1_SQRT_TAU")]
#[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_2PI: f32 = 0.398942280401432677939946059934381868_f32;
/// 2/π
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_2_PI: f32 = 0.636619772367581343075535053490057448_f32;

View File

@ -327,6 +327,11 @@ pub mod consts {
#[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_PI: f64 = 0.564189583547756286948079451560772586_f64;
/// 1/sqrt(2π)
#[doc(alias = "FRAC_1_SQRT_TAU")]
#[unstable(feature = "more_float_constants", issue = "103883")]
pub const FRAC_1_SQRT_2PI: f64 = 0.398942280401432677939946059934381868_f64;
/// 2/π
#[stable(feature = "rust1", since = "1.0.0")]
pub const FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
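// A quick numeric sanity check for the new constant, as a standalone sketch; it assumes
// a nightly toolchain with `#![feature(more_float_constants)]` enabled, since
// `FRAC_1_SQRT_2PI` is unstable here.
fn frac_1_sqrt_2pi_matches_definition() {
    // 1/sqrt(2π) computed from the stable TAU constant.
    let computed = 1.0_f64 / std::f64::consts::TAU.sqrt();
    assert!((std::f64::consts::FRAC_1_SQRT_2PI - computed).abs() < 1e-15);
}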

View File

@ -517,9 +517,13 @@ macro_rules! nonzero_integer {
/// ```
/// # use std::num::NonZero;
/// #
#[doc = concat!("let n = NonZero::<", stringify!($Int), ">::new(", $leading_zeros_test, ").unwrap();")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
#[doc = concat!("let n = NonZero::<", stringify!($Int), ">::new(", $leading_zeros_test, ")?;")]
///
/// assert_eq!(n.leading_zeros(), 0);
/// # Some(())
/// # }
/// ```
#[stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
#[rustc_const_stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
@ -545,9 +549,13 @@ macro_rules! nonzero_integer {
/// ```
/// # use std::num::NonZero;
/// #
#[doc = concat!("let n = NonZero::<", stringify!($Int), ">::new(0b0101000).unwrap();")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
#[doc = concat!("let n = NonZero::<", stringify!($Int), ">::new(0b0101000)?;")]
///
/// assert_eq!(n.trailing_zeros(), 3);
/// # Some(())
/// # }
/// ```
#[stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
#[rustc_const_stable(feature = "nonzero_leading_trailing_zeros", since = "1.53.0")]
@ -1101,9 +1109,13 @@ macro_rules! nonzero_integer_signedness_dependent_methods {
/// ```
/// # use std::num::NonZero;
/// #
#[doc = concat!("assert_eq!(NonZero::new(7", stringify!($Int), ").unwrap().ilog2(), 2);")]
#[doc = concat!("assert_eq!(NonZero::new(8", stringify!($Int), ").unwrap().ilog2(), 3);")]
#[doc = concat!("assert_eq!(NonZero::new(9", stringify!($Int), ").unwrap().ilog2(), 3);")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
#[doc = concat!("assert_eq!(NonZero::new(7", stringify!($Int), ")?.ilog2(), 2);")]
#[doc = concat!("assert_eq!(NonZero::new(8", stringify!($Int), ")?.ilog2(), 3);")]
#[doc = concat!("assert_eq!(NonZero::new(9", stringify!($Int), ")?.ilog2(), 3);")]
/// # Some(())
/// # }
/// ```
#[stable(feature = "int_log", since = "1.67.0")]
#[rustc_const_stable(feature = "int_log", since = "1.67.0")]
@ -1126,9 +1138,13 @@ macro_rules! nonzero_integer_signedness_dependent_methods {
/// ```
/// # use std::num::NonZero;
/// #
#[doc = concat!("assert_eq!(NonZero::new(99", stringify!($Int), ").unwrap().ilog10(), 1);")]
#[doc = concat!("assert_eq!(NonZero::new(100", stringify!($Int), ").unwrap().ilog10(), 2);")]
#[doc = concat!("assert_eq!(NonZero::new(101", stringify!($Int), ").unwrap().ilog10(), 2);")]
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
#[doc = concat!("assert_eq!(NonZero::new(99", stringify!($Int), ")?.ilog10(), 1);")]
#[doc = concat!("assert_eq!(NonZero::new(100", stringify!($Int), ")?.ilog10(), 2);")]
#[doc = concat!("assert_eq!(NonZero::new(101", stringify!($Int), ")?.ilog10(), 2);")]
/// # Some(())
/// # }
/// ```
#[stable(feature = "int_log", since = "1.67.0")]
#[rustc_const_stable(feature = "int_log", since = "1.67.0")]
@ -1187,10 +1203,16 @@ macro_rules! nonzero_integer_signedness_dependent_methods {
/// Basic usage:
///
/// ```
#[doc = concat!("let eight = std::num::NonZero::new(8", stringify!($Int), ").unwrap();")]
/// # use std::num::NonZero;
/// #
/// # fn main() { test().unwrap(); }
/// # fn test() -> Option<()> {
#[doc = concat!("let eight = NonZero::new(8", stringify!($Int), ")?;")]
/// assert!(eight.is_power_of_two());
#[doc = concat!("let ten = std::num::NonZero::new(10", stringify!($Int), ").unwrap();")]
#[doc = concat!("let ten = NonZero::new(10", stringify!($Int), ")?;")]
/// assert!(!ten.is_power_of_two());
/// # Some(())
/// # }
/// ```
#[must_use]
#[stable(feature = "nonzero_is_power_of_two", since = "1.59.0")]

View File

@ -462,21 +462,21 @@ pub fn current_exe() -> io::Result<PathBuf> {
#[cfg(target_os = "haiku")]
pub fn current_exe() -> io::Result<PathBuf> {
let mut name = vec![0; libc::PATH_MAX as usize];
unsafe {
let mut info: mem::MaybeUninit<libc::image_info> = mem::MaybeUninit::uninit();
let mut cookie: i32 = 0;
// the executable can be found at team id 0
let result = libc::_get_next_image_info(
0,
&mut cookie,
info.as_mut_ptr(),
mem::size_of::<libc::image_info>(),
let result = libc::find_path(
crate::ptr::null_mut(),
libc::path_base_directory::B_FIND_PATH_IMAGE_PATH,
crate::ptr::null_mut(),
name.as_mut_ptr(),
name.len(),
);
if result != 0 {
if result != libc::B_OK {
use crate::io::ErrorKind;
Err(io::const_io_error!(ErrorKind::Uncategorized, "Error getting executable path"))
} else {
let name = CStr::from_ptr((*info.as_ptr()).name.as_ptr()).to_bytes();
// find_path adds the null terminator.
let name = CStr::from_ptr(name.as_ptr()).to_bytes();
Ok(PathBuf::from(OsStr::from_bytes(name)))
}
}

View File

@ -1053,6 +1053,10 @@ fn signal_string(signal: i32) -> &'static str {
libc::SIGINFO => " (SIGINFO)",
#[cfg(target_os = "hurd")]
libc::SIGLOST => " (SIGLOST)",
#[cfg(target_os = "freebsd")]
libc::SIGTHR => " (SIGTHR)",
#[cfg(target_os = "freebsd")]
libc::SIGLIBRT => " (SIGLIBRT)",
_ => "",
}
}

View File

@ -251,3 +251,39 @@ pub fn get_last_error() -> WinError {
pub struct WinError {
pub code: u32,
}
impl WinError {
const fn new(code: u32) -> Self {
Self { code }
}
}
// Error code constants.
// The constant names should be the same as the winapi constants except for the leading `ERROR_`.
// Due to the sheer number of codes, error codes should only be added here on an as-needed basis.
// However, they should never be removed, as they may well be useful again in the future.
#[allow(unused)]
impl WinError {
/// Success is not an error.
/// Some Windows APIs do use this to distinguish between a zero return and an error return,
/// but we should never return this to users as an error.
pub const SUCCESS: Self = Self::new(c::ERROR_SUCCESS);
// tidy-alphabetical-start
pub const ACCESS_DENIED: Self = Self::new(c::ERROR_ACCESS_DENIED);
pub const ALREADY_EXISTS: Self = Self::new(c::ERROR_ALREADY_EXISTS);
pub const CANT_ACCESS_FILE: Self = Self::new(c::ERROR_CANT_ACCESS_FILE);
pub const DELETE_PENDING: Self = Self::new(c::ERROR_DELETE_PENDING);
pub const DIRECTORY: Self = Self::new(c::ERROR_DIRECTORY);
pub const FILE_NOT_FOUND: Self = Self::new(c::ERROR_FILE_NOT_FOUND);
pub const INSUFFICIENT_BUFFER: Self = Self::new(c::ERROR_INSUFFICIENT_BUFFER);
pub const INVALID_FUNCTION: Self = Self::new(c::ERROR_INVALID_FUNCTION);
pub const INVALID_HANDLE: Self = Self::new(c::ERROR_INVALID_HANDLE);
pub const INVALID_PARAMETER: Self = Self::new(c::ERROR_INVALID_PARAMETER);
pub const NO_MORE_FILES: Self = Self::new(c::ERROR_NO_MORE_FILES);
pub const NOT_FOUND: Self = Self::new(c::ERROR_NOT_FOUND);
pub const NOT_SUPPORTED: Self = Self::new(c::ERROR_NOT_SUPPORTED);
pub const OPERATION_ABORTED: Self = Self::new(c::ERROR_OPERATION_ABORTED);
pub const PATH_NOT_FOUND: Self = Self::new(c::ERROR_PATH_NOT_FOUND);
pub const SHARING_VIOLATION: Self = Self::new(c::ERROR_SHARING_VIOLATION);
pub const TIMEOUT: Self = Self::new(c::ERROR_TIMEOUT);
// tidy-alphabetical-end
}
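// A minimal sketch of the call-site pattern the hunks below switch to: compare the
// structured `WinError` from `api::get_last_error()` against a named constant instead of
// a raw `c::ERROR_*` code. The helper itself is illustrative; it assumes
// `use super::api::{self, WinError};` as in those call sites, and that `WinError`
// implements `PartialEq`, which the `==`/`!=` comparisons below rely on.
fn last_error_is_timeout() -> bool {
    api::get_last_error() == WinError::TIMEOUT
}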

View File

@ -18,7 +18,8 @@ use crate::sys::{c, cvt, Align8};
use crate::sys_common::{AsInner, FromInner, IntoInner};
use crate::thread;
use super::{api, to_u16s, IoResult};
use super::api::{self, WinError};
use super::{to_u16s, IoResult};
use crate::sys::path::maybe_verbatim;
pub struct File {
@ -130,10 +131,11 @@ impl Iterator for ReadDir {
let mut wfd = mem::zeroed();
loop {
if c::FindNextFileW(self.handle.0, &mut wfd) == 0 {
if api::get_last_error().code == c::ERROR_NO_MORE_FILES {
return None;
} else {
return Some(Err(Error::last_os_error()));
match api::get_last_error() {
WinError::NO_MORE_FILES => return None,
WinError { code } => {
return Some(Err(Error::from_raw_os_error(code as i32)));
}
}
}
if let Some(e) = DirEntry::new(&self.root, &wfd) {
@ -244,8 +246,6 @@ impl OpenOptions {
}
fn get_access_mode(&self) -> io::Result<c::DWORD> {
const ERROR_INVALID_PARAMETER: i32 = 87;
match (self.read, self.write, self.append, self.access_mode) {
(.., Some(mode)) => Ok(mode),
(true, false, false, None) => Ok(c::GENERIC_READ),
@ -255,23 +255,23 @@ impl OpenOptions {
(true, _, true, None) => {
Ok(c::GENERIC_READ | (c::FILE_GENERIC_WRITE & !c::FILE_WRITE_DATA))
}
(false, false, false, None) => Err(Error::from_raw_os_error(ERROR_INVALID_PARAMETER)),
(false, false, false, None) => {
Err(Error::from_raw_os_error(c::ERROR_INVALID_PARAMETER as i32))
}
}
}
fn get_creation_mode(&self) -> io::Result<c::DWORD> {
const ERROR_INVALID_PARAMETER: i32 = 87;
match (self.write, self.append) {
(true, false) => {}
(false, false) => {
if self.truncate || self.create || self.create_new {
return Err(Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
return Err(Error::from_raw_os_error(c::ERROR_INVALID_PARAMETER as i32));
}
}
(_, true) => {
if self.truncate && !self.create_new {
return Err(Error::from_raw_os_error(ERROR_INVALID_PARAMETER));
return Err(Error::from_raw_os_error(c::ERROR_INVALID_PARAMETER as i32));
}
}
}
@ -315,7 +315,7 @@ impl File {
// Manual truncation. See #115745.
if opts.truncate
&& creation == c::OPEN_ALWAYS
&& unsafe { c::GetLastError() } == c::ERROR_ALREADY_EXISTS
&& api::get_last_error() == WinError::ALREADY_EXISTS
{
unsafe {
// This originally used `FileAllocationInfo` instead of
@ -845,7 +845,7 @@ fn open_link_no_reparse(parent: &File, name: &[u16], access: u32) -> io::Result<
// We make a special exception for `STATUS_DELETE_PENDING` because
// otherwise this will be mapped to `ERROR_ACCESS_DENIED` which is
// very unhelpful.
Err(io::Error::from_raw_os_error(c::ERROR_DELETE_PENDING as _))
Err(io::Error::from_raw_os_error(c::ERROR_DELETE_PENDING as i32))
} else if status == c::STATUS_INVALID_PARAMETER
&& ATTRIBUTES.load(Ordering::Relaxed) == c::OBJ_DONT_REPARSE
{
@ -1097,7 +1097,7 @@ pub fn readdir(p: &Path) -> io::Result<ReadDir> {
//
// See issue #120040: https://github.com/rust-lang/rust/issues/120040.
let last_error = api::get_last_error();
if last_error.code == c::ERROR_FILE_NOT_FOUND {
if last_error == WinError::FILE_NOT_FOUND {
return Ok(ReadDir {
handle: FindNextFileHandle(find_handle),
root: Arc::new(root),

View File

@ -1,4 +1,4 @@
use super::api;
use super::api::{self, WinError};
use crate::sys::c;
use crate::sys::dur2timeout;
use core::ffi::c_void;
@ -72,7 +72,7 @@ pub fn wake_by_address_all<T>(address: &T) {
pub fn futex_wait<W: Waitable>(futex: &W::Atomic, expected: W, timeout: Option<Duration>) -> bool {
// return false only on timeout
wait_on_address(futex, expected, timeout) || api::get_last_error().code != c::ERROR_TIMEOUT
wait_on_address(futex, expected, timeout) || api::get_last_error() != WinError::TIMEOUT
}
pub fn futex_wake<T>(futex: &T) -> bool {

View File

@ -17,7 +17,8 @@ use crate::ptr;
use crate::slice;
use crate::sys::{c, cvt};
use super::{api, to_u16s};
use super::api::{self, WinError};
use super::to_u16s;
pub fn errno() -> i32 {
api::get_last_error().code as i32
@ -333,7 +334,7 @@ fn home_dir_crt() -> Option<PathBuf> {
buf,
&mut sz,
) {
0 if api::get_last_error().code != c::ERROR_INSUFFICIENT_BUFFER => 0,
0 if api::get_last_error() != WinError::INSUFFICIENT_BUFFER => 0,
0 => sz,
_ => sz - 1, // sz includes the null terminator
}
@ -358,7 +359,7 @@ fn home_dir_crt() -> Option<PathBuf> {
super::fill_utf16_buf(
|buf, mut sz| {
match c::GetUserProfileDirectoryW(token, buf, &mut sz) {
0 if api::get_last_error().code != c::ERROR_INSUFFICIENT_BUFFER => 0,
0 if api::get_last_error() != WinError::INSUFFICIENT_BUFFER => 0,
0 => sz,
_ => sz - 1, // sz includes the null terminator
}

View File

@ -12,6 +12,7 @@ use crate::sys::c;
use crate::sys::fs::{File, OpenOptions};
use crate::sys::handle::Handle;
use crate::sys::hashmap_random_keys;
use crate::sys::pal::windows::api::{self, WinError};
use crate::sys_common::{FromInner, IntoInner};
////////////////////////////////////////////////////////////////////////////////
@ -124,20 +125,19 @@ pub fn anon_pipe(ours_readable: bool, their_handle_inheritable: bool) -> io::Res
// testing strategy
// For more info, see https://github.com/rust-lang/rust/pull/37677.
if handle == c::INVALID_HANDLE_VALUE {
let err = io::Error::last_os_error();
let raw_os_err = err.raw_os_error();
let error = api::get_last_error();
if tries < 10 {
if raw_os_err == Some(c::ERROR_ACCESS_DENIED as i32) {
if error == WinError::ACCESS_DENIED {
continue;
} else if reject_remote_clients_flag != 0
&& raw_os_err == Some(c::ERROR_INVALID_PARAMETER as i32)
&& error == WinError::INVALID_PARAMETER
{
reject_remote_clients_flag = 0;
tries -= 1;
continue;
}
}
return Err(err);
return Err(io::Error::from_raw_os_error(error.code as i32));
}
ours = Handle::from_raw_handle(handle);
break;

View File

@ -31,6 +31,8 @@ use crate::sys_common::IntoInner;
use core::ffi::c_void;
use super::api::{self, WinError};
////////////////////////////////////////////////////////////////////////////////
// Command
////////////////////////////////////////////////////////////////////////////////
@ -645,12 +647,12 @@ impl Process {
pub fn kill(&mut self) -> io::Result<()> {
let result = unsafe { c::TerminateProcess(self.handle.as_raw_handle(), 1) };
if result == c::FALSE {
let error = unsafe { c::GetLastError() };
let error = api::get_last_error();
// TerminateProcess returns ERROR_ACCESS_DENIED if the process has already been
// terminated (by us, or for any other reason). So check if the process was actually
// terminated, and if so, do not return an error.
if error != c::ERROR_ACCESS_DENIED || self.try_wait().is_err() {
return Err(crate::io::Error::from_raw_os_error(error as i32));
if error != WinError::ACCESS_DENIED || self.try_wait().is_err() {
return Err(crate::io::Error::from_raw_os_error(error.code as i32));
}
}
Ok(())

View File

@ -1,6 +1,6 @@
#![unstable(issue = "none", feature = "windows_stdio")]
use super::api;
use super::api::{self, WinError};
use crate::cmp;
use crate::io;
use crate::mem::MaybeUninit;
@ -370,7 +370,7 @@ fn read_u16s(handle: c::HANDLE, buf: &mut [MaybeUninit<u16>]) -> io::Result<usiz
// ReadConsoleW returns success with ERROR_OPERATION_ABORTED for Ctrl-C or Ctrl-Break.
// Explicitly check for that case here and try again.
if amount == 0 && api::get_last_error().code == c::ERROR_OPERATION_ABORTED {
if amount == 0 && api::get_last_error() == WinError::OPERATION_ABORTED {
continue;
}
break;

View File

@ -7,8 +7,6 @@ edition = "2021"
getopts = { version = "0.2.21", features = ['rustc-dep-of-std'] }
std = { path = "../std" }
core = { path = "../core" }
panic_unwind = { path = "../panic_unwind" }
panic_abort = { path = "../panic_abort" }
[target.'cfg(not(all(windows, target_env = "msvc")))'.dependencies]
libc = { version = "0.2.150", default-features = false }

View File

@ -1041,6 +1041,21 @@ impl Step for PlainSourceTarball {
.env("RUSTC_BOOTSTRAP", "1")
.current_dir(plain_dst_src);
// Vendor packages that are required by opt-dist to collect PGO profiles.
let pkgs_for_pgo_training = build_helper::LLVM_PGO_CRATES
.iter()
.chain(build_helper::RUSTC_PGO_CRATES)
.map(|pkg| {
let mut manifest_path =
builder.src.join("./src/tools/rustc-perf/collector/compile-benchmarks");
manifest_path.push(pkg);
manifest_path.push("Cargo.toml");
manifest_path
});
for manifest_path in pkgs_for_pgo_training {
cmd.arg("--sync").arg(manifest_path);
}
let config = if !builder.config.dry_run() {
t!(String::from_utf8(t!(cmd.output()).stdout))
} else {

View File

@ -1101,6 +1101,8 @@ impl Step for Tidy {
/// Once tidy passes, this step also runs `fmt --check` if tests are being run
/// for the `dev` or `nightly` channels.
fn run(self, builder: &Builder<'_>) {
builder.build.update_submodule(Path::new("src/tools/rustc-perf"));
let mut cmd = builder.tool_cmd(Tool::Tidy);
cmd.arg(&builder.src);
cmd.arg(&builder.initial_cargo);
@ -1487,12 +1489,6 @@ impl Step for RunMake {
}
}
host_test!(RunMakeFullDeps {
path: "tests/run-make-fulldeps",
mode: "run-make",
suite: "run-make-fulldeps"
});
default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assembly" });
/// Coverage tests are a bit more complicated than other test suites, because
@ -1973,9 +1969,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cmd);
}
if !builder.config.dry_run()
&& (matches!(suite, "run-make" | "run-make-fulldeps") || mode == "coverage-run")
{
if !builder.config.dry_run() && matches!(mode, "run-make" | "coverage-run") {
// The llvm/bin directory contains many useful cross-platform
// tools. Pass the path to run-make tests so they can use them.
// (The coverage-run tests also need these tools to process
@ -1987,7 +1981,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
cmd.arg("--llvm-bin-dir").arg(llvm_bin_path);
}
if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
if !builder.config.dry_run() && mode == "run-make" {
// If LLD is available, add it to the PATH
if builder.config.lld_enabled {
let lld_install_root =
@ -2007,7 +2001,7 @@ NOTE: if you're sure you want to do this, please open an issue as to why. In the
// Only pass correct values for these flags for the `run-make` suite as it
// requires that a C++ compiler was configured which isn't always the case.
if !builder.config.dry_run() && matches!(suite, "run-make" | "run-make-fulldeps") {
if !builder.config.dry_run() && mode == "run-make" {
cmd.arg("--cc")
.arg(builder.cc(target))
.arg("--cxx")

View File

@ -332,7 +332,6 @@ const PATH_REMAP: &[(&str, &[&str])] = &[
"tests/mir-opt",
"tests/pretty",
"tests/run-make",
"tests/run-make-fulldeps",
"tests/run-pass-valgrind",
"tests/rustdoc",
"tests/rustdoc-gui",
@ -828,7 +827,6 @@ impl<'a> Builder<'a> {
test::RustAnalyzer,
test::ErrorIndex,
test::Distcheck,
test::RunMakeFullDeps,
test::Nomicon,
test::Reference,
test::RustdocBook,
@ -1038,14 +1036,26 @@ impl<'a> Builder<'a> {
}
pub fn doc_rust_lang_org_channel(&self) -> String {
let channel = match &*self.config.channel {
"stable" => &self.version,
"beta" => "beta",
"nightly" | "dev" => "nightly",
// custom build of rustdoc maybe? link to the latest stable docs just in case
_ => "stable",
// When using a precompiled compiler from CI, we need to use the CI rustc's channel and
// ignore `rust.channel` from the configuration. Otherwise most of the rustdoc tests
// will fail due to an incompatible `DOC_RUST_LANG_ORG_CHANNEL`.
let channel = if let Some(commit) = self.config.download_rustc_commit() {
self.config
.read_file_by_commit(&PathBuf::from("src/ci/channel"), commit)
.trim()
.to_owned()
} else {
match &*self.config.channel {
"stable" => &self.version,
"beta" => "beta",
"nightly" | "dev" => "nightly",
// custom build of rustdoc maybe? link to the latest stable docs just in case
_ => "stable",
}
.to_owned()
};
"https://doc.rust-lang.org/".to_owned() + channel
format!("https://doc.rust-lang.org/{channel}")
}
fn run_step_descriptions(&self, v: &[StepDescription], paths: &[PathBuf]) {

View File

@ -1608,19 +1608,8 @@ impl Config {
set(&mut config.channel, channel);
config.download_rustc_commit = config.download_ci_rustc_commit(download_rustc);
// This list is incomplete, please help by expanding it!
if config.download_rustc_commit.is_some() {
// We need the channel used by the downloaded compiler to match the one we set for rustdoc;
// otherwise rustdoc-ui tests break.
if config.channel != ci_channel
&& !(config.channel == "dev" && ci_channel == "nightly")
{
panic!(
"setting rust.channel={} is incompatible with download-rustc",
config.channel
);
}
}
// FIXME: handle download-rustc incompatible options.
debug = debug_toml;
debug_assertions = debug_assertions_toml;
@ -2134,17 +2123,29 @@ impl Config {
args
}
/// Returns the content of the given file at a specific commit.
pub(crate) fn read_file_by_commit(&self, file: &Path, commit: &str) -> String {
assert!(
self.rust_info.is_managed_git_subrepository(),
"`Config::read_file_by_commit` is not supported in non-git sources."
);
let mut git = self.git();
git.arg("show").arg(format!("{commit}:{}", file.to_str().unwrap()));
output(&mut git)
}
/// Bootstrap embeds a version number into the name of shared libraries it uploads in CI.
/// Return the version it would have used for the given commit.
pub(crate) fn artifact_version_part(&self, commit: &str) -> String {
let (channel, version) = if self.rust_info.is_managed_git_subrepository() {
let mut channel = self.git();
channel.arg("show").arg(format!("{commit}:src/ci/channel"));
let channel = output(&mut channel);
let mut version = self.git();
version.arg("show").arg(format!("{commit}:src/version"));
let version = output(&mut version);
(channel.trim().to_owned(), version.trim().to_owned())
let channel = self
.read_file_by_commit(&PathBuf::from("src/ci/channel"), commit)
.trim()
.to_owned();
let version =
self.read_file_by_commit(&PathBuf::from("src/version"), commit).trim().to_owned();
(channel, version)
} else {
let channel = fs::read_to_string(self.src.join("src/ci/channel"));
let version = fs::read_to_string(self.src.join("src/version"));

View File

@ -128,7 +128,6 @@ pub fn check(build: &mut Build) {
eprintln!(
"Consider upgrading libstdc++ or disabling the `llvm.download-ci-llvm` option."
);
crate::exit!(1);
}
}
tool::LibcxxVersion::Llvm(_) => {

View File

@ -233,7 +233,7 @@ For targets: `aarch64-unknown-linux-gnu`
- Operating System > Linux kernel version = 4.1.49
- Binary utilities > Version of binutils = 2.29.1
- C-library > glibc version = 2.17 -- aarch64 support was introduced in this version
- C compiler > gcc version = 8.5.0
- C compiler > gcc version = 13.2.0
- C compiler > C++ = ENABLE -- to cross compile LLVM
### `i586-linux-gnu.defconfig`

View File

@ -6,7 +6,5 @@ CT_ARCH_ARM=y
CT_ARCH_64=y
CT_KERNEL_LINUX=y
CT_LINUX_V_4_1=y
CT_BINUTILS_V_2_29=y
CT_GLIBC_V_2_17=y
CT_GCC_V_8=y
CT_CC_LANG_CXX=y

View File

@ -0,0 +1,40 @@
FROM ubuntu:22.04
ARG DEBIAN_FRONTEND=noninteractive
# libclang1 is required for libclang.so, which is required by bindgen
# clang, llvm and lld are required by RfL to compile host code
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
make \
ninja-build \
file \
curl \
ca-certificates \
python3 \
git \
cmake \
flex \
bison \
bc \
clang-15 \
libclang1-15 \
llvm-15 \
lld-15 \
libelf-dev \
libedit-dev \
libssl-dev \
pkg-config \
zlib1g-dev \
&& rm -rf /var/lib/apt/lists/*
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# RfL needs access to the clang, lld and llvm tools
ENV PATH="${PATH}:/usr/lib/llvm-15/bin"
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
COPY /scripts/rfl-build.sh /tmp/rfl-build.sh
ENV SCRIPT bash /tmp/rfl-build.sh

View File

@ -4,9 +4,9 @@ version = 3
[[package]]
name = "r-efi"
version = "4.4.0"
version = "4.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c47196f636c4cc0634b73b0405323d177753c2e15e866952c64ea22902567a34"
checksum = "e9e935efc5854715dfc0a4c9ef18dc69dee0ec3bf9cc3ab740db831c0fdd86a3"
[[package]]
name = "uefi_qemu_test"

View File

@ -44,6 +44,14 @@ ENV RUST_CONFIGURE_ARGS \
--set target.x86_64-unknown-linux-gnu.cc=clang \
--set target.x86_64-unknown-linux-gnu.cxx=clang++
# This job appears to be checking two separate things:
# - That we can build the compiler with `--enable-debug`
# (without necessarily testing the result).
# - That the tests with `//@ needs-matching-clang` pass, since they
# don't run by default unless RUSTBUILD_FORCE_CLANG_BASED_TESTS is set.
# - FIXME(https://github.com/rust-lang/rust/pull/126155#issuecomment-2156314273):
# Currently we only run the subset of tests with "clang" in their name.
ENV SCRIPT \
python3 ../x.py --stage 2 build && \
python3 ../x.py --stage 2 test tests/run-make-fulldeps --test-args clang
python3 ../x.py --stage 2 test tests/run-make --test-args clang

View File

@ -0,0 +1,68 @@
#!/bin/bash
set -euo pipefail
LINUX_VERSION=c13320499ba0efd93174ef6462ae8a7a2933f6e7
# Build rustc, rustdoc and cargo
../x.py build --stage 1 library rustdoc
../x.py build --stage 0 cargo
# Install rustup so that we can use the built toolchain easily, and also
# install bindgen in an easy way.
curl --proto '=https' --tlsv1.2 -sSf -o rustup.sh https://sh.rustup.rs
sh rustup.sh -y --default-toolchain none
source /cargo/env
BUILD_DIR=$(realpath ./build)
rustup toolchain link local "${BUILD_DIR}"/x86_64-unknown-linux-gnu/stage1
rustup default local
mkdir -p rfl
cd rfl
# Remove existing directory to make local builds easier
rm -rf linux || true
# Download Linux at a specific commit
mkdir -p linux
git -C linux init
git -C linux remote add origin https://github.com/torvalds/linux.git
git -C linux fetch --depth 1 origin ${LINUX_VERSION}
git -C linux checkout FETCH_HEAD
# Install bindgen
"${BUILD_DIR}"/x86_64-unknown-linux-gnu/stage0/bin/cargo install \
--version $(linux/scripts/min-tool-version.sh bindgen) \
bindgen-cli
# Configure Rust for Linux
cat <<EOF > linux/kernel/configs/rfl-for-rust-ci.config
# CONFIG_WERROR is not set
CONFIG_RUST=y
CONFIG_SAMPLES=y
CONFIG_SAMPLES_RUST=y
CONFIG_SAMPLE_RUST_MINIMAL=m
CONFIG_SAMPLE_RUST_PRINT=y
CONFIG_RUST_PHYLIB_ABSTRACTIONS=y
CONFIG_AX88796B_PHY=y
CONFIG_AX88796B_RUST_PHY=y
CONFIG_KUNIT=y
CONFIG_RUST_KERNEL_DOCTESTS=y
EOF
make -C linux LLVM=1 -j$(($(nproc) + 1)) \
rustavailable \
defconfig \
rfl-for-rust-ci.config
make -C linux LLVM=1 -j$(($(nproc) + 1)) \
samples/rust/rust_minimal.o \
samples/rust/rust_print.o \
drivers/net/phy/ax88796b_rust.o

View File

@ -4,13 +4,7 @@ set -ex
# Only run the stage 1 tests on merges, not on PR CI jobs.
if [[ -z "${PR_CI_JOB}" ]]; then
# When running gcc backend tests, we need to install `libgccjit` and to not run llvm codegen
# tests as it will fail them.
if [[ "${ENABLE_GCC_CODEGEN}" == "1" ]]; then
../x.py --stage 1 test --skip src/tools/tidy --skip tests/codegen
else
../x.py --stage 1 test --skip src/tools/tidy
fi
../x.py --stage 1 test --skip src/tools/tidy
# Run the `mir-opt` tests again but this time for a 32-bit target.
# This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have
@ -29,14 +23,8 @@ if [[ -z "${PR_CI_JOB}" ]]; then
--rustc-args "--cfg feature=\"optimize_for_size\""
fi
# When running gcc backend tests, we need to install `libgccjit` and to not run llvm codegen
# tests as it will fail them.
# NOTE: intentionally uses all of `x.py`, `x`, and `x.ps1` to make sure they all work on Linux.
if [[ "${ENABLE_GCC_CODEGEN}" == "1" ]]; then
../x.py --stage 2 test --skip src/tools/tidy --skip tests/codegen
else
../x.py --stage 2 test --skip src/tools/tidy
fi
../x.py --stage 2 test --skip src/tools/tidy
# Run the `mir-opt` tests again but this time for a 32-bit target.
# This enforces that tests using `// EMIT_MIR_FOR_EACH_BIT_WIDTH` have

View File

@ -37,7 +37,7 @@ runners:
envs:
env-x86_64-apple-tests: &env-x86_64-apple-tests
SCRIPT: ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc --skip tests/run-make-fulldeps
SCRIPT: ./x.py --stage 2 test --skip tests/ui --skip tests/rustdoc
RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc
RUSTC_RETRY_LINKER_ON_SEGFAULT: 1
MACOSX_DEPLOYMENT_TARGET: 10.12
@ -294,7 +294,7 @@ auto:
- image: x86_64-apple-2
env:
SCRIPT: ./x.py --stage 2 test tests/ui tests/rustdoc tests/run-make-fulldeps
SCRIPT: ./x.py --stage 2 test tests/ui tests/rustdoc
<<: *env-x86_64-apple-tests
<<: *job-macos-xl
@ -465,3 +465,8 @@ auto:
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-extended --enable-profiler
SCRIPT: python x.py dist bootstrap --include-default-paths
<<: *job-windows-8c
# Tests integration with Rust for Linux.
# Builds stage 1 compiler and tries to compile a few RfL examples with it.
- image: rfl
<<: *job-linux-8c

View File

@ -160,8 +160,7 @@ export exclude_tests='
--exclude src/tools/linkchecker
--exclude tests/ui-fulldeps
--exclude rustc
--exclude rustdoc
--exclude tests/run-make-fulldeps'
--exclude rustdoc'
env $build_env \
./x.py test \

View File

@ -431,8 +431,7 @@ fn print_const_with_custom_print_scalar<'tcx>(
(mir::Const::Val(mir::ConstValue::Scalar(int), _), ty::Int(i)) => {
let ty = ct.ty();
let size = tcx.layout_of(ty::ParamEnv::empty().and(ty)).unwrap().size;
let data = int.assert_bits(size);
let sign_extended_data = size.sign_extend(data) as i128;
let sign_extended_data = int.assert_scalar_int().to_int(size);
let mut output = if with_underscores {
format_integer_with_underscore_sep(&sign_extended_data.to_string())
} else {

View File

@ -128,6 +128,8 @@ pub(crate) struct Options {
pub(crate) enable_per_target_ignores: bool,
/// Do not run doctests, compile them if should_test is active.
pub(crate) no_run: bool,
/// Source path remappings from `--remap-path-prefix` (each entry maps a `from` prefix to a `to` prefix).
pub(crate) remap_path_prefix: Vec<(PathBuf, PathBuf)>,
/// The path to a rustc-like binary to build tests with. If not set, we
/// default to loading from `$sysroot/bin/rustc`.
@ -211,6 +213,7 @@ impl fmt::Debug for Options {
.field("run_check", &self.run_check)
.field("no_run", &self.no_run)
.field("test_builder_wrappers", &self.test_builder_wrappers)
.field("remap-file-prefix", &self.remap_path_prefix)
.field("nocapture", &self.nocapture)
.field("scrape_examples_options", &self.scrape_examples_options)
.field("unstable_features", &self.unstable_features)
@ -372,6 +375,13 @@ impl Options {
let codegen_options = CodegenOptions::build(early_dcx, matches);
let unstable_opts = UnstableOptions::build(early_dcx, matches);
let remap_path_prefix = match parse_remap_path_prefix(&matches) {
Ok(prefix_mappings) => prefix_mappings,
Err(err) => {
early_dcx.early_fatal(err);
}
};
let dcx = new_dcx(error_format, None, diagnostic_width, &unstable_opts);
// check for deprecated options
@ -772,6 +782,7 @@ impl Options {
run_check,
no_run,
test_builder_wrappers,
remap_path_prefix,
nocapture,
crate_name,
output_format,
@ -820,6 +831,21 @@ impl Options {
}
}
fn parse_remap_path_prefix(
matches: &getopts::Matches,
) -> Result<Vec<(PathBuf, PathBuf)>, &'static str> {
matches
.opt_strs("remap-path-prefix")
.into_iter()
.map(|remap| {
remap
.rsplit_once('=')
.ok_or("--remap-path-prefix must contain '=' between FROM and TO")
.map(|(from, to)| (PathBuf::from(from), PathBuf::from(to)))
})
.collect()
}
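// A quick illustration of the parsing rule above (split on the last '='), written as a
// standalone sketch rather than rustdoc's actual option plumbing:
use std::path::PathBuf;

fn split_remap(arg: &str) -> Option<(PathBuf, PathBuf)> {
    arg.rsplit_once('=').map(|(from, to)| (PathBuf::from(from), PathBuf::from(to)))
}

// `split_remap("/home/user/project=/remapped")` yields
// `Some(("/home/user/project".into(), "/remapped".into()))`, while an argument without
// any '=' yields `None`, which `parse_remap_path_prefix` reports as an error.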
/// Prints deprecation warnings for deprecated options
fn check_deprecated_options(matches: &getopts::Matches, dcx: &rustc_errors::DiagCtxt) {
let deprecated_flags = [];

Some files were not shown because too many files have changed in this diff.