Merge branch 'master' into relnotes-1.42.0

XAMPPRocky 2020-02-26 21:39:30 +01:00 committed by GitHub
commit 526280a853
1542 changed files with 37908 additions and 23863 deletions

View File

@ -114,6 +114,7 @@ James Deng <cnjamesdeng@gmail.com> <cnJamesDeng@gmail.com>
James Miller <bladeon@gmail.com> <james@aatch.net>
James Perry <james.austin.perry@gmail.com>
Jason Fager <jfager@gmail.com>
Jason Liquorish <jason@liquori.sh> <Bassetts@users.noreply.github.com>
Jason Orendorff <jorendorff@mozilla.com> <jason.orendorff@gmail.com>
Jason Orendorff <jorendorff@mozilla.com> <jason@mozmac-2.local>
Jason Toffaletti <toffaletti@gmail.com> Jason Toffaletti <jason@topsy.com>

View File

@ -121,9 +121,9 @@ checksum = "1d49d90015b3c36167a20fe2810c5cd875ad504b39cff3d4eae7977e6b7c1cb2"
[[package]]
name = "backtrace"
version = "0.3.40"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "924c76597f0d9ca25d762c25a4d369d51267536465dc5064bdf0eb073ed477ea"
checksum = "e4036b9bf40f3cf16aba72a3d65e8a520fc4bafcdc7079aea8f848c58c5b5536"
dependencies = [
"backtrace-sys",
"cfg-if",
@ -281,7 +281,7 @@ dependencies = [
[[package]]
name = "cargo"
version = "0.43.0"
version = "0.44.0"
dependencies = [
"anyhow",
"atty",
@ -292,6 +292,7 @@ dependencies = [
"clap",
"core-foundation 0.7.0",
"crates-io",
"crossbeam-channel",
"crossbeam-utils 0.7.0",
"crypto-hash",
"curl",
@ -497,7 +498,7 @@ dependencies = [
"itertools 0.8.0",
"lazy_static 1.4.0",
"matches",
"pulldown-cmark 0.6.1",
"pulldown-cmark 0.7.0",
"quine-mc_cluskey",
"regex-syntax",
"semver",
@ -575,9 +576,9 @@ dependencies = [
[[package]]
name = "compiler_builtins"
version = "0.1.24"
version = "0.1.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9975aefa63997ef75ca9cf013ff1bb81487aaa0b622c21053afd3b92979a7af"
checksum = "438ac08ddc5efe81452f984a9e33ba425b00b31d1f48e6acd9e2210aa28cc52e"
dependencies = [
"cc",
"rustc-std-workspace-core",
@ -721,12 +722,11 @@ dependencies = [
[[package]]
name = "crossbeam-channel"
version = "0.3.8"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f0ed1a4de2235cabda8558ff5840bffb97fcb64c97827f354a451307df5f72b"
checksum = "acec9a3b0b3559f15aee4f90746c4e5e293b701c0f7d3925d24e01645267b68c"
dependencies = [
"crossbeam-utils 0.6.5",
"smallvec 0.6.10",
"crossbeam-utils 0.7.0",
]
[[package]]
@ -879,14 +879,13 @@ dependencies = [
[[package]]
name = "derive_more"
version = "0.13.0"
version = "0.99.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f57d78cf3bd45270dad4e70c21ec77a960b36c7a841ff9db76aaa775a8fb871"
checksum = "2159be042979966de68315bce7034bb000c775f22e3e834e1c52ff78f041cae8"
dependencies = [
"proc-macro2 0.4.30",
"quote 0.6.12",
"rustc_version",
"syn 0.15.35",
"proc-macro2 1.0.3",
"quote 1.0.2",
"syn 1.0.11",
]
[[package]]
@ -1077,13 +1076,14 @@ checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
[[package]]
name = "filetime"
version = "0.2.4"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2df5c1a8c4be27e7707789dc42ae65976e60b394afd293d1419ab915833e646"
checksum = "1ff6d4dab0aa0c8e6346d46052e93b13a16cf847b54ed357087c35011048cc7d"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"winapi 0.3.8",
]
[[package]]
@ -1537,9 +1537,9 @@ checksum = "c3360c7b59e5ffa2653671fb74b4741a5d343c03f331c0a4aeda42b5c2b0ec7d"
[[package]]
name = "ignore"
version = "0.4.10"
version = "0.4.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ec16832258409d571aaef8273f3c3cc5b060d784e159d1a0f3b0017308f84a7"
checksum = "522daefc3b69036f80c7d2990b28ff9e0471c683bad05ca258e0a01dd22c5a1e"
dependencies = [
"crossbeam-channel",
"globset",
@ -1548,7 +1548,7 @@ dependencies = [
"memchr",
"regex",
"same-file",
"thread_local",
"thread_local 1.0.1",
"walkdir",
"winapi-util",
]
@ -1564,7 +1564,7 @@ dependencies = [
"rand_xoshiro",
"sized-chunks",
"typenum",
"version_check 0.9.1",
"version_check",
]
[[package]]
@ -1657,9 +1657,9 @@ checksum = "9ad0485404155f45cce53a40d4b2d6ac356418300daed05273d9e26f91c390be"
[[package]]
name = "jsonrpc-client-transports"
version = "13.1.0"
version = "14.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39577db48b004cffb4c5b8e5c9b993c177c52599ecbee88711e815acf65144db"
checksum = "0a9ae166c4d1f702d297cd76d4b55758ace80272ffc6dbb139fdc1bf810de40b"
dependencies = [
"failure",
"futures",
@ -1676,9 +1676,9 @@ dependencies = [
[[package]]
name = "jsonrpc-core"
version = "13.2.0"
version = "14.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "91d767c183a7e58618a609499d359ce3820700b3ebb4823a18c343b4a2a41a0d"
checksum = "fe3b688648f1ef5d5072229e2d672ecb92cbff7d1c79bcf3fd5898f3f3df0970"
dependencies = [
"futures",
"log",
@ -1689,63 +1689,62 @@ dependencies = [
[[package]]
name = "jsonrpc-core-client"
version = "13.1.0"
version = "14.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f047c10738edee7c3c6acf5241a0ce33df32ef9230c1a7fb03e4a77ee72c992f"
checksum = "080dc110be17701097df238fad3c816d4a478a1899dfbcf8ec8957dd40ec7304"
dependencies = [
"jsonrpc-client-transports",
]
[[package]]
name = "jsonrpc-derive"
version = "13.1.0"
version = "14.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "29f9149f785deaae92a4c834a9a1a83a4313b8cfedccf15362cd4cf039a64501"
checksum = "8609af8f63b626e8e211f52441fcdb6ec54f1a446606b10d5c89ae9bf8a20058"
dependencies = [
"proc-macro-crate",
"proc-macro2 0.4.30",
"quote 0.6.12",
"syn 0.15.35",
"proc-macro2 1.0.3",
"quote 1.0.2",
"syn 1.0.11",
]
[[package]]
name = "jsonrpc-ipc-server"
version = "13.1.0"
version = "14.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "256c5e4292c17b4c2ecdf542299dc8e9d6b3939c075c54825570ad9317fe5751"
checksum = "b579cd0840d7db3ebaadf52f6f31ec601a260e78d610e44f68634f919e34497a"
dependencies = [
"jsonrpc-core",
"jsonrpc-server-utils",
"log",
"parity-tokio-ipc",
"parking_lot",
"parking_lot 0.9.0",
"tokio-service",
]
[[package]]
name = "jsonrpc-pubsub"
version = "13.1.0"
version = "14.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2c08b444cc0ed70263798834343d0ac875e664257df8079160f23ac1ea79446"
checksum = "5b31c9b90731276fdd24d896f31bb10aecf2e5151733364ae81123186643d939"
dependencies = [
"jsonrpc-core",
"log",
"parking_lot",
"parking_lot 0.10.0",
"serde",
]
[[package]]
name = "jsonrpc-server-utils"
version = "13.1.0"
version = "14.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44561bfdd31401bad790527f1e951dde144f2341ddc3e1b859d32945e1a34eff"
checksum = "95b7635e618a0edbbe0d2a2bbbc69874277c49383fcf6c3c0414491cfb517d22"
dependencies = [
"bytes",
"globset",
"jsonrpc-core",
"lazy_static 1.4.0",
"log",
"num_cpus",
"tokio",
"tokio-codec",
"unicase",
@ -2009,15 +2008,15 @@ checksum = "fef709d3257013bba7cff14fc504e07e80631d3fe0f6d38ce63b8f6510ccb932"
dependencies = [
"byteorder",
"memmap",
"parking_lot",
"parking_lot 0.9.0",
"rustc-hash",
]
[[package]]
name = "memchr"
version = "2.2.0"
version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2efc7bc57c883d4a4d6e3246905283d8dae951bb3bd32f49d6ef297f546e1c39"
checksum = "53445de381a1f436797497c61d851644d0e8e88e6140f22872ad33a704933978"
[[package]]
name = "memmap"
@ -2351,10 +2350,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f842b1982eb6c2fe34036a4fbfb06dd185a3f5c8edfaacdf7d1ea10b07de6252"
dependencies = [
"lock_api",
"parking_lot_core",
"parking_lot_core 0.6.2",
"rustc_version",
]
[[package]]
name = "parking_lot"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e98c49ab0b7ce5b222f2cc9193fc4efe11c6d0bd4f648e374684a6857b1cfc"
dependencies = [
"lock_api",
"parking_lot_core 0.7.0",
]
[[package]]
name = "parking_lot_core"
version = "0.6.2"
@ -2370,6 +2379,20 @@ dependencies = [
"winapi 0.3.8",
]
[[package]]
name = "parking_lot_core"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7582838484df45743c8434fbff785e8edf260c28748353d44bc0da32e0ceabf1"
dependencies = [
"cfg-if",
"cloudabi",
"libc",
"redox_syscall",
"smallvec 1.0.0",
"winapi 0.3.8",
]
[[package]]
name = "percent-encoding"
version = "1.0.1"
@ -2504,12 +2527,11 @@ dependencies = [
[[package]]
name = "pretty_env_logger"
version = "0.3.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df8b3f4e0475def7d9c2e5de8e5a1306949849761e107b360d03e98eafaffd61"
checksum = "926d36b9553851b8b0005f1275891b392ee4d2d833852c417ed025477350fb9d"
dependencies = [
"chrono",
"env_logger 0.6.2",
"env_logger 0.7.1",
"log",
]
@ -2580,17 +2602,6 @@ dependencies = [
"url 2.1.0",
]
[[package]]
name = "pulldown-cmark"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "77043da1282374688ee212dc44b3f37ff929431de9c9adc3053bd3cee5630357"
dependencies = [
"bitflags",
"memchr",
"unicase",
]
[[package]]
name = "pulldown-cmark"
version = "0.6.1"
@ -2603,6 +2614,17 @@ dependencies = [
"unicase",
]
[[package]]
name = "pulldown-cmark"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c2d7fd131800e0d63df52aff46201acaab70b431a4a1ec6f0343fe8e64f35a4"
dependencies = [
"bitflags",
"memchr",
"unicase",
]
[[package]]
name = "punycode"
version = "0.4.0"
@ -2641,18 +2663,24 @@ dependencies = [
[[package]]
name = "racer"
version = "2.1.29"
version = "2.1.31"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a6d7ffceb4da3e0a29c18986f0469c209f4db3ab9f2ffe286eaa1104a3e5028"
checksum = "0ff33fa15ac0384376741d16ddb05a65263dde4e2c5d0f7a9f3747db788764aa"
dependencies = [
"bitflags",
"clap",
"derive_more",
"env_logger 0.6.2",
"humantime 1.3.0",
"env_logger 0.7.1",
"humantime 2.0.0",
"lazy_static 1.4.0",
"log",
"rls-span",
"rustc-ap-rustc_ast_pretty",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_parse",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"rustc-ap-syntax",
]
@ -2876,7 +2904,7 @@ dependencies = [
"aho-corasick",
"memchr",
"regex-syntax",
"thread_local",
"thread_local 0.3.6",
"utf8-ranges",
]
@ -3033,7 +3061,6 @@ version = "0.6.0"
dependencies = [
"clippy_lints",
"env_logger 0.7.1",
"failure",
"futures",
"log",
"rand 0.7.3",
@ -3084,12 +3111,10 @@ dependencies = [
"bitflags",
"byteorder",
"chalk-engine",
"fmt_macros",
"graphviz",
"jobserver",
"log",
"measureme",
"parking_lot",
"parking_lot 0.9.0",
"polonius-engine",
"rustc-rayon",
"rustc-rayon-core",
@ -3112,26 +3137,57 @@ dependencies = [
[[package]]
name = "rustc-ap-arena"
version = "610.0.0"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7475f4c707269b56eb7144c53591e3cd6369a5aa1d66434829ea11df96d5e7e3"
checksum = "ea82fa3d9a8add7422228ca1a2cbba0784fa8861f56148ff64da08b3c7921b03"
dependencies = [
"rustc-ap-rustc_data_structures",
"smallvec 0.6.10",
"smallvec 1.0.0",
]
[[package]]
name = "rustc-ap-graphviz"
version = "610.0.0"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6e59a55520f140a70a3e0fad80a36e807caa85e9d7016167b91a5b521ea929be"
checksum = "638d0b2b3bcf99824e0cb5a25dbc547b61dc20942e11daf6a97e981918aa18e5"
[[package]]
name = "rustc-ap-rustc_ast_pretty"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d38bab04dd676dee6d2f9670506a18c31bfce38bf7f8420aa83eb1140ecde049"
dependencies = [
"log",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_span",
"rustc-ap-syntax",
]
[[package]]
name = "rustc-ap-rustc_attr"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10b843ba8b1ed43739133047673b9f6a54d3b3b4d328d69c6ea89ff971395f35"
dependencies = [
"rustc-ap-rustc_ast_pretty",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_feature",
"rustc-ap-rustc_macros",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"rustc-ap-serialize",
"rustc-ap-syntax",
"smallvec 1.0.0",
]
[[package]]
name = "rustc-ap-rustc_data_structures"
version = "610.0.0"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6420857d5a088f680ec1ba736ffba4ee9c1964b0d397e6318f38d461f4f7d5cb"
checksum = "dc3d1c6d0a80ab0c1df76405377cec0f3d5423fb5b0953a8eac70a2ad6c44df2"
dependencies = [
"bitflags",
"cfg-if",
"crossbeam-utils 0.6.5",
"ena",
@ -3139,58 +3195,78 @@ dependencies = [
"jobserver",
"lazy_static 1.4.0",
"log",
"parking_lot",
"measureme",
"parking_lot 0.9.0",
"rustc-ap-graphviz",
"rustc-ap-rustc_index",
"rustc-ap-serialize",
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
"smallvec 0.6.10",
"smallvec 1.0.0",
"stable_deref_trait",
"winapi 0.3.8",
]
[[package]]
name = "rustc-ap-rustc_errors"
version = "610.0.0"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8abfca0960131262254a91d02ff4903526a261ede730d7a2c75b4234c867cdc0"
checksum = "4909a1eca29331332257230f29120a8ff68c9e37d868c564fcd599e430cf8914"
dependencies = [
"annotate-snippets",
"atty",
"log",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_span",
"rustc-ap-serialize",
"rustc-ap-syntax_pos",
"term_size",
"termcolor",
"termize",
"unicode-width",
"winapi 0.3.8",
]
[[package]]
name = "rustc-ap-rustc_index"
version = "610.0.0"
name = "rustc-ap-rustc_feature"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a395509dcb90a92c1479c085639594624e06b4ab3fc7c1b795b46a61f2d4f65"
checksum = "63ab887a181d795cf5fd3edadf367760deafb90aefb844f168ab5255266e3478"
dependencies = [
"lazy_static 1.4.0",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_span",
]
[[package]]
name = "rustc-ap-rustc_fs_util"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70814116df3c5fbec8f06f6a1d013ca481f620fd22a9475754e9bf3ee9ba70d8"
[[package]]
name = "rustc-ap-rustc_index"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac1bf1d3cf3d119d41353d6fd229ef7272d5097bc0924de021c0294bf86d48bf"
dependencies = [
"rustc-ap-serialize",
"smallvec 0.6.10",
"smallvec 1.0.0",
]
[[package]]
name = "rustc-ap-rustc_lexer"
version = "610.0.0"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64eac8a0e6efb8f55292aa24be0208c7c0538236c613e79952fd1fa3d54bcf8e"
checksum = "4cda21a32cebdc11ec4f5393aa2fcde5ed1b2f673a8571e5a4dcdf07e4ae9cac"
dependencies = [
"unicode-xid 0.2.0",
]
[[package]]
name = "rustc-ap-rustc_macros"
version = "610.0.0"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f99795e8be4877e9e05d59f201e1740c1cf673364655def5848606d9e25b75af"
checksum = "75c47b48ea51910ecfd853c9248a9bf4c767bc823449ab6a1d864dff65fbae16"
dependencies = [
"itertools 0.8.0",
"proc-macro2 1.0.3",
@ -3200,56 +3276,53 @@ dependencies = [
]
[[package]]
name = "rustc-ap-rustc_target"
version = "610.0.0"
name = "rustc-ap-rustc_parse"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f22e21fdd8e1c0030f507158fa79b9f1e080e6241aba994d0f97c14a0a07a826"
checksum = "abd88e89cd5b5d28dcd3a347a3d534c08627d9455570dc1a2d402cb8437b9d30"
dependencies = [
"bitflags",
"log",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_index",
"rustc-ap-serialize",
"rustc-ap-syntax_pos",
]
[[package]]
name = "rustc-ap-serialize"
version = "610.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb1cd6ef5135408d62559866e79986ca261f4c1333253d500e5e66fe66d1432e"
dependencies = [
"indexmap",
"smallvec 0.6.10",
]
[[package]]
name = "rustc-ap-syntax"
version = "610.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61fc1c901d2cbd24cae95d7bc5a58aa7661ec3dc5320c78c32830a52a685c33c"
dependencies = [
"bitflags",
"lazy_static 1.4.0",
"log",
"rustc-ap-rustc_ast_pretty",
"rustc-ap-rustc_attr",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_index",
"rustc-ap-rustc_feature",
"rustc-ap-rustc_lexer",
"rustc-ap-rustc_target",
"rustc-ap-serialize",
"rustc-ap-syntax_pos",
"scoped-tls",
"smallvec 0.6.10",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"rustc-ap-syntax",
"smallvec 1.0.0",
"unicode-normalization",
]
[[package]]
name = "rustc-ap-syntax_pos"
version = "610.0.0"
name = "rustc-ap-rustc_session"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "230534f638255853bb9f13987537e00a818435a0cc54b68d97221b6822c8f1bc"
checksum = "5b8487b4575fbb2d1fc6f1cd61225efd108a4d36817e6fb9b643d57fcae9cb12"
dependencies = [
"log",
"num_cpus",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_feature",
"rustc-ap-rustc_fs_util",
"rustc-ap-rustc_index",
"rustc-ap-rustc_span",
"rustc-ap-rustc_target",
"rustc-ap-serialize",
"rustc-ap-syntax",
]
[[package]]
name = "rustc-ap-rustc_span"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f69746c0d4c21bf20a5bb2bd247261a1aa8631f04202d7303352942dde70d987"
dependencies = [
"cfg-if",
"log",
"rustc-ap-arena",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_index",
@ -3259,6 +3332,48 @@ dependencies = [
"unicode-width",
]
[[package]]
name = "rustc-ap-rustc_target"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bbc6ae09b5d42ec66edd520e8412e0615c53a7c93607fe33dc4abab60ba7c8b"
dependencies = [
"bitflags",
"log",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_index",
"rustc-ap-rustc_macros",
"rustc-ap-rustc_span",
"rustc-ap-serialize",
]
[[package]]
name = "rustc-ap-serialize"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13a1ead0252fc3d96da4c336a95950be6795f2b00c84a67ccadf26142f8cb41"
dependencies = [
"indexmap",
"smallvec 1.0.0",
]
[[package]]
name = "rustc-ap-syntax"
version = "642.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1f59f48ca3a2ec16a7e82e718ed5aadf9c9e08cf63015d28b4e774767524a6a"
dependencies = [
"log",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_index",
"rustc-ap-rustc_lexer",
"rustc-ap-rustc_macros",
"rustc-ap-rustc_span",
"rustc-ap-serialize",
"scoped-tls",
"smallvec 1.0.0",
]
[[package]]
name = "rustc-demangle"
version = "0.1.16"
@ -3442,6 +3557,7 @@ dependencies = [
"flate2",
"libc",
"log",
"measureme",
"rustc",
"rustc-demangle",
"rustc_attr",
@ -3522,7 +3638,7 @@ dependencies = [
"lazy_static 1.4.0",
"log",
"measureme",
"parking_lot",
"parking_lot 0.9.0",
"rustc-hash",
"rustc-rayon",
"rustc-rayon-core",
@ -3542,6 +3658,7 @@ dependencies = [
"log",
"rustc",
"rustc_ast_pretty",
"rustc_codegen_ssa",
"rustc_codegen_utils",
"rustc_data_structures",
"rustc_error_codes",
@ -3619,6 +3736,7 @@ version = "0.0.0"
name = "rustc_hir"
version = "0.0.0"
dependencies = [
"lazy_static 1.4.0",
"rustc_ast_pretty",
"rustc_data_structures",
"rustc_errors",
@ -3656,6 +3774,28 @@ dependencies = [
"smallvec 1.0.0",
]
[[package]]
name = "rustc_infer"
version = "0.0.0"
dependencies = [
"fmt_macros",
"graphviz",
"log",
"rustc",
"rustc_attr",
"rustc_data_structures",
"rustc_error_codes",
"rustc_errors",
"rustc_hir",
"rustc_index",
"rustc_macros",
"rustc_session",
"rustc_span",
"rustc_target",
"smallvec 1.0.0",
"syntax",
]
[[package]]
name = "rustc_interface"
version = "0.0.0"
@ -3676,6 +3816,7 @@ dependencies = [
"rustc_expand",
"rustc_hir",
"rustc_incremental",
"rustc_infer",
"rustc_lint",
"rustc_metadata",
"rustc_mir",
@ -3718,6 +3859,7 @@ dependencies = [
"rustc_feature",
"rustc_hir",
"rustc_index",
"rustc_infer",
"rustc_session",
"rustc_span",
"rustc_target",
@ -3738,7 +3880,6 @@ dependencies = [
name = "rustc_macros"
version = "0.1.0"
dependencies = [
"itertools 0.8.0",
"proc-macro2 1.0.3",
"quote 1.0.2",
"syn 1.0.11",
@ -3788,6 +3929,7 @@ dependencies = [
"rustc_errors",
"rustc_hir",
"rustc_index",
"rustc_infer",
"rustc_lexer",
"rustc_macros",
"rustc_span",
@ -3802,7 +3944,6 @@ name = "rustc_mir_build"
version = "0.0.0"
dependencies = [
"arena",
"itertools 0.8.0",
"log",
"rustc",
"rustc_apfloat",
@ -3811,6 +3952,7 @@ dependencies = [
"rustc_errors",
"rustc_hir",
"rustc_index",
"rustc_infer",
"rustc_macros",
"rustc_session",
"rustc_span",
@ -3851,6 +3993,7 @@ dependencies = [
"rustc_feature",
"rustc_hir",
"rustc_index",
"rustc_infer",
"rustc_session",
"rustc_span",
"rustc_target",
@ -3901,6 +4044,7 @@ dependencies = [
"rustc_expand",
"rustc_feature",
"rustc_hir",
"rustc_infer",
"rustc_metadata",
"rustc_session",
"rustc_span",
@ -3990,6 +4134,7 @@ dependencies = [
"rustc",
"rustc_data_structures",
"rustc_hir",
"rustc_infer",
"rustc_macros",
"rustc_span",
"rustc_target",
@ -4005,7 +4150,9 @@ dependencies = [
"rustc",
"rustc_data_structures",
"rustc_hir",
"rustc_infer",
"rustc_span",
"rustc_target",
]
[[package]]
@ -4020,6 +4167,7 @@ dependencies = [
"rustc_errors",
"rustc_hir",
"rustc_index",
"rustc_infer",
"rustc_span",
"rustc_target",
"smallvec 1.0.0",
@ -4041,7 +4189,7 @@ version = "0.0.0"
dependencies = [
"itertools 0.8.0",
"minifier",
"pulldown-cmark 0.5.3",
"pulldown-cmark 0.7.0",
"rustc-rayon",
"serde",
"serde_json",
@ -4095,7 +4243,7 @@ dependencies = [
[[package]]
name = "rustfmt-nightly"
version = "1.4.11"
version = "1.4.12"
dependencies = [
"annotate-snippets",
"bytecount",
@ -4111,9 +4259,14 @@ dependencies = [
"lazy_static 1.4.0",
"log",
"regex",
"rustc-ap-rustc_ast_pretty",
"rustc-ap-rustc_data_structures",
"rustc-ap-rustc_errors",
"rustc-ap-rustc_parse",
"rustc-ap-rustc_session",
"rustc-ap-rustc_span",
"rustc-ap-rustc_target",
"rustc-ap-syntax",
"rustc-ap-syntax_pos",
"rustc-workspace-hack",
"rustfmt-config_proc_macro",
"serde",
@ -4547,9 +4700,9 @@ dependencies = [
[[package]]
name = "tar"
version = "0.4.20"
version = "0.4.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a303ba60a099fcd2aaa646b14d2724591a96a75283e4b7ed3d1a1658909d9ae2"
checksum = "b3196bfbffbba3e57481b6ea32249fbaf590396a52505a2615adbb79d9d826d3"
dependencies = [
"filetime",
"libc",
@ -4601,17 +4754,6 @@ dependencies = [
"winapi 0.3.8",
]
[[package]]
name = "term_size"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e5b9a66db815dcfd2da92db471106457082577c3c278d4138ab3e3b4e189327"
dependencies = [
"kernel32-sys",
"libc",
"winapi 0.2.8",
]
[[package]]
name = "termcolor"
version = "1.0.4"
@ -4685,6 +4827,15 @@ dependencies = [
"lazy_static 1.4.0",
]
[[package]]
name = "thread_local"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d40c6d1b69745a6ec6fb1ca717914848da4b44ae29d9b3080cbee91d72a69b14"
dependencies = [
"lazy_static 1.4.0",
]
[[package]]
name = "tidy"
version = "0.1.0"
@ -4839,7 +4990,7 @@ dependencies = [
"log",
"mio",
"num_cpus",
"parking_lot",
"parking_lot 0.9.0",
"slab",
"tokio-executor",
"tokio-io",
@ -5038,11 +5189,11 @@ checksum = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86"
[[package]]
name = "unicase"
version = "2.5.1"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e2e6bd1e59e56598518beb94fd6db628ded570326f0a98c679a304bd9f00150"
checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
dependencies = [
"version_check 0.1.5",
"version_check",
]
[[package]]
@ -5212,12 +5363,6 @@ dependencies = [
"failure",
]
[[package]]
name = "version_check"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
[[package]]
name = "version_check"
version = "0.9.1"

View File

@ -95,6 +95,17 @@ Compatibility Notes
[`CondVar::wait_timeout_while`]: https://doc.rust-lang.org/stable/std/sync/struct.Condvar.html#method.wait_timeout_while
Version 1.41.1 (2020-02-27)
===========================
* [Always check types of static items][69145]
* [Always check lifetime bounds of `Copy` impls][69145]
* [Fix miscompilation in callers of `Layout::repeat`][69225]
[69225]: https://github.com/rust-lang/rust/issues/69225
[69145]: https://github.com/rust-lang/rust/pull/69145
Version 1.41.0 (2020-01-30)
===========================

View File

@ -395,6 +395,15 @@
# rustc to execute.
#lld = false
# Indicates whether LLD will be used to link Rust crates during bootstrap on
# supported platforms. The LLD from the bootstrap distribution will be used
# and not the LLD compiled during the bootstrap.
#
# LLD will not be used if we're cross linking or running tests.
#
# Explicitly setting the linker for a target will override this option.
#use-lld = false
# Indicates whether some LLVM tools, like llvm-objdump, will be made available in the
# sysroot.
#llvm-tools = false
@ -435,6 +444,10 @@
# Use LLVM libunwind as the implementation for Rust's unwinder.
#llvm-libunwind = false
# Enable Windows Control Flow Guard checks in the standard library.
# This only applies from stage 1 onwards, and only for Windows targets.
#control-flow-guard = false
# =============================================================================
# Options for specific targets
#
@ -463,6 +476,7 @@
# Linker to be used to link Rust code. Note that the
# default value is platform specific, and if not specified it may also depend on
# what platform is crossing to what platform.
# Setting this will override the `use-lld` option for Rust code.
#linker = "cc"
# Path to the `llvm-config` binary of the installation of a custom LLVM to link

View File

@ -7,6 +7,7 @@ merge_derives = false
# tidy only checks files which are not ignored, each entry follows gitignore style
ignore = [
"build",
"/vendor/",
# tests for now are not formatted, as they are sometimes pretty-printing constrained
# (and generally rustfmt can move around comments in UI-testing incompatible ways)

View File

@ -134,6 +134,11 @@ fn main() {
cmd.arg(format!("-Clinker={}", host_linker));
}
// Override linker flavor if necessary.
if let Ok(host_linker_flavor) = env::var("RUSTC_HOST_LINKER_FLAVOR") {
cmd.arg(format!("-Clinker-flavor={}", host_linker_flavor));
}
if let Ok(s) = env::var("RUSTC_HOST_CRT_STATIC") {
if s == "true" {
cmd.arg("-C").arg("target-feature=+crt-static");

View File

@ -332,7 +332,6 @@ class RustBuild(object):
self.use_vendored_sources = ''
self.verbose = False
def download_stage0(self):
"""Fetch the build system for Rust, written in Rust
@ -351,7 +350,7 @@ class RustBuild(object):
try:
with tempfile.NamedTemporaryFile(delete=False) as temp_file:
temp_path = temp_file.name
with tarfile.open(temp_path, "w:xz") as tar:
with tarfile.open(temp_path, "w:xz"):
pass
return True
except tarfile.CompressionError:

View File

@ -692,7 +692,7 @@ impl<'a> Builder<'a> {
cmd.env_remove("MAKEFLAGS");
cmd.env_remove("MFLAGS");
if let Some(linker) = self.linker(compiler.host) {
if let Some(linker) = self.linker(compiler.host, true) {
cmd.env("RUSTC_TARGET_LINKER", linker);
}
cmd
@ -952,10 +952,31 @@ impl<'a> Builder<'a> {
}
}
if let Some(host_linker) = self.linker(compiler.host) {
// FIXME: Don't use LLD if we're compiling libtest, since it fails to link it.
// See https://github.com/rust-lang/rust/issues/68647.
let can_use_lld = mode != Mode::Std;
// FIXME: The beta compiler doesn't pick the `lld-link` flavor for `*-pc-windows-msvc`
// Remove `RUSTC_HOST_LINKER_FLAVOR` when this is fixed
let lld_linker_flavor = |linker: &Path, target: Interned<String>| {
compiler.stage == 0
&& linker.file_name() == Some(OsStr::new("rust-lld"))
&& target.contains("pc-windows-msvc")
};
if let Some(host_linker) = self.linker(compiler.host, can_use_lld) {
if lld_linker_flavor(host_linker, compiler.host) {
cargo.env("RUSTC_HOST_LINKER_FLAVOR", "lld-link");
}
cargo.env("RUSTC_HOST_LINKER", host_linker);
}
if let Some(target_linker) = self.linker(target) {
if let Some(target_linker) = self.linker(target, can_use_lld) {
if lld_linker_flavor(target_linker, target) {
rustflags.arg("-Clinker-flavor=lld-link");
}
let target = crate::envify(&target);
cargo.env(&format!("CARGO_TARGET_{}_LINKER", target), target_linker);
}
@ -1114,6 +1135,20 @@ impl<'a> Builder<'a> {
);
}
// If Control Flow Guard is enabled, pass the `control_flow_guard=checks` flag to rustc
// when compiling the standard library, since this might be linked into the final outputs
// produced by rustc. Since this mitigation is only available on Windows, only enable it
// for the standard library in case the compiler is run on a non-Windows platform.
// This is not needed for stage 0 artifacts because these will only be used for building
// the stage 1 compiler.
if cfg!(windows)
&& mode == Mode::Std
&& self.config.control_flow_guard
&& compiler.stage >= 1
{
rustflags.arg("-Zcontrol_flow_guard=checks");
}
// For `cargo doc` invocations, make rustdoc print the Rust version into the docs
cargo.env("RUSTDOC_CRATE_VERSION", self.rust_version());

View File

@ -82,6 +82,7 @@ pub struct Config {
pub llvm_use_linker: Option<String>,
pub llvm_allow_old_toolchain: Option<bool>,
pub use_lld: bool,
pub lld_enabled: bool,
pub lldb_enabled: bool,
pub llvm_tools_enabled: bool,
@ -115,6 +116,7 @@ pub struct Config {
pub targets: Vec<Interned<String>>,
pub local_rebuild: bool,
pub jemalloc: bool,
pub control_flow_guard: bool,
// dist misc
pub dist_sign_folder: Option<PathBuf>,
@ -175,6 +177,15 @@ pub struct Target {
pub no_std: bool,
}
impl Target {
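/// Builds a `Target` for the given triple, marking bare-metal (`*-none-*`) and `nvptx*` triples as `no_std`.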
pub fn from_triple(triple: &str) -> Self {
let mut target: Self = Default::default();
if triple.contains("-none-") || triple.contains("nvptx") {
target.no_std = true;
}
target
}
}
/// Structure of the `config.toml` file that configuration is read from.
///
/// This structure uses `Decodable` to automatically decode a TOML configuration
@ -321,6 +332,7 @@ struct Rust {
save_toolstates: Option<String>,
codegen_backends: Option<Vec<String>>,
lld: Option<bool>,
use_lld: Option<bool>,
llvm_tools: Option<bool>,
lldb: Option<bool>,
deny_warnings: Option<bool>,
@ -331,6 +343,7 @@ struct Rust {
jemalloc: Option<bool>,
test_compare_mode: Option<bool>,
llvm_libunwind: Option<bool>,
control_flow_guard: Option<bool>,
}
/// TOML representation of how each build target is configured.
@ -349,6 +362,7 @@ struct TomlTarget {
musl_root: Option<String>,
wasi_root: Option<String>,
qemu_rootfs: Option<String>,
no_std: Option<bool>,
}
impl Config {
@ -565,6 +579,7 @@ impl Config {
if let Some(true) = rust.incremental {
config.incremental = true;
}
set(&mut config.use_lld, rust.use_lld);
set(&mut config.lld_enabled, rust.lld);
set(&mut config.lldb_enabled, rust.lldb);
set(&mut config.llvm_tools_enabled, rust.llvm_tools);
@ -577,6 +592,7 @@ impl Config {
set(&mut config.rust_verify_llvm_ir, rust.verify_llvm_ir);
config.rust_thin_lto_import_instr_limit = rust.thin_lto_import_instr_limit;
set(&mut config.rust_remap_debuginfo, rust.remap_debuginfo);
set(&mut config.control_flow_guard, rust.control_flow_guard);
if let Some(ref backends) = rust.codegen_backends {
config.rust_codegen_backends =
@ -589,7 +605,7 @@ impl Config {
if let Some(ref t) = toml.target {
for (triple, cfg) in t {
let mut target = Target::default();
let mut target = Target::from_triple(triple);
if let Some(ref s) = cfg.llvm_config {
target.llvm_config = Some(config.src.join(s));
@ -600,6 +616,9 @@ impl Config {
if let Some(ref s) = cfg.android_ndk {
target.ndk = Some(config.src.join(s));
}
if let Some(s) = cfg.no_std {
target.no_std = s;
}
target.cc = cfg.cc.clone().map(PathBuf::from);
target.cxx = cfg.cxx.clone().map(PathBuf::from);
target.ar = cfg.ar.clone().map(PathBuf::from);

View File

@ -60,10 +60,11 @@ o("lld", "rust.lld", "build lld")
o("lldb", "rust.lldb", "build lldb")
o("missing-tools", "dist.missing-tools", "allow failures when building tools")
o("use-libcxx", "llvm.use-libcxx", "build LLVM with libc++")
o("control-flow-guard", "rust.control-flow-guard", "Enable Control Flow Guard")
o("cflags", "llvm.cflags", "build LLVM with these extra compiler flags")
o("cxxflags", "llvm.cxxflags", "build LLVM with these extra compiler flags")
o("ldflags", "llvm.ldflags", "build LLVM with these extra linker flags")
v("llvm-cflags", "llvm.cflags", "build LLVM with these extra compiler flags")
v("llvm-cxxflags", "llvm.cxxflags", "build LLVM with these extra compiler flags")
v("llvm-ldflags", "llvm.ldflags", "build LLVM with these extra linker flags")
o("llvm-libunwind", "rust.llvm-libunwind", "use LLVM libunwind")
@ -398,6 +399,7 @@ def is_number(value):
except ValueError:
return False
# Here we walk through the constructed configuration we have from the parsed
# command line arguments. We then apply each piece of configuration by
# basically just doing a `sed` to change the various configuration line to what

View File

@ -103,7 +103,6 @@
//! More documentation can be found in each respective module below, and you can
//! also check out the `src/bootstrap/README.md` file for more information.
#![feature(core_intrinsics)]
#![feature(drain_filter)]
use std::cell::{Cell, RefCell};
@ -239,9 +238,10 @@ pub struct Build {
hosts: Vec<Interned<String>>,
targets: Vec<Interned<String>>,
// Stage 0 (downloaded) compiler and cargo or their local rust equivalents
// Stage 0 (downloaded) compiler, lld and cargo or their local rust equivalents
initial_rustc: PathBuf,
initial_cargo: PathBuf,
initial_lld: PathBuf,
// Runtime state filled in later on
// C/C++ compilers and archiver for all targets
@ -343,9 +343,18 @@ impl Build {
// we always try to use git for LLVM builds
let in_tree_llvm_info = channel::GitInfo::new(false, &src.join("src/llvm-project"));
let initial_sysroot = config.initial_rustc.parent().unwrap().parent().unwrap();
let initial_lld = initial_sysroot
.join("lib")
.join("rustlib")
.join(config.build)
.join("bin")
.join("rust-lld");
let mut build = Build {
initial_rustc: config.initial_rustc.clone(),
initial_cargo: config.initial_cargo.clone(),
initial_lld,
local_rebuild: config.local_rebuild,
fail_fast: config.cmd.fail_fast(),
doc_tests: config.cmd.doc_tests(),
@ -810,7 +819,7 @@ impl Build {
}
/// Returns the path to the linker for the given target if it needs to be overridden.
fn linker(&self, target: Interned<String>) -> Option<&Path> {
fn linker(&self, target: Interned<String>, can_use_lld: bool) -> Option<&Path> {
if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.as_ref())
{
Some(linker)
@ -819,6 +828,8 @@ impl Build {
&& !target.contains("msvc")
{
Some(self.cc(target))
} else if can_use_lld && self.config.use_lld && self.build == target {
Some(&self.initial_lld)
} else {
None
}

View File

@ -262,7 +262,7 @@ impl Step for Llvm {
cfg.define("PYTHON_EXECUTABLE", python);
}
configure_cmake(builder, target, &mut cfg);
configure_cmake(builder, target, &mut cfg, true);
// FIXME: we don't actually need to build all LLVM tools and all LLVM
// libraries here, e.g., we just want a few components and a few
@ -301,7 +301,12 @@ fn check_llvm_version(builder: &Builder<'_>, llvm_config: &Path) {
panic!("\n\nbad LLVM version: {}, need >=7.0\n\n", version)
}
fn configure_cmake(builder: &Builder<'_>, target: Interned<String>, cfg: &mut cmake::Config) {
fn configure_cmake(
builder: &Builder<'_>,
target: Interned<String>,
cfg: &mut cmake::Config,
use_compiler_launcher: bool,
) {
// Do not print installation messages for up-to-date files.
// LLVM and LLD builds can produce a lot of those and hit CI limits on log size.
cfg.define("CMAKE_INSTALL_MESSAGE", "LAZY");
@ -372,10 +377,12 @@ fn configure_cmake(builder: &Builder<'_>, target: Interned<String>, cfg: &mut cm
} else {
// If ccache is configured we inform the build a little differently how
// to invoke ccache while also invoking our compilers.
if use_compiler_launcher {
if let Some(ref ccache) = builder.config.ccache {
cfg.define("CMAKE_C_COMPILER_LAUNCHER", ccache)
.define("CMAKE_CXX_COMPILER_LAUNCHER", ccache);
}
}
cfg.define("CMAKE_C_COMPILER", sanitize_cc(cc))
.define("CMAKE_CXX_COMPILER", sanitize_cc(cxx));
}
@ -458,7 +465,7 @@ impl Step for Lld {
t!(fs::create_dir_all(&out_dir));
let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
configure_cmake(builder, target, &mut cfg);
configure_cmake(builder, target, &mut cfg, true);
// This is an awful, awful hack. Discovered when we migrated to using
// clang-cl to compile LLVM/LLD it turns out that LLD, when built out of
@ -571,7 +578,7 @@ impl Step for Sanitizers {
}
let out_dir = builder.native_dir(self.target).join("sanitizers");
let runtimes = supported_sanitizers(&out_dir, self.target);
let runtimes = supported_sanitizers(&out_dir, self.target, &builder.config.channel);
if runtimes.is_empty() {
return runtimes;
}
@ -595,10 +602,7 @@ impl Step for Sanitizers {
let _time = util::timeit(&builder);
let mut cfg = cmake::Config::new(&compiler_rt_dir);
cfg.target(&self.target);
cfg.host(&builder.config.build);
cfg.profile("Release");
cfg.define("CMAKE_C_COMPILER_TARGET", self.target);
cfg.define("COMPILER_RT_BUILD_BUILTINS", "OFF");
cfg.define("COMPILER_RT_BUILD_CRT", "OFF");
@ -610,6 +614,12 @@ impl Step for Sanitizers {
cfg.define("COMPILER_RT_USE_LIBCXX", "OFF");
cfg.define("LLVM_CONFIG_PATH", &llvm_config);
// On Darwin targets the sanitizer runtimes are built as universal binaries.
// Unfortunately sccache currently lacks support to build them successfully.
// Disable compiler launcher on Darwin targets to avoid potential issues.
let use_compiler_launcher = !self.target.contains("apple-darwin");
configure_cmake(builder, self.target, &mut cfg, use_compiler_launcher);
t!(fs::create_dir_all(&out_dir));
cfg.out_dir(out_dir);
@ -635,7 +645,11 @@ pub struct SanitizerRuntime {
}
/// Returns sanitizers available on a given target.
fn supported_sanitizers(out_dir: &Path, target: Interned<String>) -> Vec<SanitizerRuntime> {
fn supported_sanitizers(
out_dir: &Path,
target: Interned<String>,
channel: &str,
) -> Vec<SanitizerRuntime> {
let mut result = Vec::new();
match &*target {
"x86_64-apple-darwin" => {
@ -644,7 +658,7 @@ fn supported_sanitizers(out_dir: &Path, target: Interned<String>) -> Vec<Sanitiz
cmake_target: format!("clang_rt.{}_osx_dynamic", s),
path: out_dir
.join(&format!("build/lib/darwin/libclang_rt.{}_osx_dynamic.dylib", s)),
name: format!("librustc_rt.{}.dylib", s),
name: format!("librustc-{}_rt.{}.dylib", channel, s),
});
}
}
@ -653,7 +667,7 @@ fn supported_sanitizers(out_dir: &Path, target: Interned<String>) -> Vec<Sanitiz
result.push(SanitizerRuntime {
cmake_target: format!("clang_rt.{}-x86_64", s),
path: out_dir.join(&format!("build/lib/linux/libclang_rt.{}-x86_64.a", s)),
name: format!("librustc_rt.{}.a", s),
name: format!("librustc-{}_rt.{}.a", channel, s),
});
}
}
@ -662,7 +676,7 @@ fn supported_sanitizers(out_dir: &Path, target: Interned<String>) -> Vec<Sanitiz
result.push(SanitizerRuntime {
cmake_target: format!("clang_rt.{}-x86_64", s),
path: out_dir.join(&format!("build/lib/fuchsia/libclang_rt.{}-x86_64.a", s)),
name: format!("librustc_rt.{}.a", s),
name: format!("librustc-{}_rt.{}.a", channel, s),
});
}
}
@ -671,7 +685,7 @@ fn supported_sanitizers(out_dir: &Path, target: Interned<String>) -> Vec<Sanitiz
result.push(SanitizerRuntime {
cmake_target: format!("clang_rt.{}-aarch64", s),
path: out_dir.join(&format!("build/lib/fuchsia/libclang_rt.{}-aarch64.a", s)),
name: format!("librustc_rt.{}.a", s),
name: format!("librustc-{}_rt.{}.a", channel, s),
});
}
}

View File

@ -17,6 +17,7 @@ use std::process::Command;
use build_helper::{output, t};
use crate::config::Target;
use crate::Build;
struct Finder {
@ -192,13 +193,9 @@ pub fn check(build: &mut Build) {
panic!("the iOS target is only supported on macOS");
}
build.config.target_config.entry(target.clone()).or_insert(Target::from_triple(target));
if target.contains("-none-") || target.contains("nvptx") {
if build.no_std(*target).is_none() {
let target = build.config.target_config.entry(target.clone()).or_default();
target.no_std = true;
}
if build.no_std(*target) == Some(false) {
panic!("All the *-none-* and nvptx* targets are no-std targets")
}

View File

@ -388,6 +388,8 @@ impl Step for Miri {
cargo.env("XARGO_RUST_SRC", builder.src.join("src"));
// Debug things.
cargo.env("RUST_BACKTRACE", "1");
// Override bootstrap's `rustc` wrapper, which would otherwise overwrite our flags.
cargo.env("RUSTC_DEBUG_ASSERTIONS", "true");
// Let cargo-miri know where xargo ended up.
cargo.env("XARGO", builder.out.join("bin").join("xargo"));
@ -397,7 +399,7 @@ impl Step for Miri {
}
// # Determine where Miri put its sysroot.
// To this end, we run `cargo miri setup --env` and capture the output.
// To this end, we run `cargo miri setup --print-sysroot` and capture the output.
// (We do this separately from the above so that when the setup actually
// happens we get some output.)
// We re-use the `cargo` from above.
@ -596,7 +598,7 @@ impl Step for RustdocTheme {
.env("RUSTDOC_REAL", builder.rustdoc(self.compiler))
.env("RUSTDOC_CRATE_VERSION", builder.rust_version())
.env("RUSTC_BOOTSTRAP", "1");
if let Some(linker) = builder.linker(self.compiler.host) {
if let Some(linker) = builder.linker(self.compiler.host, true) {
cmd.env("RUSTC_TARGET_LINKER", linker);
}
try_run(builder, &mut cmd);
@ -662,7 +664,7 @@ impl Step for RustdocJSNotStd {
target: self.target,
mode: "js-doc-test",
suite: "rustdoc-js",
path: None,
path: "src/test/rustdoc-js",
compare_mode: None,
});
} else {
@ -698,7 +700,7 @@ impl Step for RustdocUi {
target: self.target,
mode: "ui",
suite: "rustdoc-ui",
path: Some("src/test/rustdoc-ui"),
path: "src/test/rustdoc-ui",
compare_mode: None,
})
}
@ -843,7 +845,7 @@ macro_rules! test_definitions {
target: self.target,
mode: $mode,
suite: $suite,
path: Some($path),
path: $path,
compare_mode: $compare_mode,
})
}
@ -926,7 +928,7 @@ struct Compiletest {
target: Interned<String>,
mode: &'static str,
suite: &'static str,
path: Option<&'static str>,
path: &'static str,
compare_mode: Option<&'static str>,
}
@ -949,7 +951,7 @@ impl Step for Compiletest {
let suite = self.suite;
// Path for test suite
let suite_path = self.path.unwrap_or("");
let suite_path = self.path;
// Skip codegen tests if they aren't enabled in configuration.
if !builder.config.codegen_tests && suite == "codegen" {
@ -1035,7 +1037,8 @@ impl Step for Compiletest {
flags.push("-Zunstable-options".to_string());
flags.push(builder.config.cmd.rustc_args().join(" "));
if let Some(linker) = builder.linker(target) {
// Don't use LLD here since we want to test that rustc finds and uses a linker by itself.
if let Some(linker) = builder.linker(target, false) {
cmd.arg("--linker").arg(linker);
}
@ -1050,10 +1053,10 @@ impl Step for Compiletest {
cmd.arg("--docck-python").arg(builder.python());
if builder.config.build.ends_with("apple-darwin") {
// Force /usr/bin/python on macOS for LLDB tests because we're loading the
// Force /usr/bin/python3 on macOS for LLDB tests because we're loading the
// LLDB plugin's compiled module which only works with the system python
// (namely not Homebrew-installed python)
cmd.arg("--lldb-python").arg("/usr/bin/python");
cmd.arg("--lldb-python").arg("/usr/bin/python3");
} else {
cmd.arg("--lldb-python").arg(builder.python());
}
@ -1263,15 +1266,15 @@ impl Step for Compiletest {
}
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
struct DocTest {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct BookTest {
compiler: Compiler,
path: &'static str,
path: PathBuf,
name: &'static str,
is_ext_doc: bool,
}
impl Step for DocTest {
impl Step for BookTest {
type Output = ();
const ONLY_HOSTS: bool = true;
@ -1279,12 +1282,59 @@ impl Step for DocTest {
run.never()
}
/// Runs `rustdoc --test` for all documentation in `src/doc`.
/// Runs the documentation tests for a book in `src/doc`.
///
/// This will run all tests in our markdown documentation (e.g., the book)
/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
/// `compiler`.
/// This uses the `rustdoc` that sits next to `compiler`.
fn run(self, builder: &Builder<'_>) {
// External docs are different from local because:
// - Some books need pre-processing by mdbook before being tested.
// - They need to save their state to toolstate.
// - They are only tested on the "checktools" builders.
//
// The local docs are tested by default, and we don't want to pay the
// cost of building mdbook, so they use `rustdoc --test` directly.
// Also, the unstable book is special because SUMMARY.md is generated,
// so it is easier to just run `rustdoc` on its files.
if self.is_ext_doc {
self.run_ext_doc(builder);
} else {
self.run_local_doc(builder);
}
}
}
impl BookTest {
/// This runs the equivalent of `mdbook test` (via the rustbook wrapper)
/// which in turn runs `rustdoc --test` on each file in the book.
fn run_ext_doc(self, builder: &Builder<'_>) {
let compiler = self.compiler;
builder.ensure(compile::Std { compiler, target: compiler.host });
// mdbook just executes a binary named "rustdoc", so we need to update
// PATH so that it points to our rustdoc.
let mut rustdoc_path = builder.rustdoc(compiler);
rustdoc_path.pop();
let old_path = env::var_os("PATH").unwrap_or_default();
let new_path = env::join_paths(iter::once(rustdoc_path).chain(env::split_paths(&old_path)))
.expect("could not add rustdoc to PATH");
let mut rustbook_cmd = builder.tool_cmd(Tool::Rustbook);
let path = builder.src.join(&self.path);
rustbook_cmd.env("PATH", new_path).arg("test").arg(path);
builder.add_rust_test_threads(&mut rustbook_cmd);
builder.info(&format!("Testing rustbook {}", self.path.display()));
let _time = util::timeit(&builder);
let toolstate = if try_run(builder, &mut rustbook_cmd) {
ToolState::TestPass
} else {
ToolState::TestFail
};
builder.save_toolstate(self.name, toolstate);
}
/// This runs `rustdoc --test` on all `.md` files in the path.
fn run_local_doc(self, builder: &Builder<'_>) {
let compiler = self.compiler;
builder.ensure(compile::Std { compiler, target: compiler.host });
@ -1293,7 +1343,6 @@ impl Step for DocTest {
// tests for all files that end in `*.md`
let mut stack = vec![builder.src.join(self.path)];
let _time = util::timeit(&builder);
let mut files = Vec::new();
while let Some(p) = stack.pop() {
if p.is_dir() {
@ -1305,25 +1354,13 @@ impl Step for DocTest {
continue;
}
// The nostarch directory in the book is for no starch, and so isn't
// guaranteed to build. We don't care if it doesn't build, so skip it.
if p.to_str().map_or(false, |p| p.contains("nostarch")) {
continue;
}
files.push(p);
}
files.sort();
let mut toolstate = ToolState::TestPass;
for file in files {
if !markdown_test(builder, compiler, &file) {
toolstate = ToolState::TestFail;
}
}
if self.is_ext_doc {
builder.save_toolstate(self.name, toolstate);
markdown_test(builder, compiler, &file);
}
}
}
@ -1352,9 +1389,9 @@ macro_rules! test_book {
}
fn run(self, builder: &Builder<'_>) {
builder.ensure(DocTest {
builder.ensure(BookTest {
compiler: self.compiler,
path: $path,
path: PathBuf::from($path),
name: $book_name,
is_ext_doc: !$default,
});

View File

@ -63,7 +63,7 @@ jobs:
- job: macOS
timeoutInMinutes: 600
pool:
vmImage: macos-10.13
vmImage: macos-10.15
steps:
- template: steps/run.yml
strategy:

View File

@ -31,6 +31,9 @@ steps:
- bash: src/ci/scripts/setup-environment.sh
displayName: Setup environment
- bash: src/ci/scripts/clean-disk.sh
displayName: Clean disk
- bash: src/ci/scripts/should-skip-this.sh
displayName: Decide whether to run this job
@ -48,10 +51,6 @@ steps:
displayName: Install clang
condition: and(succeeded(), not(variables.SKIP_JOB))
- bash: src/ci/scripts/switch-xcode.sh
displayName: Switch to Xcode 9.3
condition: and(succeeded(), not(variables.SKIP_JOB))
- bash: src/ci/scripts/install-wix.sh
displayName: Install wix
condition: and(succeeded(), not(variables.SKIP_JOB))

View File

@ -25,7 +25,7 @@ jobs:
# - job: macOS
# timeoutInMinutes: 600
# pool:
# vmImage: macos-10.13
# vmImage: macos-10.15
# steps:
# - template: steps/run.yml
# strategy:

View File

@ -148,11 +148,11 @@ else:
print('unknown platform', sys.platform)
sys.exit(1)
cur_state = State();
cur_state = State()
print("Time,Idle")
while True:
time.sleep(1);
next_state = State();
time.sleep(1)
next_state = State()
now = datetime.datetime.utcnow().isoformat()
idle = next_state.idle_since(cur_state)
print("%s,%s" % (now, idle))

View File

@ -48,7 +48,7 @@ RUN /tmp/build-solaris-toolchain.sh sparcv9 sparcv9 solaris-sparc
COPY dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/
# We pass the commit id of the port of LLVM's libunwind to the build script.
# Any update to the commit id here, should cause the container image to be re-built from this point on.
RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh "53b586346f2c7870e20b170decdc30729d97c42b"
RUN /tmp/build-x86_64-fortanix-unknown-sgx-toolchain.sh "5125c169b30837208a842f85f7ae44a83533bd0e"
COPY dist-various-2/build-wasi-toolchain.sh /tmp/
RUN /tmp/build-wasi-toolchain.sh

src/ci/scripts/clean-disk.sh (new executable file, 16 lines added)
View File

@ -0,0 +1,16 @@
#!/bin/bash
# This script deletes some of the Azure-provided artifacts. We don't use these,
# and disk space is at a premium on our builders.
set -euo pipefail
IFS=$'\n\t'
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
# All the Linux builds happen inside Docker.
if isLinux; then
# 6.7GB
sudo rm -rf /opt/ghc
# 16GB
sudo rm -rf /usr/share/dotnet
fi

View File

@ -19,9 +19,7 @@ if isMacOS; then
# native clang is configured to use the correct path, but our custom one
# doesn't. This sets the SDKROOT environment variable to the SDK so that
# our own clang can figure out the correct include path on its own.
if ! [[ -d "/usr/include" ]]; then
ciCommandSetEnv SDKROOT "$(xcrun --sdk macosx --show-sdk-path)"
fi
# Configure `AR` specifically so rustbuild doesn't try to infer it as
# `clang-ar` by accident.

View File

@ -1,13 +0,0 @@
#!/bin/bash
# Switch to XCode 9.3 on OSX since it seems to be the last version that supports
# i686-apple-darwin. We'll eventually want to upgrade this and it will probably
# force us to drop i686-apple-darwin, but let's keep the wheels turning for now.
set -euo pipefail
IFS=$'\n\t'
source "$(cd "$(dirname "$0")" && pwd)/../shared.sh"
if isMacOS; then
sudo xcode-select --switch /Applications/Xcode_9.3.app
fi

@ -1 +1 @@
Subproject commit 87dd6843678575f8dda962f239d14ef4be14b352
Subproject commit 6fb3705e5230311b096d47f7e2c91f9ce24393d0

@ -1 +1 @@
Subproject commit 1a2390247ad6d08160e0dd74f40a01a9578659c2
Subproject commit 37f9e6848411188a1062ead1bd8ebe4b8aa16899

@ -1 +1 @@
Subproject commit 4d78994915af1bde9a95c04a8c27d8dca066232a
Subproject commit b2e1092bf67bd4d7686c4553f186edbb7f5f92db

@ -1 +1 @@
Subproject commit 11e893fc1357bc688418ddf1087c2b7aa25d154d
Subproject commit 64239df6d173562b9deb4f012e4c3e6e960c4754

@ -1 +1 @@
Subproject commit 1c2bd024d13f8011307e13386cf1fea2180352b5
Subproject commit 32facd5522ddbbf37baf01e4e4b6562bc55c071a

View File

@ -215,21 +215,29 @@ This controls which [target](targets/index.md) to produce.
This flag will set which lints should be set to the [warn level](lints/levels.md#warn).
_Note:_ The order of these lint level arguments is taken into account, see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
<a id="option-a-allow"></a>
## `-A`: set lint allowed
This flag will set which lints should be set to the [allow level](lints/levels.md#allow).
_Note:_ The order of these lint level arguments is taken into account, see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
<a id="option-d-deny"></a>
## `-D`: set lint denied
This flag will set which lints should be set to the [deny level](lints/levels.md#deny).
_Note:_ The order of these lint level arguments is taken into account, see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
<a id="option-f-forbid"></a>
## `-F`: set lint forbidden
This flag will set which lints should be set to the [forbid level](lints/levels.md#forbid).
_Note:_ The order of these lint level arguments is taken into account, see [lint level via compiler flag](lints/levels.md#via-compiler-flag) for more information.
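These flags can be combined in a single invocation. A minimal illustration, assuming a hypothetical `lib.rs`:

```bash
$ rustc lib.rs --crate-type=lib -W missing-docs -D unused-variables -F unsafe-code
```

This warns on missing documentation, denies unused variables, and forbids the `unsafe-code` lint so it cannot be relaxed later.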
<a id="option-z-unstable"></a>
## `-Z`: set unstable options

View File

@ -164,6 +164,18 @@ And of course, you can mix these four flags together:
$ rustc lib.rs --crate-type=lib -D missing-docs -A unused-variables
```
The order of these command line arguments is taken into account. The following allows the `unused-variables` lint, because it is the last argument for that lint:
```bash
$ rustc lib.rs --crate-type=lib -D unused-variables -A unused-variables
```
You can make use of this behavior by overriding the level of one specific lint out of a group of lints. The following example denies all the lints in the `unused` group, but explicitly allows the `unused-variables` lint in that group:
```bash
$ rustc lib.rs --crate-type=lib -D unused -A unused-variables
```
### Via an attribute
You can also modify the lint level with a crate-wide attribute:

View File

@ -0,0 +1,4 @@
[book]
authors = ["The Rust Project Developers"]
src = "src"
title = "The rustdoc book"

View File

@ -0,0 +1,34 @@
# `control_flow_guard`
The tracking issue for this feature is: [#68793](https://github.com/rust-lang/rust/issues/68793).
------------------------
The `-Zcontrol_flow_guard=checks` compiler flag enables the Windows [Control Flow Guard][cfguard-docs] platform security feature. When enabled, the compiler outputs a list of valid indirect call targets, and inserts runtime checks on all indirect jump instructions to ensure that the destination is in the list of valid call targets.
[cfguard-docs]: https://docs.microsoft.com/en-us/windows/win32/secbp/control-flow-guard
For testing purposes, the `-Zcontrol_flow_guard=nochecks` compiler flag can be used to emit only the list of valid call targets, but not the runtime checks.
It is strongly recommended to also enable Control Flow Guard checks in all linked libraries, including the standard library.
To enable Control Flow Guard in the standard library, you can use the [cargo `-Zbuild-std` functionality][build-std] to recompile the standard library with the same configuration options as the main program.
[build-std]: https://doc.rust-lang.org/nightly/cargo/reference/unstable.html#build-std
For example:
```cmd
rustup toolchain install --force nightly
rustup component add rust-src
SET RUSTFLAGS=-Zcontrol_flow_guard=checks
cargo +nightly build -Z build-std --target x86_64-pc-windows-msvc
```
```PowerShell
rustup toolchain install --force nightly
rustup component add rust-src
$Env:RUSTFLAGS = "-Zcontrol_flow_guard=checks"
cargo +nightly build -Z build-std --target x86_64-pc-windows-msvc
```
Alternatively, if you are building the standard library from source, you can set `control-flow-guard = true` in the config.toml file.
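A minimal sketch of that setting, assuming the option is placed in the `[rust]` section of `config.toml`:

```toml
# config.toml in a rust-lang/rust checkout
[rust]
# Enable Windows Control Flow Guard checks in the standard library.
control-flow-guard = true
```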

View File

@ -0,0 +1,74 @@
# `self-profile-events`
---------------------
The `-Zself-profile-events` compiler flag controls what events are recorded by the self-profiler when it is enabled via the `-Zself-profile` flag.
This flag takes a comma delimited list of event types to record.
For example:
```console
$ rustc -Zself-profile -Zself-profile-events=default,args
```
## Event types
- `query-provider`
- Traces each query used internally by the compiler.
- `generic-activity`
- Traces other parts of the compiler not covered by the query system.
- `query-cache-hit`
- Adds tracing information that records when the in-memory query cache is "hit" and does not need to re-execute a query which has been cached.
- Disabled by default because this significantly increases the trace file size.
- `query-blocked`
- Tracks time that a query tries to run but is blocked waiting on another thread executing the same query to finish executing.
- Query blocking only occurs when the compiler is built with parallel mode support.
- `incr-cache-load`
- Tracks time that is spent loading and deserializing query results from the incremental compilation on-disk cache.
- `query-keys`
- Adds a serialized representation of each query's query key to the tracing data.
- Disabled by default because this significantly increases the trace file size.
- `function-args`
- Adds additional tracing data to some `generic-activity` events.
- Disabled by default for parity with `query-keys`.
- `llvm`
- Adds tracing information about LLVM passes and code generation.
- Disabled by default because this only works when `-Znew-llvm-pass-manager` is enabled.
## Event synonyms
- `none`
- Disables all events. Equivalent to the self-profiler being disabled.
- `default`
- The default set of events, which strikes a balance between providing detailed tracing data and adding additional overhead to the compilation.
- `args`
- Equivalent to `query-keys` and `function-args`.
- `all`
- Enables all events.
## Examples
Enable the profiler and capture the default set of events (both invocations are equivalent):
```console
$ rustc -Zself-profile
$ rustc -Zself-profile -Zself-profile-events=default
```
Enable the profiler and capture the default events and their arguments:
```console
$ rustc -Zself-profile -Zself-profile-events=default,args
```
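Event names and synonyms can appear in the same comma-delimited list. For instance (an illustrative combination, not from the examples above), to capture the default events plus serialized query keys:

```console
$ rustc -Zself-profile -Zself-profile-events=default,query-keys
```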

View File

@ -0,0 +1,47 @@
# `self-profile`
--------------------
The `-Zself-profile` compiler flag enables rustc's internal profiler.
When enabled, the compiler will output three binary files in the specified directory (or the current working directory if no directory is specified).
These files can be analyzed by using the tools in the [`measureme`] repository.
To control the data recorded in the trace files, use the `-Zself-profile-events` flag.
For example:
First, run a compilation session and provide the `-Zself-profile` flag:
```console
$ rustc --crate-name foo -Zself-profile
```
This will generate three files in the working directory such as:
- `foo-1234.events`
- `foo-1234.string_data`
- `foo-1234.string_index`
Where `foo` is the name of the crate and `1234` is the process id of the rustc process.
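To write the trace files somewhere other than the working directory, pass a directory to the flag, matching the "specified directory" behavior described above (the path here is only illustrative; confirm the exact syntax with `rustc -Z help`):

```console
$ rustc --crate-name foo -Zself-profile=./profile-data
```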
To get a summary of where the compiler is spending its time:
```console
$ ../measureme/target/release/summarize summarize foo-1234
```
To generate a flamegraph of the same data:
```console
$ ../measureme/target/release/inferno foo-1234
```
To dump the event data in a Chromium-profiler compatible format:
```console
$ ../measureme/target/release/crox foo-1234
```
For more information, consult the [`measureme`] documentation.
[`measureme`]: https://github.com/rust-lang/measureme.git

View File

@ -212,7 +212,6 @@ class Type(object):
# REGULAR STRUCT
return TYPE_KIND_REGULAR_STRUCT
def __classify_union(self):
assert self.get_dwarf_type_kind() == DWARF_TYPE_CODE_UNION
@ -233,7 +232,6 @@ class Type(object):
else:
return TYPE_KIND_REGULAR_UNION
def __conforms_to_field_layout(self, expected_fields):
actual_fields = self.get_fields()
actual_field_count = len(actual_fields)
@ -363,6 +361,7 @@ def extract_tail_head_ptr_and_cap_from_std_vecdeque(vec_val):
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
return (tail, head, data_ptr, capacity)
def extract_length_and_ptr_from_slice(slice_val):
assert (slice_val.type.get_type_kind() == TYPE_KIND_SLICE or
slice_val.type.get_type_kind() == TYPE_KIND_STR_SLICE)
@ -376,8 +375,10 @@ def extract_length_and_ptr_from_slice(slice_val):
assert data_ptr.type.get_dwarf_type_kind() == DWARF_TYPE_CODE_PTR
return (length, data_ptr)
UNQUALIFIED_TYPE_MARKERS = frozenset(["(", "[", "&", "*"])
def extract_type_name(qualified_type_name):
"""Extracts the type name from a fully qualified path"""
if qualified_type_name[0] in UNQUALIFIED_TYPE_MARKERS:
@ -393,6 +394,7 @@ def extract_type_name(qualified_type_name):
else:
return qualified_type_name[index + 2:]
try:
compat_str = unicode # Python 2
except NameError:

View File

@ -14,7 +14,6 @@ is used because (u64, i16) has a ton of padding which would make the table
even larger, and it's already uncomfortably large (6 KiB).
"""
from __future__ import print_function
import sys
from math import ceil, log
from fractions import Fraction
from collections import namedtuple
@ -82,6 +81,7 @@ def error(f, e, z):
ulp_err = abs_err / Fraction(2) ** z.exp
return float(ulp_err)
HEADER = """
//! Tables of approximations of powers of ten.
//! DO NOT MODIFY: Generated by `src/etc/dec2flt_table.py`

View File

@ -25,6 +25,7 @@ if _match:
# GDB Pretty Printing Module for Rust
# ===============================================================================
class GdbType(rustpp.Type):
def __init__(self, ty):
@ -355,6 +356,7 @@ def children_of_node(boxed_node, height, want_values):
else:
yield keys[i]['value']['value']
class RustStdBTreeSetPrinter(object):
def __init__(self, val):
self.__val = val
@ -429,6 +431,7 @@ class RustOsStringPrinter(object):
def display_hint(self):
return "string"
class RustCStyleVariantPrinter(object):
def __init__(self, val):
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_ENUM

View File

@ -8,7 +8,8 @@ derives have spans that point to the fields, rather than the
sample usage: src/etc/generate-deriving-span-tests.py
"""
import os, stat
import os
import stat
TEST_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/ui/derives/'))
@ -56,6 +57,7 @@ struct Struct(
ENUM_TUPLE, ENUM_STRUCT, STRUCT_FIELDS, STRUCT_TUPLE = range(4)
def create_test_case(type, trait, super_traits, error_count):
string = [ENUM_STRING, ENUM_STRUCT_VARIANT_STRING, STRUCT_STRING, STRUCT_TUPLE_STRING][type]
all_traits = ','.join([trait] + super_traits)
@ -66,6 +68,7 @@ def create_test_case(type, trait, super_traits, error_count):
code = string.format(traits=all_traits, errors=errors)
return TEMPLATE.format(error_deriving=error_deriving, code=code)
def write_file(name, string):
test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name)

View File

@ -11,7 +11,6 @@ sample usage: src/etc/generate-keyword-tests.py as break
import sys
import os
import datetime
import stat

View File

@ -131,6 +131,7 @@ try:
except NameError:
unichr = chr
class CustomHTMLParser(HTMLParser):
"""simplified HTML parser.
@ -169,14 +170,18 @@ class CustomHTMLParser(HTMLParser):
HTMLParser.close(self)
return self.__builder.close()
Command = namedtuple('Command', 'negated cmd args lineno context')
class FailedCheck(Exception):
pass
class InvalidCheck(Exception):
pass
def concat_multi_lines(f):
"""returns a generator out of the file object, which
- removes `\\` then `\n` then a shared prefix with the previous line then
@ -208,6 +213,7 @@ def concat_multi_lines(f):
if lastline is not None:
print_err(lineno, line, 'Trailing backslash at the end of the file')
LINE_PATTERN = re.compile(r'''
(?<=(?<!\S)@)(?P<negated>!?)
(?P<cmd>[A-Za-z]+(?:-[A-Za-z]+)*)
@ -353,7 +359,7 @@ def check_tree_text(tree, path, pat, regexp):
ret = check_string(value, pat, regexp)
if ret:
break
except Exception as e:
except Exception:
print('Failed to get path "{}"'.format(path))
raise
return ret
@ -363,6 +369,7 @@ def get_tree_count(tree, path):
path = normalize_xpath(path)
return len(tree.findall(path))
def stderr(*args):
if sys.version_info.major < 3:
file = codecs.getwriter('utf-8')(sys.stderr)
@ -371,6 +378,7 @@ def stderr(*args):
print(*args, file=file)
def print_err(lineno, context, err, message=None):
global ERR_COUNT
ERR_COUNT += 1
@ -381,8 +389,10 @@ def print_err(lineno, context, err, message=None):
if context:
stderr("\t{}".format(context))
ERR_COUNT = 0
def check_command(c, cache):
try:
cerr = ""
@ -448,11 +458,13 @@ def check_command(c, cache):
except InvalidCheck as err:
print_err(c.lineno, c.context, str(err))
def check(target, commands):
cache = CachedFiles(target)
for c in commands:
check_command(c, cache)
if __name__ == '__main__':
if len(sys.argv) != 3:
stderr('Usage: {} <doc dir> <template>'.format(sys.argv[0]))

Binary file not shown.

Size: 5.7 KiB before | 3.8 KiB after

View File

@ -157,6 +157,7 @@ def start_watchdog():
# ~main
####################################################################################################
if len(sys.argv) != 3:
print("usage: python lldb_batchmode.py target-path script-path")
sys.exit(1)

View File

@ -1,11 +1,11 @@
import lldb
import re
import debugger_pretty_printers_common as rustpp
# ===============================================================================
# LLDB Pretty Printing Module for Rust
# ===============================================================================
class LldbType(rustpp.Type):
def __init__(self, ty):
@ -157,9 +157,9 @@ def print_val(lldb_val, internal_dict):
return lldb_val.GetValue()
#=--------------------------------------------------------------------------------------------------
# =---------------------------------------------------------------------------------------
# Type-Specialized Printing Functions
#=--------------------------------------------------------------------------------------------------
# =---------------------------------------------------------------------------------------
def print_struct_val(val, internal_dict, omit_first_field, omit_type_name, is_tuple_like):
"""
@ -212,6 +212,7 @@ def print_struct_val(val, internal_dict, omit_first_field, omit_type_name, is_tu
return template % {"type_name": type_name,
"body": body}
def print_pointer_val(val, internal_dict):
"""Prints a pointer value with Rust syntax"""
assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR
@ -253,18 +254,21 @@ def print_std_vec_val(val, internal_dict):
length,
internal_dict)
def print_str_slice_val(val, internal_dict):
(length, data_ptr) = rustpp.extract_length_and_ptr_from_slice(val)
return read_utf8_string(data_ptr, length)
def print_std_string_val(val, internal_dict):
vec = val.get_child_at_index(0)
(length, data_ptr, cap) = rustpp.extract_length_ptr_and_cap_from_std_vec(vec)
return read_utf8_string(data_ptr, length)
#=--------------------------------------------------------------------------------------------------
# =-----------------------------------------------------------------------
# Helper Functions
#=--------------------------------------------------------------------------------------------------
# =-----------------------------------------------------------------------
def print_array_of_values(array_name, data_ptr_val, length, internal_dict):
"""Prints a contiguous memory range, interpreting it as values of the

View File

@ -200,21 +200,27 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
align as *mut u8
} else {
let layout = Layout::from_size_align_unchecked(size, align);
let ptr = alloc(layout);
if !ptr.is_null() { ptr } else { handle_alloc_error(layout) }
match Global.alloc(layout) {
Ok(ptr) => ptr.as_ptr(),
Err(_) => handle_alloc_error(layout),
}
}
}
#[cfg_attr(not(test), lang = "box_free")]
#[inline]
// This signature has to be the same as `Box`, otherwise an ICE will happen.
// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let ptr = ptr.as_ptr();
let size = size_of_val(&*ptr);
let align = min_align_of_val(&*ptr);
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
// We do not allocate for Box<T> when T is ZST, so deallocation is also not necessary.
if size != 0 {
let layout = Layout::from_size_align_unchecked(size, align);
dealloc(ptr as *mut u8, layout);
Global.dealloc(ptr.cast().into(), layout);
}
}

View File

@ -196,12 +196,14 @@ impl<T> Box<T> {
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
if layout.size() == 0 {
return Box(NonNull::dangling().into());
unsafe {
let ptr = if layout.size() == 0 {
NonNull::dangling()
} else {
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
};
Box::from_raw(ptr.as_ptr())
}
let ptr =
unsafe { Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)) };
Box(ptr.cast().into())
}
/// Constructs a new `Box` with uninitialized contents, with the memory
@ -264,15 +266,14 @@ impl<T> Box<[T]> {
#[unstable(feature = "new_uninit", issue = "63291")]
pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
let layout = alloc::Layout::array::<mem::MaybeUninit<T>>(len).unwrap();
unsafe {
let ptr = if layout.size() == 0 {
NonNull::dangling()
} else {
unsafe {
Global.alloc(layout).unwrap_or_else(|_| alloc::handle_alloc_error(layout)).cast()
}
};
let slice = unsafe { slice::from_raw_parts_mut(ptr.as_ptr(), len) };
Box(Unique::from(slice))
Box::from_raw(slice::from_raw_parts_mut(ptr.as_ptr(), len))
}
}
}
@ -308,7 +309,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<T> {
Box(Box::into_unique(self).cast())
Box::from_raw(Box::into_raw(self) as *mut T)
}
}
@ -346,7 +347,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<[T]> {
Box(Unique::new_unchecked(Box::into_raw(self) as _))
Box::from_raw(Box::into_raw(self) as *mut [T])
}
}

View File

@ -147,7 +147,7 @@
use core::fmt;
use core::iter::{FromIterator, FusedIterator, TrustedLen};
use core::mem::{size_of, swap, ManuallyDrop};
use core::mem::{self, size_of, swap, ManuallyDrop};
use core::ops::{Deref, DerefMut};
use core::ptr;
@ -1239,7 +1239,19 @@ pub struct DrainSorted<'a, T: Ord> {
impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
/// Removes heap elements in heap order.
fn drop(&mut self) {
while let Some(_) = self.inner.pop() {}
struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);
impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
fn drop(&mut self) {
while let Some(_) = self.0.inner.pop() {}
}
}
while let Some(item) = self.inner.pop() {
let guard = DropGuard(self);
drop(item);
mem::forget(guard);
}
}
}

View File

@ -227,7 +227,7 @@ impl<K: Clone, V: Clone> BTreeClone for BTreeMap<K, V> {
impl<K: Clone + Ord, V: Clone> BTreeClone for BTreeMap<K, V> {
fn clone_from(&mut self, other: &Self) {
// This truncates `self` to `other.len()` by calling `split_off` on
// the first key after `other.len()` elements if it exists
// the first key after `other.len()` elements if it exists.
let split_off_key = if self.len() > other.len() {
let diff = self.len() - other.len();
if diff <= other.len() {
@ -247,11 +247,10 @@ impl<K: Clone + Ord, V: Clone> BTreeClone for BTreeMap<K, V> {
// After truncation, `self` is at most as long as `other` so this loop
// replaces every key-value pair in `self`. Since `oiter` is in sorted
// order and the structure of the `BTreeMap` stays the same,
// the BTree invariants are maintained at the end of the loop
// the BTree invariants are maintained at the end of the loop.
while !siter.is_empty() {
if let Some((ok, ov)) = oiter.next() {
// SAFETY: This is safe because the `siter.front != siter.back` check
// ensures that `siter` is nonempty
// SAFETY: This is safe because `siter` is nonempty.
let (sk, sv) = unsafe { siter.next_unchecked() };
sk.clone_from(ok);
sv.clone_from(ov);
@ -259,7 +258,7 @@ impl<K: Clone + Ord, V: Clone> BTreeClone for BTreeMap<K, V> {
break;
}
}
// If `other` is longer than `self`, the remaining elements are inserted
// If `other` is longer than `self`, the remaining elements are inserted.
self.extend(oiter.map(|(k, v)| ((*k).clone(), (*v).clone())));
}
}
@ -675,13 +674,15 @@ impl<K: Ord, V> BTreeMap<K, V> {
T: Ord,
K: Borrow<T>,
{
match self.length {
0 => None,
_ => Some(OccupiedEntry {
handle: self.root.as_mut().first_kv(),
let front = self.root.as_mut().first_leaf_edge();
if let Ok(kv) = front.right_kv() {
Some(OccupiedEntry {
handle: kv.forget_node_type(),
length: &mut self.length,
_marker: PhantomData,
}),
})
} else {
None
}
}
@ -736,13 +737,15 @@ impl<K: Ord, V> BTreeMap<K, V> {
T: Ord,
K: Borrow<T>,
{
match self.length {
0 => None,
_ => Some(OccupiedEntry {
handle: self.root.as_mut().last_kv(),
let back = self.root.as_mut().last_leaf_edge();
if let Ok(kv) = back.left_kv() {
Some(OccupiedEntry {
handle: kv.forget_node_type(),
length: &mut self.length,
_marker: PhantomData,
}),
})
} else {
None
}
}
@ -1467,7 +1470,22 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
#[stable(feature = "btree_drop", since = "1.7.0")]
impl<K, V> Drop for IntoIter<K, V> {
fn drop(&mut self) {
self.for_each(drop);
struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
impl<'a, K, V> Drop for DropGuard<'a, K, V> {
fn drop(&mut self) {
// Continue the same loop we perform below. This only runs when unwinding, so we
// don't have to care about panics this time (they'll abort).
while let Some(_) = self.0.next() {}
}
}
while let Some(pair) = self.next() {
let guard = DropGuard(self);
drop(pair);
mem::forget(guard);
}
unsafe {
let leaf_node = ptr::read(&self.front).into_node();
if leaf_node.is_shared_root() {

View File

@ -878,6 +878,52 @@ impl<T> LinkedList<T> {
unsafe { self.split_off_after_node(split_node, at) }
}
/// Removes the element at the given index and returns it.
///
/// This operation should compute in O(n) time.
///
/// # Panics
/// Panics if at >= len
///
/// # Examples
///
/// ```
/// #![feature(linked_list_remove)]
/// use std::collections::LinkedList;
///
/// let mut d = LinkedList::new();
///
/// d.push_front(1);
/// d.push_front(2);
/// d.push_front(3);
///
/// assert_eq!(d.remove(1), 2);
/// assert_eq!(d.remove(0), 3);
/// assert_eq!(d.remove(0), 1);
/// ```
#[unstable(feature = "linked_list_remove", issue = "69210")]
pub fn remove(&mut self, at: usize) -> T {
let len = self.len();
assert!(at < len, "Cannot remove at an index outside of the list bounds");
// Below, we iterate towards the node at the given index, either from
// the start or the end, depending on which would be faster.
let offset_from_end = len - at - 1;
if at <= offset_from_end {
let mut cursor = self.cursor_front_mut();
for _ in 0..at {
cursor.move_next();
}
cursor.remove_current().unwrap()
} else {
let mut cursor = self.cursor_back_mut();
for _ in 0..offset_from_end {
cursor.move_prev();
}
cursor.remove_current().unwrap()
}
}
/// Creates an iterator which uses a closure to determine if an element should be removed.
///
/// If the closure returns true, then the element is removed and yielded.
@ -1565,7 +1611,24 @@ where
F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
self.for_each(drop);
struct DropGuard<'r, 'a, T, F>(&'r mut DrainFilter<'a, T, F>)
where
F: FnMut(&mut T) -> bool;
impl<'r, 'a, T, F> Drop for DropGuard<'r, 'a, T, F>
where
F: FnMut(&mut T) -> bool,
{
fn drop(&mut self) {
self.0.for_each(drop);
}
}
while let Some(item) = self.next() {
let guard = DropGuard(self);
drop(item);
mem::forget(guard);
}
}
}

View File

@ -22,6 +22,11 @@ use crate::collections::TryReserveError;
use crate::raw_vec::RawVec;
use crate::vec::Vec;
#[stable(feature = "drain", since = "1.6.0")]
pub use self::drain::Drain;
mod drain;
#[cfg(test)]
mod tests;
@ -866,6 +871,18 @@ impl<T> VecDeque<T> {
/// ```
#[stable(feature = "deque_extras", since = "1.16.0")]
pub fn truncate(&mut self, len: usize) {
/// Runs the destructor for all items in the slice when it gets dropped (normally or
/// during unwinding).
struct Dropper<'a, T>(&'a mut [T]);
impl<'a, T> Drop for Dropper<'a, T> {
fn drop(&mut self) {
unsafe {
ptr::drop_in_place(self.0);
}
}
}
// Safe because:
//
// * Any slice passed to `drop_in_place` is valid; the second case has
@ -888,8 +905,11 @@ impl<T> VecDeque<T> {
let drop_back = back as *mut _;
let drop_front = front.get_unchecked_mut(len..) as *mut _;
self.head = self.wrap_sub(self.head, num_dropped);
// Make sure the second half is dropped even when a destructor
// in the first one panics.
let _back_dropper = Dropper(&mut *drop_back);
ptr::drop_in_place(drop_front);
ptr::drop_in_place(drop_back);
}
}
}
@ -2526,113 +2546,6 @@ impl<T> ExactSizeIterator for IntoIter<T> {
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
/// A draining iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
/// documentation for more.
///
/// [`drain`]: struct.VecDeque.html#method.drain
/// [`VecDeque`]: struct.VecDeque.html
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, T: 'a> {
after_tail: usize,
after_head: usize,
iter: Iter<'a, T>,
deque: NonNull<VecDeque<T>>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Drain")
.field(&self.after_tail)
.field(&self.after_head)
.field(&self.iter)
.finish()
}
}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Sync> Sync for Drain<'_, T> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send> Send for Drain<'_, T> {}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Drop for Drain<'_, T> {
fn drop(&mut self) {
self.for_each(drop);
let source_deque = unsafe { self.deque.as_mut() };
// T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
//
// T t h H
// [. . . o o x x o o . . .]
//
let orig_tail = source_deque.tail;
let drain_tail = source_deque.head;
let drain_head = self.after_tail;
let orig_head = self.after_head;
let tail_len = count(orig_tail, drain_tail, source_deque.cap());
let head_len = count(drain_head, orig_head, source_deque.cap());
// Restore the original head value
source_deque.head = orig_head;
match (tail_len, head_len) {
(0, 0) => {
source_deque.head = 0;
source_deque.tail = 0;
}
(0, _) => {
source_deque.tail = drain_head;
}
(_, 0) => {
source_deque.head = drain_tail;
}
_ => unsafe {
if tail_len <= head_len {
source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
} else {
source_deque.head = source_deque.wrap_add(drain_tail, head_len);
source_deque.wrap_copy(drain_tail, drain_head, head_len);
}
},
}
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Iterator for Drain<'_, T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
self.iter.next().map(|elt| unsafe { ptr::read(elt) })
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> DoubleEndedIterator for Drain<'_, T> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> ExactSizeIterator for Drain<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Drain<'_, T> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<A: PartialEq> PartialEq for VecDeque<A> {
fn eq(&self, other: &VecDeque<A>) -> bool {

View File

@ -0,0 +1,126 @@
use core::iter::FusedIterator;
use core::ptr::{self, NonNull};
use core::{fmt, mem};
use super::{count, Iter, VecDeque};
/// A draining iterator over the elements of a `VecDeque`.
///
/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
/// documentation for more.
///
/// [`drain`]: struct.VecDeque.html#method.drain
/// [`VecDeque`]: struct.VecDeque.html
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, T: 'a> {
pub(crate) after_tail: usize,
pub(crate) after_head: usize,
pub(crate) iter: Iter<'a, T>,
pub(crate) deque: NonNull<VecDeque<T>>,
}
#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("Drain")
.field(&self.after_tail)
.field(&self.after_head)
.field(&self.iter)
.finish()
}
}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Sync> Sync for Drain<'_, T> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send> Send for Drain<'_, T> {}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Drop for Drain<'_, T> {
fn drop(&mut self) {
struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
fn drop(&mut self) {
self.0.for_each(drop);
let source_deque = unsafe { self.0.deque.as_mut() };
// T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
//
// T t h H
// [. . . o o x x o o . . .]
//
let orig_tail = source_deque.tail;
let drain_tail = source_deque.head;
let drain_head = self.0.after_tail;
let orig_head = self.0.after_head;
let tail_len = count(orig_tail, drain_tail, source_deque.cap());
let head_len = count(drain_head, orig_head, source_deque.cap());
// Restore the original head value
source_deque.head = orig_head;
match (tail_len, head_len) {
(0, 0) => {
source_deque.head = 0;
source_deque.tail = 0;
}
(0, _) => {
source_deque.tail = drain_head;
}
(_, 0) => {
source_deque.head = drain_tail;
}
_ => unsafe {
if tail_len <= head_len {
source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
} else {
source_deque.head = source_deque.wrap_add(drain_tail, head_len);
source_deque.wrap_copy(drain_tail, drain_head, head_len);
}
},
}
}
}
while let Some(item) = self.next() {
let guard = DropGuard(self);
drop(item);
mem::forget(guard);
}
DropGuard(self);
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Iterator for Drain<'_, T> {
type Item = T;
#[inline]
fn next(&mut self) -> Option<T> {
self.iter.next().map(|elt| unsafe { ptr::read(elt) })
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> DoubleEndedIterator for Drain<'_, T> {
#[inline]
fn next_back(&mut self) -> Option<T> {
self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> ExactSizeIterator for Drain<'_, T> {}
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Drain<'_, T> {}

View File

@ -280,7 +280,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
// 0, getting to here necessarily means the `RawVec` is overfull.
assert!(elem_size != 0, "capacity overflow");
let (new_cap, uniq) = match self.current_layout() {
let (new_cap, ptr) = match self.current_layout() {
Some(cur) => {
// Since we guarantee that we never allocate more than
// `isize::MAX` bytes, `elem_size * self.cap <= isize::MAX` as
@ -297,7 +297,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
alloc_guard(new_size).unwrap_or_else(|_| capacity_overflow());
let ptr_res = self.a.realloc(NonNull::from(self.ptr).cast(), cur, new_size);
match ptr_res {
Ok(ptr) => (new_cap, ptr.cast().into()),
Ok(ptr) => (new_cap, ptr),
Err(_) => handle_alloc_error(Layout::from_size_align_unchecked(
new_size,
cur.align(),
@ -308,13 +308,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
// Skip to 4 because tiny `Vec`'s are dumb; but not if that
// would cause overflow.
let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 };
match self.a.alloc_array::<T>(new_cap) {
Ok(ptr) => (new_cap, ptr.into()),
Err(_) => handle_alloc_error(Layout::array::<T>(new_cap).unwrap()),
let layout = Layout::array::<T>(new_cap).unwrap();
match self.a.alloc(layout) {
Ok(ptr) => (new_cap, ptr),
Err(_) => handle_alloc_error(layout),
}
}
};
self.ptr = uniq;
self.ptr = ptr.cast().into();
self.cap = new_cap;
}
}

View File

@ -319,7 +319,7 @@ pub struct String {
/// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct FromUtf8Error {
bytes: Vec<u8>,
error: Utf8Error,
@ -2106,18 +2106,11 @@ impl ops::DerefMut for String {
}
}
/// An error when parsing a `String`.
/// A type alias for [`Infallible`].
///
/// This `enum` is slightly awkward: it will never actually exist. This error is
/// part of the type signature of the implementation of [`FromStr`] on
/// [`String`]. The return type of [`from_str`], requires that an error be
/// defined, but, given that a [`String`] can always be made into a new
/// [`String`] without error, this type will never actually be returned. As
/// such, it is only here to satisfy said signature, and is useless otherwise.
/// This alias exists for backwards compatibility, and may be eventually deprecated.
///
/// [`FromStr`]: ../../std/str/trait.FromStr.html
/// [`String`]: struct.String.html
/// [`from_str`]: ../../std/str/trait.FromStr.html#tymethod.from_str
/// [`Infallible`]: ../../core/convert/enum.Infallible.html
#[stable(feature = "str_parse_error", since = "1.5.0")]
pub type ParseError = core::convert::Infallible;
@ -2125,7 +2118,7 @@ pub type ParseError = core::convert::Infallible;
impl FromStr for String {
type Err = core::convert::Infallible;
#[inline]
fn from_str(s: &str) -> Result<String, ParseError> {
fn from_str(s: &str) -> Result<String, Self::Err> {
Ok(String::from(s))
}
}
@ -2208,6 +2201,14 @@ impl AsRef<str> for String {
}
}
#[stable(feature = "string_as_mut", since = "1.43.0")]
impl AsMut<str> for String {
#[inline]
fn as_mut(&mut self) -> &mut str {
self
}
}
#[stable(feature = "rust1", since = "1.0.0")]
impl AsRef<[u8]> for String {
#[inline]

View File

@ -1,6 +1,8 @@
use std::collections::binary_heap::{Drain, PeekMut};
use std::collections::BinaryHeap;
use std::iter::TrustedLen;
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::sync::atomic::{AtomicU32, Ordering};
#[test]
fn test_iterator() {
@ -275,6 +277,37 @@ fn test_drain_sorted() {
assert!(q.is_empty());
}
#[test]
fn test_drain_sorted_leak() {
static DROPS: AtomicU32 = AtomicU32::new(0);
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
struct D(u32, bool);
impl Drop for D {
fn drop(&mut self) {
DROPS.fetch_add(1, Ordering::SeqCst);
if self.1 {
panic!("panic in `drop`");
}
}
}
let mut q = BinaryHeap::from(vec![
D(0, false),
D(1, false),
D(2, false),
D(3, true),
D(4, false),
D(5, false),
]);
catch_unwind(AssertUnwindSafe(|| drop(q.drain_sorted()))).ok();
assert_eq!(DROPS.load(Ordering::SeqCst), 6);
}
#[test]
fn test_extend_ref() {
let mut a = BinaryHeap::new();

View File

@ -5,7 +5,9 @@ use std::fmt::Debug;
use std::iter::FromIterator;
use std::ops::Bound::{self, Excluded, Included, Unbounded};
use std::ops::RangeBounds;
use std::panic::catch_unwind;
use std::rc::Rc;
use std::sync::atomic::{AtomicU32, Ordering};
use super::DeterministicRng;
@ -15,7 +17,7 @@ fn test_basic_large() {
#[cfg(not(miri))] // Miri is too slow
let size = 10000;
#[cfg(miri)]
let size = 200;
let size = 144; // to obtain height 3 tree (having edges to both kinds of nodes)
assert_eq!(map.len(), 0);
for i in 0..size {
@ -23,6 +25,11 @@ fn test_basic_large() {
assert_eq!(map.len(), i + 1);
}
assert_eq!(map.first_key_value(), Some((&0, &0)));
assert_eq!(map.last_key_value(), Some((&(size - 1), &(10 * (size - 1)))));
assert_eq!(map.first_entry().unwrap().key(), &0);
assert_eq!(map.last_entry().unwrap().key(), &(size - 1));
for i in 0..size {
assert_eq!(map.get(&i).unwrap(), &(i * 10));
}
@ -376,8 +383,8 @@ fn test_range_small() {
}
#[test]
fn test_range_depth_2() {
// Assuming that node.CAPACITY is 11, having 12 pairs implies a depth 2 tree
fn test_range_height_2() {
// Assuming that node.CAPACITY is 11, having 12 pairs implies a height 2 tree
// with 2 leaves. Depending on details we don't want or need to rely upon,
// the single key at the root will be 6 or 7.
@ -519,7 +526,7 @@ fn test_range_1000() {
#[cfg(not(miri))] // Miri is too slow
let size = 1000;
#[cfg(miri)]
let size = 200;
let size = 144; // to obtain height 3 tree (having edges to both kinds of nodes)
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
@ -556,14 +563,15 @@ fn test_range_borrowed_key() {
#[test]
fn test_range() {
#[cfg(not(miri))] // Miri is too slow
let size = 200;
#[cfg(not(miri))] // Miri is too slow
let step = 1;
#[cfg(miri)]
let size = 30;
let step = 66;
let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
for i in 0..size {
for j in i..size {
for i in (0..size).step_by(step) {
for j in (i..size).step_by(step) {
let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
let mut pairs = (i..=j).map(|i| (i, i));
@ -578,14 +586,15 @@ fn test_range() {
#[test]
fn test_range_mut() {
#[cfg(not(miri))] // Miri is too slow
let size = 200;
#[cfg(not(miri))] // Miri is too slow
let step = 1;
#[cfg(miri)]
let size = 30;
let step = 66;
let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
for i in 0..size {
for j in i..size {
for i in (0..size).step_by(step) {
for j in (i..size).step_by(step) {
let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
let mut pairs = (i..=j).map(|i| (i, i));
@ -753,10 +762,7 @@ fn test_bad_zst() {
#[test]
fn test_clone() {
let mut map = BTreeMap::new();
#[cfg(not(miri))] // Miri is too slow
let size = 100;
#[cfg(miri)]
let size = 30;
let size = 12; // to obtain height 2 tree (having edges to leaf nodes)
assert_eq!(map.len(), 0);
for i in 0..size {
@ -783,24 +789,36 @@ fn test_clone() {
assert_eq!(map.len(), size / 2 - i - 1);
assert_eq!(map, map.clone());
}
// Full 2-level and minimal 3-level tree (sizes 143, 144 -- the only ones we clone for).
for i in 1..=144 {
assert_eq!(map.insert(i, i), None);
assert_eq!(map.len(), i);
if i >= 143 {
assert_eq!(map, map.clone());
}
}
}
#[test]
fn test_clone_from() {
let mut map1 = BTreeMap::new();
let size = 30;
let max_size = 12; // to obtain height 2 tree (having edges to leaf nodes)
for i in 0..size {
// Range to max_size inclusive, because i is the size of map1 being tested.
for i in 0..=max_size {
let mut map2 = BTreeMap::new();
for j in 0..i {
let mut map1_copy = map2.clone();
map1_copy.clone_from(&map1);
map1_copy.clone_from(&map1); // small cloned from large
assert_eq!(map1_copy, map1);
let mut map2_copy = map1.clone();
map2_copy.clone_from(&map2);
map2_copy.clone_from(&map2); // large cloned from small
assert_eq!(map2_copy, map2);
map2.insert(100 * j + 1, 2 * j + 1);
}
map2.clone_from(&map1); // same length
assert_eq!(map2, map1);
map1.insert(i, 10 * i);
}
}
@ -951,6 +969,7 @@ create_append_test!(test_append_145, 145);
// Tests for several randomly chosen sizes.
create_append_test!(test_append_170, 170);
create_append_test!(test_append_181, 181);
#[cfg(not(miri))] // Miri is too slow
create_append_test!(test_append_239, 239);
#[cfg(not(miri))] // Miri is too slow
create_append_test!(test_append_1700, 1700);
@ -1000,3 +1019,29 @@ fn test_split_off_large_random_sorted() {
assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
}
#[test]
fn test_into_iter_drop_leak() {
static DROPS: AtomicU32 = AtomicU32::new(0);
struct D;
impl Drop for D {
fn drop(&mut self) {
if DROPS.fetch_add(1, Ordering::SeqCst) == 3 {
panic!("panic in `drop`");
}
}
}
let mut map = BTreeMap::new();
map.insert("a", D);
map.insert("b", D);
map.insert("c", D);
map.insert("d", D);
map.insert("e", D);
catch_unwind(move || drop(map.into_iter())).ok();
assert_eq!(DROPS.load(Ordering::SeqCst), 5);
}

View File

@ -487,21 +487,26 @@ fn test_first_last() {
a.insert(2);
assert_eq!(a.first(), Some(&1));
assert_eq!(a.last(), Some(&2));
a.insert(3);
for i in 3..=12 {
a.insert(i);
}
assert_eq!(a.first(), Some(&1));
assert_eq!(a.last(), Some(&3));
assert_eq!(a.len(), 3);
assert_eq!(a.last(), Some(&12));
assert_eq!(a.pop_first(), Some(1));
assert_eq!(a.len(), 2);
assert_eq!(a.pop_last(), Some(3));
assert_eq!(a.len(), 1);
assert_eq!(a.pop_last(), Some(12));
assert_eq!(a.pop_first(), Some(2));
assert_eq!(a.len(), 0);
assert_eq!(a.pop_last(), None);
assert_eq!(a.len(), 0);
assert_eq!(a.pop_last(), Some(11));
assert_eq!(a.pop_first(), Some(3));
assert_eq!(a.pop_last(), Some(10));
assert_eq!(a.pop_first(), Some(4));
assert_eq!(a.pop_first(), Some(5));
assert_eq!(a.pop_first(), Some(6));
assert_eq!(a.pop_first(), Some(7));
assert_eq!(a.pop_first(), Some(8));
assert_eq!(a.clone().pop_last(), Some(9));
assert_eq!(a.pop_first(), Some(9));
assert_eq!(a.pop_first(), None);
assert_eq!(a.len(), 0);
assert_eq!(a.pop_last(), None);
}
fn rand_data(len: usize) -> Vec<u32> {

View File

@ -12,6 +12,7 @@
#![feature(binary_heap_into_iter_sorted)]
#![feature(binary_heap_drain_sorted)]
#![feature(vec_remove_item)]
#![feature(split_inclusive)]
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

View File

@ -1,5 +1,5 @@
use std::collections::LinkedList;
use std::panic::catch_unwind;
use std::panic::{catch_unwind, AssertUnwindSafe};
#[test]
fn test_basic() {
@ -531,6 +531,74 @@ fn drain_filter_complex() {
}
}
#[test]
fn drain_filter_drop_panic_leak() {
static mut DROPS: i32 = 0;
struct D(bool);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
if self.0 {
panic!("panic in `drop`");
}
}
}
let mut q = LinkedList::new();
q.push_back(D(false));
q.push_back(D(false));
q.push_back(D(false));
q.push_back(D(false));
q.push_back(D(false));
q.push_front(D(false));
q.push_front(D(true));
q.push_front(D(false));
catch_unwind(AssertUnwindSafe(|| drop(q.drain_filter(|_| true)))).ok();
assert_eq!(unsafe { DROPS }, 8);
assert!(q.is_empty());
}
#[test]
fn drain_filter_pred_panic_leak() {
static mut DROPS: i32 = 0;
#[derive(Debug)]
struct D(u32);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
}
}
let mut q = LinkedList::new();
q.push_back(D(3));
q.push_back(D(4));
q.push_back(D(5));
q.push_back(D(6));
q.push_back(D(7));
q.push_front(D(2));
q.push_front(D(1));
q.push_front(D(0));
catch_unwind(AssertUnwindSafe(|| {
drop(q.drain_filter(|item| if item.0 >= 2 { panic!() } else { true }))
}))
.ok();
assert_eq!(unsafe { DROPS }, 2); // 0 and 1
assert_eq!(q.len(), 6);
}
#[test]
fn test_drop() {
static mut DROPS: i32 = 0;

View File

@ -851,6 +851,86 @@ fn test_splitator() {
assert_eq!(xs.split(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
}
#[test]
fn test_splitator_inclusive() {
let xs = &[1, 2, 3, 4, 5];
let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
assert_eq!(xs.split_inclusive(|x| *x == 1).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive(|x| *x == 10).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
assert_eq!(xs.split_inclusive(|_| true).collect::<Vec<&[i32]>>(), splits);
let xs: &[i32] = &[];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
}
#[test]
fn test_splitator_inclusive_reverse() {
let xs = &[1, 2, 3, 4, 5];
let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
assert_eq!(xs.split_inclusive(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
assert_eq!(xs.split_inclusive(|_| true).rev().collect::<Vec<_>>(), splits);
let xs: &[i32] = &[];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
}
#[test]
fn test_splitator_mut_inclusive() {
let xs = &mut [1, 2, 3, 4, 5];
let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 1).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 10).collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
assert_eq!(xs.split_inclusive_mut(|_| true).collect::<Vec<_>>(), splits);
let xs: &mut [i32] = &mut [];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
}
#[test]
fn test_splitator_mut_inclusive_reverse() {
let xs = &mut [1, 2, 3, 4, 5];
let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
assert_eq!(xs.split_inclusive_mut(|_| true).rev().collect::<Vec<_>>(), splits);
let xs: &mut [i32] = &mut [];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
}
#[test]
fn test_splitnator() {
let xs = &[1, 2, 3, 4, 5];

View File

@ -1247,6 +1247,49 @@ fn test_split_char_iterator_no_trailing() {
assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
}
#[test]
fn test_split_char_iterator_inclusive() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
let split: Vec<&str> = data.split_inclusive('\n').collect();
assert_eq!(split, ["\n", "Märy häd ä little lämb\n", "Little lämb\n"]);
let uppercase_separated = "SheePSharKTurtlECaT";
let mut first_char = true;
let split: Vec<&str> = uppercase_separated
.split_inclusive(|c: char| {
let split = !first_char && c.is_uppercase();
first_char = split;
split
})
.collect();
assert_eq!(split, ["SheeP", "SharK", "TurtlE", "CaT"]);
}
#[test]
fn test_split_char_iterator_inclusive_rev() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";
let split: Vec<&str> = data.split_inclusive('\n').rev().collect();
assert_eq!(split, ["Little lämb\n", "Märy häd ä little lämb\n", "\n"]);
// Note that the predicate is stateful and thus dependent
// on the iteration order.
// (A different predicate is needed for reverse iterator vs normal iterator.)
// Not sure if anything can be done though.
let uppercase_separated = "SheePSharKTurtlECaT";
let mut term_char = true;
let split: Vec<&str> = uppercase_separated
.split_inclusive(|c: char| {
let split = term_char && c.is_uppercase();
term_char = c.is_uppercase();
split
})
.rev()
.collect();
assert_eq!(split, ["CaT", "TurtlE", "SharK", "SheeP"]);
}
#[test]
fn test_rsplit() {
let data = "\nMäry häd ä little lämb\nLittle lämb\n";

View File

@ -50,7 +50,11 @@ fn test_from_utf8() {
let xs = b"hello\xFF".to_vec();
let err = String::from_utf8(xs).unwrap_err();
assert_eq!(err.as_bytes(), b"hello\xff");
let err_clone = err.clone();
assert_eq!(err, err_clone);
assert_eq!(err.into_bytes(), b"hello\xff".to_vec());
assert_eq!(err_clone.utf8_error().valid_up_to(), 5);
}
#[test]

View File

@ -1,6 +1,7 @@
use std::borrow::Cow;
use std::collections::TryReserveError::*;
use std::mem::size_of;
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::vec::{Drain, IntoIter};
use std::{isize, usize};
@ -585,6 +586,44 @@ fn test_drain_inclusive_out_of_bounds() {
v.drain(5..=5);
}
#[test]
fn test_drain_leak() {
static mut DROPS: i32 = 0;
#[derive(Debug, PartialEq)]
struct D(u32, bool);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
if self.1 {
panic!("panic in `drop`");
}
}
}
let mut v = vec![
D(0, false),
D(1, false),
D(2, false),
D(3, false),
D(4, true),
D(5, false),
D(6, false),
];
catch_unwind(AssertUnwindSafe(|| {
v.drain(2..=5);
}))
.ok();
assert_eq!(unsafe { DROPS }, 4);
assert_eq!(v, vec![D(0, false), D(1, false), D(6, false),]);
}
#[test]
fn test_splice() {
let mut v = vec![1, 2, 3, 4, 5];
@ -726,6 +765,31 @@ fn test_into_iter_clone() {
assert_eq!(it.next(), None);
}
#[test]
fn test_into_iter_leak() {
static mut DROPS: i32 = 0;
struct D(bool);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
if self.0 {
panic!("panic in `drop`");
}
}
}
let v = vec![D(false), D(true), D(false)];
catch_unwind(move || drop(v.into_iter())).ok();
assert_eq!(unsafe { DROPS }, 3);
}
#[test]
fn test_cow_from() {
let borrowed: &[_] = &["borrowed", "(slice)"];

View File

@ -2,7 +2,7 @@ use std::collections::TryReserveError::*;
use std::collections::{vec_deque::Drain, VecDeque};
use std::fmt::Debug;
use std::mem::size_of;
use std::panic::catch_unwind;
use std::panic::{catch_unwind, AssertUnwindSafe};
use std::{isize, usize};
use crate::hash;
@ -1573,3 +1573,75 @@ fn test_try_rfold_moves_iter() {
assert_eq!(iter.try_rfold(0_i8, |acc, &x| acc.checked_add(x)), None);
assert_eq!(iter.next_back(), Some(&70));
}
#[test]
fn truncate_leak() {
static mut DROPS: i32 = 0;
struct D(bool);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
if self.0 {
panic!("panic in `drop`");
}
}
}
let mut q = VecDeque::new();
q.push_back(D(false));
q.push_back(D(false));
q.push_back(D(false));
q.push_back(D(false));
q.push_back(D(false));
q.push_front(D(true));
q.push_front(D(false));
q.push_front(D(false));
catch_unwind(AssertUnwindSafe(|| q.truncate(1))).ok();
assert_eq!(unsafe { DROPS }, 7);
}
#[test]
fn test_drain_leak() {
static mut DROPS: i32 = 0;
#[derive(Debug, PartialEq)]
struct D(u32, bool);
impl Drop for D {
fn drop(&mut self) {
unsafe {
DROPS += 1;
}
if self.1 {
panic!("panic in `drop`");
}
}
}
let mut v = VecDeque::new();
v.push_back(D(4, false));
v.push_back(D(5, false));
v.push_back(D(6, false));
v.push_front(D(3, false));
v.push_front(D(2, true));
v.push_front(D(1, false));
v.push_front(D(0, false));
catch_unwind(AssertUnwindSafe(|| {
v.drain(1..=4);
}))
.ok();
assert_eq!(unsafe { DROPS }, 4);
assert_eq!(v.len(), 3);
drop(v);
assert_eq!(unsafe { DROPS }, 7);
}

View File

@ -2622,7 +2622,9 @@ impl<T: Clone> Clone for IntoIter<T> {
unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
fn drop(&mut self) {
// destroy the remaining elements
for _x in self.by_ref() {}
unsafe {
ptr::drop_in_place(self.as_mut_slice());
}
// RawVec handles deallocation
let _ = unsafe { RawVec::from_raw_parts(self.buf.as_ptr(), self.cap) };
@ -2702,26 +2704,45 @@ impl<T> DoubleEndedIterator for Drain<'_, T> {
#[stable(feature = "drain", since = "1.6.0")]
impl<T> Drop for Drain<'_, T> {
fn drop(&mut self) {
// exhaust self first
self.for_each(drop);
/// Continues dropping the remaining elements in the `Drain`, then moves back the
/// un-`Drain`ed elements to restore the original `Vec`.
struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
if self.tail_len > 0 {
impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
fn drop(&mut self) {
// Continue the same loop we have below. If the loop already finished, this does
// nothing.
self.0.for_each(drop);
if self.0.tail_len > 0 {
unsafe {
let source_vec = self.vec.as_mut();
let source_vec = self.0.vec.as_mut();
// memmove back untouched tail, update to new length
let start = source_vec.len();
let tail = self.tail_start;
let tail = self.0.tail_start;
if tail != start {
let src = source_vec.as_ptr().add(tail);
let dst = source_vec.as_mut_ptr().add(start);
ptr::copy(src, dst, self.tail_len);
ptr::copy(src, dst, self.0.tail_len);
}
source_vec.set_len(start + self.tail_len);
source_vec.set_len(start + self.0.tail_len);
}
}
}
}
// exhaust self first
while let Some(item) = self.next() {
let guard = DropGuard(self);
drop(item);
mem::forget(guard);
}
// Drop a `DropGuard` to move back the non-drained tail of `self`.
DropGuard(self);
}
}
#[stable(feature = "drain", since = "1.6.0")]
impl<T> ExactSizeIterator for Drain<'_, T> {
fn is_empty(&self) -> bool {

View File

@ -241,11 +241,13 @@ impl Layout {
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
#[inline]
pub fn repeat(&self, n: usize) -> Result<(Self, usize), LayoutErr> {
// This cannot overflow. Quoting from the invariant of Layout:
// > `size`, when rounded up to the nearest multiple of `align`,
// > must not overflow (i.e., the rounded value must be less than
// > `usize::MAX`)
let padded_size = self.size() + self.padding_needed_for(self.align());
// Warning, removing the checked_add here led to segfaults in #67174. Further
// analysis in #69225 seems to indicate that this is an LTO-related
// miscompilation, so #67174 might be able to be reapplied in the future.
let padded_size = self
.size()
.checked_add(self.padding_needed_for(self.align()))
.ok_or(LayoutErr { private: () })?;
let alloc_size = padded_size.checked_mul(n).ok_or(LayoutErr { private: () })?;
unsafe {
@ -593,9 +595,8 @@ pub unsafe trait GlobalAlloc {
///
/// * the starting address for that memory block was previously
/// returned by a previous call to an allocation method (`alloc`,
/// `alloc_zeroed`, `alloc_excess`, `alloc_one`, `alloc_array`) or
/// reallocation method (`realloc`, `realloc_excess`, or
/// `realloc_array`), and
/// `alloc_zeroed`, `alloc_excess`) or reallocation method
/// (`realloc`, `realloc_excess`), and
///
/// * the memory block has not been subsequently deallocated, where
/// blocks are deallocated either by being passed to a deallocation
@ -606,11 +607,6 @@ pub unsafe trait GlobalAlloc {
/// methods in the `AllocRef` trait state that allocation requests
/// must be non-zero size, or else undefined behavior can result.
///
/// * However, some higher-level allocation methods (`alloc_one`,
/// `alloc_array`) are well-defined on zero-sized types and can
/// optionally support them: it is left up to the implementor
/// whether to return `Err`, or to return `Ok` with some pointer.
///
/// * If an `AllocRef` implementation chooses to return `Ok` in this
/// case (i.e., the pointer denotes a zero-sized inaccessible block)
/// then that returned pointer must be considered "currently
@ -853,6 +849,59 @@ pub unsafe trait AllocRef {
result
}
/// Behaves like `realloc`, but also ensures that the new contents
/// are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `realloc` is.
///
/// # Errors
///
/// Returns `Err` only if the new layout
/// does not meet the allocator's size
/// and alignment constraints of the allocator, or if reallocation
/// otherwise fails.
///
/// Implementations are encouraged to return `Err` on memory
/// exhaustion rather than panicking or aborting, but this is not
/// a strict requirement. (Specifically: it is *legal* to
/// implement this trait atop an underlying native allocation
/// library that aborts on memory exhaustion.)
///
/// Clients wishing to abort computation in response to a
/// reallocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn realloc_zeroed(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<NonNull<u8>, AllocErr> {
let old_size = layout.size();
if new_size >= old_size {
if let Ok(()) = self.grow_in_place_zeroed(ptr, layout, new_size) {
return Ok(ptr);
}
} else if new_size < old_size {
if let Ok(()) = self.shrink_in_place(ptr, layout, new_size) {
return Ok(ptr);
}
}
// otherwise, fall back on alloc + copy + dealloc.
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let result = self.alloc_zeroed(new_layout);
if let Ok(new_ptr) = result {
ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));
self.dealloc(ptr, layout);
}
result
}
/// Behaves like `alloc`, but also ensures that the contents
/// are set to zero before being returned.
///
@ -904,6 +953,31 @@ pub unsafe trait AllocRef {
self.alloc(layout).map(|p| Excess(p, usable_size.1))
}
/// Behaves like `alloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
/// Also it ensures that the contents are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `alloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `alloc`.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn alloc_excess_zeroed(&mut self, layout: Layout) -> Result<Excess, AllocErr> {
let usable_size = self.usable_size(&layout);
self.alloc_zeroed(layout).map(|p| Excess(p, usable_size.1))
}
/// Behaves like `realloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
@ -934,6 +1008,37 @@ pub unsafe trait AllocRef {
self.realloc(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
}
/// Behaves like `realloc`, but also returns the whole size of
/// the returned block. For some `layout` inputs, like arrays, this
/// may include extra storage usable for additional data.
/// Also it ensures that the contents are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `realloc` is.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `layout` does not meet allocator's size or alignment
/// constraints, just as in `realloc`.
///
/// Clients wishing to abort computation in response to a
/// reallocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn realloc_excess_zeroed(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<Excess, AllocErr> {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let usable_size = self.usable_size(&new_layout);
self.realloc_zeroed(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
}
/// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
///
/// If this returns `Ok`, then the allocator has asserted that the
@ -983,6 +1088,34 @@ pub unsafe trait AllocRef {
if new_size <= u { Ok(()) } else { Err(CannotReallocInPlace) }
}
/// Behaves like `grow_in_place`, but also ensures that the new
/// contents are set to zero before being returned.
///
/// # Safety
///
/// This function is unsafe for the same reasons that `grow_in_place` is.
///
/// # Errors
///
/// Returns `Err(CannotReallocInPlace)` when the allocator is
/// unable to assert that the memory block referenced by `ptr`
/// could fit `layout`.
///
/// Note that one cannot pass `CannotReallocInPlace` to the `handle_alloc_error`
/// function; clients are expected either to be able to recover from
/// `grow_in_place` failures without aborting, or to fall back on
/// another reallocation method before resorting to an abort.
unsafe fn grow_in_place_zeroed(
&mut self,
ptr: NonNull<u8>,
layout: Layout,
new_size: usize,
) -> Result<(), CannotReallocInPlace> {
self.grow_in_place(ptr, layout, new_size)?;
ptr.as_ptr().add(layout.size()).write_bytes(0, new_size - layout.size());
Ok(())
}
/// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
///
/// If this returns `Ok`, then the allocator has asserted that the
@ -1035,195 +1168,4 @@ pub unsafe trait AllocRef {
// new_layout.size() <= layout.size() [required by this method]
if l <= new_size { Ok(()) } else { Err(CannotReallocInPlace) }
}
// == COMMON USAGE PATTERNS ==
// alloc_one, dealloc_one, alloc_array, realloc_array. dealloc_array
/// Allocates a block suitable for holding an instance of `T`.
///
/// Captures a common usage pattern for allocators.
///
/// The returned block is suitable for passing to the
/// `realloc`/`dealloc` methods of this allocator.
///
/// Note to implementors: If this returns `Ok(ptr)`, then `ptr`
/// must be considered "currently allocated" and must be
/// acceptable input to methods such as `realloc` or `dealloc`,
/// *even if* `T` is a zero-sized type. In other words, if your
/// `AllocRef` implementation overrides this method in a manner
/// that can return a zero-sized `ptr`, then all reallocation and
/// deallocation methods need to be similarly overridden to accept
/// such values as input.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `T` does not meet allocator's size or alignment constraints.
///
/// For zero-sized `T`, may return either of `Ok` or `Err`, but
/// will *not* yield undefined behavior.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
fn alloc_one<T>(&mut self) -> Result<NonNull<T>, AllocErr>
where
Self: Sized,
{
let k = Layout::new::<T>();
if k.size() > 0 { unsafe { self.alloc(k).map(|p| p.cast()) } } else { Err(AllocErr) }
}
/// Deallocates a block suitable for holding an instance of `T`.
///
/// The given block must have been produced by this allocator,
/// and must be suitable for storing a `T` (in terms of alignment
/// as well as minimum and maximum size); otherwise yields
/// undefined behavior.
///
/// Captures a common usage pattern for allocators.
///
/// # Safety
///
/// This function is unsafe because undefined behavior can result
/// if the caller does not ensure both:
///
/// * `ptr` must denote a block of memory currently allocated via this allocator
///
/// * the layout of `T` must *fit* that block of memory.
unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
where
Self: Sized,
{
let k = Layout::new::<T>();
if k.size() > 0 {
self.dealloc(ptr.cast(), k);
}
}
/// Allocates a block suitable for holding `n` instances of `T`.
///
/// Captures a common usage pattern for allocators.
///
/// The returned block is suitable for passing to the
/// `realloc`/`dealloc` methods of this allocator.
///
/// Note to implementors: If this returns `Ok(ptr)`, then `ptr`
/// must be considered "currently allocated" and must be
/// acceptable input to methods such as `realloc` or `dealloc`,
/// *even if* `T` is a zero-sized type. In other words, if your
/// `AllocRef` implementation overrides this method in a manner
/// that can return a zero-sized `ptr`, then all reallocation and
/// deallocation methods need to be similarly overridden to accept
/// such values as input.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `[T; n]` does not meet the allocator's size or alignment
/// constraints.
///
/// For zero-sized `T` or `n == 0`, may return either of `Ok` or
/// `Err`, but will *not* yield undefined behavior.
///
/// Always returns `Err` on arithmetic overflow.
///
/// Clients wishing to abort computation in response to an
/// allocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
fn alloc_array<T>(&mut self, n: usize) -> Result<NonNull<T>, AllocErr>
where
Self: Sized,
{
match Layout::array::<T>(n) {
Ok(layout) if layout.size() > 0 => unsafe { self.alloc(layout).map(|p| p.cast()) },
_ => Err(AllocErr),
}
}
/// Reallocates a block previously suitable for holding `n_old`
/// instances of `T`, returning a block suitable for holding
/// `n_new` instances of `T`.
///
/// Captures a common usage pattern for allocators.
///
/// The returned block is suitable for passing to the
/// `realloc`/`dealloc` methods of this allocator.
///
/// # Safety
///
/// This function is unsafe because undefined behavior can result
/// if the caller does not ensure all of the following:
///
/// * `ptr` must be currently allocated via this allocator,
///
/// * the layout of `[T; n_old]` must *fit* that block of memory.
///
/// # Errors
///
/// Returning `Err` indicates that either memory is exhausted or
/// `[T; n_new]` does not meet the allocator's size or alignment
/// constraints.
///
/// For zero-sized `T` or `n_new == 0`, may return either of `Ok` or
/// `Err`, but will *not* yield undefined behavior.
///
/// Always returns `Err` on arithmetic overflow.
///
/// Clients wishing to abort computation in response to a
/// reallocation error are encouraged to call the [`handle_alloc_error`] function,
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
unsafe fn realloc_array<T>(
&mut self,
ptr: NonNull<T>,
n_old: usize,
n_new: usize,
) -> Result<NonNull<T>, AllocErr>
where
Self: Sized,
{
match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
(Ok(k_old), Ok(k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
debug_assert!(k_old.align() == k_new.align());
self.realloc(ptr.cast(), k_old, k_new.size()).map(NonNull::cast)
}
_ => Err(AllocErr),
}
}
/// Deallocates a block suitable for holding `n` instances of `T`.
///
/// Captures a common usage pattern for allocators.
///
/// # Safety
///
/// This function is unsafe because undefined behavior can result
/// if the caller does not ensure both:
///
/// * `ptr` must denote a block of memory currently allocated via this allocator
///
/// * the layout of `[T; n]` must *fit* that block of memory.
///
/// # Errors
///
/// Returning `Err` indicates that either `[T; n]` or the given
/// memory block does not meet the allocator's size or alignment
/// constraints.
///
/// Always returns `Err` on arithmetic overflow.
unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
where
Self: Sized,
{
match Layout::array::<T>(n) {
Ok(k) if k.size() > 0 => Ok(self.dealloc(ptr.cast(), k)),
_ => Err(AllocErr),
}
}
}
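For reference, these helpers capture the "build a `Layout` for `T` or `[T; n]`, allocate, and later deallocate with the same layout" pattern. A minimal sketch of that pattern, written against the stable global-allocator functions rather than the unstable `AllocRef` trait itself, so it does not depend on the allocator API feature:

```rust
use std::alloc::{alloc, dealloc, Layout};

fn main() {
    // Roughly what alloc_array::<u32>(4) / dealloc_array::<u32>(ptr, 4) wrap up:
    // compute the layout of [u32; 4], allocate, use the block, free it with the
    // same layout.
    let layout = Layout::array::<u32>(4).expect("arithmetic overflow computing layout");
    unsafe {
        let ptr = alloc(layout) as *mut u32;
        assert!(!ptr.is_null(), "allocation failure");
        for i in 0..4 {
            ptr.add(i).write(i as u32);
        }
        assert_eq!(ptr.add(3).read(), 3);
        dealloc(ptr as *mut u8, layout);
    }
}
```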

View File

@ -1245,6 +1245,38 @@ impl<'b, T: ?Sized> Ref<'b, T> {
let borrow = orig.borrow.clone();
(Ref { value: a, borrow }, Ref { value: b, borrow: orig.borrow })
}
/// Convert into a reference to the underlying data.
///
/// The underlying `RefCell` can never be mutably borrowed from again and will always appear
/// already immutably borrowed. It is not a good idea to leak more than a constant number of
/// references. The `RefCell` can be immutably borrowed again if only a smaller number of leaks
/// have occurred in total.
///
/// This is an associated function that needs to be used as
/// `Ref::leak(...)`. A method would interfere with methods of the
/// same name on the contents of a `RefCell` used through `Deref`.
///
/// # Examples
///
/// ```
/// #![feature(cell_leak)]
/// use std::cell::{RefCell, Ref};
/// let cell = RefCell::new(0);
///
/// let value = Ref::leak(cell.borrow());
/// assert_eq!(*value, 0);
///
/// assert!(cell.try_borrow().is_ok());
/// assert!(cell.try_borrow_mut().is_err());
/// ```
#[unstable(feature = "cell_leak", issue = "69099")]
pub fn leak(orig: Ref<'b, T>) -> &'b T {
// By forgetting this Ref we ensure that the borrow counter in the RefCell never goes back
// to UNUSED again. No further mutable references can be created from the original cell.
mem::forget(orig.borrow);
orig.value
}
}
#[unstable(feature = "coerce_unsized", issue = "27732")]
@ -1330,6 +1362,37 @@ impl<'b, T: ?Sized> RefMut<'b, T> {
let borrow = orig.borrow.clone();
(RefMut { value: a, borrow }, RefMut { value: b, borrow: orig.borrow })
}
/// Convert into a mutable reference to the underlying data.
///
/// The underlying `RefCell` cannot be borrowed from again and will always appear already
/// mutably borrowed, making the returned reference the only reference to the interior.
///
/// This is an associated function that needs to be used as
/// `RefMut::leak(...)`. A method would interfere with methods of the
/// same name on the contents of a `RefCell` used through `Deref`.
///
/// # Examples
///
/// ```
/// #![feature(cell_leak)]
/// use std::cell::{RefCell, RefMut};
/// let cell = RefCell::new(0);
///
/// let value = RefMut::leak(cell.borrow_mut());
/// assert_eq!(*value, 0);
/// *value = 1;
///
/// assert!(cell.try_borrow_mut().is_err());
/// ```
#[unstable(feature = "cell_leak", issue = "69099")]
pub fn leak(orig: RefMut<'b, T>) -> &'b mut T {
// By forgetting this BorrowRefMut we ensure that the borrow counter in the RefCell never
// goes back to UNUSED again. No further references can be created from the original cell,
// making the current borrow the only reference for the remaining lifetime.
mem::forget(orig.borrow);
orig.value
}
}
struct BorrowRefMut<'b> {
@ -1475,6 +1538,7 @@ impl<T: ?Sized + fmt::Display> fmt::Display for RefMut<'_, T> {
#[lang = "unsafe_cell"]
#[stable(feature = "rust1", since = "1.0.0")]
#[repr(transparent)]
#[cfg_attr(not(bootstrap), repr(no_niche))] // rust-lang/rust#68303.
pub struct UnsafeCell<T: ?Sized> {
value: T,
}

View File

@ -1072,9 +1072,13 @@ impl char {
/// assert!(!esc.is_ascii_alphabetic());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_alphabetic(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_alphabetic()
pub const fn is_ascii_alphabetic(&self) -> bool {
match *self {
'A'..='Z' | 'a'..='z' => true,
_ => false,
}
}
/// Checks if the value is an ASCII uppercase character:
@ -1104,9 +1108,13 @@ impl char {
/// assert!(!esc.is_ascii_uppercase());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_uppercase(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_uppercase()
pub const fn is_ascii_uppercase(&self) -> bool {
match *self {
'A'..='Z' => true,
_ => false,
}
}
/// Checks if the value is an ASCII lowercase character:
@ -1136,9 +1144,13 @@ impl char {
/// assert!(!esc.is_ascii_lowercase());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_lowercase(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_lowercase()
pub const fn is_ascii_lowercase(&self) -> bool {
match *self {
'a'..='z' => true,
_ => false,
}
}
/// Checks if the value is an ASCII alphanumeric character:
@ -1171,9 +1183,13 @@ impl char {
/// assert!(!esc.is_ascii_alphanumeric());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_alphanumeric(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_alphanumeric()
pub const fn is_ascii_alphanumeric(&self) -> bool {
match *self {
'0'..='9' | 'A'..='Z' | 'a'..='z' => true,
_ => false,
}
}
/// Checks if the value is an ASCII decimal digit:
@ -1203,9 +1219,13 @@ impl char {
/// assert!(!esc.is_ascii_digit());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_digit(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_digit()
pub const fn is_ascii_digit(&self) -> bool {
match *self {
'0'..='9' => true,
_ => false,
}
}
/// Checks if the value is an ASCII hexadecimal digit:
@ -1238,9 +1258,13 @@ impl char {
/// assert!(!esc.is_ascii_hexdigit());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_hexdigit(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_hexdigit()
pub const fn is_ascii_hexdigit(&self) -> bool {
match *self {
'0'..='9' | 'A'..='F' | 'a'..='f' => true,
_ => false,
}
}
/// Checks if the value is an ASCII punctuation character:
@ -1274,9 +1298,13 @@ impl char {
/// assert!(!esc.is_ascii_punctuation());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_punctuation(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_punctuation()
pub const fn is_ascii_punctuation(&self) -> bool {
match *self {
'!'..='/' | ':'..='@' | '['..='`' | '{'..='~' => true,
_ => false,
}
}
/// Checks if the value is an ASCII graphic character:
@ -1306,9 +1334,13 @@ impl char {
/// assert!(!esc.is_ascii_graphic());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_graphic(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_graphic()
pub const fn is_ascii_graphic(&self) -> bool {
match *self {
'!'..='~' => true,
_ => false,
}
}
/// Checks if the value is an ASCII whitespace character:
@ -1355,9 +1387,13 @@ impl char {
/// assert!(!esc.is_ascii_whitespace());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_whitespace(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_whitespace()
pub const fn is_ascii_whitespace(&self) -> bool {
match *self {
'\t' | '\n' | '\x0C' | '\r' | ' ' => true,
_ => false,
}
}
/// Checks if the value is an ASCII control character:
@ -1389,8 +1425,12 @@ impl char {
/// assert!(esc.is_ascii_control());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_control(&self) -> bool {
self.is_ascii() && (*self as u8).is_ascii_control()
pub const fn is_ascii_control(&self) -> bool {
match *self {
'\0'..='\x1F' | '\x7F' => true,
_ => false,
}
}
}
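A small sketch of what the new `const fn` signatures allow. At the time of this change the methods were gated on the `const_ascii_ctype_on_intrinsics` feature named in the attributes above; they were const-stabilized in later releases, so the snippet runs as-is on newer toolchains:

```rust
// Compile-time ASCII classification of chars.
const IS_DIGIT: bool = '7'.is_ascii_digit();
const IS_HEX: bool = 'f'.is_ascii_hexdigit();
const IS_CTRL: bool = '\x7F'.is_ascii_control();

fn main() {
    assert!(IS_DIGIT && IS_HEX && IS_CTRL);
}
```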

View File

@ -361,6 +361,7 @@ impl Ordering {
/// assert!(data == b);
/// ```
#[inline]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn reverse(self) -> Ordering {
match self {
@ -398,6 +399,7 @@ impl Ordering {
/// assert_eq!(result, Ordering::Less);
/// ```
#[inline]
#[must_use]
#[stable(feature = "ordering_chaining", since = "1.17.0")]
pub fn then(self, other: Ordering) -> Ordering {
match self {
@ -435,6 +437,7 @@ impl Ordering {
/// assert_eq!(result, Ordering::Less);
/// ```
#[inline]
#[must_use]
#[stable(feature = "ordering_chaining", since = "1.17.0")]
pub fn then_with<F: FnOnce() -> Ordering>(self, f: F) -> Ordering {
match self {
@ -576,6 +579,7 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// assert_eq!(10.cmp(&5), Ordering::Greater);
/// assert_eq!(5.cmp(&5), Ordering::Equal);
/// ```
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
fn cmp(&self, other: &Self) -> Ordering;
@ -591,6 +595,7 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// ```
#[stable(feature = "ord_max_min", since = "1.21.0")]
#[inline]
#[must_use]
fn max(self, other: Self) -> Self
where
Self: Sized,
@ -610,6 +615,7 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// ```
#[stable(feature = "ord_max_min", since = "1.21.0")]
#[inline]
#[must_use]
fn min(self, other: Self) -> Self
where
Self: Sized,
@ -635,6 +641,7 @@ pub trait Ord: Eq + PartialOrd<Self> {
/// assert!(0.clamp(-2, 1) == 0);
/// assert!(2.clamp(-2, 1) == 1);
/// ```
#[must_use]
#[unstable(feature = "clamp", issue = "44095")]
fn clamp(self, min: Self, max: Self) -> Self
where
@ -915,6 +922,7 @@ pub macro PartialOrd($item:item) {
/// assert_eq!(2, cmp::min(2, 2));
/// ```
#[inline]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn min<T: Ord>(v1: T, v2: T) -> T {
v1.min(v2)
@ -935,6 +943,7 @@ pub fn min<T: Ord>(v1: T, v2: T) -> T {
/// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
/// ```
#[inline]
#[must_use]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
match compare(&v1, &v2) {
@ -958,6 +967,7 @@ pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
/// assert_eq!(cmp::min_by_key(-2, 2, |x: &i32| x.abs()), -2);
/// ```
#[inline]
#[must_use]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
@ -978,6 +988,7 @@ pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
/// assert_eq!(2, cmp::max(2, 2));
/// ```
#[inline]
#[must_use]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn max<T: Ord>(v1: T, v2: T) -> T {
v1.max(v2)
@ -998,6 +1009,7 @@ pub fn max<T: Ord>(v1: T, v2: T) -> T {
/// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2);
/// ```
#[inline]
#[must_use]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
match compare(&v1, &v2) {
@ -1021,6 +1033,7 @@ pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
/// assert_eq!(cmp::max_by_key(-2, 2, |x: &i32| x.abs()), 2);
/// ```
#[inline]
#[must_use]
#[unstable(feature = "cmp_min_max_by", issue = "64460")]
pub fn max_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
max_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))

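These comparison helpers all return a new value instead of mutating their receiver, which is exactly what the added `#[must_use]` annotations warn about when a result is silently dropped. A brief illustration:

```rust
use std::cmp::{self, Ordering};

fn main() {
    // reverse() and then() return new Orderings rather than mutating in place;
    // silently dropping the result is what #[must_use] now flags.
    let ord = Ordering::Less;
    assert_eq!(ord.reverse().then(Ordering::Equal), Ordering::Greater);
    assert_eq!(cmp::max(3, 7), 7);
}
```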
View File

@ -29,7 +29,6 @@ where
*num,
sign,
precision,
false,
buf.get_mut(),
parts.get_mut(),
);
@ -59,7 +58,6 @@ where
*num,
sign,
precision,
false,
buf.get_mut(),
parts.get_mut(),
);

View File

@ -238,16 +238,8 @@ pub struct Formatter<'a> {
// NB. Argument is essentially an optimized partially applied formatting function,
// equivalent to `exists T.(&T, fn(&T, &mut Formatter<'_>) -> Result)`.
struct Void {
_priv: (),
/// Erases all oibits, because `Void` erases the type of the object that
/// will be used to produce formatted output. Since we do not know what
/// oibits the real types have (and they can have any or none), we need to
/// take the most conservative approach and forbid all oibits.
///
/// It was added after #45197 showed that one could share a `!Sync`
/// object across threads by passing it into `format_args!`.
_oibit_remover: PhantomData<*mut dyn Fn()>,
extern "C" {
type Opaque;
}
/// This struct represents the generic "argument" which is taken by the Xprintf
@ -259,16 +251,23 @@ struct Void {
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
#[doc(hidden)]
pub struct ArgumentV1<'a> {
value: &'a Void,
formatter: fn(&Void, &mut Formatter<'_>) -> Result,
value: &'a Opaque,
formatter: fn(&Opaque, &mut Formatter<'_>) -> Result,
}
// This guarantees a single stable value for the function pointer associated with
// indices/counts in the formatting infrastructure.
//
// Note that a plain function would not be correct here, as functions are
// always tagged unnamed_addr with the current lowering to LLVM IR, so their
// address is not considered important to LLVM and as such the as_usize cast
// could have been miscompiled. In practice, we never call as_usize on non-usize
// containing data (as a matter of static generation of the formatting
// arguments), so this is merely an additional check.
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
static USIZE_MARKER: fn(&usize, &mut Formatter<'_>) -> Result = |_, _| loop {};
impl<'a> ArgumentV1<'a> {
#[inline(never)]
fn show_usize(x: &usize, f: &mut Formatter<'_>) -> Result {
Display::fmt(x, f)
}
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
pub fn new<'b, T>(x: &'b T, f: fn(&T, &mut Formatter<'_>) -> Result) -> ArgumentV1<'b> {
@ -278,11 +277,13 @@ impl<'a> ArgumentV1<'a> {
#[doc(hidden)]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
pub fn from_usize(x: &usize) -> ArgumentV1<'_> {
ArgumentV1::new(x, ArgumentV1::show_usize)
ArgumentV1::new(x, USIZE_MARKER)
}
fn as_usize(&self) -> Option<usize> {
if self.formatter as usize == ArgumentV1::show_usize as usize {
if self.formatter as usize == USIZE_MARKER as usize {
// SAFETY: The `formatter` field is only set to USIZE_MARKER if
// the value is a usize, so this is safe
Some(unsafe { *(self.value as *const _ as *const usize) })
} else {
None
@ -1356,11 +1357,11 @@ impl<'a> Formatter<'a> {
let mut align = old_align;
if self.sign_aware_zero_pad() {
// a sign always goes first
let sign = unsafe { str::from_utf8_unchecked(formatted.sign) };
let sign = formatted.sign;
self.buf.write_str(sign)?;
// remove the sign from the formatted parts
formatted.sign = b"";
formatted.sign = "";
width = width.saturating_sub(sign.len());
align = rt::v1::Alignment::Right;
self.fill = '0';
@ -1392,7 +1393,7 @@ impl<'a> Formatter<'a> {
}
if !formatted.sign.is_empty() {
write_bytes(self.buf, formatted.sign)?;
self.buf.write_str(formatted.sign)?;
}
for part in formatted.parts {
match *part {

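The `formatted.sign` handling above feeds the sign-aware zero-padding path: the sign is written first and the zero fill is applied after it. A quick illustration of that behaviour from the caller's side:

```rust
fn main() {
    // With `0` padding and an explicit `+`, the sign precedes the zero fill.
    assert_eq!(format!("{:+08.2}", 3.14159), "+0003.14");
    assert_eq!(format!("{:+08.2}", -3.14159), "-0003.14");
}
```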
View File

@ -4,6 +4,7 @@
use crate::fmt;
use crate::mem::MaybeUninit;
use crate::num::flt2dec;
use crate::ops::{Div, Rem, Sub};
use crate::ptr;
use crate::slice;
@ -256,6 +257,161 @@ macro_rules! impl_Display {
};
}
macro_rules! impl_Exp {
($($t:ident),* as $u:ident via $conv_fn:ident named $name:ident) => {
fn $name(
mut n: $u,
is_nonnegative: bool,
upper: bool,
f: &mut fmt::Formatter<'_>
) -> fmt::Result {
let (mut n, mut exponent, trailing_zeros, added_precision) = {
let mut exponent = 0;
// count and remove trailing decimal zeroes
while n % 10 == 0 && n >= 10 {
n /= 10;
exponent += 1;
}
let trailing_zeros = exponent;
let (added_precision, subtracted_precision) = match f.precision() {
Some(fmt_prec) => {
// number of decimal digits minus 1
let mut tmp = n;
let mut prec = 0;
while tmp >= 10 {
tmp /= 10;
prec += 1;
}
(fmt_prec.saturating_sub(prec), prec.saturating_sub(fmt_prec))
}
None => (0, 0)
};
for _ in 1..subtracted_precision {
n /= 10;
exponent += 1;
}
if subtracted_precision != 0 {
let rem = n % 10;
n /= 10;
exponent += 1;
// round up last digit
if rem >= 5 {
n += 1;
}
}
(n, exponent, trailing_zeros, added_precision)
};
// 39 digits (worst case u128) + . = 40
let mut buf = [MaybeUninit::<u8>::uninit(); 40];
let mut curr = buf.len() as isize; // index for buf
let buf_ptr = MaybeUninit::first_ptr_mut(&mut buf);
let lut_ptr = DEC_DIGITS_LUT.as_ptr();
// decode 2 chars at a time
while n >= 100 {
let d1 = ((n % 100) as isize) << 1;
curr -= 2;
unsafe {
ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
}
n /= 100;
exponent += 2;
}
// n is <= 99, so at most 2 chars long
let mut n = n as isize; // possibly reduce 64bit math
// decode second-to-last character
if n >= 10 {
curr -= 1;
unsafe {
*buf_ptr.offset(curr) = (n as u8 % 10_u8) + b'0';
}
n /= 10;
exponent += 1;
}
// add decimal point iff >1 mantissa digit will be printed
if exponent != trailing_zeros || added_precision != 0 {
curr -= 1;
unsafe {
*buf_ptr.offset(curr) = b'.';
}
}
let buf_slice = unsafe {
// decode last character
curr -= 1;
*buf_ptr.offset(curr) = (n as u8) + b'0';
let len = buf.len() - curr as usize;
slice::from_raw_parts(buf_ptr.offset(curr), len)
};
// stores 'e' (or 'E') and the up to 2-digit exponent
let mut exp_buf = [MaybeUninit::<u8>::uninit(); 3];
let exp_ptr = MaybeUninit::first_ptr_mut(&mut exp_buf);
let exp_slice = unsafe {
*exp_ptr.offset(0) = if upper {b'E'} else {b'e'};
let len = if exponent < 10 {
*exp_ptr.offset(1) = (exponent as u8) + b'0';
2
} else {
let off = exponent << 1;
ptr::copy_nonoverlapping(lut_ptr.offset(off), exp_ptr.offset(1), 2);
3
};
slice::from_raw_parts(exp_ptr, len)
};
let parts = &[
flt2dec::Part::Copy(buf_slice),
flt2dec::Part::Zero(added_precision),
flt2dec::Part::Copy(exp_slice)
];
let sign = if !is_nonnegative {
"-"
} else if f.sign_plus() {
"+"
} else {
""
};
let formatted = flt2dec::Formatted{sign, parts};
f.pad_formatted_parts(&formatted)
}
$(
#[stable(feature = "integer_exp_format", since = "1.42.0")]
impl fmt::LowerExp for $t {
#[allow(unused_comparisons)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let is_nonnegative = *self >= 0;
let n = if is_nonnegative {
self.$conv_fn()
} else {
// convert the negative number to positive by adding 1 to its two's complement
(!self.$conv_fn()).wrapping_add(1)
};
$name(n, is_nonnegative, false, f)
}
})*
$(
#[stable(feature = "integer_exp_format", since = "1.42.0")]
impl fmt::UpperExp for $t {
#[allow(unused_comparisons)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let is_nonnegative = *self >= 0;
let n = if is_nonnegative {
self.$conv_fn()
} else {
// convert the negative number to positive by adding 1 to its two's complement
(!self.$conv_fn()).wrapping_add(1)
};
$name(n, is_nonnegative, true, f)
}
})*
};
}
// Include wasm32 in here since it doesn't reflect the native pointer size, and
// often cares strongly about getting a smaller code size.
#[cfg(any(target_pointer_width = "64", target_arch = "wasm32"))]
@ -265,6 +421,10 @@ mod imp {
i8, u8, i16, u16, i32, u32, i64, u64, usize, isize
as u64 via to_u64 named fmt_u64
);
impl_Exp!(
i8, u8, i16, u16, i32, u32, i64, u64, usize, isize
as u64 via to_u64 named exp_u64
);
}
#[cfg(not(any(target_pointer_width = "64", target_arch = "wasm32")))]
@ -272,6 +432,9 @@ mod imp {
use super::*;
impl_Display!(i8, u8, i16, u16, i32, u32, isize, usize as u32 via to_u32 named fmt_u32);
impl_Display!(i64, u64 as u64 via to_u64 named fmt_u64);
impl_Exp!(i8, u8, i16, u16, i32, u32, isize, usize as u32 via to_u32 named exp_u32);
impl_Exp!(i64, u64 as u64 via to_u64 named exp_u64);
}
impl_Display!(i128, u128 as u128 via to_u128 named fmt_u128);
impl_Exp!(i128, u128 as u128 via to_u128 named exp_u128);
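The `impl_Exp!` invocations above wire up `LowerExp`/`UpperExp` for every integer type (the `integer_exp_format` feature, stable as of 1.42.0 per the attributes), so integers can be printed with the `{:e}`/`{:E}` specifiers. A quick check of the behaviour implemented by the generated `$name` function:

```rust
fn main() {
    assert_eq!(format!("{:e}", 1234), "1.234e3");
    assert_eq!(format!("{:E}", 1234), "1.234E3");
    // Precision truncates the mantissa and rounds the last kept digit.
    assert_eq!(format!("{:.2e}", 1234), "1.23e3");
    assert_eq!(format!("{:e}", -1200), "-1.2e3");
}
```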

View File

@ -121,7 +121,9 @@ macro_rules! load_int_le {
}};
}
/// Loads a u64 using up to 7 bytes of a byte slice.
/// Loads a u64 using up to 7 bytes of a byte slice. It looks clumsy but the
/// `copy_nonoverlapping` calls that occur (via `load_int_le!`) all have fixed
/// sizes and avoid calling `memcpy`, which is good for speed.
///
/// Unsafe because: unchecked indexing at start..start+len
#[inline]

View File

@ -1515,6 +1515,7 @@ fn overlaps<T>(src: *const T, dst: *const T, count: usize) -> bool {
/// ```
///
/// [`Vec::append`]: ../../std/vec/struct.Vec.html#method.append
#[doc(alias = "memcpy")]
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
@ -1579,6 +1580,7 @@ pub unsafe fn copy_nonoverlapping<T>(src: *const T, dst: *mut T, count: usize) {
/// dst
/// }
/// ```
#[doc(alias = "memmove")]
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub unsafe fn copy<T>(src: *const T, dst: *mut T, count: usize) {

View File

@ -1890,17 +1890,15 @@ where
#[inline]
fn nth(&mut self, n: usize) -> Option<I::Item> {
// Can't just add n + self.n due to overflow.
if self.n == 0 {
self.iter.nth(n)
} else {
if self.n > 0 {
let to_skip = self.n;
self.n = 0;
// nth(n) skips n+1
if self.iter.nth(to_skip - 1).is_none() {
return None;
}
self.iter.nth(n)
}
self.iter.nth(n)
}
#[inline]
@ -1916,17 +1914,13 @@ where
#[inline]
fn last(mut self) -> Option<I::Item> {
if self.n == 0 {
if self.n > 0 {
// nth(n) skips n+1
if self.iter.nth(self.n - 1).is_none() {
return None;
}
}
self.iter.last()
} else {
let next = self.next();
if next.is_some() {
// recurse. n should be 0.
self.last().or(next)
} else {
None
}
}
}
#[inline]

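The reworked `Skip::nth` above first drains the pending skip (`nth(to_skip - 1)` consumes exactly `to_skip` items) and then forwards the request to the inner iterator; `last` does the same before delegating. The observable behaviour, for reference:

```rust
fn main() {
    let mut it = (0..10).skip(3); // yields 3, 4, 5, ...
    assert_eq!(it.nth(2), Some(5)); // nth(2) returns the third remaining element
    assert_eq!(it.next(), Some(6));
    assert_eq!((0..10).skip(3).last(), Some(9));
}
```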
View File

@ -341,16 +341,15 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
#[inline]
fn next(&mut self) -> Option<A> {
self.compute_is_empty();
if self.is_empty.unwrap_or_default() {
if self.is_empty() {
return None;
}
let is_iterating = self.start < self.end;
self.is_empty = Some(!is_iterating);
Some(if is_iterating {
let n = self.start.add_one();
mem::replace(&mut self.start, n)
} else {
self.exhausted = true;
self.start.clone()
})
}
@ -369,8 +368,7 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
#[inline]
fn nth(&mut self, n: usize) -> Option<A> {
self.compute_is_empty();
if self.is_empty.unwrap_or_default() {
if self.is_empty() {
return None;
}
@ -379,13 +377,12 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
match plus_n.partial_cmp(&self.end) {
Some(Less) => {
self.is_empty = Some(false);
self.start = plus_n.add_one();
return Some(plus_n);
}
Some(Equal) => {
self.is_empty = Some(true);
self.start = plus_n.clone();
self.exhausted = true;
return Some(plus_n);
}
_ => {}
@ -393,7 +390,7 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
}
self.start = self.end.clone();
self.is_empty = Some(true);
self.exhausted = true;
None
}
@ -404,8 +401,6 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
F: FnMut(B, Self::Item) -> R,
R: Try<Ok = B>,
{
self.compute_is_empty();
if self.is_empty() {
return Try::from_ok(init);
}
@ -418,7 +413,7 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
accum = f(accum, n)?;
}
self.is_empty = Some(true);
self.exhausted = true;
if self.start == self.end {
accum = f(accum, self.start.clone())?;
@ -447,24 +442,22 @@ impl<A: Step> Iterator for ops::RangeInclusive<A> {
impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
#[inline]
fn next_back(&mut self) -> Option<A> {
self.compute_is_empty();
if self.is_empty.unwrap_or_default() {
if self.is_empty() {
return None;
}
let is_iterating = self.start < self.end;
self.is_empty = Some(!is_iterating);
Some(if is_iterating {
let n = self.end.sub_one();
mem::replace(&mut self.end, n)
} else {
self.exhausted = true;
self.end.clone()
})
}
#[inline]
fn nth_back(&mut self, n: usize) -> Option<A> {
self.compute_is_empty();
if self.is_empty.unwrap_or_default() {
if self.is_empty() {
return None;
}
@ -473,13 +466,12 @@ impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
match minus_n.partial_cmp(&self.start) {
Some(Greater) => {
self.is_empty = Some(false);
self.end = minus_n.sub_one();
return Some(minus_n);
}
Some(Equal) => {
self.is_empty = Some(true);
self.end = minus_n.clone();
self.exhausted = true;
return Some(minus_n);
}
_ => {}
@ -487,7 +479,7 @@ impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
}
self.end = self.start.clone();
self.is_empty = Some(true);
self.exhausted = true;
None
}
@ -498,8 +490,6 @@ impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
F: FnMut(B, Self::Item) -> R,
R: Try<Ok = B>,
{
self.compute_is_empty();
if self.is_empty() {
return Try::from_ok(init);
}
@ -512,7 +502,7 @@ impl<A: Step> DoubleEndedIterator for ops::RangeInclusive<A> {
accum = f(accum, n)?;
}
self.is_empty = Some(true);
self.exhausted = true;
if self.start == self.end {
accum = f(accum, self.start.clone())?;

View File
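With the `exhausted` flag, yielding the final element marks the range as done instead of relying on the old `is_empty: Option<bool>` bookkeeping. A small behavioural sketch of the iterator paths touched above:

```rust
fn main() {
    let mut r = 1..=3;
    assert_eq!(r.next(), Some(1));
    assert_eq!(r.next_back(), Some(3));
    assert_eq!(r.next(), Some(2)); // start == end: yields it and sets `exhausted`
    assert_eq!(r.next(), None);
    assert_eq!(r.next_back(), None);
}
```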

@ -70,13 +70,17 @@
#![feature(bound_cloned)]
#![feature(cfg_target_has_atomic)]
#![feature(concat_idents)]
#![feature(const_ascii_ctype_on_intrinsics)]
#![feature(const_alloc_layout)]
#![feature(const_if_match)]
#![feature(const_loop)]
#![feature(const_checked_int_methods)]
#![feature(const_euclidean_int_methods)]
#![feature(const_overflowing_int_methods)]
#![feature(const_saturating_int_methods)]
#![feature(const_int_unchecked_arith)]
#![feature(const_int_pow)]
#![feature(constctlz)]
#![feature(const_panic)]
#![feature(const_fn_union)]
#![feature(const_generics)]
@ -138,6 +142,7 @@
#![feature(const_type_id)]
#![feature(const_caller_location)]
#![feature(assoc_int_consts)]
#![cfg_attr(not(bootstrap), feature(no_niche))] // rust-lang/rust#68303
#[prelude_import]
#[allow(unused)]
@ -262,6 +267,9 @@ mod bool;
mod tuple;
mod unit;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub mod primitive;
// Pull in the `core_arch` crate directly into libcore. The contents of
// `core_arch` are in a different repository: rust-lang/stdarch.
//

View File

@ -727,6 +727,10 @@ unsafe impl<T: ?Sized> Freeze for &mut T {}
/// [`Pin<P>`]: ../pin/struct.Pin.html
/// [`pin module`]: ../../std/pin/index.html
#[stable(feature = "pin", since = "1.33.0")]
#[rustc_on_unimplemented(
on(_Self = "std::future::Future", note = "consider using `Box::pin`",),
message = "`{Self}` cannot be unpinned"
)]
#[lang = "unpin"]
pub auto trait Unpin {}

View File

@ -669,7 +669,7 @@ impl<T> MaybeUninit<T> {
/// // Now we can use `buf` as a normal slice:
/// buf.sort_unstable();
/// assert!(
/// buf.chunks(2).all(|chunk| chunk[0] <= chunk[1]),
/// buf.windows(2).all(|pair| pair[0] <= pair[1]),
/// "buffer is sorted",
/// );
/// ```

View File

@ -130,7 +130,7 @@ pub mod consts {
pub const LOG2_E: f32 = 1.44269504088896340735992468100189214_f32;
/// log<sub>2</sub>(10)
#[unstable(feature = "extra_log_consts", issue = "50540")]
#[stable(feature = "extra_log_consts", since = "1.43.0")]
pub const LOG2_10: f32 = 3.32192809488736234787031942948939018_f32;
/// log<sub>10</sub>(e)
@ -138,7 +138,7 @@ pub mod consts {
pub const LOG10_E: f32 = 0.434294481903251827651128918916605082_f32;
/// log<sub>10</sub>(2)
#[unstable(feature = "extra_log_consts", issue = "50540")]
#[stable(feature = "extra_log_consts", since = "1.43.0")]
pub const LOG10_2: f32 = 0.301029995663981195213738894724493027_f32;
/// ln(2)

View File

@ -126,7 +126,7 @@ pub mod consts {
pub const E: f64 = 2.71828182845904523536028747135266250_f64;
/// log<sub>2</sub>(10)
#[unstable(feature = "extra_log_consts", issue = "50540")]
#[stable(feature = "extra_log_consts", since = "1.43.0")]
pub const LOG2_10: f64 = 3.32192809488736234787031942948939018_f64;
/// log<sub>2</sub>(e)
@ -134,7 +134,7 @@ pub mod consts {
pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
/// log<sub>10</sub>(2)
#[unstable(feature = "extra_log_consts", issue = "50540")]
#[stable(feature = "extra_log_consts", since = "1.43.0")]
pub const LOG10_2: f64 = 0.301029995663981195213738894724493027_f64;
/// log<sub>10</sub>(e)

View File

@ -237,7 +237,7 @@ impl<'a> Part<'a> {
#[derive(Clone)]
pub struct Formatted<'a> {
/// A byte slice representing a sign, either `""`, `"-"` or `"+"`.
pub sign: &'static [u8],
pub sign: &'static str,
/// Formatted parts to be rendered after a sign and optional zero padding.
pub parts: &'a [Part<'a>],
}
@ -259,7 +259,7 @@ impl<'a> Formatted<'a> {
if out.len() < self.sign.len() {
return None;
}
out[..self.sign.len()].copy_from_slice(self.sign);
out[..self.sign.len()].copy_from_slice(self.sign.as_bytes());
let mut written = self.sign.len();
for part in self.parts {
@ -402,38 +402,38 @@ pub enum Sign {
}
/// Returns the static string corresponding to the sign to be formatted.
/// It can be either `b""`, `b"+"` or `b"-"`.
fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static [u8] {
/// It can be either `""`, `"+"` or `"-"`.
fn determine_sign(sign: Sign, decoded: &FullDecoded, negative: bool) -> &'static str {
match (*decoded, sign) {
(FullDecoded::Nan, _) => b"",
(FullDecoded::Zero, Sign::Minus) => b"",
(FullDecoded::Nan, _) => "",
(FullDecoded::Zero, Sign::Minus) => "",
(FullDecoded::Zero, Sign::MinusRaw) => {
if negative {
b"-"
"-"
} else {
b""
""
}
}
(FullDecoded::Zero, Sign::MinusPlus) => b"+",
(FullDecoded::Zero, Sign::MinusPlus) => "+",
(FullDecoded::Zero, Sign::MinusPlusRaw) => {
if negative {
b"-"
"-"
} else {
b"+"
"+"
}
}
(_, Sign::Minus) | (_, Sign::MinusRaw) => {
if negative {
b"-"
"-"
} else {
b""
""
}
}
(_, Sign::MinusPlus) | (_, Sign::MinusPlusRaw) => {
if negative {
b"-"
"-"
} else {
b"+"
"+"
}
}
}
@ -462,7 +462,6 @@ pub fn to_shortest_str<'a, T, F>(
v: T,
sign: Sign,
frac_digits: usize,
_upper: bool,
buf: &'a mut [u8],
parts: &'a mut [Part<'a>],
) -> Formatted<'a>
@ -679,7 +678,6 @@ pub fn to_exact_fixed_str<'a, T, F>(
v: T,
sign: Sign,
frac_digits: usize,
_upper: bool,
buf: &'a mut [u8],
parts: &'a mut [Part<'a>],
) -> Formatted<'a>

View File

@ -8,9 +8,18 @@ use crate::convert::Infallible;
use crate::fmt;
use crate::intrinsics;
use crate::mem;
use crate::ops;
use crate::str::FromStr;
// Used because the `?` operator is not allowed in a const context.
macro_rules! try_opt {
($e:expr) => {
match $e {
Some(x) => x,
None => return None,
}
};
}
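`try_opt!` stands in for the `?` operator on `Option` inside the `const fn` bodies below, since `?` desugars through traits that cannot be called in a const context. A sketch of the same pattern outside libcore; note that in this snapshot the const-ness of the checked arithmetic methods is still feature-gated, while on later toolchains the snippet compiles as written:

```rust
// Hypothetical helper mirroring the macro above.
macro_rules! try_opt {
    ($e:expr) => {
        match $e {
            Some(x) => x,
            None => return None,
        }
    };
}

const fn checked_double(n: u8) -> Option<u8> {
    // `n.checked_mul(2)?` is not allowed in a const fn, so unwrap it manually.
    Some(try_opt!(n.checked_mul(2)))
}

fn main() {
    assert_eq!(checked_double(7), Some(14));
    assert_eq!(checked_double(200), None); // 400 does not fit in u8
}
```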
macro_rules! impl_nonzero_fmt {
( #[$stability: meta] ( $( $Trait: ident ),+ ) for $Ty: ident ) => {
$(
@ -69,8 +78,9 @@ assert_eq!(size_of::<Option<core::num::", stringify!($Ty), ">>(), size_of::<", s
/// Creates a non-zero if the given value is not zero.
#[$stability]
#[rustc_const_unstable(feature = "const_nonzero_int_methods", issue = "53718")]
#[inline]
pub fn new(n: $Int) -> Option<Self> {
pub const fn new(n: $Int) -> Option<Self> {
if n != 0 {
// SAFETY: we just checked that there's no `0`
Some(unsafe { Self(n) })
@ -992,26 +1002,27 @@ $EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn checked_pow(self, mut exp: u32) -> Option<Self> {
pub const fn checked_pow(self, mut exp: u32) -> Option<Self> {
let mut base = self;
let mut acc: Self = 1;
while exp > 1 {
if (exp & 1) == 1 {
acc = acc.checked_mul(base)?;
acc = try_opt!(acc.checked_mul(base));
}
exp /= 2;
base = base.checked_mul(base)?;
base = try_opt!(base.checked_mul(base));
}
// Deal with the final bit of the exponent separately, since
// squaring the base afterwards is not necessary and may cause a
// needless overflow.
if exp == 1 {
acc = acc.checked_mul(base)?;
acc = try_opt!(acc.checked_mul(base));
}
Some(acc)
@ -1179,10 +1190,11 @@ assert_eq!(", stringify!($SelfT), "::MIN.saturating_pow(3), ", stringify!($SelfT
$EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn saturating_pow(self, exp: u32) -> Self {
pub const fn saturating_pow(self, exp: u32) -> Self {
match self.checked_pow(exp) {
Some(x) => x,
None if self < 0 && exp % 2 == 1 => Self::min_value(),
@ -1522,10 +1534,11 @@ assert_eq!(3i8.wrapping_pow(6), -39);",
$EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn wrapping_pow(self, mut exp: u32) -> Self {
pub const fn wrapping_pow(self, mut exp: u32) -> Self {
let mut base = self;
let mut acc: Self = 1;
@ -1899,10 +1912,11 @@ assert_eq!(3i8.overflowing_pow(5), (-13, true));",
$EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
pub const fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
let mut base = self;
let mut acc: Self = 1;
let mut overflown = false;
@ -1948,11 +1962,12 @@ assert_eq!(x.pow(5), 32);",
$EndFeature, "
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
#[rustc_inherit_overflow_checks]
pub fn pow(self, mut exp: u32) -> Self {
pub const fn pow(self, mut exp: u32) -> Self {
let mut base = self;
let mut acc = 1;
@ -3118,26 +3133,27 @@ Basic usage:
assert_eq!(", stringify!($SelfT), "::max_value().checked_pow(2), None);", $EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn checked_pow(self, mut exp: u32) -> Option<Self> {
pub const fn checked_pow(self, mut exp: u32) -> Option<Self> {
let mut base = self;
let mut acc: Self = 1;
while exp > 1 {
if (exp & 1) == 1 {
acc = acc.checked_mul(base)?;
acc = try_opt!(acc.checked_mul(base));
}
exp /= 2;
base = base.checked_mul(base)?;
base = try_opt!(base.checked_mul(base));
}
// Deal with the final bit of the exponent separately, since
// squaring the base afterwards is not necessary and may cause a
// needless overflow.
if exp == 1 {
acc = acc.checked_mul(base)?;
acc = try_opt!(acc.checked_mul(base));
}
Some(acc)
@ -3233,10 +3249,11 @@ assert_eq!(", stringify!($SelfT), "::MAX.saturating_pow(2), ", stringify!($SelfT
$EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn saturating_pow(self, exp: u32) -> Self {
pub const fn saturating_pow(self, exp: u32) -> Self {
match self.checked_pow(exp) {
Some(x) => x,
None => Self::max_value(),
@ -3526,10 +3543,11 @@ Basic usage:
assert_eq!(3u8.wrapping_pow(6), 217);", $EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn wrapping_pow(self, mut exp: u32) -> Self {
pub const fn wrapping_pow(self, mut exp: u32) -> Self {
let mut base = self;
let mut acc: Self = 1;
@ -3852,10 +3870,11 @@ Basic usage:
assert_eq!(3u8.overflowing_pow(6), (217, true));", $EndFeature, "
```"),
#[stable(feature = "no_panic_pow", since = "1.34.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
pub fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
pub const fn overflowing_pow(self, mut exp: u32) -> (Self, bool) {
let mut base = self;
let mut acc: Self = 1;
let mut overflown = false;
@ -3898,11 +3917,12 @@ Basic usage:
", $Feature, "assert_eq!(2", stringify!($SelfT), ".pow(5), 32);", $EndFeature, "
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[must_use = "this returns the result of the operation, \
without modifying the original"]
#[inline]
#[rustc_inherit_overflow_checks]
pub fn pow(self, mut exp: u32) -> Self {
pub const fn pow(self, mut exp: u32) -> Self {
let mut base = self;
let mut acc = 1;
@ -4013,7 +4033,8 @@ assert!(!10", stringify!($SelfT), ".is_power_of_two());", $EndFeature, "
// overflow cases it instead ends up returning the maximum value
// of the type, and can return 0 for 0.
#[inline]
fn one_less_than_next_power_of_two(self) -> Self {
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
const fn one_less_than_next_power_of_two(self) -> Self {
if self <= 1 { return 0; }
let p = self - 1;
@ -4041,10 +4062,11 @@ Basic usage:
assert_eq!(3", stringify!($SelfT), ".next_power_of_two(), 4);", $EndFeature, "
```"),
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
#[inline]
pub fn next_power_of_two(self) -> Self {
// Call the trait to get overflow checks
ops::Add::add(self.one_less_than_next_power_of_two(), 1)
#[rustc_inherit_overflow_checks]
pub const fn next_power_of_two(self) -> Self {
self.one_less_than_next_power_of_two() + 1
}
}
@ -4066,7 +4088,8 @@ $EndFeature, "
```"),
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn checked_next_power_of_two(self) -> Option<Self> {
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
pub const fn checked_next_power_of_two(self) -> Option<Self> {
self.one_less_than_next_power_of_two().checked_add(1)
}
}
@ -4090,7 +4113,8 @@ $EndFeature, "
```"),
#[unstable(feature = "wrapping_next_power_of_two", issue = "32463",
reason = "needs decision on wrapping behaviour")]
pub fn wrapping_next_power_of_two(self) -> Self {
#[rustc_const_unstable(feature = "const_int_pow", issue = "53718")]
pub const fn wrapping_next_power_of_two(self) -> Self {
self.one_less_than_next_power_of_two().wrapping_add(1)
}
}
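The pow family becomes `const fn` here (gated on `const_int_pow` in this snapshot, const-stabilized in later releases), so power computations can move into constants. A sketch assuming a toolchain where these methods are usable in const context:

```rust
const KIB: u32 = 2u32.pow(10);
const NEXT: u32 = 3u32.next_power_of_two();
const OVERFLOWED: Option<u8> = 3u8.checked_pow(6); // 729 does not fit in u8

fn main() {
    assert_eq!(KIB, 1024);
    assert_eq!(NEXT, 4);
    assert_eq!(OVERFLOWED, None);
}
```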
@ -4300,8 +4324,9 @@ impl u8 {
/// assert!(!non_ascii.is_ascii());
/// ```
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[rustc_const_stable(feature = "const_ascii_methods_on_intrinsics", since = "1.43.0")]
#[inline]
pub fn is_ascii(&self) -> bool {
pub const fn is_ascii(&self) -> bool {
*self & 128 == 0
}
@ -4448,8 +4473,9 @@ impl u8 {
/// assert!(!esc.is_ascii_alphabetic());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_alphabetic(&self) -> bool {
pub const fn is_ascii_alphabetic(&self) -> bool {
matches!(*self, b'A'..=b'Z' | b'a'..=b'z')
}
@ -4480,8 +4506,9 @@ impl u8 {
/// assert!(!esc.is_ascii_uppercase());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_uppercase(&self) -> bool {
pub const fn is_ascii_uppercase(&self) -> bool {
matches!(*self, b'A'..=b'Z')
}
@ -4512,8 +4539,9 @@ impl u8 {
/// assert!(!esc.is_ascii_lowercase());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_lowercase(&self) -> bool {
pub const fn is_ascii_lowercase(&self) -> bool {
matches!(*self, b'a'..=b'z')
}
@ -4547,8 +4575,9 @@ impl u8 {
/// assert!(!esc.is_ascii_alphanumeric());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_alphanumeric(&self) -> bool {
pub const fn is_ascii_alphanumeric(&self) -> bool {
matches!(*self, b'0'..=b'9' | b'A'..=b'Z' | b'a'..=b'z')
}
@ -4579,8 +4608,9 @@ impl u8 {
/// assert!(!esc.is_ascii_digit());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_digit(&self) -> bool {
pub const fn is_ascii_digit(&self) -> bool {
matches!(*self, b'0'..=b'9')
}
@ -4614,8 +4644,9 @@ impl u8 {
/// assert!(!esc.is_ascii_hexdigit());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_hexdigit(&self) -> bool {
pub const fn is_ascii_hexdigit(&self) -> bool {
matches!(*self, b'0'..=b'9' | b'A'..=b'F' | b'a'..=b'f')
}
@ -4650,8 +4681,9 @@ impl u8 {
/// assert!(!esc.is_ascii_punctuation());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_punctuation(&self) -> bool {
pub const fn is_ascii_punctuation(&self) -> bool {
matches!(*self, b'!'..=b'/' | b':'..=b'@' | b'['..=b'`' | b'{'..=b'~')
}
@ -4682,8 +4714,9 @@ impl u8 {
/// assert!(!esc.is_ascii_graphic());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_graphic(&self) -> bool {
pub const fn is_ascii_graphic(&self) -> bool {
matches!(*self, b'!'..=b'~')
}
@ -4731,8 +4764,9 @@ impl u8 {
/// assert!(!esc.is_ascii_whitespace());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_whitespace(&self) -> bool {
pub const fn is_ascii_whitespace(&self) -> bool {
matches!(*self, b'\t' | b'\n' | b'\x0C' | b'\r' | b' ')
}
@ -4765,8 +4799,9 @@ impl u8 {
/// assert!(esc.is_ascii_control());
/// ```
#[stable(feature = "ascii_ctype_on_intrinsics", since = "1.24.0")]
#[rustc_const_unstable(feature = "const_ascii_ctype_on_intrinsics", issue = "68983")]
#[inline]
pub fn is_ascii_control(&self) -> bool {
pub const fn is_ascii_control(&self) -> bool {
matches!(*self, b'\0'..=b'\x1F' | b'\x7F')
}
}
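The byte-level predicates get the same treatment as the `char` ones, with `is_ascii` itself const-stable as of 1.43.0 per the attribute above and the ctype checks behind the same feature gate at this point. A brief sketch on a toolchain where they are const-callable:

```rust
const HEADER_OK: bool = b'G'.is_ascii() && b'G'.is_ascii_uppercase();
const DIGIT: bool = b'9'.is_ascii_digit();

fn main() {
    assert!(HEADER_OK && DIGIT);
}
```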

View File

@ -340,24 +340,21 @@ pub struct RangeInclusive<Idx> {
// support that mode.
pub(crate) start: Idx,
pub(crate) end: Idx,
pub(crate) is_empty: Option<bool>,
// This field is:
// - `None` when next() or next_back() was never called
// - `Some(false)` when `start < end`
// - `Some(true)` when `end < start`
// - `Some(false)` when `start == end` and the range hasn't yet completed iteration
// - `Some(true)` when `start == end` and the range has completed iteration
// The field cannot be a simple `bool` because the `..=` constructor can
// accept non-PartialOrd types, also we want the constructor to be const.
// - `false` upon construction
// - `false` when iteration has yielded an element and the iterator is not exhausted
// - `true` when iteration has been used to exhaust the iterator
//
// This is required to support PartialEq and Hash without a PartialOrd bound or specialization.
pub(crate) exhausted: bool,
}
#[stable(feature = "inclusive_range", since = "1.26.0")]
impl<Idx: PartialEq> PartialEq for RangeInclusive<Idx> {
#[inline]
fn eq(&self, other: &Self) -> bool {
self.start == other.start
&& self.end == other.end
&& self.is_exhausted() == other.is_exhausted()
self.start == other.start && self.end == other.end && self.exhausted == other.exhausted
}
}
@ -369,8 +366,7 @@ impl<Idx: Hash> Hash for RangeInclusive<Idx> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.start.hash(state);
self.end.hash(state);
// Ideally we would hash `is_exhausted` here as well, but there's no
// way for us to call it.
self.exhausted.hash(state);
}
}
@ -389,7 +385,7 @@ impl<Idx> RangeInclusive<Idx> {
#[rustc_promotable]
#[rustc_const_stable(feature = "const_range_new", since = "1.32.0")]
pub const fn new(start: Idx, end: Idx) -> Self {
Self { start, end, is_empty: None }
Self { start, end, exhausted: false }
}
/// Returns the lower bound of the range (inclusive).
@ -465,18 +461,13 @@ impl<Idx: fmt::Debug> fmt::Debug for RangeInclusive<Idx> {
self.start.fmt(fmt)?;
write!(fmt, "..=")?;
self.end.fmt(fmt)?;
if self.exhausted {
write!(fmt, " (exhausted)")?;
}
Ok(())
}
}
impl<Idx: PartialEq<Idx>> RangeInclusive<Idx> {
// Returns true if this is a range that started non-empty, and was iterated
// to exhaustion.
fn is_exhausted(&self) -> bool {
Some(true) == self.is_empty && self.start == self.end
}
}
impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
/// Returns `true` if `item` is contained in the range.
///
@ -544,15 +535,7 @@ impl<Idx: PartialOrd<Idx>> RangeInclusive<Idx> {
#[unstable(feature = "range_is_empty", reason = "recently added", issue = "48111")]
#[inline]
pub fn is_empty(&self) -> bool {
self.is_empty.unwrap_or_else(|| !(self.start <= self.end))
}
// If this range's `is_empty` field is unknown (`None`), update it to be a concrete value.
#[inline]
pub(crate) fn compute_is_empty(&mut self) {
if self.is_empty.is_none() {
self.is_empty = Some(!(self.start <= self.end));
}
self.exhausted || !(self.start <= self.end)
}
}
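Because `exhausted` now participates in `PartialEq` and `Hash`, a range that has been iterated to completion is distinguishable from a freshly constructed one with the same bounds:

```rust
fn main() {
    let fresh = 3..=3;
    let mut used = 3..=3;
    assert_eq!(used.next(), Some(3)); // exhausts the range
    assert_ne!(fresh, used); // same bounds, different exhaustion state
}
```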

View File

@ -317,7 +317,7 @@ impl<T> Option<T> {
// Getting to contained values
/////////////////////////////////////////////////////////////////////////
/// Unwraps an option, yielding the content of a [`Some`].
/// Returns the contained [`Some`] value, consuming the `self` value.
///
/// # Panics
///
@ -348,17 +348,22 @@ impl<T> Option<T> {
}
}
/// Moves the value `v` out of the `Option<T>` if it is [`Some(v)`].
/// Returns the contained [`Some`] value, consuming the `self` value.
///
/// In general, because this function may panic, its use is discouraged.
/// Because this function may panic, its use is generally discouraged.
/// Instead, prefer to use pattern matching and handle the [`None`]
/// case explicitly.
/// case explicitly, or call [`unwrap_or`], [`unwrap_or_else`], or
/// [`unwrap_or_default`].
///
/// [`unwrap_or`]: #method.unwrap_or
/// [`unwrap_or_else`]: #method.unwrap_or_else
/// [`unwrap_or_default`]: #method.unwrap_or_default
///
/// # Panics
///
/// Panics if the self value equals [`None`].
///
/// [`Some(v)`]: #variant.Some
/// [`Some`]: #variant.Some
/// [`None`]: #variant.None
///
/// # Examples
@ -382,12 +387,13 @@ impl<T> Option<T> {
}
}
/// Returns the contained value or a default.
/// Returns the contained [`Some`] value or a provided default.
///
/// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing
/// the result of a function call, it is recommended to use [`unwrap_or_else`],
/// which is lazily evaluated.
///
/// [`Some`]: #variant.Some
/// [`unwrap_or_else`]: #method.unwrap_or_else
///
/// # Examples
@ -405,7 +411,7 @@ impl<T> Option<T> {
}
}
/// Returns the contained value or computes it from a closure.
/// Returns the contained [`Some`] value or computes it from a closure.
///
/// # Examples
///
@ -986,7 +992,7 @@ impl<T: Clone> Option<&mut T> {
}
impl<T: fmt::Debug> Option<T> {
/// Unwraps an option, expecting [`None`] and returning nothing.
/// Consumes `self` while expecting [`None`] and returning nothing.
///
/// # Panics
///
@ -1029,7 +1035,7 @@ impl<T: fmt::Debug> Option<T> {
}
}
/// Unwraps an option, expecting [`None`] and returning nothing.
/// Consumes `self` while expecting [`None`] and returning nothing.
///
/// # Panics
///
@ -1074,7 +1080,7 @@ impl<T: fmt::Debug> Option<T> {
}
impl<T: Default> Option<T> {
/// Returns the contained value or a default
/// Returns the contained [`Some`] value or a default
///
/// Consumes the `self` argument then, if [`Some`], returns the contained
/// value, otherwise if [`None`], returns the [default value] for that

67
src/libcore/primitive.rs Normal file
View File

@ -0,0 +1,67 @@
//! This module reexports the primitive types to allow usage that cannot be
//! shadowed by other declared types.
//!
//! This is normally only useful in macro generated code.
//!
//! An example of this is when generating a new struct and an impl for it:
//!
//! ```rust,compile_fail
//! pub struct bool;
//!
//! impl QueryId for bool {
//! const SOME_PROPERTY: bool = true;
//! }
//!
//! # trait QueryId { const SOME_PROPERTY: core::primitive::bool; }
//! ```
//!
//! Note that the `SOME_PROPERTY` associated constant would not compile, as its
//! type `bool` refers to the struct, rather than to the primitive bool type.
//!
//! A correct implementation could look like:
//!
//! ```rust
//! # #[allow(non_camel_case_types)]
//! pub struct bool;
//!
//! impl QueryId for bool {
//! const SOME_PROPERTY: core::primitive::bool = true;
//! }
//!
//! # trait QueryId { const SOME_PROPERTY: core::primitive::bool; }
//! ```
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use bool;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use char;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use f32;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use f64;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use i128;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use i16;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use i32;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use i64;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use i8;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use isize;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use str;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use u128;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use u16;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use u32;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use u64;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use u8;
#[stable(feature = "core_primitive", since = "1.43.0")]
pub use usize;

View File

@ -119,10 +119,13 @@ mod mut_ptr;
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `to_drop` must be [valid] for reads.
/// * `to_drop` must be [valid] for both reads and writes.
///
/// * `to_drop` must be properly aligned.
///
/// * The value `to_drop` points to must be valid for dropping, which may mean it must uphold
/// additional invariants - this is type-dependent.
///
/// Additionally, if `T` is not [`Copy`], using the pointed-to value after
/// calling `drop_in_place` can cause undefined behavior. Note that `*to_drop =
/// foo` counts as a use because it will cause the value to be dropped
@ -289,7 +292,7 @@ pub const fn slice_from_raw_parts_mut<T>(data: *mut T, len: usize) -> *mut [T] {
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * Both `x` and `y` must be [valid] for reads and writes.
/// * Both `x` and `y` must be [valid] for both reads and writes.
///
/// * Both `x` and `y` must be properly aligned.
///
@ -355,7 +358,7 @@ pub unsafe fn swap<T>(x: *mut T, y: *mut T) {
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * Both `x` and `y` must be [valid] for reads and writes of `count *
/// * Both `x` and `y` must be [valid] for both reads and writes of `count *
/// size_of::<T>()` bytes.
///
/// * Both `x` and `y` must be properly aligned.
@ -471,10 +474,12 @@ unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) {
///
/// Behavior is undefined if any of the following conditions are violated:
///
/// * `dst` must be [valid] for writes.
/// * `dst` must be [valid] for both reads and writes.
///
/// * `dst` must be properly aligned.
///
/// * `dst` must point to a properly initialized value of type `T`.
///
/// Note that even if `T` has size `0`, the pointer must be non-NULL and properly aligned.
///
/// [valid]: ../ptr/index.html#safety
@ -514,6 +519,8 @@ pub unsafe fn replace<T>(dst: *mut T, mut src: T) -> T {
/// * `src` must be properly aligned. Use [`read_unaligned`] if this is not the
/// case.
///
/// * `src` must point to a properly initialized value of type `T`.
///
/// Note that even if `T` has size `0`, the pointer must be non-NULL and properly aligned.
///
/// # Examples
@ -628,6 +635,8 @@ pub unsafe fn read<T>(src: *const T) -> T {
///
/// * `src` must be [valid] for reads.
///
/// * `src` must point to a properly initialized value of type `T`.
///
/// Like [`read`], `read_unaligned` creates a bitwise copy of `T`, regardless of
/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
/// value and the value at `*src` can [violate memory safety][read-ownership].
@ -922,6 +931,8 @@ pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
///
/// * `src` must be properly aligned.
///
/// * `src` must point to a properly initialized value of type `T`.
///
/// Like [`read`], `read_volatile` creates a bitwise copy of `T`, regardless of
/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
/// value and the value at `*src` can [violate memory safety][read-ownership].
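
The strengthened requirements above (valid for both reads and writes, properly aligned, pointing at a properly initialized value) are easy to satisfy with a pointer derived from a live local. A small sketch, not taken from the diff:

```
use std::ptr;

fn main() {
    // A local variable gives a pointer that is valid for reads and writes,
    // properly aligned, and pointing at an initialized `i32`, satisfying the
    // documented preconditions.
    let mut slot: i32 = 1;
    let p: *mut i32 = &mut slot;

    unsafe {
        // `replace` reads the old value out of `*p` and writes the new one in.
        let old = ptr::replace(p, 2);
        assert_eq!(old, 1);

        // `read` makes a bitwise copy; `i32` is `Copy`, so using both the
        // returned value and `*p` afterwards is fine.
        let current = ptr::read(p);
        assert_eq!(current, 2);
    }

    assert_eq!(slot, 2);
}
```
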

View File

@ -798,8 +798,7 @@ impl<T, E> Result<T, E> {
}
}
/// Unwraps a result, yielding the content of an [`Ok`].
/// Else, it returns `optb`.
/// Returns the contained [`Ok`] value or a provided default.
///
/// Arguments passed to `unwrap_or` are eagerly evaluated; if you are passing
/// the result of a function call, it is recommended to use [`unwrap_or_else`],
@ -814,27 +813,25 @@ impl<T, E> Result<T, E> {
/// Basic usage:
///
/// ```
/// let optb = 2;
/// let default = 2;
/// let x: Result<u32, &str> = Ok(9);
/// assert_eq!(x.unwrap_or(optb), 9);
/// assert_eq!(x.unwrap_or(default), 9);
///
/// let x: Result<u32, &str> = Err("error");
/// assert_eq!(x.unwrap_or(optb), optb);
/// assert_eq!(x.unwrap_or(default), default);
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn unwrap_or(self, optb: T) -> T {
pub fn unwrap_or(self, default: T) -> T {
match self {
Ok(t) => t,
Err(_) => optb,
Err(_) => default,
}
}
/// Unwraps a result, yielding the content of an [`Ok`].
/// If the value is an [`Err`] then it calls `op` with its value.
/// Returns the contained [`Ok`] value or computes it from a closure.
///
/// [`Ok`]: enum.Result.html#variant.Ok
/// [`Err`]: enum.Result.html#variant.Err
///
/// # Examples
///
@ -937,7 +934,44 @@ impl<T: Clone, E> Result<&mut T, E> {
}
impl<T, E: fmt::Debug> Result<T, E> {
/// Unwraps a result, yielding the content of an [`Ok`].
/// Returns the contained [`Ok`] value, consuming the `self` value.
///
/// # Panics
///
/// Panics if the value is an [`Err`], with a panic message including the
/// passed message, and the content of the [`Err`].
///
/// [`Ok`]: enum.Result.html#variant.Ok
/// [`Err`]: enum.Result.html#variant.Err
///
/// # Examples
///
/// Basic usage:
///
/// ```{.should_panic}
/// let x: Result<u32, &str> = Err("emergency failure");
/// x.expect("Testing expect"); // panics with `Testing expect: emergency failure`
/// ```
#[inline]
#[track_caller]
#[stable(feature = "result_expect", since = "1.4.0")]
pub fn expect(self, msg: &str) -> T {
match self {
Ok(t) => t,
Err(e) => unwrap_failed(msg, &e),
}
}
/// Returns the contained [`Ok`] value, consuming the `self` value.
///
/// Because this function may panic, its use is generally discouraged.
/// Instead, prefer to use pattern matching and handle the [`Err`]
/// case explicitly, or call [`unwrap_or`], [`unwrap_or_else`], or
/// [`unwrap_or_default`].
///
/// [`unwrap_or`]: #method.unwrap_or
/// [`unwrap_or_else`]: #method.unwrap_or_else
/// [`unwrap_or_default`]: #method.unwrap_or_default
///
/// # Panics
///
@ -969,13 +1003,15 @@ impl<T, E: fmt::Debug> Result<T, E> {
Err(e) => unwrap_failed("called `Result::unwrap()` on an `Err` value", &e),
}
}
}
/// Unwraps a result, yielding the content of an [`Ok`].
impl<T: fmt::Debug, E> Result<T, E> {
/// Returns the contained [`Err`] value, consuming the `self` value.
///
/// # Panics
///
/// Panics if the value is an [`Err`], with a panic message including the
/// passed message, and the content of the [`Err`].
/// Panics if the value is an [`Ok`], with a panic message including the
/// passed message, and the content of the [`Ok`].
///
/// [`Ok`]: enum.Result.html#variant.Ok
/// [`Err`]: enum.Result.html#variant.Err
@ -985,22 +1021,20 @@ impl<T, E: fmt::Debug> Result<T, E> {
/// Basic usage:
///
/// ```{.should_panic}
/// let x: Result<u32, &str> = Err("emergency failure");
/// x.expect("Testing expect"); // panics with `Testing expect: emergency failure`
/// let x: Result<u32, &str> = Ok(10);
/// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
/// ```
#[inline]
#[track_caller]
#[stable(feature = "result_expect", since = "1.4.0")]
pub fn expect(self, msg: &str) -> T {
#[stable(feature = "result_expect_err", since = "1.17.0")]
pub fn expect_err(self, msg: &str) -> E {
match self {
Ok(t) => t,
Err(e) => unwrap_failed(msg, &e),
}
Ok(t) => unwrap_failed(msg, &t),
Err(e) => e,
}
}
impl<T: fmt::Debug, E> Result<T, E> {
/// Unwraps a result, yielding the content of an [`Err`].
/// Returns the contained [`Err`] value, consuming the `self` value.
///
/// # Panics
///
@ -1031,38 +1065,10 @@ impl<T: fmt::Debug, E> Result<T, E> {
Err(e) => e,
}
}
/// Unwraps a result, yielding the content of an [`Err`].
///
/// # Panics
///
/// Panics if the value is an [`Ok`], with a panic message including the
/// passed message, and the content of the [`Ok`].
///
/// [`Ok`]: enum.Result.html#variant.Ok
/// [`Err`]: enum.Result.html#variant.Err
///
/// # Examples
///
/// Basic usage:
///
/// ```{.should_panic}
/// let x: Result<u32, &str> = Ok(10);
/// x.expect_err("Testing expect_err"); // panics with `Testing expect_err: 10`
/// ```
#[inline]
#[track_caller]
#[stable(feature = "result_expect_err", since = "1.17.0")]
pub fn expect_err(self, msg: &str) -> E {
match self {
Ok(t) => unwrap_failed(msg, &t),
Err(e) => e,
}
}
}
impl<T: Default, E> Result<T, E> {
/// Returns the contained value or a default
/// Returns the contained [`Ok`] value or a default
///
/// Consumes the `self` argument then, if [`Ok`], returns the contained
/// value, otherwise if [`Err`], returns the default value for that
@ -1101,7 +1107,7 @@ impl<T: Default, E> Result<T, E> {
#[unstable(feature = "unwrap_infallible", reason = "newly added", issue = "61695")]
impl<T, E: Into<!>> Result<T, E> {
/// Unwraps a result that can never be an [`Err`], yielding the content of the [`Ok`].
/// Returns the contained [`Ok`] value, but never panics.
///
/// Unlike [`unwrap`], this method is known to never panic on the
/// result types it is implemented for. Therefore, it can be used
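
A short sketch (not from the diff) exercising the reworded APIs, the renamed `default` parameter of `unwrap_or`, and the `expect`/`expect_err` pair whose docs are reordered above:

```
fn main() {
    let ok: Result<u32, &str> = Ok(3);
    let err: Result<u32, &str> = Err("nope");

    // `unwrap_or` eagerly evaluates its argument; prefer `unwrap_or_else`
    // when the default is expensive to compute.
    assert_eq!(ok.unwrap_or(0), 3);
    assert_eq!(err.unwrap_or(0), 0);
    assert_eq!(err.unwrap_or_else(|e| e.len() as u32), 4);

    // `expect` panics on `Err`, `expect_err` panics on `Ok`; both include the
    // provided message and the unexpected value in the panic message.
    assert_eq!(ok.expect("value should be present"), 3);
    assert_eq!(err.expect_err("operation should have failed"), "nope");
}
```
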

View File

@ -1155,6 +1155,69 @@ impl<T> [T] {
SplitMut { v: self, pred, finished: false }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`. The matched element is included at the end of the previous
/// subslice as a terminator.
///
/// # Examples
///
/// ```
/// #![feature(split_inclusive)]
/// let slice = [10, 40, 33, 20];
/// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
/// assert_eq!(iter.next().unwrap(), &[20]);
/// assert!(iter.next().is_none());
/// ```
///
/// If the last element of the slice is matched,
/// that element will be considered the terminator of the preceding slice.
/// That slice will be the last item returned by the iterator.
///
/// ```
/// #![feature(split_inclusive)]
/// let slice = [3, 10, 40, 33];
/// let mut iter = slice.split_inclusive(|num| num % 3 == 0);
///
/// assert_eq!(iter.next().unwrap(), &[3]);
/// assert_eq!(iter.next().unwrap(), &[10, 40, 33]);
/// assert!(iter.next().is_none());
/// ```
#[unstable(feature = "split_inclusive", issue = "none")]
#[inline]
pub fn split_inclusive<F>(&self, pred: F) -> SplitInclusive<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitInclusive { v: self, pred, finished: false }
}
/// Returns an iterator over mutable subslices separated by elements that
/// match `pred`. The matched element is included at the end of the previous
/// subslice as a terminator.
///
/// # Examples
///
/// ```
/// #![feature(split_inclusive)]
/// let mut v = [10, 40, 30, 20, 60, 50];
///
/// for group in v.split_inclusive_mut(|num| *num % 3 == 0) {
/// let terminator_idx = group.len()-1;
/// group[terminator_idx] = 1;
/// }
/// assert_eq!(v, [10, 40, 1, 20, 1, 1]);
/// ```
#[unstable(feature = "split_inclusive", issue = "none")]
#[inline]
pub fn split_inclusive_mut<F>(&mut self, pred: F) -> SplitInclusiveMut<'_, T, F>
where
F: FnMut(&T) -> bool,
{
SplitInclusiveMut { v: self, pred, finished: false }
}
/// Returns an iterator over subslices separated by elements that match
/// `pred`, starting at the end of the slice and working backwards.
/// The matched element is not contained in the subslices.
@ -3675,7 +3738,106 @@ where
#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for Split<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the subslices of the vector which are separated
/// An iterator over subslices separated by elements that match a predicate
/// function. Unlike `Split`, it contains the matched part as a terminator
/// of the subslice.
///
/// This struct is created by the [`split_inclusive`] method on [slices].
///
/// [`split_inclusive`]: ../../std/primitive.slice.html#method.split_inclusive
/// [slices]: ../../std/primitive.slice.html
#[unstable(feature = "split_inclusive", issue = "none")]
pub struct SplitInclusive<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a [T],
pred: P,
finished: bool,
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<T: fmt::Debug, P> fmt::Debug for SplitInclusive<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusive")
.field("v", &self.v)
.field("finished", &self.finished)
.finish()
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[unstable(feature = "split_inclusive", issue = "none")]
impl<T, P> Clone for SplitInclusive<'_, T, P>
where
P: Clone + FnMut(&T) -> bool,
{
fn clone(&self) -> Self {
SplitInclusive { v: self.v, pred: self.pred.clone(), finished: self.finished }
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, T, P> Iterator for SplitInclusive<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a [T];
#[inline]
fn next(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
let idx =
self.v.iter().position(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(self.v.len());
if idx == self.v.len() {
self.finished = true;
}
let ret = Some(&self.v[..idx]);
self.v = &self.v[idx..];
ret
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished { (0, Some(0)) } else { (1, Some(self.v.len() + 1)) }
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, T, P> DoubleEndedIterator for SplitInclusive<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a [T]> {
if self.finished {
return None;
}
// The last index of self.v is already checked and found to match
// by the last iteration, so we start searching a new match
// one index to the left.
let remainder = if self.v.len() == 0 { &[] } else { &self.v[..(self.v.len() - 1)] };
let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
if idx == 0 {
self.finished = true;
}
let ret = Some(&self.v[idx..]);
self.v = &self.v[..idx];
ret
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<T, P> FusedIterator for SplitInclusive<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of the vector which are separated
/// by elements that match `pred`.
///
/// This struct is created by the [`split_mut`] method on [slices].
@ -3789,6 +3951,114 @@ where
#[stable(feature = "fused", since = "1.26.0")]
impl<T, P> FusedIterator for SplitMut<'_, T, P> where P: FnMut(&T) -> bool {}
/// An iterator over the mutable subslices of the vector which are separated
/// by elements that match `pred`. Unlike `SplitMut`, it contains the matched
/// parts in the ends of the subslices.
///
/// This struct is created by the [`split_inclusive_mut`] method on [slices].
///
/// [`split_inclusive_mut`]: ../../std/primitive.slice.html#method.split_inclusive_mut
/// [slices]: ../../std/primitive.slice.html
#[unstable(feature = "split_inclusive", issue = "none")]
pub struct SplitInclusiveMut<'a, T: 'a, P>
where
P: FnMut(&T) -> bool,
{
v: &'a mut [T],
pred: P,
finished: bool,
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<T: fmt::Debug, P> fmt::Debug for SplitInclusiveMut<'_, T, P>
where
P: FnMut(&T) -> bool,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusiveMut")
.field("v", &self.v)
.field("finished", &self.finished)
.finish()
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, T, P> Iterator for SplitInclusiveMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
type Item = &'a mut [T];
#[inline]
fn next(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = {
// work around borrowck limitations
let pred = &mut self.pred;
self.v.iter().position(|x| (*pred)(x))
};
let idx = idx_opt.map(|idx| idx + 1).unwrap_or(self.v.len());
if idx == self.v.len() {
self.finished = true;
}
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = tail;
Some(head)
}
#[inline]
fn size_hint(&self) -> (usize, Option<usize>) {
if self.finished {
(0, Some(0))
} else {
// if the predicate doesn't match anything, we yield one slice
// if it matches every element, we yield len+1 empty slices.
(1, Some(self.v.len() + 1))
}
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, T, P> DoubleEndedIterator for SplitInclusiveMut<'a, T, P>
where
P: FnMut(&T) -> bool,
{
#[inline]
fn next_back(&mut self) -> Option<&'a mut [T]> {
if self.finished {
return None;
}
let idx_opt = if self.v.len() == 0 {
None
} else {
// work around borrowck limitations
let pred = &mut self.pred;
// The last index of self.v is already checked and found to match
// by the last iteration, so we start searching a new match
// one index to the left.
let remainder = &self.v[..(self.v.len() - 1)];
remainder.iter().rposition(|x| (*pred)(x))
};
let idx = idx_opt.map(|idx| idx + 1).unwrap_or(0);
if idx == 0 {
self.finished = true;
}
let tmp = mem::replace(&mut self.v, &mut []);
let (head, tail) = tmp.split_at_mut(idx);
self.v = head;
Some(tail)
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<T, P> FusedIterator for SplitInclusiveMut<'_, T, P> where P: FnMut(&T) -> bool {}
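
A usage sketch (not part of the diff; at the time it also required the `#![feature(split_inclusive)]` gate shown in the doc examples) tying the forward and backward iterator implementations together:

```
fn main() {
    let slice = [10, 40, 33, 20];

    // Forward: the matching element (33) ends the first subslice.
    let forward: Vec<&[i32]> = slice.split_inclusive(|n| n % 3 == 0).collect();
    assert_eq!(forward, [&[10, 40, 33][..], &[20][..]]);

    // Backward: `next_back` searches from one element left of the end, so the
    // same subslices come back in reverse order.
    let backward: Vec<&[i32]> = slice.split_inclusive(|n| n % 3 == 0).rev().collect();
    assert_eq!(backward, [&[20][..], &[10, 40, 33][..]]);
}
```
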
/// An iterator over subslices separated by elements that match a predicate
/// function, starting from the end of the slice.
///

View File

@ -1132,6 +1132,26 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> {
}
}
#[inline]
fn next_inclusive(&mut self) -> Option<&'a str> {
if self.finished {
return None;
}
let haystack = self.matcher.haystack();
match self.matcher.next_match() {
// SAFETY: `Searcher` guarantees that `b` lies on unicode boundary,
// and self.start is either the start of the original string,
// or `b` was assigned to it, so it also lies on unicode boundary.
Some((_, b)) => unsafe {
let elt = haystack.get_unchecked(self.start..b);
self.start = b;
Some(elt)
},
None => self.get_end(),
}
}
#[inline]
fn next_back(&mut self) -> Option<&'a str>
where
@ -1168,6 +1188,49 @@ impl<'a, P: Pattern<'a>> SplitInternal<'a, P> {
},
}
}
#[inline]
fn next_back_inclusive(&mut self) -> Option<&'a str>
where
P::Searcher: ReverseSearcher<'a>,
{
if self.finished {
return None;
}
if !self.allow_trailing_empty {
self.allow_trailing_empty = true;
match self.next_back_inclusive() {
Some(elt) if !elt.is_empty() => return Some(elt),
_ => {
if self.finished {
return None;
}
}
}
}
let haystack = self.matcher.haystack();
match self.matcher.next_match_back() {
// SAFETY: `Searcher` guarantees that `b` lies on unicode boundary,
// and self.end is either the end of the original string,
// or `b` was assigned to it, so it also lies on unicode boundary.
Some((_, b)) => unsafe {
let elt = haystack.get_unchecked(b..self.end);
self.end = b;
Some(elt)
},
// SAFETY: self.start is either the start of the original string,
// or start of a substring that represents the part of the string that hasn't
// been iterated yet. Either way, it is guaranteed to lie on unicode boundary.
// self.end is either the end of the original string,
// or `b` was assigned to it, so it also lies on unicode boundary.
None => unsafe {
self.finished = true;
Some(haystack.get_unchecked(self.start..self.end))
},
}
}
}
generate_pattern_iterators! {
@ -1499,7 +1562,7 @@ fn contains_nonascii(x: usize) -> bool {
/// Walks through `v` checking that it's a valid UTF-8 sequence,
/// returning `Ok(())` in that case, or, if it is invalid, `Err(err)`.
#[inline]
#[inline(always)]
fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
let mut index = 0;
let len = v.len();
@ -2423,7 +2486,7 @@ impl str {
/// Callers of this function are responsible that these preconditions are
/// satisfied:
///
/// * The starting index must come before the ending index;
/// * The starting index must not exceed the ending index;
/// * Indexes must be within bounds of the original slice;
/// * Indexes must lie on UTF-8 sequence boundaries.
///
@ -2455,7 +2518,7 @@ impl str {
/// Callers of this function are responsible that these preconditions are
/// satisfied:
///
/// * The starting index must come before the ending index;
/// * The starting index must not exceed the ending index;
/// * Indexes must be within bounds of the original slice;
/// * Indexes must lie on UTF-8 sequence boundaries.
///
@ -2500,7 +2563,7 @@ impl str {
/// Callers of this function are responsible that three preconditions are
/// satisfied:
///
/// * `begin` must come before `end`.
/// * `begin` must not exceed `end`.
/// * `begin` and `end` must be byte positions within the string slice.
/// * `begin` and `end` must lie on UTF-8 sequence boundaries.
///
@ -2549,7 +2612,7 @@ impl str {
/// Callers of this function are responsible that three preconditions are
/// satisfied:
///
/// * `begin` must come before `end`.
/// * `begin` must not exceed `end`.
/// * `begin` and `end` must be byte positions within the string slice.
/// * `begin` and `end` must lie on UTF-8 sequence boundaries.
#[stable(feature = "str_slice_mut", since = "1.5.0")]
@ -2658,7 +2721,8 @@ impl str {
///
/// It's important to remember that [`char`] represents a Unicode Scalar
/// Value, and may not match your idea of what a 'character' is. Iteration
/// over grapheme clusters may be what you actually want.
/// over grapheme clusters may be what you actually want. This functionality
/// is not provided by Rust's standard library; check crates.io instead.
///
/// # Examples
///
@ -3212,6 +3276,42 @@ impl str {
})
}
/// An iterator over substrings of this string slice, separated by
/// characters matched by a pattern. Differs from the iterator produced by
/// `split` in that `split_inclusive` leaves the matched part as the
/// terminator of the substring.
///
/// # Examples
///
/// ```
/// #![feature(split_inclusive)]
/// let v: Vec<&str> = "Mary had a little lamb\nlittle lamb\nlittle lamb."
/// .split_inclusive('\n').collect();
/// assert_eq!(v, ["Mary had a little lamb\n", "little lamb\n", "little lamb."]);
/// ```
///
/// If the last element of the string is matched,
/// that element will be considered the terminator of the preceding substring.
/// That substring will be the last item returned by the iterator.
///
/// ```
/// #![feature(split_inclusive)]
/// let v: Vec<&str> = "Mary had a little lamb\nlittle lamb\nlittle lamb.\n"
/// .split_inclusive('\n').collect();
/// assert_eq!(v, ["Mary had a little lamb\n", "little lamb\n", "little lamb.\n"]);
/// ```
#[unstable(feature = "split_inclusive", issue = "none")]
#[inline]
pub fn split_inclusive<'a, P: Pattern<'a>>(&'a self, pat: P) -> SplitInclusive<'a, P> {
SplitInclusive(SplitInternal {
start: 0,
end: self.len(),
matcher: pat.into_searcher(self),
allow_trailing_empty: false,
finished: false,
})
}
/// An iterator over substrings of the given string slice, separated by
/// characters matched by a pattern and yielded in reverse order.
///
@ -4405,6 +4505,19 @@ pub struct SplitAsciiWhitespace<'a> {
inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, BytesIsNotEmpty>, UnsafeBytesToStr>,
}
/// An iterator over the substrings of a string,
/// terminated by a substring matching a predicate function.
/// Unlike `Split`, it contains the matched part as a terminator
/// of the substring.
///
/// This struct is created by the [`split_inclusive`] method on [`str`].
/// See its documentation for more.
///
/// [`split_inclusive`]: ../../std/primitive.str.html#method.split_inclusive
/// [`str`]: ../../std/primitive.str.html
#[unstable(feature = "split_inclusive", issue = "none")]
pub struct SplitInclusive<'a, P: Pattern<'a>>(SplitInternal<'a, P>);
impl_fn_for_zst! {
#[derive(Clone)]
struct IsWhitespace impl Fn = |c: char| -> bool {
@ -4495,6 +4608,44 @@ impl<'a> DoubleEndedIterator for SplitAsciiWhitespace<'a> {
#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
impl FusedIterator for SplitAsciiWhitespace<'_> {}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, P: Pattern<'a>> Iterator for SplitInclusive<'a, P> {
type Item = &'a str;
#[inline]
fn next(&mut self) -> Option<&'a str> {
self.0.next_inclusive()
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, P: Pattern<'a, Searcher: fmt::Debug>> fmt::Debug for SplitInclusive<'a, P> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("SplitInclusive").field("0", &self.0).finish()
}
}
// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, P: Pattern<'a, Searcher: Clone>> Clone for SplitInclusive<'a, P> {
fn clone(&self) -> Self {
SplitInclusive(self.0.clone())
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, P: Pattern<'a, Searcher: ReverseSearcher<'a>>> DoubleEndedIterator
for SplitInclusive<'a, P>
{
#[inline]
fn next_back(&mut self) -> Option<&'a str> {
self.0.next_back_inclusive()
}
}
#[unstable(feature = "split_inclusive", issue = "none")]
impl<'a, P: Pattern<'a>> FusedIterator for SplitInclusive<'a, P> {}
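
A sketch (not from the diff; at the time the `split_inclusive` feature gate was still required) showing how `next_inclusive` and `next_back_inclusive` behave from the caller's side:

```
fn main() {
    let text = "Mary had a little lamb\nlittle lamb\nlittle lamb.";

    // Forward iteration keeps the matched '\n' at the end of each piece.
    let forward: Vec<&str> = text.split_inclusive('\n').collect();
    assert_eq!(forward, ["Mary had a little lamb\n", "little lamb\n", "little lamb."]);

    // Reverse iteration goes through `next_back_inclusive` and yields the
    // same pieces from the back.
    let backward: Vec<&str> = text.split_inclusive('\n').rev().collect();
    assert_eq!(backward, ["little lamb.", "little lamb\n", "Mary had a little lamb\n"]);
}
```
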
/// An iterator of [`u16`] over the string encoded as UTF-16.
///
/// [`u16`]: ../../std/primitive.u16.html

View File

@ -38,6 +38,16 @@ fn test_format_int() {
assert_eq!(format!("{:o}", 1i16), "1");
assert_eq!(format!("{:o}", 1i32), "1");
assert_eq!(format!("{:o}", 1i64), "1");
assert_eq!(format!("{:e}", 1isize), "1e0");
assert_eq!(format!("{:e}", 1i8), "1e0");
assert_eq!(format!("{:e}", 1i16), "1e0");
assert_eq!(format!("{:e}", 1i32), "1e0");
assert_eq!(format!("{:e}", 1i64), "1e0");
assert_eq!(format!("{:E}", 1isize), "1E0");
assert_eq!(format!("{:E}", 1i8), "1E0");
assert_eq!(format!("{:E}", 1i16), "1E0");
assert_eq!(format!("{:E}", 1i32), "1E0");
assert_eq!(format!("{:E}", 1i64), "1E0");
assert_eq!(format!("{}", 1usize), "1");
assert_eq!(format!("{}", 1u8), "1");
@ -69,6 +79,14 @@ fn test_format_int() {
assert_eq!(format!("{:o}", 1u16), "1");
assert_eq!(format!("{:o}", 1u32), "1");
assert_eq!(format!("{:o}", 1u64), "1");
assert_eq!(format!("{:e}", 1u8), "1e0");
assert_eq!(format!("{:e}", 1u16), "1e0");
assert_eq!(format!("{:e}", 1u32), "1e0");
assert_eq!(format!("{:e}", 1u64), "1e0");
assert_eq!(format!("{:E}", 1u8), "1E0");
assert_eq!(format!("{:E}", 1u16), "1E0");
assert_eq!(format!("{:E}", 1u32), "1E0");
assert_eq!(format!("{:E}", 1u64), "1E0");
// Test a larger number
assert_eq!(format!("{:b}", 55), "110111");
@ -76,6 +94,64 @@ fn test_format_int() {
assert_eq!(format!("{}", 55), "55");
assert_eq!(format!("{:x}", 55), "37");
assert_eq!(format!("{:X}", 55), "37");
assert_eq!(format!("{:e}", 55), "5.5e1");
assert_eq!(format!("{:E}", 55), "5.5E1");
assert_eq!(format!("{:e}", 10000000000u64), "1e10");
assert_eq!(format!("{:E}", 10000000000u64), "1E10");
assert_eq!(format!("{:e}", 10000000001u64), "1.0000000001e10");
assert_eq!(format!("{:E}", 10000000001u64), "1.0000000001E10");
}
#[test]
fn test_format_int_exp_limits() {
use core::{i128, i16, i32, i64, i8, u128, u16, u32, u64, u8};
assert_eq!(format!("{:e}", i8::MIN), "-1.28e2");
assert_eq!(format!("{:e}", i8::MAX), "1.27e2");
assert_eq!(format!("{:e}", i16::MIN), "-3.2768e4");
assert_eq!(format!("{:e}", i16::MAX), "3.2767e4");
assert_eq!(format!("{:e}", i32::MIN), "-2.147483648e9");
assert_eq!(format!("{:e}", i32::MAX), "2.147483647e9");
assert_eq!(format!("{:e}", i64::MIN), "-9.223372036854775808e18");
assert_eq!(format!("{:e}", i64::MAX), "9.223372036854775807e18");
assert_eq!(format!("{:e}", i128::MIN), "-1.70141183460469231731687303715884105728e38");
assert_eq!(format!("{:e}", i128::MAX), "1.70141183460469231731687303715884105727e38");
assert_eq!(format!("{:e}", u8::MAX), "2.55e2");
assert_eq!(format!("{:e}", u16::MAX), "6.5535e4");
assert_eq!(format!("{:e}", u32::MAX), "4.294967295e9");
assert_eq!(format!("{:e}", u64::MAX), "1.8446744073709551615e19");
assert_eq!(format!("{:e}", u128::MAX), "3.40282366920938463463374607431768211455e38");
}
#[test]
fn test_format_int_exp_precision() {
use core::{i128, i16, i32, i64, i8};
// test that float and integer match
let big_int: u32 = 314_159_265;
assert_eq!(format!("{:.1e}", big_int), format!("{:.1e}", f64::from(big_int)));
// test adding precision
assert_eq!(format!("{:.10e}", i8::MIN), "-1.2800000000e2");
assert_eq!(format!("{:.10e}", i16::MIN), "-3.2768000000e4");
assert_eq!(format!("{:.10e}", i32::MIN), "-2.1474836480e9");
assert_eq!(format!("{:.20e}", i64::MIN), "-9.22337203685477580800e18");
assert_eq!(format!("{:.40e}", i128::MIN), "-1.7014118346046923173168730371588410572800e38");
// test rounding
assert_eq!(format!("{:.1e}", i8::MIN), "-1.3e2");
assert_eq!(format!("{:.1e}", i16::MIN), "-3.3e4");
assert_eq!(format!("{:.1e}", i32::MIN), "-2.1e9");
assert_eq!(format!("{:.1e}", i64::MIN), "-9.2e18");
assert_eq!(format!("{:.1e}", i128::MIN), "-1.7e38");
// test huge precision
assert_eq!(format!("{:.1000e}", 1), format!("1.{}e0", "0".repeat(1000)));
// test zero precision
assert_eq!(format!("{:.0e}", 1), format!("1e0",));
// test padding with precision (and sign)
assert_eq!(format!("{:+10.3e}", 1), " +1.000e0");
}
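
For reference, a free-standing sketch (assertions mirrored from the tests above) of the new `LowerExp`/`UpperExp` formatting for integers:

```
fn main() {
    // Integers now format with `{:e}` / `{:E}` like floats do.
    assert_eq!(format!("{:e}", 55), "5.5e1");
    assert_eq!(format!("{:E}", 10_000_000_001u64), "1.0000000001E10");

    // Precision rounds the mantissa, and zero precision drops the point.
    assert_eq!(format!("{:.1e}", i8::MIN), "-1.3e2");
    assert_eq!(format!("{:.0e}", 1), "1e0");
}
```
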
#[test]
@ -86,6 +162,8 @@ fn test_format_int_zero() {
assert_eq!(format!("{:o}", 0), "0");
assert_eq!(format!("{:x}", 0), "0");
assert_eq!(format!("{:X}", 0), "0");
assert_eq!(format!("{:e}", 0), "0e0");
assert_eq!(format!("{:E}", 0), "0E0");
assert_eq!(format!("{}", 0u32), "0");
assert_eq!(format!("{:?}", 0u32), "0");
@ -93,6 +171,8 @@ fn test_format_int_zero() {
assert_eq!(format!("{:o}", 0u32), "0");
assert_eq!(format!("{:x}", 0u32), "0");
assert_eq!(format!("{:X}", 0u32), "0");
assert_eq!(format!("{:e}", 0u32), "0e0");
assert_eq!(format!("{:E}", 0u32), "0E0");
}
#[test]

View File

@ -500,94 +500,91 @@ where
{
use core::num::flt2dec::Sign::*;
fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize, upper: bool) -> String
fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize) -> String
where
T: DecodableFloat,
F: FnMut(&Decoded, &mut [u8]) -> (usize, i16),
{
to_string_with_parts(|buf, parts| {
to_shortest_str(|d, b| f(d, b), v, sign, frac_digits, upper, buf, parts)
to_shortest_str(|d, b| f(d, b), v, sign, frac_digits, buf, parts)
})
}
let f = &mut f_;
assert_eq!(to_string(f, 0.0, Minus, 0, false), "0");
assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0");
assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0");
assert_eq!(to_string(f, -0.0, Minus, 0, false), "0");
assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0");
assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0, false), "-0");
assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0");
assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0");
assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0");
assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000");
assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000");
assert_eq!(to_string(f, 0.0, Minus, 0), "0");
assert_eq!(to_string(f, 0.0, MinusRaw, 0), "0");
assert_eq!(to_string(f, 0.0, MinusPlus, 0), "+0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0), "+0");
assert_eq!(to_string(f, -0.0, Minus, 0), "0");
assert_eq!(to_string(f, -0.0, MinusRaw, 0), "-0");
assert_eq!(to_string(f, -0.0, MinusPlus, 0), "+0");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0), "-0");
assert_eq!(to_string(f, 0.0, Minus, 1), "0.0");
assert_eq!(to_string(f, 0.0, MinusRaw, 1), "0.0");
assert_eq!(to_string(f, 0.0, MinusPlus, 1), "+0.0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1), "+0.0");
assert_eq!(to_string(f, -0.0, Minus, 8), "0.00000000");
assert_eq!(to_string(f, -0.0, MinusRaw, 8), "-0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlus, 8), "+0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8), "-0.00000000");
assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0, false), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 0, true), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 0, false), "+inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 0, true), "+inf");
assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0, false), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1, true), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64, true), "NaN");
assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0, false), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1, true), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8, false), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf");
assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 0), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 0), "+inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 0), "+inf");
assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64), "NaN");
assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64), "-inf");
assert_eq!(to_string(f, 3.14, Minus, 0, false), "3.14");
assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3.14");
assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3.14");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3.14");
assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3.14");
assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3.14");
assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3.14");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0, false), "-3.14");
assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.14");
assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14");
assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400");
assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000");
assert_eq!(to_string(f, 3.14, Minus, 0), "3.14");
assert_eq!(to_string(f, 3.14, MinusRaw, 0), "3.14");
assert_eq!(to_string(f, 3.14, MinusPlus, 0), "+3.14");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0), "+3.14");
assert_eq!(to_string(f, -3.14, Minus, 0), "-3.14");
assert_eq!(to_string(f, -3.14, MinusRaw, 0), "-3.14");
assert_eq!(to_string(f, -3.14, MinusPlus, 0), "-3.14");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0), "-3.14");
assert_eq!(to_string(f, 3.14, Minus, 1), "3.14");
assert_eq!(to_string(f, 3.14, MinusRaw, 2), "3.14");
assert_eq!(to_string(f, 3.14, MinusPlus, 3), "+3.140");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4), "+3.1400");
assert_eq!(to_string(f, -3.14, Minus, 8), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusRaw, 8), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlus, 8), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8), "-3.14000000");
assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 12, false), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 13, false), "0.0000000000750");
assert_eq!(to_string(f, 7.5e-11, Minus, 0), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 3), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 12), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 13), "0.0000000000750");
assert_eq!(to_string(f, 1.9971e20, Minus, 0, false), "199710000000000000000");
assert_eq!(to_string(f, 1.9971e20, Minus, 1, false), "199710000000000000000.0");
assert_eq!(to_string(f, 1.9971e20, Minus, 8, false), "199710000000000000000.00000000");
assert_eq!(to_string(f, 1.9971e20, Minus, 0), "199710000000000000000");
assert_eq!(to_string(f, 1.9971e20, Minus, 1), "199710000000000000000.0");
assert_eq!(to_string(f, 1.9971e20, Minus, 8), "199710000000000000000.00000000");
assert_eq!(to_string(f, f32::MAX, Minus, 0, false), format!("34028235{:0>31}", ""));
assert_eq!(to_string(f, f32::MAX, Minus, 1, false), format!("34028235{:0>31}.0", ""));
assert_eq!(to_string(f, f32::MAX, Minus, 8, false), format!("34028235{:0>31}.00000000", ""));
assert_eq!(to_string(f, f32::MAX, Minus, 0), format!("34028235{:0>31}", ""));
assert_eq!(to_string(f, f32::MAX, Minus, 1), format!("34028235{:0>31}.0", ""));
assert_eq!(to_string(f, f32::MAX, Minus, 8), format!("34028235{:0>31}.00000000", ""));
let minf32 = ldexp_f32(1.0, -149);
assert_eq!(to_string(f, minf32, Minus, 0, false), format!("0.{:0>44}1", ""));
assert_eq!(to_string(f, minf32, Minus, 45, false), format!("0.{:0>44}1", ""));
assert_eq!(to_string(f, minf32, Minus, 46, false), format!("0.{:0>44}10", ""));
assert_eq!(to_string(f, minf32, Minus, 0), format!("0.{:0>44}1", ""));
assert_eq!(to_string(f, minf32, Minus, 45), format!("0.{:0>44}1", ""));
assert_eq!(to_string(f, minf32, Minus, 46), format!("0.{:0>44}10", ""));
assert_eq!(to_string(f, f64::MAX, Minus, 0, false), format!("17976931348623157{:0>292}", ""));
assert_eq!(to_string(f, f64::MAX, Minus, 1, false), format!("17976931348623157{:0>292}.0", ""));
assert_eq!(
to_string(f, f64::MAX, Minus, 8, false),
format!("17976931348623157{:0>292}.00000000", "")
);
assert_eq!(to_string(f, f64::MAX, Minus, 0), format!("17976931348623157{:0>292}", ""));
assert_eq!(to_string(f, f64::MAX, Minus, 1), format!("17976931348623157{:0>292}.0", ""));
assert_eq!(to_string(f, f64::MAX, Minus, 8), format!("17976931348623157{:0>292}.00000000", ""));
let minf64 = ldexp_f64(1.0, -1074);
assert_eq!(to_string(f, minf64, Minus, 0, false), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, 324, false), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, 325, false), format!("0.{:0>323}50", ""));
assert_eq!(to_string(f, minf64, Minus, 0), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, 324), format!("0.{:0>323}5", ""));
assert_eq!(to_string(f, minf64, Minus, 325), format!("0.{:0>323}50", ""));
if cfg!(miri) {
// Miri is too slow
@ -595,7 +592,7 @@ where
}
// very large output
assert_eq!(to_string(f, 1.1, Minus, 80000, false), format!("1.1{:0>79999}", ""));
assert_eq!(to_string(f, 1.1, Minus, 80000), format!("1.1{:0>79999}", ""));
}
pub fn to_shortest_exp_str_test<F>(mut f_: F)
@ -996,166 +993,157 @@ where
{
use core::num::flt2dec::Sign::*;
fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize, upper: bool) -> String
fn to_string<T, F>(f: &mut F, v: T, sign: Sign, frac_digits: usize) -> String
where
T: DecodableFloat,
F: FnMut(&Decoded, &mut [u8], i16) -> (usize, i16),
{
to_string_with_parts(|buf, parts| {
to_exact_fixed_str(|d, b, l| f(d, b, l), v, sign, frac_digits, upper, buf, parts)
to_exact_fixed_str(|d, b, l| f(d, b, l), v, sign, frac_digits, buf, parts)
})
}
let f = &mut f_;
assert_eq!(to_string(f, 0.0, Minus, 0, false), "0");
assert_eq!(to_string(f, 0.0, MinusRaw, 0, false), "0");
assert_eq!(to_string(f, 0.0, MinusPlus, 0, false), "+0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0, false), "+0");
assert_eq!(to_string(f, -0.0, Minus, 0, false), "0");
assert_eq!(to_string(f, -0.0, MinusRaw, 0, false), "-0");
assert_eq!(to_string(f, -0.0, MinusPlus, 0, false), "+0");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0, false), "-0");
assert_eq!(to_string(f, 0.0, Minus, 1, true), "0.0");
assert_eq!(to_string(f, 0.0, MinusRaw, 1, true), "0.0");
assert_eq!(to_string(f, 0.0, MinusPlus, 1, true), "+0.0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1, true), "+0.0");
assert_eq!(to_string(f, -0.0, Minus, 8, true), "0.00000000");
assert_eq!(to_string(f, -0.0, MinusRaw, 8, true), "-0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlus, 8, true), "+0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8, true), "-0.00000000");
assert_eq!(to_string(f, 0.0, Minus, 0), "0");
assert_eq!(to_string(f, 0.0, MinusRaw, 0), "0");
assert_eq!(to_string(f, 0.0, MinusPlus, 0), "+0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 0), "+0");
assert_eq!(to_string(f, -0.0, Minus, 0), "0");
assert_eq!(to_string(f, -0.0, MinusRaw, 0), "-0");
assert_eq!(to_string(f, -0.0, MinusPlus, 0), "+0");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 0), "-0");
assert_eq!(to_string(f, 0.0, Minus, 1), "0.0");
assert_eq!(to_string(f, 0.0, MinusRaw, 1), "0.0");
assert_eq!(to_string(f, 0.0, MinusPlus, 1), "+0.0");
assert_eq!(to_string(f, 0.0, MinusPlusRaw, 1), "+0.0");
assert_eq!(to_string(f, -0.0, Minus, 8), "0.00000000");
assert_eq!(to_string(f, -0.0, MinusRaw, 8), "-0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlus, 8), "+0.00000000");
assert_eq!(to_string(f, -0.0, MinusPlusRaw, 8), "-0.00000000");
assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0, false), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 1, true), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 8, false), "+inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 64, true), "+inf");
assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0, false), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1, true), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8, false), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64, true), "NaN");
assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0, false), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1, true), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8, false), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64, true), "-inf");
assert_eq!(to_string(f, 1.0 / 0.0, Minus, 0), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusRaw, 1), "inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlus, 8), "+inf");
assert_eq!(to_string(f, 1.0 / 0.0, MinusPlusRaw, 64), "+inf");
assert_eq!(to_string(f, 0.0 / 0.0, Minus, 0), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusRaw, 1), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlus, 8), "NaN");
assert_eq!(to_string(f, 0.0 / 0.0, MinusPlusRaw, 64), "NaN");
assert_eq!(to_string(f, -1.0 / 0.0, Minus, 0), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusRaw, 1), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlus, 8), "-inf");
assert_eq!(to_string(f, -1.0 / 0.0, MinusPlusRaw, 64), "-inf");
assert_eq!(to_string(f, 3.14, Minus, 0, false), "3");
assert_eq!(to_string(f, 3.14, MinusRaw, 0, false), "3");
assert_eq!(to_string(f, 3.14, MinusPlus, 0, false), "+3");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0, false), "+3");
assert_eq!(to_string(f, -3.14, Minus, 0, false), "-3");
assert_eq!(to_string(f, -3.14, MinusRaw, 0, false), "-3");
assert_eq!(to_string(f, -3.14, MinusPlus, 0, false), "-3");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0, false), "-3");
assert_eq!(to_string(f, 3.14, Minus, 1, true), "3.1");
assert_eq!(to_string(f, 3.14, MinusRaw, 2, true), "3.14");
assert_eq!(to_string(f, 3.14, MinusPlus, 3, true), "+3.140");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4, true), "+3.1400");
assert_eq!(to_string(f, -3.14, Minus, 8, true), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusRaw, 8, true), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlus, 8, true), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8, true), "-3.14000000");
assert_eq!(to_string(f, 3.14, Minus, 0), "3");
assert_eq!(to_string(f, 3.14, MinusRaw, 0), "3");
assert_eq!(to_string(f, 3.14, MinusPlus, 0), "+3");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 0), "+3");
assert_eq!(to_string(f, -3.14, Minus, 0), "-3");
assert_eq!(to_string(f, -3.14, MinusRaw, 0), "-3");
assert_eq!(to_string(f, -3.14, MinusPlus, 0), "-3");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 0), "-3");
assert_eq!(to_string(f, 3.14, Minus, 1), "3.1");
assert_eq!(to_string(f, 3.14, MinusRaw, 2), "3.14");
assert_eq!(to_string(f, 3.14, MinusPlus, 3), "+3.140");
assert_eq!(to_string(f, 3.14, MinusPlusRaw, 4), "+3.1400");
assert_eq!(to_string(f, -3.14, Minus, 8), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusRaw, 8), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlus, 8), "-3.14000000");
assert_eq!(to_string(f, -3.14, MinusPlusRaw, 8), "-3.14000000");
assert_eq!(to_string(f, 0.195, Minus, 0, false), "0");
assert_eq!(to_string(f, 0.195, MinusRaw, 0, false), "0");
assert_eq!(to_string(f, 0.195, MinusPlus, 0, false), "+0");
assert_eq!(to_string(f, 0.195, MinusPlusRaw, 0, false), "+0");
assert_eq!(to_string(f, -0.195, Minus, 0, false), "-0");
assert_eq!(to_string(f, -0.195, MinusRaw, 0, false), "-0");
assert_eq!(to_string(f, -0.195, MinusPlus, 0, false), "-0");
assert_eq!(to_string(f, -0.195, MinusPlusRaw, 0, false), "-0");
assert_eq!(to_string(f, 0.195, Minus, 1, true), "0.2");
assert_eq!(to_string(f, 0.195, MinusRaw, 2, true), "0.20");
assert_eq!(to_string(f, 0.195, MinusPlus, 3, true), "+0.195");
assert_eq!(to_string(f, 0.195, MinusPlusRaw, 4, true), "+0.1950");
assert_eq!(to_string(f, -0.195, Minus, 5, true), "-0.19500");
assert_eq!(to_string(f, -0.195, MinusRaw, 6, true), "-0.195000");
assert_eq!(to_string(f, -0.195, MinusPlus, 7, true), "-0.1950000");
assert_eq!(to_string(f, -0.195, MinusPlusRaw, 8, true), "-0.19500000");
assert_eq!(to_string(f, 0.195, Minus, 0), "0");
assert_eq!(to_string(f, 0.195, MinusRaw, 0), "0");
assert_eq!(to_string(f, 0.195, MinusPlus, 0), "+0");
assert_eq!(to_string(f, 0.195, MinusPlusRaw, 0), "+0");
assert_eq!(to_string(f, -0.195, Minus, 0), "-0");
assert_eq!(to_string(f, -0.195, MinusRaw, 0), "-0");
assert_eq!(to_string(f, -0.195, MinusPlus, 0), "-0");
assert_eq!(to_string(f, -0.195, MinusPlusRaw, 0), "-0");
assert_eq!(to_string(f, 0.195, Minus, 1), "0.2");
assert_eq!(to_string(f, 0.195, MinusRaw, 2), "0.20");
assert_eq!(to_string(f, 0.195, MinusPlus, 3), "+0.195");
assert_eq!(to_string(f, 0.195, MinusPlusRaw, 4), "+0.1950");
assert_eq!(to_string(f, -0.195, Minus, 5), "-0.19500");
assert_eq!(to_string(f, -0.195, MinusRaw, 6), "-0.195000");
assert_eq!(to_string(f, -0.195, MinusPlus, 7), "-0.1950000");
assert_eq!(to_string(f, -0.195, MinusPlusRaw, 8), "-0.19500000");
assert_eq!(to_string(f, 999.5, Minus, 0, false), "1000");
assert_eq!(to_string(f, 999.5, Minus, 1, false), "999.5");
assert_eq!(to_string(f, 999.5, Minus, 2, false), "999.50");
assert_eq!(to_string(f, 999.5, Minus, 3, false), "999.500");
assert_eq!(to_string(f, 999.5, Minus, 30, false), "999.500000000000000000000000000000");
assert_eq!(to_string(f, 999.5, Minus, 0), "1000");
assert_eq!(to_string(f, 999.5, Minus, 1), "999.5");
assert_eq!(to_string(f, 999.5, Minus, 2), "999.50");
assert_eq!(to_string(f, 999.5, Minus, 3), "999.500");
assert_eq!(to_string(f, 999.5, Minus, 30), "999.500000000000000000000000000000");
assert_eq!(to_string(f, 0.5, Minus, 0, false), "1");
assert_eq!(to_string(f, 0.5, Minus, 1, false), "0.5");
assert_eq!(to_string(f, 0.5, Minus, 2, false), "0.50");
assert_eq!(to_string(f, 0.5, Minus, 3, false), "0.500");
assert_eq!(to_string(f, 0.5, Minus, 0), "1");
assert_eq!(to_string(f, 0.5, Minus, 1), "0.5");
assert_eq!(to_string(f, 0.5, Minus, 2), "0.50");
assert_eq!(to_string(f, 0.5, Minus, 3), "0.500");
assert_eq!(to_string(f, 0.95, Minus, 0, false), "1");
assert_eq!(to_string(f, 0.95, Minus, 1, false), "0.9"); // because it really is less than 0.95
assert_eq!(to_string(f, 0.95, Minus, 2, false), "0.95");
assert_eq!(to_string(f, 0.95, Minus, 3, false), "0.950");
assert_eq!(to_string(f, 0.95, Minus, 10, false), "0.9500000000");
assert_eq!(to_string(f, 0.95, Minus, 30, false), "0.949999999999999955591079014994");
assert_eq!(to_string(f, 0.95, Minus, 0), "1");
assert_eq!(to_string(f, 0.95, Minus, 1), "0.9"); // because it really is less than 0.95
assert_eq!(to_string(f, 0.95, Minus, 2), "0.95");
assert_eq!(to_string(f, 0.95, Minus, 3), "0.950");
assert_eq!(to_string(f, 0.95, Minus, 10), "0.9500000000");
assert_eq!(to_string(f, 0.95, Minus, 30), "0.949999999999999955591079014994");
assert_eq!(to_string(f, 0.095, Minus, 0, false), "0");
assert_eq!(to_string(f, 0.095, Minus, 1, false), "0.1");
assert_eq!(to_string(f, 0.095, Minus, 2, false), "0.10");
assert_eq!(to_string(f, 0.095, Minus, 3, false), "0.095");
assert_eq!(to_string(f, 0.095, Minus, 4, false), "0.0950");
assert_eq!(to_string(f, 0.095, Minus, 10, false), "0.0950000000");
assert_eq!(to_string(f, 0.095, Minus, 30, false), "0.095000000000000001110223024625");
assert_eq!(to_string(f, 0.095, Minus, 0), "0");
assert_eq!(to_string(f, 0.095, Minus, 1), "0.1");
assert_eq!(to_string(f, 0.095, Minus, 2), "0.10");
assert_eq!(to_string(f, 0.095, Minus, 3), "0.095");
assert_eq!(to_string(f, 0.095, Minus, 4), "0.0950");
assert_eq!(to_string(f, 0.095, Minus, 10), "0.0950000000");
assert_eq!(to_string(f, 0.095, Minus, 30), "0.095000000000000001110223024625");
assert_eq!(to_string(f, 0.0095, Minus, 0, false), "0");
assert_eq!(to_string(f, 0.0095, Minus, 1, false), "0.0");
assert_eq!(to_string(f, 0.0095, Minus, 2, false), "0.01");
assert_eq!(to_string(f, 0.0095, Minus, 3, false), "0.009"); // really is less than 0.0095
assert_eq!(to_string(f, 0.0095, Minus, 4, false), "0.0095");
assert_eq!(to_string(f, 0.0095, Minus, 5, false), "0.00950");
assert_eq!(to_string(f, 0.0095, Minus, 10, false), "0.0095000000");
assert_eq!(to_string(f, 0.0095, Minus, 30, false), "0.009499999999999999764077607267");
assert_eq!(to_string(f, 0.0095, Minus, 0), "0");
assert_eq!(to_string(f, 0.0095, Minus, 1), "0.0");
assert_eq!(to_string(f, 0.0095, Minus, 2), "0.01");
assert_eq!(to_string(f, 0.0095, Minus, 3), "0.009"); // really is less than 0.0095
assert_eq!(to_string(f, 0.0095, Minus, 4), "0.0095");
assert_eq!(to_string(f, 0.0095, Minus, 5), "0.00950");
assert_eq!(to_string(f, 0.0095, Minus, 10), "0.0095000000");
assert_eq!(to_string(f, 0.0095, Minus, 30), "0.009499999999999999764077607267");
assert_eq!(to_string(f, 7.5e-11, Minus, 0, false), "0");
assert_eq!(to_string(f, 7.5e-11, Minus, 3, false), "0.000");
assert_eq!(to_string(f, 7.5e-11, Minus, 10, false), "0.0000000001");
assert_eq!(to_string(f, 7.5e-11, Minus, 11, false), "0.00000000007"); // ditto
assert_eq!(to_string(f, 7.5e-11, Minus, 12, false), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 13, false), "0.0000000000750");
assert_eq!(to_string(f, 7.5e-11, Minus, 20, false), "0.00000000007500000000");
assert_eq!(to_string(f, 7.5e-11, Minus, 30, false), "0.000000000074999999999999999501");
assert_eq!(to_string(f, 7.5e-11, Minus, 0), "0");
assert_eq!(to_string(f, 7.5e-11, Minus, 3), "0.000");
assert_eq!(to_string(f, 7.5e-11, Minus, 10), "0.0000000001");
assert_eq!(to_string(f, 7.5e-11, Minus, 11), "0.00000000007"); // ditto
assert_eq!(to_string(f, 7.5e-11, Minus, 12), "0.000000000075");
assert_eq!(to_string(f, 7.5e-11, Minus, 13), "0.0000000000750");
assert_eq!(to_string(f, 7.5e-11, Minus, 20), "0.00000000007500000000");
assert_eq!(to_string(f, 7.5e-11, Minus, 30), "0.000000000074999999999999999501");
assert_eq!(to_string(f, 1.0e25, Minus, 0, false), "10000000000000000905969664");
assert_eq!(to_string(f, 1.0e25, Minus, 1, false), "10000000000000000905969664.0");
assert_eq!(to_string(f, 1.0e25, Minus, 3, false), "10000000000000000905969664.000");
assert_eq!(to_string(f, 1.0e25, Minus, 0), "10000000000000000905969664");
assert_eq!(to_string(f, 1.0e25, Minus, 1), "10000000000000000905969664.0");
assert_eq!(to_string(f, 1.0e25, Minus, 3), "10000000000000000905969664.000");
assert_eq!(to_string(f, 1.0e-6, Minus, 0, false), "0");
assert_eq!(to_string(f, 1.0e-6, Minus, 3, false), "0.000");
assert_eq!(to_string(f, 1.0e-6, Minus, 6, false), "0.000001");
assert_eq!(to_string(f, 1.0e-6, Minus, 9, false), "0.000001000");
assert_eq!(to_string(f, 1.0e-6, Minus, 12, false), "0.000001000000");
assert_eq!(to_string(f, 1.0e-6, Minus, 22, false), "0.0000010000000000000000");
assert_eq!(to_string(f, 1.0e-6, Minus, 23, false), "0.00000099999999999999995");
assert_eq!(to_string(f, 1.0e-6, Minus, 24, false), "0.000000999999999999999955");
assert_eq!(to_string(f, 1.0e-6, Minus, 25, false), "0.0000009999999999999999547");
assert_eq!(to_string(f, 1.0e-6, Minus, 35, false), "0.00000099999999999999995474811182589");
assert_eq!(to_string(f, 1.0e-6, Minus, 0), "0");
assert_eq!(to_string(f, 1.0e-6, Minus, 3), "0.000");
assert_eq!(to_string(f, 1.0e-6, Minus, 6), "0.000001");
assert_eq!(to_string(f, 1.0e-6, Minus, 9), "0.000001000");
assert_eq!(to_string(f, 1.0e-6, Minus, 12), "0.000001000000");
assert_eq!(to_string(f, 1.0e-6, Minus, 22), "0.0000010000000000000000");
assert_eq!(to_string(f, 1.0e-6, Minus, 23), "0.00000099999999999999995");
assert_eq!(to_string(f, 1.0e-6, Minus, 24), "0.000000999999999999999955");
assert_eq!(to_string(f, 1.0e-6, Minus, 25), "0.0000009999999999999999547");
assert_eq!(to_string(f, 1.0e-6, Minus, 35), "0.00000099999999999999995474811182589");
assert_eq!(to_string(f, 1.0e-6, Minus, 45), "0.000000999999999999999954748111825886258685614");
assert_eq!(
to_string(f, 1.0e-6, Minus, 45, false),
"0.000000999999999999999954748111825886258685614"
);
assert_eq!(
to_string(f, 1.0e-6, Minus, 55, false),
to_string(f, 1.0e-6, Minus, 55),
"0.0000009999999999999999547481118258862586856139387236908"
);
assert_eq!(
to_string(f, 1.0e-6, Minus, 65, false),
to_string(f, 1.0e-6, Minus, 65),
"0.00000099999999999999995474811182588625868561393872369080781936646"
);
assert_eq!(
to_string(f, 1.0e-6, Minus, 75, false),
to_string(f, 1.0e-6, Minus, 75),
"0.000000999999999999999954748111825886258685613938723690807819366455078125000"
);
assert_eq!(to_string(f, f32::MAX, Minus, 0, false), "340282346638528859811704183484516925440");
assert_eq!(
to_string(f, f32::MAX, Minus, 1, false),
"340282346638528859811704183484516925440.0"
);
assert_eq!(
to_string(f, f32::MAX, Minus, 2, false),
"340282346638528859811704183484516925440.00"
);
assert_eq!(to_string(f, f32::MAX, Minus, 0), "340282346638528859811704183484516925440");
assert_eq!(to_string(f, f32::MAX, Minus, 1), "340282346638528859811704183484516925440.0");
assert_eq!(to_string(f, f32::MAX, Minus, 2), "340282346638528859811704183484516925440.00");
if cfg!(miri) {
// Miri is too slow
@ -1163,24 +1151,24 @@ where
}
let minf32 = ldexp_f32(1.0, -149);
assert_eq!(to_string(f, minf32, Minus, 0, false), "0");
assert_eq!(to_string(f, minf32, Minus, 1, false), "0.0");
assert_eq!(to_string(f, minf32, Minus, 2, false), "0.00");
assert_eq!(to_string(f, minf32, Minus, 4, false), "0.0000");
assert_eq!(to_string(f, minf32, Minus, 8, false), "0.00000000");
assert_eq!(to_string(f, minf32, Minus, 16, false), "0.0000000000000000");
assert_eq!(to_string(f, minf32, Minus, 32, false), "0.00000000000000000000000000000000");
assert_eq!(to_string(f, minf32, Minus, 0), "0");
assert_eq!(to_string(f, minf32, Minus, 1), "0.0");
assert_eq!(to_string(f, minf32, Minus, 2), "0.00");
assert_eq!(to_string(f, minf32, Minus, 4), "0.0000");
assert_eq!(to_string(f, minf32, Minus, 8), "0.00000000");
assert_eq!(to_string(f, minf32, Minus, 16), "0.0000000000000000");
assert_eq!(to_string(f, minf32, Minus, 32), "0.00000000000000000000000000000000");
assert_eq!(
to_string(f, minf32, Minus, 64, false),
to_string(f, minf32, Minus, 64),
"0.0000000000000000000000000000000000000000000014012984643248170709"
);
assert_eq!(
to_string(f, minf32, Minus, 128, false),
to_string(f, minf32, Minus, 128),
"0.0000000000000000000000000000000000000000000014012984643248170709\
2372958328991613128026194187651577175706828388979108268586060149"
);
assert_eq!(
to_string(f, minf32, Minus, 256, false),
to_string(f, minf32, Minus, 256),
"0.0000000000000000000000000000000000000000000014012984643248170709\
2372958328991613128026194187651577175706828388979108268586060148\
6638188362121582031250000000000000000000000000000000000000000000\
@ -1188,7 +1176,7 @@ where
);
assert_eq!(
to_string(f, f64::MAX, Minus, 0, false),
to_string(f, f64::MAX, Minus, 0),
"1797693134862315708145274237317043567980705675258449965989174768\
0315726078002853876058955863276687817154045895351438246423432132\
6889464182768467546703537516986049910576551282076245490090389328\
@ -1196,7 +1184,7 @@ where
26204144723168738177180919299881250404026184124858368"
);
assert_eq!(
to_string(f, f64::MAX, Minus, 10, false),
to_string(f, f64::MAX, Minus, 10),
"1797693134862315708145274237317043567980705675258449965989174768\
0315726078002853876058955863276687817154045895351438246423432132\
6889464182768467546703537516986049910576551282076245490090389328\
@ -1205,16 +1193,16 @@ where
);
let minf64 = ldexp_f64(1.0, -1074);
assert_eq!(to_string(f, minf64, Minus, 0, false), "0");
assert_eq!(to_string(f, minf64, Minus, 1, false), "0.0");
assert_eq!(to_string(f, minf64, Minus, 10, false), "0.0000000000");
assert_eq!(to_string(f, minf64, Minus, 0), "0");
assert_eq!(to_string(f, minf64, Minus, 1), "0.0");
assert_eq!(to_string(f, minf64, Minus, 10), "0.0000000000");
assert_eq!(
to_string(f, minf64, Minus, 100, false),
to_string(f, minf64, Minus, 100),
"0.0000000000000000000000000000000000000000000000000000000000000000\
000000000000000000000000000000000000"
);
assert_eq!(
to_string(f, minf64, Minus, 1000, false),
to_string(f, minf64, Minus, 1000),
"0.0000000000000000000000000000000000000000000000000000000000000000\
0000000000000000000000000000000000000000000000000000000000000000\
0000000000000000000000000000000000000000000000000000000000000000\
@ -1234,15 +1222,15 @@ where
);
// very large output
assert_eq!(to_string(f, 0.0, Minus, 80000, false), format!("0.{:0>80000}", ""));
assert_eq!(to_string(f, 1.0e1, Minus, 80000, false), format!("10.{:0>80000}", ""));
assert_eq!(to_string(f, 1.0e0, Minus, 80000, false), format!("1.{:0>80000}", ""));
assert_eq!(to_string(f, 0.0, Minus, 80000), format!("0.{:0>80000}", ""));
assert_eq!(to_string(f, 1.0e1, Minus, 80000), format!("10.{:0>80000}", ""));
assert_eq!(to_string(f, 1.0e0, Minus, 80000), format!("1.{:0>80000}", ""));
assert_eq!(
to_string(f, 1.0e-1, Minus, 80000, false),
to_string(f, 1.0e-1, Minus, 80000),
format!("0.1000000000000000055511151231257827021181583404541015625{:0>79945}", "")
);
assert_eq!(
to_string(f, 1.0e-20, Minus, 80000, false),
to_string(f, 1.0e-20, Minus, 80000),
format!(
"0.0000000000000000000099999999999999994515327145420957165172950370\
2787392447107715776066783064379706047475337982177734375{:0>79881}",

View File

@ -11,6 +11,12 @@ fn creation() {
assert_eq!(Duration::from_millis(4000), Duration::new(4, 0));
}
#[test]
#[should_panic]
fn new_overflow() {
let _ = Duration::new(::core::u64::MAX, 1_000_000_000);
}
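
As a companion to the new test, a brief sketch (not in the diff) of the carry behaviour that makes the panic above necessary:

```
use std::time::Duration;

fn main() {
    // Nanoseconds of 1_000_000_000 or more carry over into whole seconds...
    assert_eq!(Duration::new(4, 1_500_000_000), Duration::new(5, 500_000_000));

    // ...so at `u64::MAX` seconds there is nowhere left to carry to, and
    // `Duration::new` panics, which is what `new_overflow` asserts.
    let overflowed = std::panic::catch_unwind(|| Duration::new(u64::MAX, 1_000_000_000));
    assert!(overflowed.is_err());
}
```
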
#[test]
fn secs() {
assert_eq!(Duration::new(0, 0).as_secs(), 0);

View File

@ -597,6 +597,8 @@ pub enum RenderOption {
NoNodeLabels,
NoEdgeStyles,
NoNodeStyles,
Monospace,
}
/// Returns vec holding all the default render options.
@ -626,6 +628,14 @@ where
W: Write,
{
writeln!(w, "digraph {} {{", g.graph_id().as_slice())?;
// Global graph properties
if options.contains(&RenderOption::Monospace) {
writeln!(w, r#" graph[fontname="monospace"];"#)?;
writeln!(w, r#" node[fontname="monospace"];"#)?;
writeln!(w, r#" edge[fontname="monospace"];"#)?;
}
for n in g.nodes().iter() {
write!(w, " ")?;
let id = g.node_id(n);
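
A self-contained sketch (the `write_monospace_header` helper is hypothetical, not part of libgraphviz) of the DOT header the new `Monospace` option produces:

```
use std::io::Write;

// Hypothetical helper mirroring the new branch: emit the `digraph` header
// followed by global monospace font attributes for graph, nodes and edges.
fn write_monospace_header(w: &mut impl Write, graph_id: &str) -> std::io::Result<()> {
    writeln!(w, "digraph {} {{", graph_id)?;
    writeln!(w, r#"    graph[fontname="monospace"];"#)?;
    writeln!(w, r#"    node[fontname="monospace"];"#)?;
    writeln!(w, r#"    edge[fontname="monospace"];"#)?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    let mut out = Vec::new();
    write_monospace_header(&mut out, "example")?;
    print!("{}", String::from_utf8_lossy(&out));
    Ok(())
}
```
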

View File

@ -12,8 +12,6 @@ doctest = false
[dependencies]
arena = { path = "../libarena" }
bitflags = "1.2.1"
fmt_macros = { path = "../libfmt_macros" }
graphviz = { path = "../libgraphviz" }
jobserver = "0.1"
scoped-tls = "1.0"
log = { version = "0.4", features = ["release_max_level_info", "std"] }

View File

@ -35,7 +35,8 @@ macro_rules! arena_types {
rustc::mir::Promoted,
rustc::mir::BodyAndCache<$tcx>
>,
[] tables: rustc::ty::TypeckTables<$tcx>,
[decode] tables: rustc::ty::TypeckTables<$tcx>,
[decode] borrowck_result: rustc::mir::BorrowCheckResult<$tcx>,
[] const_allocs: rustc::mir::interpret::Allocation,
[] vtable_method: Option<(
rustc_hir::def_id::DefId,
@@ -47,22 +48,23 @@ macro_rules! arena_types {
[] item_local_set: rustc_hir::ItemLocalSet,
[decode] mir_const_qualif: rustc_index::bit_set::BitSet<rustc::mir::Local>,
[] trait_impls_of: rustc::ty::trait_def::TraitImpls,
[] associated_items: rustc::ty::AssociatedItems,
[] dropck_outlives:
rustc::infer::canonical::Canonical<'tcx,
rustc::infer::canonical::QueryResponse<'tcx,
rustc::traits::query::dropck_outlives::DropckOutlivesResult<'tcx>
rustc::traits::query::DropckOutlivesResult<'tcx>
>
>,
[] normalize_projection_ty:
rustc::infer::canonical::Canonical<'tcx,
rustc::infer::canonical::QueryResponse<'tcx,
rustc::traits::query::normalize::NormalizationResult<'tcx>
rustc::traits::query::NormalizationResult<'tcx>
>
>,
[] implied_outlives_bounds:
rustc::infer::canonical::Canonical<'tcx,
rustc::infer::canonical::QueryResponse<'tcx,
Vec<rustc::traits::query::outlives_bounds::OutlivesBound<'tcx>>
Vec<rustc::traits::query::OutlivesBound<'tcx>>
>
>,
[] type_op_subtype:

View File

@@ -76,10 +76,6 @@ macro_rules! erase {
($x:tt) => {{}};
}
macro_rules! replace {
($x:tt with $($y:tt)*) => ($($y)*)
}
macro_rules! is_anon_attr {
(anon) => {
true
@@ -99,19 +95,18 @@ macro_rules! is_eval_always_attr {
}
macro_rules! contains_anon_attr {
($($attr:ident),*) => ({$(is_anon_attr!($attr) | )* false});
($($attr:ident $(($($attr_args:tt)*))* ),*) => ({$(is_anon_attr!($attr) | )* false});
}
macro_rules! contains_eval_always_attr {
($($attr:ident),*) => ({$(is_eval_always_attr!($attr) | )* false});
($($attr:ident $(($($attr_args:tt)*))* ),*) => ({$(is_eval_always_attr!($attr) | )* false});
}
macro_rules! define_dep_nodes {
(<$tcx:tt>
$(
[$($attr:ident),* ]
[$($attrs:tt)*]
$variant:ident $(( $tuple_arg_ty:ty $(,)? ))*
$({ $($struct_arg_name:ident : $struct_arg_ty:ty),* })*
,)*
) => (
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash,
@@ -126,7 +121,7 @@ macro_rules! define_dep_nodes {
match *self {
$(
DepKind :: $variant => {
if contains_anon_attr!($($attr),*) {
if contains_anon_attr!($($attrs)*) {
return false;
}
@@ -136,13 +131,6 @@ macro_rules! define_dep_nodes {
::CAN_RECONSTRUCT_QUERY_KEY;
})*
// struct args
$({
return <( $($struct_arg_ty,)* ) as DepNodeParams>
::CAN_RECONSTRUCT_QUERY_KEY;
})*
true
}
)*
@@ -152,7 +140,7 @@ macro_rules! define_dep_nodes {
pub fn is_anon(&self) -> bool {
match *self {
$(
DepKind :: $variant => { contains_anon_attr!($($attr),*) }
DepKind :: $variant => { contains_anon_attr!($($attrs)*) }
)*
}
}
@@ -160,7 +148,7 @@ macro_rules! define_dep_nodes {
pub fn is_eval_always(&self) -> bool {
match *self {
$(
DepKind :: $variant => { contains_eval_always_attr!($($attr), *) }
DepKind :: $variant => { contains_eval_always_attr!($($attrs)*) }
)*
}
}
@@ -176,12 +164,6 @@ macro_rules! define_dep_nodes {
return true;
})*
// struct args
$({
$(erase!($struct_arg_name);)*
return true;
})*
false
}
)*
@@ -189,36 +171,17 @@ macro_rules! define_dep_nodes {
}
}
pub enum DepConstructor<$tcx> {
pub struct DepConstructor;
impl DepConstructor {
$(
$variant $(( $tuple_arg_ty ))*
$({ $($struct_arg_name : $struct_arg_ty),* })*
),*
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash,
RustcEncodable, RustcDecodable)]
pub struct DepNode {
pub kind: DepKind,
pub hash: Fingerprint,
}
impl DepNode {
#[inline(always)]
#[allow(unreachable_code, non_snake_case)]
pub fn new<'tcx>(tcx: TyCtxt<'tcx>,
dep: DepConstructor<'tcx>)
-> DepNode
{
match dep {
$(
DepConstructor :: $variant $(( replace!(($tuple_arg_ty) with arg) ))*
$({ $($struct_arg_name),* })*
=>
{
pub fn $variant<'tcx>(_tcx: TyCtxt<'tcx>, $(arg: $tuple_arg_ty)*) -> DepNode {
// tuple args
$({
erase!($tuple_arg_ty);
let hash = DepNodeParams::to_fingerprint(&arg, tcx);
let hash = DepNodeParams::to_fingerprint(&arg, _tcx);
let dep_node = DepNode {
kind: DepKind::$variant,
hash
@@ -227,36 +190,11 @@ macro_rules! define_dep_nodes {
#[cfg(debug_assertions)]
{
if !dep_node.kind.can_reconstruct_query_key() &&
(tcx.sess.opts.debugging_opts.incremental_info ||
tcx.sess.opts.debugging_opts.query_dep_graph)
(_tcx.sess.opts.debugging_opts.incremental_info ||
_tcx.sess.opts.debugging_opts.query_dep_graph)
{
tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
arg.to_debug_str(tcx)
});
}
}
return dep_node;
})*
// struct args
$({
let tupled_args = ( $($struct_arg_name,)* );
let hash = DepNodeParams::to_fingerprint(&tupled_args,
tcx);
let dep_node = DepNode {
kind: DepKind::$variant,
hash
};
#[cfg(debug_assertions)]
{
if !dep_node.kind.can_reconstruct_query_key() &&
(tcx.sess.opts.debugging_opts.incremental_info ||
tcx.sess.opts.debugging_opts.query_dep_graph)
{
tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
tupled_args.to_debug_str(tcx)
_tcx.dep_graph.register_dep_node_debug_str(dep_node, || {
arg.to_debug_str(_tcx)
});
}
}
@@ -271,8 +209,15 @@ macro_rules! define_dep_nodes {
}
)*
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash,
RustcEncodable, RustcDecodable)]
pub struct DepNode {
pub kind: DepKind,
pub hash: Fingerprint,
}
impl DepNode {
/// Construct a DepNode from the given DepKind and DefPathHash. This
/// method will assert that the given DepKind actually requires a
/// single DefId/DefPathHash parameter.

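The macro rewrite above replaces the DepConstructor enum (previously consumed by DepNode::new) with per-kind constructor functions on a unit struct, so callers go from DepNode::new(tcx, DepConstructor::Kind(arg)) to DepConstructor::Kind(tcx, arg). A hand-written sketch of roughly what one generated constructor looks like; the kind name, fingerprint type, and hashing are illustrative stand-ins, not the real expansion:

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DepKind { TypeckTables }

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Fingerprint(pub u64);

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct DepNode { pub kind: DepKind, pub hash: Fingerprint }

pub struct DepConstructor;

impl DepConstructor {
    // Before: a DepConstructor::TypeckTables(..) enum variant matched inside
    // DepNode::new. After: each dep kind gets its own constructor function.
    #[allow(non_snake_case)]
    pub fn TypeckTables(def_id_hash: u64) -> DepNode {
        DepNode { kind: DepKind::TypeckTables, hash: Fingerprint(def_id_hash) }
    }
}

fn main() {
    let node = DepConstructor::TypeckTables(0xDEAD_BEEF);
    println!("{:?}", node);
}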
View File

@@ -1122,6 +1122,7 @@ impl CurrentDepGraph {
}
impl DepGraphData {
#[inline(never)]
fn read_index(&self, source: DepNodeIndex) {
ty::tls::with_context_opt(|icx| {
let icx = if let Some(icx) = icx { icx } else { return };

View File

@@ -140,6 +140,7 @@ impl<'a, 'hir> NodeCollector<'a, 'hir> {
trait_impls: _,
body_ids: _,
modules: _,
proc_macros: _,
} = *krate;
alloc_hir_dep_nodes(

View File

@@ -7,7 +7,7 @@ use rustc_hir::intravisit;
use rustc_hir::itemlikevisit::ItemLikeVisitor;
use rustc_hir::{HirId, ItemLocalId};
pub fn check_crate(hir_map: &Map<'_>) {
pub fn check_crate(hir_map: &Map<'_>, sess: &rustc_session::Session) {
hir_map.dep_graph.assert_ignored();
let errors = Lock::new(Vec::new());
@@ -24,7 +24,7 @@ pub fn check_crate(hir_map: &Map<'_>) {
if !errors.is_empty() {
let message = errors.iter().fold(String::new(), |s1, s2| s1 + "\n" + s2);
bug!("{}", message);
sess.delay_span_bug(rustc_span::DUMMY_SP, &message);
}
}

View File

@@ -1235,7 +1235,7 @@ pub fn map_crate<'hir>(
let map = Map { krate, dep_graph, crate_hash, map, hir_to_node_id, definitions };
sess.time("validate_HIR_map", || {
hir_id_validator::check_crate(&map);
hir_id_validator::check_crate(&map, sess);
});
map

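Taken together, the two hunks above thread a &Session into the HIR ID validator so that its failures are reported via delay_span_bug rather than aborting immediately with bug!. An illustrative sketch of that deferred-error pattern with a toy Session type (not rustc's):

use std::cell::RefCell;

// Toy stand-in for rustc_session::Session: the error is recorded now and only
// turned into a hard failure later, instead of aborting on the spot.
struct Session { delayed: RefCell<Vec<String>> }

impl Session {
    fn delay_span_bug(&self, msg: &str) {
        self.delayed.borrow_mut().push(msg.to_string());
    }
}

fn check_crate(errors: Vec<String>, sess: &Session) {
    if !errors.is_empty() {
        let message = errors.iter().fold(String::new(), |s1, s2| s1 + "\n" + s2);
        sess.delay_span_bug(&message);
    }
}

fn main() {
    let sess = Session { delayed: RefCell::new(Vec::new()) };
    check_crate(vec!["HirId mismatch in item `foo`".to_string()], &sess);
    assert_eq!(sess.delayed.borrow().len(), 1);
}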
View File

@@ -2,7 +2,6 @@
//!
//! [rustc guide]: https://rust-lang.github.io/rustc-guide/hir.html
pub mod check_attr;
pub mod exports;
pub mod map;

View File

@@ -149,7 +149,7 @@ impl<'a> StableHashingContext<'a> {
#[inline]
pub fn source_map(&mut self) -> &mut CachingSourceMapView<'a> {
match self.caching_source_map {
Some(ref mut cm) => cm,
Some(ref mut sm) => sm,
ref mut none => {
*none = Some(CachingSourceMapView::new(self.raw_source_map));
none.as_mut().unwrap()
@@ -220,27 +220,8 @@ impl<'a> ToStableHashKey<StableHashingContext<'a>> for hir::HirId {
}
impl<'a> HashStable<StableHashingContext<'a>> for ast::NodeId {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
match hcx.node_id_hashing_mode {
NodeIdHashingMode::Ignore => {
// Don't do anything.
}
NodeIdHashingMode::HashDefPath => {
hcx.definitions.node_to_hir_id(*self).hash_stable(hcx, hasher);
}
}
}
}
impl<'a> ToStableHashKey<StableHashingContext<'a>> for ast::NodeId {
type KeyType = (DefPathHash, hir::ItemLocalId);
#[inline]
fn to_stable_hash_key(
&self,
hcx: &StableHashingContext<'a>,
) -> (DefPathHash, hir::ItemLocalId) {
hcx.definitions.node_to_hir_id(*self).to_stable_hash_key(hcx)
fn hash_stable(&self, _: &mut StableHashingContext<'a>, _: &mut StableHasher) {
panic!("Node IDs should not appear in incremental state");
}
}
@@ -249,6 +230,12 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
self.hash_spans
}
#[inline]
fn hash_def_id(&mut self, def_id: DefId, hasher: &mut StableHasher) {
let hcx = self;
hcx.def_path_hash(def_id).hash_stable(hcx, hasher);
}
fn byte_pos_to_line_and_col(
&mut self,
byte: BytePos,

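The final hunk shown above makes stably hashing an ast::NodeId a hard error and adds hash_def_id, which hashes a DefId through its stable DefPathHash instead. A toy sketch of that contract; the trait and types here are simplified stand-ins, not the rustc ones:

// Stand-in types: stable hashing keys off a path hash, never a NodeId.
#[derive(Clone, Copy)]
struct DefPathHash(u64);

#[allow(dead_code)]
#[derive(Clone, Copy)]
struct NodeId(u32);

trait HashStable {
    fn hash_stable(&self, out: &mut u64);
}

impl HashStable for DefPathHash {
    fn hash_stable(&self, out: &mut u64) {
        // A real implementation would feed a StableHasher; XOR is enough here.
        *out ^= self.0;
    }
}

impl HashStable for NodeId {
    fn hash_stable(&self, _out: &mut u64) {
        panic!("Node IDs should not appear in incremental state");
    }
}

fn main() {
    let mut h = 0u64;
    DefPathHash(0x1234).hash_stable(&mut h);
    println!("stable hash: {:#x}", h);
}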
Some files were not shown because too many files have changed in this diff.