Mirror of https://github.com/rust-lang/rust.git (synced 2025-02-17 09:23:05 +00:00)

Merge branch 'master' into iwr_progress

This commit is contained in:
commit 6c534c316f
@@ -274,7 +274,7 @@ jobs:
MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: make ci-subset-1
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
# FIXME(#59637)

@@ -284,14 +284,14 @@ jobs:
MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: make ci-subset-2
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
x86_64-mingw-1:
MSYS_BITS: 64
SCRIPT: make ci-subset-1
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
# FIXME(#59637)

@@ -301,7 +301,7 @@ jobs:
MSYS_BITS: 64
SCRIPT: make ci-subset-2
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64

@@ -328,7 +328,7 @@ jobs:
MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-full-tools
SCRIPT: python x.py dist
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
DIST_REQUIRE_ALL_TOOLS: 1

@@ -337,7 +337,7 @@ jobs:
MSYS_BITS: 64
SCRIPT: python x.py dist
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-full-tools
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
DIST_REQUIRE_ALL_TOOLS: 1

@@ -27,7 +27,7 @@ steps:
# Original downloaded here came from
# http://releases.llvm.org/7.0.0/LLVM-7.0.0-win64.exe
- script: |
powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf %TEMP%\LLVM-7.0.0-win64.exe https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/LLVM-7.0.0-win64.exe"
powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf %TEMP%\LLVM-7.0.0-win64.exe https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/LLVM-7.0.0-win64.exe"
set CLANG_DIR=%CD%\citools\clang-rust
%TEMP%\LLVM-7.0.0-win64.exe /S /NCRC /D=%CLANG_DIR%
set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --set llvm.clang-cl=%CLANG_DIR%\bin\clang-cl.exe

@@ -2,14 +2,14 @@ steps:

- bash: |
set -e
curl -fo /usr/local/bin/sccache https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2018-04-02-sccache-x86_64-apple-darwin
curl -fo /usr/local/bin/sccache https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2018-04-02-sccache-x86_64-apple-darwin
chmod +x /usr/local/bin/sccache
displayName: Install sccache (OSX)
condition: and(succeeded(), eq(variables['Agent.OS'], 'Darwin'))

- script: |
md sccache
powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf sccache\sccache.exe https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2018-04-26-sccache-x86_64-pc-windows-msvc"
powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf sccache\sccache.exe https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2018-04-26-sccache-x86_64-pc-windows-msvc"
echo ##vso[task.prependpath]%CD%\sccache
displayName: Install sccache (Windows)
condition: and(succeeded(), eq(variables['Agent.OS'], 'Windows_NT'))

@@ -81,7 +81,7 @@ steps:
# Note that this is originally from the github releases patch of Ninja
- script: |
md ninja
powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf 2017-03-15-ninja-win.zip https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-15-ninja-win.zip"
powershell -Command "$ProgressPreference = 'SilentlyContinue'; iwr -outf 2017-03-15-ninja-win.zip https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2017-03-15-ninja-win.zip"
7z x -oninja 2017-03-15-ninja-win.zip
del 2017-03-15-ninja-win.zip
set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --enable-ninja
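Every hunk above makes the same substitution: the S3 path-style endpoint `https://s3-us-west-1.amazonaws.com/rust-lang-ci2/...` is swapped for the virtual-hosted-style endpoint `https://rust-lang-ci2.s3.amazonaws.com/...` of the same bucket and keys. A minimal sketch of that rewrite is below; the `to_virtual_hosted` helper and the example key are illustrative only, not part of the commit.

```rust
/// Rewrites an S3 path-style URL (https://s3-<region>.amazonaws.com/<bucket>/<key>)
/// into the virtual-hosted-style form (https://<bucket>.s3.amazonaws.com/<key>).
/// Hypothetical helper for illustration only.
fn to_virtual_hosted(url: &str) -> Option<String> {
    let rest = url.strip_prefix("https://")?;
    let (host, path) = rest.split_once('/')?;
    // Only handle regional path-style hosts such as "s3-us-west-1.amazonaws.com".
    if !host.starts_with("s3-") || !host.ends_with(".amazonaws.com") {
        return None;
    }
    let (bucket, key) = path.split_once('/')?;
    Some(format!("https://{}.s3.amazonaws.com/{}", bucket, key))
}

fn main() {
    let old = "https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-15-ninja-win.zip";
    assert_eq!(
        to_virtual_hosted(old).as_deref(),
        Some("https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2017-03-15-ninja-win.zip")
    );
    println!("ok");
}
```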
33 Cargo.lock

@@ -421,7 +421,7 @@ dependencies = [
"itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"matches 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)",
"regex-syntax 0.6.6 (registry+https://github.com/rust-lang/crates.io-index)",
"semver 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -860,7 +860,7 @@ dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]

@@ -2014,23 +2014,13 @@ dependencies = [

[[package]]
name = "pulldown-cmark"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]
name = "pulldown-cmark"
version = "0.5.0"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr 2.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
"unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]

@@ -2875,7 +2865,7 @@ dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
"syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)",
]

[[package]]

@@ -3077,7 +3067,7 @@ version = "0.0.0"
dependencies = [
"minifier 0.0.30 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
]

@@ -3428,7 +3418,7 @@ dependencies = [

[[package]]
name = "synstructure"
version = "0.10.1"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -3854,7 +3844,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"

[[package]]
name = "unicase"
version = "2.3.0"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",

@@ -4285,8 +4275,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
"checksum proptest 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "24f5844db2f839e97e3021980975f6ebf8691d9b9b2ca67ed3feb38dc3edb52c"
"checksum pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d6fdf85cda6cadfae5428a54661d431330b312bc767ddbc57adbedc24da66e32"
"checksum pulldown-cmark 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "d1b74cc784b038a9921fd1a48310cc2e238101aa8ae0b94201e2d85121dd68b5"
"checksum pulldown-cmark 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "157737d41407de9c5e0563a991d085117d60ae729af2cc1bf28d6dfbc97bcc1f"
"checksum pulldown-cmark 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "051e60ace841b3bfecd402fe5051c06cb3bec4a6e6fdd060a37aa8eb829a1db3"
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
"checksum quine-mc_cluskey 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)" = "07589615d719a60c8dd8a4622e7946465dfef20d1a428f969e3443e7386d5f45"
"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"

@@ -4367,7 +4356,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
"checksum syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)" = "ae8b29eb5210bc5cf63ed6149cbf9adfc82ac0be023d8735c176ee74a2db4da7"
"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f"
"checksum tar 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)" = "a303ba60a099fcd2aaa646b14d2724591a96a75283e4b7ed3d1a1658909d9ae2"
"checksum tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "7e91405c14320e5c79b3d148e1c86f40749a36e490642202a31689cb1a3452b2"
"checksum tendril 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9de21546595a0873061940d994bbbc5c35f024ae4fd61ec5c5b159115684f508"

@@ -4400,7 +4389,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
"checksum typenum 1.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "612d636f949607bdf9b123b4a6f6d966dedf3ff669f7f045890d3a4a73948169"
"checksum ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "71a9c5b1fe77426cf144cc30e49e955270f5086e31a6441dfa8b32efc09b9d77"
"checksum ucd-util 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "535c204ee4d8434478593480b8f86ab45ec9aae0e83c568ca81abf0fd0e88f86"
"checksum unicase 2.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "41d17211f887da8e4a70a45b9536f26fc5de166b81e2d5d80de4a17fd22553bd"
"checksum unicase 2.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a84e5511b2a947f3ae965dcb29b13b7b1691b6e7332cf5dbc1744138d5acb7f6"
"checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
"checksum unicode-normalization 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6a0180bc61fc5a987082bfa111f4cc95c4caff7f9799f3e46df09163a937aa25"
"checksum unicode-segmentation 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "aa6024fc12ddfd1c6dbc14a80fa2324d4568849869b779f6bd37e5e4c03344d1"
20 appveyor.yml

@@ -59,7 +59,7 @@ environment:
MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: make ci-subset-1
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
# FIXME(#59637)

@@ -69,14 +69,14 @@ environment:
MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu
SCRIPT: make ci-subset-2
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
- CI_JOB_NAME: x86_64-mingw
MSYS_BITS: 64
SCRIPT: python x.py test
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
# FIXME(#59637)

@@ -106,7 +106,7 @@ environment:
MSYS_BITS: 32
RUST_CONFIGURE_ARGS: --build=i686-pc-windows-gnu --enable-full-tools --enable-profiler
SCRIPT: python x.py dist
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: i686-6.3.0-release-posix-dwarf-rt_v5-rev2.7z
MINGW_DIR: mingw32
DIST_REQUIRE_ALL_TOOLS: 1

@@ -115,7 +115,7 @@ environment:
MSYS_BITS: 64
SCRIPT: python x.py dist
RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-gnu --enable-full-tools --enable-profiler
MINGW_URL: https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
MINGW_URL: https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
MINGW_ARCHIVE: x86_64-6.3.0-release-posix-seh-rt_v5-rev2.7z
MINGW_DIR: mingw64
DIST_REQUIRE_ALL_TOOLS: 1

@@ -159,7 +159,7 @@ install:
#
# Original downloaded here came from
# http://releases.llvm.org/8.0.0/LLVM-8.0.0-win64.exe
- if NOT defined MINGW_URL appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/LLVM-8.0.0-win64.exe
- if NOT defined MINGW_URL appveyor-retry appveyor DownloadFile https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/LLVM-8.0.0-win64.exe
- if NOT defined MINGW_URL .\LLVM-8.0.0-win64.exe /S /NCRC /D=C:\clang-rust
- if NOT defined MINGW_URL set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --set llvm.clang-cl=C:\clang-rust\bin\clang-cl.exe

@@ -191,25 +191,25 @@ install:
- set PATH=C:\Python27;%PATH%

# Download and install sccache
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2018-04-26-sccache-x86_64-pc-windows-msvc
- appveyor-retry appveyor DownloadFile https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2018-04-26-sccache-x86_64-pc-windows-msvc
- mv 2018-04-26-sccache-x86_64-pc-windows-msvc sccache.exe
- set PATH=%PATH%;%CD%

# Download and install ninja
#
# Note that this is originally from the github releases patch of Ninja
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-03-15-ninja-win.zip
- appveyor-retry appveyor DownloadFile https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2017-03-15-ninja-win.zip
- 7z x 2017-03-15-ninja-win.zip
- set RUST_CONFIGURE_ARGS=%RUST_CONFIGURE_ARGS% --enable-ninja
# - set PATH=%PATH%;%CD% -- this already happens above for sccache

# Install InnoSetup to get `iscc` used to produce installers
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-08-22-is.exe
- appveyor-retry appveyor DownloadFile https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2017-08-22-is.exe
- 2017-08-22-is.exe /VERYSILENT /SUPPRESSMSGBOXES /NORESTART /SP-
- set PATH="C:\Program Files (x86)\Inno Setup 5";%PATH%

# Help debug some handle issues on AppVeyor
- appveyor-retry appveyor DownloadFile https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2017-05-15-Handle.zip
- appveyor-retry appveyor DownloadFile https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2017-05-15-Handle.zip
- mkdir handle
- 7z x -ohandle 2017-05-15-Handle.zip
- set PATH=%PATH%;%CD%\handle
@@ -581,6 +581,30 @@ impl<'a> Builder<'a> {
})
}

/// Similar to `compiler`, except handles the full-bootstrap option to
/// silently use the stage1 compiler instead of a stage2 compiler if one is
/// requested.
///
/// Note that this does *not* have the side effect of creating
/// `compiler(stage, host)`, unlike `compiler` above which does have such
/// a side effect. The returned compiler here can only be used to compile
/// new artifacts, it can't be used to rely on the presence of a particular
/// sysroot.
///
/// See `force_use_stage1` for documentation on what each argument is.
pub fn compiler_for(
&self,
stage: u32,
host: Interned<String>,
target: Interned<String>,
) -> Compiler {
if self.build.force_use_stage1(Compiler { stage, host }, target) {
self.compiler(1, self.config.build)
} else {
self.compiler(stage, host)
}
}

pub fn sysroot(&self, compiler: Compiler) -> Interned<PathBuf> {
self.ensure(compile::Sysroot { compiler })
}
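The doc comment above spells out the rule `compiler_for` implements: normally it returns the stage-N compiler for `host`, but when `force_use_stage1` applies (a non-full-bootstrap build for this target) it silently falls back to the stage-1 compiler for the build triple. Below is a minimal, self-contained sketch of that rule; `MiniBuilder`, the simplified `force_use_stage1` stub, and plain `String` triples are stand-ins, not rustbuild's real types.

```rust
#[derive(Clone, Debug, PartialEq)]
struct Compiler {
    stage: u32,
    host: String,
}

struct MiniBuilder {
    build_triple: String,
    full_bootstrap: bool,
}

impl MiniBuilder {
    fn compiler(&self, stage: u32, host: &str) -> Compiler {
        Compiler { stage, host: host.to_string() }
    }

    // Stand-in for rustbuild's `force_use_stage1`: without full-bootstrap,
    // stage-2 requests are served by the stage-1 compiler instead.
    fn force_use_stage1(&self, stage: u32, _target: &str) -> bool {
        !self.full_bootstrap && stage >= 2
    }

    // Mirrors the shape of `Builder::compiler_for` from the hunk above.
    fn compiler_for(&self, stage: u32, host: &str, target: &str) -> Compiler {
        if self.force_use_stage1(stage, target) {
            self.compiler(1, &self.build_triple)
        } else {
            self.compiler(stage, host)
        }
    }
}

fn main() {
    let b = MiniBuilder {
        build_triple: "x86_64-unknown-linux-gnu".into(),
        full_bootstrap: false,
    };
    // With full-bootstrap disabled, a stage-2 request is redirected to stage 1.
    assert_eq!(b.compiler_for(2, "x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu").stage, 1);
    // A stage-1 request is returned unchanged.
    assert_eq!(b.compiler_for(1, "x86_64-unknown-linux-gnu", "aarch64-unknown-linux-gnu").stage, 1);
    println!("ok");
}
```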
@@ -754,11 +778,7 @@ impl<'a> Builder<'a> {
// This is for the original compiler, but if we're forced to use stage 1, then
// std/test/rustc stamps won't exist in stage 2, so we need to get those from stage 1, since
// we copy the libs forward.
let cmp = if self.force_use_stage1(compiler, target) {
self.compiler(1, compiler.host)
} else {
compiler
};
let cmp = self.compiler_for(compiler.stage, compiler.host, target);

let libstd_stamp = match cmd {
"check" | "clippy" | "fix" => check::libstd_stamp(self, cmp, target),

@@ -1358,7 +1378,7 @@ mod __test {

assert_eq!(
first(builder.cache.all::<dist::Docs>()),
&[dist::Docs { stage: 2, host: a },]
&[dist::Docs { host: a },]
);
assert_eq!(
first(builder.cache.all::<dist::Mingw>()),

@@ -1373,7 +1393,7 @@ mod __test {
assert_eq!(
first(builder.cache.all::<dist::Std>()),
&[dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: a,
},]
);

@@ -1392,8 +1412,8 @@ mod __test {
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
&[
dist::Docs { stage: 2, host: a },
dist::Docs { stage: 2, host: b },
dist::Docs { host: a },
dist::Docs { host: b },
]
);
assert_eq!(

@@ -1410,7 +1430,7 @@ mod __test {
first(builder.cache.all::<dist::Std>()),
&[
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: a,
},
dist::Std {

@@ -1434,8 +1454,8 @@ mod __test {
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
&[
dist::Docs { stage: 2, host: a },
dist::Docs { stage: 2, host: b },
dist::Docs { host: a },
dist::Docs { host: b },
]
);
assert_eq!(

@@ -1457,11 +1477,11 @@ mod __test {
first(builder.cache.all::<dist::Std>()),
&[
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: a,
},
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: b,
},
]

@@ -1469,6 +1489,40 @@ mod __test {
assert_eq!(first(builder.cache.all::<dist::Src>()), &[dist::Src]);
}

#[test]
fn dist_only_cross_host() {
let a = INTERNER.intern_str("A");
let b = INTERNER.intern_str("B");
let mut build = Build::new(configure(&["B"], &[]));
build.config.docs = false;
build.config.extended = true;
build.hosts = vec![b];
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);

assert_eq!(
first(builder.cache.all::<dist::Rustc>()),
&[
dist::Rustc {
compiler: Compiler { host: b, stage: 2 }
},
]
);
assert_eq!(
first(builder.cache.all::<compile::Rustc>()),
&[
compile::Rustc {
compiler: Compiler { host: a, stage: 0 },
target: a,
},
compile::Rustc {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
]
);
}

#[test]
fn dist_with_targets_and_hosts() {
let build = Build::new(configure(&["B"], &["C"]));

@@ -1482,9 +1536,9 @@ mod __test {
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
&[
dist::Docs { stage: 2, host: a },
dist::Docs { stage: 2, host: b },
dist::Docs { stage: 2, host: c },
dist::Docs { host: a },
dist::Docs { host: b },
dist::Docs { host: c },
]
);
assert_eq!(

@@ -1510,11 +1564,11 @@ mod __test {
first(builder.cache.all::<dist::Std>()),
&[
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: a,
},
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: b,
},
dist::Std {

@@ -1541,9 +1595,9 @@ mod __test {
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
&[
dist::Docs { stage: 2, host: a },
dist::Docs { stage: 2, host: b },
dist::Docs { stage: 2, host: c },
dist::Docs { host: a },
dist::Docs { host: b },
dist::Docs { host: c },
]
);
assert_eq!(

@@ -1559,11 +1613,11 @@ mod __test {
first(builder.cache.all::<dist::Std>()),
&[
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: a,
},
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: b,
},
dist::Std {

@@ -1587,8 +1641,8 @@ mod __test {
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
&[
dist::Docs { stage: 2, host: a },
dist::Docs { stage: 2, host: b },
dist::Docs { host: a },
dist::Docs { host: b },
]
);
assert_eq!(

@@ -1610,11 +1664,11 @@ mod __test {
first(builder.cache.all::<dist::Std>()),
&[
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: a,
},
dist::Std {
compiler: Compiler { host: a, stage: 2 },
compiler: Compiler { host: a, stage: 1 },
target: b,
},
]

@@ -1664,10 +1718,6 @@ mod __test {
compiler: Compiler { host: a, stage: 1 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 2 },
target: b,
},
]
);
assert_eq!(

@@ -1720,10 +1770,6 @@ mod __test {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Rustc {
compiler: Compiler { host: a, stage: 0 },
target: b,
},
compile::Rustc {
compiler: Compiler { host: a, stage: 1 },
target: b,

@@ -1758,10 +1804,6 @@ mod __test {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 0 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: b,

@@ -1808,9 +1850,6 @@ mod __test {
compile::Assemble {
target_compiler: Compiler { host: a, stage: 1 },
},
compile::Assemble {
target_compiler: Compiler { host: b, stage: 1 },
},
compile::Assemble {
target_compiler: Compiler { host: a, stage: 2 },
},

@@ -1830,10 +1869,6 @@ mod __test {
compiler: Compiler { host: a, stage: 1 },
target: a,
},
compile::Rustc {
compiler: Compiler { host: a, stage: 0 },
target: b,
},
compile::Rustc {
compiler: Compiler { host: a, stage: 1 },
target: b,

@@ -1860,10 +1895,6 @@ mod __test {
compiler: Compiler { host: b, stage: 2 },
target: a,
},
compile::Test {
compiler: Compiler { host: a, stage: 0 },
target: b,
},
compile::Test {
compiler: Compiler { host: a, stage: 1 },
target: b,
@@ -70,20 +70,20 @@ impl Step for Std {

builder.ensure(StartupObjects { compiler, target });

if builder.force_use_stage1(compiler, target) {
let from = builder.compiler(1, builder.config.build);
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
if compiler_to_use != compiler {
builder.ensure(Std {
compiler: from,
compiler: compiler_to_use,
target,
});
builder.info(&format!("Uplifting stage1 std ({} -> {})", from.host, target));
builder.info(&format!("Uplifting stage1 std ({} -> {})", compiler_to_use.host, target));

// Even if we're not building std this stage, the new sysroot must
// still contain the third party objects needed by various targets.
copy_third_party_objects(builder, &compiler, target);

builder.ensure(StdLink {
compiler: from,
compiler: compiler_to_use,
target_compiler: compiler,
target,
});
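The hunk above swaps an explicit `force_use_stage1` branch for a comparison against `compiler_for`: when the compiler that will actually be used differs from the one requested, the already-built stage-1 std is uplifted instead of being rebuilt for the requested stage. A small sketch of that decision follows; `build_std`, `uplift_std`, and the stripped-down `Compiler` are hypothetical stand-ins, not rustbuild's real steps.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Compiler {
    stage: u32,
}

// Hypothetical stand-ins for the real rustbuild steps.
fn build_std(c: Compiler) {
    println!("building std with stage{} compiler", c.stage);
}
fn uplift_std(from: Compiler, to: Compiler) {
    println!("uplifting std: stage{} -> stage{}", from.stage, to.stage);
}

/// Mirrors the shape of the new logic: build with `compiler_to_use`, and if
/// that differs from the requested `compiler`, copy (uplift) the result
/// forward instead of compiling std again for the requested stage.
fn ensure_std(compiler: Compiler, compiler_to_use: Compiler) {
    if compiler_to_use != compiler {
        build_std(compiler_to_use);
        uplift_std(compiler_to_use, compiler);
    } else {
        build_std(compiler);
    }
}

fn main() {
    // Non-full-bootstrap: a stage-2 request is served by stage 1 and uplifted.
    ensure_std(Compiler { stage: 2 }, Compiler { stage: 1 });
    // Full bootstrap: the requested compiler is used directly.
    ensure_std(Compiler { stage: 2 }, Compiler { stage: 2 });
}
```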
@@ -403,15 +403,16 @@ impl Step for Test {
return;
}

if builder.force_use_stage1(compiler, target) {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
if compiler_to_use != compiler {
builder.ensure(Test {
compiler: builder.compiler(1, builder.config.build),
compiler: compiler_to_use,
target,
});
builder.info(
&format!("Uplifting stage1 test ({} -> {})", builder.config.build, target));
builder.ensure(TestLink {
compiler: builder.compiler(1, builder.config.build),
compiler: compiler_to_use,
target_compiler: compiler,
target,
});

@@ -529,15 +530,16 @@ impl Step for Rustc {
return;
}

if builder.force_use_stage1(compiler, target) {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
if compiler_to_use != compiler {
builder.ensure(Rustc {
compiler: builder.compiler(1, builder.config.build),
compiler: compiler_to_use,
target,
});
builder.info(&format!("Uplifting stage1 rustc ({} -> {})",
builder.config.build, target));
builder.ensure(RustcLink {
compiler: builder.compiler(1, builder.config.build),
compiler: compiler_to_use,
target_compiler: compiler,
target,
});

@@ -687,9 +689,10 @@ impl Step for CodegenBackend {
return;
}

if builder.force_use_stage1(compiler, target) {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
if compiler_to_use != compiler {
builder.ensure(CodegenBackend {
compiler: builder.compiler(1, builder.config.build),
compiler: compiler_to_use,
target,
backend,
});
@@ -68,7 +68,6 @@ fn missing_tool(tool_name: &str, skip: bool) {

#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Docs {
pub stage: u32,
pub host: Interned<String>,
}

@@ -82,7 +81,6 @@ impl Step for Docs {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Docs {
stage: run.builder.top_stage,
host: run.target,
});
}

@@ -130,7 +128,6 @@ impl Step for Docs {

#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustcDocs {
pub stage: u32,
pub host: Interned<String>,
}

@@ -144,7 +141,6 @@ impl Step for RustcDocs {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(RustcDocs {
stage: run.builder.top_stage,
host: run.target,
});
}

@@ -647,7 +643,11 @@ impl Step for Std {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Std {
compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

@@ -737,7 +737,14 @@ impl Step for Analysis {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Analysis {
compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
// Find the actual compiler (handling the full bootstrap option) which
// produced the save-analysis data because that data isn't copied
// through the sysroot uplifting.
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

@@ -757,14 +764,6 @@ impl Step for Analysis {

builder.ensure(Std { compiler, target });

// Package save-analysis from stage1 if not doing a full bootstrap, as the
// stage2 artifacts is simply copied from stage1 in that case.
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler.clone()
};

let image = tmpdir(builder).join(format!("{}-{}-image", name, target));

let src = builder.stage_out(compiler, Mode::Std)
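The two Analysis hunks above move the "which compiler actually produced the save-analysis data" decision out of `run` and into `make_run`, by asking `compiler_for` up front instead of re-deriving stage 1 with `force_use_stage1` later. The sketch below illustrates why the producing stage matters for the packaged path; the `stage_out` helper and directory names are illustrative only, not rustbuild's real layout.

```rust
use std::path::PathBuf;

#[derive(Clone, Copy)]
struct Compiler {
    stage: u32,
}

// Illustrative stand-in: the output directory is keyed by the stage of the
// compiler that produced the artifacts, so pointing packaging at the wrong
// compiler means looking in a directory that was never populated.
fn stage_out(build_dir: &str, compiler: Compiler) -> PathBuf {
    PathBuf::from(build_dir).join(format!("stage{}-std", compiler.stage))
}

fn main() {
    // If stage-2 std was merely uplifted from stage 1, the save-analysis data
    // only exists under the stage-1 output directory.
    let requested = Compiler { stage: 2 };
    let actually_used = Compiler { stage: 1 };
    println!("requested:     {}", stage_out("build/host-triple", requested).display());
    println!("actually used: {}", stage_out("build/host-triple", actually_used).display());
}
```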
@@ -1066,7 +1065,7 @@ pub fn sanitize_sh(path: &Path) -> String {

#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub stage: u32,
pub compiler: Compiler,
pub target: Interned<String>,
}

@@ -1080,16 +1079,20 @@ impl Step for Cargo {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Cargo {
stage: run.builder.top_stage,
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) -> PathBuf {
let stage = self.stage;
let compiler = self.compiler;
let target = self.target;

builder.info(&format!("Dist cargo stage{} ({})", stage, target));
builder.info(&format!("Dist cargo stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/cargo");
let etc = src.join("src/etc");
let release_num = builder.release_num("cargo");

@@ -1104,10 +1107,7 @@ impl Step for Cargo {
// Prepare the image directory
builder.create_dir(&image.join("share/zsh/site-functions"));
builder.create_dir(&image.join("etc/bash_completion.d"));
let cargo = builder.ensure(tool::Cargo {
compiler: builder.compiler(stage, builder.config.build),
target
});
let cargo = builder.ensure(tool::Cargo { compiler, target });
builder.install(&cargo, &image.join("bin"), 0o755);
for man in t!(etc.join("man").read_dir()) {
let man = t!(man);

@@ -1152,7 +1152,7 @@ impl Step for Cargo {

#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rls {
pub stage: u32,
pub compiler: Compiler,
pub target: Interned<String>,
}

@@ -1166,17 +1166,21 @@ impl Step for Rls {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Rls {
stage: run.builder.top_stage,
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
let stage = self.stage;
let compiler = self.compiler;
let target = self.target;
assert!(builder.config.extended);

builder.info(&format!("Dist RLS stage{} ({})", stage, target));
builder.info(&format!("Dist RLS stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/rls");
let release_num = builder.release_num("rls");
let name = pkgname(builder, "rls");

@@ -1191,8 +1195,9 @@ impl Step for Rls {
// We expect RLS to build, because we've exited this step above if tool
// state for RLS isn't testing.
let rls = builder.ensure(tool::Rls {
compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
compiler,
target,
extra_features: Vec::new(),
}).or_else(|| { missing_tool("RLS", builder.build.config.missing_tools); None })?;

builder.install(&rls, &image.join("bin"), 0o755);

@@ -1231,7 +1236,7 @@ impl Step for Rls {

#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Clippy {
pub stage: u32,
pub compiler: Compiler,
pub target: Interned<String>,
}

@@ -1245,17 +1250,21 @@ impl Step for Clippy {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Clippy {
stage: run.builder.top_stage,
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
let stage = self.stage;
let compiler = self.compiler;
let target = self.target;
assert!(builder.config.extended);

builder.info(&format!("Dist clippy stage{} ({})", stage, target));
builder.info(&format!("Dist clippy stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/clippy");
let release_num = builder.release_num("clippy");
let name = pkgname(builder, "clippy");

@@ -1270,11 +1279,12 @@ impl Step for Clippy {
// We expect clippy to build, because we've exited this step above if tool
// state for clippy isn't testing.
let clippy = builder.ensure(tool::Clippy {
compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
compiler,
target,
extra_features: Vec::new(),
}).or_else(|| { missing_tool("clippy", builder.build.config.missing_tools); None })?;
let cargoclippy = builder.ensure(tool::CargoClippy {
compiler: builder.compiler(stage, builder.config.build),
compiler,
target, extra_features: Vec::new()
}).or_else(|| { missing_tool("cargo clippy", builder.build.config.missing_tools); None })?;

@@ -1315,7 +1325,7 @@ impl Step for Clippy {

#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Miri {
pub stage: u32,
pub compiler: Compiler,
pub target: Interned<String>,
}

@@ -1329,17 +1339,21 @@ impl Step for Miri {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Miri {
stage: run.builder.top_stage,
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
let stage = self.stage;
let compiler = self.compiler;
let target = self.target;
assert!(builder.config.extended);

builder.info(&format!("Dist miri stage{} ({})", stage, target));
builder.info(&format!("Dist miri stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/miri");
let release_num = builder.release_num("miri");
let name = pkgname(builder, "miri");

@@ -1354,12 +1368,14 @@ impl Step for Miri {
// We expect miri to build, because we've exited this step above if tool
// state for miri isn't testing.
let miri = builder.ensure(tool::Miri {
compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
compiler,
target,
extra_features: Vec::new(),
}).or_else(|| { missing_tool("miri", builder.build.config.missing_tools); None })?;
let cargomiri = builder.ensure(tool::CargoMiri {
compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
compiler,
target,
extra_features: Vec::new()
}).or_else(|| { missing_tool("cargo miri", builder.build.config.missing_tools); None })?;

builder.install(&miri, &image.join("bin"), 0o755);

@@ -1399,7 +1415,7 @@ impl Step for Miri {

#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustfmt {
pub stage: u32,
pub compiler: Compiler,
pub target: Interned<String>,
}

@@ -1413,16 +1429,20 @@ impl Step for Rustfmt {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(Rustfmt {
stage: run.builder.top_stage,
compiler: run.builder.compiler_for(
run.builder.top_stage,
run.builder.config.build,
run.target,
),
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
let stage = self.stage;
let compiler = self.compiler;
let target = self.target;

builder.info(&format!("Dist Rustfmt stage{} ({})", stage, target));
builder.info(&format!("Dist Rustfmt stage{} ({})", compiler.stage, target));
let src = builder.src.join("src/tools/rustfmt");
let release_num = builder.release_num("rustfmt");
let name = pkgname(builder, "rustfmt");

@@ -1435,12 +1455,14 @@ impl Step for Rustfmt {

// Prepare the image directory
let rustfmt = builder.ensure(tool::Rustfmt {
compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
compiler,
target,
extra_features: Vec::new(),
}).or_else(|| { missing_tool("Rustfmt", builder.build.config.missing_tools); None })?;
let cargofmt = builder.ensure(tool::Cargofmt {
compiler: builder.compiler(stage, builder.config.build),
target, extra_features: Vec::new()
compiler,
target,
extra_features: Vec::new(),
}).or_else(|| { missing_tool("Cargofmt", builder.build.config.missing_tools); None })?;

builder.install(&rustfmt, &image.join("bin"), 0o755);

@@ -1505,30 +1527,28 @@ impl Step for Extended {

/// Creates a combined installer for the specified target in the provided stage.
fn run(self, builder: &Builder<'_>) {
let stage = self.stage;
let target = self.target;
let stage = self.stage;
let compiler = builder.compiler_for(self.stage, self.host, self.target);

builder.info(&format!("Dist extended stage{} ({})", stage, target));
builder.info(&format!("Dist extended stage{} ({})", compiler.stage, target));

let rustc_installer = builder.ensure(Rustc {
compiler: builder.compiler(stage, target),
});
let cargo_installer = builder.ensure(Cargo { stage, target });
let rustfmt_installer = builder.ensure(Rustfmt { stage, target });
let rls_installer = builder.ensure(Rls { stage, target });
let llvm_tools_installer = builder.ensure(LlvmTools { stage, target });
let clippy_installer = builder.ensure(Clippy { stage, target });
let miri_installer = builder.ensure(Miri { stage, target });
let cargo_installer = builder.ensure(Cargo { compiler, target });
let rustfmt_installer = builder.ensure(Rustfmt { compiler, target });
let rls_installer = builder.ensure(Rls { compiler, target });
let llvm_tools_installer = builder.ensure(LlvmTools { target });
let clippy_installer = builder.ensure(Clippy { compiler, target });
let miri_installer = builder.ensure(Miri { compiler, target });
let lldb_installer = builder.ensure(Lldb { target });
let mingw_installer = builder.ensure(Mingw { host: target });
let analysis_installer = builder.ensure(Analysis {
compiler: builder.compiler(stage, self.host),
target
});
let analysis_installer = builder.ensure(Analysis { compiler, target });

let docs_installer = builder.ensure(Docs { stage, host: target, });
let docs_installer = builder.ensure(Docs { host: target, });
let std_installer = builder.ensure(Std {
compiler: builder.compiler(stage, self.host),
compiler: builder.compiler(stage, target),
target,
});

@@ -2076,7 +2096,6 @@ pub fn maybe_install_llvm_dylib(builder: &Builder<'_>,

#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct LlvmTools {
pub stage: u32,
pub target: Interned<String>,
}

@@ -2090,26 +2109,24 @@ impl Step for LlvmTools {

fn make_run(run: RunConfig<'_>) {
run.builder.ensure(LlvmTools {
stage: run.builder.top_stage,
target: run.target,
});
}

fn run(self, builder: &Builder<'_>) -> Option<PathBuf> {
let stage = self.stage;
let target = self.target;
assert!(builder.config.extended);

/* run only if llvm-config isn't used */
if let Some(config) = builder.config.target_config.get(&target) {
if let Some(ref _s) = config.llvm_config {
builder.info(&format!("Skipping LlvmTools stage{} ({}): external LLVM",
stage, target));
builder.info(&format!("Skipping LlvmTools ({}): external LLVM",
target));
return None;
}
}

builder.info(&format!("Dist LlvmTools stage{} ({})", stage, target));
builder.info(&format!("Dist LlvmTools ({})", target));
let src = builder.src.join("src/llvm-project/llvm");
let name = pkgname(builder, "llvm-tools");
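In the dist hunks above, the per-tool steps (Cargo, Rls, Clippy, Miri, Rustfmt) stop carrying a raw `stage: u32` and instead carry the resolved `Compiler` chosen by `compiler_for`, while Docs and LlvmTools drop the stage field entirely. Since a step's identity is its field values, the dedup key now reflects the compiler that is actually used. The cache sketch below is a simplified stand-in for rustbuild's step cache, not its real implementation, and the step and field names are illustrative.

```rust
use std::collections::HashSet;

#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
struct Compiler {
    stage: u32,
}

// New-style step key: the resolved compiler is part of the identity.
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct DistCargo {
    compiler: Compiler,
    target: &'static str,
}

fn main() {
    // Simplified stand-in for the builder's step cache: a step runs once per
    // distinct key value.
    let mut cache: HashSet<DistCargo> = HashSet::new();

    let stage1 = Compiler { stage: 1 };
    // Two requests that resolve (via compiler_for) to the same stage-1
    // compiler collapse into a single dist step.
    let first = cache.insert(DistCargo { compiler: stage1, target: "x86_64-pc-windows-gnu" });
    let second = cache.insert(DistCargo { compiler: stage1, target: "x86_64-pc-windows-gnu" });
    assert!(first);
    assert!(!second);
    println!("ran dist::Cargo {} time(s)", cache.len());
}
```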
@@ -475,12 +475,7 @@ impl Step for Std {
builder.info(&format!("Documenting stage{} std ({})", stage, target));
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let compiler = builder.compiler(stage, builder.config.build);
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
let compiler = builder.compiler_for(stage, builder.config.build, target);

builder.ensure(compile::Std { compiler, target });
let out_dir = builder.stage_out(compiler, Mode::Std)

@@ -563,12 +558,7 @@ impl Step for Test {
builder.info(&format!("Documenting stage{} test ({})", stage, target));
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let compiler = builder.compiler(stage, builder.config.build);
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
let compiler = builder.compiler_for(stage, builder.config.build, target);

// Build libstd docs so that we generate relative links
builder.ensure(Std { stage, target });

@@ -632,12 +622,7 @@ impl Step for WhitelistedRustc {
builder.info(&format!("Documenting stage{} whitelisted compiler ({})", stage, target));
let out = builder.doc_out(target);
t!(fs::create_dir_all(&out));
let compiler = builder.compiler(stage, builder.config.build);
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
let compiler = builder.compiler_for(stage, builder.config.build, target);

// Build libstd docs so that we generate relative links
builder.ensure(Std { stage, target });

@@ -706,12 +691,7 @@ impl Step for Rustc {
t!(fs::create_dir_all(&out));

// Get the correct compiler for this stage.
let compiler = builder.compiler(stage, builder.config.build);
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
let compiler = builder.compiler_for(stage, builder.config.build, target);

if !builder.config.compiler_docs {
builder.info("\tskipping - compiler/librustdoc docs disabled");

@@ -728,7 +708,7 @@ impl Step for Rustc {

// Build cargo command.
let mut cargo = builder.cargo(compiler, Mode::Rustc, target, "doc");
cargo.env("RUSTDOCFLAGS", "--document-private-items");
cargo.env("RUSTDOCFLAGS", "--document-private-items --passes strip-hidden");
compile::rustc_cargo(builder, &mut cargo);

// Only include compiler crates, no dependencies of those, such as `libc`.

@@ -807,12 +787,7 @@ impl Step for Rustdoc {
t!(fs::create_dir_all(&out));

// Get the correct compiler for this stage.
let compiler = builder.compiler(stage, builder.config.build);
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler
};
let compiler = builder.compiler_for(stage, builder.config.build, target);

if !builder.config.compiler_docs {
builder.info("\tskipping - compiler/librustdoc docs disabled");
|
||||
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf, Component};
|
||||
use std::path::{Component, Path, PathBuf};
|
||||
use std::process::Command;
|
||||
|
||||
use build_helper::t;
|
||||
|
||||
use crate::dist::{self, pkgname, sanitize_sh, tmpdir};
|
||||
use crate::Compiler;
|
||||
|
||||
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
|
||||
use crate::cache::Interned;
|
||||
@ -58,7 +59,7 @@ fn install_sh(
|
||||
package: &str,
|
||||
name: &str,
|
||||
stage: u32,
|
||||
host: Option<Interned<String>>
|
||||
host: Option<Interned<String>>,
|
||||
) {
|
||||
builder.info(&format!("Install {} stage{} ({:?})", package, stage, host));
|
||||
|
||||
@ -144,9 +145,8 @@ macro_rules! install {
|
||||
$(
|
||||
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
|
||||
pub struct $name {
|
||||
pub stage: u32,
|
||||
pub compiler: Compiler,
|
||||
pub target: Interned<String>,
|
||||
pub host: Interned<String>,
|
||||
}
|
||||
|
||||
impl $name {
|
||||
@ -175,9 +175,8 @@ macro_rules! install {
|
||||
|
||||
fn make_run(run: RunConfig<'_>) {
|
||||
run.builder.ensure($name {
|
||||
stage: run.builder.top_stage,
|
||||
compiler: run.builder.compiler(run.builder.top_stage, run.builder.config.build),
|
||||
target: run.target,
|
||||
host: run.builder.config.build,
|
||||
});
|
||||
}
|
||||
|
||||
@ -190,67 +189,78 @@ macro_rules! install {
|
||||
|
||||
install!((self, builder, _config),
|
||||
Docs, "src/doc", _config.docs, only_hosts: false, {
|
||||
builder.ensure(dist::Docs { stage: self.stage, host: self.target });
|
||||
install_docs(builder, self.stage, self.target);
|
||||
builder.ensure(dist::Docs { host: self.target });
|
||||
install_docs(builder, self.compiler.stage, self.target);
|
||||
};
|
||||
Std, "src/libstd", true, only_hosts: true, {
|
||||
for target in &builder.targets {
|
||||
builder.ensure(dist::Std {
|
||||
compiler: builder.compiler(self.stage, self.host),
|
||||
compiler: self.compiler,
|
||||
target: *target
|
||||
});
|
||||
install_std(builder, self.stage, *target);
|
||||
install_std(builder, self.compiler.stage, *target);
|
||||
}
|
||||
};
|
||||
Cargo, "cargo", Self::should_build(_config), only_hosts: true, {
|
||||
builder.ensure(dist::Cargo { stage: self.stage, target: self.target });
|
||||
install_cargo(builder, self.stage, self.target);
|
||||
builder.ensure(dist::Cargo { compiler: self.compiler, target: self.target });
|
||||
install_cargo(builder, self.compiler.stage, self.target);
|
||||
};
|
||||
Rls, "rls", Self::should_build(_config), only_hosts: true, {
|
||||
if builder.ensure(dist::Rls { stage: self.stage, target: self.target }).is_some() ||
|
||||
if builder.ensure(dist::Rls { compiler: self.compiler, target: self.target }).is_some() ||
|
||||
Self::should_install(builder) {
|
||||
install_rls(builder, self.stage, self.target);
|
||||
install_rls(builder, self.compiler.stage, self.target);
|
||||
} else {
|
||||
builder.info(&format!("skipping Install RLS stage{} ({})", self.stage, self.target));
|
||||
builder.info(
|
||||
&format!("skipping Install RLS stage{} ({})", self.compiler.stage, self.target),
|
||||
);
|
||||
}
|
||||
};
|
||||
Clippy, "clippy", Self::should_build(_config), only_hosts: true, {
|
||||
if builder.ensure(dist::Clippy { stage: self.stage, target: self.target }).is_some() ||
|
||||
Self::should_install(builder) {
|
||||
install_clippy(builder, self.stage, self.target);
|
||||
if builder.ensure(dist::Clippy {
|
||||
compiler: self.compiler,
|
||||
target: self.target,
|
||||
}).is_some() || Self::should_install(builder) {
|
||||
install_clippy(builder, self.compiler.stage, self.target);
|
||||
} else {
|
||||
builder.info(&format!("skipping Install clippy stage{} ({})", self.stage, self.target));
|
||||
builder.info(
|
||||
&format!("skipping Install clippy stage{} ({})", self.compiler.stage, self.target),
|
||||
);
|
||||
}
|
||||
};
|
||||
Miri, "miri", Self::should_build(_config), only_hosts: true, {
|
||||
if builder.ensure(dist::Miri { stage: self.stage, target: self.target }).is_some() ||
|
||||
if builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }).is_some() ||
|
||||
Self::should_install(builder) {
|
||||
install_miri(builder, self.stage, self.target);
|
||||
install_miri(builder, self.compiler.stage, self.target);
|
||||
} else {
|
||||
builder.info(&format!("skipping Install miri stage{} ({})", self.stage, self.target));
|
||||
builder.info(
|
||||
&format!("skipping Install miri stage{} ({})", self.compiler.stage, self.target),
|
||||
);
|
||||
}
|
||||
};
|
||||
Rustfmt, "rustfmt", Self::should_build(_config), only_hosts: true, {
|
||||
if builder.ensure(dist::Rustfmt { stage: self.stage, target: self.target }).is_some() ||
|
||||
Self::should_install(builder) {
|
||||
install_rustfmt(builder, self.stage, self.target);
|
||||
if builder.ensure(dist::Rustfmt {
|
||||
compiler: self.compiler,
|
||||
target: self.target
|
||||
}).is_some() || Self::should_install(builder) {
|
||||
install_rustfmt(builder, self.compiler.stage, self.target);
|
||||
} else {
|
||||
builder.info(
|
||||
&format!("skipping Install Rustfmt stage{} ({})", self.stage, self.target));
|
||||
&format!("skipping Install Rustfmt stage{} ({})", self.compiler.stage, self.target),
|
||||
);
|
||||
}
|
||||
};
|
||||
Analysis, "analysis", Self::should_build(_config), only_hosts: false, {
|
||||
builder.ensure(dist::Analysis {
|
||||
compiler: builder.compiler(self.stage, self.host),
|
||||
compiler: self.compiler,
|
||||
target: self.target
|
||||
});
|
||||
install_analysis(builder, self.stage, self.target);
|
||||
install_analysis(builder, self.compiler.stage, self.target);
|
||||
};
|
||||
Rustc, "src/librustc", true, only_hosts: true, {
|
||||
builder.ensure(dist::Rustc {
|
||||
compiler: builder.compiler(self.stage, self.target),
|
||||
compiler: self.compiler,
|
||||
});
|
||||
install_rustc(builder, self.stage, self.target);
|
||||
install_rustc(builder, self.compiler.stage, self.target);
|
||||
};
|
||||
);
|
||||
|
||||
@ -266,15 +276,12 @@ impl Step for Src {
|
||||
|
||||
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
|
||||
let config = &run.builder.config;
|
||||
let cond = config.extended &&
|
||||
config.tools.as_ref().map_or(true, |t| t.contains("src"));
|
||||
let cond = config.extended && config.tools.as_ref().map_or(true, |t| t.contains("src"));
|
||||
run.path("src").default_condition(cond)
|
||||
}
|
||||
|
||||
fn make_run(run: RunConfig<'_>) {
|
||||
run.builder.ensure(Src {
|
||||
stage: run.builder.top_stage,
|
||||
});
|
||||
run.builder.ensure(Src { stage: run.builder.top_stage });
|
||||
}
|
||||
|
||||
fn run(self, builder: &Builder<'_>) {
|
||||
|
@@ -1690,15 +1690,11 @@ impl Step for Crate {
builder.ensure(compile::Test { compiler, target });
builder.ensure(RemoteCopyLibs { compiler, target });

// If we're not doing a full bootstrap but we're testing a stage2 version of
// libstd, then what we're actually testing is the libstd produced in
// stage1. Reflect that here by updating the compiler that we're working
// with automatically.
let compiler = if builder.force_use_stage1(compiler, target) {
builder.compiler(1, compiler.host)
} else {
compiler.clone()
};
// If we're not doing a full bootstrap but we're testing a stage2
// version of libstd, then what we're actually testing is the libstd
// produced in stage1. Reflect that here by updating the compiler that
// we're working with automatically.
let compiler = builder.compiler_for(compiler.stage, compiler.host, target);

let mut cargo = builder.cargo(compiler, mode, target, test_kind.subcommand());
match mode {
@@ -535,9 +535,9 @@ impl Step for Cargo {
}

fn run(self, builder: &Builder<'_>) -> PathBuf {
// Cargo depends on procedural macros, which requires a full host
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
// Cargo depends on procedural macros, so make sure the host
// libstd/libproc_macro is available.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});

@@ -609,26 +609,26 @@ macro_rules! tool_extended {
tool_extended!((self, builder),
Cargofmt, rustfmt, "src/tools/rustfmt", "cargo-fmt", {};
CargoClippy, clippy, "src/tools/clippy", "cargo-clippy", {
// Clippy depends on procedural macros (serde), which requires a full host
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
// Clippy depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
};
Clippy, clippy, "src/tools/clippy", "clippy-driver", {
// Clippy depends on procedural macros (serde), which requires a full host
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
// Clippy depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
};
Miri, miri, "src/tools/miri", "miri", {};
CargoMiri, miri, "src/tools/miri", "cargo-miri", {
// Miri depends on procedural macros (serde), which requires a full host
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
// Miri depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});

@@ -642,9 +642,9 @@ tool_extended!((self, builder),
if clippy.is_some() {
self.extra_features.push("clippy".to_owned());
}
// RLS depends on procedural macros, which requires a full host
// compiler to be available, so we need to depend on that.
builder.ensure(compile::Rustc {
// RLS depends on procedural macros, so make sure that's built for
// the compiler itself.
builder.ensure(compile::Test {
compiler: self.compiler,
target: builder.config.build,
});
@ -72,7 +72,7 @@ RUN arm-linux-gnueabihf-gcc addentropy.c -o rootfs/addentropy -static
|
||||
|
||||
# TODO: What is this?!
|
||||
# Source of the file: https://github.com/vfdev-5/qemu-rpi2-vexpress/raw/master/vexpress-v2p-ca15-tc1.dtb
|
||||
RUN curl -O https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/vexpress-v2p-ca15-tc1.dtb
|
||||
RUN curl -O https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/vexpress-v2p-ca15-tc1.dtb
|
||||
|
||||
COPY scripts/sccache.sh /scripts/
|
||||
RUN sh /scripts/sccache.sh
|
||||
|
@ -5,7 +5,7 @@ mkdir /usr/local/mips-linux-musl
|
||||
# originally from
|
||||
# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/
|
||||
# OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
|
||||
URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror"
|
||||
URL="https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror"
|
||||
FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2"
|
||||
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
|
||||
|
||||
|
@ -5,7 +5,7 @@ mkdir /usr/local/mipsel-linux-musl
|
||||
# Note that this originally came from:
|
||||
# https://downloads.openwrt.org/snapshots/trunk/malta/generic/
|
||||
# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
|
||||
URL="https://s3-us-west-1.amazonaws.com/rust-lang-ci2/libc"
|
||||
URL="https://rust-lang-ci2.s3.amazonaws.com/libc"
|
||||
FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2"
|
||||
curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
|
||||
|
||||
|
@ -32,10 +32,9 @@ COPY dist-various-2/build-cloudabi-toolchain.sh /tmp/
|
||||
RUN /tmp/build-cloudabi-toolchain.sh x86_64-unknown-cloudabi
|
||||
COPY dist-various-2/build-fuchsia-toolchain.sh /tmp/
|
||||
RUN /tmp/build-fuchsia-toolchain.sh
|
||||
# FIXME(#61022) - reenable solaris
|
||||
# COPY dist-various-2/build-solaris-toolchain.sh /tmp/
|
||||
# RUN /tmp/build-solaris-toolchain.sh x86_64 amd64 solaris-i386
|
||||
# RUN /tmp/build-solaris-toolchain.sh sparcv9 sparcv9 solaris-sparc
|
||||
COPY dist-various-2/build-solaris-toolchain.sh /tmp/
|
||||
RUN /tmp/build-solaris-toolchain.sh x86_64 amd64 solaris-i386
|
||||
RUN /tmp/build-solaris-toolchain.sh sparcv9 sparcv9 solaris-sparc
|
||||
COPY dist-various-2/build-x86_64-fortanix-unknown-sgx-toolchain.sh /tmp/
|
||||
# We pass the commit id of the port of LLVM's libunwind to the build script.
|
||||
# Any update to the commit id here, should cause the container image to be re-built from this point on.
|
||||
@ -76,9 +75,8 @@ ENV TARGETS=x86_64-fuchsia
|
||||
ENV TARGETS=$TARGETS,aarch64-fuchsia
|
||||
ENV TARGETS=$TARGETS,wasm32-unknown-unknown
|
||||
ENV TARGETS=$TARGETS,wasm32-wasi
|
||||
# FIXME(#61022) - reenable solaris
|
||||
# ENV TARGETS=$TARGETS,sparcv9-sun-solaris
|
||||
# ENV TARGETS=$TARGETS,x86_64-sun-solaris
|
||||
ENV TARGETS=$TARGETS,sparcv9-sun-solaris
|
||||
ENV TARGETS=$TARGETS,x86_64-sun-solaris
|
||||
ENV TARGETS=$TARGETS,x86_64-unknown-linux-gnux32
|
||||
ENV TARGETS=$TARGETS,x86_64-unknown-cloudabi
|
||||
ENV TARGETS=$TARGETS,x86_64-fortanix-unknown-sgx
|
||||
|
@ -5,7 +5,7 @@
|
||||
set -ex
|
||||
|
||||
# Originally from https://releases.llvm.org/8.0.0/clang+llvm-8.0.0-x86_64-linux-gnu-ubuntu-14.04.tar.xz
|
||||
curl https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/clang%2Bllvm-8.0.0-x86_64-linux-gnu-ubuntu-14.04.tar.xz | \
|
||||
curl https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/clang%2Bllvm-8.0.0-x86_64-linux-gnu-ubuntu-14.04.tar.xz | \
|
||||
tar xJf -
|
||||
export PATH=`pwd`/clang+llvm-8.0.0-x86_64-linux-gnu-ubuntu-14.04/bin:$PATH
|
||||
|
||||
|
@ -4,7 +4,7 @@ set -ex
|
||||
source shared.sh
|
||||
|
||||
VERSION=1.0.2k
|
||||
URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/openssl-$VERSION.tar.gz
|
||||
URL=https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/openssl-$VERSION.tar.gz
|
||||
|
||||
curl $URL | tar xzf -
|
||||
|
||||
|
@ -25,7 +25,7 @@ cd netbsd
|
||||
|
||||
mkdir -p /x-tools/x86_64-unknown-netbsd/sysroot
|
||||
|
||||
URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror
|
||||
URL=https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror
|
||||
|
||||
# Originally from ftp://ftp.netbsd.org/pub/NetBSD/NetBSD-$BSD/source/sets/*.tgz
|
||||
curl $URL/2018-03-01-netbsd-src.tgz | tar xzf -
|
||||
|
@ -18,7 +18,7 @@ exit 1
|
||||
}
|
||||
|
||||
cd /
|
||||
curl -fL https://s3.amazonaws.com/mozilla-games/emscripten/releases/emsdk-portable.tar.gz | \
|
||||
curl -fL https://mozilla-games.s3.amazonaws.com/emscripten/releases/emsdk-portable.tar.gz | \
|
||||
tar -xz
|
||||
|
||||
cd /emsdk-portable
|
||||
|
@ -59,7 +59,7 @@ done
|
||||
|
||||
# Originally downloaded from:
|
||||
# https://download.freebsd.org/ftp/releases/${freebsd_arch}/${freebsd_version}-RELEASE/base.txz
|
||||
URL=https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2019-04-04-freebsd-${freebsd_arch}-${freebsd_version}-RELEASE-base.txz
|
||||
URL=https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2019-04-04-freebsd-${freebsd_arch}-${freebsd_version}-RELEASE-base.txz
|
||||
curl "$URL" | tar xJf - -C "$sysroot" --wildcards "${files_to_extract[@]}"
|
||||
|
||||
# Fix up absolute symlinks from the system image. This can be removed
|
||||
|
@ -1,6 +1,6 @@
|
||||
set -ex
|
||||
|
||||
curl -fo /usr/local/bin/sccache \
|
||||
https://s3-us-west-1.amazonaws.com/rust-lang-ci2/rust-ci-mirror/2018-04-02-sccache-x86_64-unknown-linux-musl
|
||||
https://rust-lang-ci2.s3.amazonaws.com/rust-ci-mirror/2018-04-02-sccache-x86_64-unknown-linux-musl
|
||||
|
||||
chmod +x /usr/local/bin/sccache
|
||||
|
@ -1,5 +1,9 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# FIXME(61301): we need to debug spurious failures with this on Windows on
|
||||
# Azure, so let's print more information in the logs.
|
||||
set -x
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
set -o nounset
|
||||
|
@ -253,6 +253,19 @@ conversion, so type inference fails because the type is not unique. Please note
|
||||
that you must write the `(())` in one sequence without intermediate whitespace
|
||||
so that rustdoc understands you want an implicit `Result`-returning function.
|
||||
|
||||
As of version 1.37.0, this simplification also works with `Option`s, which can
be handy for testing things like iterators or checked arithmetic. For example:
|
||||
|
||||
```ignore
|
||||
/// ```
|
||||
/// let _ = &[].iter().next()?;
|
||||
///# Some(())
|
||||
/// ```
|
||||
```
|
||||
|
||||
Note that the final value must be a `Some(())`, written in one go; unlike the
`Result` case, no further disambiguation of the result is required here.
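Another hedged illustration in the same style, this time using checked arithmetic (the values are chosen so the hidden `Some(())` is reached; this snippet is illustrative and not part of the change):

```ignore
/// ```
/// let x: u8 = 200;
/// // Overflow would yield `None`, making the `?` bail out and the doctest fail.
/// let _ = x.checked_add(50)?;
/// # Some(())
/// ```
```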
|
||||
|
||||
## Documenting macros
|
||||
|
||||
Here’s an example of documenting a macro:
|
||||
|
@ -1,7 +0,0 @@
|
||||
# `borrow_state`
|
||||
|
||||
The tracking issue for this feature is: [#27733]
|
||||
|
||||
[#27733]: https://github.com/rust-lang/rust/issues/27733
|
||||
|
||||
------------------------
|
@ -239,7 +239,7 @@ use core::fmt;
|
||||
use core::hash::{Hash, Hasher};
|
||||
use core::intrinsics::abort;
|
||||
use core::marker::{self, Unpin, Unsize, PhantomData};
|
||||
use core::mem::{self, align_of_val, forget, size_of_val};
|
||||
use core::mem::{self, align_of, align_of_val, forget, size_of_val};
|
||||
use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
|
||||
use core::pin::Pin;
|
||||
use core::ptr::{self, NonNull};
|
||||
@ -416,11 +416,7 @@ impl<T: ?Sized> Rc<T> {
|
||||
/// ```
|
||||
#[stable(feature = "rc_raw", since = "1.17.0")]
|
||||
pub unsafe fn from_raw(ptr: *const T) -> Self {
|
||||
// Align the unsized value to the end of the RcBox.
|
||||
// Because it is ?Sized, it will always be the last field in memory.
|
||||
let align = align_of_val(&*ptr);
|
||||
let layout = Layout::new::<RcBox<()>>();
|
||||
let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
|
||||
let offset = data_offset(ptr);
|
||||
|
||||
// Reverse the offset to find the original RcBox.
|
||||
let fake_ptr = ptr as *mut RcBox<T>;
|
||||
@ -1262,6 +1258,143 @@ impl<T> Weak<T> {
|
||||
ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
|
||||
///
|
||||
/// It is up to the caller to ensure that the object is still alive when accessing it through
|
||||
/// the pointer.
|
||||
///
|
||||
/// The pointer may be [`null`] or be dangling in case the object has already been destroyed.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(weak_into_raw)]
|
||||
///
|
||||
/// use std::rc::{Rc, Weak};
|
||||
/// use std::ptr;
|
||||
///
|
||||
/// let strong = Rc::new(42);
|
||||
/// let weak = Rc::downgrade(&strong);
|
||||
/// // Both point to the same object
|
||||
/// assert!(ptr::eq(&*strong, Weak::as_raw(&weak)));
|
||||
/// // The strong here keeps it alive, so we can still access the object.
|
||||
/// assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
|
||||
///
|
||||
/// drop(strong);
|
||||
/// // But not any more. We can do Weak::as_raw(&weak), but accessing the pointer would lead to
|
||||
/// // undefined behavior.
|
||||
/// // assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
|
||||
/// ```
|
||||
///
|
||||
/// [`null`]: ../../std/ptr/fn.null.html
|
||||
#[unstable(feature = "weak_into_raw", issue = "60728")]
|
||||
pub fn as_raw(this: &Self) -> *const T {
|
||||
match this.inner() {
|
||||
None => ptr::null(),
|
||||
Some(inner) => {
|
||||
let offset = data_offset_sized::<T>();
|
||||
let ptr = inner as *const RcBox<T>;
|
||||
// Note: while the pointer we create may already point to a dropped value, the
// allocation itself still lives (it must, to hold the weak count as long as we are alive).
// Therefore the offset computation stays within the allocation and is fine to do.
|
||||
let ptr = unsafe { (ptr as *const u8).offset(offset) };
|
||||
ptr as *const T
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Consumes the `Weak<T>` and turns it into a raw pointer.
|
||||
///
|
||||
/// This converts the weak pointer into a raw pointer, preserving the original weak count. It
|
||||
/// can be turned back into the `Weak<T>` with [`from_raw`].
|
||||
///
|
||||
/// The same restrictions of accessing the target of the pointer as with
|
||||
/// [`as_raw`] apply.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(weak_into_raw)]
|
||||
///
|
||||
/// use std::rc::{Rc, Weak};
|
||||
///
|
||||
/// let strong = Rc::new(42);
|
||||
/// let weak = Rc::downgrade(&strong);
|
||||
/// let raw = Weak::into_raw(weak);
|
||||
///
|
||||
/// assert_eq!(1, Rc::weak_count(&strong));
|
||||
/// assert_eq!(42, unsafe { *raw });
|
||||
///
|
||||
/// drop(unsafe { Weak::from_raw(raw) });
|
||||
/// assert_eq!(0, Rc::weak_count(&strong));
|
||||
/// ```
|
||||
///
|
||||
/// [`from_raw`]: struct.Weak.html#method.from_raw
|
||||
/// [`as_raw`]: struct.Weak.html#method.as_raw
|
||||
#[unstable(feature = "weak_into_raw", issue = "60728")]
|
||||
pub fn into_raw(this: Self) -> *const T {
|
||||
let result = Self::as_raw(&this);
|
||||
mem::forget(this);
|
||||
result
|
||||
}
|
||||
|
||||
/// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
|
||||
///
|
||||
/// This can be used to safely get a strong reference (by calling [`upgrade`]
|
||||
/// later) or to deallocate the weak count by dropping the `Weak<T>`.
|
||||
///
|
||||
/// It takes ownership of one weak count. In case a [`null`] is passed, a dangling [`Weak`] is
|
||||
/// returned.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// The pointer must represent one valid weak count. In other words, it must point to `T` which
|
||||
/// is or *was* managed by an [`Rc`] and the weak count of that [`Rc`] must not have reached
|
||||
/// 0. It is allowed for the strong count to be 0.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(weak_into_raw)]
|
||||
///
|
||||
/// use std::rc::{Rc, Weak};
|
||||
///
|
||||
/// let strong = Rc::new(42);
|
||||
///
|
||||
/// let raw_1 = Weak::into_raw(Rc::downgrade(&strong));
|
||||
/// let raw_2 = Weak::into_raw(Rc::downgrade(&strong));
|
||||
///
|
||||
/// assert_eq!(2, Rc::weak_count(&strong));
|
||||
///
|
||||
/// assert_eq!(42, *Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
|
||||
/// assert_eq!(1, Rc::weak_count(&strong));
|
||||
///
|
||||
/// drop(strong);
|
||||
///
|
||||
/// // Decrement the last weak count.
|
||||
/// assert!(Weak::upgrade(&unsafe { Weak::from_raw(raw_2) }).is_none());
|
||||
/// ```
|
||||
///
|
||||
/// [`null`]: ../../std/ptr/fn.null.html
|
||||
/// [`into_raw`]: struct.Weak.html#method.into_raw
|
||||
/// [`upgrade`]: struct.Weak.html#method.upgrade
|
||||
/// [`Rc`]: struct.Rc.html
|
||||
/// [`Weak`]: struct.Weak.html
|
||||
#[unstable(feature = "weak_into_raw", issue = "60728")]
|
||||
pub unsafe fn from_raw(ptr: *const T) -> Self {
|
||||
if ptr.is_null() {
|
||||
Self::new()
|
||||
} else {
|
||||
// See Rc::from_raw for details
|
||||
let offset = data_offset(ptr);
|
||||
let fake_ptr = ptr as *mut RcBox<T>;
|
||||
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
|
||||
Weak {
|
||||
ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
|
||||
@ -2007,3 +2140,20 @@ impl<T: ?Sized> AsRef<T> for Rc<T> {
|
||||
|
||||
#[stable(feature = "pin", since = "1.33.0")]
|
||||
impl<T: ?Sized> Unpin for Rc<T> { }
|
||||
|
||||
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
|
||||
// Align the unsized value to the end of the RcBox.
|
||||
// Because it is ?Sized, it will always be the last field in memory.
|
||||
let align = align_of_val(&*ptr);
|
||||
let layout = Layout::new::<RcBox<()>>();
|
||||
(layout.size() + layout.padding_needed_for(align)) as isize
|
||||
}
|
||||
|
||||
/// Computes the offset of the data field within RcBox.
|
||||
///
|
||||
/// Unlike [`data_offset`], this doesn't need the pointer, but it works only on `T: Sized`.
|
||||
fn data_offset_sized<T>() -> isize {
|
||||
let align = align_of::<T>();
|
||||
let layout = Layout::new::<RcBox<()>>();
|
||||
(layout.size() + layout.padding_needed_for(align)) as isize
|
||||
}
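As a rough standalone check of the arithmetic above (the two-`usize` header is an assumption standing in for `RcBox<()>`, and the helper name is hypothetical):

```rust
use std::alloc::Layout;
use std::mem::align_of;

// Hypothetical demo: the payload starts after the reference-count header,
// rounded up to the payload's alignment.
fn demo_data_offset<T>() -> usize {
    let header = Layout::new::<(usize, usize)>(); // stand-in for RcBox<()>
    let align = align_of::<T>();
    let padding = (align - header.size() % align) % align;
    header.size() + padding
}

fn main() {
    // On a 64-bit target the header is 16 bytes, so small payloads start at
    // offset 16 and more strongly aligned payloads start later.
    println!("u8 payload offset:   {}", demo_data_offset::<u8>());
    println!("u128 payload offset: {}", demo_data_offset::<u128>());
}
```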
|
||||
|
@ -137,17 +137,16 @@ pub use hack::to_vec;
|
||||
// `core::slice::SliceExt` - we need to supply these functions for the
|
||||
// `test_permutations` test
|
||||
mod hack {
|
||||
use core::mem;
|
||||
|
||||
use crate::boxed::Box;
|
||||
use crate::vec::Vec;
|
||||
#[cfg(test)]
|
||||
use crate::string::ToString;
|
||||
|
||||
pub fn into_vec<T>(mut b: Box<[T]>) -> Vec<T> {
|
||||
pub fn into_vec<T>(b: Box<[T]>) -> Vec<T> {
|
||||
unsafe {
|
||||
let xs = Vec::from_raw_parts(b.as_mut_ptr(), b.len(), b.len());
|
||||
mem::forget(b);
|
||||
let len = b.len();
|
||||
let b = Box::into_raw(b);
|
||||
let xs = Vec::from_raw_parts(b as *mut T, len, len);
|
||||
xs
|
||||
}
|
||||
}
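A minimal standalone sketch of the same `Box<[T]>` to `Vec<T>` conversion the rewritten helper performs (the free function name is illustrative):

```rust
fn boxed_slice_into_vec<T>(b: Box<[T]>) -> Vec<T> {
    let len = b.len();
    // Take ownership of the allocation as a raw pointer; nothing touches `b` afterwards.
    let ptr = Box::into_raw(b) as *mut T;
    // Safety: the allocation came from a Box<[T]>, so pointer, length and
    // capacity (== length) describe exactly the memory Vec will manage.
    unsafe { Vec::from_raw_parts(ptr, len, len) }
}

fn main() {
    let b: Box<[i32]> = vec![1, 2, 3].into_boxed_slice();
    assert_eq!(boxed_slice_into_vec(b), vec![1, 2, 3]);
}
```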
|
||||
|
@ -13,7 +13,7 @@ use core::borrow;
|
||||
use core::fmt;
|
||||
use core::cmp::{self, Ordering};
|
||||
use core::intrinsics::abort;
|
||||
use core::mem::{self, align_of_val, size_of_val};
|
||||
use core::mem::{self, align_of, align_of_val, size_of_val};
|
||||
use core::ops::{Deref, Receiver, CoerceUnsized, DispatchFromDyn};
|
||||
use core::pin::Pin;
|
||||
use core::ptr::{self, NonNull};
|
||||
@ -397,11 +397,7 @@ impl<T: ?Sized> Arc<T> {
|
||||
/// ```
|
||||
#[stable(feature = "rc_raw", since = "1.17.0")]
|
||||
pub unsafe fn from_raw(ptr: *const T) -> Self {
|
||||
// Align the unsized value to the end of the ArcInner.
|
||||
// Because it is ?Sized, it will always be the last field in memory.
|
||||
let align = align_of_val(&*ptr);
|
||||
let layout = Layout::new::<ArcInner<()>>();
|
||||
let offset = (layout.size() + layout.padding_needed_for(align)) as isize;
|
||||
let offset = data_offset(ptr);
|
||||
|
||||
// Reverse the offset to find the original ArcInner.
|
||||
let fake_ptr = ptr as *mut ArcInner<T>;
|
||||
@ -1071,6 +1067,144 @@ impl<T> Weak<T> {
|
||||
ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
|
||||
///
|
||||
/// It is up to the caller to ensure that the object is still alive when accessing it through
|
||||
/// the pointer.
|
||||
///
|
||||
/// The pointer may be [`null`] or be dangling in case the object has already been destroyed.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(weak_into_raw)]
|
||||
///
|
||||
/// use std::sync::{Arc, Weak};
|
||||
/// use std::ptr;
|
||||
///
|
||||
/// let strong = Arc::new(42);
|
||||
/// let weak = Arc::downgrade(&strong);
|
||||
/// // Both point to the same object
|
||||
/// assert!(ptr::eq(&*strong, Weak::as_raw(&weak)));
|
||||
/// // The strong here keeps it alive, so we can still access the object.
|
||||
/// assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
|
||||
///
|
||||
/// drop(strong);
|
||||
/// // But not any more. We can do Weak::as_raw(&weak), but accessing the pointer would lead to
|
||||
/// // undefined behavior.
|
||||
/// // assert_eq!(42, unsafe { *Weak::as_raw(&weak) });
|
||||
/// ```
|
||||
///
|
||||
/// [`null`]: ../../std/ptr/fn.null.html
|
||||
#[unstable(feature = "weak_into_raw", issue = "60728")]
|
||||
pub fn as_raw(this: &Self) -> *const T {
|
||||
match this.inner() {
|
||||
None => ptr::null(),
|
||||
Some(inner) => {
|
||||
let offset = data_offset_sized::<T>();
|
||||
let ptr = inner as *const ArcInner<T>;
|
||||
// Note: while the pointer we create may already point to a dropped value, the
// allocation itself still lives (it must, to hold the weak count as long as we are alive).
// Therefore the offset computation stays within the allocation and is fine to do.
|
||||
let ptr = unsafe { (ptr as *const u8).offset(offset) };
|
||||
ptr as *const T
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Consumes the `Weak<T>` and turns it into a raw pointer.
|
||||
///
|
||||
/// This converts the weak pointer into a raw pointer, preserving the original weak count. It
|
||||
/// can be turned back into the `Weak<T>` with [`from_raw`].
|
||||
///
|
||||
/// The same restrictions of accessing the target of the pointer as with
|
||||
/// [`as_raw`] apply.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(weak_into_raw)]
|
||||
///
|
||||
/// use std::sync::{Arc, Weak};
|
||||
///
|
||||
/// let strong = Arc::new(42);
|
||||
/// let weak = Arc::downgrade(&strong);
|
||||
/// let raw = Weak::into_raw(weak);
|
||||
///
|
||||
/// assert_eq!(1, Arc::weak_count(&strong));
|
||||
/// assert_eq!(42, unsafe { *raw });
|
||||
///
|
||||
/// drop(unsafe { Weak::from_raw(raw) });
|
||||
/// assert_eq!(0, Arc::weak_count(&strong));
|
||||
/// ```
|
||||
///
|
||||
/// [`from_raw`]: struct.Weak.html#method.from_raw
|
||||
/// [`as_raw`]: struct.Weak.html#method.as_raw
|
||||
#[unstable(feature = "weak_into_raw", issue = "60728")]
|
||||
pub fn into_raw(this: Self) -> *const T {
|
||||
let result = Self::as_raw(&this);
|
||||
mem::forget(this);
|
||||
result
|
||||
}
|
||||
|
||||
/// Converts a raw pointer previously created by [`into_raw`] back into
|
||||
/// `Weak<T>`.
|
||||
///
|
||||
/// This can be used to safely get a strong reference (by calling [`upgrade`]
|
||||
/// later) or to deallocate the weak count by dropping the `Weak<T>`.
|
||||
///
|
||||
/// It takes ownership of one weak count. In case a [`null`] is passed, a dangling [`Weak`] is
|
||||
/// returned.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// The pointer must represent one valid weak count. In other words, it must point to `T` which
|
||||
/// is or *was* managed by an [`Arc`] and the weak count of that [`Arc`] must not have reached
|
||||
/// 0. It is allowed for the strong count to be 0.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(weak_into_raw)]
|
||||
///
|
||||
/// use std::sync::{Arc, Weak};
|
||||
///
|
||||
/// let strong = Arc::new(42);
|
||||
///
|
||||
/// let raw_1 = Weak::into_raw(Arc::downgrade(&strong));
|
||||
/// let raw_2 = Weak::into_raw(Arc::downgrade(&strong));
|
||||
///
|
||||
/// assert_eq!(2, Arc::weak_count(&strong));
|
||||
///
|
||||
/// assert_eq!(42, *Weak::upgrade(&unsafe { Weak::from_raw(raw_1) }).unwrap());
|
||||
/// assert_eq!(1, Arc::weak_count(&strong));
|
||||
///
|
||||
/// drop(strong);
|
||||
///
|
||||
/// // Decrement the last weak count.
|
||||
/// assert!(Weak::upgrade(&unsafe { Weak::from_raw(raw_2) }).is_none());
|
||||
/// ```
|
||||
///
|
||||
/// [`null`]: ../../std/ptr/fn.null.html
|
||||
/// [`into_raw`]: struct.Weak.html#method.into_raw
|
||||
/// [`upgrade`]: struct.Weak.html#method.upgrade
|
||||
/// [`Weak`]: struct.Weak.html
|
||||
/// [`Arc`]: struct.Arc.html
|
||||
#[unstable(feature = "weak_into_raw", issue = "60728")]
|
||||
pub unsafe fn from_raw(ptr: *const T) -> Self {
|
||||
if ptr.is_null() {
|
||||
Self::new()
|
||||
} else {
|
||||
// See Arc::from_raw for details
|
||||
let offset = data_offset(ptr);
|
||||
let fake_ptr = ptr as *mut ArcInner<T>;
|
||||
let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
|
||||
Weak {
|
||||
ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw"),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: ?Sized> Weak<T> {
|
||||
@ -2150,3 +2284,21 @@ impl<T: ?Sized> AsRef<T> for Arc<T> {
|
||||
|
||||
#[stable(feature = "pin", since = "1.33.0")]
|
||||
impl<T: ?Sized> Unpin for Arc<T> { }
|
||||
|
||||
/// Computes the offset of the data field within ArcInner.
|
||||
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
|
||||
// Align the unsized value to the end of the ArcInner.
|
||||
// Because it is ?Sized, it will always be the last field in memory.
|
||||
let align = align_of_val(&*ptr);
|
||||
let layout = Layout::new::<ArcInner<()>>();
|
||||
(layout.size() + layout.padding_needed_for(align)) as isize
|
||||
}
|
||||
|
||||
/// Computes the offset of the data field within ArcInner.
|
||||
///
|
||||
/// Unlike [`data_offset`], this doesn't need the pointer, but it works only on `T: Sized`.
|
||||
fn data_offset_sized<T>() -> isize {
|
||||
let align = align_of::<T>();
|
||||
let layout = Layout::new::<ArcInner<()>>();
|
||||
(layout.size() + layout.padding_needed_for(align)) as isize
|
||||
}
|
||||
|
@ -967,7 +967,6 @@ impl<T: ?Sized> RefCell<T> {
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![feature(borrow_state)]
|
||||
/// use std::cell::RefCell;
|
||||
///
|
||||
/// let c = RefCell::new(5);
|
||||
@ -982,7 +981,7 @@ impl<T: ?Sized> RefCell<T> {
|
||||
/// assert!(unsafe { c.try_borrow_unguarded() }.is_ok());
|
||||
/// }
|
||||
/// ```
|
||||
#[unstable(feature = "borrow_state", issue = "27733")]
|
||||
#[stable(feature = "borrow_state", since = "1.37.0")]
|
||||
#[inline]
|
||||
pub unsafe fn try_borrow_unguarded(&self) -> Result<&T, BorrowError> {
|
||||
if !is_writing(self.borrow.get()) {
|
||||
|
@ -34,6 +34,13 @@ pub trait Step: Clone + PartialOrd + Sized {
|
||||
|
||||
/// Adds a `usize`, returning `None` on overflow.
|
||||
fn add_usize(&self, n: usize) -> Option<Self>;
|
||||
|
||||
/// Subtracts a `usize`, returning `None` on underflow.
|
||||
fn sub_usize(&self, n: usize) -> Option<Self> {
|
||||
// this default implementation makes the addition of `sub_usize` a non-breaking change
|
||||
let _ = n;
|
||||
unimplemented!()
|
||||
}
|
||||
}
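For orientation, a hedged standalone sketch of the contract `sub_usize` is expected to satisfy for an unsigned type (it mirrors the unsigned macro arm further below; the free function is illustrative):

```rust
use std::convert::TryFrom;

// Illustrative counterpart of the unsigned macro arm: convert `n` into the
// step type if it fits, then use checked subtraction.
fn sub_usize_u8(x: u8, n: usize) -> Option<u8> {
    match u8::try_from(n) {
        Ok(n_as_u8) => x.checked_sub(n_as_u8),
        Err(_) => None, // `n` doesn't even fit in u8, so the result must underflow
    }
}

fn main() {
    assert_eq!(sub_usize_u8(10, 3), Some(7));
    assert_eq!(sub_usize_u8(10, 11), None);    // underflow
    assert_eq!(sub_usize_u8(10, 1_000), None); // out of range for u8
}
```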
|
||||
|
||||
// These are still macro-generated because the integer literals resolve to different types.
|
||||
@ -85,6 +92,15 @@ macro_rules! step_impl_unsigned {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[allow(unreachable_patterns)]
|
||||
fn sub_usize(&self, n: usize) -> Option<Self> {
|
||||
match <$t>::try_from(n) {
|
||||
Ok(n_as_t) => self.checked_sub(n_as_t),
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
step_identical_methods!();
|
||||
}
|
||||
)*)
|
||||
@ -125,6 +141,25 @@ macro_rules! step_impl_signed {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[allow(unreachable_patterns)]
|
||||
fn sub_usize(&self, n: usize) -> Option<Self> {
|
||||
match <$unsigned>::try_from(n) {
|
||||
Ok(n_as_unsigned) => {
|
||||
// Wrapping in unsigned space handles cases like
|
||||
// `80_i8.sub_usize(200) == Some(-120_i8)`,
|
||||
// even though 200_usize is out of range for i8.
|
||||
let wrapped = (*self as $unsigned).wrapping_sub(n_as_unsigned) as $t;
|
||||
if wrapped <= *self {
|
||||
Some(wrapped)
|
||||
} else {
|
||||
None // Subtraction underflowed
|
||||
}
|
||||
}
|
||||
Err(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
step_identical_methods!();
|
||||
}
|
||||
)*)
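The signed arm's wrap-then-compare trick can be checked in isolation; a hedged sketch for `i8` with the same arithmetic as above (the free function name is illustrative):

```rust
use std::convert::TryFrom;

fn sub_usize_i8(x: i8, n: usize) -> Option<i8> {
    match u8::try_from(n) {
        Ok(n_as_u8) => {
            // Subtract in unsigned space, then reinterpret the bits as i8.
            let wrapped = (x as u8).wrapping_sub(n_as_u8) as i8;
            // Subtracting a non-negative amount can never make the value larger,
            // so a larger result means the subtraction underflowed past i8::MIN.
            if wrapped <= x { Some(wrapped) } else { None }
        }
        Err(_) => None, // n >= 256 always underflows an i8
    }
}

fn main() {
    assert_eq!(sub_usize_i8(80, 200), Some(-120)); // the case from the comment
    assert_eq!(sub_usize_i8(-100, 50), None);      // would go below i8::MIN
}
```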
|
||||
|
@ -223,3 +223,113 @@ impl<T, U, E> Product<Result<U, E>> for Result<T, E>
|
||||
ResultShunt::process(iter, |i| i.product())
|
||||
}
|
||||
}
|
||||
|
||||
/// An iterator adapter that produces output as long as the underlying
|
||||
/// iterator produces `Option::Some` values.
|
||||
struct OptionShunt<I> {
|
||||
iter: I,
|
||||
exited_early: bool,
|
||||
}
|
||||
|
||||
impl<I, T> OptionShunt<I>
|
||||
where
|
||||
I: Iterator<Item = Option<T>>,
|
||||
{
|
||||
/// Processes the given iterator as if it yielded a `T` instead of an
/// `Option<T>`. Any `None` value stops the inner iterator and makes the
/// overall result a `None`.
|
||||
pub fn process<F, U>(iter: I, mut f: F) -> Option<U>
|
||||
where
|
||||
F: FnMut(&mut Self) -> U,
|
||||
{
|
||||
let mut shunt = OptionShunt::new(iter);
|
||||
let value = f(shunt.by_ref());
|
||||
shunt.reconstruct(value)
|
||||
}
|
||||
|
||||
fn new(iter: I) -> Self {
|
||||
OptionShunt {
|
||||
iter,
|
||||
exited_early: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Consumes the adapter and rebuilds an `Option` value.
|
||||
fn reconstruct<U>(self, val: U) -> Option<U> {
|
||||
if self.exited_early {
|
||||
None
|
||||
} else {
|
||||
Some(val)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<I, T> Iterator for OptionShunt<I>
|
||||
where
|
||||
I: Iterator<Item = Option<T>>,
|
||||
{
|
||||
type Item = T;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self.iter.next() {
|
||||
Some(Some(v)) => Some(v),
|
||||
Some(None) => {
|
||||
self.exited_early = true;
|
||||
None
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
if self.exited_early {
|
||||
(0, Some(0))
|
||||
} else {
|
||||
let (_, upper) = self.iter.size_hint();
|
||||
(0, upper)
|
||||
}
|
||||
}
|
||||
}
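What the shunt buys, seen from the outside: with the `Sum` impl added below, the first `None` stops iteration and the whole fold becomes `None`. A hedged sketch using only public iterator APIs:

```rust
fn main() {
    let mut consumed = 0;
    let items = vec![Some(1), Some(2), None, Some(4)];
    let total: Option<i32> = items
        .into_iter()
        .inspect(|_| consumed += 1) // count how many elements get pulled
        .sum();
    assert_eq!(total, None);
    assert_eq!(consumed, 3); // iteration stopped right after the `None`
}
```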
|
||||
|
||||
#[stable(feature = "iter_arith_traits_option", since = "1.37.0")]
|
||||
impl<T, U> Sum<Option<U>> for Option<T>
|
||||
where
|
||||
T: Sum<U>,
|
||||
{
|
||||
/// Takes each element in the `Iterator`: if it is a `None`, no further
|
||||
/// elements are taken, and the `None` is returned. Should no `None` occur,
|
||||
/// the sum of all elements is returned.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// This sums up the position of the character 'a' in a vector of strings;
/// if a word does not contain the character 'a', the operation returns `None`:
|
||||
///
|
||||
/// ```
|
||||
/// let words = vec!["have", "a", "great", "day"];
|
||||
/// let total: Option<usize> = words.iter().map(|w| w.find('a')).sum();
|
||||
/// assert_eq!(total, Some(5));
|
||||
/// ```
|
||||
fn sum<I>(iter: I) -> Option<T>
|
||||
where
|
||||
I: Iterator<Item = Option<U>>,
|
||||
{
|
||||
OptionShunt::process(iter, |i| i.sum())
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "iter_arith_traits_option", since = "1.37.0")]
|
||||
impl<T, U> Product<Option<U>> for Option<T>
|
||||
where
|
||||
T: Product<U>,
|
||||
{
|
||||
/// Takes each element in the `Iterator`: if it is a `None`, no further
|
||||
/// elements are taken, and the `None` is returned. Should no `None` occur,
|
||||
/// the product of all elements is returned.
|
||||
fn product<I>(iter: I) -> Option<T>
|
||||
where
|
||||
I: Iterator<Item = Option<U>>,
|
||||
{
|
||||
OptionShunt::process(iter, |i| i.product())
|
||||
}
|
||||
}
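Unlike the `Sum` impl, the `Product` impl above ships without a doctest; a hedged example of the analogous behavior:

```rust
fn main() {
    // All elements parse, so the product is Some(2 * 3 * 4).
    let product: Option<u32> = ["2", "3", "4"].iter().map(|s| s.parse().ok()).product();
    assert_eq!(product, Some(24));

    // The unparsable element short-circuits the whole computation to None.
    let product: Option<u32> = ["2", "x", "4"].iter().map(|s| s.parse().ok()).product();
    assert_eq!(product, None);
}
```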
|
||||
|
src/libcore/mem.rs: 1406 lines changed (file diff suppressed because it is too large)
src/libcore/mem/manually_drop.rs: new file, 146 lines
@ -0,0 +1,146 @@
|
||||
use crate::ptr;
|
||||
use crate::ops::{Deref, DerefMut};
|
||||
|
||||
/// A wrapper to inhibit the compiler from automatically calling `T`’s destructor.
|
||||
///
|
||||
/// This wrapper is 0-cost.
|
||||
///
|
||||
/// `ManuallyDrop<T>` is subject to the same layout optimizations as `T`.
|
||||
/// As a consequence, it has *no effect* on the assumptions that the compiler makes
|
||||
/// about all values being initialized at their type. In particular, initializing
|
||||
/// a `ManuallyDrop<&mut T>` with [`mem::zeroed`] is undefined behavior.
|
||||
/// If you need to handle uninitialized data, use [`MaybeUninit<T>`] instead.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// This wrapper helps with explicitly documenting the drop order dependencies between fields of
|
||||
/// the type:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::ManuallyDrop;
|
||||
/// struct Peach;
|
||||
/// struct Banana;
|
||||
/// struct Melon;
|
||||
/// struct FruitBox {
|
||||
/// // Immediately clear there’s something non-trivial going on with these fields.
|
||||
/// peach: ManuallyDrop<Peach>,
|
||||
/// melon: Melon, // Field that’s independent of the other two.
|
||||
/// banana: ManuallyDrop<Banana>,
|
||||
/// }
|
||||
///
|
||||
/// impl Drop for FruitBox {
|
||||
/// fn drop(&mut self) {
|
||||
/// unsafe {
|
||||
/// // The order in which the field destructors run is specified explicitly, in the
/// // intuitive location: the destructor of the structure containing the fields.
/// // Moreover, the fields can now be reordered within the struct however desired.
|
||||
/// ManuallyDrop::drop(&mut self.peach);
|
||||
/// ManuallyDrop::drop(&mut self.banana);
|
||||
/// }
|
||||
/// // After destructor for `FruitBox` runs (this function), the destructor for Melon gets
|
||||
/// // invoked in the usual manner, as it is not wrapped in `ManuallyDrop`.
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// [`mem::zeroed`]: fn.zeroed.html
|
||||
/// [`MaybeUninit<T>`]: union.MaybeUninit.html
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
#[lang = "manually_drop"]
|
||||
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
#[repr(transparent)]
|
||||
pub struct ManuallyDrop<T: ?Sized> {
|
||||
value: T,
|
||||
}
|
||||
|
||||
impl<T> ManuallyDrop<T> {
|
||||
/// Wrap a value to be manually dropped.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::ManuallyDrop;
|
||||
/// ManuallyDrop::new(Box::new(()));
|
||||
/// ```
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
#[inline(always)]
|
||||
pub const fn new(value: T) -> ManuallyDrop<T> {
|
||||
ManuallyDrop { value }
|
||||
}
|
||||
|
||||
/// Extracts the value from the `ManuallyDrop` container.
|
||||
///
|
||||
/// This allows the value to be dropped again.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::ManuallyDrop;
|
||||
/// let x = ManuallyDrop::new(Box::new(()));
|
||||
/// let _: Box<()> = ManuallyDrop::into_inner(x); // This drops the `Box`.
|
||||
/// ```
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
#[inline(always)]
|
||||
pub const fn into_inner(slot: ManuallyDrop<T>) -> T {
|
||||
slot.value
|
||||
}
|
||||
|
||||
/// Takes the contained value out.
|
||||
///
|
||||
/// This method is primarily intended for moving out values in drop.
|
||||
/// Instead of using [`ManuallyDrop::drop`] to manually drop the value,
|
||||
/// you can use this method to take the value and use it however desired.
|
||||
/// `Drop` will be invoked on the returned value following normal end-of-scope rules.
|
||||
///
|
||||
/// If you have ownership of the container, you can use [`ManuallyDrop::into_inner`] instead.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// This function semantically moves out the contained value without preventing further usage.
|
||||
/// It is up to the user of this method to ensure that this container is not used again.
|
||||
///
|
||||
/// [`ManuallyDrop::drop`]: #method.drop
|
||||
/// [`ManuallyDrop::into_inner`]: #method.into_inner
|
||||
#[must_use = "if you don't need the value, you can use `ManuallyDrop::drop` instead"]
|
||||
#[unstable(feature = "manually_drop_take", issue = "55422")]
|
||||
#[inline]
|
||||
pub unsafe fn take(slot: &mut ManuallyDrop<T>) -> T {
|
||||
ManuallyDrop::into_inner(ptr::read(slot))
|
||||
}
|
||||
}
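`ManuallyDrop::take` has no doctest in this diff; a hedged sketch of the intended pattern (moving a field out during `Drop`; the type and field names are made up, and the feature gate reflects its unstable status at the time of this change):

```rust
#![feature(manually_drop_take)] // unstable at the time of this diff
use std::mem::ManuallyDrop;

struct SendOnDrop {
    message: ManuallyDrop<String>,
}

impl Drop for SendOnDrop {
    fn drop(&mut self) {
        // Move the String out instead of dropping it in place. After this call the
        // field is logically uninitialized and must not be used again.
        let message = unsafe { ManuallyDrop::take(&mut self.message) };
        println!("sending: {}", message); // `message` is dropped normally here
    }
}

fn main() {
    let _guard = SendOnDrop { message: ManuallyDrop::new(String::from("goodbye")) };
}
```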
|
||||
|
||||
impl<T: ?Sized> ManuallyDrop<T> {
|
||||
/// Manually drops the contained value.
|
||||
///
|
||||
/// If you have ownership of the value, you can use [`ManuallyDrop::into_inner`] instead.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// This function runs the destructor of the contained value and thus the wrapped value
|
||||
/// now represents uninitialized data. It is up to the user of this method to ensure the
|
||||
/// uninitialized data is not actually used.
|
||||
///
|
||||
/// [`ManuallyDrop::into_inner`]: #method.into_inner
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
#[inline]
|
||||
pub unsafe fn drop(slot: &mut ManuallyDrop<T>) {
|
||||
ptr::drop_in_place(&mut slot.value)
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
impl<T: ?Sized> Deref for ManuallyDrop<T> {
|
||||
type Target = T;
|
||||
#[inline(always)]
|
||||
fn deref(&self) -> &T {
|
||||
&self.value
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
impl<T: ?Sized> DerefMut for ManuallyDrop<T> {
|
||||
#[inline(always)]
|
||||
fn deref_mut(&mut self) -> &mut T {
|
||||
&mut self.value
|
||||
}
|
||||
}
|
src/libcore/mem/maybe_uninit.rs: new file, 519 lines
@ -0,0 +1,519 @@
|
||||
use crate::intrinsics;
|
||||
use crate::mem::ManuallyDrop;
|
||||
|
||||
/// A wrapper type to construct uninitialized instances of `T`.
|
||||
///
|
||||
/// # Initialization invariant
|
||||
///
|
||||
/// The compiler, in general, assumes that variables are properly initialized
|
||||
/// at their respective type. For example, a variable of reference type must
|
||||
/// be aligned and non-NULL. This is an invariant that must *always* be upheld,
|
||||
/// even in unsafe code. As a consequence, zero-initializing a variable of reference
|
||||
/// type causes instantaneous [undefined behavior][ub], no matter whether that reference
|
||||
/// ever gets used to access memory:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::{self, MaybeUninit};
|
||||
///
|
||||
/// let x: &i32 = unsafe { mem::zeroed() }; // undefined behavior!
|
||||
/// // The equivalent code with `MaybeUninit<&i32>`:
|
||||
/// let x: &i32 = unsafe { MaybeUninit::zeroed().assume_init() }; // undefined behavior!
|
||||
/// ```
|
||||
///
|
||||
/// This is exploited by the compiler for various optimizations, such as eliding
|
||||
/// run-time checks and optimizing `enum` layout.
|
||||
///
|
||||
/// Similarly, entirely uninitialized memory may have any content, while a `bool` must
|
||||
/// always be `true` or `false`. Hence, creating an uninitialized `bool` is undefined behavior:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::{self, MaybeUninit};
|
||||
///
|
||||
/// let b: bool = unsafe { mem::uninitialized() }; // undefined behavior!
|
||||
/// // The equivalent code with `MaybeUninit<bool>`:
|
||||
/// let b: bool = unsafe { MaybeUninit::uninit().assume_init() }; // undefined behavior!
|
||||
/// ```
|
||||
///
|
||||
/// Moreover, uninitialized memory is special in that the compiler knows that
|
||||
/// it does not have a fixed value. This makes it undefined behavior to have
|
||||
/// uninitialized data in a variable even if that variable has an integer type,
|
||||
/// which otherwise can hold any *fixed* bit pattern:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::{self, MaybeUninit};
|
||||
///
|
||||
/// let x: i32 = unsafe { mem::uninitialized() }; // undefined behavior!
|
||||
/// // The equivalent code with `MaybeUninit<i32>`:
|
||||
/// let x: i32 = unsafe { MaybeUninit::uninit().assume_init() }; // undefined behavior!
|
||||
/// ```
|
||||
/// (Notice that the rules around uninitialized integers are not finalized yet, but
|
||||
/// until they are, it is advisable to avoid them.)
|
||||
///
|
||||
/// On top of that, remember that most types have additional invariants beyond merely
|
||||
/// being considered initialized at the type level. For example, a `1`-initialized [`Vec<T>`]
|
||||
/// is considered initialized because the only requirement the compiler knows about it
|
||||
/// is that the data pointer must be non-null. Creating such a `Vec<T>` does not cause
|
||||
/// *immediate* undefined behavior, but will cause undefined behavior with most
|
||||
/// safe operations (including dropping it).
|
||||
///
|
||||
/// [`Vec<T>`]: ../../std/vec/struct.Vec.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// `MaybeUninit<T>` serves to enable unsafe code to deal with uninitialized data.
|
||||
/// It is a signal to the compiler indicating that the data here might *not*
|
||||
/// be initialized:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// // Create an explicitly uninitialized reference. The compiler knows that data inside
|
||||
/// // a `MaybeUninit<T>` may be invalid, and hence this is not UB:
|
||||
/// let mut x = MaybeUninit::<&i32>::uninit();
|
||||
/// // Set it to a valid value.
|
||||
/// unsafe { x.as_mut_ptr().write(&0); }
|
||||
/// // Extract the initialized data -- this is only allowed *after* properly
|
||||
/// // initializing `x`!
|
||||
/// let x = unsafe { x.assume_init() };
|
||||
/// ```
|
||||
///
|
||||
/// The compiler then knows to not make any incorrect assumptions or optimizations on this code.
|
||||
///
|
||||
/// You can think of `MaybeUninit<T>` as being a bit like `Option<T>` but without
|
||||
/// any of the run-time tracking and without any of the safety checks.
|
||||
///
|
||||
/// ## out-pointers
|
||||
///
|
||||
/// You can use `MaybeUninit<T>` to implement "out-pointers": instead of returning data
|
||||
/// from a function, pass it a pointer to some (uninitialized) memory to put the
|
||||
/// result into. This can be useful when it is important for the caller to control
|
||||
/// how the memory the result is stored in gets allocated, and you want to avoid
|
||||
/// unnecessary moves.
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// unsafe fn make_vec(out: *mut Vec<i32>) {
|
||||
/// // `write` does not drop the old contents, which is important.
|
||||
/// out.write(vec![1, 2, 3]);
|
||||
/// }
|
||||
///
|
||||
/// let mut v = MaybeUninit::uninit();
|
||||
/// unsafe { make_vec(v.as_mut_ptr()); }
|
||||
/// // Now we know `v` is initialized! This also makes sure the vector gets
|
||||
/// // properly dropped.
|
||||
/// let v = unsafe { v.assume_init() };
|
||||
/// assert_eq!(&v, &[1, 2, 3]);
|
||||
/// ```
|
||||
///
|
||||
/// ## Initializing an array element-by-element
|
||||
///
|
||||
/// `MaybeUninit<T>` can be used to initialize a large array element-by-element:
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem::{self, MaybeUninit};
|
||||
/// use std::ptr;
|
||||
///
|
||||
/// let data = {
|
||||
/// // Create an uninitialized array of `MaybeUninit`. The `assume_init` is
|
||||
/// // safe because the type we are claiming to have initialized here is a
|
||||
/// // bunch of `MaybeUninit`s, which do not require initialization.
|
||||
/// let mut data: [MaybeUninit<Vec<u32>>; 1000] = unsafe {
|
||||
/// MaybeUninit::uninit().assume_init()
|
||||
/// };
|
||||
///
|
||||
/// // Dropping a `MaybeUninit` does nothing, so if there is a panic during this loop,
|
||||
/// // we have a memory leak, but there is no memory safety issue.
|
||||
/// for elem in &mut data[..] {
|
||||
/// unsafe { ptr::write(elem.as_mut_ptr(), vec![42]); }
|
||||
/// }
|
||||
///
|
||||
/// // Everything is initialized. Transmute the array to the
|
||||
/// // initialized type.
|
||||
/// unsafe { mem::transmute::<_, [Vec<u32>; 1000]>(data) }
|
||||
/// };
|
||||
///
|
||||
/// assert_eq!(&data[0], &[42]);
|
||||
/// ```
|
||||
///
|
||||
/// You can also work with partially initialized arrays, which could
|
||||
/// be found in low-level data structures.
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem::MaybeUninit;
|
||||
/// use std::ptr;
|
||||
///
|
||||
/// // Create an uninitialized array of `MaybeUninit`. The `assume_init` is
|
||||
/// // safe because the type we are claiming to have initialized here is a
|
||||
/// // bunch of `MaybeUninit`s, which do not require initialization.
|
||||
/// let mut data: [MaybeUninit<String>; 1000] = unsafe { MaybeUninit::uninit().assume_init() };
|
||||
/// // Count the number of elements we have assigned.
|
||||
/// let mut data_len: usize = 0;
|
||||
///
|
||||
/// for elem in &mut data[0..500] {
|
||||
/// unsafe { ptr::write(elem.as_mut_ptr(), String::from("hello")); }
|
||||
/// data_len += 1;
|
||||
/// }
|
||||
///
|
||||
/// // For each item in the array, drop if we allocated it.
|
||||
/// for elem in &mut data[0..data_len] {
|
||||
/// unsafe { ptr::drop_in_place(elem.as_mut_ptr()); }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// ## Initializing a struct field-by-field
|
||||
///
|
||||
/// There is currently no supported way to create a raw pointer or reference
|
||||
/// to a field of a struct inside `MaybeUninit<Struct>`. That means it is not possible
|
||||
/// to create a struct by calling `MaybeUninit::uninit::<Struct>()` and then writing
|
||||
/// to its fields.
|
||||
///
|
||||
/// [ub]: ../../reference/behavior-considered-undefined.html
|
||||
///
|
||||
/// # Layout
|
||||
///
|
||||
/// `MaybeUninit<T>` is guaranteed to have the same size and alignment as `T`:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::{MaybeUninit, size_of, align_of};
|
||||
/// assert_eq!(size_of::<MaybeUninit<u64>>(), size_of::<u64>());
|
||||
/// assert_eq!(align_of::<MaybeUninit<u64>>(), align_of::<u64>());
|
||||
/// ```
|
||||
///
|
||||
/// However, remember that a type *containing* a `MaybeUninit<T>` does not necessarily have the
/// same layout; Rust does not in general guarantee that the fields of a `Foo<T>` have the same
/// order as those of a `Foo<U>`, even if `T` and `U` have the same size and alignment. Furthermore,
/// because any bit value is valid for a `MaybeUninit<T>`, the compiler can't apply
/// non-zero/niche-filling optimizations, potentially resulting in a larger size:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use std::mem::{MaybeUninit, size_of};
|
||||
/// assert_eq!(size_of::<Option<bool>>(), 1);
|
||||
/// assert_eq!(size_of::<Option<MaybeUninit<bool>>>(), 2);
|
||||
/// ```
|
||||
#[allow(missing_debug_implementations)]
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[derive(Copy)]
|
||||
pub union MaybeUninit<T> {
|
||||
uninit: (),
|
||||
value: ManuallyDrop<T>,
|
||||
}
|
||||
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
impl<T: Copy> Clone for MaybeUninit<T> {
|
||||
#[inline(always)]
|
||||
fn clone(&self) -> Self {
|
||||
// We do not call `T::clone()`: we cannot know whether `self` is initialized enough for that.
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> MaybeUninit<T> {
|
||||
/// Creates a new `MaybeUninit<T>` initialized with the given value.
|
||||
/// It is safe to call [`assume_init`] on the return value of this function.
|
||||
///
|
||||
/// Note that dropping a `MaybeUninit<T>` will never call `T`'s drop code.
|
||||
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
|
||||
///
|
||||
/// [`assume_init`]: #method.assume_init
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[inline(always)]
|
||||
pub const fn new(val: T) -> MaybeUninit<T> {
|
||||
MaybeUninit { value: ManuallyDrop::new(val) }
|
||||
}
|
||||
|
||||
/// Creates a new `MaybeUninit<T>` in an uninitialized state.
|
||||
///
|
||||
/// Note that dropping a `MaybeUninit<T>` will never call `T`'s drop code.
|
||||
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
|
||||
///
|
||||
/// See the [type-level documentation][type] for some examples.
|
||||
///
|
||||
/// [type]: union.MaybeUninit.html
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[inline(always)]
|
||||
pub const fn uninit() -> MaybeUninit<T> {
|
||||
MaybeUninit { uninit: () }
|
||||
}
|
||||
|
||||
/// Creates a new `MaybeUninit<T>` in an uninitialized state, with the memory being
|
||||
/// filled with `0` bytes. It depends on `T` whether that already makes for
|
||||
/// proper initialization. For example, `MaybeUninit<usize>::zeroed()` is initialized,
|
||||
/// but `MaybeUninit<&'static i32>::zeroed()` is not because references must not
|
||||
/// be null.
|
||||
///
|
||||
/// Note that dropping a `MaybeUninit<T>` will never call `T`'s drop code.
|
||||
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Correct usage of this function: initializing a struct with zero, where all
|
||||
/// fields of the struct can hold the bit-pattern 0 as a valid value.
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let x = MaybeUninit::<(u8, bool)>::zeroed();
|
||||
/// let x = unsafe { x.assume_init() };
|
||||
/// assert_eq!(x, (0, false));
|
||||
/// ```
|
||||
///
|
||||
/// *Incorrect* usage of this function: initializing a struct with zero, where some fields
|
||||
/// cannot hold 0 as a valid value.
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// enum NotZero { One = 1, Two = 2 };
|
||||
///
|
||||
/// let x = MaybeUninit::<(u8, NotZero)>::zeroed();
|
||||
/// let x = unsafe { x.assume_init() };
|
||||
/// // Inside a pair, we create a `NotZero` that does not have a valid discriminant.
|
||||
/// // This is undefined behavior.
|
||||
/// ```
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[inline]
|
||||
pub fn zeroed() -> MaybeUninit<T> {
|
||||
let mut u = MaybeUninit::<T>::uninit();
|
||||
unsafe {
|
||||
u.as_mut_ptr().write_bytes(0u8, 1);
|
||||
}
|
||||
u
|
||||
}
|
||||
|
||||
/// Sets the value of the `MaybeUninit<T>`. This overwrites any previous value
|
||||
/// without dropping it, so be careful not to use this twice unless you want to
|
||||
/// skip running the destructor. For your convenience, this also returns a mutable
|
||||
/// reference to the (now safely initialized) contents of `self`.
|
||||
#[unstable(feature = "maybe_uninit_extra", issue = "53491")]
|
||||
#[inline(always)]
|
||||
pub fn write(&mut self, val: T) -> &mut T {
|
||||
unsafe {
|
||||
self.value = ManuallyDrop::new(val);
|
||||
self.get_mut()
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets a pointer to the contained value. Reading from this pointer or turning it
|
||||
/// into a reference is undefined behavior unless the `MaybeUninit<T>` is initialized.
|
||||
/// Writing to memory that this pointer (non-transitively) points to is undefined behavior
|
||||
/// (except inside an `UnsafeCell<T>`).
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Correct usage of this method:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let mut x = MaybeUninit::<Vec<u32>>::uninit();
|
||||
/// unsafe { x.as_mut_ptr().write(vec![0,1,2]); }
|
||||
/// // Create a reference into the `MaybeUninit<T>`. This is okay because we initialized it.
|
||||
/// let x_vec = unsafe { &*x.as_ptr() };
|
||||
/// assert_eq!(x_vec.len(), 3);
|
||||
/// ```
|
||||
///
|
||||
/// *Incorrect* usage of this method:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let x = MaybeUninit::<Vec<u32>>::uninit();
|
||||
/// let x_vec = unsafe { &*x.as_ptr() };
|
||||
/// // We have created a reference to an uninitialized vector! This is undefined behavior.
|
||||
/// ```
|
||||
///
|
||||
/// (Notice that the rules around references to uninitialized data are not finalized yet, but
|
||||
/// until they are, it is advisable to avoid them.)
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[inline(always)]
|
||||
pub fn as_ptr(&self) -> *const T {
|
||||
unsafe { &*self.value as *const T }
|
||||
}
|
||||
|
||||
/// Gets a mutable pointer to the contained value. Reading from this pointer or turning it
|
||||
/// into a reference is undefined behavior unless the `MaybeUninit<T>` is initialized.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Correct usage of this method:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let mut x = MaybeUninit::<Vec<u32>>::uninit();
|
||||
/// unsafe { x.as_mut_ptr().write(vec![0,1,2]); }
|
||||
/// // Create a reference into the `MaybeUninit<Vec<u32>>`.
|
||||
/// // This is okay because we initialized it.
|
||||
/// let x_vec = unsafe { &mut *x.as_mut_ptr() };
|
||||
/// x_vec.push(3);
|
||||
/// assert_eq!(x_vec.len(), 4);
|
||||
/// ```
|
||||
///
|
||||
/// *Incorrect* usage of this method:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let mut x = MaybeUninit::<Vec<u32>>::uninit();
|
||||
/// let x_vec = unsafe { &mut *x.as_mut_ptr() };
|
||||
/// // We have created a reference to an uninitialized vector! This is undefined behavior.
|
||||
/// ```
|
||||
///
|
||||
/// (Notice that the rules around references to uninitialized data are not finalized yet, but
|
||||
/// until they are, it is advisable to avoid them.)
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[inline(always)]
|
||||
pub fn as_mut_ptr(&mut self) -> *mut T {
|
||||
unsafe { &mut *self.value as *mut T }
|
||||
}
|
||||
|
||||
/// Extracts the value from the `MaybeUninit<T>` container. This is a great way
|
||||
/// to ensure that the data will get dropped, because the resulting `T` is
|
||||
/// subject to the usual drop handling.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// It is up to the caller to guarantee that the `MaybeUninit<T>` really is in an initialized
|
||||
/// state. Calling this when the content is not yet fully initialized causes immediate undefined
|
||||
/// behavior. The [type-level documentation][inv] contains more information about
|
||||
/// this initialization invariant.
|
||||
///
|
||||
/// [inv]: #initialization-invariant
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Correct usage of this method:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let mut x = MaybeUninit::<bool>::uninit();
|
||||
/// unsafe { x.as_mut_ptr().write(true); }
|
||||
/// let x_init = unsafe { x.assume_init() };
|
||||
/// assert_eq!(x_init, true);
|
||||
/// ```
|
||||
///
|
||||
/// *Incorrect* usage of this method:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let x = MaybeUninit::<Vec<u32>>::uninit();
|
||||
/// let x_init = unsafe { x.assume_init() };
|
||||
/// // `x` had not been initialized yet, so this last line caused undefined behavior.
|
||||
/// ```
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
#[inline(always)]
|
||||
pub unsafe fn assume_init(self) -> T {
|
||||
intrinsics::panic_if_uninhabited::<T>();
|
||||
ManuallyDrop::into_inner(self.value)
|
||||
}
|
||||
|
||||
/// Reads the value from the `MaybeUninit<T>` container. The resulting `T` is subject
|
||||
/// to the usual drop handling.
|
||||
///
|
||||
/// Whenever possible, it is preferable to use [`assume_init`] instead, which
|
||||
/// prevents duplicating the content of the `MaybeUninit<T>`.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// It is up to the caller to guarantee that the `MaybeUninit<T>` really is in an initialized
|
||||
/// state. Calling this when the content is not yet fully initialized causes undefined
|
||||
/// behavior. The [type-level documentation][inv] contains more information about
|
||||
/// this initialization invariant.
|
||||
///
|
||||
/// Moreover, this leaves a copy of the same data behind in the `MaybeUninit<T>`. When using
|
||||
/// multiple copies of the data (by calling `read` multiple times, or first
|
||||
/// calling `read` and then [`assume_init`]), it is your responsibility
|
||||
/// to ensure that that data may indeed be duplicated.
|
||||
///
|
||||
/// [inv]: #initialization-invariant
|
||||
/// [`assume_init`]: #method.assume_init
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Correct usage of this method:
|
||||
///
|
||||
/// ```rust
|
||||
/// #![feature(maybe_uninit_extra)]
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let mut x = MaybeUninit::<u32>::uninit();
|
||||
/// x.write(13);
|
||||
/// let x1 = unsafe { x.read() };
|
||||
/// // `u32` is `Copy`, so we may read multiple times.
|
||||
/// let x2 = unsafe { x.read() };
|
||||
/// assert_eq!(x1, x2);
|
||||
///
|
||||
/// let mut x = MaybeUninit::<Option<Vec<u32>>>::uninit();
|
||||
/// x.write(None);
|
||||
/// let x1 = unsafe { x.read() };
|
||||
/// // Duplicating a `None` value is okay, so we may read multiple times.
|
||||
/// let x2 = unsafe { x.read() };
|
||||
/// assert_eq!(x1, x2);
|
||||
/// ```
|
||||
///
|
||||
/// *Incorrect* usage of this method:
|
||||
///
|
||||
/// ```rust,no_run
|
||||
/// #![feature(maybe_uninit_extra)]
|
||||
/// use std::mem::MaybeUninit;
|
||||
///
|
||||
/// let mut x = MaybeUninit::<Option<Vec<u32>>>::uninit();
|
||||
/// x.write(Some(vec![0,1,2]));
|
||||
/// let x1 = unsafe { x.read() };
|
||||
/// let x2 = unsafe { x.read() };
|
||||
/// // We now created two copies of the same vector, leading to a double-free when
|
||||
/// // they both get dropped!
|
||||
/// ```
|
||||
#[unstable(feature = "maybe_uninit_extra", issue = "53491")]
|
||||
#[inline(always)]
|
||||
pub unsafe fn read(&self) -> T {
|
||||
intrinsics::panic_if_uninhabited::<T>();
|
||||
self.as_ptr().read()
|
||||
}
|
||||
|
||||
/// Gets a reference to the contained value.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// It is up to the caller to guarantee that the `MaybeUninit<T>` really is in an initialized
|
||||
/// state. Calling this when the content is not yet fully initialized causes undefined
|
||||
/// behavior.
|
||||
#[unstable(feature = "maybe_uninit_ref", issue = "53491")]
|
||||
#[inline(always)]
|
||||
pub unsafe fn get_ref(&self) -> &T {
|
||||
&*self.value
|
||||
}
|
||||
|
||||
/// Gets a mutable reference to the contained value.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// It is up to the caller to guarantee that the `MaybeUninit<T>` really is in an initialized
|
||||
/// state. Calling this when the content is not yet fully initialized causes undefined
|
||||
/// behavior.
|
||||
// FIXME(#53491): We currently rely on the above being incorrect, i.e., we have references
|
||||
// to uninitialized data (e.g., in `libcore/fmt/float.rs`). We should make
|
||||
// a final decision about the rules before stabilization.
|
||||
#[unstable(feature = "maybe_uninit_ref", issue = "53491")]
|
||||
#[inline(always)]
|
||||
pub unsafe fn get_mut(&mut self) -> &mut T {
|
||||
&mut *self.value
|
||||
}
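// A minimal usage sketch (added; not part of the original source). It shows
// how `get_ref`/`get_mut` might be exercised once the contents are known to
// be initialized; it assumes a nightly toolchain with the unstable
// `maybe_uninit_ref` feature enabled, matching the attributes above.
//
//     #![feature(maybe_uninit_ref)]
//     use std::mem::MaybeUninit;
//
//     let mut x = MaybeUninit::<String>::uninit();
//     unsafe { x.as_mut_ptr().write(String::from("hello")) };
//     // Only sound because the value was fully initialized just above.
//     unsafe { x.get_mut().push_str(", world") };
//     assert_eq!(unsafe { x.get_ref() }, "hello, world");
//     // Convert to the initialized value so the `String` is dropped properly.
//     let _s = unsafe { x.assume_init() };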
|
||||
|
||||
/// Gets a pointer to the first element of the array.
|
||||
#[unstable(feature = "maybe_uninit_slice", issue = "53491")]
|
||||
#[inline(always)]
|
||||
pub fn first_ptr(this: &[MaybeUninit<T>]) -> *const T {
|
||||
this as *const [MaybeUninit<T>] as *const T
|
||||
}
|
||||
|
||||
/// Gets a mutable pointer to the first element of the array.
|
||||
#[unstable(feature = "maybe_uninit_slice", issue = "53491")]
|
||||
#[inline(always)]
|
||||
pub fn first_ptr_mut(this: &mut [MaybeUninit<T>]) -> *mut T {
|
||||
this as *mut [MaybeUninit<T>] as *mut T
|
||||
}
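// A minimal usage sketch (added; not part of the original source). It shows
// one way `first_ptr_mut` might be used to initialize a buffer of
// `MaybeUninit` elements; it assumes a nightly toolchain with the unstable
// `maybe_uninit_slice` feature enabled.
//
//     #![feature(maybe_uninit_slice)]
//     use std::mem::MaybeUninit;
//
//     let mut buf = [MaybeUninit::<u8>::uninit(); 4];
//     let p = MaybeUninit::first_ptr_mut(&mut buf);
//     unsafe {
//         for i in 0..buf.len() {
//             p.add(i).write(i as u8);
//         }
//     }
//     // Every element is now initialized; the buffer could be read back
//     // through the pointer returned by `MaybeUninit::first_ptr(&buf)`.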
|
||||
}
|
752
src/libcore/mem/mod.rs
Normal file
@ -0,0 +1,752 @@
|
||||
//! Basic functions for dealing with memory.
|
||||
//!
|
||||
//! This module contains functions for querying the size and alignment of
|
||||
//! types, initializing and manipulating memory.
|
||||
|
||||
#![stable(feature = "rust1", since = "1.0.0")]
|
||||
|
||||
use crate::clone;
|
||||
use crate::cmp;
|
||||
use crate::fmt;
|
||||
use crate::hash;
|
||||
use crate::intrinsics;
|
||||
use crate::marker::{Copy, PhantomData, Sized};
|
||||
use crate::ptr;
|
||||
|
||||
mod manually_drop;
|
||||
#[stable(feature = "manually_drop", since = "1.20.0")]
|
||||
pub use manually_drop::ManuallyDrop;
|
||||
|
||||
mod maybe_uninit;
|
||||
#[stable(feature = "maybe_uninit", since = "1.36.0")]
|
||||
pub use maybe_uninit::MaybeUninit;
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[doc(inline)]
|
||||
pub use crate::intrinsics::transmute;
|
||||
|
||||
/// Takes ownership and "forgets" about the value **without running its destructor**.
|
||||
///
|
||||
/// Any resources the value manages, such as heap memory or a file handle, will linger
|
||||
/// forever in an unreachable state. However, it does not guarantee that pointers
|
||||
/// to this memory will remain valid.
|
||||
///
|
||||
/// * If you want to leak memory, see [`Box::leak`][leak].
|
||||
/// * If you want to obtain a raw pointer to the memory, see [`Box::into_raw`][into_raw].
|
||||
/// * If you want to dispose of a value properly, running its destructor, see
|
||||
/// [`mem::drop`][drop].
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `forget` is not marked as `unsafe`, because Rust's safety guarantees
|
||||
/// do not include a guarantee that destructors will always run. For example,
|
||||
/// a program can create a reference cycle using [`Rc`][rc], or call
|
||||
/// [`process::exit`][exit] to exit without running destructors. Thus, allowing
|
||||
/// `mem::forget` from safe code does not fundamentally change Rust's safety
|
||||
/// guarantees.
|
||||
///
|
||||
/// That said, leaking resources such as memory or I/O objects is usually undesirable,
|
||||
/// so `forget` is only recommended for specialized use cases like those shown below.
|
||||
///
|
||||
/// Because forgetting a value is allowed, any `unsafe` code you write must
|
||||
/// allow for this possibility. You cannot return a value and expect that the
|
||||
/// caller will necessarily run the value's destructor.
|
||||
///
|
||||
/// [rc]: ../../std/rc/struct.Rc.html
|
||||
/// [exit]: ../../std/process/fn.exit.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Leak an I/O object, never closing the file:
|
||||
///
|
||||
/// ```no_run
|
||||
/// use std::mem;
|
||||
/// use std::fs::File;
|
||||
///
|
||||
/// let file = File::open("foo.txt").unwrap();
|
||||
/// mem::forget(file);
|
||||
/// ```
|
||||
///
|
||||
/// The practical use cases for `forget` are rather specialized and mainly come
|
||||
/// up in unsafe or FFI code.
|
||||
///
|
||||
/// [drop]: fn.drop.html
|
||||
/// [uninit]: fn.uninitialized.html
|
||||
/// [clone]: ../clone/trait.Clone.html
|
||||
/// [swap]: fn.swap.html
|
||||
/// [box]: ../../std/boxed/struct.Box.html
|
||||
/// [leak]: ../../std/boxed/struct.Box.html#method.leak
|
||||
/// [into_raw]: ../../std/boxed/struct.Box.html#method.into_raw
|
||||
/// [ub]: ../../reference/behavior-considered-undefined.html
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn forget<T>(t: T) {
|
||||
ManuallyDrop::new(t);
|
||||
}
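// A small illustration (added; not part of the original source) of the point
// above: safe code can always leak a value, so unsafe code must not depend on
// a destructor running. The `Guard` type here is purely hypothetical.
//
//     use std::mem;
//
//     struct Guard;
//     impl Drop for Guard {
//         fn drop(&mut self) { println!("cleanup runs"); }
//     }
//
//     let g = Guard;
//     mem::forget(g); // compiles fine; "cleanup runs" is never printed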
|
||||
|
||||
/// Like [`forget`], but also accepts unsized values.
|
||||
///
|
||||
/// This function is just a shim intended to be removed when the `unsized_locals` feature gets
|
||||
/// stabilized.
|
||||
///
|
||||
/// [`forget`]: fn.forget.html
|
||||
#[inline]
|
||||
#[unstable(feature = "forget_unsized", issue = "0")]
|
||||
pub fn forget_unsized<T: ?Sized>(t: T) {
|
||||
unsafe { intrinsics::forget(t) }
|
||||
}
|
||||
|
||||
/// Returns the size of a type in bytes.
|
||||
///
|
||||
/// More specifically, this is the offset in bytes between successive elements
|
||||
/// in an array with that item type including alignment padding. Thus, for any
|
||||
/// type `T` and length `n`, `[T; n]` has a size of `n * size_of::<T>()`.
|
||||
///
|
||||
/// In general, the size of a type is not stable across compilations, but
|
||||
/// specific types such as primitives are.
|
||||
///
|
||||
/// The following table gives the size for primitives.
|
||||
///
|
||||
/// Type | size_of::\<Type>()
|
||||
/// ---- | ---------------
|
||||
/// () | 0
|
||||
/// bool | 1
|
||||
/// u8 | 1
|
||||
/// u16 | 2
|
||||
/// u32 | 4
|
||||
/// u64 | 8
|
||||
/// u128 | 16
|
||||
/// i8 | 1
|
||||
/// i16 | 2
|
||||
/// i32 | 4
|
||||
/// i64 | 8
|
||||
/// i128 | 16
|
||||
/// f32 | 4
|
||||
/// f64 | 8
|
||||
/// char | 4
|
||||
///
|
||||
/// Furthermore, `usize` and `isize` have the same size.
|
||||
///
|
||||
/// The types `*const T`, `&T`, `Box<T>`, `Option<&T>`, and `Option<Box<T>>` all have
|
||||
/// the same size. If `T` is Sized, all of those types have the same size as `usize`.
|
||||
///
|
||||
/// The mutability of a pointer does not change its size. As such, `&T` and `&mut T`
|
||||
/// have the same size. Likewise for `*const T` and `*mut T`.
|
||||
///
|
||||
/// # Size of `#[repr(C)]` items
|
||||
///
|
||||
/// The `C` representation for items has a defined layout. With this layout,
|
||||
/// the size of items is also stable as long as all fields have a stable size.
|
||||
///
|
||||
/// ## Size of Structs
|
||||
///
|
||||
/// For `structs`, the size is determined by the following algorithm.
|
||||
///
|
||||
/// For each field in the struct ordered by declaration order:
|
||||
///
|
||||
/// 1. Add the size of the field.
|
||||
/// 2. Round up the current size to the nearest multiple of the next field's [alignment].
|
||||
///
|
||||
/// Finally, round the size of the struct to the nearest multiple of its [alignment].
|
||||
/// The alignment of the struct is usually the largest alignment of all its
|
||||
/// fields; this can be changed with the use of `repr(align(N))`.
|
||||
///
|
||||
/// Unlike `C`, zero sized structs are not rounded up to one byte in size.
|
||||
///
|
||||
/// ## Size of Enums
|
||||
///
|
||||
/// Enums that carry no data other than the discriminant have the same size as C enums
|
||||
/// on the platform they are compiled for.
|
||||
///
|
||||
/// ## Size of Unions
|
||||
///
|
||||
/// The size of a union is the size of its largest field.
|
||||
///
|
||||
/// Unlike `C`, zero sized unions are not rounded up to one byte in size.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// // Some primitives
|
||||
/// assert_eq!(4, mem::size_of::<i32>());
|
||||
/// assert_eq!(8, mem::size_of::<f64>());
|
||||
/// assert_eq!(0, mem::size_of::<()>());
|
||||
///
|
||||
/// // Some arrays
|
||||
/// assert_eq!(8, mem::size_of::<[i32; 2]>());
|
||||
/// assert_eq!(12, mem::size_of::<[i32; 3]>());
|
||||
/// assert_eq!(0, mem::size_of::<[i32; 0]>());
|
||||
///
|
||||
///
|
||||
/// // Pointer size equality
|
||||
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<*const i32>());
|
||||
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Box<i32>>());
|
||||
/// assert_eq!(mem::size_of::<&i32>(), mem::size_of::<Option<&i32>>());
|
||||
/// assert_eq!(mem::size_of::<Box<i32>>(), mem::size_of::<Option<Box<i32>>>());
|
||||
/// ```
|
||||
///
|
||||
/// Using `#[repr(C)]`.
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// #[repr(C)]
|
||||
/// struct FieldStruct {
|
||||
/// first: u8,
|
||||
/// second: u16,
|
||||
/// third: u8
|
||||
/// }
|
||||
///
|
||||
/// // The size of the first field is 1, so add 1 to the size. Size is 1.
|
||||
/// // The alignment of the second field is 2, so add 1 to the size for padding. Size is 2.
|
||||
/// // The size of the second field is 2, so add 2 to the size. Size is 4.
|
||||
/// // The alignment of the third field is 1, so add 0 to the size for padding. Size is 4.
|
||||
/// // The size of the third field is 1, so add 1 to the size. Size is 5.
|
||||
/// // Finally, the alignment of the struct is 2 (because the largest alignment amongst its
|
||||
/// // fields is 2), so add 1 to the size for padding. Size is 6.
|
||||
/// assert_eq!(6, mem::size_of::<FieldStruct>());
|
||||
///
|
||||
/// #[repr(C)]
|
||||
/// struct TupleStruct(u8, u16, u8);
|
||||
///
|
||||
/// // Tuple structs follow the same rules.
|
||||
/// assert_eq!(6, mem::size_of::<TupleStruct>());
|
||||
///
|
||||
/// // Note that reordering the fields can lower the size. We can remove both padding bytes
|
||||
/// // by putting `third` before `second`.
|
||||
/// #[repr(C)]
|
||||
/// struct FieldStructOptimized {
|
||||
/// first: u8,
|
||||
/// third: u8,
|
||||
/// second: u16
|
||||
/// }
|
||||
///
|
||||
/// assert_eq!(4, mem::size_of::<FieldStructOptimized>());
|
||||
///
|
||||
/// // Union size is the size of the largest field.
|
||||
/// #[repr(C)]
|
||||
/// union ExampleUnion {
|
||||
/// smaller: u8,
|
||||
/// larger: u16
|
||||
/// }
|
||||
///
|
||||
/// assert_eq!(2, mem::size_of::<ExampleUnion>());
|
||||
/// ```
|
||||
///
|
||||
/// [alignment]: ./fn.align_of.html
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_promotable]
|
||||
pub const fn size_of<T>() -> usize {
|
||||
intrinsics::size_of::<T>()
|
||||
}
|
||||
|
||||
/// Returns the size of the pointed-to value in bytes.
|
||||
///
|
||||
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
|
||||
/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
|
||||
/// then `size_of_val` can be used to get the dynamically-known size.
|
||||
///
|
||||
/// [slice]: ../../std/primitive.slice.html
|
||||
/// [trait object]: ../../book/ch17-02-trait-objects.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// assert_eq!(4, mem::size_of_val(&5i32));
|
||||
///
|
||||
/// let x: [u8; 13] = [0; 13];
|
||||
/// let y: &[u8] = &x;
|
||||
/// assert_eq!(13, mem::size_of_val(y));
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
|
||||
unsafe { intrinsics::size_of_val(val) }
|
||||
}
|
||||
|
||||
/// Returns the [ABI]-required minimum alignment of a type.
|
||||
///
|
||||
/// Every reference to a value of the type `T` must be a multiple of this number.
|
||||
///
|
||||
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
|
||||
///
|
||||
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// # #![allow(deprecated)]
|
||||
/// use std::mem;
|
||||
///
|
||||
/// assert_eq!(4, mem::min_align_of::<i32>());
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_deprecated(reason = "use `align_of` instead", since = "1.2.0")]
|
||||
pub fn min_align_of<T>() -> usize {
|
||||
intrinsics::min_align_of::<T>()
|
||||
}
|
||||
|
||||
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
|
||||
///
|
||||
/// Every reference to a value of the type `T` must be a multiple of this number.
|
||||
///
|
||||
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// # #![allow(deprecated)]
|
||||
/// use std::mem;
|
||||
///
|
||||
/// assert_eq!(4, mem::min_align_of_val(&5i32));
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_deprecated(reason = "use `align_of_val` instead", since = "1.2.0")]
|
||||
pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
|
||||
unsafe { intrinsics::min_align_of_val(val) }
|
||||
}
|
||||
|
||||
/// Returns the [ABI]-required minimum alignment of a type.
|
||||
///
|
||||
/// Every reference to a value of the type `T` must be a multiple of this number.
|
||||
///
|
||||
/// This is the alignment used for struct fields. It may be smaller than the preferred alignment.
|
||||
///
|
||||
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// assert_eq!(4, mem::align_of::<i32>());
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[rustc_promotable]
|
||||
pub const fn align_of<T>() -> usize {
|
||||
intrinsics::min_align_of::<T>()
|
||||
}
|
||||
|
||||
/// Returns the [ABI]-required minimum alignment of the type of the value that `val` points to.
|
||||
///
|
||||
/// Every reference to a value of the type `T` must be a multiple of this number.
|
||||
///
|
||||
/// [ABI]: https://en.wikipedia.org/wiki/Application_binary_interface
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// assert_eq!(4, mem::align_of_val(&5i32));
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn align_of_val<T: ?Sized>(val: &T) -> usize {
|
||||
unsafe { intrinsics::min_align_of_val(val) }
|
||||
}
|
||||
|
||||
/// Returns `true` if dropping values of type `T` matters.
|
||||
///
|
||||
/// This is purely an optimization hint, and may be implemented conservatively:
|
||||
/// it may return `true` for types that don't actually need to be dropped.
|
||||
/// As such always returning `true` would be a valid implementation of
|
||||
/// this function. However if this function actually returns `false`, then you
|
||||
/// can be certain dropping `T` has no side effect.
|
||||
///
|
||||
/// Low level implementations of things like collections, which need to manually
|
||||
/// drop their data, should use this function to avoid unnecessarily
|
||||
/// trying to drop all their contents when they are destroyed. This might not
|
||||
/// make a difference in release builds (where a loop that has no side-effects
|
||||
/// is easily detected and eliminated), but is often a big win for debug builds.
|
||||
///
|
||||
/// Note that `ptr::drop_in_place` already performs this check, so if your workload
|
||||
/// can be reduced to some small number of drop_in_place calls, using this is
|
||||
/// unnecessary. In particular note that you can drop_in_place a slice, and that
|
||||
/// will do a single needs_drop check for all the values.
|
||||
///
|
||||
/// Types like Vec therefore just `drop_in_place(&mut self[..])` without using
|
||||
/// needs_drop explicitly. Types like HashMap, on the other hand, have to drop
|
||||
/// values one at a time and should use this API.
|
||||
///
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Here's an example of how a collection might make use of needs_drop:
|
||||
///
|
||||
/// ```
|
||||
/// use std::{mem, ptr};
|
||||
///
|
||||
/// pub struct MyCollection<T> {
|
||||
/// # data: [T; 1],
|
||||
/// /* ... */
|
||||
/// }
|
||||
/// # impl<T> MyCollection<T> {
|
||||
/// # fn iter_mut(&mut self) -> &mut [T] { &mut self.data }
|
||||
/// # fn free_buffer(&mut self) {}
|
||||
/// # }
|
||||
///
|
||||
/// impl<T> Drop for MyCollection<T> {
|
||||
/// fn drop(&mut self) {
|
||||
/// unsafe {
|
||||
/// // drop the data
|
||||
/// if mem::needs_drop::<T>() {
|
||||
/// for x in self.iter_mut() {
|
||||
/// ptr::drop_in_place(x);
|
||||
/// }
|
||||
/// }
|
||||
/// self.free_buffer();
|
||||
/// }
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "needs_drop", since = "1.21.0")]
|
||||
pub const fn needs_drop<T>() -> bool {
|
||||
intrinsics::needs_drop::<T>()
|
||||
}
|
||||
|
||||
/// Creates a value whose bytes are all zero.
|
||||
///
|
||||
/// This has the same effect as [`MaybeUninit::zeroed().assume_init()`][zeroed].
|
||||
/// It is useful for FFI sometimes, but should generally be avoided.
|
||||
///
|
||||
/// There is no guarantee that an all-zero byte-pattern represents a valid value of
|
||||
/// some type `T`. For example, the all-zero byte-pattern is not a valid value
|
||||
/// for reference types (`&T` and `&mut T`). Using `zeroed` on such types
|
||||
/// causes immediate [undefined behavior][ub] because [the Rust compiler assumes][inv]
|
||||
/// that there always is a valid value in a variable it considers initialized.
|
||||
///
|
||||
/// [zeroed]: union.MaybeUninit.html#method.zeroed
|
||||
/// [ub]: ../../reference/behavior-considered-undefined.html
|
||||
/// [inv]: union.MaybeUninit.html#initialization-invariant
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Correct usage of this function: initializing an integer with zero.
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// let x: i32 = unsafe { mem::zeroed() };
|
||||
/// assert_eq!(0, x);
|
||||
/// ```
|
||||
///
|
||||
/// *Incorrect* usage of this function: initializing a reference with zero.
|
||||
///
|
||||
/// ```no_run
|
||||
/// use std::mem;
|
||||
///
|
||||
/// let _x: &i32 = unsafe { mem::zeroed() }; // Undefined behavior!
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub unsafe fn zeroed<T>() -> T {
|
||||
intrinsics::panic_if_uninhabited::<T>();
|
||||
intrinsics::init()
|
||||
}
|
||||
|
||||
/// Bypasses Rust's normal memory-initialization checks by pretending to
|
||||
/// produce a value of type `T`, while doing nothing at all.
|
||||
///
|
||||
/// **This function is deprecated.** Use [`MaybeUninit<T>`] instead.
|
||||
///
|
||||
/// The reason for deprecation is that the function basically cannot be used
|
||||
/// correctly: [the Rust compiler assumes][inv] that values are properly initialized.
|
||||
/// As a consequence, calling e.g. `mem::uninitialized::<bool>()` causes immediate
|
||||
/// undefined behavior for returning a `bool` that is not definitely either `true`
|
||||
/// or `false`. Worse, truly uninitialized memory like what gets returned here
|
||||
/// is special in that the compiler knows that it does not have a fixed value.
|
||||
/// This makes it undefined behavior to have uninitialized data in a variable even
|
||||
/// if that variable has an integer type.
|
||||
/// (Notice that the rules around uninitialized integers are not finalized yet, but
|
||||
/// until they are, it is advisable to avoid them.)
|
||||
///
|
||||
/// [`MaybeUninit<T>`]: union.MaybeUninit.html
|
||||
/// [inv]: union.MaybeUninit.html#initialization-invariant
|
||||
#[inline]
|
||||
#[rustc_deprecated(since = "1.38.0", reason = "use `mem::MaybeUninit` instead")]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub unsafe fn uninitialized<T>() -> T {
|
||||
intrinsics::panic_if_uninhabited::<T>();
|
||||
intrinsics::uninit()
|
||||
}
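// A hedged migration sketch (added; not part of the original source) for the
// deprecation above: the buffer-filling pattern `mem::uninitialized` was
// commonly used for, rewritten with `MaybeUninit` as the docs recommend.
//
//     use std::mem::MaybeUninit;
//     use std::ptr;
//
//     // Before (deprecated): let mut buf: [u8; 1024] = unsafe { std::mem::uninitialized() };
//     let mut buf = MaybeUninit::<[u8; 1024]>::uninit();
//     let buf_ptr = buf.as_mut_ptr() as *mut u8;
//     unsafe {
//         // Fill the whole buffer before claiming it is initialized.
//         ptr::write_bytes(buf_ptr, 0, 1024);
//         let buf: [u8; 1024] = buf.assume_init();
//         assert_eq!(buf[0], 0);
//     }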
|
||||
|
||||
/// Swaps the values at two mutable locations, without deinitializing either one.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// let mut x = 5;
|
||||
/// let mut y = 42;
|
||||
///
|
||||
/// mem::swap(&mut x, &mut y);
|
||||
///
|
||||
/// assert_eq!(42, x);
|
||||
/// assert_eq!(5, y);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn swap<T>(x: &mut T, y: &mut T) {
|
||||
unsafe {
|
||||
ptr::swap_nonoverlapping_one(x, y);
|
||||
}
|
||||
}
|
||||
|
||||
/// Moves `src` into the referenced `dest`, returning the previous `dest` value.
|
||||
///
|
||||
/// Neither value is dropped.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// A simple example:
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// let mut v: Vec<i32> = vec![1, 2];
|
||||
///
|
||||
/// let old_v = mem::replace(&mut v, vec![3, 4, 5]);
|
||||
/// assert_eq!(vec![1, 2], old_v);
|
||||
/// assert_eq!(vec![3, 4, 5], v);
|
||||
/// ```
|
||||
///
|
||||
/// `replace` allows consumption of a struct field by replacing it with another value.
|
||||
/// Without `replace` you can run into issues like these:
|
||||
///
|
||||
/// ```compile_fail,E0507
|
||||
/// struct Buffer<T> { buf: Vec<T> }
|
||||
///
|
||||
/// impl<T> Buffer<T> {
|
||||
/// fn get_and_reset(&mut self) -> Vec<T> {
|
||||
/// // error: cannot move out of dereference of `&mut`-pointer
|
||||
/// let buf = self.buf;
|
||||
/// self.buf = Vec::new();
|
||||
/// buf
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Note that `T` does not necessarily implement [`Clone`], so it can't even clone and reset
|
||||
/// `self.buf`. But `replace` can be used to disassociate the original value of `self.buf` from
|
||||
/// `self`, allowing it to be returned:
|
||||
///
|
||||
/// ```
|
||||
/// # #![allow(dead_code)]
|
||||
/// use std::mem;
|
||||
///
|
||||
/// # struct Buffer<T> { buf: Vec<T> }
|
||||
/// impl<T> Buffer<T> {
|
||||
/// fn get_and_reset(&mut self) -> Vec<T> {
|
||||
/// mem::replace(&mut self.buf, Vec::new())
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// [`Clone`]: ../../std/clone/trait.Clone.html
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn replace<T>(dest: &mut T, mut src: T) -> T {
|
||||
swap(dest, &mut src);
|
||||
src
|
||||
}
|
||||
|
||||
/// Disposes of a value.
|
||||
///
|
||||
/// This does call the argument's implementation of [`Drop`][drop].
|
||||
///
|
||||
/// This effectively does nothing for types which implement `Copy`, e.g.
|
||||
/// integers. Such values are copied and _then_ moved into the function, so the
|
||||
/// value persists after this function call.
|
||||
///
|
||||
/// This function is not magic; it is literally defined as
|
||||
///
|
||||
/// ```
|
||||
/// pub fn drop<T>(_x: T) { }
|
||||
/// ```
|
||||
///
|
||||
/// Because `_x` is moved into the function, it is automatically dropped before
|
||||
/// the function returns.
|
||||
///
|
||||
/// [drop]: ../ops/trait.Drop.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// Basic usage:
|
||||
///
|
||||
/// ```
|
||||
/// let v = vec![1, 2, 3];
|
||||
///
|
||||
/// drop(v); // explicitly drop the vector
|
||||
/// ```
|
||||
///
|
||||
/// Since [`RefCell`] enforces the borrow rules at runtime, `drop` can
|
||||
/// release a [`RefCell`] borrow:
|
||||
///
|
||||
/// ```
|
||||
/// use std::cell::RefCell;
|
||||
///
|
||||
/// let x = RefCell::new(1);
|
||||
///
|
||||
/// let mut mutable_borrow = x.borrow_mut();
|
||||
/// *mutable_borrow = 1;
|
||||
///
|
||||
/// drop(mutable_borrow); // relinquish the mutable borrow on this slot
|
||||
///
|
||||
/// let borrow = x.borrow();
|
||||
/// println!("{}", *borrow);
|
||||
/// ```
|
||||
///
|
||||
/// Integers and other types implementing [`Copy`] are unaffected by `drop`.
|
||||
///
|
||||
/// ```
|
||||
/// #[derive(Copy, Clone)]
|
||||
/// struct Foo(u8);
|
||||
///
|
||||
/// let x = 1;
|
||||
/// let y = Foo(2);
|
||||
/// drop(x); // a copy of `x` is moved and dropped
|
||||
/// drop(y); // a copy of `y` is moved and dropped
|
||||
///
|
||||
/// println!("x: {}, y: {}", x, y.0); // still available
|
||||
/// ```
|
||||
///
|
||||
/// [`RefCell`]: ../../std/cell/struct.RefCell.html
|
||||
/// [`Copy`]: ../../std/marker/trait.Copy.html
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn drop<T>(_x: T) { }
|
||||
|
||||
/// Interprets `src` as having type `&U`, and then reads `src` without moving
|
||||
/// the contained value.
|
||||
///
|
||||
/// This function will unsafely assume the pointer `src` is valid for
|
||||
/// [`size_of::<U>`][size_of] bytes by transmuting `&T` to `&U` and then reading
|
||||
/// the `&U`. It will also unsafely create a copy of the contained value instead of
|
||||
/// moving out of `src`.
|
||||
///
|
||||
/// It is not a compile-time error if `T` and `U` have different sizes, but it
|
||||
/// is highly encouraged to only invoke this function where `T` and `U` have the
|
||||
/// same size. This function triggers [undefined behavior][ub] if `U` is larger than
|
||||
/// `T`.
|
||||
///
|
||||
/// [ub]: ../../reference/behavior-considered-undefined.html
|
||||
/// [size_of]: fn.size_of.html
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// #[repr(packed)]
|
||||
/// struct Foo {
|
||||
/// bar: u8,
|
||||
/// }
|
||||
///
|
||||
/// let foo_slice = [10u8];
|
||||
///
|
||||
/// unsafe {
|
||||
/// // Copy the data from 'foo_slice' and treat it as a 'Foo'
|
||||
/// let mut foo_struct: Foo = mem::transmute_copy(&foo_slice);
|
||||
/// assert_eq!(foo_struct.bar, 10);
|
||||
///
|
||||
/// // Modify the copied data
|
||||
/// foo_struct.bar = 20;
|
||||
/// assert_eq!(foo_struct.bar, 20);
|
||||
/// }
|
||||
///
|
||||
/// // The contents of 'foo_slice' should not have changed
|
||||
/// assert_eq!(foo_slice, [10]);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub unsafe fn transmute_copy<T, U>(src: &T) -> U {
|
||||
ptr::read_unaligned(src as *const T as *const U)
|
||||
}
|
||||
|
||||
/// Opaque type representing the discriminant of an enum.
|
||||
///
|
||||
/// See the [`discriminant`] function in this module for more information.
|
||||
///
|
||||
/// [`discriminant`]: fn.discriminant.html
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
pub struct Discriminant<T>(u64, PhantomData<fn() -> T>);
|
||||
|
||||
// N.B. These trait implementations cannot be derived because we don't want any bounds on T.
|
||||
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
impl<T> Copy for Discriminant<T> {}
|
||||
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
impl<T> clone::Clone for Discriminant<T> {
|
||||
fn clone(&self) -> Self {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
impl<T> cmp::PartialEq for Discriminant<T> {
|
||||
fn eq(&self, rhs: &Self) -> bool {
|
||||
self.0 == rhs.0
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
impl<T> cmp::Eq for Discriminant<T> {}
|
||||
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
impl<T> hash::Hash for Discriminant<T> {
|
||||
fn hash<H: hash::Hasher>(&self, state: &mut H) {
|
||||
self.0.hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
impl<T> fmt::Debug for Discriminant<T> {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
fmt.debug_tuple("Discriminant")
|
||||
.field(&self.0)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a value uniquely identifying the enum variant in `v`.
|
||||
///
|
||||
/// If `T` is not an enum, calling this function will not result in undefined behavior, but the
|
||||
/// return value is unspecified.
|
||||
///
|
||||
/// # Stability
|
||||
///
|
||||
/// The discriminant of an enum variant may change if the enum definition changes. A discriminant
|
||||
/// of some variant will not change between compilations with the same compiler.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// This can be used to compare enums that carry data, while disregarding
|
||||
/// the actual data:
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// enum Foo { A(&'static str), B(i32), C(i32) }
|
||||
///
|
||||
/// assert!(mem::discriminant(&Foo::A("bar")) == mem::discriminant(&Foo::A("baz")));
|
||||
/// assert!(mem::discriminant(&Foo::B(1)) == mem::discriminant(&Foo::B(2)));
|
||||
/// assert!(mem::discriminant(&Foo::B(3)) != mem::discriminant(&Foo::C(3)));
|
||||
/// ```
|
||||
#[stable(feature = "discriminant_value", since = "1.21.0")]
|
||||
pub fn discriminant<T>(v: &T) -> Discriminant<T> {
|
||||
unsafe {
|
||||
Discriminant(intrinsics::discriminant_value(v), PhantomData)
|
||||
}
|
||||
}
|
@ -4158,6 +4158,24 @@ impl<'a, T> DoubleEndedIterator for Chunks<'a, T> {
|
||||
Some(snd)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
let len = self.len();
|
||||
if n >= len {
|
||||
self.v = &[];
|
||||
None
|
||||
} else {
|
||||
let start = (len - 1 - n) * self.chunk_size;
|
||||
let end = match start.checked_add(self.chunk_size) {
|
||||
Some(res) => cmp::min(res, self.v.len()),
|
||||
None => self.v.len(),
|
||||
};
|
||||
let nth_back = &self.v[start..end];
|
||||
self.v = &self.v[..start];
|
||||
Some(nth_back)
|
||||
}
|
||||
}
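// Worked example (comment added; not in the original diff): with
// `v = [0, 1, 2, 3, 4]` and `chunk_size = 2`, `nth_back(0)` sees
// `len() == 3`, so `start = (3 - 1 - 0) * 2 = 4` and
// `end = min(4 + 2, 5) = 5`; it yields the short last chunk `&[4]`
// and leaves `self.v = &[0, 1, 2, 3]` for further iteration.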
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -4649,6 +4667,23 @@ impl<'a, T> DoubleEndedIterator for RChunks<'a, T> {
|
||||
Some(fst)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
let len = self.len();
|
||||
if n >= len {
|
||||
self.v = &[];
|
||||
None
|
||||
} else {
|
||||
// can't underflow because `n < len`
|
||||
let offset_from_end = (len - 1 - n) * self.chunk_size;
|
||||
let end = self.v.len() - offset_from_end;
|
||||
let start = end.saturating_sub(self.chunk_size);
|
||||
let nth_back = &self.v[start..end];
|
||||
self.v = &self.v[end..];
|
||||
Some(nth_back)
|
||||
}
|
||||
}
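// Worked example (comment added; not in the original diff): with
// `v = [0, 1, 2, 3, 4]` and `chunk_size = 2`, `nth_back(0)` sees
// `len() == 3`, so `offset_from_end = (3 - 1 - 0) * 2 = 4`,
// `end = 5 - 4 = 1` and `start = 1.saturating_sub(2) = 0`; it yields the
// short front chunk `&[0]` and leaves `self.v = &[1, 2, 3, 4]`.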
|
||||
}
|
||||
|
||||
#[stable(feature = "rchunks", since = "1.31.0")]
|
||||
@ -4774,6 +4809,24 @@ impl<'a, T> DoubleEndedIterator for RChunksMut<'a, T> {
|
||||
Some(head)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
let len = self.len();
|
||||
if n >= len {
|
||||
self.v = &mut [];
|
||||
None
|
||||
} else {
|
||||
// can't underflow because `n < len`
|
||||
let offset_from_end = (len - 1 - n) * self.chunk_size;
|
||||
let end = self.v.len() - offset_from_end;
|
||||
let start = end.saturating_sub(self.chunk_size);
|
||||
let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
|
||||
let (_, nth_back) = tmp.split_at_mut(start);
|
||||
self.v = tail;
|
||||
Some(nth_back)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rchunks", since = "1.31.0")]
|
||||
@ -4898,6 +4951,24 @@ impl<'a, T> DoubleEndedIterator for RChunksExact<'a, T> {
|
||||
Some(fst)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
let len = self.len();
|
||||
if n >= len {
|
||||
self.v = &[];
|
||||
None
|
||||
} else {
|
||||
// now that we know that `n` corresponds to a chunk,
|
||||
// none of these operations can underflow/overflow
|
||||
let offset = (len - n) * self.chunk_size;
|
||||
let start = self.v.len() - offset;
|
||||
let end = start + self.chunk_size;
|
||||
let nth_back = &self.v[start..end];
|
||||
self.v = &self.v[end..];
|
||||
Some(nth_back)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rchunks", since = "1.31.0")]
|
||||
@ -5016,6 +5087,25 @@ impl<'a, T> DoubleEndedIterator for RChunksExactMut<'a, T> {
|
||||
Some(head)
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
|
||||
let len = self.len();
|
||||
if n >= len {
|
||||
self.v = &mut [];
|
||||
None
|
||||
} else {
|
||||
// now that we know that `n` corresponds to a chunk,
|
||||
// none of these operations can underflow/overflow
|
||||
let offset = (len - n) * self.chunk_size;
|
||||
let start = self.v.len() - offset;
|
||||
let end = start + self.chunk_size;
|
||||
let (tmp, tail) = mem::replace(&mut self.v, &mut []).split_at_mut(end);
|
||||
let (_, nth_back) = tmp.split_at_mut(start);
|
||||
self.v = tail;
|
||||
Some(nth_back)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rchunks", since = "1.31.0")]
|
||||
|
@ -1084,6 +1084,14 @@ fn test_iterator_sum_result() {
|
||||
assert_eq!(v.iter().cloned().sum::<Result<i32, _>>(), Err(()));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iterator_sum_option() {
|
||||
let v: &[Option<i32>] = &[Some(1), Some(2), Some(3), Some(4)];
|
||||
assert_eq!(v.iter().cloned().sum::<Option<i32>>(), Some(10));
|
||||
let v: &[Option<i32>] = &[Some(1), None, Some(3), Some(4)];
|
||||
assert_eq!(v.iter().cloned().sum::<Option<i32>>(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iterator_product() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
@ -1126,6 +1134,14 @@ impl Ord for Mod3 {
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iterator_product_option() {
|
||||
let v: &[Option<i32>] = &[Some(1), Some(2), Some(3), Some(4)];
|
||||
assert_eq!(v.iter().cloned().product::<Option<i32>>(), Some(24));
|
||||
let v: &[Option<i32>] = &[Some(1), None, Some(3), Some(4)];
|
||||
assert_eq!(v.iter().cloned().product::<Option<i32>>(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iterator_max() {
|
||||
let v: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
|
||||
|
@ -134,6 +134,30 @@ fn test_chunks_nth() {
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_chunks_nth_back() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||
let mut c = v.chunks(2);
|
||||
assert_eq!(c.nth_back(1).unwrap(), &[2, 3]);
|
||||
assert_eq!(c.next().unwrap(), &[0, 1]);
|
||||
assert_eq!(c.next(), None);
|
||||
|
||||
let v2: &[i32] = &[0, 1, 2, 3, 4];
|
||||
let mut c2 = v2.chunks(3);
|
||||
assert_eq!(c2.nth_back(1).unwrap(), &[0, 1, 2]);
|
||||
assert_eq!(c2.next(), None);
|
||||
assert_eq!(c2.next_back(), None);
|
||||
|
||||
let v3: &[i32] = &[0, 1, 2, 3, 4];
|
||||
let mut c3 = v3.chunks(10);
|
||||
assert_eq!(c3.nth_back(0).unwrap(), &[0, 1, 2, 3, 4]);
|
||||
assert_eq!(c3.next(), None);
|
||||
|
||||
let v4: &[i32] = &[0, 1, 2];
|
||||
let mut c4 = v4.chunks(10);
|
||||
assert_eq!(c4.nth_back(1_000_000_000usize), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_chunks_last() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||
@ -356,6 +380,19 @@ fn test_rchunks_nth() {
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_nth_back() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||
let mut c = v.rchunks(2);
|
||||
assert_eq!(c.nth_back(1).unwrap(), &[2, 3]);
|
||||
assert_eq!(c.next_back().unwrap(), &[4, 5]);
|
||||
|
||||
let v2: &[i32] = &[0, 1, 2, 3, 4];
|
||||
let mut c2 = v2.rchunks(3);
|
||||
assert_eq!(c2.nth_back(1).unwrap(), &[2, 3, 4]);
|
||||
assert_eq!(c2.next_back(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_last() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||
@ -407,6 +444,19 @@ fn test_rchunks_mut_nth() {
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_mut_nth_back() {
|
||||
let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
|
||||
let mut c = v.rchunks_mut(2);
|
||||
assert_eq!(c.nth_back(1).unwrap(), &[2, 3]);
|
||||
assert_eq!(c.next_back().unwrap(), &[4, 5]);
|
||||
|
||||
let v2: &mut [i32] = &mut [0, 1, 2, 3, 4];
|
||||
let mut c2 = v2.rchunks_mut(3);
|
||||
assert_eq!(c2.nth_back(1).unwrap(), &[2, 3, 4]);
|
||||
assert_eq!(c2.next_back(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_mut_last() {
|
||||
let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
|
||||
@ -460,6 +510,19 @@ fn test_rchunks_exact_nth() {
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_exact_nth_back() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||
let mut c = v.rchunks_exact(2);
|
||||
assert_eq!(c.nth_back(1).unwrap(), &[2, 3]);
|
||||
assert_eq!(c.next_back().unwrap(), &[4, 5]);
|
||||
|
||||
let v2: &[i32] = &[0, 1, 2, 3, 4, 5, 6];
|
||||
let mut c2 = v2.rchunks_exact(3);
|
||||
assert_eq!(c2.nth_back(1).unwrap(), &[4, 5, 6]);
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_exact_last() {
|
||||
let v: &[i32] = &[0, 1, 2, 3, 4, 5];
|
||||
@ -518,6 +581,19 @@ fn test_rchunks_exact_mut_nth() {
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_exact_mut_nth_back() {
|
||||
let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
|
||||
let mut c = v.rchunks_exact_mut(2);
|
||||
assert_eq!(c.nth_back(1).unwrap(), &[2, 3]);
|
||||
assert_eq!(c.next_back().unwrap(), &[4, 5]);
|
||||
|
||||
let v2: &mut [i32] = &mut [0, 1, 2, 3, 4, 5, 6];
|
||||
let mut c2 = v2.rchunks_exact_mut(3);
|
||||
assert_eq!(c2.nth_back(1).unwrap(), &[4, 5, 6]);
|
||||
assert_eq!(c2.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rchunks_exact_mut_last() {
|
||||
let v: &mut [i32] = &mut [0, 1, 2, 3, 4, 5];
|
||||
|
@ -1083,6 +1083,18 @@ impl<'a> LoweringContext<'a> {
|
||||
.chain(in_band_defs)
|
||||
.collect();
|
||||
|
||||
// FIXME(const_generics): the compiler doesn't always cope with
|
||||
// unsorted generic parameters at the moment, so we make sure
|
||||
// that they're ordered correctly here for now. (When we chain
|
||||
// the `in_band_defs`, we might make the order unsorted.)
|
||||
lowered_generics.params.sort_by_key(|param| {
|
||||
match param.kind {
|
||||
hir::GenericParamKind::Lifetime { .. } => ParamKindOrd::Lifetime,
|
||||
hir::GenericParamKind::Type { .. } => ParamKindOrd::Type,
|
||||
hir::GenericParamKind::Const { .. } => ParamKindOrd::Const,
|
||||
}
|
||||
});
|
||||
|
||||
(lowered_generics, res)
|
||||
}
|
||||
|
||||
|
@ -425,6 +425,13 @@ impl GenericArg {
|
||||
GenericArg::Const(c) => c.value.hir_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_const(&self) -> bool {
|
||||
match self {
|
||||
GenericArg::Const(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
|
||||
|
@ -255,7 +255,7 @@ declare_lint! {
|
||||
|
||||
declare_lint! {
|
||||
pub BARE_TRAIT_OBJECTS,
|
||||
Allow,
|
||||
Warn,
|
||||
"suggest using `dyn Trait` for trait objects"
|
||||
}
|
||||
|
||||
|
@ -170,16 +170,11 @@ pub enum Set1<T> {
|
||||
|
||||
impl<T: PartialEq> Set1<T> {
|
||||
pub fn insert(&mut self, value: T) {
|
||||
if let Set1::Empty = *self {
|
||||
*self = Set1::One(value);
|
||||
return;
|
||||
}
|
||||
if let Set1::One(ref old) = *self {
|
||||
if *old == value {
|
||||
return;
|
||||
}
|
||||
}
|
||||
*self = Set1::Many;
|
||||
*self = match self {
|
||||
Set1::Empty => Set1::One(value),
|
||||
Set1::One(old) if *old == value => return,
|
||||
_ => Set1::Many,
|
||||
};
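// (Comment added; not in the original diff.) The `match` folds the removed
// two-step logic into one expression: `Empty` becomes `One(value)`, inserting
// an equal value early-returns without changing the set, and any other case
// saturates to `Many`.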
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2037,7 +2037,7 @@ impl<'tcx> Place<'tcx> {
|
||||
/// a single deref of a local.
|
||||
//
|
||||
// FIXME: can we safely swap the semantics of `fn base_local` below in here instead?
|
||||
pub fn local(&self) -> Option<Local> {
|
||||
pub fn local_or_deref_local(&self) -> Option<Local> {
|
||||
match self {
|
||||
Place::Base(PlaceBase::Local(local)) |
|
||||
Place::Projection(box Projection {
|
||||
|
@ -188,49 +188,6 @@ impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for CodegenUnit<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct Stats {
|
||||
pub n_glues_created: usize,
|
||||
pub n_null_glues: usize,
|
||||
pub n_real_glues: usize,
|
||||
pub n_fns: usize,
|
||||
pub n_inlines: usize,
|
||||
pub n_closures: usize,
|
||||
pub n_llvm_insns: usize,
|
||||
pub llvm_insns: FxHashMap<String, usize>,
|
||||
// (ident, llvm-instructions)
|
||||
pub fn_stats: Vec<(String, usize)>,
|
||||
}
|
||||
|
||||
impl_stable_hash_for!(struct self::Stats {
|
||||
n_glues_created,
|
||||
n_null_glues,
|
||||
n_real_glues,
|
||||
n_fns,
|
||||
n_inlines,
|
||||
n_closures,
|
||||
n_llvm_insns,
|
||||
llvm_insns,
|
||||
fn_stats
|
||||
});
|
||||
|
||||
impl Stats {
|
||||
pub fn extend(&mut self, stats: Stats) {
|
||||
self.n_glues_created += stats.n_glues_created;
|
||||
self.n_null_glues += stats.n_null_glues;
|
||||
self.n_real_glues += stats.n_real_glues;
|
||||
self.n_fns += stats.n_fns;
|
||||
self.n_inlines += stats.n_inlines;
|
||||
self.n_closures += stats.n_closures;
|
||||
self.n_llvm_insns += stats.n_llvm_insns;
|
||||
|
||||
for (k, v) in stats.llvm_insns {
|
||||
*self.llvm_insns.entry(k).or_insert(0) += v;
|
||||
}
|
||||
self.fn_stats.extend(stats.fn_stats);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CodegenUnitNameBuilder<'a, 'gcx: 'tcx, 'tcx: 'a> {
|
||||
tcx: TyCtxt<'a, 'gcx, 'tcx>,
|
||||
cache: FxHashMap<CrateNum, String>,
|
||||
|
@ -1216,21 +1216,12 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
|
||||
"measure time of each rustc pass"),
|
||||
time: bool = (false, parse_bool, [UNTRACKED],
|
||||
"measure time of rustc processes"),
|
||||
count_llvm_insns: bool = (false, parse_bool,
|
||||
[UNTRACKED_WITH_WARNING(true,
|
||||
"The output generated by `-Z count_llvm_insns` might not be reliable \
|
||||
when used with incremental compilation")],
|
||||
"count where LLVM instrs originate"),
|
||||
time_llvm_passes: bool = (false, parse_bool, [UNTRACKED_WITH_WARNING(true,
|
||||
"The output of `-Z time-llvm-passes` will only reflect timings of \
|
||||
re-codegened modules when used with incremental compilation" )],
|
||||
"measure time of each LLVM pass"),
|
||||
input_stats: bool = (false, parse_bool, [UNTRACKED],
|
||||
"gather statistics about the input"),
|
||||
codegen_stats: bool = (false, parse_bool, [UNTRACKED_WITH_WARNING(true,
|
||||
"The output of `-Z codegen-stats` might not be accurate when incremental \
|
||||
compilation is enabled")],
|
||||
"gather codegen statistics"),
|
||||
asm_comments: bool = (false, parse_bool, [TRACKED],
|
||||
"generate comments into the assembly (may change behavior)"),
|
||||
verify_llvm_ir: bool = (false, parse_bool, [TRACKED],
|
||||
@ -3259,14 +3250,10 @@ mod tests {
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.time_passes = true;
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.count_llvm_insns = true;
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.time_llvm_passes = true;
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.input_stats = true;
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.codegen_stats = true;
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.borrowck_stats = true;
|
||||
assert_eq!(reference.dep_tracking_hash(), opts.dep_tracking_hash());
|
||||
opts.debugging_opts.meta_stats = true;
|
||||
|
@ -519,15 +519,9 @@ impl Session {
|
||||
pub fn instrument_mcount(&self) -> bool {
|
||||
self.opts.debugging_opts.instrument_mcount
|
||||
}
|
||||
pub fn count_llvm_insns(&self) -> bool {
|
||||
self.opts.debugging_opts.count_llvm_insns
|
||||
}
|
||||
pub fn time_llvm_passes(&self) -> bool {
|
||||
self.opts.debugging_opts.time_llvm_passes
|
||||
}
|
||||
pub fn codegen_stats(&self) -> bool {
|
||||
self.opts.debugging_opts.codegen_stats
|
||||
}
|
||||
pub fn meta_stats(&self) -> bool {
|
||||
self.opts.debugging_opts.meta_stats
|
||||
}
|
||||
@ -1291,6 +1285,18 @@ fn validate_commandline_args_with_session_available(sess: &Session) {
|
||||
path.display()));
|
||||
}
|
||||
}
|
||||
|
||||
// PGO does not work reliably with panic=unwind on Windows. Let's make it
|
||||
// an error to combine the two for now. It always runs into an assertion
|
||||
// if LLVM is built with assertions, but without assertions it sometimes
|
||||
// does not crash and will probably generate a corrupted binary.
|
||||
if sess.opts.debugging_opts.pgo_gen.enabled() &&
|
||||
sess.target.target.options.is_like_msvc &&
|
||||
sess.panic_strategy() == PanicStrategy::Unwind {
|
||||
sess.err("Profile-guided optimization does not yet work in conjunction \
|
||||
with `-Cpanic=unwind` on Windows when targeting MSVC. \
|
||||
See https://github.com/rust-lang/rust/issues/61002 for details.");
|
||||
}
|
||||
}
|
||||
|
||||
/// Hash value constructed out of all the `-C metadata` arguments passed to the
|
||||
|
@ -24,7 +24,7 @@ use crate::common;
|
||||
use crate::context::CodegenCx;
|
||||
use crate::monomorphize::partitioning::CodegenUnitExt;
|
||||
use rustc::dep_graph;
|
||||
use rustc::mir::mono::{Linkage, Visibility, Stats};
|
||||
use rustc::mir::mono::{Linkage, Visibility};
|
||||
use rustc::middle::cstore::{EncodedMetadata};
|
||||
use rustc::ty::TyCtxt;
|
||||
use rustc::middle::exported_symbols;
|
||||
@ -104,17 +104,17 @@ pub fn iter_globals(llmod: &'ll llvm::Module) -> ValueIter<'ll> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
cgu_name: InternedString)
|
||||
-> Stats {
|
||||
pub fn compile_codegen_unit(tcx: TyCtxt<'a, 'tcx, 'tcx>, cgu_name: InternedString) {
|
||||
let start_time = Instant::now();
|
||||
|
||||
let dep_node = tcx.codegen_unit(cgu_name).codegen_dep_node(tcx);
|
||||
let ((stats, module), _) = tcx.dep_graph.with_task(dep_node,
|
||||
tcx,
|
||||
cgu_name,
|
||||
module_codegen,
|
||||
dep_graph::hash_result);
|
||||
let (module, _) = tcx.dep_graph.with_task(
|
||||
dep_node,
|
||||
tcx,
|
||||
cgu_name,
|
||||
module_codegen,
|
||||
dep_graph::hash_result,
|
||||
);
|
||||
let time_to_codegen = start_time.elapsed();
|
||||
|
||||
// We assume that the cost to run LLVM on a CGU is proportional to
|
||||
@ -123,17 +123,15 @@ pub fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
time_to_codegen.subsec_nanos() as u64;
|
||||
|
||||
submit_codegened_module_to_llvm(&LlvmCodegenBackend(()), tcx, module, cost);
|
||||
return stats;
|
||||
|
||||
fn module_codegen<'ll, 'tcx>(
|
||||
tcx: TyCtxt<'ll, 'tcx, 'tcx>,
|
||||
cgu_name: InternedString)
|
||||
-> (Stats, ModuleCodegen<ModuleLlvm>)
|
||||
{
|
||||
cgu_name: InternedString,
|
||||
) -> ModuleCodegen<ModuleLlvm> {
|
||||
let cgu = tcx.codegen_unit(cgu_name);
|
||||
// Instantiate monomorphizations without filling out definitions yet...
|
||||
let llvm_module = ModuleLlvm::new(tcx, &cgu_name.as_str());
|
||||
let stats = {
|
||||
{
|
||||
let cx = CodegenCx::new(tcx, cgu, &llvm_module);
|
||||
let mono_items = cx.codegen_unit
|
||||
.items_in_deterministic_order(cx.tcx);
|
||||
@ -169,15 +167,13 @@ pub fn compile_codegen_unit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
if cx.sess().opts.debuginfo != DebugInfo::None {
|
||||
cx.debuginfo_finalize();
|
||||
}
|
||||
}
|
||||
|
||||
cx.consume_stats().into_inner()
|
||||
};
|
||||
|
||||
(stats, ModuleCodegen {
|
||||
ModuleCodegen {
|
||||
name: cgu_name.to_string(),
|
||||
module_llvm: llvm_module,
|
||||
kind: ModuleKind::Regular,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,21 +147,18 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn ret_void(&mut self) {
|
||||
self.count_insn("retvoid");
|
||||
unsafe {
|
||||
llvm::LLVMBuildRetVoid(self.llbuilder);
|
||||
}
|
||||
}
|
||||
|
||||
fn ret(&mut self, v: &'ll Value) {
|
||||
self.count_insn("ret");
|
||||
unsafe {
|
||||
llvm::LLVMBuildRet(self.llbuilder, v);
|
||||
}
|
||||
}
|
||||
|
||||
fn br(&mut self, dest: &'ll BasicBlock) {
|
||||
self.count_insn("br");
|
||||
unsafe {
|
||||
llvm::LLVMBuildBr(self.llbuilder, dest);
|
||||
}
|
||||
@ -173,7 +170,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
then_llbb: &'ll BasicBlock,
|
||||
else_llbb: &'ll BasicBlock,
|
||||
) {
|
||||
self.count_insn("condbr");
|
||||
unsafe {
|
||||
llvm::LLVMBuildCondBr(self.llbuilder, cond, then_llbb, else_llbb);
|
||||
}
|
||||
@ -204,7 +200,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
catch: &'ll BasicBlock,
|
||||
funclet: Option<&Funclet<'ll>>,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("invoke");
|
||||
|
||||
debug!("Invoke {:?} with args ({:?})",
|
||||
llfn,
|
||||
@ -227,7 +222,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn unreachable(&mut self) {
|
||||
self.count_insn("unreachable");
|
||||
unsafe {
|
||||
llvm::LLVMBuildUnreachable(self.llbuilder);
|
||||
}
|
||||
@ -235,21 +229,18 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
/* Arithmetic */
|
||||
fn add(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("add");
|
||||
unsafe {
|
||||
llvm::LLVMBuildAdd(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fadd(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fadd");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fadd_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fadd");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMBuildFAdd(self.llbuilder, lhs, rhs, noname());
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -258,21 +249,18 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn sub(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("sub");
|
||||
unsafe {
|
||||
llvm::LLVMBuildSub(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fsub(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fsub");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fsub_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fsub");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMBuildFSub(self.llbuilder, lhs, rhs, noname());
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -281,21 +269,18 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn mul(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("mul");
|
||||
unsafe {
|
||||
llvm::LLVMBuildMul(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fmul(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fmul");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fmul_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fmul");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMBuildFMul(self.llbuilder, lhs, rhs, noname());
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -305,42 +290,36 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
|
||||
fn udiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("udiv");
|
||||
unsafe {
|
||||
llvm::LLVMBuildUDiv(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn exactudiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("exactudiv");
|
||||
unsafe {
|
||||
llvm::LLVMBuildExactUDiv(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn sdiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("sdiv");
|
||||
unsafe {
|
||||
llvm::LLVMBuildSDiv(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn exactsdiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("exactsdiv");
|
||||
unsafe {
|
||||
llvm::LLVMBuildExactSDiv(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fdiv(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fdiv");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fdiv_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fdiv");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMBuildFDiv(self.llbuilder, lhs, rhs, noname());
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -349,28 +328,24 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn urem(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("urem");
|
||||
unsafe {
|
||||
llvm::LLVMBuildURem(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn srem(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("srem");
|
||||
unsafe {
|
||||
llvm::LLVMBuildSRem(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn frem(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("frem");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn frem_fast(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("frem");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMBuildFRem(self.llbuilder, lhs, rhs, noname());
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -379,63 +354,54 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn shl(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("shl");
|
||||
unsafe {
|
||||
llvm::LLVMBuildShl(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn lshr(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("lshr");
|
||||
unsafe {
|
||||
llvm::LLVMBuildLShr(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn ashr(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("ashr");
|
||||
unsafe {
|
||||
llvm::LLVMBuildAShr(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn and(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("and");
|
||||
unsafe {
|
||||
llvm::LLVMBuildAnd(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn or(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("or");
|
||||
unsafe {
|
||||
llvm::LLVMBuildOr(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn xor(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("xor");
|
||||
unsafe {
|
||||
llvm::LLVMBuildXor(self.llbuilder, lhs, rhs, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn neg(&mut self, v: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("neg");
|
||||
unsafe {
|
||||
llvm::LLVMBuildNeg(self.llbuilder, v, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fneg(&mut self, v: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fneg");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFNeg(self.llbuilder, v, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn not(&mut self, v: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("not");
|
||||
unsafe {
|
||||
llvm::LLVMBuildNot(self.llbuilder, v, noname())
|
||||
}
|
||||
@ -524,7 +490,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn dynamic_alloca(&mut self, ty: &'ll Type, name: &str, align: Align) -> &'ll Value {
|
||||
self.count_insn("alloca");
|
||||
unsafe {
|
||||
let alloca = if name.is_empty() {
|
||||
llvm::LLVMBuildAlloca(self.llbuilder, ty, noname())
|
||||
@ -543,7 +508,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
len: &'ll Value,
|
||||
name: &str,
|
||||
align: Align) -> &'ll Value {
|
||||
self.count_insn("alloca");
|
||||
unsafe {
|
||||
let alloca = if name.is_empty() {
|
||||
llvm::LLVMBuildArrayAlloca(self.llbuilder, ty, len, noname())
|
||||
@ -558,7 +522,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn load(&mut self, ptr: &'ll Value, align: Align) -> &'ll Value {
|
||||
self.count_insn("load");
|
||||
unsafe {
|
||||
let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
|
||||
llvm::LLVMSetAlignment(load, align.bytes() as c_uint);
|
||||
@ -567,11 +530,10 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn volatile_load(&mut self, ptr: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("load.volatile");
|
||||
unsafe {
|
||||
let insn = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
|
||||
llvm::LLVMSetVolatile(insn, llvm::True);
|
||||
insn
|
||||
let load = llvm::LLVMBuildLoad(self.llbuilder, ptr, noname());
|
||||
llvm::LLVMSetVolatile(load, llvm::True);
|
||||
load
|
||||
}
|
||||
}
|
||||
|
||||
@ -581,7 +543,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
order: rustc_codegen_ssa::common::AtomicOrdering,
|
||||
size: Size,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("load.atomic");
|
||||
unsafe {
|
||||
let load = llvm::LLVMRustBuildAtomicLoad(
|
||||
self.llbuilder,
|
||||
@ -745,7 +706,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
flags: MemFlags,
|
||||
) -> &'ll Value {
|
||||
debug!("Store {:?} -> {:?} ({:?})", val, ptr, flags);
|
||||
self.count_insn("store");
|
||||
let ptr = self.check_store(val, ptr);
|
||||
unsafe {
|
||||
let store = llvm::LLVMBuildStore(self.llbuilder, val, ptr);
|
||||
@ -774,7 +734,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
fn atomic_store(&mut self, val: &'ll Value, ptr: &'ll Value,
|
||||
order: rustc_codegen_ssa::common::AtomicOrdering, size: Size) {
|
||||
debug!("Store {:?} -> {:?}", val, ptr);
|
||||
self.count_insn("store.atomic");
|
||||
let ptr = self.check_store(val, ptr);
|
||||
unsafe {
|
||||
let store = llvm::LLVMRustBuildAtomicStore(
|
||||
@ -789,7 +748,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
|
||||
self.count_insn("gep");
|
||||
unsafe {
|
||||
llvm::LLVMBuildGEP(self.llbuilder, ptr, indices.as_ptr(),
|
||||
indices.len() as c_uint, noname())
|
||||
@ -797,7 +755,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn inbounds_gep(&mut self, ptr: &'ll Value, indices: &[&'ll Value]) -> &'ll Value {
|
||||
self.count_insn("inboundsgep");
|
||||
unsafe {
|
||||
llvm::LLVMBuildInBoundsGEP(
|
||||
self.llbuilder, ptr, indices.as_ptr(), indices.len() as c_uint, noname())
|
||||
@ -805,7 +762,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn struct_gep(&mut self, ptr: &'ll Value, idx: u64) -> &'ll Value {
|
||||
self.count_insn("structgep");
|
||||
assert_eq!(idx as c_uint as u64, idx);
|
||||
unsafe {
|
||||
llvm::LLVMBuildStructGEP(self.llbuilder, ptr, idx as c_uint, noname())
|
||||
@ -814,77 +770,66 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
/* Casts */
|
||||
fn trunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("trunc");
|
||||
unsafe {
|
||||
llvm::LLVMBuildTrunc(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn sext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("sext");
|
||||
unsafe {
|
||||
llvm::LLVMBuildSExt(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fptoui(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("fptoui");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFPToUI(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fptosi(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("fptosi");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFPToSI(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn uitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("uitofp");
|
||||
unsafe {
|
||||
llvm::LLVMBuildUIToFP(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn sitofp(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("sitofp");
|
||||
unsafe {
|
||||
llvm::LLVMBuildSIToFP(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fptrunc(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("fptrunc");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFPTrunc(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn fpext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("fpext");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFPExt(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn ptrtoint(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("ptrtoint");
|
||||
unsafe {
|
||||
llvm::LLVMBuildPtrToInt(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn inttoptr(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("inttoptr");
|
||||
unsafe {
|
||||
llvm::LLVMBuildIntToPtr(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn bitcast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("bitcast");
|
||||
unsafe {
|
||||
llvm::LLVMBuildBitCast(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
@ -892,14 +837,12 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
|
||||
fn intcast(&mut self, val: &'ll Value, dest_ty: &'ll Type, is_signed: bool) -> &'ll Value {
|
||||
self.count_insn("intcast");
|
||||
unsafe {
|
||||
llvm::LLVMRustBuildIntCast(self.llbuilder, val, dest_ty, is_signed)
|
||||
}
|
||||
}
|
||||
|
||||
fn pointercast(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("pointercast");
|
||||
unsafe {
|
||||
llvm::LLVMBuildPointerCast(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
@ -907,7 +850,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
/* Comparisons */
|
||||
fn icmp(&mut self, op: IntPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("icmp");
|
||||
let op = llvm::IntPredicate::from_generic(op);
|
||||
unsafe {
|
||||
llvm::LLVMBuildICmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
|
||||
@ -915,7 +857,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn fcmp(&mut self, op: RealPredicate, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("fcmp");
|
||||
unsafe {
|
||||
llvm::LLVMBuildFCmp(self.llbuilder, op as c_uint, lhs, rhs, noname())
|
||||
}
|
||||
@ -984,7 +925,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
then_val: &'ll Value,
|
||||
else_val: &'ll Value,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("select");
|
||||
unsafe {
|
||||
llvm::LLVMBuildSelect(self.llbuilder, cond, then_val, else_val, noname())
|
||||
}
|
||||
@ -992,14 +932,12 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
#[allow(dead_code)]
|
||||
fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("vaarg");
|
||||
unsafe {
|
||||
llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_element(&mut self, vec: &'ll Value, idx: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("extractelement");
|
||||
unsafe {
|
||||
llvm::LLVMBuildExtractElement(self.llbuilder, vec, idx, noname())
|
||||
}
|
||||
@ -1016,7 +954,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn extract_value(&mut self, agg_val: &'ll Value, idx: u64) -> &'ll Value {
|
||||
self.count_insn("extractvalue");
|
||||
assert_eq!(idx as c_uint as u64, idx);
|
||||
unsafe {
|
||||
llvm::LLVMBuildExtractValue(self.llbuilder, agg_val, idx as c_uint, noname())
|
||||
@ -1025,7 +962,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
fn insert_value(&mut self, agg_val: &'ll Value, elt: &'ll Value,
|
||||
idx: u64) -> &'ll Value {
|
||||
self.count_insn("insertvalue");
|
||||
assert_eq!(idx as c_uint as u64, idx);
|
||||
unsafe {
|
||||
llvm::LLVMBuildInsertValue(self.llbuilder, agg_val, elt, idx as c_uint,
|
||||
@ -1035,7 +971,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
|
||||
fn landing_pad(&mut self, ty: &'ll Type, pers_fn: &'ll Value,
|
||||
num_clauses: usize) -> &'ll Value {
|
||||
self.count_insn("landingpad");
|
||||
unsafe {
|
||||
llvm::LLVMBuildLandingPad(self.llbuilder, ty, pers_fn,
|
||||
num_clauses as c_uint, noname())
|
||||
@ -1043,14 +978,12 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn set_cleanup(&mut self, landing_pad: &'ll Value) {
|
||||
self.count_insn("setcleanup");
|
||||
unsafe {
|
||||
llvm::LLVMSetCleanup(landing_pad, llvm::True);
|
||||
}
|
||||
}
|
||||
|
||||
fn resume(&mut self, exn: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("resume");
|
||||
unsafe {
|
||||
llvm::LLVMBuildResume(self.llbuilder, exn)
|
||||
}
|
||||
@ -1059,7 +992,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
fn cleanup_pad(&mut self,
|
||||
parent: Option<&'ll Value>,
|
||||
args: &[&'ll Value]) -> Funclet<'ll> {
|
||||
self.count_insn("cleanuppad");
|
||||
let name = const_cstr!("cleanuppad");
|
||||
let ret = unsafe {
|
||||
llvm::LLVMRustBuildCleanupPad(self.llbuilder,
|
||||
@ -1075,7 +1007,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
&mut self, funclet: &Funclet<'ll>,
|
||||
unwind: Option<&'ll BasicBlock>,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("cleanupret");
|
||||
let ret = unsafe {
|
||||
llvm::LLVMRustBuildCleanupRet(self.llbuilder, funclet.cleanuppad(), unwind)
|
||||
};
|
||||
@ -1085,7 +1016,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
fn catch_pad(&mut self,
|
||||
parent: &'ll Value,
|
||||
args: &[&'ll Value]) -> Funclet<'ll> {
|
||||
self.count_insn("catchpad");
|
||||
let name = const_cstr!("catchpad");
|
||||
let ret = unsafe {
|
||||
llvm::LLVMRustBuildCatchPad(self.llbuilder, parent,
|
||||
@ -1101,7 +1031,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
unwind: Option<&'ll BasicBlock>,
|
||||
num_handlers: usize,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("catchswitch");
|
||||
let name = const_cstr!("catchswitch");
|
||||
let ret = unsafe {
|
||||
llvm::LLVMRustBuildCatchSwitch(self.llbuilder, parent, unwind,
|
||||
@ -1199,7 +1128,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
args: &[&'ll Value],
|
||||
funclet: Option<&Funclet<'ll>>,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("call");
|
||||
|
||||
debug!("Call {:?} with args ({:?})",
|
||||
llfn,
|
||||
@ -1221,7 +1149,6 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn zext(&mut self, val: &'ll Value, dest_ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("zext");
|
||||
unsafe {
|
||||
llvm::LLVMBuildZExt(self.llbuilder, val, dest_ty, noname())
|
||||
}
|
||||
@ -1285,19 +1212,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
    fn count_insn(&self, category: &str) {
        if self.sess().codegen_stats() {
            self.stats.borrow_mut().n_llvm_insns += 1;
        }
        if self.sess().count_llvm_insns() {
            *self.stats
                .borrow_mut()
                .llvm_insns
                .entry(category.to_string())
                .or_insert(0) += 1;
        }
    }

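`count_insn` above bumps both a global counter and a per-category map; the per-category half relies on the standard `HashMap::entry` / `or_insert` idiom. A standalone sketch of just that idiom, with a plain map instead of the compiler's stats cell:

use std::collections::HashMap;

fn count(category: &str, llvm_insns: &mut HashMap<String, usize>) {
    // `entry` returns the slot for `category`, inserting 0 on first use,
    // so every later call just bumps the existing count.
    *llvm_insns.entry(category.to_string()).or_insert(0) += 1;
}

fn main() {
    let mut stats = HashMap::new();
    count("sdiv", &mut stats);
    count("sdiv", &mut stats);
    count("fdiv", &mut stats);
    assert_eq!(stats["sdiv"], 2);
    assert_eq!(stats["fdiv"], 1);
}
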
fn position_at_start(&mut self, llbb: &'ll BasicBlock) {
|
||||
unsafe {
|
||||
llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
|
||||
@ -1305,12 +1219,10 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn minnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("minnum");
|
||||
unsafe { llvm::LLVMRustBuildMinNum(self.llbuilder, lhs, rhs) }
|
||||
}
|
||||
|
||||
pub fn maxnum(&mut self, lhs: &'ll Value, rhs: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("maxnum");
|
||||
unsafe { llvm::LLVMRustBuildMaxNum(self.llbuilder, lhs, rhs) }
|
||||
}
|
||||
|
||||
@ -1319,7 +1231,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
elt: &'ll Value,
|
||||
idx: &'ll Value,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("insertelement");
|
||||
unsafe {
|
||||
llvm::LLVMBuildInsertElement(self.llbuilder, vec, elt, idx, noname())
|
||||
}
|
||||
@ -1331,14 +1242,12 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
v2: &'ll Value,
|
||||
mask: &'ll Value,
|
||||
) -> &'ll Value {
|
||||
self.count_insn("shufflevector");
|
||||
unsafe {
|
||||
llvm::LLVMBuildShuffleVector(self.llbuilder, v1, v2, mask, noname())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn vector_reduce_fadd_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.fadd_fast");
|
||||
unsafe {
|
||||
// FIXME: add a non-fast math version once
|
||||
// https://bugs.llvm.org/show_bug.cgi?id=36732
|
||||
@ -1349,7 +1258,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
}
|
||||
pub fn vector_reduce_fmul_fast(&mut self, acc: &'ll Value, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.fmul_fast");
|
||||
unsafe {
|
||||
// FIXME: add a non-fast math version once
|
||||
// https://bugs.llvm.org/show_bug.cgi?id=36732
|
||||
@ -1360,35 +1268,27 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
}
|
||||
pub fn vector_reduce_add(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.add");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceAdd(self.llbuilder, src) }
|
||||
}
|
||||
pub fn vector_reduce_mul(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.mul");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceMul(self.llbuilder, src) }
|
||||
}
|
||||
pub fn vector_reduce_and(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.and");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceAnd(self.llbuilder, src) }
|
||||
}
|
||||
pub fn vector_reduce_or(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.or");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceOr(self.llbuilder, src) }
|
||||
}
|
||||
pub fn vector_reduce_xor(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.xor");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceXor(self.llbuilder, src) }
|
||||
}
|
||||
pub fn vector_reduce_fmin(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.fmin");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ false) }
|
||||
}
|
||||
pub fn vector_reduce_fmax(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.fmax");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ false) }
|
||||
}
|
||||
pub fn vector_reduce_fmin_fast(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.fmin_fast");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMRustBuildVectorReduceFMin(self.llbuilder, src, /*NoNaNs:*/ true);
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -1396,7 +1296,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
}
|
||||
pub fn vector_reduce_fmax_fast(&mut self, src: &'ll Value) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.fmax_fast");
|
||||
unsafe {
|
||||
let instr = llvm::LLVMRustBuildVectorReduceFMax(self.llbuilder, src, /*NoNaNs:*/ true);
|
||||
llvm::LLVMRustSetHasUnsafeAlgebra(instr);
|
||||
@ -1404,11 +1303,9 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
}
|
||||
pub fn vector_reduce_min(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.min");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceMin(self.llbuilder, src, is_signed) }
|
||||
}
|
||||
pub fn vector_reduce_max(&mut self, src: &'ll Value, is_signed: bool) -> &'ll Value {
|
||||
self.count_insn("vector.reduce.max");
|
||||
unsafe { llvm::LLVMRustBuildVectorReduceMax(self.llbuilder, src, is_signed) }
|
||||
}
|
||||
|
||||
@ -1419,7 +1316,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn catch_ret(&mut self, funclet: &Funclet<'ll>, unwind: &'ll BasicBlock) -> &'ll Value {
|
||||
self.count_insn("catchret");
|
||||
let ret = unsafe {
|
||||
llvm::LLVMRustBuildCatchRet(self.llbuilder, funclet.cleanuppad(), unwind)
|
||||
};
|
||||
@ -1488,7 +1384,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn va_arg(&mut self, list: &'ll Value, ty: &'ll Type) -> &'ll Value {
|
||||
self.count_insn("vaarg");
|
||||
unsafe {
|
||||
llvm::LLVMBuildVAArg(self.llbuilder, list, ty, noname())
|
||||
}
|
||||
@ -1511,7 +1406,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn phi(&mut self, ty: &'ll Type, vals: &[&'ll Value], bbs: &[&'ll BasicBlock]) -> &'ll Value {
|
||||
self.count_insn("addincoming");
|
||||
assert_eq!(vals.len(), bbs.len());
|
||||
let phi = unsafe {
|
||||
llvm::LLVMBuildPhi(self.llbuilder, ty, noname())
|
||||
@ -1525,7 +1419,6 @@ impl Builder<'a, 'll, 'tcx> {
|
||||
}
|
||||
|
||||
fn add_incoming_to_phi(&mut self, phi: &'ll Value, val: &'ll Value, bb: &'ll BasicBlock) {
|
||||
self.count_insn("addincoming");
|
||||
unsafe {
|
||||
llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
|
||||
}
|
||||
|
@ -102,7 +102,7 @@ fn check_and_apply_linkage(
|
||||
attrs: &CodegenFnAttrs,
|
||||
ty: Ty<'tcx>,
|
||||
sym: LocalInternedString,
|
||||
span: Option<Span>
|
||||
span: Span
|
||||
) -> &'ll Value {
|
||||
let llty = cx.layout_of(ty).llvm_type(cx);
|
||||
if let Some(linkage) = attrs.linkage {
|
||||
@ -116,11 +116,8 @@ fn check_and_apply_linkage(
|
||||
let llty2 = if let ty::RawPtr(ref mt) = ty.sty {
|
||||
cx.layout_of(mt.ty).llvm_type(cx)
|
||||
} else {
|
||||
if let Some(span) = span {
|
||||
cx.sess().span_fatal(span, "must have type `*const T` or `*mut T`")
|
||||
} else {
|
||||
bug!("must have type `*const T` or `*mut T`")
|
||||
}
|
||||
cx.sess().span_fatal(
|
||||
span, "must have type `*const T` or `*mut T` due to `#[linkage]` attribute")
|
||||
};
|
||||
unsafe {
|
||||
// Declare a symbol `foo` with the desired linkage.
|
||||
@ -136,14 +133,7 @@ fn check_and_apply_linkage(
|
||||
let mut real_name = "_rust_extern_with_linkage_".to_string();
|
||||
real_name.push_str(&sym);
|
||||
let g2 = cx.define_global(&real_name, llty).unwrap_or_else(||{
|
||||
if let Some(span) = span {
|
||||
cx.sess().span_fatal(
|
||||
span,
|
||||
&format!("symbol `{}` is already defined", &sym)
|
||||
)
|
||||
} else {
|
||||
bug!("symbol `{}` is already defined", &sym)
|
||||
}
|
||||
cx.sess().span_fatal(span, &format!("symbol `{}` is already defined", &sym))
|
||||
});
|
||||
llvm::LLVMRustSetLinkage(g2, llvm::Linkage::InternalLinkage);
|
||||
llvm::LLVMSetInitializer(g2, g1);
|
||||
@ -240,7 +230,7 @@ impl CodegenCx<'ll, 'tcx> {
|
||||
ref attrs, span, node: hir::ForeignItemKind::Static(..), ..
|
||||
}) => {
|
||||
let fn_attrs = self.tcx.codegen_fn_attrs(def_id);
|
||||
(check_and_apply_linkage(&self, &fn_attrs, ty, sym, Some(span)), attrs)
|
||||
(check_and_apply_linkage(&self, &fn_attrs, ty, sym, span), attrs)
|
||||
}
|
||||
|
||||
item => bug!("get_static: expected static, found {:?}", item)
|
||||
@ -260,7 +250,8 @@ impl CodegenCx<'ll, 'tcx> {
|
||||
debug!("get_static: sym={} item_attr={:?}", sym, self.tcx.item_attrs(def_id));
|
||||
|
||||
let attrs = self.tcx.codegen_fn_attrs(def_id);
|
||||
let g = check_and_apply_linkage(&self, &attrs, ty, sym, None);
|
||||
let span = self.tcx.def_span(def_id);
|
||||
let g = check_and_apply_linkage(&self, &attrs, ty, sym, span);
|
||||
|
||||
// Thread-local statics in some other crate need to *always* be linked
|
||||
// against in a thread-local fashion, so we need to be sure to apply the
|
||||
|
@ -12,7 +12,6 @@ use rustc_codegen_ssa::traits::*;
|
||||
|
||||
use rustc_data_structures::base_n;
|
||||
use rustc_data_structures::small_c_str::SmallCStr;
|
||||
use rustc::mir::mono::Stats;
|
||||
use rustc::session::config::{self, DebugInfo};
|
||||
use rustc::session::Session;
|
||||
use rustc::ty::layout::{
|
||||
@ -44,7 +43,6 @@ pub struct CodegenCx<'ll, 'tcx: 'll> {
|
||||
|
||||
pub llmod: &'ll llvm::Module,
|
||||
pub llcx: &'ll llvm::Context,
|
||||
pub stats: RefCell<Stats>,
|
||||
pub codegen_unit: Arc<CodegenUnit<'tcx>>,
|
||||
|
||||
/// Cache instances of monomorphic and polymorphic items
|
||||
@ -284,7 +282,6 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
|
||||
tls_model,
|
||||
llmod,
|
||||
llcx,
|
||||
stats: RefCell::new(Stats::default()),
|
||||
codegen_unit,
|
||||
instances: Default::default(),
|
||||
vtables: Default::default(),
|
||||
@ -408,14 +405,6 @@ impl MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
|
||||
self.check_overflow
|
||||
}
|
||||
|
||||
fn stats(&self) -> &RefCell<Stats> {
|
||||
&self.stats
|
||||
}
|
||||
|
||||
fn consume_stats(self) -> RefCell<Stats> {
|
||||
self.stats
|
||||
}
|
||||
|
||||
fn codegen_unit(&self) -> &Arc<CodegenUnit<'tcx>> {
|
||||
&self.codegen_unit
|
||||
}
|
||||
|
@ -52,7 +52,6 @@ use rustc_codegen_ssa::CompiledModule;
|
||||
use errors::{FatalError, Handler};
|
||||
use rustc::dep_graph::WorkProduct;
|
||||
use syntax_pos::symbol::InternedString;
|
||||
use rustc::mir::mono::Stats;
|
||||
pub use llvm_util::target_features;
|
||||
use std::any::Any;
|
||||
use std::sync::{mpsc, Arc};
|
||||
@ -130,8 +129,8 @@ impl ExtraBackendMethods for LlvmCodegenBackend {
|
||||
&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
cgu_name: InternedString,
|
||||
) -> Stats {
|
||||
base::compile_codegen_unit(tcx, cgu_name)
|
||||
) {
|
||||
base::compile_codegen_unit(tcx, cgu_name);
|
||||
}
|
||||
fn target_machine_factory(
|
||||
&self,
|
||||
|
@ -20,7 +20,7 @@ use rustc::hir::def_id::{DefId, LOCAL_CRATE};
|
||||
use rustc::middle::cstore::EncodedMetadata;
|
||||
use rustc::middle::lang_items::StartFnLangItem;
|
||||
use rustc::middle::weak_lang_items;
|
||||
use rustc::mir::mono::{Stats, CodegenUnitNameBuilder};
|
||||
use rustc::mir::mono::CodegenUnitNameBuilder;
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use rustc::ty::layout::{self, Align, TyLayout, LayoutOf, VariantIdx, HasTyCtxt};
|
||||
use rustc::ty::query::Providers;
|
||||
@ -28,7 +28,6 @@ use rustc::middle::cstore::{self, LinkagePreference};
|
||||
use rustc::util::common::{time, print_time_passes_entry};
|
||||
use rustc::session::config::{self, EntryFnType, Lto};
|
||||
use rustc::session::Session;
|
||||
use rustc_mir::monomorphize::item::DefPathBasedNames;
|
||||
use rustc_mir::monomorphize::Instance;
|
||||
use rustc_mir::monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
|
||||
use rustc::util::nodemap::FxHashMap;
|
||||
@ -58,40 +57,6 @@ use rustc::hir;
|
||||
|
||||
use crate::mir::operand::OperandValue;
|
||||
|
||||
use std::marker::PhantomData;

pub struct StatRecorder<'a, 'tcx, Cx: 'a + CodegenMethods<'tcx>> {
    cx: &'a Cx,
    name: Option<String>,
    istart: usize,
    _marker: PhantomData<&'tcx ()>,
}

impl<'a, 'tcx, Cx: CodegenMethods<'tcx>> StatRecorder<'a, 'tcx, Cx> {
    pub fn new(cx: &'a Cx, name: String) -> Self {
        let istart = cx.stats().borrow().n_llvm_insns;
        StatRecorder {
            cx,
            name: Some(name),
            istart,
            _marker: PhantomData,
        }
    }
}

impl<'a, 'tcx, Cx: CodegenMethods<'tcx>> Drop for StatRecorder<'a, 'tcx, Cx> {
    fn drop(&mut self) {
        if self.cx.sess().codegen_stats() {
            let mut stats = self.cx.stats().borrow_mut();
            let iend = stats.n_llvm_insns;
            stats.fn_stats.push((self.name.take().unwrap(), iend - self.istart));
            stats.n_fns += 1;
            // Reset LLVM insn count to avoid compound costs.
            stats.n_llvm_insns = self.istart;
        }
    }
}

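`StatRecorder` (removed by this commit along with the rest of the codegen stats) is an RAII guard: it snapshots a counter on construction and records the delta in `Drop`. A self-contained sketch of that same pattern, using a `Cell`-backed counter as a stand-in for the codegen stats:

use std::cell::Cell;

struct Counter {
    insns: Cell<usize>,
}

struct Recorder<'a> {
    counter: &'a Counter,
    name: String,
    start: usize,
}

impl<'a> Recorder<'a> {
    fn new(counter: &'a Counter, name: String) -> Self {
        // Snapshot the counter when the measured scope starts.
        Recorder { counter, name, start: counter.insns.get() }
    }
}

impl<'a> Drop for Recorder<'a> {
    fn drop(&mut self) {
        // On scope exit, report how much the counter grew inside the scope.
        let delta = self.counter.insns.get() - self.start;
        println!("{}: {} insns", self.name, delta);
    }
}

fn main() {
    let counter = Counter { insns: Cell::new(0) };
    {
        let _guard = Recorder::new(&counter, "demo_fn".to_string());
        counter.insns.set(counter.insns.get() + 3);
    } // `_guard` drops here and prints "demo_fn: 3 insns".
}
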
pub fn bin_op_to_icmp_predicate(op: hir::BinOpKind,
|
||||
signed: bool)
|
||||
-> IntPredicate {
|
||||
@ -408,15 +373,6 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
|
||||
cx: &'a Bx::CodegenCx,
|
||||
instance: Instance<'tcx>,
|
||||
) {
|
||||
let _s = if cx.sess().codegen_stats() {
|
||||
let mut instance_name = String::new();
|
||||
DefPathBasedNames::new(cx.tcx(), true, true)
|
||||
.push_def_path(instance.def_id(), &mut instance_name);
|
||||
Some(StatRecorder::new(cx, instance_name))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// this is an info! to allow collecting monomorphization statistics
|
||||
// and to allow finding the last function before LLVM aborts from
|
||||
// release builds.
|
||||
@ -428,8 +384,6 @@ pub fn codegen_instance<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
|
||||
let lldecl = cx.instances().borrow().get(&instance).cloned().unwrap_or_else(||
|
||||
bug!("Instance `{:?}` not already declared", instance));
|
||||
|
||||
cx.stats().borrow_mut().n_closures += 1;
|
||||
|
||||
let mir = cx.tcx().instance_mir(instance.def);
|
||||
mir::codegen_mir::<Bx>(cx, lldecl, &mir, instance, sig);
|
||||
}
|
||||
@ -653,7 +607,6 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
|
||||
};
|
||||
|
||||
let mut total_codegen_time = Duration::new(0, 0);
|
||||
let mut all_stats = Stats::default();
|
||||
|
||||
for cgu in codegen_units.into_iter() {
|
||||
ongoing_codegen.wait_for_signal_to_codegen_item();
|
||||
@ -666,8 +619,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
|
||||
CguReuse::No => {
|
||||
tcx.sess.profiler(|p| p.start_activity(format!("codegen {}", cgu.name())));
|
||||
let start_time = Instant::now();
|
||||
let stats = backend.compile_codegen_unit(tcx, *cgu.name());
|
||||
all_stats.extend(stats);
|
||||
backend.compile_codegen_unit(tcx, *cgu.name());
|
||||
total_codegen_time += start_time.elapsed();
|
||||
tcx.sess.profiler(|p| p.end_activity(format!("codegen {}", cgu.name())));
|
||||
false
|
||||
@ -701,28 +653,6 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
|
||||
|
||||
symbol_names_test::report_symbol_names(tcx);
|
||||
|
||||
if tcx.sess.codegen_stats() {
|
||||
println!("--- codegen stats ---");
|
||||
println!("n_glues_created: {}", all_stats.n_glues_created);
|
||||
println!("n_null_glues: {}", all_stats.n_null_glues);
|
||||
println!("n_real_glues: {}", all_stats.n_real_glues);
|
||||
|
||||
println!("n_fns: {}", all_stats.n_fns);
|
||||
println!("n_inlines: {}", all_stats.n_inlines);
|
||||
println!("n_closures: {}", all_stats.n_closures);
|
||||
println!("fn stats:");
|
||||
all_stats.fn_stats.sort_by_key(|&(_, insns)| insns);
|
||||
for &(ref name, insns) in all_stats.fn_stats.iter() {
|
||||
println!("{} insns, {}", insns, *name);
|
||||
}
|
||||
}
|
||||
|
||||
if tcx.sess.count_llvm_insns() {
|
||||
for (k, v) in all_stats.llvm_insns.iter() {
|
||||
println!("{:7} {}", *v, *k);
|
||||
}
|
||||
}
|
||||
|
||||
ongoing_codegen.check_for_errors(tcx.sess);
|
||||
|
||||
assert_and_save_dep_graph(tcx);
|
||||
|
@ -396,22 +396,20 @@ impl<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
|
||||
let cx = self.cx;
|
||||
let tcx = self.cx.tcx();
|
||||
|
||||
if let mir::Place::Base(mir::PlaceBase::Local(index)) = *place {
|
||||
match self.locals[index] {
|
||||
LocalRef::Place(place) => {
|
||||
return place;
|
||||
}
|
||||
LocalRef::UnsizedPlace(place) => {
|
||||
return bx.load_operand(place).deref(cx);
|
||||
}
|
||||
LocalRef::Operand(..) => {
|
||||
bug!("using operand local {:?} as place", place);
|
||||
let result = match *place {
|
||||
mir::Place::Base(mir::PlaceBase::Local(index)) => {
|
||||
match self.locals[index] {
|
||||
LocalRef::Place(place) => {
|
||||
return place;
|
||||
}
|
||||
LocalRef::UnsizedPlace(place) => {
|
||||
return bx.load_operand(place).deref(cx);
|
||||
}
|
||||
LocalRef::Operand(..) => {
|
||||
bug!("using operand local {:?} as place", place);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let result = match *place {
|
||||
mir::Place::Base(mir::PlaceBase::Local(_)) => bug!(), // handled above
|
||||
mir::Place::Base(
|
||||
mir::PlaceBase::Static(
|
||||
box mir::Static { ty, kind: mir::StaticKind::Promoted(promoted) }
|
||||
|
@ -5,7 +5,6 @@ use super::write::WriteBackendMethods;
|
||||
use super::CodegenObject;
|
||||
use rustc::middle::allocator::AllocatorKind;
|
||||
use rustc::middle::cstore::EncodedMetadata;
|
||||
use rustc::mir::mono::Stats;
|
||||
use rustc::session::{Session, config};
|
||||
use rustc::ty::TyCtxt;
|
||||
use rustc_codegen_utils::codegen_backend::CodegenBackend;
|
||||
@ -49,7 +48,7 @@ pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Se
|
||||
&self,
|
||||
tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
cgu_name: InternedString,
|
||||
) -> Stats;
|
||||
);
|
||||
// If find_features is true this won't access `sess.crate_types` by assuming
|
||||
// that `is_pie_binary` is false. When we discover LLVM target features
|
||||
// `sess.crate_types` is uninitialized so we cannot access it.
|
||||
|
@ -1,5 +1,4 @@
|
||||
use super::BackendTypes;
|
||||
use rustc::mir::mono::Stats;
|
||||
use rustc::session::Session;
|
||||
use rustc::ty::{self, Instance, Ty};
|
||||
use rustc::util::nodemap::FxHashMap;
|
||||
@ -17,8 +16,6 @@ pub trait MiscMethods<'tcx>: BackendTypes {
|
||||
fn eh_personality(&self) -> Self::Value;
|
||||
fn eh_unwind_resume(&self) -> Self::Value;
|
||||
fn sess(&self) -> &Session;
|
||||
fn stats(&self) -> &RefCell<Stats>;
|
||||
fn consume_stats(self) -> RefCell<Stats>;
|
||||
fn codegen_unit(&self) -> &Arc<CodegenUnit<'tcx>>;
|
||||
fn used_statics(&self) -> &RefCell<Vec<Self::Value>>;
|
||||
fn set_frame_pointer_elimination(&self, llfn: Self::Value);
|
||||
|
@@ -212,6 +212,11 @@ macro_rules! newtype_index {
        fn add_usize(&self, u: usize) -> Option<Self> {
            Idx::index(*self).checked_add(u).map(Self::new)
        }

        #[inline]
        fn sub_usize(&self, u: usize) -> Option<Self> {
            Idx::index(*self).checked_sub(u).map(Self::new)
        }
    }

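The new `sub_usize` (like the existing `add_usize`) leans on `checked_sub` / `checked_add`, which return `None` instead of wrapping or panicking when the arithmetic would go out of range. A small sketch of the same shape on a hypothetical index newtype (`MyIdx` is illustrative, not part of the macro):

#[derive(Copy, Clone, Debug, PartialEq)]
struct MyIdx(usize);

impl MyIdx {
    fn add_usize(self, u: usize) -> Option<MyIdx> {
        // `checked_add` yields None on overflow, so the index can never wrap.
        self.0.checked_add(u).map(MyIdx)
    }

    fn sub_usize(self, u: usize) -> Option<MyIdx> {
        // `checked_sub` yields None when `u` is larger than the index.
        self.0.checked_sub(u).map(MyIdx)
    }
}

fn main() {
    assert_eq!(MyIdx(5).sub_usize(2), Some(MyIdx(3)));
    assert_eq!(MyIdx(5).sub_usize(6), None);
    assert_eq!(MyIdx(usize::max_value()).add_usize(1), None);
}
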
impl From<$type> for u32 {
|
||||
|
@ -162,6 +162,7 @@ impl ColorConfig {
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles the writing of `HumanReadableErrorType::Default` and `HumanReadableErrorType::Short`
|
||||
pub struct EmitterWriter {
|
||||
dst: Destination,
|
||||
sm: Option<Lrc<SourceMapperDyn>>,
|
||||
@ -170,7 +171,8 @@ pub struct EmitterWriter {
|
||||
ui_testing: bool,
|
||||
}
|
||||
|
||||
struct FileWithAnnotatedLines {
|
||||
#[derive(Debug)]
|
||||
pub struct FileWithAnnotatedLines {
|
||||
file: Lrc<SourceFile>,
|
||||
lines: Vec<Line>,
|
||||
multiline_depth: usize,
|
||||
@ -221,169 +223,6 @@ impl EmitterWriter {
|
||||
}
|
||||
}
|
||||
|
||||
fn preprocess_annotations(&mut self, msp: &MultiSpan) -> Vec<FileWithAnnotatedLines> {
|
||||
fn add_annotation_to_file(file_vec: &mut Vec<FileWithAnnotatedLines>,
|
||||
file: Lrc<SourceFile>,
|
||||
line_index: usize,
|
||||
ann: Annotation) {
|
||||
|
||||
for slot in file_vec.iter_mut() {
|
||||
// Look through each of our files for the one we're adding to
|
||||
if slot.file.name == file.name {
|
||||
// See if we already have a line for it
|
||||
for line_slot in &mut slot.lines {
|
||||
if line_slot.line_index == line_index {
|
||||
line_slot.annotations.push(ann);
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We don't have a line yet, create one
|
||||
slot.lines.push(Line {
|
||||
line_index,
|
||||
annotations: vec![ann],
|
||||
});
|
||||
slot.lines.sort();
|
||||
return;
|
||||
}
|
||||
}
|
||||
// This is the first time we're seeing the file
|
||||
file_vec.push(FileWithAnnotatedLines {
|
||||
file,
|
||||
lines: vec![Line {
|
||||
line_index,
|
||||
annotations: vec![ann],
|
||||
}],
|
||||
multiline_depth: 0,
|
||||
});
|
||||
}
|
||||
|
||||
let mut output = vec![];
|
||||
let mut multiline_annotations = vec![];
|
||||
|
||||
if let Some(ref sm) = self.sm {
|
||||
for span_label in msp.span_labels() {
|
||||
if span_label.span.is_dummy() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let lo = sm.lookup_char_pos(span_label.span.lo());
|
||||
let mut hi = sm.lookup_char_pos(span_label.span.hi());
|
||||
|
||||
// Watch out for "empty spans". If we get a span like 6..6, we
|
||||
// want to just display a `^` at 6, so convert that to
|
||||
// 6..7. This is degenerate input, but it's best to degrade
|
||||
// gracefully -- and the parser likes to supply a span like
|
||||
// that for EOF, in particular.
|
||||
|
||||
if lo.col_display == hi.col_display && lo.line == hi.line {
|
||||
hi.col_display += 1;
|
||||
}
|
||||
|
||||
let ann_type = if lo.line != hi.line {
|
||||
let ml = MultilineAnnotation {
|
||||
depth: 1,
|
||||
line_start: lo.line,
|
||||
line_end: hi.line,
|
||||
start_col: lo.col_display,
|
||||
end_col: hi.col_display,
|
||||
is_primary: span_label.is_primary,
|
||||
label: span_label.label.clone(),
|
||||
overlaps_exactly: false,
|
||||
};
|
||||
multiline_annotations.push((lo.file.clone(), ml.clone()));
|
||||
AnnotationType::Multiline(ml)
|
||||
} else {
|
||||
AnnotationType::Singleline
|
||||
};
|
||||
let ann = Annotation {
|
||||
start_col: lo.col_display,
|
||||
end_col: hi.col_display,
|
||||
is_primary: span_label.is_primary,
|
||||
label: span_label.label.clone(),
|
||||
annotation_type: ann_type,
|
||||
};
|
||||
|
||||
if !ann.is_multiline() {
|
||||
add_annotation_to_file(&mut output, lo.file, lo.line, ann);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find overlapping multiline annotations, put them at different depths
|
||||
multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, ml.line_end));
|
||||
for item in multiline_annotations.clone() {
|
||||
let ann = item.1;
|
||||
for item in multiline_annotations.iter_mut() {
|
||||
let ref mut a = item.1;
|
||||
// Move all other multiline annotations overlapping with this one
|
||||
// one level to the right.
|
||||
if !(ann.same_span(a)) &&
|
||||
num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true)
|
||||
{
|
||||
a.increase_depth();
|
||||
} else if ann.same_span(a) && &ann != a {
|
||||
a.overlaps_exactly = true;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut max_depth = 0; // max overlapping multiline spans
|
||||
for (file, ann) in multiline_annotations {
|
||||
if ann.depth > max_depth {
|
||||
max_depth = ann.depth;
|
||||
}
|
||||
let mut end_ann = ann.as_end();
|
||||
if !ann.overlaps_exactly {
|
||||
// avoid output like
|
||||
//
|
||||
// | foo(
|
||||
// | _____^
|
||||
// | |_____|
|
||||
// | || bar,
|
||||
// | || );
|
||||
// | || ^
|
||||
// | ||______|
|
||||
// | |______foo
|
||||
// | baz
|
||||
//
|
||||
// and instead get
|
||||
//
|
||||
// | foo(
|
||||
// | _____^
|
||||
// | | bar,
|
||||
// | | );
|
||||
// | | ^
|
||||
// | | |
|
||||
// | |______foo
|
||||
// | baz
|
||||
add_annotation_to_file(&mut output, file.clone(), ann.line_start, ann.as_start());
|
||||
// 4 is the minimum vertical length of a multiline span when presented: two lines
|
||||
// of code and two lines of underline. This is not true for the special case where
|
||||
// the beginning doesn't have an underline, but the current logic seems to be
|
||||
// working correctly.
|
||||
let middle = min(ann.line_start + 4, ann.line_end);
|
||||
for line in ann.line_start + 1..middle {
|
||||
// Every `|` that joins the beginning of the span (`___^`) to the end (`|__^`).
|
||||
add_annotation_to_file(&mut output, file.clone(), line, ann.as_line());
|
||||
}
|
||||
if middle < ann.line_end - 1 {
|
||||
for line in ann.line_end - 1..ann.line_end {
|
||||
add_annotation_to_file(&mut output, file.clone(), line, ann.as_line());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
end_ann.annotation_type = AnnotationType::Singleline;
|
||||
}
|
||||
add_annotation_to_file(&mut output, file, ann.line_end, end_ann);
|
||||
}
|
||||
for file_vec in output.iter_mut() {
|
||||
file_vec.multiline_depth = max_depth;
|
||||
}
|
||||
output
|
||||
}
|
||||
|
||||
fn render_source_line(&self,
|
||||
buffer: &mut StyledBuffer,
|
||||
file: Lrc<SourceFile>,
|
||||
@ -1093,9 +932,7 @@ impl EmitterWriter {
|
||||
}
|
||||
}
|
||||
|
||||
// Preprocess all the annotations so that they are grouped by file and by line number
|
||||
// This helps us quickly iterate over the whole message (including secondary file spans)
|
||||
let mut annotated_files = self.preprocess_annotations(msp);
|
||||
let mut annotated_files = FileWithAnnotatedLines::collect_annotations(msp, &self.sm);
|
||||
|
||||
// Make sure our primary file comes first
|
||||
let (primary_lo, sm) = if let (Some(sm), Some(ref primary_span)) =
|
||||
@ -1503,6 +1340,176 @@ impl EmitterWriter {
|
||||
}
|
||||
}
|
||||
|
||||
impl FileWithAnnotatedLines {
|
||||
/// Preprocess all the annotations so that they are grouped by file and by line number
|
||||
/// This helps us quickly iterate over the whole message (including secondary file spans)
|
||||
pub fn collect_annotations(
|
||||
msp: &MultiSpan,
|
||||
source_map: &Option<Lrc<SourceMapperDyn>>
|
||||
) -> Vec<FileWithAnnotatedLines> {
|
||||
fn add_annotation_to_file(file_vec: &mut Vec<FileWithAnnotatedLines>,
|
||||
file: Lrc<SourceFile>,
|
||||
line_index: usize,
|
||||
ann: Annotation) {
|
||||
|
||||
for slot in file_vec.iter_mut() {
|
||||
// Look through each of our files for the one we're adding to
|
||||
if slot.file.name == file.name {
|
||||
// See if we already have a line for it
|
||||
for line_slot in &mut slot.lines {
|
||||
if line_slot.line_index == line_index {
|
||||
line_slot.annotations.push(ann);
|
||||
return;
|
||||
}
|
||||
}
|
||||
// We don't have a line yet, create one
|
||||
slot.lines.push(Line {
|
||||
line_index,
|
||||
annotations: vec![ann],
|
||||
});
|
||||
slot.lines.sort();
|
||||
return;
|
||||
}
|
||||
}
|
||||
// This is the first time we're seeing the file
|
||||
file_vec.push(FileWithAnnotatedLines {
|
||||
file,
|
||||
lines: vec![Line {
|
||||
line_index,
|
||||
annotations: vec![ann],
|
||||
}],
|
||||
multiline_depth: 0,
|
||||
});
|
||||
}
|
||||
|
||||
let mut output = vec![];
|
||||
let mut multiline_annotations = vec![];
|
||||
|
||||
if let Some(ref sm) = source_map {
|
||||
for span_label in msp.span_labels() {
|
||||
if span_label.span.is_dummy() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let lo = sm.lookup_char_pos(span_label.span.lo());
|
||||
let mut hi = sm.lookup_char_pos(span_label.span.hi());
|
||||
|
||||
// Watch out for "empty spans". If we get a span like 6..6, we
|
||||
// want to just display a `^` at 6, so convert that to
|
||||
// 6..7. This is degenerate input, but it's best to degrade
|
||||
// gracefully -- and the parser likes to supply a span like
|
||||
// that for EOF, in particular.
|
||||
|
||||
if lo.col_display == hi.col_display && lo.line == hi.line {
|
||||
hi.col_display += 1;
|
||||
}
|
||||
|
||||
let ann_type = if lo.line != hi.line {
|
||||
let ml = MultilineAnnotation {
|
||||
depth: 1,
|
||||
line_start: lo.line,
|
||||
line_end: hi.line,
|
||||
start_col: lo.col_display,
|
||||
end_col: hi.col_display,
|
||||
is_primary: span_label.is_primary,
|
||||
label: span_label.label.clone(),
|
||||
overlaps_exactly: false,
|
||||
};
|
||||
multiline_annotations.push((lo.file.clone(), ml.clone()));
|
||||
AnnotationType::Multiline(ml)
|
||||
} else {
|
||||
AnnotationType::Singleline
|
||||
};
|
||||
let ann = Annotation {
|
||||
start_col: lo.col_display,
|
||||
end_col: hi.col_display,
|
||||
is_primary: span_label.is_primary,
|
||||
label: span_label.label.clone(),
|
||||
annotation_type: ann_type,
|
||||
};
|
||||
|
||||
if !ann.is_multiline() {
|
||||
add_annotation_to_file(&mut output, lo.file, lo.line, ann);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Find overlapping multiline annotations, put them at different depths
|
||||
multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, ml.line_end));
|
||||
for item in multiline_annotations.clone() {
|
||||
let ann = item.1;
|
||||
for item in multiline_annotations.iter_mut() {
|
||||
let ref mut a = item.1;
|
||||
// Move all other multiline annotations overlapping with this one
|
||||
// one level to the right.
|
||||
if !(ann.same_span(a)) &&
|
||||
num_overlap(ann.line_start, ann.line_end, a.line_start, a.line_end, true)
|
||||
{
|
||||
a.increase_depth();
|
||||
} else if ann.same_span(a) && &ann != a {
|
||||
a.overlaps_exactly = true;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut max_depth = 0; // max overlapping multiline spans
|
||||
for (file, ann) in multiline_annotations {
|
||||
if ann.depth > max_depth {
|
||||
max_depth = ann.depth;
|
||||
}
|
||||
let mut end_ann = ann.as_end();
|
||||
if !ann.overlaps_exactly {
|
||||
// avoid output like
|
||||
//
|
||||
// | foo(
|
||||
// | _____^
|
||||
// | |_____|
|
||||
// | || bar,
|
||||
// | || );
|
||||
// | || ^
|
||||
// | ||______|
|
||||
// | |______foo
|
||||
// | baz
|
||||
//
|
||||
// and instead get
|
||||
//
|
||||
// | foo(
|
||||
// | _____^
|
||||
// | | bar,
|
||||
// | | );
|
||||
// | | ^
|
||||
// | | |
|
||||
// | |______foo
|
||||
// | baz
|
||||
add_annotation_to_file(&mut output, file.clone(), ann.line_start, ann.as_start());
|
||||
// 4 is the minimum vertical length of a multiline span when presented: two lines
|
||||
// of code and two lines of underline. This is not true for the special case where
|
||||
// the beginning doesn't have an underline, but the current logic seems to be
|
||||
// working correctly.
|
||||
let middle = min(ann.line_start + 4, ann.line_end);
|
||||
for line in ann.line_start + 1..middle {
|
||||
// Every `|` that joins the beginning of the span (`___^`) to the end (`|__^`).
|
||||
add_annotation_to_file(&mut output, file.clone(), line, ann.as_line());
|
||||
}
|
||||
if middle < ann.line_end - 1 {
|
||||
for line in ann.line_end - 1..ann.line_end {
|
||||
add_annotation_to_file(&mut output, file.clone(), line, ann.as_line());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
end_ann.annotation_type = AnnotationType::Singleline;
|
||||
}
|
||||
add_annotation_to_file(&mut output, file, ann.line_end, end_ann);
|
||||
}
|
||||
for file_vec in output.iter_mut() {
|
||||
file_vec.multiline_depth = max_depth;
|
||||
}
|
||||
output
|
||||
}
|
||||
}
|
||||
|
||||
fn draw_col_separator(buffer: &mut StyledBuffer, line: usize, col: usize) {
|
||||
buffer.puts(line, col, "| ", Style::LineNumber);
|
||||
}
|
||||
|
@@ -8,7 +8,7 @@ edition = "2018"
proc-macro = true

[dependencies]
synstructure = "0.10.1"
synstructure = "0.10.2"
syn = { version = "0.15.22", features = ["full"] }
proc-macro2 = "0.4.24"
quote = "0.6.10"

@@ -595,12 +595,11 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
    ) -> (String, String, String, String) {
        // Define a small closure that we can use to check if the type of a place
        // is a union.
        let is_union = |place: &Place<'tcx>| -> bool {
            place.ty(self.mir, self.infcx.tcx).ty
                .ty_adt_def()
                .map(|adt| adt.is_union())
                .unwrap_or(false)
        let union_ty = |place: &Place<'tcx>| -> Option<Ty<'tcx>> {
            let ty = place.ty(self.mir, self.infcx.tcx).ty;
            ty.ty_adt_def().filter(|adt| adt.is_union()).map(|_| ty)
        };
        let describe_place = |place| self.describe_place(place).unwrap_or_else(|| "_".to_owned());

        // Start with an empty tuple, so we can use the functions on `Option` to reduce some
        // code duplication (particularly around returning an empty description in the failure
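The refactor above replaces the `bool`-returning `is_union` closure with `union_ty`, which returns an `Option` so callers can both test for a union and reuse the type in the diagnostic note. A simplified, standalone sketch of that shape (the `Adt` and `Kind` types are stand-ins, not the compiler's):

#[derive(PartialEq)]
enum Kind { Union, Struct }

struct Adt { name: String, kind: Kind }

// Old shape: only answers "is it a union?".
fn is_union(adt: &Adt) -> bool {
    adt.kind == Kind::Union
}

// New shape: answers the same question, but hands back the type on success,
// so the caller can also print it in a note.
fn union_ty(adt: &Adt) -> Option<&Adt> {
    Some(adt).filter(|a| a.kind == Kind::Union)
}

fn main() {
    let u = Adt { name: "Foo".to_string(), kind: Kind::Union };
    assert!(is_union(&u));
    assert_eq!(union_ty(&u).map(|a| a.name.as_str()), Some("Foo"));

    let s = Adt { name: "Bar".to_string(), kind: Kind::Struct };
    assert!(union_ty(&s).is_none());
}
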
@ -619,7 +618,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
let mut current = first_borrowed_place;
|
||||
while let Place::Projection(box Projection { base, elem }) = current {
|
||||
match elem {
|
||||
ProjectionElem::Field(field, _) if is_union(base) => {
|
||||
ProjectionElem::Field(field, _) if union_ty(base).is_some() => {
|
||||
return Some((base, field));
|
||||
},
|
||||
_ => current = base,
|
||||
@ -632,34 +631,32 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
// borrowed place and look for a access to a different field of the same union.
|
||||
let mut current = second_borrowed_place;
|
||||
while let Place::Projection(box Projection { base, elem }) = current {
|
||||
match elem {
|
||||
ProjectionElem::Field(field, _) if {
|
||||
is_union(base) && field != target_field && base == target_base
|
||||
} => {
|
||||
let desc_base = self.describe_place(base)
|
||||
.unwrap_or_else(|| "_".to_owned());
|
||||
let desc_first = self.describe_place(first_borrowed_place)
|
||||
.unwrap_or_else(|| "_".to_owned());
|
||||
let desc_second = self.describe_place(second_borrowed_place)
|
||||
.unwrap_or_else(|| "_".to_owned());
|
||||
|
||||
// Also compute the name of the union type, eg. `Foo` so we
|
||||
// can add a helpful note with it.
|
||||
let ty = base.ty(self.mir, self.infcx.tcx).ty;
|
||||
|
||||
return Some((desc_base, desc_first, desc_second, ty.to_string()));
|
||||
},
|
||||
_ => current = base,
|
||||
if let ProjectionElem::Field(field, _) = elem {
|
||||
if let Some(union_ty) = union_ty(base) {
|
||||
if field != target_field && base == target_base {
|
||||
return Some((
|
||||
describe_place(base),
|
||||
describe_place(first_borrowed_place),
|
||||
describe_place(second_borrowed_place),
|
||||
union_ty.to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
current = base;
|
||||
}
|
||||
None
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
// If we didn't find a field access into a union, or both places match, then
|
||||
// only return the description of the first place.
|
||||
let desc_place = self.describe_place(first_borrowed_place)
|
||||
.unwrap_or_else(|| "_".to_owned());
|
||||
(desc_place, "".to_string(), "".to_string(), "".to_string())
|
||||
(
|
||||
describe_place(first_borrowed_place),
|
||||
"".to_string(),
|
||||
"".to_string(),
|
||||
"".to_string(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
@ -1616,7 +1613,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
);
|
||||
|
||||
// Find the local from the operand.
|
||||
let assigned_from_local = match assigned_from.local() {
|
||||
let assigned_from_local = match assigned_from.local_or_deref_local() {
|
||||
Some(local) => local,
|
||||
None => continue,
|
||||
};
|
||||
@ -1672,7 +1669,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
);
|
||||
|
||||
// Find the local from the rvalue.
|
||||
let assigned_from_local = match assigned_from.local() {
|
||||
let assigned_from_local = match assigned_from.local_or_deref_local() {
|
||||
Some(local) => local,
|
||||
None => continue,
|
||||
};
|
||||
@ -1735,7 +1732,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
assigned_from,
|
||||
);
|
||||
|
||||
if let Some(assigned_from_local) = assigned_from.local() {
|
||||
if let Some(assigned_from_local) = assigned_from.local_or_deref_local() {
|
||||
debug!(
|
||||
"annotate_argument_and_return_for_borrow: assigned_from_local={:?}",
|
||||
assigned_from_local,
|
||||
|
@ -37,15 +37,15 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
diag: &mut DiagnosticBuilder<'_>,
|
||||
) {
|
||||
debug!("add_moved_or_invoked_closure_note: location={:?} place={:?}", location, place);
|
||||
let mut target = place.local();
|
||||
let mut target = place.local_or_deref_local();
|
||||
for stmt in &self.mir[location.block].statements[location.statement_index..] {
|
||||
debug!("add_moved_or_invoked_closure_note: stmt={:?} target={:?}", stmt, target);
|
||||
if let StatementKind::Assign(into, box Rvalue::Use(from)) = &stmt.kind {
|
||||
debug!("add_fnonce_closure_note: into={:?} from={:?}", into, from);
|
||||
match from {
|
||||
Operand::Copy(ref place) |
|
||||
Operand::Move(ref place) if target == place.local() =>
|
||||
target = into.local(),
|
||||
Operand::Move(ref place) if target == place.local_or_deref_local() =>
|
||||
target = into.local_or_deref_local(),
|
||||
_ => {},
|
||||
}
|
||||
}
|
||||
@ -69,8 +69,8 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
|
||||
if self.infcx.tcx.parent(id) == self.infcx.tcx.lang_items().fn_once_trait() {
|
||||
let closure = match args.first() {
|
||||
Some(Operand::Copy(ref place)) |
|
||||
Some(Operand::Move(ref place)) if target == place.local() =>
|
||||
place.local().unwrap(),
|
||||
Some(Operand::Move(ref place)) if target == place.local_or_deref_local() =>
|
||||
place.local_or_deref_local().unwrap(),
|
||||
_ => return,
|
||||
};
|
||||
|
||||
|
@ -528,7 +528,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
|
||||
}) => {
|
||||
// Not projected from the implicit `self` in a closure.
|
||||
debug_assert!(
|
||||
match base.local() {
|
||||
match base.local_or_deref_local() {
|
||||
Some(local) => local == Local::new(1),
|
||||
None => false,
|
||||
},
|
||||
|
@ -46,8 +46,10 @@ impl<'a, 'tcx> BitDenotation<'tcx> for MaybeStorageLive<'a, 'tcx> {
|
||||
sets: &mut BlockSets<'_, Local>,
|
||||
loc: Location) {
|
||||
match &self.mir[loc.block].terminator().kind {
|
||||
TerminatorKind::Drop { location, .. } => if let Some(l) = location.local() {
|
||||
sets.kill(l);
|
||||
TerminatorKind::Drop { location, .. } => {
|
||||
if let Some(l) = location.local_or_deref_local() {
|
||||
sets.kill(l);
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
@ -1703,8 +1703,13 @@ impl<'a, 'tcx> PrivateItemsInPublicInterfacesVisitor<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn check_trait_or_impl_item(&self, hir_id: hir::HirId, assoc_item_kind: AssocItemKind,
|
||||
defaultness: hir::Defaultness, vis: ty::Visibility) {
|
||||
fn check_assoc_item(
|
||||
&self,
|
||||
hir_id: hir::HirId,
|
||||
assoc_item_kind: AssocItemKind,
|
||||
defaultness: hir::Defaultness,
|
||||
vis: ty::Visibility,
|
||||
) {
|
||||
let mut check = self.check(hir_id, vis);
|
||||
|
||||
let (check_ty, is_assoc_ty) = match assoc_item_kind {
|
||||
@ -1754,8 +1759,12 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx>
|
||||
self.check(item.hir_id, item_visibility).generics().predicates();
|
||||
|
||||
for trait_item_ref in trait_item_refs {
|
||||
self.check_trait_or_impl_item(trait_item_ref.id.hir_id, trait_item_ref.kind,
|
||||
trait_item_ref.defaultness, item_visibility);
|
||||
self.check_assoc_item(
|
||||
trait_item_ref.id.hir_id,
|
||||
trait_item_ref.kind,
|
||||
trait_item_ref.defaultness,
|
||||
item_visibility,
|
||||
);
|
||||
}
|
||||
}
|
||||
hir::ItemKind::TraitAlias(..) => {
|
||||
@ -1803,8 +1812,12 @@ impl<'a, 'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'a, 'tcx>
|
||||
} else {
|
||||
impl_vis
|
||||
};
|
||||
self.check_trait_or_impl_item(impl_item_ref.id.hir_id, impl_item_ref.kind,
|
||||
impl_item_ref.defaultness, impl_item_vis);
|
||||
self.check_assoc_item(
|
||||
impl_item_ref.id.hir_id,
|
||||
impl_item_ref.kind,
|
||||
impl_item_ref.defaultness,
|
||||
impl_item_vis,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -860,7 +860,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> {
|
||||
FnKind::ItemFn(_, ref header, ..) =>
|
||||
(FnItemRibKind, &header.asyncness.node),
|
||||
FnKind::Method(_, ref sig, _, _) =>
|
||||
(TraitOrImplItemRibKind, &sig.header.asyncness.node),
|
||||
(AssocItemRibKind, &sig.header.asyncness.node),
|
||||
FnKind::Closure(_) =>
|
||||
// Async closures aren't resolved through `visit_fn`-- they're
|
||||
// processed separately
|
||||
@ -1033,7 +1033,7 @@ enum RibKind<'a> {
|
||||
/// methods or associated types. Allow references to ty params that impl or trait
|
||||
/// binds. Disallow any other upvars (including other ty params that are
|
||||
/// upvars).
|
||||
TraitOrImplItemRibKind,
|
||||
AssocItemRibKind,
|
||||
|
||||
/// We passed through a function definition. Disallow upvars.
|
||||
/// Permit only those const parameters that are specified in the function's generics.
|
||||
@ -2612,7 +2612,7 @@ impl<'a> Resolver<'a> {
|
||||
|
||||
for trait_item in trait_items {
|
||||
let generic_params = HasGenericParams(&trait_item.generics,
|
||||
TraitOrImplItemRibKind);
|
||||
AssocItemRibKind);
|
||||
this.with_generic_param_rib(generic_params, |this| {
|
||||
match trait_item.node {
|
||||
TraitItemKind::Const(ref ty, ref default) => {
|
||||
@ -2899,7 +2899,7 @@ impl<'a> Resolver<'a> {
|
||||
|
||||
// We also need a new scope for the impl item type parameters.
|
||||
let generic_params = HasGenericParams(&impl_item.generics,
|
||||
TraitOrImplItemRibKind);
|
||||
AssocItemRibKind);
|
||||
this.with_generic_param_rib(generic_params, |this| {
|
||||
use self::ResolutionError::*;
|
||||
match impl_item.node {
|
||||
@ -4074,7 +4074,7 @@ impl<'a> Resolver<'a> {
|
||||
seen.insert(node_id, depth);
|
||||
}
|
||||
}
|
||||
ItemRibKind | FnItemRibKind | TraitOrImplItemRibKind => {
|
||||
ItemRibKind | FnItemRibKind | AssocItemRibKind => {
|
||||
// This was an attempt to access an upvar inside a
|
||||
// named function item. This is not allowed, so we
|
||||
// report an error.
|
||||
@ -4103,7 +4103,7 @@ impl<'a> Resolver<'a> {
|
||||
Res::Def(DefKind::TyParam, _) | Res::SelfTy(..) => {
|
||||
for rib in ribs {
|
||||
match rib.kind {
|
||||
NormalRibKind | TraitOrImplItemRibKind | ClosureRibKind(..) |
|
||||
NormalRibKind | AssocItemRibKind | ClosureRibKind(..) |
|
||||
ModuleRibKind(..) | MacroDefinition(..) | ForwardTyParamBanRibKind |
|
||||
ConstantItemRibKind | TyParamAsConstParamTy => {
|
||||
// Nothing to do. Continue.
|
||||
|
@ -270,7 +270,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
None
|
||||
}
|
||||
|
||||
fn is_hir_id_from_struct_pattern_shorthand_field(&self, hir_id: hir::HirId, sp: Span) -> bool {
|
||||
crate fn is_hir_id_from_struct_pattern_shorthand_field(
|
||||
&self,
|
||||
hir_id: hir::HirId,
|
||||
sp: Span,
|
||||
) -> bool {
|
||||
let cm = self.sess().source_map();
|
||||
let parent_id = self.tcx.hir().get_parent_node_by_hir_id(hir_id);
|
||||
if let Some(parent) = self.tcx.hir().find_by_hir_id(parent_id) {
|
||||
|
@ -5010,6 +5010,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
} else if !self.check_for_cast(err, expr, found, expected) {
|
||||
let is_struct_pat_shorthand_field = self.is_hir_id_from_struct_pattern_shorthand_field(
|
||||
expr.hir_id,
|
||||
expr.span,
|
||||
);
|
||||
let methods = self.get_conversion_methods(expr.span, expected, found);
|
||||
if let Ok(expr_text) = self.sess().source_map().span_to_snippet(expr.span) {
|
||||
let mut suggestions = iter::repeat(&expr_text).zip(methods.iter())
|
||||
@ -5019,14 +5023,18 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
None // do not suggest code that is already there (#53348)
|
||||
} else {
|
||||
let method_call_list = [".to_vec()", ".to_string()"];
|
||||
if receiver.ends_with(".clone()")
|
||||
let sugg = if receiver.ends_with(".clone()")
|
||||
&& method_call_list.contains(&method_call.as_str()) {
|
||||
let max_len = receiver.rfind(".").unwrap();
|
||||
Some(format!("{}{}", &receiver[..max_len], method_call))
|
||||
}
|
||||
else {
|
||||
Some(format!("{}{}", receiver, method_call))
|
||||
}
|
||||
format!("{}{}", &receiver[..max_len], method_call)
|
||||
} else {
|
||||
format!("{}{}", receiver, method_call)
|
||||
};
|
||||
Some(if is_struct_pat_shorthand_field {
|
||||
format!("{}: {}", receiver, sugg)
|
||||
} else {
|
||||
sugg
|
||||
})
|
||||
}
|
||||
}).peekable();
|
||||
if suggestions.peek().is_some() {
|
||||
|
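The reworked suggestion logic above avoids stacking conversions onto an existing `.clone()` call: when the receiver already ends in `.clone()` and the suggested method is `.to_vec()` or `.to_string()`, the trailing `.clone()` is cut off before appending. A standalone sketch of just that string manipulation (the helper name is hypothetical):

fn trim_redundant_clone(receiver: &str, method_call: &str) -> String {
    let method_call_list = [".to_vec()", ".to_string()"];
    if receiver.ends_with(".clone()") && method_call_list.contains(&method_call) {
        // Cut the receiver back to just before the final `.`, i.e. the `.clone()` call.
        let max_len = receiver.rfind('.').unwrap();
        format!("{}{}", &receiver[..max_len], method_call)
    } else {
        format!("{}{}", receiver, method_call)
    }
}

fn main() {
    assert_eq!(trim_redundant_clone("name.clone()", ".to_string()"), "name.to_string()");
    assert_eq!(trim_redundant_clone("name", ".to_string()"), "name.to_string()");
}
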
@@ -27,7 +27,7 @@ use rustc::ty::subst::{Subst, InternalSubsts};
use rustc::ty::util::Discr;
use rustc::ty::util::IntTypeExt;
use rustc::ty::subst::UnpackedKind;
use rustc::ty::{self, AdtKind, ToPolyTraitRef, Ty, TyCtxt};
use rustc::ty::{self, AdtKind, DefIdTree, ToPolyTraitRef, Ty, TyCtxt};
use rustc::ty::{ReprOptions, ToPredicate};
use rustc::util::captures::Captures;
use rustc::util::nodemap::FxHashMap;
@@ -1349,65 +1349,61 @@ pub fn checked_type_of<'a, 'tcx>(
match path {
QPath::Resolved(_, ref path) => {
let mut arg_index = 0;
let mut found_const = false;
for seg in &path.segments {
if let Some(generic_args) = &seg.args {
let args = &generic_args.args;
for arg in args {
if let GenericArg::Const(ct) = arg {
if ct.value.hir_id == hir_id {
found_const = true;
break;
}
arg_index += 1;
}
}
}
}
// Sanity check to make sure everything is as expected.
if !found_const {
if !fail {
return None;
}
bug!("no arg matching AnonConst in path")
}
match path.res {
// We've encountered an `AnonConst` in some path, so we need to
// figure out which generic parameter it corresponds to and return
// the relevant type.
Res::Def(DefKind::Struct, def_id)
| Res::Def(DefKind::Union, def_id)
| Res::Def(DefKind::Enum, def_id)
| Res::Def(DefKind::Fn, def_id) => {
let generics = tcx.generics_of(def_id);
let mut param_index = 0;
for param in &generics.params {
if let ty::GenericParamDefKind::Const = param.kind {
if param_index == arg_index {
return Some(tcx.type_of(param.def_id));
}
param_index += 1;
}
}
// This is no generic parameter associated with the arg. This is
// probably from an extra arg where one is not needed.
return Some(tcx.types.err);
}
Res::Err => tcx.types.err,
x => {
let arg_index = path.segments.iter()
.filter_map(|seg| seg.args.as_ref())
.map(|generic_args| generic_args.args.as_ref())
.find_map(|args| {
args.iter()
.filter(|arg| arg.is_const())
.enumerate()
.filter(|(_, arg)| arg.id() == hir_id)
.map(|(index, _)| index)
.next()
})
.or_else(|| {
if !fail {
return None;
None
} else {
bug!("no arg matching AnonConst in path")
}
})?;

// We've encountered an `AnonConst` in some path, so we need to
// figure out which generic parameter it corresponds to and return
// the relevant type.
let generics = match path.res {
Res::Def(DefKind::Ctor(..), def_id) =>
tcx.generics_of(tcx.parent(def_id).unwrap()),
Res::Def(_, def_id) =>
tcx.generics_of(def_id),
Res::Err =>
return Some(tcx.types.err),
_ if !fail =>
return None,
x => {
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!(
"unexpected const parent path def {:?}", x
),
);
tcx.types.err
return Some(tcx.types.err);
}
}
};

generics.params.iter()
.filter(|param| {
if let ty::GenericParamDefKind::Const = param.kind {
true
} else {
false
}
})
.nth(arg_index)
.map(|param| tcx.type_of(param.def_id))
// This is no generic parameter associated with the arg. This is
// probably from an extra arg where one is not needed.
.unwrap_or(tcx.types.err)
}
x => {
if !fail {
@@ -9,7 +9,7 @@ name = "rustdoc"
path = "lib.rs"

[dependencies]
pulldown-cmark = { version = "0.4.1", default-features = false }
pulldown-cmark = { version = "0.5.2", default-features = false }
minifier = "0.0.30"
tempfile = "3"
parking_lot = "0.7"
@@ -3145,10 +3145,7 @@ impl<'tcx> Clean<Constant> for ty::Const<'tcx> {
fn clean(&self, cx: &DocContext<'_>) -> Constant {
Constant {
type_: self.ty.clean(cx),
expr: match self.val {
ConstValue::Param(ty::ParamConst { name, .. }) => format!("{}", name),
e => format!("{:?}", e), // FIXME generic consts with expressions
},
expr: format!("{}", self),
}
}
}
@@ -351,9 +351,11 @@ impl<'a, 'b, 'ids, I: Iterator<Item = Event<'a>>> Iterator for HeadingLinks<'a,
if let Some(Event::Start(Tag::Header(level))) = event {
let mut id = String::new();
for event in &mut self.inner {
match event {
match &event {
Event::End(Tag::Header(..)) => break,
Event::Text(ref text) => id.extend(text.chars().filter_map(slugify)),
Event::Text(text) | Event::Code(text) => {
id.extend(text.chars().filter_map(slugify));
}
_ => {},
}
self.buf.push_back(event);
@@ -402,7 +404,6 @@ fn check_if_allowed_tag(t: &Tag<'_>) -> bool {
| Tag::Item
| Tag::Emphasis
| Tag::Strong
| Tag::Code
| Tag::Link(..)
| Tag::BlockQuote => true,
_ => false,
@@ -790,9 +791,8 @@ pub fn plain_summary_line_full(md: &str, limit_length: bool) -> String {
let next_event = next_event.unwrap();
let (ret, is_in) = match next_event {
Event::Start(Tag::Paragraph) => (None, 1),
Event::Start(Tag::Code) => (Some("`".to_owned()), 1),
Event::End(Tag::Code) => (Some("`".to_owned()), -1),
Event::Start(Tag::Header(_)) => (None, 1),
Event::Code(code) => (Some(format!("`{}`", code)), 0),
Event::Text(ref s) if self.is_in > 0 => (Some(s.as_ref().to_owned()), 0),
Event::End(Tag::Paragraph) | Event::End(Tag::Header(_)) => (None, -1),
_ => (None, 0),
@@ -17,7 +17,7 @@ use std::io::prelude::*;
use std::io;
use std::panic::{self, AssertUnwindSafe};
use std::path::PathBuf;
use std::process::Command;
use std::process::{self, Command};
use std::str;
use std::sync::{Arc, Mutex};
use syntax::symbol::sym;
@@ -160,13 +160,45 @@ fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions {
opts
}

fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
cfgs: Vec<String>, libs: Vec<SearchPath>,
cg: CodegenOptions, externs: Externs,
should_panic: bool, no_run: bool, as_test_harness: bool,
compile_fail: bool, mut error_codes: Vec<String>, opts: &TestOptions,
maybe_sysroot: Option<PathBuf>, linker: Option<PathBuf>, edition: Edition,
persist_doctests: Option<PathBuf>) {
/// Documentation test failure modes.
enum TestFailure {
/// The test failed to compile.
CompileError,
/// The test is marked `compile_fail` but compiled successfully.
UnexpectedCompilePass,
/// The test failed to compile (as expected) but the compiler output did not contain all
/// expected error codes.
MissingErrorCodes(Vec<String>),
/// The test binary was unable to be executed.
ExecutionError(io::Error),
/// The test binary exited with a non-zero exit code.
///
/// This typically means an assertion in the test failed or another form of panic occurred.
ExecutionFailure(process::Output),
/// The test is marked `should_panic` but the test binary executed successfully.
UnexpectedRunPass,
}

fn run_test(
test: &str,
cratename: &str,
filename: &FileName,
line: usize,
cfgs: Vec<String>,
libs: Vec<SearchPath>,
cg: CodegenOptions,
externs: Externs,
should_panic: bool,
no_run: bool,
as_test_harness: bool,
compile_fail: bool,
mut error_codes: Vec<String>,
opts: &TestOptions,
maybe_sysroot: Option<PathBuf>,
linker: Option<PathBuf>,
edition: Edition,
persist_doctests: Option<PathBuf>,
) -> Result<(), TestFailure> {
let (test, line_offset) = match panic::catch_unwind(|| {
make_test(test, Some(cratename), as_test_harness, opts, edition)
}) {
@@ -307,44 +339,43 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
match (compile_result, compile_fail) {
(Ok(()), true) => {
panic!("test compiled while it wasn't supposed to")
return Err(TestFailure::UnexpectedCompilePass);
}
(Ok(()), false) => {}
(Err(_), true) => {
if error_codes.len() > 0 {
if !error_codes.is_empty() {
let out = String::from_utf8(data.lock().unwrap().to_vec()).unwrap();
error_codes.retain(|err| !out.contains(err));

if !error_codes.is_empty() {
return Err(TestFailure::MissingErrorCodes(error_codes));
}
}
}
(Err(_), false) => {
panic!("couldn't compile the test")
return Err(TestFailure::CompileError);
}
}

if error_codes.len() > 0 {
panic!("Some expected error codes were not found: {:?}", error_codes);
if no_run {
return Ok(());
}

if no_run { return }

// Run the code!
let mut cmd = Command::new(output_file);

match cmd.output() {
Err(e) => panic!("couldn't run the test: {}{}", e,
if e.kind() == io::ErrorKind::PermissionDenied {
" - maybe your tempdir is mounted with noexec?"
} else { "" }),
Err(e) => return Err(TestFailure::ExecutionError(e)),
Ok(out) => {
if should_panic && out.status.success() {
panic!("test executable succeeded when it should have failed");
return Err(TestFailure::UnexpectedRunPass);
} else if !should_panic && !out.status.success() {
panic!("test executable failed:\n{}\n{}\n",
str::from_utf8(&out.stdout).unwrap_or(""),
str::from_utf8(&out.stderr).unwrap_or(""));
return Err(TestFailure::ExecutionFailure(out));
}
}
}

Ok(())
}

/// Transforms a test into code that can be compiled into a Rust binary, and returns the number of
@@ -499,8 +530,13 @@ pub fn make_test(s: &str,
prog.push_str(everything_else);
} else {
let returns_result = everything_else.trim_end().ends_with("(())");
let returns_option = everything_else.trim_end().ends_with("Some(())");
let (main_pre, main_post) = if returns_result {
("fn main() { fn _inner() -> Result<(), impl core::fmt::Debug> {",
(if returns_option {
"fn main() { fn _inner() -> Option<()> {"
} else {
"fn main() { fn _inner() -> Result<(), impl core::fmt::Debug> {"
},
"}\n_inner().unwrap() }")
} else {
("fn main() {\n", "\n}")
@@ -711,7 +747,7 @@ impl Tester for Collector {
allow_fail: config.allow_fail,
},
testfn: testing::DynTestFn(box move || {
run_test(
let res = run_test(
&test,
&cratename,
&filename,
@@ -730,7 +766,65 @@ impl Tester for Collector {
linker,
edition,
persist_doctests
)
);

if let Err(err) = res {
match err {
TestFailure::CompileError => {
eprint!("Couldn't compile the test.");
}
TestFailure::UnexpectedCompilePass => {
eprint!("Test compiled successfully, but it's marked `compile_fail`.");
}
TestFailure::UnexpectedRunPass => {
eprint!("Test executable succeeded, but it's marked `should_panic`.");
}
TestFailure::MissingErrorCodes(codes) => {
eprint!("Some expected error codes were not found: {:?}", codes);
}
TestFailure::ExecutionError(err) => {
eprint!("Couldn't run the test: {}", err);
if err.kind() == io::ErrorKind::PermissionDenied {
eprint!(" - maybe your tempdir is mounted with noexec?");
}
}
TestFailure::ExecutionFailure(out) => {
let reason = if let Some(code) = out.status.code() {
format!("exit code {}", code)
} else {
String::from("terminated by signal")
};

eprintln!("Test executable failed ({}).", reason);

// FIXME(#12309): An unfortunate side-effect of capturing the test
// executable's output is that the relative ordering between the test's
// stdout and stderr is lost. However, this is better than the
// alternative: if the test executable inherited the parent's I/O
// handles the output wouldn't be captured at all, even on success.
//
// The ordering could be preserved if the test process' stderr was
// redirected to stdout, but that functionality does not exist in the
// standard library, so it may not be portable enough.
let stdout = str::from_utf8(&out.stdout).unwrap_or_default();
let stderr = str::from_utf8(&out.stderr).unwrap_or_default();

if !stdout.is_empty() || !stderr.is_empty() {
eprintln!();

if !stdout.is_empty() {
eprintln!("stdout:\n{}", stdout);
}

if !stderr.is_empty() {
eprintln!("stderr:\n{}", stderr);
}
}
}
}

panic::resume_unwind(box ());
}
}),
});
}
@@ -94,7 +94,7 @@ pub trait Error: Debug + Display {
/// "I'm the superhero of errors"
/// }
///
/// fn cause(&self) -> Option<&Error> {
/// fn cause(&self) -> Option<&dyn Error> {
/// Some(&self.side)
/// }
/// }
@@ -244,7 +244,7 @@ impl<'a, E: Error + 'a> From<E> for Box<dyn Error + 'a> {
///
/// let an_error = AnError;
/// assert!(0 == mem::size_of_val(&an_error));
/// let a_boxed_error = Box::<Error>::from(an_error);
/// let a_boxed_error = Box::<dyn Error>::from(an_error);
/// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
/// ```
fn from(err: E) -> Box<dyn Error + 'a> {
@@ -287,7 +287,7 @@ impl<'a, E: Error + Send + Sync + 'a> From<E> for Box<dyn Error + Send + Sync +
///
/// let an_error = AnError;
/// assert!(0 == mem::size_of_val(&an_error));
/// let a_boxed_error = Box::<Error + Send + Sync>::from(an_error);
/// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(an_error);
/// assert!(
/// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
/// ```
@@ -309,7 +309,7 @@ impl From<String> for Box<dyn Error + Send + Sync> {
/// use std::mem;
///
/// let a_string_error = "a string error".to_string();
/// let a_boxed_error = Box::<Error + Send + Sync>::from(a_string_error);
/// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(a_string_error);
/// assert!(
/// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
/// ```
@@ -344,7 +344,7 @@ impl From<String> for Box<dyn Error> {
/// use std::mem;
///
/// let a_string_error = "a string error".to_string();
/// let a_boxed_error = Box::<Error>::from(a_string_error);
/// let a_boxed_error = Box::<dyn Error>::from(a_string_error);
/// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
/// ```
fn from(str_err: String) -> Box<dyn Error> {
@@ -367,7 +367,7 @@ impl<'a> From<&str> for Box<dyn Error + Send + Sync + 'a> {
/// use std::mem;
///
/// let a_str_error = "a str error";
/// let a_boxed_error = Box::<Error + Send + Sync>::from(a_str_error);
/// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(a_str_error);
/// assert!(
/// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
/// ```
@@ -389,7 +389,7 @@ impl From<&str> for Box<dyn Error> {
/// use std::mem;
///
/// let a_str_error = "a str error";
/// let a_boxed_error = Box::<Error>::from(a_str_error);
/// let a_boxed_error = Box::<dyn Error>::from(a_str_error);
/// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
/// ```
fn from(err: &str) -> Box<dyn Error> {
@@ -412,7 +412,7 @@ impl<'a, 'b> From<Cow<'b, str>> for Box<dyn Error + Send + Sync + 'a> {
/// use std::borrow::Cow;
///
/// let a_cow_str_error = Cow::from("a str error");
/// let a_boxed_error = Box::<Error + Send + Sync>::from(a_cow_str_error);
/// let a_boxed_error = Box::<dyn Error + Send + Sync>::from(a_cow_str_error);
/// assert!(
/// mem::size_of::<Box<dyn Error + Send + Sync>>() == mem::size_of_val(&a_boxed_error))
/// ```
@@ -436,7 +436,7 @@ impl<'a> From<Cow<'a, str>> for Box<dyn Error> {
/// use std::borrow::Cow;
///
/// let a_cow_str_error = Cow::from("a str error");
/// let a_boxed_error = Box::<Error>::from(a_cow_str_error);
/// let a_boxed_error = Box::<dyn Error>::from(a_cow_str_error);
/// assert!(mem::size_of::<Box<dyn Error>>() == mem::size_of_val(&a_boxed_error))
/// ```
fn from(err: Cow<'a, str>) -> Box<dyn Error> {
@@ -1976,7 +1976,7 @@ pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> io::Result<()> {
/// use std::path::Path;
///
/// // one possible implementation of walking a directory only visiting files
/// fn visit_dirs(dir: &Path, cb: &Fn(&DirEntry)) -> io::Result<()> {
/// fn visit_dirs(dir: &Path, cb: &dyn Fn(&DirEntry)) -> io::Result<()> {
/// if dir.is_dir() {
/// for entry in fs::read_dir(dir)? {
/// let entry = entry?;
@@ -158,7 +158,6 @@ impl<R> BufReader<R> {
/// # Examples
///
/// ```no_run
/// # #![feature(bufreader_buffer)]
/// use std::io::{BufReader, BufRead};
/// use std::fs::File;
///
@@ -173,7 +172,7 @@ impl<R> BufReader<R> {
/// Ok(())
/// }
/// ```
#[unstable(feature = "bufreader_buffer", issue = "45323")]
#[stable(feature = "bufreader_buffer", since = "1.37.0")]
pub fn buffer(&self) -> &[u8] {
&self.buf[self.pos..self.cap]
}
@@ -552,7 +551,6 @@ impl<W: Write> BufWriter<W> {
/// # Examples
///
/// ```no_run
/// # #![feature(bufreader_buffer)]
/// use std::io::BufWriter;
/// use std::net::TcpStream;
///
@@ -561,7 +559,7 @@ impl<W: Write> BufWriter<W> {
/// // See how many bytes are currently buffered
/// let bytes_buffered = buf_writer.buffer().len();
/// ```
#[unstable(feature = "bufreader_buffer", issue = "45323")]
#[stable(feature = "bufreader_buffer", since = "1.37.0")]
pub fn buffer(&self) -> &[u8] {
&self.buf
}
@@ -1087,6 +1087,11 @@ impl<'a> Parser<'a> {
}
}

/// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
}

/// Is the current token one of the keywords that signals a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
self.check_keyword(kw::Fn) ||
@@ -4270,7 +4275,7 @@ impl<'a> Parser<'a> {
self.token.is_keyword(kw::Async) &&
(
( // `async move {`
self.look_ahead(1, |t| t.is_keyword(kw::Move)) &&
self.is_keyword_ahead(1, &[kw::Move]) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
) || ( // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
@@ -4280,12 +4285,12 @@ impl<'a> Parser<'a> {
fn is_async_fn(&self) -> bool {
self.token.is_keyword(kw::Async) &&
self.look_ahead(1, |t| t.is_keyword(kw::Fn))
self.is_keyword_ahead(1, &[kw::Fn])
}

fn is_do_catch_block(&self) -> bool {
self.token.is_keyword(kw::Do) &&
self.look_ahead(1, |t| t.is_keyword(kw::Catch)) &&
self.is_keyword_ahead(1, &[kw::Catch]) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
@@ -4309,17 +4314,17 @@ impl<'a> Parser<'a> {
fn is_existential_type_decl(&self) -> bool {
self.token.is_keyword(kw::Existential) &&
self.look_ahead(1, |t| t.is_keyword(kw::Type))
self.is_keyword_ahead(1, &[kw::Type])
}

fn is_auto_trait_item(&self) -> bool {
// auto trait
(self.token.is_keyword(kw::Auto)
&& self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
(self.token.is_keyword(kw::Auto) &&
self.is_keyword_ahead(1, &[kw::Trait]))
|| // unsafe auto trait
(self.token.is_keyword(kw::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(kw::Auto)) &&
self.look_ahead(2, |t| t.is_keyword(kw::Trait)))
self.is_keyword_ahead(1, &[kw::Auto]) &&
self.is_keyword_ahead(2, &[kw::Trait]))
}

fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
@@ -5486,7 +5491,7 @@ impl<'a> Parser<'a> {
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_keyword(kw::Mut)) &&
} else if self.is_keyword_ahead(1, &[kw::Mut]) &&
isolated_self(self, 2) {
self.bump();
self.bump();
@@ -5497,7 +5502,7 @@ impl<'a> Parser<'a> {
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
self.look_ahead(2, |t| t.is_keyword(kw::Mut)) &&
self.is_keyword_ahead(2, &[kw::Mut]) &&
isolated_self(self, 3) {
self.bump();
let lt = self.expect_lifetime();
@@ -5676,8 +5681,7 @@ impl<'a> Parser<'a> {
/// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&self) -> bool {
self.token.is_keyword(kw::Const) &&
!self.look_ahead(1, |t| t.is_keyword(kw::Fn)) &&
!self.look_ahead(1, |t| t.is_keyword(kw::Unsafe))
!self.is_keyword_ahead(1, &[kw::Fn, kw::Unsafe])
}

/// Parses all the "front matter" for a `fn` declaration, up to
@@ -5955,7 +5959,7 @@ impl<'a> Parser<'a> {
self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
t == &token::Colon || t == &token::Eq) ||
self.look_ahead(1, |t| t.is_keyword(kw::Const)))
self.is_keyword_ahead(1, &[kw::Const]))
}

fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
@@ -6316,7 +6320,7 @@ impl<'a> Parser<'a> {
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
if self.look_ahead(1, |t| t.is_keyword(kw::Crate)) &&
if self.is_keyword_ahead(1, &[kw::Crate]) &&
self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
{
// `pub(crate)`
@@ -6328,7 +6332,7 @@ impl<'a> Parser<'a> {
VisibilityKind::Crate(CrateSugar::PubCrate),
);
return Ok(vis)
} else if self.look_ahead(1, |t| t.is_keyword(kw::In)) {
} else if self.is_keyword_ahead(1, &[kw::In]) {
// `pub(in path)`
self.bump(); // `(`
self.bump(); // `in`
@@ -6340,8 +6344,7 @@ impl<'a> Parser<'a> {
});
return Ok(vis)
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
self.look_ahead(1, |t| t.is_keyword(kw::Super) ||
t.is_keyword(kw::SelfLower))
self.is_keyword_ahead(1, &[kw::Super, kw::SelfLower])
{
// `pub(self)` or `pub(super)`
self.bump(); // `(`
@@ -6380,13 +6383,16 @@ impl<'a> Parser<'a> {
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
if self.check_keyword(kw::Default) &&
self.look_ahead(1, |t| t.is_keyword(kw::Impl) ||
t.is_keyword(kw::Const) ||
t.is_keyword(kw::Fn) ||
t.is_keyword(kw::Unsafe) ||
t.is_keyword(kw::Extern) ||
t.is_keyword(kw::Type) ||
t.is_keyword(kw::Pub)) {
self.is_keyword_ahead(1, &[
kw::Impl,
kw::Const,
kw::Fn,
kw::Unsafe,
kw::Extern,
kw::Type,
kw::Pub,
])
{
self.bump(); // `default`
Defaultness::Default
} else {
@@ -6880,7 +6886,7 @@ impl<'a> Parser<'a> {
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
if self.check_keyword(kw::Type) ||
self.check_keyword(kw::Existential) &&
self.look_ahead(1, |t| t.is_keyword(kw::Type)) {
self.is_keyword_ahead(1, &[kw::Type]) {
let existential = self.eat_keyword(kw::Existential);
assert!(self.eat_keyword(kw::Type));
Some(self.parse_existential_or_alias(existential))
@@ -7157,7 +7163,7 @@ impl<'a> Parser<'a> {
let const_span = self.prev_span;
if self.check_keyword(kw::Fn)
|| (self.check_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| t.is_keyword(kw::Fn))) {
&& self.is_keyword_ahead(1, &[kw::Fn])) {
// CONST FUNCTION ITEM
let unsafety = self.parse_unsafety();
self.bump();
@@ -7202,10 +7208,10 @@ impl<'a> Parser<'a> {
// `unsafe async fn` or `async fn`
if (
self.check_keyword(kw::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(kw::Async))
self.is_keyword_ahead(1, &[kw::Async])
) || (
self.check_keyword(kw::Async) &&
self.look_ahead(1, |t| t.is_keyword(kw::Fn))
self.is_keyword_ahead(1, &[kw::Fn])
)
{
// ASYNC FUNCTION ITEM
@@ -7239,8 +7245,7 @@ impl<'a> Parser<'a> {
return Ok(Some(item));
}
if self.check_keyword(kw::Unsafe) &&
(self.look_ahead(1, |t| t.is_keyword(kw::Trait)) ||
self.look_ahead(1, |t| t.is_keyword(kw::Auto)))
self.is_keyword_ahead(1, &[kw::Trait, kw::Auto])
{
// UNSAFE TRAIT ITEM
self.bump(); // `unsafe`
@@ -7263,11 +7268,9 @@ impl<'a> Parser<'a> {
}
if self.check_keyword(kw::Impl) ||
self.check_keyword(kw::Unsafe) &&
self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
self.is_keyword_ahead(1, &[kw::Impl]) ||
self.check_keyword(kw::Default) &&
self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
self.check_keyword(kw::Default) &&
self.look_ahead(1, |t| t.is_keyword(kw::Unsafe)) {
self.is_keyword_ahead(1, &[kw::Impl, kw::Unsafe]) {
// IMPL ITEM
let defaultness = self.parse_defaultness();
let unsafety = self.parse_unsafety();
@@ -7360,7 +7363,7 @@ impl<'a> Parser<'a> {
}
if self.check_keyword(kw::Trait)
|| (self.check_keyword(kw::Auto)
&& self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
&& self.is_keyword_ahead(1, &[kw::Trait]))
{
let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
@@ -1,20 +1,23 @@
// Test that `-Zpgo-gen` creates expected instrumentation artifacts in LLVM IR.
// Compiling with `-Cpanic=abort` because PGO+unwinding isn't supported on all platforms.

// needs-profiler-support
// compile-flags: -Z pgo-gen -Ccodegen-units=1
// compile-flags: -Z pgo-gen -Ccodegen-units=1 -Cpanic=abort

// CHECK: @__llvm_profile_raw_version =
// CHECK: @__profc_{{.*}}pgo_instrumentation{{.*}}some_function{{.*}} = private global
// CHECK: @__profd_{{.*}}pgo_instrumentation{{.*}}some_function{{.*}} = private global
// CHECK: @__profc_{{.*}}pgo_instrumentation{{.*}}main{{.*}} = private global
// CHECK: @__profd_{{.*}}pgo_instrumentation{{.*}}main{{.*}} = private global
// CHECK: @__profc_{{.*}}pgo_instrumentation{{.*}}some_other_function{{.*}} = private global
// CHECK: @__profd_{{.*}}pgo_instrumentation{{.*}}some_other_function{{.*}} = private global
// CHECK: @__llvm_profile_filename = {{.*}}"default_%m.profraw\00"{{.*}}

#![crate_type="lib"]

#[inline(never)]
fn some_function() {

}

fn main() {
pub fn some_other_function() {
some_function();
}
@@ -5,7 +5,7 @@ use std::ops::{Index};
trait Hierarchy {
type Value;
type ChildKey;
type Children = Index<Self::ChildKey, Output=Hierarchy>;
type Children = dyn Index<Self::ChildKey, Output=dyn Hierarchy>;
//~^ ERROR: the value of the associated types `Value` (from the trait `Hierarchy`), `ChildKey`

fn data(&self) -> Option<(Self::Value, Self::Children)>;
@@ -2,7 +2,17 @@

-include ../tools.mk

COMPILE_FLAGS=-Copt-level=3 -Clto=fat -Z pgo-gen="$(TMPDIR)"

# LLVM doesn't yet support instrumenting binaries that use unwinding on MSVC:
# https://github.com/rust-lang/rust/issues/61002
#
# Things work fine with -Cpanic=abort though.
ifdef IS_MSVC
COMPILE_FLAGS+= -Cpanic=abort
endif

all:
	$(RUSTC) -Copt-level=3 -Clto=fat -Z pgo-gen="$(TMPDIR)" test.rs
	$(RUSTC) $(COMPILE_FLAGS) test.rs
	$(call RUN,test) || exit 1
	[ -e "$(TMPDIR)"/default_*.profraw ] || (echo "No .profraw file"; exit 1)
@@ -2,8 +2,18 @@

-include ../tools.mk

COMPILE_FLAGS=-O -Ccodegen-units=1 -Z pgo-gen="$(TMPDIR)"

# LLVM doesn't yet support instrumenting binaries that use unwinding on MSVC:
# https://github.com/rust-lang/rust/issues/61002
#
# Things work fine with -Cpanic=abort though.
ifdef IS_MSVC
COMPILE_FLAGS+= -Cpanic=abort
endif

all:
	$(RUSTC) -O -Ccodegen-units=1 -Z pgo-gen="$(TMPDIR)" --emit=llvm-ir test.rs
	$(RUSTC) $(COMPILE_FLAGS) --emit=llvm-ir test.rs
	# We expect symbols starting with "__llvm_profile_".
	$(CGREP) "__llvm_profile_" < $(TMPDIR)/test.ll
	# We do NOT expect the "__imp_" version of these symbols.
@@ -2,7 +2,17 @@

-include ../tools.mk

COMPILE_FLAGS=-g -Z pgo-gen="$(TMPDIR)"

# LLVM doesn't yet support instrumenting binaries that use unwinding on MSVC:
# https://github.com/rust-lang/rust/issues/61002
#
# Things work fine with -Cpanic=abort though.
ifdef IS_MSVC
COMPILE_FLAGS+= -Cpanic=abort
endif

all:
	$(RUSTC) -g -Z pgo-gen="$(TMPDIR)" test.rs
	$(RUSTC) $(COMPILE_FLAGS) test.rs
	$(call RUN,test) || exit 1
	[ -e "$(TMPDIR)"/default_*.profraw ] || (echo "No .profraw file"; exit 1)
@@ -16,7 +16,7 @@
COMMON_FLAGS=-Copt-level=s -Ccodegen-units=1

# LLVM doesn't support instrumenting binaries that use SEH:
# https://bugs.llvm.org/show_bug.cgi?id=41279
# https://github.com/rust-lang/rust/issues/61002
#
# Things work fine with -Cpanic=abort though.
ifdef IS_MSVC
@@ -62,7 +62,7 @@ fn make_x() -> P<Expr> {
/// Iterate over exprs of depth up to `depth`. The goal is to explore all "interesting"
/// combinations of expression nesting. For example, we explore combinations using `if`, but not
/// `while` or `match`, since those should print and parse in much the same way as `if`.
fn iter_exprs(depth: usize, f: &mut FnMut(P<Expr>)) {
fn iter_exprs(depth: usize, f: &mut dyn FnMut(P<Expr>)) {
if depth == 0 {
f(make_x());
return;
@@ -22,11 +22,11 @@ impl<A:Clone> Invokable<A> for Invoker<A> {
}
}

fn f<A:Clone + 'static>(a: A, b: u16) -> Box<Invokable<A>+'static> {
fn f<A:Clone + 'static>(a: A, b: u16) -> Box<dyn Invokable<A>+'static> {
box Invoker {
a: a,
b: b,
} as (Box<Invokable<A>+'static>)
} as (Box<dyn Invokable<A>+'static>)
}

pub fn main() {
@@ -1,7 +1,7 @@
// run-pass
#![feature(box_syntax)]

fn pairwise_sub(mut t: Box<DoubleEndedIterator<Item=isize>>) -> isize {
fn pairwise_sub(mut t: Box<dyn DoubleEndedIterator<Item=isize>>) -> isize {
let mut result = 0;
loop {
let front = t.next();
@@ -15,7 +15,7 @@ impl Foo for char {
fn boo(&self) -> Bar { Bar }
}

fn baz(x: &Foo<A=Bar>) -> Bar {
fn baz(x: &dyn Foo<A=Bar>) -> Bar {
x.boo()
}
@@ -19,7 +19,7 @@ pub trait Subscriber {
pub trait Publisher<'a> {
type Output;
fn subscribe(&mut self, _: Box<Subscriber<Input=Self::Output> + 'a>);
fn subscribe(&mut self, _: Box<dyn Subscriber<Input=Self::Output> + 'a>);
}

pub trait Processor<'a> : Subscriber + Publisher<'a> { }
@@ -27,12 +27,12 @@ pub trait Processor<'a> : Subscriber + Publisher<'a> { }
impl<'a, P> Processor<'a> for P where P : Subscriber + Publisher<'a> { }

struct MyStruct<'a> {
sub: Box<Subscriber<Input=u64> + 'a>
sub: Box<dyn Subscriber<Input=u64> + 'a>
}

impl<'a> Publisher<'a> for MyStruct<'a> {
type Output = u64;
fn subscribe(&mut self, t : Box<Subscriber<Input=u64> + 'a>) {
fn subscribe(&mut self, t : Box<dyn Subscriber<Input=u64> + 'a>) {
self.sub = t;
}
}
@@ -11,6 +11,6 @@ impl double for usize {
}

pub fn main() {
let x: Box<_> = box (box 3usize as Box<double>);
let x: Box<_> = box (box 3usize as Box<dyn double>);
assert_eq!(x.double(), 6);
}
@@ -12,7 +12,7 @@ use std::marker;
fn main() {
trait T { fn foo(&self) {} }

fn f<'a, V: T>(v: &'a V) -> &'a T {
v as &'a T
fn f<'a, V: T>(v: &'a V) -> &'a dyn T {
v as &'a dyn T
}
}
@@ -15,9 +15,9 @@ impl<T> Foo<T> for () {}
impl Foo<u32> for u32 { fn foo(&self, _: u32) -> u32 { self+43 } }
impl Bar for () {}

unsafe fn round_trip_and_call<'a>(t: *const (Foo<u32>+'a)) -> u32 {
let foo_e : *const Foo<u16> = t as *const _;
let r_1 = foo_e as *mut Foo<u32>;
unsafe fn round_trip_and_call<'a>(t: *const (dyn Foo<u32>+'a)) -> u32 {
let foo_e : *const dyn Foo<u16> = t as *const _;
let r_1 = foo_e as *mut dyn Foo<u32>;

(&*r_1).foo(0)
}
@@ -38,8 +38,8 @@ fn tuple_i32_to_u32<T:?Sized>(u: *const (i32, T)) -> *const (u32, T) {
fn main() {
let x = 4u32;
let y : &Foo<u32> = &x;
let fl = unsafe { round_trip_and_call(y as *const Foo<u32>) };
let y : &dyn Foo<u32> = &x;
let fl = unsafe { round_trip_and_call(y as *const dyn Foo<u32>) };
assert_eq!(fl, (43+4));

let s = FooS([0,1,2]);
@@ -25,8 +25,8 @@ fn main()
// coercion-cast
let mut it = vec![137].into_iter();
let itr: &mut vec::IntoIter<u32> = &mut it;
assert_eq!((itr as &mut Iterator<Item=u32>).next(), Some(137));
assert_eq!((itr as &mut Iterator<Item=u32>).next(), None);
assert_eq!((itr as &mut dyn Iterator<Item=u32>).next(), Some(137));
assert_eq!((itr as &mut dyn Iterator<Item=u32>).next(), None);

assert_eq!(Some(4u32) as Option<u32>, Some(4u32));
assert_eq!((1u32,2u32) as (u32,u32), (1,2));
@@ -24,11 +24,11 @@ impl<A:Clone> Invokable<A> for Invoker<A> {
}
}

fn f<A:Clone + 'static>(a: A, b: u16) -> Box<Invokable<A>+'static> {
fn f<A:Clone + 'static>(a: A, b: u16) -> Box<dyn Invokable<A>+'static> {
box Invoker {
a: a,
b: b,
} as (Box<Invokable<A>+'static>)
} as (Box<dyn Invokable<A>+'static>)
}

pub fn main() {
@@ -1,4 +1,4 @@
fn main() {
assert_eq!((ToString::to_string as fn(&(ToString+'static)) -> String)(&"foo"),
assert_eq!((ToString::to_string as fn(&(dyn ToString+'static)) -> String)(&"foo"),
String::from("foo"));
}
@@ -12,16 +12,16 @@ pub fn main() {
let _: Box<[isize]> = Box::new({ [1, 2, 3] });
let _: Box<[isize]> = Box::new(if true { [1, 2, 3] } else { [1, 3, 4] });
let _: Box<[isize]> = Box::new(match true { true => [1, 2, 3], false => [1, 3, 4] });
let _: Box<Fn(isize) -> _> = Box::new({ |x| (x as u8) });
let _: Box<Debug> = Box::new(if true { false } else { true });
let _: Box<Debug> = Box::new(match true { true => 'a', false => 'b' });
let _: Box<dyn Fn(isize) -> _> = Box::new({ |x| (x as u8) });
let _: Box<dyn Debug> = Box::new(if true { false } else { true });
let _: Box<dyn Debug> = Box::new(match true { true => 'a', false => 'b' });

let _: &[isize] = &{ [1, 2, 3] };
let _: &[isize] = &if true { [1, 2, 3] } else { [1, 3, 4] };
let _: &[isize] = &match true { true => [1, 2, 3], false => [1, 3, 4] };
let _: &Fn(isize) -> _ = &{ |x| (x as u8) };
let _: &Debug = &if true { false } else { true };
let _: &Debug = &match true { true => 'a', false => 'b' };
let _: &dyn Fn(isize) -> _ = &{ |x| (x as u8) };
let _: &dyn Debug = &if true { false } else { true };
let _: &dyn Debug = &match true { true => 'a', false => 'b' };

let _: &str = &{ String::new() };
let _: &str = &if true { String::from("...") } else { 5.to_string() };
@@ -31,12 +31,12 @@ pub fn main() {
};

let _: Box<[isize]> = Box::new([1, 2, 3]);
let _: Box<Fn(isize) -> _> = Box::new(|x| (x as u8));
let _: Box<dyn Fn(isize) -> _> = Box::new(|x| (x as u8));

let _: Rc<RefCell<[isize]>> = Rc::new(RefCell::new([1, 2, 3]));
let _: Rc<RefCell<FnMut(isize) -> _>> = Rc::new(RefCell::new(|x| (x as u8)));
let _: Rc<RefCell<dyn FnMut(isize) -> _>> = Rc::new(RefCell::new(|x| (x as u8)));

let _: Vec<Box<Fn(isize) -> _>> = vec![
let _: Vec<Box<dyn Fn(isize) -> _>> = vec![
Box::new(|x| (x as u8)),
Box::new(|x| (x as i16 as u8)),
];
@@ -8,7 +8,7 @@ struct Bar;
impl Trait for Bar {}

fn main() {
let x: &[&Trait] = &[{ &Bar }];
let x: &[&dyn Trait] = &[{ &Bar }];
}

// Issue #25748 - the cast causes an &Encoding -> &Encoding coercion:
@@ -16,9 +16,9 @@ pub struct UTF8Encoding;
pub const UTF_8: &'static UTF8Encoding = &UTF8Encoding;
pub trait Encoding {}
impl Encoding for UTF8Encoding {}
pub fn f() -> &'static Encoding { UTF_8 as &'static Encoding }
pub fn f() -> &'static dyn Encoding { UTF_8 as &'static dyn Encoding }

// Root of the problem: &Trait -> &Trait coercions:
const FOO: &'static Trait = &Bar;
const BAR: &'static Trait = FOO;
const FOO: &'static dyn Trait = &Bar;
const BAR: &'static dyn Trait = FOO;
fn foo() { let _x = BAR; }
@@ -17,7 +17,7 @@ enum Enum {
}

#[derive(Debug)]
struct Pointers(*const Send, *mut Sync);
struct Pointers(*const dyn Send, *mut dyn Sync);

macro_rules! t {
($x:expr, $expected:expr) => {
@@ -30,7 +30,7 @@ impl Drop for Cat {
pub fn main() {
{
let x = box Cat {name: 22};
let nyan: Box<Dummy> = x as Box<Dummy>;
let nyan: Box<dyn Dummy> = x as Box<dyn Dummy>;
}
unsafe {
assert_eq!(value, 22);
@@ -36,7 +36,7 @@ fn main() {
// Trait objects.
let a: Bar<i32> = Bar { x: &42 };
let b: Bar<Baz> = a;
let b: Bar<dyn Baz> = a;
unsafe {
assert_eq!((*b.x).get(), 42);
}
@@ -26,17 +26,17 @@ fn main() {
assert_eq!(b[2], 3);

let a: Rc<i32> = Rc::new(42);
let b: Rc<Baz> = a.clone();
let b: Rc<dyn Baz> = a.clone();
assert_eq!(b.get(), 42);

let c: Weak<i32> = Rc::downgrade(&a);
let d: Weak<Baz> = c.clone();
let d: Weak<dyn Baz> = c.clone();

let _c = b.clone();

let a: Rc<RefCell<i32>> = Rc::new(RefCell::new(42));
let b: Rc<RefCell<Baz>> = a.clone();
let b: Rc<RefCell<dyn Baz>> = a.clone();
assert_eq!(b.borrow().get(), 42);
// FIXME
let c: Weak<RefCell<Baz>> = Rc::downgrade(&a) as Weak<_>;
let c: Weak<RefCell<dyn Baz>> = Rc::downgrade(&a) as Weak<_>;
}
@@ -9,20 +9,20 @@ trait T { fn dummy(&self) { } }
impl T for S {}

pub fn main() {
let x: &T = &S;
let x: &dyn T = &S;
// Test we can convert from &-ptr to *-ptr of trait objects
let x: *const T = &S;
let x: *const dyn T = &S;

// Test we can convert from &-ptr to *-ptr of struct pointer (not DST)
let x: *const S = &S;

// As above, but mut
let x: &mut T = &mut S;
let x: *mut T = &mut S;
let x: &mut dyn T = &mut S;
let x: *mut dyn T = &mut S;

let x: *mut S = &mut S;

// Test we can change the mutability from mut to const.
let x: &T = &mut S;
let x: *const T = &mut S;
let x: &dyn T = &mut S;
let x: *const dyn T = &mut S;
}