Merge commit 'b385428e3ddf330805241e7758e773f933357c4b' into subtree-update_cg_gcc_2024-03-05

Commit: 0d359efbe6
@@ -19,8 +19,8 @@ jobs:
fail-fast: false
matrix:
libgccjit_version:
- { gcc: "libgccjit.so", artifacts_branch: "master" }
- { gcc: "libgccjit_without_int128.so", artifacts_branch: "master-without-128bit-integers" }
- { gcc: "gcc-13.deb" }
- { gcc: "gcc-13-without-int128.deb" }
commands: [
"--mini-tests",
"--std-tests",
@@ -32,60 +32,39 @@ jobs:
"--extended-regex-tests",
"--test-successful-rustc --nb-parts 2 --current-part 0",
"--test-successful-rustc --nb-parts 2 --current-part 1",
"--projects",
]

steps:
- uses: actions/checkout@v3

# `rustup show` installs from rust-toolchain.toml
- name: Setup rust toolchain
run: rustup show

- name: Setup rust cache
uses: Swatinem/rust-cache@v2

- name: Install packages
# `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests.
run: sudo apt-get install ninja-build ripgrep llvm-14-tools

- name: Install rustfmt
run: rustup component add rustfmt

- name: Download artifact
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: ${{ matrix.libgccjit_version.artifacts_branch }}
event: push
search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/${{ matrix.libgccjit_version.gcc }}

- name: Setup path to libgccjit
run: |
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path
sudo dpkg --force-overwrite -i ${{ matrix.libgccjit_version.gcc }}
echo 'gcc-path = "/usr/lib/"' > config.toml

- name: Set env
run: |
echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV

- name: Cache cargo installed crates
uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: cargo-installed-crates2-ubuntu-latest

- name: Cache cargo registry
uses: actions/cache@v3
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo index
uses: actions/cache@v3
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v3
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV

#- name: Cache rust repository
## We only clone the rust repository for rustc tests
@@ -99,11 +78,9 @@ jobs:
- name: Build
run: |
./y.sh prepare --only-libcore
# TODO: remove --features master when it is back to the default.
./y.sh build --features master
# TODO: remove --features master when it is back to the default.
cargo test --features master
./clean_all.sh
./y.sh build
cargo test
./y.sh clean all

- name: Prepare dependencies
run: |
@@ -111,23 +88,27 @@ jobs:
git config --global user.name "User"
./y.sh prepare

# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
uses: actions-rs/cargo@v1.0.3
with:
command: build
args: --release

- name: Add more failing tests because the sysroot is not compiled with LTO
run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt

- name: Run tests
run: |
# TODO: remove --features master when it is back to the default.
./test.sh --features master --release --clean --build-sysroot ${{ matrix.commands }}
./y.sh test --release --clean --build-sysroot ${{ matrix.commands }}

- name: Check formatting
run: cargo fmt -- --check

duplicates:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- run: python tools/check_intrinsics_duplicates.py

build_system:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Test build system
run: |
cd build_system
cargo test
@@ -21,14 +21,11 @@ jobs:
libgccjit_version:
- gcc: "libgccjit.so"
artifacts_branch: "master"
# TODO: switch back to --no-default-features in the case of libgccjit 12 when the default is to enable
# master again.
extra: "--features master"
- gcc: "libgccjit_without_int128.so"
artifacts_branch: "master-without-128bit-integers"
extra: "--features master"
- gcc: "libgccjit12.so"
artifacts_branch: "gcc12"
extra: "--no-default-features"
# FIXME(antoyo): we need to set GCC_EXEC_PREFIX so that the linker can find the linker plugin.
# Not sure why it's not found otherwise.
env_extra: "TEST_FLAGS='-Cpanic=abort -Zpanic-abort-tests' GCC_EXEC_PREFIX=/usr/lib/gcc/"
@@ -36,6 +33,13 @@ jobs:
steps:
- uses: actions/checkout@v3

# `rustup show` installs from rust-toolchain.toml
- name: Setup rust toolchain
run: rustup show

- name: Setup rust cache
uses: Swatinem/rust-cache@v2

- name: Install packages
run: sudo apt-get install ninja-build ripgrep

@@ -45,56 +49,27 @@ jobs:

- name: Setup path to libgccjit
if: matrix.libgccjit_version.gcc == 'libgccjit12.so'
run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path
run: |
echo 'gcc-path = "/usr/lib/gcc/x86_64-linux-gnu/12"' > config.toml
echo "LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV

- name: Download artifact
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: ${{ matrix.libgccjit_version.artifacts_branch }}
event: push
search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/gcc-13.deb

- name: Setup path to libgccjit
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
run: |
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path
sudo dpkg --force-overwrite -i gcc-13.deb
echo 'gcc-path = "/usr/lib"' > config.toml
echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV

- name: Set env
run: |
echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV

- name: Cache cargo installed crates
uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: cargo-installed-crates2-ubuntu-latest

- name: Cache cargo registry
uses: actions/cache@v3
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo index
uses: actions/cache@v3
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v3
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}

#- name: Cache rust repository
#uses: actions/cache@v3
#id: cache-rust-repository
@@ -115,18 +90,11 @@ jobs:
if: matrix.libgccjit_version.gcc != 'libgccjit12.so'
run: ./y.sh prepare

# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
uses: actions-rs/cargo@v1.0.3
with:
command: build
args: --release

- name: Add more failing tests because the sysroot is not compiled with LTO
run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt

- name: Run tests
id: tests
run: |
${{ matrix.libgccjit_version.env_extra }} ./test.sh --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log
${{ matrix.libgccjit_version.env_extra }} ./y.sh test --release --clean --build-sysroot --test-failing-rustc ${{ matrix.libgccjit_version.extra }} | tee output_log
rg --text "test result" output_log >> $GITHUB_STEP_SUMMARY
@@ -28,9 +28,6 @@ jobs:
# FIXME: re-enable asm tests when GCC can emit in the right syntax.
# "--asm-tests",
"--test-libcore",
"--extended-rand-tests",
"--extended-regex-example-tests",
"--extended-regex-tests",
"--test-successful-rustc --nb-parts 2 --current-part 0",
"--test-successful-rustc --nb-parts 2 --current-part 1",
]
@@ -38,42 +35,25 @@ jobs:
steps:
- uses: actions/checkout@v3

# `rustup show` installs from rust-toolchain.toml
- name: Setup rust toolchain
run: rustup show

- name: Setup rust cache
uses: Swatinem/rust-cache@v2

- name: Install packages
# `llvm-14-tools` is needed to install the `FileCheck` binary which is used for asm tests.
run: sudo apt-get install ninja-build ripgrep llvm-14-tools libgccjit-12-dev

- name: Setup path to libgccjit
run: echo /usr/lib/gcc/x86_64-linux-gnu/12 > gcc_path
run: echo 'gcc-path = "/usr/lib/gcc/x86_64-linux-gnu/12"' > config.toml

- name: Set env
run: |
echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV

- name: Cache cargo installed crates
uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: cargo-installed-crates2-ubuntu-latest

- name: Cache cargo registry
uses: actions/cache@v3
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo index
uses: actions/cache@v3
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v3
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
echo "LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/lib/gcc/x86_64-linux-gnu/12" >> $GITHUB_ENV

#- name: Cache rust repository
## We only clone the rust repository for rustc tests
@@ -89,7 +69,7 @@ jobs:
./y.sh prepare --only-libcore --libgccjit12-patches
./y.sh build --no-default-features --sysroot-panic-abort
cargo test --no-default-features
./clean_all.sh
./y.sh clean all

- name: Prepare dependencies
run: |
@@ -97,19 +77,12 @@ jobs:
git config --global user.name "User"
./y.sh prepare --libgccjit12-patches

# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
uses: actions-rs/cargo@v1.0.3
with:
command: build
args: --release

- name: Add more failing tests for GCC 12
run: cat failing-ui-tests12.txt >> failing-ui-tests.txt
run: cat tests/failing-ui-tests12.txt >> tests/failing-ui-tests.txt

- name: Add more failing tests because the sysroot is not compiled with LTO
run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt

- name: Run tests
run: |
./test.sh --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features
./y.sh test --release --clean --build-sysroot ${{ matrix.commands }} --no-default-features
@@ -36,21 +36,22 @@ jobs:
]

steps:
- uses: actions/checkout@v3

# `rustup show` installs from rust-toolchain.toml
- name: Setup rust toolchain
run: rustup show

- name: Setup rust cache
uses: Swatinem/rust-cache@v2

- name: Install packages
run: |
sudo apt-get update
sudo apt-get install qemu qemu-user-static

- uses: actions/checkout@v3

- name: Download GCC artifact
uses: dawidd6/action-download-artifact@v2
with:
workflow: m68k.yml
name: gcc-m68k-13
repo: cross-cg-gcc-tools/cross-gcc
branch: master
event: push
- name: Download artifact
run: curl -LO https://github.com/cross-cg-gcc-tools/cross-gcc/releases/latest/download/gcc-m68k-13.deb

- name: Download VM artifact
uses: dawidd6/action-download-artifact@v2
@@ -64,37 +65,13 @@ jobs:
- name: Setup path to libgccjit
run: |
sudo dpkg -i gcc-m68k-13.deb
echo /usr/lib/ > gcc_path
echo 'gcc-path = "/usr/lib/"' > config.toml

- name: Set env
run: |
echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV

- name: Cache cargo installed crates
uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: cargo-installed-crates2-ubuntu-latest

#- name: Cache cargo registry
#uses: actions/cache@v3
#with:
#path: ~/.cargo/registry
#key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}

#- name: Cache cargo index
#uses: actions/cache@v3
#with:
#path: ~/.cargo/git
#key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v3
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV

#- name: Cache rust repository
## We only clone the rust repository for rustc tests
@@ -114,11 +91,9 @@ jobs:
- name: Build
run: |
./y.sh prepare --only-libcore --cross
# TODO: remove --features master when it is back to the default.
./y.sh build --target-triple m68k-unknown-linux-gnu --features master
# TODO: remove --features master when it is back to the default.
CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test --features master
./clean_all.sh
./y.sh build --target-triple m68k-unknown-linux-gnu
CG_GCC_TEST_TARGET=m68k-unknown-linux-gnu cargo test
./y.sh clean all

- name: Prepare dependencies
run: |
@@ -126,17 +101,9 @@ jobs:
git config --global user.name "User"
./y.sh prepare --cross

# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
uses: actions-rs/cargo@v1.0.3
with:
command: build
args: --release

- name: Add more failing tests because the sysroot is not compiled with LTO
run: cat failing-non-lto-tests.txt >> failing-ui-tests.txt
run: cat tests/failing-non-lto-tests.txt >> tests/failing-ui-tests.txt

- name: Run tests
run: |
# TODO: remove --features master when it is back to the default.
./test.sh --release --features master --clean --build-sysroot ${{ matrix.commands }}
./y.sh test --release --clean --build-sysroot ${{ matrix.commands }}
@@ -26,63 +26,36 @@ jobs:
steps:
- uses: actions/checkout@v3

# `rustup show` installs from rust-toolchain.toml
- name: Setup rust toolchain
run: rustup show

- name: Setup rust cache
uses: Swatinem/rust-cache@v2

- name: Install packages
run: sudo apt-get install ninja-build ripgrep

- name: Download artifact
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: "master"
event: push
search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.
run: curl -LO https://github.com/antoyo/gcc/releases/latest/download/gcc-13.deb

- name: Setup path to libgccjit
run: |
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path
sudo dpkg --force-overwrite -i gcc-13.deb
echo 'gcc-path = "/usr/lib/"' > config.toml

- name: Set env
run: |
echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV

- name: Cache cargo installed crates
uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: cargo-installed-crates2-ubuntu-latest

- name: Cache cargo registry
uses: actions/cache@v3
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo index
uses: actions/cache@v3
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v3
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
echo "LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=/usr/lib" >> $GITHUB_ENV

- name: Build
run: |
./y.sh prepare --only-libcore
# TODO: remove --features master when it is back to the default.
EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot --features master
# TODO: remove --features master when it is back to the default.
cargo test --features master
./clean_all.sh
EMBED_LTO_BITCODE=1 ./y.sh build --release --release-sysroot
cargo test
./y.sh clean all

- name: Prepare dependencies
run: |
@@ -92,17 +65,9 @@ jobs:
# FIXME(antoyo): we cannot enable LTO for stdarch tests currently because of some failing LTO tests using proc-macros.
echo -n 'lto = "fat"' >> build_sysroot/Cargo.toml

# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
uses: actions-rs/cargo@v1.0.3
with:
command: build
args: --release

- name: Add more failing tests because of undefined symbol errors (FIXME)
run: cat failing-lto-tests.txt >> failing-ui-tests.txt
run: cat tests/failing-lto-tests.txt >> tests/failing-ui-tests.txt

- name: Run tests
run: |
# TODO: remove --features master when it is back to the default.
EMBED_LTO_BITCODE=1 ./test.sh --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }} --features master
EMBED_LTO_BITCODE=1 ./y.sh test --release --clean --release-sysroot --build-sysroot ${{ matrix.commands }}
@@ -26,6 +26,13 @@ jobs:
steps:
- uses: actions/checkout@v3

# `rustup show` installs from rust-toolchain.toml
- name: Setup rust toolchain
run: rustup show

- name: Setup rust cache
uses: Swatinem/rust-cache@v2

- name: Install packages
run: sudo apt-get install ninja-build ripgrep

@@ -34,73 +41,39 @@ jobs:
run: |
mkdir intel-sde
cd intel-sde
dir=sde-external-9.14.0-2022-10-25-lin
dir=sde-external-9.33.0-2024-01-07-lin
file=$dir.tar.xz
wget https://downloadmirror.intel.com/751535/$file
wget https://downloadmirror.intel.com/813591/$file
tar xvf $file
sudo mkdir /usr/share/intel-sde
sudo cp -r $dir/* /usr/share/intel-sde
sudo ln -s /usr/share/intel-sde/sde /usr/bin/sde
sudo ln -s /usr/share/intel-sde/sde64 /usr/bin/sde64

- name: Download artifact
uses: dawidd6/action-download-artifact@v2
with:
workflow: main.yml
name: gcc-13
path: gcc-13
repo: antoyo/gcc
branch: "master"
event: push
search_artifacts: true # Because, instead, the action only check the last job ran and that won't work since we want multiple artifacts.

- name: Setup path to libgccjit
run: |
sudo dpkg --force-overwrite -i gcc-13/gcc-13.deb
echo /usr/lib/ > gcc_path

- name: Set env
run: |
echo "LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "LD_LIBRARY_PATH=$(cat gcc_path)" >> $GITHUB_ENV
echo "workspace="$GITHUB_WORKSPACE >> $GITHUB_ENV

- name: Cache cargo installed crates
uses: actions/cache@v3
with:
path: ~/.cargo/bin
key: cargo-installed-crates2-ubuntu-latest

- name: Cache cargo registry
uses: actions/cache@v3
with:
path: ~/.cargo/registry
key: ${{ runner.os }}-cargo-registry2-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo index
uses: actions/cache@v3
with:
path: ~/.cargo/git
key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }}

- name: Cache cargo target dir
uses: actions/cache@v3
with:
path: target
key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('rust-toolchain') }}
echo 'download-gccjit = true' > config.toml

- name: Build
run: |
./y.sh prepare --only-libcore
# TODO: remove `--features master` when it is back to the default.
./y.sh build --release --release-sysroot --features master
# TODO: remove --features master when it is back to the default.
cargo test --features master
./y.sh build --release --release-sysroot

- name: Set env (part 2)
run: |
# Set the `LD_LIBRARY_PATH` and `LIBRARY_PATH` env variables...
echo "LD_LIBRARY_PATH="$(./y.sh info | grep -v Using) >> $GITHUB_ENV
echo "LIBRARY_PATH="$(./y.sh info | grep -v Using) >> $GITHUB_ENV

- name: Build (part 2)
run: |
cargo test

- name: Clean
if: ${{ !matrix.cargo_runner }}
run: |
./clean_all.sh
./y.sh clean all

- name: Prepare dependencies
run: |
@@ -108,29 +81,20 @@ jobs:
git config --global user.name "User"
./y.sh prepare

# Compile is a separate step, as the actions-rs/cargo action supports error annotations
- name: Compile
uses: actions-rs/cargo@v1.0.3
with:
command: build
# TODO: remove `--features master` when it is back to the default.
args: --release --features master

- name: Run tests
if: ${{ !matrix.cargo_runner }}
run: |
# TODO: remove `--features master` when it is back to the default.
./test.sh --release --clean --release-sysroot --build-sysroot --mini-tests --std-tests --test-libcore --features master
./y.sh test --release --clean --release-sysroot --build-sysroot --mini-tests --std-tests --test-libcore

- name: Run stdarch tests
if: ${{ !matrix.cargo_runner }}
run: |
cd build_sysroot/sysroot_src/library/stdarch/
CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test
CHANNEL=release TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../y.sh cargo test

- name: Run stdarch tests
if: ${{ matrix.cargo_runner }}
run: |
cd build_sysroot/sysroot_src/library/stdarch/
# FIXME: these tests fail when the sysroot is compiled with LTO because of a missing symbol in proc-macro.
STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../cargo.sh test -- --skip rtm --skip tbm --skip sse4a
STDARCH_TEST_EVERYTHING=1 CHANNEL=release CARGO_TARGET_X86_64_UNKNOWN_LINUX_GNU_RUNNER="${{ matrix.cargo_runner }}" TARGET=x86_64-unknown-linux-gnu CG_RUSTFLAGS="-Ainternal_features" ../../../../y.sh cargo test -- --skip rtm --skip tbm --skip sse4a
compiler/rustc_codegen_gcc/.gitignore (vendored, 8 changed lines)
@@ -10,15 +10,11 @@ perf.data.old
/build_sysroot/sysroot_src
/build_sysroot/Cargo.lock
/build_sysroot/test_target/Cargo.lock
/rust
/simple-raytracer
/regex
/rand
gimple*
*asm
res
test-backend
gcc_path
projects
benchmarks
tools/llvm-project
tools/llvmint
@@ -26,3 +22,5 @@ tools/llvmint-2
# The `llvm` folder is generated by the `tools/generate_intrinsics.py` script to update intrinsics.
llvm
build_system/target
config.toml
build
@@ -8,3 +8,4 @@
!*gimple*
!*asm*
!.github
!config.toml

@@ -1 +1 @@
disable_all_formatting = true
use_small_heuristics = "Max"
@@ -23,6 +23,12 @@ version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635"

[[package]]
name = "boml"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85fdb93f04c73bff54305fa437ffea5449c41edcaadfe882f35836206b166ac5"

[[package]]
name = "cc"
version = "1.0.79"
@@ -64,9 +70,9 @@ checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764"

[[package]]
name = "fm"
version = "0.1.4"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68fda3cff2cce84c19e5dfa5179a4b35d2c0f18b893f108002b8a6a54984acca"
checksum = "21bcf4db620a804cf7e9d84fbcb5d4ac83a8c43396203b2507d62ea31814dfd4"
dependencies = [
"regex",
]
@@ -74,7 +80,7 @@ dependencies = [
[[package]]
name = "gccjit"
version = "1.0.0"
source = "git+https://github.com/antoyo/gccjit.rs#6e290f25b1d1edab5ae9ace486fd2dc8c08d6421"
source = "git+https://github.com/antoyo/gccjit.rs#9f8f67edc006d543b17529a001803ffece48349e"
dependencies = [
"gccjit_sys",
]
@@ -82,7 +88,7 @@ dependencies = [
[[package]]
name = "gccjit_sys"
version = "0.0.1"
source = "git+https://github.com/antoyo/gccjit.rs#6e290f25b1d1edab5ae9ace486fd2dc8c08d6421"
source = "git+https://github.com/antoyo/gccjit.rs#9f8f67edc006d543b17529a001803ffece48349e"
dependencies = [
"libc",
]
@@ -104,9 +110,9 @@ checksum = "fed44880c466736ef9a5c5b5facefb5ed0785676d0c02d612db14e54f0d84286"

[[package]]
name = "lang_tester"
version = "0.3.13"
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96bd995a092cac79868250589869b5a5d656b02a02bd74c8ebdc566dc7203090"
checksum = "9af8149dbb3ed7d8e529fcb141fe033b1c26ed54cbffc6762d3a86483c485d23"
dependencies = [
"fm",
"getopts",
@@ -185,6 +191,7 @@ checksum = "436b050e76ed2903236f032a59761c1eb99e1b0aead2c257922771dab1fc8c78"
name = "rustc_codegen_gcc"
version = "0.1.0"
dependencies = [
"boml",
"gccjit",
"lang_tester",
"object",
@@ -19,6 +19,7 @@ harness = false

[features]
master = ["gccjit/master"]
default = ["master"]

[dependencies]
gccjit = { git = "https://github.com/antoyo/gccjit.rs" }
@@ -35,8 +36,9 @@ smallvec = { version = "1.6.1", features = ["union", "may_dangle"] }
tempfile = "3.7.1"

[dev-dependencies]
lang_tester = "0.3.9"
lang_tester = "0.8.0"
tempfile = "3.1.0"
boml = "0.3.1"

[profile.dev]
# By compiling dependencies with optimizations, performing tests gets much faster.
@@ -55,3 +57,6 @@ debug = false
[profile.release.build-override]
opt-level = 0
debug = false

[package.metadata.rust-analyzer]
rustc_private = true
@@ -17,6 +17,18 @@ A secondary goal is to check if using the gcc backend will provide any run-time
**This requires a patched libgccjit in order to work.
You need to use my [fork of gcc](https://github.com/antoyo/gcc) which already includes these patches.**

```bash
$ cp config.example.toml config.toml
```

If you don't need to test GCC patches you wrote in our GCC fork, then the default configuration should
be all you need. You can update `rustc_codegen_gcc` without worrying about GCC.
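
For illustration, a minimal `config.toml` that relies on the default, pre-built libgccjit might look like this (a sketch; the authoritative defaults are the ones shipped in `config.example.toml`):

```toml
# Let the build system download a prebuilt libgccjit instead of using a local GCC build.
download-gccjit = true
```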

### Building with your own GCC version

If you wrote a patch for GCC and want to test it without this backend, you will need
to do a few more things.

To build it (most of these instructions come from [here](https://gcc.gnu.org/onlinedocs/jit/internals/index.html), so don't hesitate to take a look there if you encounter an issue):

```bash
@@ -49,23 +61,32 @@ $ make check-jit
$ make check-jit RUNTESTFLAGS="-v -v -v jit.exp=jit.dg/test-asm.cc"
```

**Put the path to your custom build of libgccjit in the file `gcc_path`.**
**Put the path to your custom build of libgccjit in the file `config.toml`.**

You now need to set the `gcc-path` value in `config.toml` with the result of this command:

```bash
$ dirname $(readlink -f `find . -name libgccjit.so`) > gcc_path
$ dirname $(readlink -f `find . -name libgccjit.so`)
```

and to comment out the `download-gccjit` setting:

```toml
gcc-path = "[MY PATH]"
# download-gccjit = true
```

Then you can run commands like this:

```bash
$ ./y.sh prepare # download and patch sysroot src and install hyperfine for benchmarking
$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) ./y.sh build --release
$ ./y.sh build --release
```

To run the tests:

```bash
$ ./test.sh --release
$ ./y.sh test --release
```

## Usage
@@ -79,10 +100,10 @@ export CG_GCCJIT_DIR=[the full path to rustc_codegen_gcc]
### Cargo

```bash
$ CHANNEL="release" $CG_GCCJIT_DIR/cargo.sh run
$ CHANNEL="release" $CG_GCCJIT_DIR/y.sh cargo run
```

If you compiled cg_gccjit in debug mode (aka you didn't pass `--release` to `./test.sh`) you should use `CHANNEL="debug"` instead or omit `CHANNEL="release"` completely.
If you compiled cg_gccjit in debug mode (aka you didn't pass `--release` to `./y.sh test`) you should use `CHANNEL="debug"` instead or omit `CHANNEL="release"` completely.
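
For example, a debug build could be driven like this (a sketch following the release invocation above):

```bash
$ CHANNEL="debug" $CG_GCCJIT_DIR/y.sh cargo run
```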

### LTO

@@ -100,7 +121,7 @@ error: failed to copy bitcode to object file: No such file or directory (os erro
> You should prefer using the Cargo method.

```bash
$ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
$ LIBRARY_PATH="[gcc-path value]" LD_LIBRARY_PATH="[gcc-path value]" rustc +$(cat $CG_GCCJIT_DIR/rust-toolchain | grep 'channel' | cut -d '=' -f 2 | sed 's/"//g' | sed 's/ //g') -Cpanic=abort -Zcodegen-backend=$CG_GCCJIT_DIR/target/release/librustc_codegen_gcc.so --sysroot $CG_GCCJIT_DIR/build_sysroot/sysroot my_crate.rs
```

## Env vars
@@ -118,221 +139,19 @@ $ LIBRARY_PATH=$(cat gcc_path) LD_LIBRARY_PATH=$(cat gcc_path) rustc +$(cat $CG_
<dd>Dump a C-like representation to /tmp/gccjit_dumps and enable debug info in order to debug this C-like representation.</dd>
</dl>

## Extra documentation

More specific documentation is available in the [`doc`](./doc) folder:

* [Common errors](./doc/errors.md)
* [Debugging GCC LTO](./doc/debugging-gcc-lto.md)
* [Debugging libgccjit](./doc/debugging-libgccjit.md)
* [Git subtree sync](./doc/subtree.md)
* [List of useful commands](./doc/tips.md)
* [Send a patch to GCC](./doc/sending-gcc-patch.md)

## Licensing

While this crate is licensed under a dual Apache/MIT license, it links to `libgccjit` which is under the GPLv3+ and thus, the resulting toolchain (rustc + GCC codegen) will need to be released under the GPL license.

However, programs compiled with `rustc_codegen_gcc` do not need to be released under a GPL license.

## Debugging

Sometimes, libgccjit will crash and output an error like this:

```
during RTL pass: expand
libgccjit.so: error: in expmed_mode_index, at expmed.h:249
0x7f0da2e61a35 expmed_mode_index
	../../../gcc/gcc/expmed.h:249
0x7f0da2e61aa4 expmed_op_cost_ptr
	../../../gcc/gcc/expmed.h:271
0x7f0da2e620dc sdiv_cost_ptr
	../../../gcc/gcc/expmed.h:540
0x7f0da2e62129 sdiv_cost
	../../../gcc/gcc/expmed.h:558
0x7f0da2e73c12 expand_divmod(int, tree_code, machine_mode, rtx_def*, rtx_def*, rtx_def*, int)
	../../../gcc/gcc/expmed.c:4335
0x7f0da2ea1423 expand_expr_real_2(separate_ops*, rtx_def*, machine_mode, expand_modifier)
	../../../gcc/gcc/expr.c:9240
0x7f0da2cd1a1e expand_gimple_stmt_1
	../../../gcc/gcc/cfgexpand.c:3796
0x7f0da2cd1c30 expand_gimple_stmt
	../../../gcc/gcc/cfgexpand.c:3857
0x7f0da2cd90a9 expand_gimple_basic_block
	../../../gcc/gcc/cfgexpand.c:5898
0x7f0da2cdade8 execute
	../../../gcc/gcc/cfgexpand.c:6582
```

To see the code which causes this error, call the following function:

```c
gcc_jit_context_dump_to_file(ctxt, "/tmp/output.c", 1 /* update_locations */)
```

This will create a C-like file and add the locations into the IR pointing to this C file.
Then, rerun the program and it will output the location in the second line:

```
libgccjit.so: /tmp/something.c:61322:0: error: in expmed_mode_index, at expmed.h:249
```

Or add a breakpoint to `add_error` in gdb and print the line number using:

```
p loc->m_line
p loc->m_filename->m_buffer
```

To print a debug representation of a tree:

```c
debug_tree(expr);
```

(defined in print-tree.h)

To print a debug representation of a gimple struct:

```c
debug_gimple_stmt(gimple_struct)
```

To get the `rustc` command to run in `gdb`, add the `--verbose` flag to `cargo build`.

To have the correct file paths in `gdb` instead of `/usr/src/debug/gcc/libstdc++-v3/libsupc++/eh_personality.cc`:

Maybe by calling the following at the beginning of gdb:

```
set substitute-path /usr/src/debug/gcc /path/to/gcc-repo/gcc
```

TODO(antoyo): but that's not what I remember I was doing.

### `failed to build archive` error

When you get this error:

```
error: failed to build archive: failed to open object file: No such file or directory (os error 2)
```

That can be caused by the fact that you try to compile with `lto = "fat"`, but you didn't compile the sysroot with LTO.
(Not sure if that's the reason since I cannot reproduce anymore. Maybe it happened when forgetting to set `FAT_LTO`.)
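
For reference, the setting that can trigger this is fat LTO enabled in the crate being compiled, e.g. (a sketch of a user crate's `Cargo.toml`, not this repository's):

```toml
[profile.release]
lto = "fat"
```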

### ld: cannot find crtbegin.o

When compiling an executable with libgccjit, if you set the `*LIBRARY_PATH` variables to the install directory, you will get the following errors:

```
ld: cannot find crtbegin.o: No such file or directory
ld: cannot find -lgcc: No such file or directory
ld: cannot find -lgcc: No such file or directory
libgccjit.so: error: error invoking gcc driver
```

To fix this, set the variables to `gcc-build/build/gcc`.
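
A sketch of what that looks like, assuming the GCC build tree lives in a `gcc-build` directory next to your checkout (adjust the path to your setup):

```bash
$ export LIBRARY_PATH=/path/to/gcc-build/build/gcc
$ export LD_LIBRARY_PATH=/path/to/gcc-build/build/gcc
```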

### How to debug GCC LTO

Run the command with `-v -save-temps`, then extract the `lto1` line from the output and run that under the debugger.

### How to send arguments to the GCC linker

```
CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../cargo.sh build
```

### How to see the personality functions in the asm dump

```
CG_RUSTFLAGS="-Clink-arg=-save-temps -v -Clink-arg=-dA" ../cargo.sh build
```

### How to see the LLVM IR for a sysroot crate

```
cargo build -v --target x86_64-unknown-linux-gnu -Zbuild-std
# Take the command from the output and add --emit=llvm-ir
```

### To prevent the linker from unmangling symbols

Run with:

```
COLLECT_NO_DEMANGLE=1
```
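
For instance, the variable can be prepended to a build invocation (a sketch; combining it with the build command this way is an assumption, any of the build commands above should work the same):

```bash
$ COLLECT_NO_DEMANGLE=1 CHANNEL="release" $CG_GCCJIT_DIR/y.sh cargo build
```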

### How to use a custom-built rustc

* Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`).
* Clean and rebuild the codegen with `debug-current` in the file `rust-toolchain`.

### How to install a forked git-subtree

Using git-subtree with `rustc` requires a patched git to make it work.
The PR that is needed is [here](https://github.com/gitgitgadget/git/pull/493).
Use the following instructions to install it:

```bash
git clone git@github.com:tqc/git.git
cd git
git checkout tqc/subtree
make
make install
cd contrib/subtree
make
cp git-subtree ~/bin
```

Then, do a sync with this command:

```bash
PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ sync_branch_name
cd ../rustc_codegen_gcc
git checkout master
git pull
git checkout sync_branch_name
git merge master
```

To send the changes to the rust repo:

```bash
cd ../rust
git pull origin master
git checkout -b subtree-update_cg_gcc_YYYY-MM-DD
PATH="$HOME/bin:$PATH" ~/bin/git-subtree pull --prefix=compiler/rustc_codegen_gcc/ https://github.com/rust-lang/rustc_codegen_gcc.git master
git push
```

TODO: write a script that does the above.

https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.20madness/near/258877725

### How to use [mem-trace](https://github.com/antoyo/mem-trace)

`rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc`: since `jemalloc` is linked statically, an `LD_PRELOAD`-ed library won't have a chance to intercept the calls to `malloc`.
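
A sketch of such an invocation, assuming `mem-trace` was built as a shared library (the file path below is hypothetical):

```bash
$ LD_PRELOAD=/path/to/mem-trace/libmem_trace.so rustc my_crate.rs
```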

### How to generate GIMPLE

If you need to check what gccjit is generating (GIMPLE), then take a look at how to
generate it in [gimple.md](./doc/gimple.md).

### How to build a cross-compiling libgccjit

#### Building libgccjit

* Follow the instructions on [this repo](https://github.com/cross-cg-gcc-tools/cross-gcc).

#### Configuring rustc_codegen_gcc

* Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case.
* Set the path to the cross-compiling libgccjit in `gcc_path`.
* Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as being `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu`.
* Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target m68k-unknown-linux-gnu`.

If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler).
Then, you can use it the following way:

* Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json`
* Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../cargo.sh build --target path/to/m68k-unknown-linux-gnu.json`.

If you get the following error:

```
/usr/bin/ld: unrecognised emulation mode: m68kelf
```

Make sure you set `gcc_path` to the install directory.
compiler/rustc_codegen_gcc/build.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
// TODO: remove this file and deps/libLLVM-18-rust-1.78.0-nightly.so when
// https://github.com/rust-lang/rust/pull/121967 is merged.
fn main() {
    println!("cargo:rerun-if-changed=deps/libLLVM-18-rust-1.78.0-nightly.so");
    println!("cargo:rustc-link-search=deps");
}
@@ -1,34 +0,0 @@
#!/usr/bin/env bash

# Requires the CHANNEL env var to be set to `debug` or `release.`

set -e
cd $(dirname "$0")

pushd ../
source ./config.sh
popd

# Cleanup for previous run
#     v Clean target dir except for build scripts and incremental cache
rm -r target/*/{debug,release}/{build,deps,examples,libsysroot*,native} 2>/dev/null || true
rm Cargo.lock test_target/Cargo.lock 2>/dev/null || true
rm -r sysroot/ 2>/dev/null || true

# Build libs
export RUSTFLAGS="$RUSTFLAGS -Z force-unstable-if-unmarked"
if [[ "$1" == "--release" ]]; then
    sysroot_channel='release'
    RUSTFLAGS="$RUSTFLAGS -Zmir-opt-level=3" cargo build --target $TARGET_TRIPLE --release
else
    sysroot_channel='debug'
    cargo build --target $TARGET_TRIPLE
fi

# Copy files to sysroot
mkdir -p sysroot/lib/rustlib/$TARGET_TRIPLE/lib/
cp -r target/$TARGET_TRIPLE/$sysroot_channel/deps/* sysroot/lib/rustlib/$TARGET_TRIPLE/lib/
# Copy the source files to the sysroot (Rust for Linux needs this).
source_dir=sysroot/lib/rustlib/src/rust
mkdir -p $source_dir
cp -r sysroot_src/library/ $source_dir
@@ -2,6 +2,15 @@
# It is not intended for manual editing.
version = 3

[[package]]
name = "boml"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85fdb93f04c73bff54305fa437ffea5449c41edcaadfe882f35836206b166ac5"

[[package]]
name = "y"
version = "0.1.0"
dependencies = [
 "boml",
]

@@ -3,6 +3,9 @@ name = "y"
version = "0.1.0"
edition = "2021"

[dependencies]
boml = "0.3.1"

[[bin]]
name = "y"
path = "src/main.rs"
@@ -1,7 +1,5 @@
use crate::config::{set_config, ConfigInfo};
use crate::utils::{
    get_gcc_path, run_command, run_command_with_output_and_env, walk_dir,
};
use crate::config::{Channel, ConfigInfo};
use crate::utils::{run_command, run_command_with_output_and_env, walk_dir};
use std::collections::HashMap;
use std::ffi::OsStr;
use std::fs;
@@ -9,33 +7,18 @@ use std::path::Path;

#[derive(Default)]
struct BuildArg {
    codegen_release_channel: bool,
    sysroot_release_channel: bool,
    sysroot_panic_abort: bool,
    flags: Vec<String>,
    gcc_path: String,
    config_info: ConfigInfo,
}

impl BuildArg {
    fn new() -> Result<Option<Self>, String> {
        let gcc_path = get_gcc_path()?;
        let mut build_arg = Self {
            gcc_path,
            ..Default::default()
        };
        let mut build_arg = Self::default();
        // We skip binary name and the `build` command.
        let mut args = std::env::args().skip(2);

        while let Some(arg) = args.next() {
            match arg.as_str() {
                "--release" => build_arg.codegen_release_channel = true,
                "--release-sysroot" => build_arg.sysroot_release_channel = true,
                "--no-default-features" => {
                    build_arg.flags.push("--no-default-features".to_string());
                }
                "--sysroot-panic-abort" => {
                    build_arg.sysroot_panic_abort = true;
                },
                "--features" => {
                    if let Some(arg) = args.next() {
                        build_arg.flags.push("--features".to_string());
@@ -50,25 +33,11 @@ impl BuildArg {
                    Self::usage();
                    return Ok(None);
                }
                "--target-triple" => {
                    if args.next().is_some() {
                        // Handled in config.rs.
                    } else {
                        return Err(
                            "Expected a value after `--target-triple`, found nothing".to_string()
                        );
                arg => {
                    if !build_arg.config_info.parse_argument(arg, &mut args)? {
                        return Err(format!("Unknown argument `{}`", arg));
                    }
                }
                "--target" => {
                    if args.next().is_some() {
                        // Handled in config.rs.
                    } else {
                        return Err(
                            "Expected a value after `--target`, found nothing".to_string()
                        );
                    }
                }
                arg => return Err(format!("Unknown argument `{}`", arg)),
            }
        }
        Ok(Some(build_arg))
@@ -79,29 +48,19 @@ impl BuildArg {
            r#"
`build` command help:

    --release              : Build codegen in release mode
    --release-sysroot      : Build sysroot in release mode
    --sysroot-panic-abort  : Build the sysroot without unwinding support.
    --no-default-features  : Add `--no-default-features` flag
    --features [arg]       : Add a new feature [arg]
    --target-triple [arg]  : Set the target triple to [arg]
    --help                 : Show this help
"#
        )
    --features [arg]       : Add a new feature [arg]"#
        );
        ConfigInfo::show_usage();
        println!("    --help                 : Show this help");
    }
}

fn build_sysroot(
    env: &mut HashMap<String, String>,
    args: &BuildArg,
    config: &ConfigInfo,
) -> Result<(), String> {
    std::env::set_current_dir("build_sysroot")
        .map_err(|error| format!("Failed to go to `build_sysroot` directory: {:?}", error))?;
pub fn build_sysroot(env: &HashMap<String, String>, config: &ConfigInfo) -> Result<(), String> {
    let start_dir = Path::new("build_sysroot");
    // Cleanup for previous run
    //     Clean target dir except for build scripts and incremental cache
    let _ = walk_dir(
        "target",
        start_dir.join("target"),
        |dir: &Path| {
            for top in &["debug", "release"] {
                let _ = fs::remove_dir_all(dir.join(top).join("build"));
@@ -138,92 +97,114 @@ fn build_sysroot(
        |_| Ok(()),
    );

    let _ = fs::remove_file("Cargo.lock");
    let _ = fs::remove_file("test_target/Cargo.lock");
    let _ = fs::remove_dir_all("sysroot");
    let _ = fs::remove_file(start_dir.join("Cargo.lock"));
    let _ = fs::remove_file(start_dir.join("test_target/Cargo.lock"));
    let _ = fs::remove_dir_all(start_dir.join("sysroot"));

    // Builds libs
    let mut rustflags = env
        .get("RUSTFLAGS")
        .cloned()
        .unwrap_or_default();
    if args.sysroot_panic_abort {
    let mut rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
    if config.sysroot_panic_abort {
        rustflags.push_str(" -Cpanic=abort -Zpanic-abort-tests");
    }
    env.insert(
        "RUSTFLAGS".to_string(),
        format!("{} -Zmir-opt-level=3", rustflags),
    );
    let channel = if args.sysroot_release_channel {
        run_command_with_output_and_env(
            &[
                &"cargo",
                &"build",
                &"--target",
                &config.target,
                &"--release",
            ],
            None,
            Some(&env),
        )?;
    rustflags.push_str(" -Z force-unstable-if-unmarked");
    let mut env = env.clone();

    let mut args: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"build", &"--target", &config.target];

    if config.no_default_features {
        rustflags.push_str(" -Csymbol-mangling-version=v0");
        args.push(&"--no-default-features");
    }

    let channel = if config.sysroot_release_channel {
        rustflags.push_str(" -Zmir-opt-level=3");
        args.push(&"--release");
        "release"
    } else {
        run_command_with_output_and_env(
            &[
                &"cargo",
                &"build",
                &"--target",
                &config.target,
            ],
            None,
            Some(env),
        )?;
        "debug"
    };

    env.insert("RUSTFLAGS".to_string(), rustflags);
    run_command_with_output_and_env(&args, Some(start_dir), Some(&env))?;

    // Copy files to sysroot
    let sysroot_path = format!("sysroot/lib/rustlib/{}/lib/", config.target_triple);
    fs::create_dir_all(&sysroot_path)
        .map_err(|error| format!("Failed to create directory `{}`: {:?}", sysroot_path, error))?;
    let sysroot_path = start_dir.join(format!("sysroot/lib/rustlib/{}/lib/", config.target_triple));
    fs::create_dir_all(&sysroot_path).map_err(|error| {
        format!(
            "Failed to create directory `{}`: {:?}",
            sysroot_path.display(),
            error
        )
    })?;
    let copier = |dir_to_copy: &Path| {
        // FIXME: should not use shell command!
        run_command(&[&"cp", &"-r", &dir_to_copy, &sysroot_path], None).map(|_| ())
    };
    walk_dir(
        &format!("target/{}/{}/deps", config.target_triple, channel),
        start_dir.join(&format!("target/{}/{}/deps", config.target_triple, channel)),
        copier,
        copier,
    )?;

    // Copy the source files to the sysroot (Rust for Linux needs this).
    let sysroot_src_path = "sysroot/lib/rustlib/src/rust";
    fs::create_dir_all(&sysroot_src_path)
        .map_err(|error| format!("Failed to create directory `{}`: {:?}", sysroot_src_path, error))?;
    run_command(&[&"cp", &"-r", &"sysroot_src/library/", &sysroot_src_path], None)?;
    let sysroot_src_path = start_dir.join("sysroot/lib/rustlib/src/rust");
    fs::create_dir_all(&sysroot_src_path).map_err(|error| {
        format!(
            "Failed to create directory `{}`: {:?}",
            sysroot_src_path.display(),
            error
        )
    })?;
    run_command(
        &[
            &"cp",
            &"-r",
            &start_dir.join("sysroot_src/library/"),
            &sysroot_src_path,
        ],
        None,
    )?;

    Ok(())
}

fn build_codegen(args: &BuildArg) -> Result<(), String> {
fn build_codegen(args: &mut BuildArg) -> Result<(), String> {
    let mut env = HashMap::new();

    env.insert("LD_LIBRARY_PATH".to_string(), args.gcc_path.clone());
    env.insert("LIBRARY_PATH".to_string(), args.gcc_path.clone());
    env.insert(
        "LD_LIBRARY_PATH".to_string(),
        args.config_info.gcc_path.clone(),
    );
    env.insert(
        "LIBRARY_PATH".to_string(),
        args.config_info.gcc_path.clone(),
    );

    if args.config_info.no_default_features {
        env.insert(
            "RUSTFLAGS".to_string(),
            "-Csymbol-mangling-version=v0".to_string(),
        );
    }

    let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &"rustc"];
    if args.codegen_release_channel {
    if args.config_info.channel == Channel::Release {
        command.push(&"--release");
        env.insert("CHANNEL".to_string(), "release".to_string());
        env.insert("CARGO_INCREMENTAL".to_string(), "1".to_string());
    } else {
        env.insert("CHANNEL".to_string(), "debug".to_string());
    }
    if args.config_info.no_default_features {
        command.push(&"--no-default-features");
    }
    let flags = args.flags.iter().map(|s| s.as_str()).collect::<Vec<_>>();
    for flag in &flags {
        command.push(flag);
    }
    run_command_with_output_and_env(&command, None, Some(&env))?;

    let config = set_config(&mut env, &[], Some(&args.gcc_path))?;
    args.config_info.setup(&mut env, false)?;

    // We voluntarily ignore the error.
    let _ = fs::remove_dir_all("target/out");
@@ -236,19 +217,16 @@ fn build_codegen(args: &BuildArg) -> Result<(), String> {
    })?;

    println!("[BUILD] sysroot");
    build_sysroot(
        &mut env,
        args,
        &config,
    )?;
    build_sysroot(&env, &args.config_info)?;
    Ok(())
}

pub fn run() -> Result<(), String> {
    let args = match BuildArg::new()? {
    let mut args = match BuildArg::new()? {
        Some(args) => args,
        None => return Ok(()),
    };
    build_codegen(&args)?;
    args.config_info.setup_gcc_path()?;
    build_codegen(&mut args)?;
    Ok(())
}
114
compiler/rustc_codegen_gcc/build_system/src/cargo.rs
Normal file
114
compiler/rustc_codegen_gcc/build_system/src/cargo.rs
Normal file
@ -0,0 +1,114 @@
|
||||
use crate::config::ConfigInfo;
|
||||
use crate::utils::{
|
||||
get_toolchain, run_command_with_output_and_env_no_err, rustc_toolchain_version_info,
|
||||
rustc_version_info,
|
||||
};
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn args() -> Result<Option<Vec<String>>, String> {
|
||||
// We skip the binary and the "cargo" option.
|
||||
if let Some("--help") = std::env::args().skip(2).next().as_deref() {
|
||||
usage();
|
||||
return Ok(None);
|
||||
}
|
||||
let args = std::env::args().skip(2).collect::<Vec<_>>();
|
||||
if args.is_empty() {
|
||||
return Err(
|
||||
"Expected at least one argument for `cargo` subcommand, found none".to_string(),
|
||||
);
|
||||
}
|
||||
Ok(Some(args))
|
||||
}
|
||||
|
||||
fn usage() {
|
||||
println!(
|
||||
r#"
|
||||
`cargo` command help:
|
||||
|
||||
[args] : Arguments to be passed to the cargo command
|
||||
--help : Show this help
|
||||
"#
|
||||
)
|
||||
}
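The help above is terse, so here is a hypothetical usage sketch (assuming the build system is invoked through the repository's `y.sh` wrapper; everything after `cargo` is forwarded unchanged to the real `cargo` invocation with the configured toolchain and environment):

```bash
# Hypothetical examples of the `cargo` subcommand.
./y.sh cargo build
./y.sh cargo test -- --nocapture
```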
|
||||
|
||||
pub fn run() -> Result<(), String> {
|
||||
let args = match args()? {
|
||||
Some(a) => a,
|
||||
None => return Ok(()),
|
||||
};
|
||||
|
||||
// We first need to go to the original location to ensure that the config setup will go as
|
||||
// expected.
|
||||
let current_dir = std::env::current_dir()
|
||||
.and_then(|path| path.canonicalize())
|
||||
.map_err(|error| format!("Failed to get current directory path: {:?}", error))?;
|
||||
let current_exe = std::env::current_exe()
|
||||
.and_then(|path| path.canonicalize())
|
||||
.map_err(|error| format!("Failed to get current exe path: {:?}", error))?;
|
||||
let mut parent_dir = current_exe
|
||||
.components()
|
||||
.map(|comp| comp.as_os_str())
|
||||
.collect::<Vec<_>>();
|
||||
// We run this script from "build_system/target/release/y", so we need to remove these elements.
|
||||
for to_remove in &["y", "release", "target", "build_system"] {
|
||||
if parent_dir
|
||||
.last()
|
||||
.map(|part| part == to_remove)
|
||||
.unwrap_or(false)
|
||||
{
|
||||
parent_dir.pop();
|
||||
} else {
|
||||
return Err(format!(
|
||||
"Build script not executed from `build_system/target/release/y` (in path {})",
|
||||
current_exe.display(),
|
||||
));
|
||||
}
|
||||
}
|
||||
let parent_dir = PathBuf::from(parent_dir.join(&OsStr::new("/")));
|
||||
std::env::set_current_dir(&parent_dir).map_err(|error| {
|
||||
format!(
|
||||
"Failed to go to `{}` folder: {:?}",
|
||||
parent_dir.display(),
|
||||
error
|
||||
)
|
||||
})?;
|
||||
|
||||
let mut env: HashMap<String, String> = std::env::vars().collect();
|
||||
ConfigInfo::default().setup(&mut env, false)?;
|
||||
let toolchain = get_toolchain()?;
|
||||
|
||||
let toolchain_version = rustc_toolchain_version_info(&toolchain)?;
|
||||
let default_version = rustc_version_info(None)?;
|
||||
if toolchain_version != default_version {
|
||||
println!(
|
||||
"rustc_codegen_gcc is built for {} but the default rustc version is {}.",
|
||||
toolchain_version.short, default_version.short,
|
||||
);
|
||||
println!("Using {}.", toolchain_version.short);
|
||||
}
|
||||
|
||||
// We go back to the original folder since we now have set up everything we needed.
|
||||
std::env::set_current_dir(&current_dir).map_err(|error| {
|
||||
format!(
|
||||
"Failed to go back to `{}` folder: {:?}",
|
||||
current_dir.display(),
|
||||
error
|
||||
)
|
||||
})?;
|
||||
|
||||
let rustflags = env.get("RUSTFLAGS").cloned().unwrap_or_default();
|
||||
env.insert("RUSTDOCFLAGS".to_string(), rustflags);
|
||||
let toolchain = format!("+{}", toolchain);
|
||||
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"cargo", &toolchain];
|
||||
for arg in &args {
|
||||
command.push(arg);
|
||||
}
|
||||
if run_command_with_output_and_env_no_err(&command, None, Some(&env)).is_err() {
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
82
compiler/rustc_codegen_gcc/build_system/src/clean.rs
Normal file
@ -0,0 +1,82 @@
|
||||
use crate::utils::{remove_file, run_command};
|
||||
|
||||
use std::fs::remove_dir_all;
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Default)]
|
||||
enum CleanArg {
|
||||
/// `clean all`
|
||||
All,
|
||||
/// `clean ui-tests`
|
||||
UiTests,
|
||||
/// `clean --help`
|
||||
#[default]
|
||||
Help,
|
||||
}
|
||||
|
||||
impl CleanArg {
|
||||
fn new() -> Result<Self, String> {
|
||||
// We skip the binary and the "clean" option.
|
||||
for arg in std::env::args().skip(2) {
|
||||
return match arg.as_str() {
|
||||
"all" => Ok(Self::All),
|
||||
"ui-tests" => Ok(Self::UiTests),
|
||||
"--help" => Ok(Self::Help),
|
||||
a => Err(format!("Unknown argument `{}`", a)),
|
||||
};
|
||||
}
|
||||
Ok(Self::default())
|
||||
}
|
||||
}
|
||||
|
||||
fn usage() {
|
||||
println!(
|
||||
r#"
|
||||
`clean` command help:
|
||||
|
||||
all : Clean all data
|
||||
ui-tests : Clean ui tests
|
||||
--help : Show this help
|
||||
"#
|
||||
)
|
||||
}
|
||||
|
||||
fn clean_all() -> Result<(), String> {
|
||||
let dirs_to_remove = [
|
||||
"target",
|
||||
"build_sysroot/sysroot",
|
||||
"build_sysroot/sysroot_src",
|
||||
"build_sysroot/target",
|
||||
];
|
||||
for dir in dirs_to_remove {
|
||||
let _ = remove_dir_all(dir);
|
||||
}
|
||||
let dirs_to_remove = ["regex", "rand", "simple-raytracer"];
|
||||
for dir in dirs_to_remove {
|
||||
let _ = remove_dir_all(Path::new(crate::BUILD_DIR).join(dir));
|
||||
}
|
||||
|
||||
let files_to_remove = ["build_sysroot/Cargo.lock", "perf.data", "perf.data.old"];
|
||||
|
||||
for file in files_to_remove {
|
||||
let _ = remove_file(file);
|
||||
}
|
||||
|
||||
println!("Successfully ran `clean all`");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn clean_ui_tests() -> Result<(), String> {
|
||||
let path = Path::new(crate::BUILD_DIR).join("rust/build/x86_64-unknown-linux-gnu/test/ui/");
|
||||
run_command(&[&"find", &path, &"-name", &"stamp", &"-delete"], None)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run() -> Result<(), String> {
|
||||
match CleanArg::new()? {
|
||||
CleanArg::All => clean_all()?,
|
||||
CleanArg::UiTests => clean_ui_tests()?,
|
||||
CleanArg::Help => usage(),
|
||||
}
|
||||
Ok(())
|
||||
}
|
79
compiler/rustc_codegen_gcc/build_system/src/clone_gcc.rs
Normal file
@ -0,0 +1,79 @@
|
||||
use crate::config::ConfigInfo;
|
||||
use crate::utils::{git_clone, run_command_with_output};
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
fn show_usage() {
|
||||
println!(
|
||||
r#"
|
||||
`clone-gcc` command help:
|
||||
|
||||
--out-path : Location where the GCC repository will be cloned (default: `./gcc`)"#
|
||||
);
|
||||
ConfigInfo::show_usage();
|
||||
println!(" --help : Show this help");
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Args {
|
||||
out_path: PathBuf,
|
||||
config_info: ConfigInfo,
|
||||
}
|
||||
|
||||
impl Args {
|
||||
fn new() -> Result<Option<Self>, String> {
|
||||
let mut command_args = Self::default();
|
||||
|
||||
let mut out_path = None;
|
||||
|
||||
// We skip binary name and the `clone-gcc` command.
|
||||
let mut args = std::env::args().skip(2);
|
||||
|
||||
while let Some(arg) = args.next() {
|
||||
match arg.as_str() {
|
||||
"--out-path" => match args.next() {
|
||||
Some(path) if !path.is_empty() => out_path = Some(path),
|
||||
_ => {
|
||||
return Err("Expected an argument after `--out-path`, found nothing".into())
|
||||
}
|
||||
},
|
||||
"--help" => {
|
||||
show_usage();
|
||||
return Ok(None);
|
||||
}
|
||||
arg => {
|
||||
if !command_args.config_info.parse_argument(arg, &mut args)? {
|
||||
return Err(format!("Unknown option {}", arg));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
command_args.out_path = match out_path {
|
||||
Some(p) => p.into(),
|
||||
None => PathBuf::from("./gcc"),
|
||||
};
|
||||
return Ok(Some(command_args));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn run() -> Result<(), String> {
|
||||
let Some(args) = Args::new()? else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let result = git_clone("https://github.com/antoyo/gcc", Some(&args.out_path), false)?;
|
||||
if result.ran_clone {
|
||||
let gcc_commit = args.config_info.get_gcc_commit()?;
|
||||
println!("Checking out GCC commit `{}`...", gcc_commit);
|
||||
run_command_with_output(
|
||||
&[&"git", &"checkout", &gcc_commit],
|
||||
Some(Path::new(&result.repo_dir)),
|
||||
)?;
|
||||
} else {
|
||||
println!(
|
||||
"There is already a GCC folder in `{}`, leaving things as is...",
|
||||
args.out_path.display()
|
||||
);
|
||||
}
|
||||
Ok(())
|
||||
}
|
@ -1,101 +1,435 @@
|
||||
use crate::utils::{get_gcc_path, get_os_name, get_rustc_host_triple};
|
||||
use crate::utils::{
|
||||
create_symlink, get_os_name, run_command_with_output, rustc_version_info, split_args,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
use std::env as std_env;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use boml::{types::TomlValue, Toml};
|
||||
|
||||
#[derive(Default, PartialEq, Eq, Clone, Copy, Debug)]
|
||||
pub enum Channel {
|
||||
#[default]
|
||||
Debug,
|
||||
Release,
|
||||
}
|
||||
|
||||
impl Channel {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Debug => "debug",
|
||||
Self::Release => "release",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn failed_config_parsing(config_file: &Path, err: &str) -> Result<ConfigFile, String> {
|
||||
Err(format!(
|
||||
"Failed to parse `{}`: {}",
|
||||
config_file.display(),
|
||||
err
|
||||
))
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ConfigFile {
|
||||
gcc_path: Option<String>,
|
||||
download_gccjit: Option<bool>,
|
||||
}
|
||||
|
||||
impl ConfigFile {
|
||||
pub fn new(config_file: &Path) -> Result<Self, String> {
|
||||
let content = fs::read_to_string(config_file).map_err(|_| {
|
||||
format!(
|
||||
"Failed to read `{}`. Take a look at `Readme.md` to see how to set up the project",
|
||||
config_file.display(),
|
||||
)
|
||||
})?;
|
||||
let toml = Toml::parse(&content).map_err(|err| {
|
||||
format!(
|
||||
"Error occurred around `{}`: {:?}",
|
||||
&content[err.start..=err.end],
|
||||
err.kind
|
||||
)
|
||||
})?;
|
||||
let mut config = Self::default();
|
||||
for (key, value) in toml.iter() {
|
||||
match (key, value) {
|
||||
("gcc-path", TomlValue::String(value)) => {
|
||||
config.gcc_path = Some(value.as_str().to_string())
|
||||
}
|
||||
("gcc-path", _) => {
|
||||
return failed_config_parsing(config_file, "Expected a string for `gcc-path`")
|
||||
}
|
||||
("download-gccjit", TomlValue::Boolean(value)) => {
|
||||
config.download_gccjit = Some(*value)
|
||||
}
|
||||
("download-gccjit", _) => {
|
||||
return failed_config_parsing(
|
||||
config_file,
|
||||
"Expected a boolean for `download-gccjit`",
|
||||
)
|
||||
}
|
||||
_ => return failed_config_parsing(config_file, &format!("Unknown key `{}`", key)),
|
||||
}
|
||||
}
|
||||
match (config.gcc_path.as_mut(), config.download_gccjit) {
|
||||
(None, None | Some(false)) => {
|
||||
return failed_config_parsing(
|
||||
config_file,
|
||||
"At least one of `gcc-path` or `download-gccjit` value must be set",
|
||||
)
|
||||
}
|
||||
(Some(_), Some(true)) => {
|
||||
println!(
|
||||
"WARNING: both `gcc-path` and `download-gccjit` arguments are used, \
|
||||
ignoring `gcc-path`"
|
||||
);
|
||||
}
|
||||
(Some(gcc_path), _) => {
|
||||
let path = Path::new(gcc_path);
|
||||
*gcc_path = path
|
||||
.canonicalize()
|
||||
.map_err(|err| {
|
||||
format!("Failed to get absolute path of `{}`: {:?}", gcc_path, err)
|
||||
})?
|
||||
.display()
|
||||
.to_string();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
Ok(config)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default, Debug)]
|
||||
pub struct ConfigInfo {
|
||||
pub target: String,
|
||||
pub target_triple: String,
|
||||
pub host_triple: String,
|
||||
pub rustc_command: Vec<String>,
|
||||
pub run_in_vm: bool,
|
||||
pub cargo_target_dir: String,
|
||||
pub dylib_ext: String,
|
||||
pub sysroot_release_channel: bool,
|
||||
pub channel: Channel,
|
||||
pub sysroot_panic_abort: bool,
|
||||
pub cg_backend_path: String,
|
||||
pub sysroot_path: String,
|
||||
pub gcc_path: String,
|
||||
config_file: Option<String>,
|
||||
// This is used in particular in rust compiler bootstrap because it doesn't run at the root
|
||||
// of the `cg_gcc` folder, making it complicated for us to get access to local files we need
|
||||
// like `libgccjit.version` or `config.toml`.
|
||||
cg_gcc_path: Option<PathBuf>,
|
||||
// Needed for the `info` command which doesn't want to actually download the lib if needed,
|
||||
// just to set the `gcc_path` field to display it.
|
||||
pub no_download: bool,
|
||||
pub no_default_features: bool,
|
||||
}
|
||||
|
||||
// Returns the beginning for the command line of rustc.
|
||||
pub fn set_config(
|
||||
impl ConfigInfo {
|
||||
/// Returns `true` if the argument was taken into account.
|
||||
pub fn parse_argument(
|
||||
&mut self,
|
||||
arg: &str,
|
||||
args: &mut impl Iterator<Item = String>,
|
||||
) -> Result<bool, String> {
|
||||
match arg {
|
||||
"--target" => {
|
||||
if let Some(arg) = args.next() {
|
||||
self.target = arg;
|
||||
} else {
|
||||
return Err("Expected a value after `--target`, found nothing".to_string());
|
||||
}
|
||||
}
|
||||
"--target-triple" => match args.next() {
|
||||
Some(arg) if !arg.is_empty() => self.target_triple = arg.to_string(),
|
||||
_ => {
|
||||
return Err(
|
||||
"Expected a value after `--target-triple`, found nothing".to_string()
|
||||
)
|
||||
}
|
||||
},
|
||||
"--out-dir" => match args.next() {
|
||||
Some(arg) if !arg.is_empty() => {
|
||||
self.cargo_target_dir = arg.to_string();
|
||||
}
|
||||
_ => return Err("Expected a value after `--out-dir`, found nothing".to_string()),
|
||||
},
|
||||
"--config-file" => match args.next() {
|
||||
Some(arg) if !arg.is_empty() => {
|
||||
self.config_file = Some(arg.to_string());
|
||||
}
|
||||
_ => {
|
||||
return Err("Expected a value after `--config-file`, found nothing".to_string())
|
||||
}
|
||||
},
|
||||
"--release-sysroot" => self.sysroot_release_channel = true,
|
||||
"--release" => self.channel = Channel::Release,
|
||||
"--sysroot-panic-abort" => self.sysroot_panic_abort = true,
|
||||
"--cg_gcc-path" => match args.next() {
|
||||
Some(arg) if !arg.is_empty() => {
|
||||
self.cg_gcc_path = Some(arg.into());
|
||||
}
|
||||
_ => {
|
||||
return Err("Expected a value after `--cg_gcc-path`, found nothing".to_string())
|
||||
}
|
||||
},
|
||||
"--no-default-features" => self.no_default_features = true,
|
||||
_ => return Ok(false),
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
|
||||
pub fn rustc_command_vec(&self) -> Vec<&dyn AsRef<OsStr>> {
|
||||
let mut command: Vec<&dyn AsRef<OsStr>> = Vec::with_capacity(self.rustc_command.len());
|
||||
for arg in self.rustc_command.iter() {
|
||||
command.push(arg);
|
||||
}
|
||||
command
|
||||
}
|
||||
|
||||
pub fn get_gcc_commit(&self) -> Result<String, String> {
|
||||
let commit_hash_file = self.compute_path("libgccjit.version");
|
||||
let content = fs::read_to_string(&commit_hash_file).map_err(|_| {
|
||||
format!(
|
||||
"Failed to read `{}`. Take a look at `Readme.md` to see how to set up the project",
|
||||
commit_hash_file.display(),
|
||||
)
|
||||
})?;
|
||||
let commit = content.trim();
|
||||
// This is a very simple check to ensure this is not a path. For the rest, it'll just fail
|
||||
// when trying to download the file so we should be fine.
|
||||
if commit.contains('/') || commit.contains('\\') {
|
||||
return Err(format!(
|
||||
"{}: invalid commit hash `{}`",
|
||||
commit_hash_file.display(),
|
||||
commit,
|
||||
));
|
||||
}
|
||||
Ok(commit.to_string())
|
||||
}
|
||||
|
||||
fn download_gccjit_if_needed(&mut self) -> Result<(), String> {
|
||||
let output_dir = Path::new(crate::BUILD_DIR).join("libgccjit");
|
||||
let commit = self.get_gcc_commit()?;
|
||||
|
||||
let output_dir = output_dir.join(&commit);
|
||||
if !output_dir.is_dir() {
|
||||
std::fs::create_dir_all(&output_dir).map_err(|err| {
|
||||
format!(
|
||||
"failed to create folder `{}`: {:?}",
|
||||
output_dir.display(),
|
||||
err,
|
||||
)
|
||||
})?;
|
||||
}
|
||||
let output_dir = output_dir.canonicalize().map_err(|err| {
|
||||
format!(
|
||||
"Failed to get absolute path of `{}`: {:?}",
|
||||
output_dir.display(),
|
||||
err
|
||||
)
|
||||
})?;
|
||||
|
||||
let libgccjit_so_name = "libgccjit.so";
|
||||
let libgccjit_so = output_dir.join(libgccjit_so_name);
|
||||
if !libgccjit_so.is_file() && !self.no_download {
|
||||
// Download time!
|
||||
let tempfile_name = format!("{}.download", libgccjit_so_name);
|
||||
let tempfile = output_dir.join(&tempfile_name);
|
||||
let is_in_ci = std::env::var("GITHUB_ACTIONS").is_ok();
|
||||
|
||||
let url = format!(
|
||||
"https://github.com/antoyo/gcc/releases/download/master-{}/libgccjit.so",
|
||||
commit,
|
||||
);
|
||||
|
||||
println!("Downloading `{}`...", url);
|
||||
download_gccjit(url, &output_dir, tempfile_name, !is_in_ci)?;
|
||||
|
||||
let libgccjit_so = output_dir.join(libgccjit_so_name);
|
||||
// If we reach this point, it means the file was correctly downloaded, so let's
|
||||
// rename it!
|
||||
std::fs::rename(&tempfile, &libgccjit_so).map_err(|err| {
|
||||
format!(
|
||||
"Failed to rename `{}` into `{}`: {:?}",
|
||||
tempfile.display(),
|
||||
libgccjit_so.display(),
|
||||
err,
|
||||
)
|
||||
})?;
|
||||
|
||||
println!("Downloaded libgccjit.so version {} successfully!", commit);
|
||||
// We need to create a link named `libgccjit.so.0` because that's what the linker is
|
||||
// looking for.
|
||||
create_symlink(
|
||||
&libgccjit_so,
|
||||
output_dir.join(&format!("{}.0", libgccjit_so_name)),
|
||||
)?;
|
||||
}
|
||||
|
||||
self.gcc_path = output_dir.display().to_string();
|
||||
println!("Using `{}` as path for libgccjit", self.gcc_path);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn compute_path<P: AsRef<Path>>(&self, other: P) -> PathBuf {
|
||||
match self.cg_gcc_path {
|
||||
Some(ref path) => path.join(other),
|
||||
None => PathBuf::new().join(other),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setup_gcc_path(&mut self) -> Result<(), String> {
|
||||
let config_file = match self.config_file.as_deref() {
|
||||
Some(config_file) => config_file.into(),
|
||||
None => self.compute_path("config.toml"),
|
||||
};
|
||||
let ConfigFile {
|
||||
gcc_path,
|
||||
download_gccjit,
|
||||
} = ConfigFile::new(&config_file)?;
|
||||
|
||||
if let Some(true) = download_gccjit {
|
||||
self.download_gccjit_if_needed()?;
|
||||
return Ok(());
|
||||
}
|
||||
self.gcc_path = match gcc_path {
|
||||
Some(path) => path,
|
||||
None => {
|
||||
return Err(format!(
|
||||
"missing `gcc-path` value from `{}`",
|
||||
config_file.display(),
|
||||
))
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn setup(
|
||||
&mut self,
|
||||
env: &mut HashMap<String, String>,
|
||||
test_flags: &[String],
|
||||
gcc_path: Option<&str>,
|
||||
) -> Result<ConfigInfo, String> {
|
||||
use_system_gcc: bool,
|
||||
) -> Result<(), String> {
|
||||
env.insert("CARGO_INCREMENTAL".to_string(), "0".to_string());
|
||||
|
||||
let gcc_path = match gcc_path {
|
||||
Some(path) => path.to_string(),
|
||||
None => get_gcc_path()?,
|
||||
};
|
||||
env.insert("GCC_PATH".to_string(), gcc_path.clone());
|
||||
if self.gcc_path.is_empty() && !use_system_gcc {
|
||||
self.setup_gcc_path()?;
|
||||
}
|
||||
env.insert("GCC_PATH".to_string(), self.gcc_path.clone());
|
||||
|
||||
if self.cargo_target_dir.is_empty() {
|
||||
match env.get("CARGO_TARGET_DIR").filter(|dir| !dir.is_empty()) {
|
||||
Some(cargo_target_dir) => self.cargo_target_dir = cargo_target_dir.clone(),
|
||||
None => self.cargo_target_dir = "target/out".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
let os_name = get_os_name()?;
|
||||
let dylib_ext = match os_name.as_str() {
|
||||
self.dylib_ext = match os_name.as_str() {
|
||||
"Linux" => "so",
|
||||
"Darwin" => "dylib",
|
||||
os => return Err(format!("unsupported OS `{}`", os)),
|
||||
}
|
||||
.to_string();
|
||||
let rustc = match env.get("RUSTC") {
|
||||
Some(r) if !r.is_empty() => r.to_string(),
|
||||
_ => "rustc".to_string(),
|
||||
};
|
||||
let host_triple = get_rustc_host_triple()?;
|
||||
self.host_triple = match rustc_version_info(Some(&rustc))?.host {
|
||||
Some(host) => host,
|
||||
None => return Err("no host found".to_string()),
|
||||
};
|
||||
|
||||
if self.target_triple.is_empty() {
|
||||
if let Some(overwrite) = env.get("OVERWRITE_TARGET_TRIPLE") {
|
||||
self.target_triple = overwrite.clone();
|
||||
}
|
||||
}
|
||||
if self.target_triple.is_empty() {
|
||||
self.target_triple = self.host_triple.clone();
|
||||
}
|
||||
if self.target.is_empty() && !self.target_triple.is_empty() {
|
||||
self.target = self.target_triple.clone();
|
||||
}
|
||||
|
||||
let mut linker = None;
|
||||
let mut target_triple = host_triple.clone();
|
||||
let mut target = target_triple.clone();
|
||||
|
||||
// We skip binary name and the command.
|
||||
let mut args = std::env::args().skip(2);
|
||||
|
||||
let mut set_target_triple = false;
|
||||
let mut set_target = false;
|
||||
while let Some(arg) = args.next() {
|
||||
match arg.as_str() {
|
||||
"--target-triple" => {
|
||||
if let Some(arg) = args.next() {
|
||||
target_triple = arg;
|
||||
set_target_triple = true;
|
||||
} else {
|
||||
return Err(
|
||||
"Expected a value after `--target-triple`, found nothing".to_string()
|
||||
);
|
||||
}
|
||||
},
|
||||
"--target" => {
|
||||
if let Some(arg) = args.next() {
|
||||
target = arg;
|
||||
set_target = true;
|
||||
} else {
|
||||
return Err(
|
||||
"Expected a value after `--target`, found nothing".to_string()
|
||||
);
|
||||
}
|
||||
},
|
||||
_ => (),
|
||||
if self.host_triple != self.target_triple {
|
||||
if self.target_triple.is_empty() {
|
||||
return Err("Unknown non-native platform".to_string());
|
||||
}
|
||||
linker = Some(format!("-Clinker={}-gcc", self.target_triple));
|
||||
self.run_in_vm = true;
|
||||
}
|
||||
|
||||
if set_target_triple && !set_target {
|
||||
target = target_triple.clone();
|
||||
}
|
||||
|
||||
if host_triple != target_triple {
|
||||
linker = Some(format!("-Clinker={}-gcc", target_triple));
|
||||
}
|
||||
let current_dir =
|
||||
std_env::current_dir().map_err(|error| format!("`current_dir` failed: {:?}", error))?;
|
||||
let channel = if let Some(channel) = env.get("CHANNEL") {
|
||||
let channel = if self.channel == Channel::Release {
|
||||
"release"
|
||||
} else if let Some(channel) = env.get("CHANNEL") {
|
||||
channel.as_str()
|
||||
} else {
|
||||
"debug"
|
||||
};
|
||||
let cg_backend_path = current_dir
|
||||
|
||||
let has_builtin_backend = env
|
||||
.get("BUILTIN_BACKEND")
|
||||
.map(|backend| !backend.is_empty())
|
||||
.unwrap_or(false);
|
||||
|
||||
let mut rustflags = Vec::new();
|
||||
if has_builtin_backend {
|
||||
// It means we're building inside the rustc testsuite, so some options need to be handled
|
||||
// a bit differently.
|
||||
self.cg_backend_path = "gcc".to_string();
|
||||
|
||||
match env.get("RUSTC_SYSROOT") {
|
||||
Some(rustc_sysroot) if !rustc_sysroot.is_empty() => {
|
||||
rustflags.extend_from_slice(&["--sysroot".to_string(), rustc_sysroot.clone()]);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
// This should not be needed, but is necessary for the CI in the rust repository.
|
||||
// FIXME: Remove when the rust CI switches to the master version of libgccjit.
|
||||
rustflags.push("-Cpanic=abort".to_string());
|
||||
} else {
|
||||
self.cg_backend_path = current_dir
|
||||
.join("target")
|
||||
.join(channel)
|
||||
.join(&format!("librustc_codegen_gcc.{}", dylib_ext));
|
||||
let sysroot_path = current_dir.join("build_sysroot/sysroot");
|
||||
let mut rustflags = Vec::new();
|
||||
.join(&format!("librustc_codegen_gcc.{}", self.dylib_ext))
|
||||
.display()
|
||||
.to_string();
|
||||
self.sysroot_path = current_dir
|
||||
.join("build_sysroot/sysroot")
|
||||
.display()
|
||||
.to_string();
|
||||
rustflags.extend_from_slice(&["--sysroot".to_string(), self.sysroot_path.clone()]);
|
||||
};
|
||||
|
||||
// This environment variable is useful in case we want to change options of rustc commands.
|
||||
if let Some(cg_rustflags) = env.get("CG_RUSTFLAGS") {
|
||||
rustflags.push(cg_rustflags.clone());
|
||||
rustflags.extend_from_slice(&split_args(&cg_rustflags)?);
|
||||
}
|
||||
if let Some(test_flags) = env.get("TEST_FLAGS") {
|
||||
rustflags.extend_from_slice(&split_args(&test_flags)?);
|
||||
}
|
||||
|
||||
if let Some(linker) = linker {
|
||||
rustflags.push(linker.to_string());
|
||||
}
|
||||
|
||||
if self.no_default_features {
|
||||
rustflags.push("-Csymbol-mangling-version=v0".to_string());
|
||||
}
|
||||
|
||||
rustflags.extend_from_slice(&[
|
||||
"-Csymbol-mangling-version=v0".to_string(),
|
||||
"-Cdebuginfo=2".to_string(),
|
||||
format!("-Zcodegen-backend={}", cg_backend_path.display()),
|
||||
"--sysroot".to_string(),
|
||||
sysroot_path.display().to_string(),
|
||||
format!("-Zcodegen-backend={}", self.cg_backend_path),
|
||||
]);
|
||||
|
||||
// Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
|
||||
@ -103,7 +437,6 @@ pub fn set_config(
|
||||
if !env.contains_key(&"FAT_LTO".to_string()) {
|
||||
rustflags.push("-Clto=off".to_string());
|
||||
}
|
||||
rustflags.extend_from_slice(test_flags);
|
||||
// FIXME(antoyo): remove once the atomic shim is gone
|
||||
if os_name == "Darwin" {
|
||||
rustflags.extend_from_slice(&[
|
||||
@ -117,13 +450,17 @@ pub fn set_config(
|
||||
|
||||
let sysroot = current_dir.join(&format!(
|
||||
"build_sysroot/sysroot/lib/rustlib/{}/lib",
|
||||
target_triple
|
||||
self.target_triple,
|
||||
));
|
||||
let ld_library_path = format!(
|
||||
"{target}:{sysroot}:{gcc_path}",
|
||||
// FIXME: It's possible to pick another out directory. Would be nice to have a command
|
||||
// line option to change it.
|
||||
target = current_dir.join("target/out").display(),
|
||||
sysroot = sysroot.display(),
|
||||
gcc_path = self.gcc_path,
|
||||
);
|
||||
env.insert("LIBRARY_PATH".to_string(), ld_library_path.clone());
|
||||
env.insert("LD_LIBRARY_PATH".to_string(), ld_library_path.clone());
|
||||
env.insert("DYLD_LIBRARY_PATH".to_string(), ld_library_path);
|
||||
|
||||
@ -131,19 +468,93 @@ pub fn set_config(
|
||||
// To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
|
||||
// Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
|
||||
let path = std::env::var("PATH").unwrap_or_default();
|
||||
env.insert("PATH".to_string(), format!("/opt/gcc/bin:{}", path));
|
||||
env.insert(
|
||||
"PATH".to_string(),
|
||||
format!(
|
||||
"/opt/gcc/bin:/opt/m68k-unknown-linux-gnu/bin{}{}",
|
||||
if path.is_empty() { "" } else { ":" },
|
||||
path
|
||||
),
|
||||
);
|
||||
|
||||
let mut rustc_command = vec!["rustc".to_string()];
|
||||
rustc_command.extend_from_slice(&rustflags);
|
||||
rustc_command.extend_from_slice(&[
|
||||
self.rustc_command = vec![rustc];
|
||||
self.rustc_command.extend_from_slice(&rustflags);
|
||||
self.rustc_command.extend_from_slice(&[
|
||||
"-L".to_string(),
|
||||
"crate=target/out".to_string(),
|
||||
"--out-dir".to_string(),
|
||||
"target/out".to_string(),
|
||||
self.cargo_target_dir.clone(),
|
||||
]);
|
||||
Ok(ConfigInfo {
|
||||
target,
|
||||
target_triple,
|
||||
rustc_command,
|
||||
})
|
||||
|
||||
if !env.contains_key("RUSTC_LOG") {
|
||||
env.insert("RUSTC_LOG".to_string(), "warn".to_string());
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn show_usage() {
|
||||
println!(
|
||||
"\
|
||||
--target-triple [arg] : Set the target triple to [arg]
|
||||
--target [arg] : Set the target to [arg]
|
||||
--out-dir : Location where the files will be generated
|
||||
--release : Build in release mode
|
||||
--release-sysroot : Build sysroot in release mode
|
||||
--sysroot-panic-abort : Build the sysroot without unwinding support
|
||||
--config-file : Location of the config file to be used
|
||||
--cg_gcc-path : Location of the rustc_codegen_gcc root folder (used
|
||||
when run from another directory)
|
||||
--no-default-features : Add `--no-default-features` flag to cargo commands"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn download_gccjit(
|
||||
url: String,
|
||||
output_dir: &Path,
|
||||
tempfile_name: String,
|
||||
with_progress_bar: bool,
|
||||
) -> Result<(), String> {
|
||||
// Try curl. If that fails and we are on windows, fallback to PowerShell.
|
||||
let mut ret = run_command_with_output(
|
||||
&[
|
||||
&"curl",
|
||||
&"--speed-time",
|
||||
&"30",
|
||||
&"--speed-limit",
|
||||
&"10", // timeout if speed is < 10 bytes/sec for > 30 seconds
|
||||
&"--connect-timeout",
|
||||
&"30", // timeout if cannot connect within 30 seconds
|
||||
&"-o",
|
||||
&tempfile_name,
|
||||
&"--retry",
|
||||
&"3",
|
||||
&"-SRfL",
|
||||
if with_progress_bar {
|
||||
&"--progress-bar"
|
||||
} else {
|
||||
&"-s"
|
||||
},
|
||||
&url.as_str(),
|
||||
],
|
||||
Some(&output_dir),
|
||||
);
|
||||
if ret.is_err() && cfg!(windows) {
|
||||
eprintln!("Fallback to PowerShell");
|
||||
ret = run_command_with_output(
|
||||
&[
|
||||
&"PowerShell.exe",
|
||||
&"/nologo",
|
||||
&"-Command",
|
||||
&"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
|
||||
&format!(
|
||||
"(New-Object System.Net.WebClient).DownloadFile('{}', '{}')",
|
||||
url, tempfile_name,
|
||||
)
|
||||
.as_str(),
|
||||
],
|
||||
Some(&output_dir),
|
||||
);
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
19
compiler/rustc_codegen_gcc/build_system/src/info.rs
Normal file
@ -0,0 +1,19 @@
|
||||
use crate::config::ConfigInfo;
|
||||
|
||||
pub fn run() -> Result<(), String> {
|
||||
let mut config = ConfigInfo::default();
|
||||
|
||||
// We skip binary name and the `info` command.
|
||||
let mut args = std::env::args().skip(2);
|
||||
while let Some(arg) = args.next() {
|
||||
if arg == "--help" {
|
||||
println!("Display the path where the libgccjit will be located");
|
||||
return Ok(());
|
||||
}
|
||||
config.parse_argument(&arg, &mut args)?;
|
||||
}
|
||||
config.no_download = true;
|
||||
config.setup_gcc_path()?;
|
||||
println!("{}", config.gcc_path);
|
||||
Ok(())
|
||||
}
|
@ -2,12 +2,18 @@ use std::env;
|
||||
use std::process;
|
||||
|
||||
mod build;
|
||||
mod cargo;
|
||||
mod clean;
|
||||
mod clone_gcc;
|
||||
mod config;
|
||||
mod info;
|
||||
mod prepare;
|
||||
mod rustc_info;
|
||||
mod test;
|
||||
mod utils;
|
||||
|
||||
const BUILD_DIR: &str = "build";
|
||||
|
||||
macro_rules! arg_error {
|
||||
($($err:tt)*) => {{
|
||||
eprintln!($($err)*);
|
||||
@ -22,17 +28,25 @@ fn usage() {
|
||||
"\
|
||||
Available commands for build_system:
|
||||
|
||||
cargo : Run cargo command
|
||||
clean : Run clean command
|
||||
prepare : Run prepare command
|
||||
build : Run build command
|
||||
test : Run test command
|
||||
info : Run info command
|
||||
clone-gcc : Run clone-gcc command
|
||||
--help : Show this message"
|
||||
);
|
||||
}
|
||||
|
||||
pub enum Command {
|
||||
Cargo,
|
||||
Clean,
|
||||
CloneGcc,
|
||||
Prepare,
|
||||
Build,
|
||||
Test,
|
||||
Info,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
@ -41,9 +55,13 @@ fn main() {
|
||||
}
|
||||
|
||||
let command = match env::args().nth(1).as_deref() {
|
||||
Some("cargo") => Command::Cargo,
|
||||
Some("clean") => Command::Clean,
|
||||
Some("prepare") => Command::Prepare,
|
||||
Some("build") => Command::Build,
|
||||
Some("test") => Command::Test,
|
||||
Some("info") => Command::Info,
|
||||
Some("clone-gcc") => Command::CloneGcc,
|
||||
Some("--help") => {
|
||||
usage();
|
||||
process::exit(0);
|
||||
@ -57,11 +75,15 @@ fn main() {
|
||||
};
|
||||
|
||||
if let Err(e) = match command {
|
||||
Command::Cargo => cargo::run(),
|
||||
Command::Clean => clean::run(),
|
||||
Command::Prepare => prepare::run(),
|
||||
Command::Build => build::run(),
|
||||
Command::Test => test::run(),
|
||||
Command::Info => info::run(),
|
||||
Command::CloneGcc => clone_gcc::run(),
|
||||
} {
|
||||
eprintln!("Command failed to run: {e:?}");
|
||||
eprintln!("Command failed to run: {e}");
|
||||
process::exit(1);
|
||||
}
|
||||
}
|
||||
|
@ -1,10 +1,16 @@
|
||||
use crate::rustc_info::get_rustc_path;
|
||||
use crate::utils::{cargo_install, git_clone, run_command, run_command_with_output, walk_dir};
|
||||
use crate::utils::{
|
||||
cargo_install, git_clone_root_dir, remove_file, run_command, run_command_with_output, walk_dir,
|
||||
};
|
||||
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
fn prepare_libcore(sysroot_path: &Path, libgccjit12_patches: bool, cross_compile: bool) -> Result<(), String> {
|
||||
fn prepare_libcore(
|
||||
sysroot_path: &Path,
|
||||
libgccjit12_patches: bool,
|
||||
cross_compile: bool,
|
||||
) -> Result<(), String> {
|
||||
let rustc_path = match get_rustc_path() {
|
||||
Some(path) => path,
|
||||
None => return Err("`rustc` path not found".to_string()),
|
||||
@ -88,10 +94,14 @@ fn prepare_libcore(sysroot_path: &Path, libgccjit12_patches: bool, cross_compile
|
||||
},
|
||||
)?;
|
||||
if cross_compile {
|
||||
walk_dir("cross_patches", |_| Ok(()), |file_path: &Path| {
|
||||
walk_dir(
|
||||
"patches/cross_patches",
|
||||
|_| Ok(()),
|
||||
|file_path: &Path| {
|
||||
patches.push(file_path.to_path_buf());
|
||||
Ok(())
|
||||
})?;
|
||||
},
|
||||
)?;
|
||||
}
|
||||
if libgccjit12_patches {
|
||||
walk_dir(
|
||||
@ -121,6 +131,30 @@ fn prepare_libcore(sysroot_path: &Path, libgccjit12_patches: bool, cross_compile
|
||||
)?;
|
||||
}
|
||||
println!("Successfully prepared libcore for building");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// TODO: remove when we can ignore warnings in rustdoc tests.
|
||||
fn prepare_rand() -> Result<(), String> {
|
||||
// Apply patch for the rand crate.
|
||||
let file_path = "patches/crates/0001-Remove-deny-warnings.patch";
|
||||
let rand_dir = Path::new("build/rand");
|
||||
println!("[GIT] apply `{}`", file_path);
|
||||
let path = Path::new("../..").join(file_path);
|
||||
run_command_with_output(&[&"git", &"apply", &path], Some(rand_dir))?;
|
||||
run_command_with_output(&[&"git", &"add", &"-A"], Some(rand_dir))?;
|
||||
run_command_with_output(
|
||||
&[
|
||||
&"git",
|
||||
&"commit",
|
||||
&"--no-gpg-sign",
|
||||
&"-m",
|
||||
&format!("Patch {}", path.display()),
|
||||
],
|
||||
Some(rand_dir),
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -129,8 +163,7 @@ fn build_raytracer(repo_dir: &Path) -> Result<(), String> {
|
||||
run_command(&[&"cargo", &"build"], Some(repo_dir))?;
|
||||
let mv_target = repo_dir.join("raytracer_cg_llvm");
|
||||
if mv_target.is_file() {
|
||||
std::fs::remove_file(&mv_target)
|
||||
.map_err(|e| format!("Failed to remove file `{}`: {e:?}", mv_target.display()))?;
|
||||
remove_file(&mv_target)?;
|
||||
}
|
||||
run_command(
|
||||
&[&"mv", &"target/debug/main", &"raytracer_cg_llvm"],
|
||||
@ -143,28 +176,13 @@ fn clone_and_setup<F>(repo_url: &str, checkout_commit: &str, extra: Option<F>) -
|
||||
where
|
||||
F: Fn(&Path) -> Result<(), String>,
|
||||
{
|
||||
let clone_result = git_clone(repo_url, None)?;
|
||||
let clone_result = git_clone_root_dir(repo_url, &Path::new(crate::BUILD_DIR), false)?;
|
||||
if !clone_result.ran_clone {
|
||||
println!("`{}` has already been cloned", clone_result.repo_name);
|
||||
}
|
||||
let repo_path = Path::new(&clone_result.repo_name);
|
||||
let repo_path = Path::new(crate::BUILD_DIR).join(&clone_result.repo_name);
|
||||
run_command(&[&"git", &"checkout", &"--", &"."], Some(&repo_path))?;
|
||||
run_command(&[&"git", &"checkout", &checkout_commit], Some(&repo_path))?;
|
||||
let filter = format!("-{}-", clone_result.repo_name);
|
||||
walk_dir(
|
||||
"crate_patches",
|
||||
|_| Ok(()),
|
||||
|file_path| {
|
||||
let patch = file_path.as_os_str().to_str().unwrap();
|
||||
if patch.contains(&filter) && patch.ends_with(".patch") {
|
||||
run_command_with_output(
|
||||
&[&"git", &"am", &file_path.canonicalize().unwrap()],
|
||||
Some(&repo_path),
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
},
|
||||
)?;
|
||||
if let Some(extra) = extra {
|
||||
extra(&repo_path)?;
|
||||
}
|
||||
@ -210,8 +228,7 @@ impl PrepareArg {
|
||||
--only-libcore : Only setup libcore and don't clone other repositories
|
||||
--cross : Apply the patches needed to do cross-compilation
|
||||
--libgccjit12-patches : Apply patches needed for libgccjit12
|
||||
--help : Show this help
|
||||
"#
|
||||
--help : Show this help"#
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -230,7 +247,7 @@ pub fn run() -> Result<(), String> {
|
||||
let to_clone = &[
|
||||
(
|
||||
"https://github.com/rust-random/rand.git",
|
||||
"0f933f9c7176e53b2a3c7952ded484e1783f0bf1",
|
||||
"1f4507a8e1cf8050e4ceef95eeda8f64645b6719",
|
||||
None,
|
||||
),
|
||||
(
|
||||
@ -248,6 +265,8 @@ pub fn run() -> Result<(), String> {
|
||||
for (repo_url, checkout_commit, cb) in to_clone {
|
||||
clone_and_setup(repo_url, checkout_commit, *cb)?;
|
||||
}
|
||||
|
||||
prepare_rand()?;
|
||||
}
|
||||
|
||||
println!("Successfully ran `prepare`");
|
||||
|
File diff suppressed because it is too large
@ -2,7 +2,7 @@ use std::collections::HashMap;
|
||||
use std::ffi::OsStr;
|
||||
use std::fmt::Debug;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::{Command, ExitStatus, Output};
|
||||
|
||||
fn get_command_inner(
|
||||
@ -29,11 +29,13 @@ fn check_exit_status(
|
||||
input: &[&dyn AsRef<OsStr>],
|
||||
cwd: Option<&Path>,
|
||||
exit_status: ExitStatus,
|
||||
output: Option<&Output>,
|
||||
show_err: bool,
|
||||
) -> Result<(), String> {
|
||||
if exit_status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!(
|
||||
return Ok(());
|
||||
}
|
||||
let mut error = format!(
|
||||
"Command `{}`{} exited with status {:?}",
|
||||
input
|
||||
.iter()
|
||||
@ -42,9 +44,25 @@ fn check_exit_status(
|
||||
.join(" "),
|
||||
cwd.map(|cwd| format!(" (running in folder `{}`)", cwd.display()))
|
||||
.unwrap_or_default(),
|
||||
exit_status.code(),
|
||||
))
|
||||
exit_status.code()
|
||||
);
|
||||
let input = input.iter().map(|i| i.as_ref()).collect::<Vec<&OsStr>>();
|
||||
if show_err {
|
||||
eprintln!("Command `{:?}` failed", input);
|
||||
}
|
||||
if let Some(output) = output {
|
||||
let stdout = String::from_utf8_lossy(&output.stdout);
|
||||
if !stdout.is_empty() {
|
||||
error.push_str("\n==== STDOUT ====\n");
|
||||
error.push_str(&*stdout);
|
||||
}
|
||||
let stderr = String::from_utf8_lossy(&output.stderr);
|
||||
if !stderr.is_empty() {
|
||||
error.push_str("\n==== STDERR ====\n");
|
||||
error.push_str(&*stderr);
|
||||
}
|
||||
}
|
||||
Err(error)
|
||||
}
|
||||
|
||||
fn command_error<D: Debug>(input: &[&dyn AsRef<OsStr>], cwd: &Option<&Path>, error: D) -> String {
|
||||
@ -73,7 +91,7 @@ pub fn run_command_with_env(
|
||||
let output = get_command_inner(input, cwd, env)
|
||||
.output()
|
||||
.map_err(|e| command_error(input, &cwd, e))?;
|
||||
check_exit_status(input, cwd, output.status)?;
|
||||
check_exit_status(input, cwd, output.status, Some(&output), true)?;
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
@ -86,7 +104,7 @@ pub fn run_command_with_output(
|
||||
.map_err(|e| command_error(input, &cwd, e))?
|
||||
.wait()
|
||||
.map_err(|e| command_error(input, &cwd, e))?;
|
||||
check_exit_status(input, cwd, exit_status)?;
|
||||
check_exit_status(input, cwd, exit_status, None, true)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -100,7 +118,21 @@ pub fn run_command_with_output_and_env(
|
||||
.map_err(|e| command_error(input, &cwd, e))?
|
||||
.wait()
|
||||
.map_err(|e| command_error(input, &cwd, e))?;
|
||||
check_exit_status(input, cwd, exit_status)?;
|
||||
check_exit_status(input, cwd, exit_status, None, true)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn run_command_with_output_and_env_no_err(
|
||||
input: &[&dyn AsRef<OsStr>],
|
||||
cwd: Option<&Path>,
|
||||
env: Option<&HashMap<String, String>>,
|
||||
) -> Result<(), String> {
|
||||
let exit_status = get_command_inner(input, cwd, env)
|
||||
.spawn()
|
||||
.map_err(|e| command_error(input, &cwd, e))?
|
||||
.wait()
|
||||
.map_err(|e| command_error(input, &cwd, e))?;
|
||||
check_exit_status(input, cwd, exit_status, None, false)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -143,80 +175,157 @@ pub fn get_os_name() -> Result<String, String> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_rustc_host_triple() -> Result<String, String> {
|
||||
let output = run_command(&[&"rustc", &"-vV"], None)?;
|
||||
let content = std::str::from_utf8(&output.stdout).unwrap_or("");
|
||||
|
||||
for line in content.split('\n').map(|line| line.trim()) {
|
||||
if !line.starts_with("host:") {
|
||||
continue;
|
||||
}
|
||||
return Ok(line.split(':').nth(1).unwrap().trim().to_string());
|
||||
}
|
||||
Err("Cannot find host triple".to_string())
|
||||
#[derive(Default, PartialEq)]
|
||||
pub struct RustcVersionInfo {
|
||||
pub short: String,
|
||||
pub version: String,
|
||||
pub host: Option<String>,
|
||||
pub commit_hash: Option<String>,
|
||||
pub commit_date: Option<String>,
|
||||
}
|
||||
|
||||
pub fn get_gcc_path() -> Result<String, String> {
|
||||
let content = match fs::read_to_string("gcc_path") {
|
||||
Ok(content) => content,
|
||||
Err(_) => {
|
||||
return Err(
|
||||
"Please put the path to your custom build of libgccjit in the file \
|
||||
`gcc_path`, see Readme.md for details"
|
||||
.into(),
|
||||
)
|
||||
pub fn rustc_toolchain_version_info(toolchain: &str) -> Result<RustcVersionInfo, String> {
|
||||
rustc_version_info_inner(None, Some(toolchain))
|
||||
}
|
||||
|
||||
pub fn rustc_version_info(rustc: Option<&str>) -> Result<RustcVersionInfo, String> {
|
||||
rustc_version_info_inner(rustc, None)
|
||||
}
|
||||
|
||||
fn rustc_version_info_inner(
|
||||
rustc: Option<&str>,
|
||||
toolchain: Option<&str>,
|
||||
) -> Result<RustcVersionInfo, String> {
|
||||
let output = if let Some(toolchain) = toolchain {
|
||||
run_command(&[&rustc.unwrap_or("rustc"), &toolchain, &"-vV"], None)
|
||||
} else {
|
||||
run_command(&[&rustc.unwrap_or("rustc"), &"-vV"], None)
|
||||
}?;
|
||||
let content = std::str::from_utf8(&output.stdout).unwrap_or("");
|
||||
|
||||
let mut info = RustcVersionInfo::default();
|
||||
let mut lines = content.split('\n');
|
||||
info.short = match lines.next() {
|
||||
Some(s) => s.to_string(),
|
||||
None => return Err("failed to retrieve rustc version".to_string()),
|
||||
};
|
||||
|
||||
for line in lines.map(|line| line.trim()) {
|
||||
match line.split_once(':') {
|
||||
Some(("host", data)) => info.host = Some(data.trim().to_string()),
|
||||
Some(("release", data)) => info.version = data.trim().to_string(),
|
||||
Some(("commit-hash", data)) => info.commit_hash = Some(data.trim().to_string()),
|
||||
Some(("commit-date", data)) => info.commit_date = Some(data.trim().to_string()),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
if info.version.is_empty() {
|
||||
Err("failed to retrieve rustc version".to_string())
|
||||
} else {
|
||||
Ok(info)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_toolchain() -> Result<String, String> {
|
||||
let content = match fs::read_to_string("rust-toolchain") {
|
||||
Ok(content) => content,
|
||||
Err(_) => return Err("No `rust-toolchain` file found".to_string()),
|
||||
};
|
||||
match content
|
||||
.split('\n')
|
||||
.map(|line| line.trim())
|
||||
.filter(|line| !line.is_empty())
|
||||
.filter_map(|line| {
|
||||
if !line.starts_with("channel") {
|
||||
return None;
|
||||
}
|
||||
line.split('"').skip(1).next()
|
||||
})
|
||||
.next()
|
||||
{
|
||||
Some(gcc_path) => {
|
||||
let path = Path::new(gcc_path);
|
||||
if !path.exists() {
|
||||
Err(format!(
|
||||
"Path `{}` contained in the `gcc_path` file doesn't exist",
|
||||
gcc_path,
|
||||
))
|
||||
} else {
|
||||
Ok(gcc_path.into())
|
||||
}
|
||||
}
|
||||
None => Err("No path found in `gcc_path` file".into()),
|
||||
Some(toolchain) => Ok(toolchain.to_string()),
|
||||
None => Err("Couldn't find `channel` in `rust-toolchain` file".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CloneResult {
|
||||
pub ran_clone: bool,
|
||||
pub repo_name: String,
|
||||
pub repo_dir: String,
|
||||
}
|
||||
|
||||
pub fn git_clone(to_clone: &str, dest: Option<&Path>) -> Result<CloneResult, String> {
|
||||
let repo_name = to_clone.split('/').last().unwrap();
|
||||
let repo_name = match repo_name.strip_suffix(".git") {
|
||||
Some(n) => n.to_string(),
|
||||
None => repo_name.to_string(),
|
||||
};
|
||||
|
||||
let dest = dest
|
||||
.map(|dest| dest.join(&repo_name))
|
||||
.unwrap_or_else(|| Path::new(&repo_name).into());
|
||||
fn git_clone_inner(
|
||||
to_clone: &str,
|
||||
dest: &Path,
|
||||
shallow_clone: bool,
|
||||
repo_name: String,
|
||||
) -> Result<CloneResult, String> {
|
||||
if dest.is_dir() {
|
||||
return Ok(CloneResult {
|
||||
ran_clone: false,
|
||||
repo_name,
|
||||
repo_dir: dest.display().to_string(),
|
||||
});
|
||||
}
|
||||
|
||||
run_command_with_output(&[&"git", &"clone", &to_clone, &dest], None)?;
|
||||
let mut command: Vec<&dyn AsRef<OsStr>> = vec![&"git", &"clone", &to_clone, &dest];
|
||||
if shallow_clone {
|
||||
command.push(&"--depth");
|
||||
command.push(&"1");
|
||||
}
|
||||
run_command_with_output(&command, None)?;
|
||||
Ok(CloneResult {
|
||||
ran_clone: true,
|
||||
repo_name,
|
||||
repo_dir: dest.display().to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
fn get_repo_name(url: &str) -> String {
|
||||
let repo_name = url.split('/').last().unwrap();
|
||||
match repo_name.strip_suffix(".git") {
|
||||
Some(n) => n.to_string(),
|
||||
None => repo_name.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn git_clone(
|
||||
to_clone: &str,
|
||||
dest: Option<&Path>,
|
||||
shallow_clone: bool,
|
||||
) -> Result<CloneResult, String> {
|
||||
let repo_name = get_repo_name(to_clone);
|
||||
let tmp: PathBuf;
|
||||
|
||||
let dest = match dest {
|
||||
Some(dest) => dest,
|
||||
None => {
|
||||
tmp = repo_name.clone().into();
|
||||
&tmp
|
||||
}
|
||||
};
|
||||
git_clone_inner(to_clone, dest, shallow_clone, repo_name)
|
||||
}
|
||||
|
||||
/// This function differs from `git_clone` in how it handles *where* the repository will be cloned.
|
||||
/// In `git_clone`, it is cloned in the provided path. In this function, the path you provide is
|
||||
/// the parent folder. So if you pass "a" as folder and try to clone "b.git", it will be cloned into
|
||||
/// `a/b`.
|
||||
pub fn git_clone_root_dir(
|
||||
to_clone: &str,
|
||||
dest_parent_dir: &Path,
|
||||
shallow_clone: bool,
|
||||
) -> Result<CloneResult, String> {
|
||||
let repo_name = get_repo_name(to_clone);
|
||||
|
||||
git_clone_inner(
|
||||
to_clone,
|
||||
&dest_parent_dir.join(&repo_name),
|
||||
shallow_clone,
|
||||
repo_name,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn walk_dir<P, D, F>(dir: P, mut dir_cb: D, mut file_cb: F) -> Result<(), String>
|
||||
where
|
||||
P: AsRef<Path>,
|
||||
@ -238,3 +347,105 @@ where
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn split_args(args: &str) -> Result<Vec<String>, String> {
|
||||
let mut out = Vec::new();
|
||||
let mut start = 0;
|
||||
let args = args.trim();
|
||||
let mut iter = args.char_indices().peekable();
|
||||
|
||||
while let Some((pos, c)) = iter.next() {
|
||||
if c == ' ' {
|
||||
out.push(args[start..pos].to_string());
|
||||
let mut found_start = false;
|
||||
while let Some((pos, c)) = iter.peek() {
|
||||
if *c != ' ' {
|
||||
start = *pos;
|
||||
found_start = true;
|
||||
break;
|
||||
} else {
|
||||
iter.next();
|
||||
}
|
||||
}
|
||||
if !found_start {
|
||||
return Ok(out);
|
||||
}
|
||||
} else if c == '"' || c == '\'' {
|
||||
let end = c;
|
||||
let mut found_end = false;
|
||||
while let Some((_, c)) = iter.next() {
|
||||
if c == end {
|
||||
found_end = true;
|
||||
break;
|
||||
} else if c == '\\' {
|
||||
// We skip the escaped character.
|
||||
iter.next();
|
||||
}
|
||||
}
|
||||
if !found_end {
|
||||
return Err(format!(
|
||||
"Didn't find `{}` at the end of `{}`",
|
||||
end,
|
||||
&args[start..]
|
||||
));
|
||||
}
|
||||
} else if c == '\\' {
|
||||
// We skip the escaped character.
|
||||
iter.next();
|
||||
}
|
||||
}
|
||||
let s = args[start..].trim();
|
||||
if !s.is_empty() {
|
||||
out.push(s.to_string());
|
||||
}
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
pub fn remove_file<P: AsRef<Path> + ?Sized>(file_path: &P) -> Result<(), String> {
|
||||
std::fs::remove_file(file_path).map_err(|error| {
|
||||
format!(
|
||||
"Failed to remove `{}`: {:?}",
|
||||
file_path.as_ref().display(),
|
||||
error
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_symlink<P: AsRef<Path>, Q: AsRef<Path>>(original: P, link: Q) -> Result<(), String> {
|
||||
#[cfg(windows)]
|
||||
let symlink = std::os::windows::fs::symlink_file;
|
||||
#[cfg(not(windows))]
|
||||
let symlink = std::os::unix::fs::symlink;
|
||||
|
||||
symlink(&original, &link).map_err(|err| {
|
||||
format!(
|
||||
"failed to create a symlink `{}` to `{}`: {:?}",
|
||||
original.as_ref().display(),
|
||||
link.as_ref().display(),
|
||||
err,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_split_args() {
|
||||
// Missing `"` at the end.
|
||||
assert!(split_args("\"tada").is_err());
|
||||
// Missing `'` at the end.
|
||||
assert!(split_args("\'tada").is_err());
|
||||
|
||||
assert_eq!(
|
||||
split_args("a \"b\" c"),
|
||||
Ok(vec!["a".to_string(), "\"b\"".to_string(), "c".to_string()])
|
||||
);
|
||||
// Trailing whitespace characters.
|
||||
assert_eq!(
|
||||
split_args(" a \"b\" c "),
|
||||
Ok(vec!["a".to_string(), "\"b\"".to_string(), "c".to_string()])
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
if [ -z $CHANNEL ]; then
|
||||
export CHANNEL='debug'
|
||||
fi
|
||||
|
||||
pushd $(dirname "$0") >/dev/null
|
||||
source config.sh
|
||||
|
||||
# read nightly compiler from rust-toolchain file
|
||||
TOOLCHAIN=$(cat rust-toolchain | grep channel | sed 's/channel = "\(.*\)"/\1/')
|
||||
|
||||
popd >/dev/null
|
||||
|
||||
if [[ $(${RUSTC} -V) != $(${RUSTC} +${TOOLCHAIN} -V) ]]; then
|
||||
echo "rustc_codegen_gcc is build for $(rustc +${TOOLCHAIN} -V) but the default rustc version is $(rustc -V)."
|
||||
echo "Using $(rustc +${TOOLCHAIN} -V)."
|
||||
fi
|
||||
|
||||
cmd=$1
|
||||
shift
|
||||
|
||||
RUSTDOCFLAGS="$RUSTFLAGS" cargo +${TOOLCHAIN} $cmd $@
|
@ -1,6 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
set -v
|
||||
|
||||
rm -rf target/ build_sysroot/{sysroot/,sysroot_src/,target/,Cargo.lock} perf.data{,.old}
|
||||
rm -rf regex/ simple-raytracer/
|
2
compiler/rustc_codegen_gcc/config.example.toml
Normal file
@ -0,0 +1,2 @@
|
||||
gcc-path = "gcc-build/gcc"
|
||||
# download-gccjit = true
|
@ -1,85 +0,0 @@
|
||||
set -e
|
||||
|
||||
export CARGO_INCREMENTAL=0
|
||||
|
||||
if [ -f ./gcc_path ]; then
|
||||
export GCC_PATH=$(cat gcc_path)
|
||||
elif (( $use_system_gcc == 1 )); then
|
||||
echo 'Using system GCC'
|
||||
else
|
||||
echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [[ -z "$RUSTC" ]]; then
|
||||
export RUSTC="rustc"
|
||||
fi
|
||||
|
||||
unamestr=`uname`
|
||||
if [[ "$unamestr" == 'Linux' ]]; then
|
||||
dylib_ext='so'
|
||||
elif [[ "$unamestr" == 'Darwin' ]]; then
|
||||
dylib_ext='dylib'
|
||||
else
|
||||
echo "Unsupported os"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
HOST_TRIPLE=$($RUSTC -vV | grep host | cut -d: -f2 | tr -d " ")
|
||||
# TODO: remove $OVERWRITE_TARGET_TRIPLE when config.sh is removed.
|
||||
TARGET_TRIPLE="${OVERWRITE_TARGET_TRIPLE:-$HOST_TRIPLE}"
|
||||
|
||||
linker=''
|
||||
RUN_WRAPPER=''
|
||||
if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
|
||||
RUN_WRAPPER=run_in_vm
|
||||
if [[ "$TARGET_TRIPLE" == "m68k-unknown-linux-gnu" ]]; then
|
||||
linker='-Clinker=m68k-unknown-linux-gnu-gcc'
|
||||
elif [[ "$TARGET_TRIPLE" == "aarch64-unknown-linux-gnu" ]]; then
|
||||
# We are cross-compiling for aarch64. Use the correct linker and run tests in qemu.
|
||||
linker='-Clinker=aarch64-linux-gnu-gcc'
|
||||
else
|
||||
echo "Unknown non-native platform"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Since we don't support ThinLTO, disable LTO completely when not trying to do LTO.
|
||||
# TODO(antoyo): remove when we can handle ThinLTO.
|
||||
disable_lto_flags=''
|
||||
if [[ ! -v FAT_LTO ]]; then
|
||||
disable_lto_flags='-Clto=off'
|
||||
fi
|
||||
|
||||
if [[ -z "$BUILTIN_BACKEND" ]]; then
|
||||
export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=$(pwd)/target/${CHANNEL:-debug}/librustc_codegen_gcc.$dylib_ext --sysroot $(pwd)/build_sysroot/sysroot $TEST_FLAGS"
|
||||
else
|
||||
export RUSTFLAGS="$CG_RUSTFLAGS $linker -Csymbol-mangling-version=v0 -Cdebuginfo=2 $disable_lto_flags -Zcodegen-backend=gcc $TEST_FLAGS -Cpanic=abort"
|
||||
|
||||
if [[ ! -z "$RUSTC_SYSROOT" ]]; then
|
||||
export RUSTFLAGS="$RUSTFLAGS --sysroot $RUSTC_SYSROOT"
|
||||
fi
|
||||
fi
|
||||
|
||||
# FIXME(antoyo): remove once the atomic shim is gone
|
||||
if [[ unamestr == 'Darwin' ]]; then
|
||||
export RUSTFLAGS="$RUSTFLAGS -Clink-arg=-undefined -Clink-arg=dynamic_lookup"
|
||||
fi
|
||||
|
||||
if [[ -z "$cargo_target_dir" ]]; then
|
||||
RUST_CMD="$RUSTC $RUSTFLAGS -L crate=target/out --out-dir target/out"
|
||||
cargo_target_dir="target/out"
|
||||
else
|
||||
RUST_CMD="$RUSTC $RUSTFLAGS -L crate=$cargo_target_dir --out-dir $cargo_target_dir"
|
||||
fi
|
||||
export RUSTC_LOG=warn # display metadata load errors
|
||||
|
||||
export LD_LIBRARY_PATH="$(pwd)/target/out:$(pwd)/build_sysroot/sysroot/lib/rustlib/$TARGET_TRIPLE/lib"
|
||||
if [[ ! -z "$:$GCC_PATH" ]]; then
|
||||
export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GCC_PATH"
|
||||
fi
|
||||
|
||||
export DYLD_LIBRARY_PATH=$LD_LIBRARY_PATH
|
||||
# NOTE: To avoid the -fno-inline errors, use /opt/gcc/bin/gcc instead of cc.
|
||||
# To do so, add a symlink for cc to /opt/gcc/bin/gcc in our PATH.
|
||||
# Another option would be to add the following Rust flag: -Clinker=/opt/gcc/bin/gcc
|
||||
export PATH="/opt/gcc/bin:/opt/m68k-unknown-linux-gnu/bin:$PATH"
|
@ -1,32 +0,0 @@
|
||||
From a8fb97120d71252538b6b026695df40d02696bdb Mon Sep 17 00:00:00 2001
|
||||
From: bjorn3 <bjorn3@users.noreply.github.com>
|
||||
Date: Sat, 15 Aug 2020 20:04:38 +0200
|
||||
Subject: [PATCH] [rand] Disable failing test
|
||||
|
||||
---
|
||||
src/distributions/uniform.rs | 3 ++-
|
||||
1 file changed, 2 insertions(+), 1 deletion(-)
|
||||
|
||||
diff --git a/src/distributions/uniform.rs b/src/distributions/uniform.rs
|
||||
index 480b859..c80bb6f 100644
|
||||
--- a/src/distributions/uniform.rs
|
||||
+++ b/src/distributions/uniform.rs
|
||||
@@ -1085,7 +1085,7 @@ mod tests {
|
||||
_ => panic!("`UniformDurationMode` was not serialized/deserialized correctly")
|
||||
}
|
||||
}
|
||||
-
|
||||
+
|
||||
#[test]
|
||||
#[cfg(feature = "serde1")]
|
||||
fn test_uniform_serialization() {
|
||||
@@ -1314,6 +1314,7 @@ mod tests {
|
||||
not(target_arch = "wasm32"),
|
||||
not(target_arch = "asmjs")
|
||||
))]
|
||||
+ #[ignore] // FIXME
|
||||
fn test_float_assertions() {
|
||||
use super::SampleUniform;
|
||||
use std::panic::catch_unwind;
|
||||
--
|
||||
2.20.1
|
@ -0,0 +1 @@
|
||||
INPUT(libLLVM.so.18.1-rust-1.78.0-nightly)
|
3
compiler/rustc_codegen_gcc/doc/debugging-gcc-lto.md
Normal file
@ -0,0 +1,3 @@
|
||||
# How to debug GCC LTO
|
||||
|
||||
Run the command with `-v -save-temps`, then extract the `lto1` line from the output and run it under the debugger.
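A minimal sketch of that workflow (the compiler invocation is only a placeholder):

```bash
# Re-run the failing build verbosely and keep the temporary files.
<your-compiler-invocation> -v -save-temps 2>&1 | tee lto-log.txt
# Find the `lto1` invocation in the log and run it under the debugger.
grep lto1 lto-log.txt
gdb --args <the lto1 command line found above>
```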
|
74
compiler/rustc_codegen_gcc/doc/debugging-libgccjit.md
Normal file
@ -0,0 +1,74 @@
|
||||
# Debugging libgccjit
|
||||
|
||||
Sometimes, libgccjit will crash and output an error like this:
|
||||
|
||||
```
|
||||
during RTL pass: expand
|
||||
libgccjit.so: error: in expmed_mode_index, at expmed.h:249
|
||||
0x7f0da2e61a35 expmed_mode_index
|
||||
../../../gcc/gcc/expmed.h:249
|
||||
0x7f0da2e61aa4 expmed_op_cost_ptr
|
||||
../../../gcc/gcc/expmed.h:271
|
||||
0x7f0da2e620dc sdiv_cost_ptr
|
||||
../../../gcc/gcc/expmed.h:540
|
||||
0x7f0da2e62129 sdiv_cost
|
||||
../../../gcc/gcc/expmed.h:558
|
||||
0x7f0da2e73c12 expand_divmod(int, tree_code, machine_mode, rtx_def*, rtx_def*, rtx_def*, int)
|
||||
../../../gcc/gcc/expmed.c:4335
|
||||
0x7f0da2ea1423 expand_expr_real_2(separate_ops*, rtx_def*, machine_mode, expand_modifier)
|
||||
../../../gcc/gcc/expr.c:9240
|
||||
0x7f0da2cd1a1e expand_gimple_stmt_1
|
||||
../../../gcc/gcc/cfgexpand.c:3796
|
||||
0x7f0da2cd1c30 expand_gimple_stmt
|
||||
../../../gcc/gcc/cfgexpand.c:3857
|
||||
0x7f0da2cd90a9 expand_gimple_basic_block
|
||||
../../../gcc/gcc/cfgexpand.c:5898
|
||||
0x7f0da2cdade8 execute
|
||||
../../../gcc/gcc/cfgexpand.c:6582
|
||||
```
|
||||
|
||||
To see the code which causes this error, call the following function:
|
||||
|
||||
```c
|
||||
gcc_jit_context_dump_to_file(ctxt, "/tmp/output.c", 1 /* update_locations */)
|
||||
```
|
||||
|
||||
This will create a C-like file and add the locations into the IR pointing to this C file.
|
||||
Then, rerun the program and it will output the location on the second line:
|
||||
|
||||
```
|
||||
libgccjit.so: /tmp/something.c:61322:0: error: in expmed_mode_index, at expmed.h:249
|
||||
```
|
||||
|
||||
Or add a breakpoint to `add_error` in gdb and print the line number using:
|
||||
|
||||
```
|
||||
p loc->m_line
|
||||
p loc->m_filename->m_buffer
|
||||
```
|
||||
|
||||
To print a debug representation of a tree:
|
||||
|
||||
```c
|
||||
debug_tree(expr);
|
||||
```
|
||||
|
||||
(defined in print-tree.h)
|
||||
|
||||
To print a debug representation of a gimple struct:
|
||||
|
||||
```c
|
||||
debug_gimple_stmt(gimple_struct)
|
||||
```
|
||||
|
||||
To get the `rustc` command to run in `gdb`, add the `--verbose` flag to `cargo build`.
|
||||
|
||||
To have the correct file paths in `gdb` instead of `/usr/src/debug/gcc/libstdc++-v3/libsupc++/eh_personality.cc`:
|
||||
|
||||
Maybe by calling the following at the beginning of gdb:
|
||||
|
||||
```
|
||||
set substitute-path /usr/src/debug/gcc /path/to/gcc-repo/gcc
|
||||
```
|
||||
|
||||
TODO(antoyo): but that's not what I remember I was doing.
|
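Putting the pieces above together, a rough gdb session might look like the following sketch. The rustc argument list is a placeholder (copy the real one from the verbose build output), and the `add_error` breakpoint assumes libgccjit was built with debug info:

```bash
# 1. Get the exact rustc invocation (add --verbose to the usual build command).
../y.sh cargo build --verbose 2> rustc-cmd.log

# 2. Re-run that invocation under gdb (placeholder arguments; copy them from rustc-cmd.log).
gdb --args rustc <arguments copied from rustc-cmd.log>

# 3. Inside gdb, break on libgccjit's error reporting and inspect the location:
#    (gdb) break add_error    # answer 'y' if asked to make the breakpoint pending
#    (gdb) run
#    (gdb) p loc->m_line
#    (gdb) p loc->m_filename->m_buffer
```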
27
compiler/rustc_codegen_gcc/doc/errors.md
Normal file
@ -0,0 +1,27 @@
# Common errors

This file lists errors that were encountered and how to fix them.

### `failed to build archive` error

When you get this error:

```
error: failed to build archive: failed to open object file: No such file or directory (os error 2)
```

This can happen when you compile with `lto = "fat"` but did not compile the sysroot with LTO.
(Not sure if that's the cause, since it could not be reproduced anymore. Maybe it happened when forgetting to set `FAT_LTO`.)

### ld: cannot find crtbegin.o

When compiling an executable with libgccjit, if the `*LIBRARY_PATH` variables point to the install directory, you will get the following errors:

```
ld: cannot find crtbegin.o: No such file or directory
ld: cannot find -lgcc: No such file or directory
ld: cannot find -lgcc: No such file or directory
libgccjit.so: error: error invoking gcc driver
```

To fix this, set the variables to `gcc-build/build/gcc`, as in the sketch below.
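A sketch of that fix, assuming the GCC build tree lives in `~/gcc-build` (adjust the path to your own layout):

```bash
# Point the search paths at the GCC build tree instead of the install directory.
export LIBRARY_PATH="$HOME/gcc-build/build/gcc"
export LD_LIBRARY_PATH="$HOME/gcc-build/build/gcc"
```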
52
compiler/rustc_codegen_gcc/doc/subtree.md
Normal file
@ -0,0 +1,52 @@
# git subtree sync

`rustc_codegen_gcc` is a subtree of the rust compiler. As such, it needs to be
synced from time to time to ensure changes that happened on their side are also
included on our side.

### How to install a forked git-subtree

Using git-subtree with `rustc` requires a patched git to make it work.
The PR that is needed is [here](https://github.com/gitgitgadget/git/pull/493).
Use the following instructions to install it:

```bash
git clone git@github.com:tqc/git.git
cd git
git checkout tqc/subtree
make
make install
cd contrib/subtree
make
cp git-subtree ~/bin
```

### Syncing with the rust compiler

Do a sync with this command:

```bash
PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ sync_branch_name
cd ../rustc_codegen_gcc
git checkout master
git pull
git checkout sync_branch_name
git merge master
```

To send the changes to the rust repo:

```bash
cd ../rust
git pull origin master
git checkout -b subtree-update_cg_gcc_YYYY-MM-DD
PATH="$HOME/bin:$PATH" ~/bin/git-subtree pull --prefix=compiler/rustc_codegen_gcc/ https://github.com/rust-lang/rustc_codegen_gcc.git master
git push

# Immediately merge the merge commit into cg_gcc to prevent merge conflicts when syncing from rust-lang/rust later.
PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ sync_branch_name
```

TODO: write a script that does the above.
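A rough sketch of what such a script could look like, built only from the commands listed above; the branch name and repository layout are assumptions and the script is untested:

```bash
#!/usr/bin/env bash
# Sketch of the sync flow described above; adjust paths and branch name to your setup.
set -e

BRANCH="sync_from_rust_$(date +%Y-%m-%d)"

# Push the rust-side history of the subtree into the local rustc_codegen_gcc clone.
PATH="$HOME/bin:$PATH" ~/bin/git-subtree push -P compiler/rustc_codegen_gcc/ ../rustc_codegen_gcc/ "$BRANCH"

# Merge master into the sync branch on the rustc_codegen_gcc side.
cd ../rustc_codegen_gcc
git checkout master
git pull
git checkout "$BRANCH"
git merge master
```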

https://rust-lang.zulipchat.com/#narrow/stream/301329-t-devtools/topic/subtree.20madness/near/258877725
72
compiler/rustc_codegen_gcc/doc/tips.md
Normal file
@ -0,0 +1,72 @@
# Tips

The following shows how to do various small things we encountered and thought could be useful.

### How to send arguments to the GCC linker

```
CG_RUSTFLAGS="-Clink-args=-save-temps -v" ../y.sh cargo build
```

### How to see the personality functions in the asm dump

```
CG_RUSTFLAGS="-Clink-arg=-save-temps -v -Clink-arg=-dA" ../y.sh cargo build
```

### How to see the LLVM IR for a sysroot crate

```
cargo build -v --target x86_64-unknown-linux-gnu -Zbuild-std
# Take the command from the output and add --emit=llvm-ir
```

### To prevent the linker from demangling symbols

Run with:

```
COLLECT_NO_DEMANGLE=1
```
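Presumably this is meant as an environment variable prefixed to the usual build command; a sketch (substitute whatever build command you normally run):

```bash
COLLECT_NO_DEMANGLE=1 ../y.sh cargo build
```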

### How to use a custom-built rustc

 * Build the stage2 compiler (`rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2`).
 * Clean and rebuild the codegen with `debug-current` in the file `rust-toolchain` (see the sketch below).
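A sketch of those two steps, assuming the compiler was built in the usual `build/x86_64-unknown-linux-gnu` layout and that `rust-toolchain` is the TOML file with a `channel = ...` line; the clean/rebuild commands are whatever you normally use:

```bash
# Register the locally built stage2 compiler as a rustup toolchain.
rustup toolchain link debug-current build/x86_64-unknown-linux-gnu/stage2

# Point rust-toolchain at it, then clean and rebuild the codegen backend as usual.
sed -i 's/^channel = .*/channel = "debug-current"/' rust-toolchain
```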

### How to use [mem-trace](https://github.com/antoyo/mem-trace)

`rustc` needs to be built without `jemalloc` so that `mem-trace` can overload `malloc`: since `jemalloc` is linked statically, an `LD_PRELOAD`-ed library won't get a chance to intercept the calls to `malloc`.

### How to generate GIMPLE

If you need to check what gccjit is generating (GIMPLE), then take a look at how to
generate it in [gimple.md](./doc/gimple.md).

### How to build a cross-compiling libgccjit

#### Building libgccjit

 * Follow the instructions on [this repo](https://github.com/cross-cg-gcc-tools/cross-gcc).

#### Configuring rustc_codegen_gcc

 * Run `./y.sh prepare --cross` so that the sysroot is patched for the cross-compiling case.
 * Set the path to the cross-compiling libgccjit in `gcc-path` (in `config.toml`).
 * Make sure you have the linker for your target (for instance `m68k-unknown-linux-gnu-gcc`) in your `$PATH`. Currently, the linker name is hardcoded as `$TARGET-gcc`. Specify the target when building the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu`.
 * Build your project by specifying the target: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target m68k-unknown-linux-gnu`.

If the target is not yet supported by the Rust compiler, create a [target specification file](https://docs.rust-embedded.org/embedonomicon/custom-target.html) (note that the `arch` specified in this file must be supported by the rust compiler).
Then, you can use it the following way:

 * Add the target specification file using `--target` as an **absolute** path to build the sysroot: `./y.sh build --target-triple m68k-unknown-linux-gnu --target $(pwd)/m68k-unknown-linux-gnu.json`
 * Build your project by specifying the target specification file: `OVERWRITE_TARGET_TRIPLE=m68k-unknown-linux-gnu ../y.sh cargo build --target path/to/m68k-unknown-linux-gnu.json`.

If you get the following error:

```
/usr/bin/ld: unrecognised emulation mode: m68kelf
```

Make sure you set `gcc-path` (in `config.toml`) to the install directory (see the example below).
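A sketch of that setting, assuming the cross GCC was installed under `/opt/cross-gcc`; the path is a placeholder for your own install prefix, and should point at the directory containing `libgccjit.so`:

```bash
# Write gcc-path into config.toml (or edit the file if it already has other settings).
echo 'gcc-path = "/opt/cross-gcc/lib/"' > config.toml
```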
1
compiler/rustc_codegen_gcc/libgccjit.version
Normal file
@ -0,0 +1 @@
b6f163f52
@ -20,8 +20,6 @@ codegen_gcc_dynamic_linking_with_lto =
|
||||
cannot prefer dynamic linking when performing LTO
|
||||
.note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO
|
||||
|
||||
codegen_gcc_load_bitcode = failed to load bitcode of module "{$name}"
|
||||
|
||||
codegen_gcc_lto_disallowed = lto can only be run for executables, cdylibs and static library outputs
|
||||
|
||||
codegen_gcc_lto_dylib = lto cannot be used for `dylib` crate type without `-Zdylib-lto`
|
||||
|
@ -39,6 +39,4 @@ index 42a26ae..5ac1042 100644
|
||||
+#![cfg(test)]
|
||||
#![feature(alloc_layout_extra)]
|
||||
#![feature(array_chunks)]
|
||||
#![feature(array_methods)]
|
||||
--
|
||||
2.21.0 (Apple Git-122)
|
||||
#![feature(array_windows)]
|
||||
|
@ -0,0 +1,24 @@
|
||||
From f4a31d2c57cdbd578b778ab70eb2a0cfb248652c Mon Sep 17 00:00:00 2001
|
||||
From: Antoni Boucher <bouanto@zoho.com>
|
||||
Date: Tue, 5 Mar 2024 12:39:44 -0500
|
||||
Subject: [PATCH] Remove #[deny(warnings)]
|
||||
|
||||
---
|
||||
src/lib.rs | 1 -
|
||||
1 file changed, 1 deletion(-)
|
||||
|
||||
diff --git a/src/lib.rs b/src/lib.rs
|
||||
index 8ade2881d5..e26c595e38 100644
|
||||
--- a/src/lib.rs
|
||||
+++ b/src/lib.rs
|
||||
@@ -47,7 +47,6 @@
|
||||
)]
|
||||
#![deny(missing_docs)]
|
||||
#![deny(missing_debug_implementations)]
|
||||
-#![doc(test(attr(allow(unused_variables), deny(warnings))))]
|
||||
#![no_std]
|
||||
#![cfg_attr(feature = "simd_support", feature(stdsimd, portable_simd))]
|
||||
#![cfg_attr(doc_cfg, feature(doc_cfg))]
|
||||
--
|
||||
2.44.0
|
||||
|
@ -21,19 +21,3 @@ index 5b21355..cb0c49b 100644
|
||||
|
||||
[dependencies]
|
||||
alloc = { path = "../alloc", public = true }
|
||||
diff --git a/library/test/Cargo.toml b/library/test/Cargo.toml
|
||||
index 91a1abd..a58c160 100644
|
||||
--- a/library/test/Cargo.toml
|
||||
+++ b/library/test/Cargo.toml
|
||||
@@ -4,7 +4,7 @@ version = "0.0.0"
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
-crate-type = ["dylib", "rlib"]
|
||||
+crate-type = ["rlib"]
|
||||
|
||||
[dependencies]
|
||||
getopts = { version = "0.2.21", features = ['rustc-dep-of-std'] }
|
||||
--
|
||||
2.42.0
|
||||
|
@ -1,3 +1,3 @@
|
||||
[toolchain]
|
||||
channel = "nightly-2023-11-17"
|
||||
channel = "nightly-2024-03-05"
|
||||
components = ["rust-src", "rustc-dev", "llvm-tools-preview"]
|
||||
|
@ -1,29 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
case $1 in
|
||||
"prepare")
|
||||
TOOLCHAIN=$(date +%Y-%m-%d)
|
||||
|
||||
echo "=> Installing new nightly"
|
||||
rustup toolchain install --profile minimal nightly-${TOOLCHAIN} # Sanity check to see if the nightly exists
|
||||
echo nightly-${TOOLCHAIN} > rust-toolchain
|
||||
|
||||
echo "=> Uninstalling all old nightlies"
|
||||
for nightly in $(rustup toolchain list | grep nightly | grep -v $TOOLCHAIN | grep -v nightly-x86_64); do
|
||||
rustup toolchain uninstall $nightly
|
||||
done
|
||||
|
||||
./clean_all.sh
|
||||
./y.sh prepare
|
||||
;;
|
||||
"commit")
|
||||
git add rust-toolchain
|
||||
git commit -m "Rustup to $(rustc -V)"
|
||||
;;
|
||||
*)
|
||||
echo "Unknown command '$1'"
|
||||
echo "Usage: ./rustup.sh prepare|commit"
|
||||
;;
|
||||
esac
|
@ -18,17 +18,16 @@ impl<'a, 'gcc, 'tcx> AbiBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
fn get_param(&mut self, index: usize) -> Self::Value {
|
||||
let func = self.current_func();
|
||||
let param = func.get_param(index as i32);
|
||||
let on_stack =
|
||||
if let Some(on_stack_param_indices) = self.on_stack_function_params.borrow().get(&func) {
|
||||
let on_stack = if let Some(on_stack_param_indices) =
|
||||
self.on_stack_function_params.borrow().get(&func)
|
||||
{
|
||||
on_stack_param_indices.contains(&index)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
false
|
||||
};
|
||||
if on_stack {
|
||||
param.to_lvalue().get_address(None)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
param.to_rvalue()
|
||||
}
|
||||
}
|
||||
@ -37,12 +36,13 @@ impl<'a, 'gcc, 'tcx> AbiBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
impl GccType for CastTarget {
|
||||
fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, '_>) -> Type<'gcc> {
|
||||
let rest_gcc_unit = self.rest.unit.gcc_type(cx);
|
||||
let (rest_count, rem_bytes) =
|
||||
if self.rest.unit.size.bytes() == 0 {
|
||||
let (rest_count, rem_bytes) = if self.rest.unit.size.bytes() == 0 {
|
||||
(0, 0)
|
||||
}
|
||||
else {
|
||||
(self.rest.total.bytes() / self.rest.unit.size.bytes(), self.rest.total.bytes() % self.rest.unit.size.bytes())
|
||||
} else {
|
||||
(
|
||||
self.rest.total.bytes() / self.rest.unit.size.bytes(),
|
||||
self.rest.total.bytes() % self.rest.unit.size.bytes(),
|
||||
)
|
||||
};
|
||||
|
||||
if self.prefix.iter().all(|x| x.is_none()) {
|
||||
@ -61,9 +61,7 @@ impl GccType for CastTarget {
|
||||
let mut args: Vec<_> = self
|
||||
.prefix
|
||||
.iter()
|
||||
.flat_map(|option_reg| {
|
||||
option_reg.map(|reg| reg.gcc_type(cx))
|
||||
})
|
||||
.flat_map(|option_reg| option_reg.map(|reg| reg.gcc_type(cx)))
|
||||
.chain((0..rest_count).map(|_| rest_gcc_unit))
|
||||
.collect();
|
||||
|
||||
@ -86,12 +84,10 @@ impl GccType for Reg {
|
||||
fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, '_>) -> Type<'gcc> {
|
||||
match self.kind {
|
||||
RegKind::Integer => cx.type_ix(self.size.bits()),
|
||||
RegKind::Float => {
|
||||
match self.size.bits() {
|
||||
RegKind::Float => match self.size.bits() {
|
||||
32 => cx.type_f32(),
|
||||
64 => cx.type_f64(),
|
||||
_ => bug!("unsupported float: {:?}", self),
|
||||
}
|
||||
},
|
||||
RegKind::Vector => unimplemented!(), //cx.type_vector(cx.type_i8(), self.size.bytes()),
|
||||
}
|
||||
@ -119,11 +115,10 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
|
||||
// This capacity calculation is approximate.
|
||||
let mut argument_tys = Vec::with_capacity(
|
||||
self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 }
|
||||
self.args.len() + if let PassMode::Indirect { .. } = self.ret.mode { 1 } else { 0 },
|
||||
);
|
||||
|
||||
let return_type =
|
||||
match self.ret.mode {
|
||||
let return_type = match self.ret.mode {
|
||||
PassMode::Ignore => cx.type_void(),
|
||||
PassMode::Direct(_) | PassMode::Pair(..) => self.ret.layout.immediate_gcc_type(cx),
|
||||
PassMode::Cast { ref cast, .. } => cast.gcc_type(cx),
|
||||
@ -149,17 +144,23 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
ty
|
||||
};
|
||||
#[cfg(not(feature = "master"))]
|
||||
let apply_attrs = |ty: Type<'gcc>, _attrs: &ArgAttributes, _arg_index: usize| {
|
||||
ty
|
||||
};
|
||||
let apply_attrs = |ty: Type<'gcc>, _attrs: &ArgAttributes, _arg_index: usize| ty;
|
||||
|
||||
for arg in self.args.iter() {
|
||||
let arg_ty = match arg.mode {
|
||||
PassMode::Ignore => continue,
|
||||
PassMode::Pair(a, b) => {
|
||||
let arg_pos = argument_tys.len();
|
||||
argument_tys.push(apply_attrs(arg.layout.scalar_pair_element_gcc_type(cx, 0), &a, arg_pos));
|
||||
argument_tys.push(apply_attrs(arg.layout.scalar_pair_element_gcc_type(cx, 1), &b, arg_pos + 1));
|
||||
argument_tys.push(apply_attrs(
|
||||
arg.layout.scalar_pair_element_gcc_type(cx, 0),
|
||||
&a,
|
||||
arg_pos,
|
||||
));
|
||||
argument_tys.push(apply_attrs(
|
||||
arg.layout.scalar_pair_element_gcc_type(cx, 1),
|
||||
&b,
|
||||
arg_pos + 1,
|
||||
));
|
||||
continue;
|
||||
}
|
||||
PassMode::Cast { ref cast, pad_i32 } => {
|
||||
@ -174,14 +175,17 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
// This is a "byval" argument, so we don't apply the `restrict` attribute on it.
|
||||
on_stack_param_indices.insert(argument_tys.len());
|
||||
arg.memory_ty(cx)
|
||||
},
|
||||
PassMode::Direct(attrs) => apply_attrs(arg.layout.immediate_gcc_type(cx), &attrs, argument_tys.len()),
|
||||
}
|
||||
PassMode::Direct(attrs) => {
|
||||
apply_attrs(arg.layout.immediate_gcc_type(cx), &attrs, argument_tys.len())
|
||||
}
|
||||
PassMode::Indirect { attrs, meta_attrs: None, on_stack: false } => {
|
||||
apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs, argument_tys.len())
|
||||
}
|
||||
PassMode::Indirect { attrs, meta_attrs: Some(meta_attrs), on_stack } => {
|
||||
assert!(!on_stack);
|
||||
let ty = apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs, argument_tys.len());
|
||||
let ty =
|
||||
apply_attrs(cx.type_ptr_to(arg.memory_ty(cx)), &attrs, argument_tys.len());
|
||||
apply_attrs(ty, &meta_attrs, argument_tys.len())
|
||||
}
|
||||
};
|
||||
@ -207,15 +211,14 @@ impl<'gcc, 'tcx> FnAbiGccExt<'gcc, 'tcx> for FnAbi<'tcx, Ty<'tcx>> {
|
||||
|
||||
fn ptr_to_gcc_type(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
|
||||
// FIXME(antoyo): Should we do something with `FnAbiGcc::fn_attributes`?
|
||||
let FnAbiGcc {
|
||||
return_type,
|
||||
arguments_type,
|
||||
is_c_variadic,
|
||||
let FnAbiGcc { return_type, arguments_type, is_c_variadic, on_stack_param_indices, .. } =
|
||||
self.gcc_type(cx);
|
||||
let pointer_type =
|
||||
cx.context.new_function_pointer_type(None, return_type, &arguments_type, is_c_variadic);
|
||||
cx.on_stack_params.borrow_mut().insert(
|
||||
pointer_type.dyncast_function_ptr_type().expect("function ptr type"),
|
||||
on_stack_param_indices,
|
||||
..
|
||||
} = self.gcc_type(cx);
|
||||
let pointer_type = cx.context.new_function_pointer_type(None, return_type, &arguments_type, is_c_variadic);
|
||||
cx.on_stack_params.borrow_mut().insert(pointer_type.dyncast_function_ptr_type().expect("function ptr type"), on_stack_param_indices);
|
||||
);
|
||||
pointer_type
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use gccjit::FnAttribute;
|
||||
use gccjit::{Context, FunctionType, GlobalKind, ToRValue, Type};
|
||||
use rustc_ast::expand::allocator::{
|
||||
@ -11,10 +11,15 @@ use rustc_session::config::OomStrategy;
|
||||
|
||||
use crate::GccContext;
|
||||
|
||||
pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) {
|
||||
pub(crate) unsafe fn codegen(
|
||||
tcx: TyCtxt<'_>,
|
||||
mods: &mut GccContext,
|
||||
_module_name: &str,
|
||||
kind: AllocatorKind,
|
||||
alloc_error_handler_kind: AllocatorKind,
|
||||
) {
|
||||
let context = &mods.context;
|
||||
let usize =
|
||||
match tcx.sess.target.pointer_width {
|
||||
let usize = match tcx.sess.target.pointer_width {
|
||||
16 => context.new_type::<u16>(),
|
||||
32 => context.new_type::<u32>(),
|
||||
64 => context.new_type::<u64>(),
|
||||
@ -58,7 +63,7 @@ pub(crate) unsafe fn codegen(tcx: TyCtxt<'_>, mods: &mut GccContext, _module_nam
|
||||
tcx,
|
||||
context,
|
||||
"__rust_alloc_error_handler",
|
||||
&alloc_error_handler_name(alloc_error_handler_kind),
|
||||
alloc_error_handler_name(alloc_error_handler_kind),
|
||||
&[usize, usize],
|
||||
None,
|
||||
);
|
||||
@ -85,24 +90,42 @@ fn create_wrapper_function(
|
||||
) {
|
||||
let void = context.new_type::<()>();
|
||||
|
||||
let args: Vec<_> = types.iter().enumerate()
|
||||
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
|
||||
let args: Vec<_> = types
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, typ)| context.new_parameter(None, *typ, format!("param{}", index)))
|
||||
.collect();
|
||||
let func = context.new_function(None, FunctionType::Exported, output.unwrap_or(void), &args, from_name, false);
|
||||
let func = context.new_function(
|
||||
None,
|
||||
FunctionType::Exported,
|
||||
output.unwrap_or(void),
|
||||
&args,
|
||||
from_name,
|
||||
false,
|
||||
);
|
||||
|
||||
if tcx.sess.default_hidden_visibility() {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
|
||||
}
|
||||
if tcx.sess.must_emit_unwind_tables() {
|
||||
// TODO(antoyo): emit unwind tables.
|
||||
}
|
||||
|
||||
let args: Vec<_> = types.iter().enumerate()
|
||||
.map(|(index, typ)| context.new_parameter(None, *typ, &format!("param{}", index)))
|
||||
let args: Vec<_> = types
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, typ)| context.new_parameter(None, *typ, format!("param{}", index)))
|
||||
.collect();
|
||||
let callee = context.new_function(None, FunctionType::Extern, output.unwrap_or(void), &args, to_name, false);
|
||||
#[cfg(feature="master")]
|
||||
let callee = context.new_function(
|
||||
None,
|
||||
FunctionType::Extern,
|
||||
output.unwrap_or(void),
|
||||
&args,
|
||||
to_name,
|
||||
false,
|
||||
);
|
||||
#[cfg(feature = "master")]
|
||||
callee.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
|
||||
|
||||
let block = func.new_block("entry");
|
||||
@ -116,8 +139,7 @@ fn create_wrapper_function(
|
||||
//llvm::LLVMSetTailCall(ret, True);
|
||||
if output.is_some() {
|
||||
block.end_with_return(None, ret);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
block.end_with_void_return(None);
|
||||
}
|
||||
|
||||
|
@ -2,7 +2,10 @@ use gccjit::{LValue, RValue, ToRValue, Type};
|
||||
use rustc_ast::ast::{InlineAsmOptions, InlineAsmTemplatePiece};
|
||||
use rustc_codegen_ssa::mir::operand::OperandValue;
|
||||
use rustc_codegen_ssa::mir::place::PlaceRef;
|
||||
use rustc_codegen_ssa::traits::{AsmBuilderMethods, AsmMethods, BaseTypeMethods, BuilderMethods, GlobalAsmOperandRef, InlineAsmOperandRef};
|
||||
use rustc_codegen_ssa::traits::{
|
||||
AsmBuilderMethods, AsmMethods, BaseTypeMethods, BuilderMethods, GlobalAsmOperandRef,
|
||||
InlineAsmOperandRef,
|
||||
};
|
||||
|
||||
use rustc_middle::{bug, ty::Instance};
|
||||
use rustc_span::Span;
|
||||
@ -11,11 +14,10 @@ use rustc_target::asm::*;
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::builder::Builder;
|
||||
use crate::callee::get_fn;
|
||||
use crate::context::CodegenCx;
|
||||
use crate::errors::UnwindingInlineAsm;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
use crate::callee::get_fn;
|
||||
|
||||
|
||||
// Rust asm! and GCC Extended Asm semantics differ substantially.
|
||||
//
|
||||
@ -68,7 +70,6 @@ use crate::callee::get_fn;
|
||||
const ATT_SYNTAX_INS: &str = ".att_syntax noprefix\n\t";
|
||||
const INTEL_SYNTAX_INS: &str = "\n\t.intel_syntax noprefix";
|
||||
|
||||
|
||||
struct AsmOutOperand<'a, 'tcx, 'gcc> {
|
||||
rust_idx: usize,
|
||||
constraint: &'a str,
|
||||
@ -76,13 +77,13 @@ struct AsmOutOperand<'a, 'tcx, 'gcc> {
|
||||
readwrite: bool,
|
||||
|
||||
tmp_var: LValue<'gcc>,
|
||||
out_place: Option<PlaceRef<'tcx, RValue<'gcc>>>
|
||||
out_place: Option<PlaceRef<'tcx, RValue<'gcc>>>,
|
||||
}
|
||||
|
||||
struct AsmInOperand<'a, 'tcx> {
|
||||
rust_idx: usize,
|
||||
constraint: Cow<'a, str>,
|
||||
val: RValue<'tcx>
|
||||
val: RValue<'tcx>,
|
||||
}
|
||||
|
||||
impl AsmOutOperand<'_, '_, '_> {
|
||||
@ -95,23 +96,28 @@ impl AsmOutOperand<'_, '_, '_> {
|
||||
res.push('&');
|
||||
}
|
||||
|
||||
res.push_str(&self.constraint);
|
||||
res.push_str(self.constraint);
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
enum ConstraintOrRegister {
|
||||
Constraint(&'static str),
|
||||
Register(&'static str)
|
||||
Register(&'static str),
|
||||
}
|
||||
|
||||
|
||||
impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
fn codegen_inline_asm(&mut self, template: &[InlineAsmTemplatePiece], rust_operands: &[InlineAsmOperandRef<'tcx, Self>], options: InlineAsmOptions, span: &[Span], instance: Instance<'_>, _dest_catch_funclet: Option<(Self::BasicBlock, Self::BasicBlock, Option<&Self::Funclet>)>) {
|
||||
fn codegen_inline_asm(
|
||||
&mut self,
|
||||
template: &[InlineAsmTemplatePiece],
|
||||
rust_operands: &[InlineAsmOperandRef<'tcx, Self>],
|
||||
options: InlineAsmOptions,
|
||||
span: &[Span],
|
||||
instance: Instance<'_>,
|
||||
_dest_catch_funclet: Option<(Self::BasicBlock, Self::BasicBlock, Option<&Self::Funclet>)>,
|
||||
) {
|
||||
if options.contains(InlineAsmOptions::MAY_UNWIND) {
|
||||
self.sess().dcx()
|
||||
.create_err(UnwindingInlineAsm { span: span[0] })
|
||||
.emit();
|
||||
self.sess().dcx().create_err(UnwindingInlineAsm { span: span[0] }).emit();
|
||||
return;
|
||||
}
|
||||
|
||||
@ -157,32 +163,40 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
use ConstraintOrRegister::*;
|
||||
|
||||
let (constraint, ty) = match (reg_to_gcc(reg), place) {
|
||||
(Constraint(constraint), Some(place)) => (constraint, place.layout.gcc_type(self.cx)),
|
||||
(Constraint(constraint), Some(place)) => {
|
||||
(constraint, place.layout.gcc_type(self.cx))
|
||||
}
|
||||
// When `reg` is a class and not an explicit register but the out place is not specified,
|
||||
// we need to create an unused output variable to assign the output to. This var
|
||||
// needs to be of a type that's "compatible" with the register class, but specific type
|
||||
// doesn't matter.
|
||||
(Constraint(constraint), None) => (constraint, dummy_output_type(self.cx, reg.reg_class())),
|
||||
(Constraint(constraint), None) => {
|
||||
(constraint, dummy_output_type(self.cx, reg.reg_class()))
|
||||
}
|
||||
(Register(_), Some(_)) => {
|
||||
// left for the next pass
|
||||
continue
|
||||
},
|
||||
continue;
|
||||
}
|
||||
(Register(reg_name), None) => {
|
||||
// `clobber_abi` can add lots of clobbers that are not supported by the target,
|
||||
// such as AVX-512 registers, so we just ignore unsupported registers
|
||||
let is_target_supported = reg.reg_class().supported_types(asm_arch).iter()
|
||||
.any(|&(_, feature)| {
|
||||
let is_target_supported =
|
||||
reg.reg_class().supported_types(asm_arch).iter().any(
|
||||
|&(_, feature)| {
|
||||
if let Some(feature) = feature {
|
||||
self.tcx.asm_target_features(instance.def_id()).contains(&feature)
|
||||
self.tcx
|
||||
.asm_target_features(instance.def_id())
|
||||
.contains(&feature)
|
||||
} else {
|
||||
true // Register class is unconditionally supported
|
||||
}
|
||||
});
|
||||
},
|
||||
);
|
||||
|
||||
if is_target_supported && !clobbers.contains(®_name) {
|
||||
clobbers.push(reg_name);
|
||||
}
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
@ -193,7 +207,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
late,
|
||||
readwrite: false,
|
||||
tmp_var,
|
||||
out_place: place
|
||||
out_place: place,
|
||||
});
|
||||
}
|
||||
|
||||
@ -202,22 +216,21 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
inputs.push(AsmInOperand {
|
||||
constraint: Cow::Borrowed(constraint),
|
||||
rust_idx,
|
||||
val: value.immediate()
|
||||
val: value.immediate(),
|
||||
});
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// left for the next pass
|
||||
continue
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
InlineAsmOperandRef::InOut { reg, late, in_value, out_place } => {
|
||||
let constraint = if let ConstraintOrRegister::Constraint(constraint) = reg_to_gcc(reg) {
|
||||
let constraint =
|
||||
if let ConstraintOrRegister::Constraint(constraint) = reg_to_gcc(reg) {
|
||||
constraint
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// left for the next pass
|
||||
continue
|
||||
continue;
|
||||
};
|
||||
|
||||
// Rustc frontend guarantees that input and output types are "compatible",
|
||||
@ -249,7 +262,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
inputs.push(AsmInOperand {
|
||||
constraint,
|
||||
rust_idx,
|
||||
val: in_value.immediate()
|
||||
val: in_value.immediate(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@ -267,7 +280,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
InlineAsmOperandRef::SymStatic { def_id } => {
|
||||
// TODO(@Amanieu): Additional mangling is needed on
|
||||
// some targets to add a leading underscore (Mach-O).
|
||||
constants_len += self.tcx.symbol_name(Instance::mono(self.tcx, def_id)).name.len();
|
||||
constants_len +=
|
||||
self.tcx.symbol_name(Instance::mono(self.tcx, def_id)).name.len();
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -280,10 +294,9 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
if let ConstraintOrRegister::Register(reg_name) = reg_to_gcc(reg) {
|
||||
let out_place = if let Some(place) = place {
|
||||
place
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// processed in the previous pass
|
||||
continue
|
||||
continue;
|
||||
};
|
||||
|
||||
let ty = out_place.layout.gcc_type(self.cx);
|
||||
@ -291,12 +304,12 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
tmp_var.set_register_name(reg_name);
|
||||
|
||||
outputs.push(AsmOutOperand {
|
||||
constraint: "r".into(),
|
||||
constraint: "r",
|
||||
rust_idx,
|
||||
late,
|
||||
readwrite: false,
|
||||
tmp_var,
|
||||
out_place: Some(out_place)
|
||||
out_place: Some(out_place),
|
||||
});
|
||||
}
|
||||
|
||||
@ -314,7 +327,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
inputs.push(AsmInOperand {
|
||||
constraint: "r".into(),
|
||||
rust_idx,
|
||||
val: reg_var.to_rvalue()
|
||||
val: reg_var.to_rvalue(),
|
||||
});
|
||||
}
|
||||
|
||||
@ -330,7 +343,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
tmp_var.set_register_name(reg_name);
|
||||
|
||||
outputs.push(AsmOutOperand {
|
||||
constraint: "r".into(),
|
||||
constraint: "r",
|
||||
rust_idx,
|
||||
late,
|
||||
readwrite: false,
|
||||
@ -342,7 +355,7 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
inputs.push(AsmInOperand {
|
||||
constraint,
|
||||
rust_idx,
|
||||
val: in_value.immediate()
|
||||
val: in_value.immediate(),
|
||||
});
|
||||
}
|
||||
|
||||
@ -373,7 +386,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
// 3. Build the template string
|
||||
|
||||
let mut template_str = String::with_capacity(estimate_template_length(template, constants_len, att_dialect));
|
||||
let mut template_str =
|
||||
String::with_capacity(estimate_template_length(template, constants_len, att_dialect));
|
||||
if att_dialect {
|
||||
template_str.push_str(ATT_SYNTAX_INS);
|
||||
}
|
||||
@ -383,15 +397,14 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
InlineAsmTemplatePiece::String(ref string) => {
|
||||
for char in string.chars() {
|
||||
// TODO(antoyo): might also need to escape | if rustc doesn't do it.
|
||||
let escaped_char =
|
||||
match char {
|
||||
let escaped_char = match char {
|
||||
'%' => "%%",
|
||||
'{' => "%{",
|
||||
'}' => "%}",
|
||||
_ => {
|
||||
template_str.push(char);
|
||||
continue;
|
||||
},
|
||||
}
|
||||
};
|
||||
template_str.push_str(escaped_char);
|
||||
}
|
||||
@ -410,7 +423,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
match rust_operands[operand_idx] {
|
||||
InlineAsmOperandRef::Out { reg, .. } => {
|
||||
let modifier = modifier_to_gcc(asm_arch, reg.reg_class(), modifier);
|
||||
let gcc_index = outputs.iter()
|
||||
let gcc_index = outputs
|
||||
.iter()
|
||||
.position(|op| operand_idx == op.rust_idx)
|
||||
.expect("wrong rust index");
|
||||
push_to_template(modifier, gcc_index);
|
||||
@ -418,7 +432,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
InlineAsmOperandRef::In { reg, .. } => {
|
||||
let modifier = modifier_to_gcc(asm_arch, reg.reg_class(), modifier);
|
||||
let in_gcc_index = inputs.iter()
|
||||
let in_gcc_index = inputs
|
||||
.iter()
|
||||
.position(|op| operand_idx == op.rust_idx)
|
||||
.expect("wrong rust index");
|
||||
let gcc_index = in_gcc_index + outputs.len();
|
||||
@ -429,7 +444,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
let modifier = modifier_to_gcc(asm_arch, reg.reg_class(), modifier);
|
||||
|
||||
// The input register is tied to the output, so we can just use the index of the output register
|
||||
let gcc_index = outputs.iter()
|
||||
let gcc_index = outputs
|
||||
.iter()
|
||||
.position(|op| operand_idx == op.rust_idx)
|
||||
.expect("wrong rust index");
|
||||
push_to_template(modifier, gcc_index);
|
||||
@ -496,7 +512,8 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
}
|
||||
if options.contains(InlineAsmOptions::NORETURN) {
|
||||
let builtin_unreachable = self.context.get_builtin_function("__builtin_unreachable");
|
||||
let builtin_unreachable: RValue<'gcc> = unsafe { std::mem::transmute(builtin_unreachable) };
|
||||
let builtin_unreachable: RValue<'gcc> =
|
||||
unsafe { std::mem::transmute(builtin_unreachable) };
|
||||
self.call(self.type_void(), None, None, builtin_unreachable, &[], None);
|
||||
}
|
||||
|
||||
@ -517,12 +534,16 @@ impl<'a, 'gcc, 'tcx> AsmBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn estimate_template_length(template: &[InlineAsmTemplatePiece], constants_len: usize, att_dialect: bool) -> usize {
|
||||
let len: usize = template.iter().map(|piece| {
|
||||
fn estimate_template_length(
|
||||
template: &[InlineAsmTemplatePiece],
|
||||
constants_len: usize,
|
||||
att_dialect: bool,
|
||||
) -> usize {
|
||||
let len: usize = template
|
||||
.iter()
|
||||
.map(|piece| {
|
||||
match *piece {
|
||||
InlineAsmTemplatePiece::String(ref string) => {
|
||||
string.len()
|
||||
}
|
||||
InlineAsmTemplatePiece::String(ref string) => string.len(),
|
||||
InlineAsmTemplatePiece::Placeholder { .. } => {
|
||||
// '%' + 1 char modifier + 1 char index
|
||||
3
|
||||
@ -562,7 +583,7 @@ fn reg_to_gcc(reg: InlineAsmRegOrRegClass) -> ConstraintOrRegister {
|
||||
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
},
|
||||
}
|
||||
// They can be retrieved from https://gcc.gnu.org/onlinedocs/gcc/Machine-Constraints.html
|
||||
InlineAsmRegOrRegClass::RegClass(reg) => match reg {
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => "r",
|
||||
@ -610,7 +631,7 @@ fn reg_to_gcc(reg: InlineAsmRegOrRegClass) -> ConstraintOrRegister {
|
||||
InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::cr)
|
||||
| InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::xer) => {
|
||||
unreachable!("clobber-only")
|
||||
},
|
||||
}
|
||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::reg) => "r",
|
||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::freg) => "f",
|
||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::vreg) => {
|
||||
@ -637,7 +658,7 @@ fn reg_to_gcc(reg: InlineAsmRegOrRegClass) -> ConstraintOrRegister {
|
||||
InlineAsmRegClass::S390x(S390xInlineAsmRegClass::reg_addr) => "a",
|
||||
InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => "f",
|
||||
InlineAsmRegClass::Err => unreachable!(),
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
ConstraintOrRegister::Constraint(constraint)
|
||||
@ -653,7 +674,7 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
|
||||
| InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
|
||||
unimplemented!()
|
||||
}
|
||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::reg)=> cx.type_i32(),
|
||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::reg) => cx.type_i32(),
|
||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg)
|
||||
| InlineAsmRegClass::Arm(ArmInlineAsmRegClass::sreg_low16) => cx.type_f32(),
|
||||
InlineAsmRegClass::Arm(ArmInlineAsmRegClass::dreg)
|
||||
@ -686,7 +707,7 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
|
||||
InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::cr)
|
||||
| InlineAsmRegClass::PowerPC(PowerPCInlineAsmRegClass::xer) => {
|
||||
unreachable!("clobber-only")
|
||||
},
|
||||
}
|
||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::reg) => cx.type_i32(),
|
||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::freg) => cx.type_f32(),
|
||||
InlineAsmRegClass::RiscV(RiscVInlineAsmRegClass::vreg) => cx.type_f32(),
|
||||
@ -704,9 +725,9 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
|
||||
InlineAsmRegClass::Wasm(WasmInlineAsmRegClass::local) => cx.type_i32(),
|
||||
InlineAsmRegClass::SpirV(SpirVInlineAsmRegClass::reg) => {
|
||||
bug!("LLVM backend does not support SPIR-V")
|
||||
},
|
||||
}
|
||||
InlineAsmRegClass::S390x(
|
||||
S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr
|
||||
S390xInlineAsmRegClass::reg | S390xInlineAsmRegClass::reg_addr,
|
||||
) => cx.type_i32(),
|
||||
InlineAsmRegClass::S390x(S390xInlineAsmRegClass::freg) => cx.type_f64(),
|
||||
InlineAsmRegClass::Err => unreachable!(),
|
||||
@ -714,7 +735,13 @@ fn dummy_output_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, reg: InlineAsmRegCl
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> AsmMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn codegen_global_asm(&self, template: &[InlineAsmTemplatePiece], operands: &[GlobalAsmOperandRef<'tcx>], options: InlineAsmOptions, _line_spans: &[Span]) {
|
||||
fn codegen_global_asm(
|
||||
&self,
|
||||
template: &[InlineAsmTemplatePiece],
|
||||
operands: &[GlobalAsmOperandRef<'tcx>],
|
||||
options: InlineAsmOptions,
|
||||
_line_spans: &[Span],
|
||||
) {
|
||||
let asm_arch = self.tcx.sess.asm_arch.unwrap();
|
||||
|
||||
// Default to Intel syntax on x86
|
||||
@ -732,15 +759,17 @@ impl<'gcc, 'tcx> AsmMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
let mut index = 0;
|
||||
while index < string.len() {
|
||||
// NOTE: gcc does not allow inline comment, so remove them.
|
||||
let comment_index = string[index..].find("//")
|
||||
let comment_index = string[index..]
|
||||
.find("//")
|
||||
.map(|comment_index| comment_index + index)
|
||||
.unwrap_or(string.len());
|
||||
template_str.push_str(&string[index..comment_index]);
|
||||
index = string[comment_index..].find('\n')
|
||||
index = string[comment_index..]
|
||||
.find('\n')
|
||||
.map(|index| index + comment_index)
|
||||
.unwrap_or(string.len());
|
||||
}
|
||||
},
|
||||
}
|
||||
InlineAsmTemplatePiece::Placeholder { operand_idx, modifier: _, span: _ } => {
|
||||
match operands[operand_idx] {
|
||||
GlobalAsmOperandRef::Const { ref string } => {
|
||||
@ -782,14 +811,22 @@ impl<'gcc, 'tcx> AsmMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn modifier_to_gcc(arch: InlineAsmArch, reg: InlineAsmRegClass, modifier: Option<char>) -> Option<char> {
|
||||
fn modifier_to_gcc(
|
||||
arch: InlineAsmArch,
|
||||
reg: InlineAsmRegClass,
|
||||
modifier: Option<char>,
|
||||
) -> Option<char> {
|
||||
// The modifiers can be retrieved from
|
||||
// https://gcc.gnu.org/onlinedocs/gcc/Modifiers.html#Modifiers
|
||||
match reg {
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => modifier,
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg)
|
||||
| InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
|
||||
if modifier == Some('v') { None } else { modifier }
|
||||
if modifier == Some('v') {
|
||||
None
|
||||
} else {
|
||||
modifier
|
||||
}
|
||||
}
|
||||
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::preg) => {
|
||||
unreachable!("clobber-only")
|
||||
@ -821,7 +858,13 @@ fn modifier_to_gcc(arch: InlineAsmArch, reg: InlineAsmRegClass, modifier: Option
|
||||
}
|
||||
InlineAsmRegClass::X86(X86InlineAsmRegClass::reg)
|
||||
| InlineAsmRegClass::X86(X86InlineAsmRegClass::reg_abcd) => match modifier {
|
||||
None => if arch == InlineAsmArch::X86_64 { Some('q') } else { Some('k') },
|
||||
None => {
|
||||
if arch == InlineAsmArch::X86_64 {
|
||||
Some('q')
|
||||
} else {
|
||||
Some('k')
|
||||
}
|
||||
}
|
||||
Some('l') => Some('b'),
|
||||
Some('h') => Some('h'),
|
||||
Some('x') => Some('w'),
|
||||
|
@ -1,21 +1,24 @@
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use gccjit::FnAttribute;
|
||||
use gccjit::Function;
|
||||
use rustc_attr::InstructionSetAttr;
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use rustc_attr::InlineAttr;
|
||||
use rustc_middle::ty;
|
||||
#[cfg(feature="master")]
|
||||
use rustc_attr::InstructionSetAttr;
|
||||
#[cfg(feature = "master")]
|
||||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::ty;
|
||||
use rustc_span::symbol::sym;
|
||||
|
||||
use crate::{context::CodegenCx, errors::TiedTargetFeatures};
|
||||
use crate::gcc_util::{check_tied_features, to_gcc_features};
|
||||
use crate::{context::CodegenCx, errors::TiedTargetFeatures};
|
||||
|
||||
/// Get GCC attribute for the provided inline heuristic.
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
#[inline]
|
||||
fn inline_attr<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, inline: InlineAttr) -> Option<FnAttribute<'gcc>> {
|
||||
fn inline_attr<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
inline: InlineAttr,
|
||||
) -> Option<FnAttribute<'gcc>> {
|
||||
match inline {
|
||||
InlineAttr::Hint => Some(FnAttribute::Inline),
|
||||
InlineAttr::Always => Some(FnAttribute::AlwaysInline),
|
||||
@ -34,22 +37,20 @@ fn inline_attr<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, inline: InlineAttr) -> Op
|
||||
/// attributes.
|
||||
pub fn from_fn_attrs<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
#[cfg_attr(not(feature="master"), allow(unused_variables))]
|
||||
func: Function<'gcc>,
|
||||
#[cfg_attr(not(feature = "master"), allow(unused_variables))] func: Function<'gcc>,
|
||||
instance: ty::Instance<'tcx>,
|
||||
) {
|
||||
let codegen_fn_attrs = cx.tcx.codegen_fn_attrs(instance.def_id());
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
{
|
||||
let inline =
|
||||
if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
|
||||
let inline = if codegen_fn_attrs.flags.contains(CodegenFnAttrFlags::NAKED) {
|
||||
InlineAttr::Never
|
||||
}
|
||||
else if codegen_fn_attrs.inline == InlineAttr::None && instance.def.requires_inline(cx.tcx) {
|
||||
} else if codegen_fn_attrs.inline == InlineAttr::None
|
||||
&& instance.def.requires_inline(cx.tcx)
|
||||
{
|
||||
InlineAttr::Hint
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
codegen_fn_attrs.inline
|
||||
};
|
||||
if let Some(attr) = inline_attr(cx, inline) {
|
||||
@ -70,18 +71,21 @@ pub fn from_fn_attrs<'gcc, 'tcx>(
|
||||
}
|
||||
}
|
||||
|
||||
let function_features =
|
||||
codegen_fn_attrs.target_features.iter().map(|features| features.as_str()).collect::<Vec<&str>>();
|
||||
let function_features = codegen_fn_attrs
|
||||
.target_features
|
||||
.iter()
|
||||
.map(|features| features.as_str())
|
||||
.collect::<Vec<&str>>();
|
||||
|
||||
if let Some(features) = check_tied_features(cx.tcx.sess, &function_features.iter().map(|features| (*features, true)).collect()) {
|
||||
let span = cx.tcx
|
||||
if let Some(features) = check_tied_features(
|
||||
cx.tcx.sess,
|
||||
&function_features.iter().map(|features| (*features, true)).collect(),
|
||||
) {
|
||||
let span = cx
|
||||
.tcx
|
||||
.get_attr(instance.def_id(), sym::target_feature)
|
||||
.map_or_else(|| cx.tcx.def_span(instance.def_id()), |a| a.span);
|
||||
cx.tcx.dcx().create_err(TiedTargetFeatures {
|
||||
features: features.join(", "),
|
||||
span,
|
||||
})
|
||||
.emit();
|
||||
cx.tcx.dcx().create_err(TiedTargetFeatures { features: features.join(", "), span }).emit();
|
||||
return;
|
||||
}
|
||||
|
||||
@ -105,24 +109,25 @@ pub fn from_fn_attrs<'gcc, 'tcx>(
|
||||
// compiling Rust for Linux:
|
||||
// SSE register return with SSE disabled
|
||||
// TODO(antoyo): support soft-float and retpoline-external-thunk.
|
||||
if feature.contains("soft-float") || feature.contains("retpoline-external-thunk") || *feature == "-sse" {
|
||||
if feature.contains("soft-float")
|
||||
|| feature.contains("retpoline-external-thunk")
|
||||
|| *feature == "-sse"
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
if feature.starts_with('-') {
|
||||
Some(format!("no{}", feature))
|
||||
}
|
||||
else if feature.starts_with('+') {
|
||||
} else if feature.starts_with('+') {
|
||||
Some(feature[1..].to_string())
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
Some(feature.to_string())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(",");
|
||||
if !target_features.is_empty() {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Target(&target_features));
|
||||
}
|
||||
}
|
||||
|
@ -1,7 +1,6 @@
|
||||
/// GCC requires to use the same toolchain for the whole compilation when doing LTO.
|
||||
/// So, we need the same version/commit of the linker (gcc) and lto front-end binaries (lto1,
|
||||
/// lto-wrapper, liblto_plugin.so).
|
||||
|
||||
// FIXME(antoyo): the executables compiled with LTO are bigger than those compiled without LTO.
|
||||
// Since it is the opposite for cg_llvm, check if this is normal.
|
||||
//
|
||||
@ -17,7 +16,6 @@
|
||||
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNtCs5JWOrf9uCus_5rayon11thread_pool19WORKER_THREAD_STATE7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
|
||||
// /usr/bin/ld: warning: type of symbol `_RNvNvNvNvNtNtNtCsAj5i4SGTR7_3std4sync4mpmc5waker17current_thread_id5DUMMY7___getit5___KEY' changed from 1 to 6 in /tmp/ccKeUSiR.ltrans0.ltrans.o
|
||||
// /usr/bin/ld: warning: incremental linking of LTO and non-LTO objects; using -flinker-output=nolto-rel which will bypass whole program optimization
|
||||
|
||||
use std::ffi::CString;
|
||||
use std::fs::{self, File};
|
||||
use std::path::{Path, PathBuf};
|
||||
@ -30,18 +28,16 @@ use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput};
|
||||
use rustc_codegen_ssa::traits::*;
|
||||
use rustc_codegen_ssa::{looks_like_rust_object_file, ModuleCodegen, ModuleKind};
|
||||
use rustc_data_structures::memmap::Mmap;
|
||||
use rustc_errors::{FatalError, DiagCtxt};
|
||||
use rustc_errors::{DiagCtxt, FatalError};
|
||||
use rustc_hir::def_id::LOCAL_CRATE;
|
||||
use rustc_middle::dep_graph::WorkProduct;
|
||||
use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel};
|
||||
use rustc_session::config::{CrateType, Lto};
|
||||
use tempfile::{TempDir, tempdir};
|
||||
use tempfile::{tempdir, TempDir};
|
||||
|
||||
use crate::back::write::save_temp_bitcode;
|
||||
use crate::errors::{
|
||||
DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib,
|
||||
};
|
||||
use crate::{GccCodegenBackend, GccContext, to_gcc_opt_level};
|
||||
use crate::errors::{DynamicLinkingWithLTO, LtoBitcodeFromRlib, LtoDisallowed, LtoDylib};
|
||||
use crate::{to_gcc_opt_level, GccCodegenBackend, GccContext};
|
||||
|
||||
/// We keep track of the computed LTO cache keys from the previous
|
||||
/// session to determine which CGUs we can reuse.
|
||||
@ -61,7 +57,10 @@ struct LtoData {
|
||||
tmp_path: TempDir,
|
||||
}
|
||||
|
||||
fn prepare_lto(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &DiagCtxt) -> Result<LtoData, FatalError> {
|
||||
fn prepare_lto(
|
||||
cgcx: &CodegenContext<GccCodegenBackend>,
|
||||
dcx: &DiagCtxt,
|
||||
) -> Result<LtoData, FatalError> {
|
||||
let export_threshold = match cgcx.lto {
|
||||
// We're just doing LTO for our one crate
|
||||
Lto::ThinLocal => SymbolExportLevel::Rust,
|
||||
@ -72,13 +71,12 @@ fn prepare_lto(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &DiagCtxt) -> Resu
|
||||
Lto::No => panic!("didn't request LTO but we're doing LTO"),
|
||||
};
|
||||
|
||||
let tmp_path =
|
||||
match tempdir() {
|
||||
let tmp_path = match tempdir() {
|
||||
Ok(tmp_path) => tmp_path,
|
||||
Err(error) => {
|
||||
eprintln!("Cannot create temporary directory: {}", error);
|
||||
return Err(FatalError);
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
let symbol_filter = &|&(ref name, info): &(String, SymbolExportInfo)| {
|
||||
@ -108,13 +106,12 @@ fn prepare_lto(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &DiagCtxt) -> Resu
|
||||
if !crate_type_allows_lto(*crate_type) {
|
||||
dcx.emit_err(LtoDisallowed);
|
||||
return Err(FatalError);
|
||||
} else if *crate_type == CrateType::Dylib {
|
||||
if !cgcx.opts.unstable_opts.dylib_lto {
|
||||
}
|
||||
if *crate_type == CrateType::Dylib && !cgcx.opts.unstable_opts.dylib_lto {
|
||||
dcx.emit_err(LtoDylib);
|
||||
return Err(FatalError);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if cgcx.opts.cg.prefer_dynamic && !cgcx.opts.unstable_opts.dylib_lto {
|
||||
dcx.emit_err(DynamicLinkingWithLTO);
|
||||
@ -125,8 +122,7 @@ fn prepare_lto(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &DiagCtxt) -> Resu
|
||||
let exported_symbols =
|
||||
cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO");
|
||||
{
|
||||
let _timer =
|
||||
cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold");
|
||||
let _timer = cgcx.prof.generic_activity("GCC_lto_generate_symbols_below_threshold");
|
||||
symbols_below_threshold
|
||||
.extend(exported_symbols[&cnum].iter().filter_map(symbol_filter));
|
||||
}
|
||||
@ -170,9 +166,8 @@ fn prepare_lto(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &DiagCtxt) -> Resu
|
||||
}
|
||||
|
||||
fn save_as_file(obj: &[u8], path: &Path) -> Result<(), LtoBitcodeFromRlib> {
|
||||
fs::write(path, obj)
|
||||
.map_err(|error| LtoBitcodeFromRlib {
|
||||
gcc_err: format!("write object file to temp dir: {}", error)
|
||||
fs::write(path, obj).map_err(|error| LtoBitcodeFromRlib {
|
||||
gcc_err: format!("write object file to temp dir: {}", error),
|
||||
})
|
||||
}
|
||||
|
||||
@ -187,12 +182,24 @@ pub(crate) fn run_fat(
|
||||
let lto_data = prepare_lto(cgcx, &dcx)?;
|
||||
/*let symbols_below_threshold =
|
||||
lto_data.symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();*/
|
||||
fat_lto(cgcx, &dcx, modules, cached_modules, lto_data.upstream_modules, lto_data.tmp_path,
|
||||
fat_lto(
|
||||
cgcx,
|
||||
&dcx,
|
||||
modules,
|
||||
cached_modules,
|
||||
lto_data.upstream_modules,
|
||||
lto_data.tmp_path,
|
||||
//&symbols_below_threshold,
|
||||
)
|
||||
}
|
||||
|
||||
fn fat_lto(cgcx: &CodegenContext<GccCodegenBackend>, _dcx: &DiagCtxt, modules: Vec<FatLtoInput<GccCodegenBackend>>, cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>, mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>, tmp_path: TempDir,
|
||||
fn fat_lto(
|
||||
cgcx: &CodegenContext<GccCodegenBackend>,
|
||||
_dcx: &DiagCtxt,
|
||||
modules: Vec<FatLtoInput<GccCodegenBackend>>,
|
||||
cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
|
||||
mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
|
||||
tmp_path: TempDir,
|
||||
//symbols_below_threshold: &[*const libc::c_char],
|
||||
) -> Result<LtoModuleCodegen<GccCodegenBackend>, FatalError> {
|
||||
let _timer = cgcx.prof.generic_activity("GCC_fat_lto_build_monolithic_module");
|
||||
@ -298,10 +305,15 @@ fn fat_lto(cgcx: &CodegenContext<GccCodegenBackend>, _dcx: &DiagCtxt, modules: V
|
||||
match bc_decoded {
|
||||
SerializedModule::Local(ref module_buffer) => {
|
||||
module.module_llvm.should_combine_object_files = true;
|
||||
module.module_llvm.context.add_driver_option(module_buffer.0.to_str().expect("path"));
|
||||
},
|
||||
module
|
||||
.module_llvm
|
||||
.context
|
||||
.add_driver_option(module_buffer.0.to_str().expect("path"));
|
||||
}
|
||||
SerializedModule::FromRlib(_) => unimplemented!("from rlib"),
|
||||
SerializedModule::FromUncompressedFile(_) => unimplemented!("from uncompressed file"),
|
||||
SerializedModule::FromUncompressedFile(_) => {
|
||||
unimplemented!("from uncompressed file")
|
||||
}
|
||||
}
|
||||
serialized_bitcode.push(bc_decoded);
|
||||
}
|
||||
|
@ -1,19 +1,24 @@
|
||||
use std::{env, fs};
|
||||
|
||||
use gccjit::OutputKind;
|
||||
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
|
||||
use rustc_codegen_ssa::back::link::ensure_removed;
|
||||
use rustc_codegen_ssa::back::write::{BitcodeSection, CodegenContext, EmitObj, ModuleConfig};
|
||||
use rustc_codegen_ssa::{CompiledModule, ModuleCodegen};
|
||||
use rustc_errors::DiagCtxt;
|
||||
use rustc_fs_util::link_or_copy;
|
||||
use rustc_session::config::OutputType;
|
||||
use rustc_span::fatal_error::FatalError;
|
||||
use rustc_target::spec::SplitDebuginfo;
|
||||
|
||||
use crate::{GccCodegenBackend, GccContext};
|
||||
use crate::errors::CopyBitcode;
|
||||
use crate::{GccCodegenBackend, GccContext};
|
||||
|
||||
pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &DiagCtxt, module: ModuleCodegen<GccContext>, config: &ModuleConfig) -> Result<CompiledModule, FatalError> {
|
||||
pub(crate) unsafe fn codegen(
|
||||
cgcx: &CodegenContext<GccCodegenBackend>,
|
||||
dcx: &DiagCtxt,
|
||||
module: ModuleCodegen<GccContext>,
|
||||
config: &ModuleConfig,
|
||||
) -> Result<CompiledModule, FatalError> {
|
||||
let _timer = cgcx.prof.generic_activity_with_arg("GCC_module_codegen", &*module.name);
|
||||
{
|
||||
let context = &module.module_llvm.context;
|
||||
@ -51,7 +56,8 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &Dia
|
||||
.generic_activity_with_arg("GCC_module_codegen_emit_bitcode", &*module.name);
|
||||
context.add_command_line_option("-flto=auto");
|
||||
context.add_command_line_option("-flto-partition=one");
|
||||
context.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
|
||||
context
|
||||
.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
|
||||
}
|
||||
|
||||
if config.emit_obj == EmitObj::ObjectCode(BitcodeSection::Full) {
|
||||
@ -65,18 +71,19 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &Dia
|
||||
context.add_command_line_option("-flto-partition=one");
|
||||
context.add_command_line_option("-ffat-lto-objects");
|
||||
// TODO(antoyo): Send -plugin/usr/lib/gcc/x86_64-pc-linux-gnu/11.1.0/liblto_plugin.so to linker (this should be done when specifying the appropriate rustc cli argument).
|
||||
context.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
|
||||
context
|
||||
.compile_to_file(OutputKind::ObjectFile, bc_out.to_str().expect("path to str"));
|
||||
}
|
||||
}
|
||||
|
||||
if config.emit_ir {
|
||||
unimplemented!();
|
||||
let out = cgcx.output_filenames.temp_path(OutputType::LlvmAssembly, module_name);
|
||||
std::fs::write(out, "").expect("write file");
|
||||
}
|
||||
|
||||
if config.emit_asm {
|
||||
let _timer = cgcx
|
||||
.prof
|
||||
.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name);
|
||||
let _timer =
|
||||
cgcx.prof.generic_activity_with_arg("GCC_module_codegen_emit_asm", &*module.name);
|
||||
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
|
||||
context.compile_to_file(OutputKind::Assembler, path.to_str().expect("path to str"));
|
||||
}
|
||||
@ -89,7 +96,9 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &Dia
|
||||
if env::var("CG_GCCJIT_DUMP_MODULE_NAMES").as_deref() == Ok("1") {
|
||||
println!("Module {}", module.name);
|
||||
}
|
||||
if env::var("CG_GCCJIT_DUMP_ALL_MODULES").as_deref() == Ok("1") || env::var("CG_GCCJIT_DUMP_MODULE").as_deref() == Ok(&module.name) {
|
||||
if env::var("CG_GCCJIT_DUMP_ALL_MODULES").as_deref() == Ok("1")
|
||||
|| env::var("CG_GCCJIT_DUMP_MODULE").as_deref() == Ok(&module.name)
|
||||
{
|
||||
println!("Dumping reproducer {}", module.name);
|
||||
let _ = fs::create_dir("/tmp/reproducers");
|
||||
// FIXME(antoyo): segfault in dump_reproducer_to_file() might be caused by
|
||||
@ -117,10 +126,15 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &Dia
|
||||
context.add_driver_option("-fuse-linker-plugin");
|
||||
|
||||
// NOTE: this doesn't actually generate an executable. With the above flags, it combines the .o files together in another .o.
|
||||
context.compile_to_file(OutputKind::Executable, obj_out.to_str().expect("path to str"));
|
||||
}
|
||||
else {
|
||||
context.compile_to_file(OutputKind::ObjectFile, obj_out.to_str().expect("path to str"));
|
||||
context.compile_to_file(
|
||||
OutputKind::Executable,
|
||||
obj_out.to_str().expect("path to str"),
|
||||
);
|
||||
} else {
|
||||
context.compile_to_file(
|
||||
OutputKind::ObjectFile,
|
||||
obj_out.to_str().expect("path to str"),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@ -148,11 +162,19 @@ pub(crate) unsafe fn codegen(cgcx: &CodegenContext<GccCodegenBackend>, dcx: &Dia
|
||||
))
|
||||
}
|
||||
|
||||
pub(crate) fn link(_cgcx: &CodegenContext<GccCodegenBackend>, _dcx: &DiagCtxt, mut _modules: Vec<ModuleCodegen<GccContext>>) -> Result<ModuleCodegen<GccContext>, FatalError> {
|
||||
pub(crate) fn link(
|
||||
_cgcx: &CodegenContext<GccCodegenBackend>,
|
||||
_dcx: &DiagCtxt,
|
||||
mut _modules: Vec<ModuleCodegen<GccContext>>,
|
||||
) -> Result<ModuleCodegen<GccContext>, FatalError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
pub(crate) fn save_temp_bitcode(cgcx: &CodegenContext<GccCodegenBackend>, _module: &ModuleCodegen<GccContext>, _name: &str) {
|
||||
pub(crate) fn save_temp_bitcode(
|
||||
cgcx: &CodegenContext<GccCodegenBackend>,
|
||||
_module: &ModuleCodegen<GccContext>,
|
||||
_name: &str,
|
||||
) {
|
||||
if !cgcx.save_temps {
|
||||
return;
|
||||
}
|
||||
|
@ -2,29 +2,26 @@ use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::time::Instant;
|
||||
|
||||
use gccjit::{
|
||||
FunctionType,
|
||||
GlobalKind,
|
||||
};
|
||||
use rustc_middle::dep_graph;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
#[cfg(feature="master")]
|
||||
use rustc_middle::mir::mono::Visibility;
|
||||
use rustc_middle::mir::mono::Linkage;
|
||||
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
|
||||
use gccjit::{FunctionType, GlobalKind};
|
||||
use rustc_codegen_ssa::base::maybe_create_entry_wrapper;
|
||||
use rustc_codegen_ssa::mono_item::MonoItemExt;
|
||||
use rustc_codegen_ssa::traits::DebugInfoMethods;
|
||||
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
|
||||
use rustc_middle::dep_graph;
|
||||
use rustc_middle::mir::mono::Linkage;
|
||||
#[cfg(feature = "master")]
|
||||
use rustc_middle::mir::mono::Visibility;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_session::config::DebugInfo;
|
||||
use rustc_span::Symbol;
|
||||
use rustc_target::spec::PanicStrategy;
|
||||
|
||||
use crate::{LockedTargetInfo, gcc_util, new_context};
|
||||
use crate::GccContext;
|
||||
use crate::builder::Builder;
|
||||
use crate::context::CodegenCx;
|
||||
use crate::GccContext;
|
||||
use crate::{gcc_util, new_context, LockedTargetInfo};
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
pub fn visibility_to_gcc(linkage: Visibility) -> gccjit::Visibility {
|
||||
match linkage {
|
||||
Visibility::Default => gccjit::Visibility::Default,
|
||||
@ -66,7 +63,11 @@ pub fn linkage_to_gcc(linkage: Linkage) -> FunctionType {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: LockedTargetInfo) -> (ModuleCodegen<GccContext>, u64) {
|
||||
pub fn compile_codegen_unit(
|
||||
tcx: TyCtxt<'_>,
|
||||
cgu_name: Symbol,
|
||||
target_info: LockedTargetInfo,
|
||||
) -> (ModuleCodegen<GccContext>, u64) {
|
||||
let prof_timer = tcx.prof.generic_activity("codegen_module");
|
||||
let start_time = Instant::now();
|
||||
|
||||
@ -85,7 +86,10 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: Lock
// the time we needed for codegenning it.
let cost = time_to_codegen.as_secs() * 1_000_000_000 + time_to_codegen.subsec_nanos() as u64;
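// Illustrative example (not part of the diff): a CGU that took 1.5 s to codegen
// would get cost = 1 * 1_000_000_000 + 500_000_000 = 1_500_000_000.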
|
||||
|
||||
fn module_codegen(tcx: TyCtxt<'_>, (cgu_name, target_info): (Symbol, LockedTargetInfo)) -> ModuleCodegen<GccContext> {
|
||||
fn module_codegen(
|
||||
tcx: TyCtxt<'_>,
|
||||
(cgu_name, target_info): (Symbol, LockedTargetInfo),
|
||||
) -> ModuleCodegen<GccContext> {
|
||||
let cgu = tcx.codegen_unit(cgu_name);
|
||||
// Instantiate monomorphizations without filling out definitions yet...
|
||||
let context = new_context(tcx);
|
||||
@ -95,7 +99,12 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: Lock
|
||||
context.add_driver_option("-fexceptions");
|
||||
}
|
||||
|
||||
let disabled_features: HashSet<_> = tcx.sess.opts.cg.target_feature.split(',')
|
||||
let disabled_features: HashSet<_> = tcx
|
||||
.sess
|
||||
.opts
|
||||
.cg
|
||||
.target_feature
|
||||
.split(',')
|
||||
.filter(|feature| feature.starts_with('-'))
|
||||
.map(|string| &string[1..])
|
||||
.collect();
|
||||
@ -129,7 +138,13 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: Lock
|
||||
context.add_command_line_option(&format!("-march={}", target_cpu));
|
||||
}
|
||||
|
||||
if tcx.sess.opts.unstable_opts.function_sections.unwrap_or(tcx.sess.target.function_sections) {
|
||||
if tcx
|
||||
.sess
|
||||
.opts
|
||||
.unstable_opts
|
||||
.function_sections
|
||||
.unwrap_or(tcx.sess.target.function_sections)
|
||||
{
|
||||
context.add_command_line_option("-ffunction-sections");
|
||||
context.add_command_line_option("-fdata-sections");
|
||||
}
|
||||
@ -152,19 +167,17 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: Lock
if env::var("CG_GCCJIT_DUMP_GIMPLE").as_deref() == Ok("1") {
context.set_dump_initial_gimple(true);
}
context.set_debug_info(true);
if env::var("CG_GCCJIT_DUMP_EVERYTHING").as_deref() == Ok("1") {
context.set_dump_everything(true);
}
if env::var("CG_GCCJIT_KEEP_INTERMEDIATES").as_deref() == Ok("1") {
context.set_keep_intermediates(true);
}

if env::var("CG_GCCJIT_VERBOSE").as_deref() == Ok("1") {
context.add_driver_option("-v");
}

// NOTE: The codegen generates unrechable blocks.
// NOTE: The codegen generates unreachable blocks.
context.set_allow_unreachable_blocks(true);
|
||||
|
||||
{
|
||||
@ -192,11 +205,7 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, target_info: Lock
|
||||
|
||||
ModuleCodegen {
|
||||
name: cgu_name.to_string(),
|
||||
module_llvm: GccContext {
|
||||
context,
|
||||
should_combine_object_files: false,
|
||||
temp_dir: None,
|
||||
},
|
||||
module_llvm: GccContext { context, should_combine_object_files: false, temp_dir: None },
|
||||
kind: ModuleKind::Regular,
|
||||
}
|
||||
}
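// Illustrative sketch (assumption, not part of the diff): the CG_GCCJIT_* switches
// checked above all follow the same pattern — an environment variable treated as a
// boolean flag that is enabled when set to "1". A hypothetical helper for that check:
fn env_flag(name: &str) -> bool {
    std::env::var(name).as_deref() == Ok("1")
}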
File diff suppressed because it is too large
@ -1,8 +1,8 @@
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use gccjit::{FnAttribute, Visibility};
|
||||
use gccjit::{FunctionType, Function};
|
||||
use rustc_middle::ty::{self, Instance, TypeVisitableExt};
|
||||
use gccjit::{Function, FunctionType};
|
||||
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt};
|
||||
use rustc_middle::ty::{self, Instance, TypeVisitableExt};
|
||||
|
||||
use crate::attributes;
|
||||
use crate::context::CodegenCx;
|
||||
@ -28,8 +28,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
|
||||
let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
|
||||
|
||||
let func =
|
||||
if let Some(_func) = cx.get_declared_value(&sym) {
|
||||
let func = if let Some(_func) = cx.get_declared_value(&sym) {
|
||||
// FIXME(antoyo): we never reach this because get_declared_value only returns global variables
|
||||
// and here we try to get a function.
|
||||
unreachable!();
|
||||
@ -67,8 +66,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
else {
|
||||
func
|
||||
}*/
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
cx.linkage.set(FunctionType::Extern);
|
||||
let func = cx.declare_fn(&sym, &fn_abi);
|
||||
|
||||
@ -100,7 +98,8 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
// whether we are sharing generics or not. The important thing here is
|
||||
// that the visibility we apply to the declaration is the same one that
|
||||
// has been applied to the definition (wherever that definition may be).
|
||||
let is_generic = instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some();
|
||||
let is_generic =
|
||||
instance.args.non_erasable_generics(tcx, instance.def_id()).next().is_some();
|
||||
|
||||
if is_generic {
|
||||
// This is a monomorphization. Its expected visibility depends
|
||||
@ -118,7 +117,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
if cx.tcx.is_unreachable_local_definition(instance_def_id)
|
||||
|| !cx.tcx.local_crate_exports_generics()
|
||||
{
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Visibility(Visibility::Hidden));
|
||||
}
|
||||
} else {
|
||||
@ -133,7 +132,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
// (because it is a C library or an executable), it
|
||||
// will have been declared `hidden`.
|
||||
if !cx.tcx.local_crate_exports_generics() {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Visibility(Visibility::Hidden));
|
||||
}
|
||||
}
|
||||
@ -141,7 +140,7 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
} else {
|
||||
// When not sharing generics, all instances are in the same
|
||||
// crate and have hidden visibility
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Visibility(Visibility::Hidden));
|
||||
}
|
||||
} else {
|
||||
@ -153,13 +152,13 @@ pub fn get_fn<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, instance: Instance<'tcx>)
|
||||
// This is function that is defined in the local crate.
|
||||
// If it is not reachable, it is hidden.
|
||||
if !cx.tcx.is_reachable_non_generic(instance_def_id) {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Visibility(Visibility::Hidden));
|
||||
}
|
||||
} else {
|
||||
// This is a function from an upstream crate that has
|
||||
// been instantiated here. These are always hidden.
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
func.add_attribute(FnAttribute::Visibility(Visibility::Hidden));
|
||||
}
|
||||
}
|
||||
|
@ -1,14 +1,9 @@
|
||||
use gccjit::LValue;
|
||||
use gccjit::{RValue, Type, ToRValue};
|
||||
use rustc_codegen_ssa::traits::{
|
||||
BaseTypeMethods,
|
||||
ConstMethods,
|
||||
MiscMethods,
|
||||
StaticMethods,
|
||||
};
|
||||
use rustc_middle::mir::Mutability;
|
||||
use rustc_middle::ty::layout::{LayoutOf};
|
||||
use gccjit::{RValue, ToRValue, Type};
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, MiscMethods, StaticMethods};
|
||||
use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar};
|
||||
use rustc_middle::mir::Mutability;
|
||||
use rustc_middle::ty::layout::LayoutOf;
|
||||
use rustc_target::abi::{self, HasDataLayout, Pointer};
|
||||
|
||||
use crate::consts::const_alloc_to_gcc;
|
||||
@ -40,9 +35,7 @@ pub fn bytes_in_context<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, bytes: &[u8]) ->
let byte_type = context.new_type::<u8>();
let typ = context.new_array_type(None, byte_type, bytes.len() as u64);
let elements: Vec<_> =
bytes.iter()
.map(|&byte| context.new_rvalue_from_int(byte_type, byte as i32))
.collect();
bytes.iter().map(|&byte| context.new_rvalue_from_int(byte_type, byte as i32)).collect();
context.new_array_constructor(None, typ, &elements)
}
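// Illustrative usage (assumption, not part of the diff): any constant byte slice can
// be turned into a GCC array rvalue through `bytes_in_context`, e.g.:
fn hello_bytes<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>) -> RValue<'gcc> {
    // Builds a 5-element `u8` array constructor for the literal bytes of "hello".
    bytes_in_context(cx, b"hello")
}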
|
||||
|
||||
@ -54,23 +47,20 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn const_null(&self, typ: Type<'gcc>) -> RValue<'gcc> {
|
||||
if type_is_pointer(typ) {
|
||||
self.context.new_null(typ)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.const_int(typ, 0)
|
||||
}
|
||||
}
|
||||
|
||||
fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
|
||||
let local = self.current_func.borrow().expect("func")
|
||||
.new_local(None, typ, "undefined");
|
||||
let local = self.current_func.borrow().expect("func").new_local(None, typ, "undefined");
|
||||
if typ.is_struct().is_some() {
|
||||
// NOTE: hack to workaround a limitation of the rustc API: see comment on
|
||||
// CodegenCx.structs_as_pointer
|
||||
let pointer = local.get_address(None);
|
||||
self.structs_as_pointer.borrow_mut().insert(pointer);
|
||||
pointer
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
local.to_rvalue()
|
||||
}
|
||||
}
|
||||
@ -143,16 +133,15 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
.or_insert_with(|| (s.to_owned(), self.global_string(s)))
|
||||
.1;
|
||||
let len = s.len();
|
||||
let cs = self.const_ptrcast(str_global.get_address(None),
|
||||
let cs = self.const_ptrcast(
|
||||
str_global.get_address(None),
|
||||
self.type_ptr_to(self.layout_of(self.tcx.types.str_).gcc_type(self)),
|
||||
);
|
||||
(cs, self.const_usize(len as u64))
|
||||
}
|
||||
|
||||
fn const_struct(&self, values: &[RValue<'gcc>], packed: bool) -> RValue<'gcc> {
|
||||
let fields: Vec<_> = values.iter()
|
||||
.map(|value| value.get_type())
|
||||
.collect();
|
||||
let fields: Vec<_> = values.iter().map(|value| value.get_type()).collect();
|
||||
// TODO(antoyo): cache the type? It's anonymous, so probably not.
|
||||
let typ = self.type_struct(&fields, packed);
|
||||
let struct_type = typ.is_struct().expect("struct type");
|
||||
@ -178,9 +167,10 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
// FIXME(antoyo): there's some issues with using the u128 code that follows, so hard-code
|
||||
// the paths for floating-point values.
|
||||
if ty == self.float_type {
|
||||
return self.context.new_rvalue_from_double(ty, f32::from_bits(data as u32) as f64);
|
||||
}
|
||||
else if ty == self.double_type {
|
||||
return self
|
||||
.context
|
||||
.new_rvalue_from_double(ty, f32::from_bits(data as u32) as f64);
|
||||
} else if ty == self.double_type {
|
||||
return self.context.new_rvalue_from_double(ty, f64::from_bits(data as u64));
|
||||
}
|
||||
|
||||
@ -192,8 +182,7 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
// FIXME(antoyo): fix bitcast to work in constant contexts.
|
||||
// TODO(antoyo): perhaps only use bitcast for pointers?
|
||||
self.context.new_cast(None, value, ty)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// TODO(bjorn3): assert size is correct
|
||||
self.const_bitcast(value, ty)
|
||||
}
|
||||
@ -201,13 +190,11 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
Scalar::Ptr(ptr, _size) => {
|
||||
let (prov, offset) = ptr.into_parts(); // we know the `offset` is relative
|
||||
let alloc_id = prov.alloc_id();
|
||||
let base_addr =
|
||||
match self.tcx.global_alloc(alloc_id) {
|
||||
let base_addr = match self.tcx.global_alloc(alloc_id) {
|
||||
GlobalAlloc::Memory(alloc) => {
|
||||
let init = const_alloc_to_gcc(self, alloc);
|
||||
let alloc = alloc.inner();
|
||||
let value =
|
||||
match alloc.mutability {
|
||||
let value = match alloc.mutability {
|
||||
Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
|
||||
_ => self.static_addr_of(init, alloc.align, None),
|
||||
};
|
||||
@ -215,28 +202,29 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
// TODO(antoyo): set value name.
|
||||
}
|
||||
value
|
||||
},
|
||||
GlobalAlloc::Function(fn_instance) => {
|
||||
self.get_fn_addr(fn_instance)
|
||||
},
|
||||
}
|
||||
GlobalAlloc::Function(fn_instance) => self.get_fn_addr(fn_instance),
|
||||
GlobalAlloc::VTable(ty, trait_ref) => {
|
||||
let alloc = self.tcx.global_alloc(self.tcx.vtable_allocation((ty, trait_ref))).unwrap_memory();
|
||||
let alloc = self
|
||||
.tcx
|
||||
.global_alloc(self.tcx.vtable_allocation((ty, trait_ref)))
|
||||
.unwrap_memory();
|
||||
let init = const_alloc_to_gcc(self, alloc);
|
||||
self.static_addr_of(init, alloc.inner().align, None)
|
||||
}
|
||||
GlobalAlloc::Static(def_id) => {
|
||||
assert!(self.tcx.is_static(def_id));
|
||||
self.get_static(def_id).get_address(None)
|
||||
},
|
||||
}
|
||||
};
|
||||
let ptr_type = base_addr.get_type();
|
||||
let base_addr = self.const_bitcast(base_addr, self.usize_type);
|
||||
let offset = self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64);
|
||||
let offset =
|
||||
self.context.new_rvalue_from_long(self.usize_type, offset.bytes() as i64);
|
||||
let ptr = self.const_bitcast(base_addr + offset, ptr_type);
|
||||
if !matches!(layout.primitive(), Pointer(_)) {
|
||||
self.const_bitcast(ptr.dereference(None).to_rvalue(), ty)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.const_bitcast(ptr, ty)
|
||||
}
|
||||
}
|
||||
@ -261,7 +249,9 @@ impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
fn const_ptr_byte_offset(&self, base_addr: Self::Value, offset: abi::Size) -> Self::Value {
|
||||
self.context.new_array_access(None, base_addr, self.const_usize(offset.bytes())).get_address(None)
|
||||
self.context
|
||||
.new_array_access(None, base_addr, self.const_usize(offset.bytes()))
|
||||
.get_address(None)
|
||||
}
|
||||
}
|
||||
|
||||
@ -284,35 +274,25 @@ impl<'gcc, 'tcx> SignType<'gcc, 'tcx> for Type<'gcc> {
|
||||
fn to_signed(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
|
||||
if self.is_u8(cx) {
|
||||
cx.i8_type
|
||||
}
|
||||
else if self.is_u16(cx) {
|
||||
} else if self.is_u16(cx) {
|
||||
cx.i16_type
|
||||
}
|
||||
else if self.is_u32(cx) {
|
||||
} else if self.is_u32(cx) {
|
||||
cx.i32_type
|
||||
}
|
||||
else if self.is_u64(cx) {
|
||||
} else if self.is_u64(cx) {
|
||||
cx.i64_type
|
||||
}
|
||||
else if self.is_u128(cx) {
|
||||
} else if self.is_u128(cx) {
|
||||
cx.i128_type
|
||||
}
|
||||
else if self.is_uchar(cx) {
|
||||
} else if self.is_uchar(cx) {
|
||||
cx.char_type
|
||||
}
|
||||
else if self.is_ushort(cx) {
|
||||
} else if self.is_ushort(cx) {
|
||||
cx.short_type
|
||||
}
|
||||
else if self.is_uint(cx) {
|
||||
} else if self.is_uint(cx) {
|
||||
cx.int_type
|
||||
}
|
||||
else if self.is_ulong(cx) {
|
||||
} else if self.is_ulong(cx) {
|
||||
cx.long_type
|
||||
}
|
||||
else if self.is_ulonglong(cx) {
|
||||
} else if self.is_ulonglong(cx) {
|
||||
cx.longlong_type
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
@ -320,35 +300,25 @@ impl<'gcc, 'tcx> SignType<'gcc, 'tcx> for Type<'gcc> {
|
||||
fn to_unsigned(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc> {
|
||||
if self.is_i8(cx) {
|
||||
cx.u8_type
|
||||
}
|
||||
else if self.is_i16(cx) {
|
||||
} else if self.is_i16(cx) {
|
||||
cx.u16_type
|
||||
}
|
||||
else if self.is_i32(cx) {
|
||||
} else if self.is_i32(cx) {
|
||||
cx.u32_type
|
||||
}
|
||||
else if self.is_i64(cx) {
|
||||
} else if self.is_i64(cx) {
|
||||
cx.u64_type
|
||||
}
|
||||
else if self.is_i128(cx) {
|
||||
} else if self.is_i128(cx) {
|
||||
cx.u128_type
|
||||
}
|
||||
else if self.is_char(cx) {
|
||||
} else if self.is_char(cx) {
|
||||
cx.uchar_type
|
||||
}
|
||||
else if self.is_short(cx) {
|
||||
} else if self.is_short(cx) {
|
||||
cx.ushort_type
|
||||
}
|
||||
else if self.is_int(cx) {
|
||||
} else if self.is_int(cx) {
|
||||
cx.uint_type
|
||||
}
|
||||
else if self.is_long(cx) {
|
||||
} else if self.is_long(cx) {
|
||||
cx.ulong_type
|
||||
}
|
||||
else if self.is_longlong(cx) {
|
||||
} else if self.is_longlong(cx) {
|
||||
cx.ulonglong_type
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
@ -2,12 +2,14 @@
|
||||
use gccjit::{FnAttribute, VarAttribute, Visibility};
|
||||
use gccjit::{Function, GlobalKind, LValue, RValue, ToRValue};
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, DerivedTypeMethods, StaticMethods};
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
|
||||
use rustc_middle::mir::interpret::{
|
||||
self, read_target_uint, ConstAllocation, ErrorHandled, Scalar as InterpScalar,
|
||||
};
|
||||
use rustc_middle::mir::mono::MonoItem;
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::layout::LayoutOf;
|
||||
use rustc_middle::mir::interpret::{self, ConstAllocation, ErrorHandled, Scalar as InterpScalar, read_target_uint};
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_span::def_id::DefId;
|
||||
use rustc_target::abi::{self, Align, HasDataLayout, Primitive, Size, WrappingRange};
|
||||
|
||||
@ -16,7 +18,11 @@ use crate::context::CodegenCx;
|
||||
use crate::errors::InvalidMinimumAlignment;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
|
||||
fn set_global_alignment<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, gv: LValue<'gcc>, mut align: Align) {
|
||||
fn set_global_alignment<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
gv: LValue<'gcc>,
|
||||
mut align: Align,
|
||||
) {
|
||||
// The target may require greater alignment for globals than the type does.
|
||||
// Note: GCC and Clang also allow `__attribute__((aligned))` on variables,
|
||||
// which can force it to be smaller. Rust doesn't support this yet.
|
||||
@ -48,7 +54,9 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
let global_value = self.static_addr_of_mut(cv, align, kind);
|
||||
#[cfg(feature = "master")]
|
||||
self.global_lvalues.borrow().get(&global_value)
|
||||
self.global_lvalues
|
||||
.borrow()
|
||||
.get(&global_value)
|
||||
.expect("`static_addr_of_mut` did not add the global to `self.global_lvalues`")
|
||||
.global_set_readonly();
|
||||
self.const_globals.borrow_mut().insert(cv, global_value);
|
||||
@ -58,8 +66,7 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||
fn codegen_static(&self, def_id: DefId, is_mutable: bool) {
|
||||
let attrs = self.tcx.codegen_fn_attrs(def_id);
|
||||
|
||||
let value =
|
||||
match codegen_static_initializer(&self, def_id) {
|
||||
let value = match codegen_static_initializer(&self, def_id) {
|
||||
Ok((value, _)) => value,
|
||||
// Error has already been reported
|
||||
Err(_) => return,
|
||||
@ -70,12 +77,8 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||
// boolean SSA values are i1, but they have to be stored in i8 slots,
|
||||
// otherwise some LLVM optimization passes don't work as expected
|
||||
let val_llty = self.val_ty(value);
|
||||
let value =
|
||||
if val_llty == self.type_i1() {
|
||||
unimplemented!();
|
||||
}
|
||||
else {
|
||||
value
|
||||
};
|
||||
|
||||
let instance = Instance::mono(self.tcx, def_id);
|
||||
@ -89,12 +92,10 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
// As an optimization, all shared statics which do not have interior
|
||||
// mutability are placed into read-only memory.
|
||||
if !is_mutable {
|
||||
if self.type_is_freeze(ty) {
|
||||
if !is_mutable && self.type_is_freeze(ty) {
|
||||
#[cfg(feature = "master")]
|
||||
global.global_set_readonly();
|
||||
}
|
||||
}
|
||||
|
||||
if attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) {
|
||||
// Do not allow LLVM to change the alignment of a TLS on macOS.
|
||||
@ -149,7 +150,9 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||
// TODO(antoyo): set link section.
|
||||
}
|
||||
|
||||
if attrs.flags.contains(CodegenFnAttrFlags::USED) || attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER) {
|
||||
if attrs.flags.contains(CodegenFnAttrFlags::USED)
|
||||
|| attrs.flags.contains(CodegenFnAttrFlags::USED_LINKER)
|
||||
{
|
||||
self.add_used_global(global.to_rvalue());
|
||||
}
|
||||
}
|
||||
@ -166,15 +169,19 @@ impl<'gcc, 'tcx> StaticMethods for CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
#[cfg_attr(not(feature="master"), allow(unused_variables))]
|
||||
#[cfg_attr(not(feature = "master"), allow(unused_variables))]
|
||||
pub fn add_used_function(&self, function: Function<'gcc>) {
|
||||
#[cfg(feature = "master")]
|
||||
function.add_attribute(FnAttribute::Used);
|
||||
}
|
||||
|
||||
pub fn static_addr_of_mut(&self, cv: RValue<'gcc>, align: Align, kind: Option<&str>) -> RValue<'gcc> {
|
||||
let global =
|
||||
match kind {
|
||||
pub fn static_addr_of_mut(
|
||||
&self,
|
||||
cv: RValue<'gcc>,
|
||||
align: Align,
|
||||
kind: Option<&str>,
|
||||
) -> RValue<'gcc> {
|
||||
let global = match kind {
|
||||
Some(kind) if !self.tcx.sess.fewer_names() => {
|
||||
let name = self.generate_local_symbol_name(kind);
|
||||
// TODO(antoyo): check if it's okay that no link_section is set.
|
||||
@ -187,7 +194,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
let typ = self.val_ty(cv).get_aligned(align.bytes());
|
||||
let global = self.declare_unnamed_global(typ);
|
||||
global
|
||||
},
|
||||
}
|
||||
};
|
||||
global.global_set_initializer_rvalue(cv);
|
||||
// TODO(antoyo): set unnamed address.
|
||||
@ -215,8 +222,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all());
|
||||
let sym = self.tcx.symbol_name(instance).name;
|
||||
|
||||
let global =
|
||||
if def_id.is_local() && !self.tcx.is_foreign_item(def_id) {
|
||||
let global = if def_id.is_local() && !self.tcx.is_foreign_item(def_id) {
|
||||
let llty = self.layout_of(ty).gcc_type(self);
|
||||
if let Some(global) = self.get_declared_value(sym) {
|
||||
if self.val_ty(global) != self.type_ptr_to(llty) {
|
||||
@ -235,7 +241,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
if !self.tcx.is_reachable_non_generic(def_id) {
|
||||
#[cfg(feature = "master")]
|
||||
global.add_attribute(VarAttribute::Visibility(Visibility::Hidden));
|
||||
global.add_string_attribute(VarAttribute::Visibility(Visibility::Hidden));
|
||||
}
|
||||
|
||||
global
|
||||
@ -278,7 +284,10 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAllocation<'tcx>) -> RValue<'gcc> {
|
||||
pub fn const_alloc_to_gcc<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
alloc: ConstAllocation<'tcx>,
|
||||
) -> RValue<'gcc> {
|
||||
let alloc = alloc.inner();
|
||||
let mut llvals = Vec::with_capacity(alloc.provenance().ptrs().len() + 1);
|
||||
let dl = cx.data_layout();
|
||||
@ -300,8 +309,8 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
|
||||
let bytes = alloc.inspect_with_uninit_and_ptr_outside_interpreter(next_offset..offset);
|
||||
llvals.push(cx.const_bytes(bytes));
|
||||
}
|
||||
let ptr_offset =
|
||||
read_target_uint( dl.endian,
|
||||
let ptr_offset = read_target_uint(
|
||||
dl.endian,
|
||||
// This `inspect` is okay since it is within the bounds of the allocation, it doesn't
|
||||
// affect interpreter execution (we inspect the result after interpreter execution),
|
||||
// and we properly interpret the provenance as a relocation pointer offset.
|
||||
@ -317,7 +326,10 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
|
||||
interpret::Pointer::new(prov, Size::from_bytes(ptr_offset)),
|
||||
&cx.tcx,
|
||||
),
|
||||
abi::Scalar::Initialized { value: Primitive::Pointer(address_space), valid_range: WrappingRange::full(dl.pointer_size) },
|
||||
abi::Scalar::Initialized {
|
||||
value: Primitive::Pointer(address_space),
|
||||
valid_range: WrappingRange::full(dl.pointer_size),
|
||||
},
|
||||
cx.type_i8p_ext(address_space),
|
||||
));
|
||||
next_offset = offset + pointer_size;
|
||||
@ -337,17 +349,29 @@ pub fn const_alloc_to_gcc<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, alloc: ConstAl
|
||||
cx.const_struct(&llvals, true)
|
||||
}
|
||||
|
||||
pub fn codegen_static_initializer<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, def_id: DefId) -> Result<(RValue<'gcc>, ConstAllocation<'tcx>), ErrorHandled> {
|
||||
pub fn codegen_static_initializer<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
def_id: DefId,
|
||||
) -> Result<(RValue<'gcc>, ConstAllocation<'tcx>), ErrorHandled> {
|
||||
let alloc = cx.tcx.eval_static_initializer(def_id)?;
|
||||
Ok((const_alloc_to_gcc(cx, alloc), alloc))
|
||||
}
|
||||
|
||||
fn check_and_apply_linkage<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, attrs: &CodegenFnAttrs, ty: Ty<'tcx>, sym: &str) -> LValue<'gcc> {
|
||||
fn check_and_apply_linkage<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
attrs: &CodegenFnAttrs,
|
||||
ty: Ty<'tcx>,
|
||||
sym: &str,
|
||||
) -> LValue<'gcc> {
|
||||
let is_tls = attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
|
||||
let gcc_type = cx.layout_of(ty).gcc_type(cx);
|
||||
if let Some(linkage) = attrs.import_linkage {
|
||||
// Declare a symbol `foo` with the desired linkage.
|
||||
let global1 = cx.declare_global_with_linkage(&sym, cx.type_i8(), base::global_linkage_to_gcc(linkage));
|
||||
let global1 = cx.declare_global_with_linkage(
|
||||
&sym,
|
||||
cx.type_i8(),
|
||||
base::global_linkage_to_gcc(linkage),
|
||||
);
|
||||
|
||||
// Declare an internal global `extern_with_linkage_foo` which
|
||||
// is initialized with the address of `foo`. If `foo` is
|
||||
@ -363,8 +387,7 @@ fn check_and_apply_linkage<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, attrs: &Codeg
|
||||
global2.global_set_initializer_rvalue(value);
|
||||
// TODO(antoyo): use global_set_initializer() when it will work.
|
||||
global2
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// Generate an external declaration.
|
||||
// FIXME(nagisa): investigate whether it can be changed into define_global
|
||||
|
||||
|
@ -1,22 +1,25 @@
|
||||
use std::cell::{Cell, RefCell};
|
||||
|
||||
use gccjit::{Block, CType, Context, Function, FunctionPtrType, FunctionType, LValue, RValue, Type};
|
||||
use rustc_codegen_ssa::base::wants_msvc_seh;
|
||||
use rustc_codegen_ssa::traits::{
|
||||
BackendTypes,
|
||||
BaseTypeMethods,
|
||||
MiscMethods,
|
||||
use gccjit::{
|
||||
Block, CType, Context, Function, FunctionPtrType, FunctionType, LValue, Location, RValue, Type,
|
||||
};
|
||||
use rustc_codegen_ssa::base::wants_msvc_seh;
|
||||
use rustc_codegen_ssa::errors as ssa_errors;
|
||||
use rustc_codegen_ssa::traits::{BackendTypes, BaseTypeMethods, MiscMethods};
|
||||
use rustc_data_structures::base_n;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::mir::mono::CodegenUnit;
|
||||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::layout::{
|
||||
FnAbiError, FnAbiOf, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError,
|
||||
LayoutOfHelpers, TyAndLayout,
|
||||
};
|
||||
use rustc_middle::ty::{self, Instance, ParamEnv, PolyExistentialTraitRef, Ty, TyCtxt};
|
||||
use rustc_middle::ty::layout::{FnAbiError, FnAbiOf, FnAbiOfHelpers, FnAbiRequest, HasParamEnv, HasTyCtxt, LayoutError, TyAndLayout, LayoutOfHelpers};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::{Span, source_map::respan};
|
||||
use rustc_target::abi::{call::FnAbi, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx};
|
||||
use rustc_span::{source_map::respan, Span};
|
||||
use rustc_target::abi::{
|
||||
call::FnAbi, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx,
|
||||
};
|
||||
use rustc_target::spec::{HasTargetSpec, Target, TlsModel};
|
||||
|
||||
use crate::callee::get_fn;
|
||||
@ -81,7 +84,8 @@ pub struct CodegenCx<'gcc, 'tcx> {
|
||||
/// Cache function instances of monomorphic and polymorphic items
|
||||
pub function_instances: RefCell<FxHashMap<Instance<'tcx>, Function<'gcc>>>,
|
||||
/// Cache generated vtables
|
||||
pub vtables: RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), RValue<'gcc>>>,
|
||||
pub vtables:
|
||||
RefCell<FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), RValue<'gcc>>>,
|
||||
|
||||
// TODO(antoyo): improve the SSA API to not require those.
|
||||
/// Mapping from function pointer type to indexes of on stack parameters.
|
||||
@ -121,24 +125,28 @@ pub struct CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
pub fn new(context: &'gcc Context<'gcc>, codegen_unit: &'tcx CodegenUnit<'tcx>, tcx: TyCtxt<'tcx>, supports_128bit_integers: bool) -> Self {
|
||||
pub fn new(
|
||||
context: &'gcc Context<'gcc>,
|
||||
codegen_unit: &'tcx CodegenUnit<'tcx>,
|
||||
tcx: TyCtxt<'tcx>,
|
||||
supports_128bit_integers: bool,
|
||||
) -> Self {
|
||||
let check_overflow = tcx.sess.overflow_checks();
|
||||
|
||||
let create_type = |ctype, rust_type| {
|
||||
let layout = tcx.layout_of(ParamEnv::reveal_all().and(rust_type)).unwrap();
|
||||
let align = layout.align.abi.bytes();
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
{
|
||||
context.new_c_type(ctype).get_aligned(align)
|
||||
}
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
{
|
||||
// Since libgccjit 12 doesn't contain the fix to compare aligned integer types,
|
||||
// only align u128 and i128.
|
||||
if layout.ty.int_size_and_signed(tcx).0.bytes() == 16 {
|
||||
context.new_c_type(ctype).get_aligned(align)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
context.new_c_type(ctype)
|
||||
}
|
||||
}
|
||||
@ -153,13 +161,11 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
let u32_type = create_type(CType::UInt32t, tcx.types.u32);
|
||||
let u64_type = create_type(CType::UInt64t, tcx.types.u64);
|
||||
|
||||
let (i128_type, u128_type) =
|
||||
if supports_128bit_integers {
|
||||
let (i128_type, u128_type) = if supports_128bit_integers {
|
||||
let i128_type = create_type(CType::Int128t, tcx.types.i128);
|
||||
let u128_type = create_type(CType::UInt128t, tcx.types.u128);
|
||||
(i128_type, u128_type)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
/*let layout = tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.i128)).unwrap();
|
||||
let i128_align = layout.align.abi.bytes();
|
||||
let layout = tcx.layout_of(ParamEnv::reveal_all().and(tcx.types.u128)).unwrap();
|
||||
@ -196,16 +202,65 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
let mut functions = FxHashMap::default();
|
||||
let builtins = [
|
||||
"__builtin_unreachable", "abort", "__builtin_expect", /*"__builtin_expect_with_probability",*/
|
||||
"__builtin_constant_p", "__builtin_add_overflow", "__builtin_mul_overflow", "__builtin_saddll_overflow",
|
||||
/*"__builtin_sadd_overflow",*/ "__builtin_smulll_overflow", /*"__builtin_smul_overflow",*/
|
||||
"__builtin_ssubll_overflow", /*"__builtin_ssub_overflow",*/ "__builtin_sub_overflow", "__builtin_uaddll_overflow",
|
||||
"__builtin_uadd_overflow", "__builtin_umulll_overflow", "__builtin_umul_overflow", "__builtin_usubll_overflow",
|
||||
"__builtin_usub_overflow", "sqrtf", "sqrt", "__builtin_powif", "__builtin_powi", "sinf", "sin", "cosf", "cos",
|
||||
"powf", "pow", "expf", "exp", "exp2f", "exp2", "logf", "log", "log10f", "log10", "log2f", "log2", "fmaf",
|
||||
"fma", "fabsf", "fabs", "fminf", "fmin", "fmaxf", "fmax", "copysignf", "copysign", "floorf", "floor", "ceilf",
|
||||
"ceil", "truncf", "trunc", "rintf", "rint", "nearbyintf", "nearbyint", "roundf", "round",
|
||||
|
||||
"__builtin_unreachable",
|
||||
"abort",
|
||||
"__builtin_expect", /*"__builtin_expect_with_probability",*/
|
||||
"__builtin_constant_p",
|
||||
"__builtin_add_overflow",
|
||||
"__builtin_mul_overflow",
|
||||
"__builtin_saddll_overflow",
|
||||
/*"__builtin_sadd_overflow",*/
|
||||
"__builtin_smulll_overflow", /*"__builtin_smul_overflow",*/
|
||||
"__builtin_ssubll_overflow",
|
||||
/*"__builtin_ssub_overflow",*/ "__builtin_sub_overflow",
|
||||
"__builtin_uaddll_overflow",
|
||||
"__builtin_uadd_overflow",
|
||||
"__builtin_umulll_overflow",
|
||||
"__builtin_umul_overflow",
|
||||
"__builtin_usubll_overflow",
|
||||
"__builtin_usub_overflow",
|
||||
"sqrtf",
|
||||
"sqrt",
|
||||
"__builtin_powif",
|
||||
"__builtin_powi",
|
||||
"sinf",
|
||||
"sin",
|
||||
"cosf",
|
||||
"cos",
|
||||
"powf",
|
||||
"pow",
|
||||
"expf",
|
||||
"exp",
|
||||
"exp2f",
|
||||
"exp2",
|
||||
"logf",
|
||||
"log",
|
||||
"log10f",
|
||||
"log10",
|
||||
"log2f",
|
||||
"log2",
|
||||
"fmaf",
|
||||
"fma",
|
||||
"fabsf",
|
||||
"fabs",
|
||||
"fminf",
|
||||
"fmin",
|
||||
"fmaxf",
|
||||
"fmax",
|
||||
"copysignf",
|
||||
"copysign",
|
||||
"floorf",
|
||||
"floor",
|
||||
"ceilf",
|
||||
"ceil",
|
||||
"truncf",
|
||||
"trunc",
|
||||
"rintf",
|
||||
"rint",
|
||||
"nearbyintf",
|
||||
"nearbyint",
|
||||
"roundf",
|
||||
"round",
|
||||
];
|
||||
|
||||
for builtin in builtins.iter() {
|
||||
@ -282,8 +337,12 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
pub fn rvalue_as_function(&self, value: RValue<'gcc>) -> Function<'gcc> {
|
||||
let function: Function<'gcc> = unsafe { std::mem::transmute(value) };
|
||||
debug_assert!(self.functions.borrow().values().any(|value| *value == function),
|
||||
"{:?} ({:?}) is not a function", value, value.get_type());
|
||||
debug_assert!(
|
||||
self.functions.borrow().values().any(|value| *value == function),
|
||||
"{:?} ({:?}) is not a function",
|
||||
value,
|
||||
value.get_type()
|
||||
);
|
||||
function
|
||||
}
|
||||
|
||||
@ -305,13 +364,13 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
self.supports_128bit_integers &&
|
||||
(self.u128_type.is_compatible_with(typ) || self.i128_type.is_compatible_with(typ))
|
||||
self.supports_128bit_integers
|
||||
&& (self.u128_type.is_compatible_with(typ) || self.i128_type.is_compatible_with(typ))
|
||||
}
|
||||
|
||||
pub fn is_non_native_int_type(&self, typ: Type<'gcc>) -> bool {
|
||||
!self.supports_128bit_integers &&
|
||||
(self.u128_type.is_compatible_with(typ) || self.i128_type.is_compatible_with(typ))
|
||||
!self.supports_128bit_integers
|
||||
&& (self.u128_type.is_compatible_with(typ) || self.i128_type.is_compatible_with(typ))
|
||||
}
|
||||
|
||||
pub fn is_native_int_type_or_bool(&self, typ: Type<'gcc>) -> bool {
|
||||
@ -319,18 +378,23 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn is_int_type_or_bool(&self, typ: Type<'gcc>) -> bool {
|
||||
self.is_native_int_type(typ) || self.is_non_native_int_type(typ) || typ.is_compatible_with(self.bool_type)
|
||||
self.is_native_int_type(typ)
|
||||
|| self.is_non_native_int_type(typ)
|
||||
|| typ.is_compatible_with(self.bool_type)
|
||||
}
|
||||
|
||||
pub fn sess(&self) -> &'tcx Session {
|
||||
&self.tcx.sess
|
||||
}
|
||||
|
||||
pub fn bitcast_if_needed(&self, value: RValue<'gcc>, expected_type: Type<'gcc>) -> RValue<'gcc> {
|
||||
pub fn bitcast_if_needed(
|
||||
&self,
|
||||
value: RValue<'gcc>,
|
||||
expected_type: Type<'gcc>,
|
||||
) -> RValue<'gcc> {
|
||||
if value.get_type() != expected_type {
|
||||
self.context.new_bitcast(None, value, expected_type)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
value
|
||||
}
|
||||
}
|
||||
@ -345,12 +409,14 @@ impl<'gcc, 'tcx> BackendTypes for CodegenCx<'gcc, 'tcx> {
|
||||
type Funclet = (); // TODO(antoyo)
|
||||
|
||||
type DIScope = (); // TODO(antoyo)
|
||||
type DILocation = (); // TODO(antoyo)
|
||||
type DILocation = Location<'gcc>;
|
||||
type DIVariable = (); // TODO(antoyo)
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn vtables(&self) -> &RefCell<FxHashMap<(Ty<'tcx>, Option<PolyExistentialTraitRef<'tcx>>), RValue<'gcc>>> {
|
||||
fn vtables(
|
||||
&self,
|
||||
) -> &RefCell<FxHashMap<(Ty<'tcx>, Option<PolyExistentialTraitRef<'tcx>>), RValue<'gcc>>> {
|
||||
&self.vtables
|
||||
}
|
||||
|
||||
@ -364,11 +430,9 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn get_fn_addr(&self, instance: Instance<'tcx>) -> RValue<'gcc> {
|
||||
let func_name = self.tcx.symbol_name(instance).name;
|
||||
|
||||
let func =
|
||||
if self.intrinsics.borrow().contains_key(func_name) {
|
||||
let func = if self.intrinsics.borrow().contains_key(func_name) {
|
||||
self.intrinsics.borrow()[func_name].clone()
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
get_fn(self, instance)
|
||||
};
|
||||
let ptr = func.get_address(None);
|
||||
@ -407,17 +471,16 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
return llpersonality;
|
||||
}
|
||||
let tcx = self.tcx;
|
||||
let func =
|
||||
match tcx.lang_items().eh_personality() {
|
||||
let func = match tcx.lang_items().eh_personality() {
|
||||
Some(def_id) if !wants_msvc_seh(self.sess()) => {
|
||||
let instance =
|
||||
ty::Instance::resolve(
|
||||
let instance = ty::Instance::resolve(
|
||||
tcx,
|
||||
ty::ParamEnv::reveal_all(),
|
||||
def_id,
|
||||
ty::List::empty(),
|
||||
)
|
||||
.unwrap().unwrap();
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
let symbol_name = tcx.symbol_name(instance).name;
|
||||
let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty());
|
||||
@ -425,13 +488,11 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
let func = self.declare_fn(symbol_name, &fn_abi);
|
||||
let func: RValue<'gcc> = unsafe { std::mem::transmute(func) };
|
||||
func
|
||||
},
|
||||
_ => {
|
||||
let name =
|
||||
if wants_msvc_seh(self.sess()) {
|
||||
"__CxxFrameHandler3"
|
||||
}
|
||||
else {
|
||||
_ => {
|
||||
let name = if wants_msvc_seh(self.sess()) {
|
||||
"__CxxFrameHandler3"
|
||||
} else {
|
||||
"rust_eh_personality"
|
||||
};
|
||||
let func = self.declare_func(name, self.type_i32(), &[], true);
|
||||
@ -467,8 +528,7 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
let entry_name = self.sess().target.entry_name.as_ref();
|
||||
if self.get_declared_value(entry_name).is_none() {
|
||||
Some(self.declare_entry_fn(entry_name, fn_type, ()))
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// If the symbol already exists, it is an error: for example, the user wrote
|
||||
// #[no_mangle] extern "C" fn main(..) {..}
|
||||
// instead of #[start]
|
||||
|
@ -1,9 +1,14 @@
|
||||
use gccjit::RValue;
|
||||
use rustc_codegen_ssa::mir::debuginfo::{FunctionDebugContext, VariableKind};
|
||||
use crate::rustc_index::Idx;
|
||||
use gccjit::{Location, RValue};
|
||||
use rustc_codegen_ssa::mir::debuginfo::{DebugScope, FunctionDebugContext, VariableKind};
|
||||
use rustc_codegen_ssa::traits::{DebugInfoBuilderMethods, DebugInfoMethods};
|
||||
use rustc_middle::mir;
|
||||
use rustc_data_structures::sync::Lrc;
|
||||
use rustc_index::bit_set::BitSet;
|
||||
use rustc_index::IndexVec;
|
||||
use rustc_middle::mir::{self, Body, SourceScope};
|
||||
use rustc_middle::ty::{Instance, PolyExistentialTraitRef, Ty};
|
||||
use rustc_span::{SourceFile, Span, Symbol};
|
||||
use rustc_session::config::DebugInfo;
|
||||
use rustc_span::{BytePos, Pos, SourceFile, SourceFileAndLine, Span, Symbol};
|
||||
use rustc_target::abi::call::FnAbi;
|
||||
use rustc_target::abi::Size;
|
||||
use std::ops::Range;
|
||||
@ -11,31 +16,183 @@ use std::ops::Range;
|
||||
use crate::builder::Builder;
|
||||
use crate::context::CodegenCx;
|
||||
|
||||
pub(super) const UNKNOWN_LINE_NUMBER: u32 = 0;
|
||||
pub(super) const UNKNOWN_COLUMN_NUMBER: u32 = 0;
|
||||
|
||||
impl<'a, 'gcc, 'tcx> DebugInfoBuilderMethods for Builder<'a, 'gcc, 'tcx> {
|
||||
// FIXME(eddyb) find a common convention for all of the debuginfo-related
|
||||
// names (choose between `dbg`, `debug`, `debuginfo`, `debug_info` etc.).
|
||||
fn dbg_var_addr(
|
||||
&mut self,
|
||||
_dbg_var: Self::DIVariable,
|
||||
_scope_metadata: Self::DIScope,
|
||||
_dbg_loc: Self::DILocation,
|
||||
_variable_alloca: Self::Value,
|
||||
_direct_offset: Size,
|
||||
_indirect_offsets: &[Size],
|
||||
_fragment: Option<Range<Size>>,
|
||||
) {
|
||||
unimplemented!();
|
||||
// FIXME(tempdragon): Not sure if this is correct, probably wrong but still keep it here.
|
||||
#[cfg(feature = "master")]
|
||||
_variable_alloca.set_location(_dbg_loc);
|
||||
}
|
||||
|
||||
fn insert_reference_to_gdb_debug_scripts_section_global(&mut self) {
|
||||
// TODO(antoyo): insert reference to gdb debug scripts section global.
|
||||
}
|
||||
|
||||
fn set_var_name(&mut self, _value: RValue<'gcc>, _name: &str) {
|
||||
unimplemented!();
|
||||
/// FIXME(tempdragon): Currently, this function is not yet implemented. It seems that the
|
||||
/// debug name and the mangled name should both be included in the LValues.
|
||||
/// Besides, a function to get the rvalue type(m_is_lvalue) should also be included.
|
||||
fn set_var_name(&mut self, _value: RValue<'gcc>, _name: &str) {}
|
||||
|
||||
fn set_dbg_loc(&mut self, dbg_loc: Self::DILocation) {
|
||||
self.location = Some(dbg_loc);
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate the `debug_context` in an MIR Body.
/// # Source of Origin
/// Copied from `create_scope_map.rs` of rustc_codegen_llvm
|
||||
fn compute_mir_scopes<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
instance: Instance<'tcx>,
|
||||
mir: &Body<'tcx>,
|
||||
debug_context: &mut FunctionDebugContext<'tcx, (), Location<'gcc>>,
|
||||
) {
|
||||
// Find all scopes with variables defined in them.
|
||||
let variables = if cx.sess().opts.debuginfo == DebugInfo::Full {
|
||||
let mut vars = BitSet::new_empty(mir.source_scopes.len());
|
||||
// FIXME(eddyb) take into account that arguments always have debuginfo,
|
||||
// irrespective of their name (assuming full debuginfo is enabled).
|
||||
// NOTE(eddyb) actually, on second thought, those are always in the
|
||||
// function scope, which always exists.
|
||||
for var_debug_info in &mir.var_debug_info {
|
||||
vars.insert(var_debug_info.source_info.scope);
|
||||
}
|
||||
Some(vars)
|
||||
} else {
|
||||
// Nothing to emit, of course.
|
||||
None
|
||||
};
|
||||
let mut instantiated = BitSet::new_empty(mir.source_scopes.len());
|
||||
// Instantiate all scopes.
|
||||
for idx in 0..mir.source_scopes.len() {
|
||||
let scope = SourceScope::new(idx);
|
||||
make_mir_scope(cx, instance, mir, &variables, debug_context, &mut instantiated, scope);
|
||||
}
|
||||
assert!(instantiated.count() == mir.source_scopes.len());
|
||||
}
|
||||
|
||||
/// Update the `debug_context`, adding a new scope to it,
/// if it's not already added, as denoted in `instantiated`.
///
/// # Source of Origin
/// Copied from `create_scope_map.rs` of rustc_codegen_llvm
/// FIXME(tempdragon/?): Add Scope Support Here.
|
||||
fn make_mir_scope<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
instance: Instance<'tcx>,
|
||||
mir: &Body<'tcx>,
|
||||
variables: &Option<BitSet<SourceScope>>,
|
||||
debug_context: &mut FunctionDebugContext<'tcx, (), Location<'gcc>>,
|
||||
instantiated: &mut BitSet<SourceScope>,
|
||||
scope: SourceScope,
|
||||
) {
|
||||
if instantiated.contains(scope) {
|
||||
return;
|
||||
}
|
||||
|
||||
fn set_dbg_loc(&mut self, _dbg_loc: Self::DILocation) {
|
||||
unimplemented!();
|
||||
let scope_data = &mir.source_scopes[scope];
|
||||
let parent_scope = if let Some(parent) = scope_data.parent_scope {
|
||||
make_mir_scope(cx, instance, mir, variables, debug_context, instantiated, parent);
|
||||
debug_context.scopes[parent]
|
||||
} else {
|
||||
// The root is the function itself.
|
||||
let file = cx.sess().source_map().lookup_source_file(mir.span.lo());
|
||||
debug_context.scopes[scope] = DebugScope {
|
||||
file_start_pos: file.start_pos,
|
||||
file_end_pos: file.end_position(),
|
||||
..debug_context.scopes[scope]
|
||||
};
|
||||
instantiated.insert(scope);
|
||||
return;
|
||||
};
|
||||
|
||||
if let Some(vars) = variables {
|
||||
if !vars.contains(scope) && scope_data.inlined.is_none() {
|
||||
// Do not create a DIScope if there are no variables defined in this
|
||||
// MIR `SourceScope`, and it's not `inlined`, to avoid debuginfo bloat.
|
||||
debug_context.scopes[scope] = parent_scope;
|
||||
instantiated.insert(scope);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let loc = cx.lookup_debug_loc(scope_data.span.lo());
|
||||
|
||||
// FIXME(tempdragon): Add the scope related code here if the scope is supported.
|
||||
let dbg_scope = ();
|
||||
|
||||
let inlined_at = scope_data.inlined.map(|(_, callsite_span)| {
|
||||
// FIXME(eddyb) this doesn't account for the macro-related
|
||||
// `Span` fixups that `rustc_codegen_ssa::mir::debuginfo` does.
|
||||
let callsite_scope = parent_scope.adjust_dbg_scope_for_span(cx, callsite_span);
|
||||
cx.dbg_loc(callsite_scope, parent_scope.inlined_at, callsite_span)
|
||||
});
|
||||
let p_inlined_at = parent_scope.inlined_at;
|
||||
// TODO(tempdragon): dbg_scope: Add support for scope extension here.
|
||||
inlined_at.or(p_inlined_at);
|
||||
|
||||
debug_context.scopes[scope] = DebugScope {
|
||||
dbg_scope,
|
||||
inlined_at,
|
||||
file_start_pos: loc.file.start_pos,
|
||||
file_end_pos: loc.file.end_position(),
|
||||
};
|
||||
instantiated.insert(scope);
|
||||
}
|
||||
|
||||
/// A source code location used to generate debug information.
// FIXME(eddyb) rename this to better indicate it's a duplicate of
// `rustc_span::Loc` rather than `DILocation`, perhaps by making
// `lookup_char_pos` return the right information instead.
pub struct DebugLoc {
/// Information about the original source file.
pub file: Lrc<SourceFile>,
/// The (1-based) line number.
pub line: u32,
/// The (1-based) column number.
pub col: u32,
}
|
||||
|
||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
/// Looks up debug source information about a `BytePos`.
|
||||
// FIXME(eddyb) rename this to better indicate it's a duplicate of
|
||||
// `lookup_char_pos` rather than `dbg_loc`, perhaps by making
|
||||
// `lookup_char_pos` return the right information instead.
|
||||
// Source of Origin: cg_llvm
|
||||
pub fn lookup_debug_loc(&self, pos: BytePos) -> DebugLoc {
|
||||
let (file, line, col) = match self.sess().source_map().lookup_line(pos) {
|
||||
Ok(SourceFileAndLine { sf: file, line }) => {
|
||||
let line_pos = file.lines()[line];
|
||||
|
||||
// Use 1-based indexing.
|
||||
let line = (line + 1) as u32;
|
||||
let col = (file.relative_position(pos) - line_pos).to_u32() + 1;
|
||||
|
||||
(file, line, col)
|
||||
}
|
||||
Err(file) => (file, UNKNOWN_LINE_NUMBER, UNKNOWN_COLUMN_NUMBER),
|
||||
};
|
||||
|
||||
// For MSVC, omit the column number.
// Otherwise, emit it. This mimics clang behaviour.
// See discussion in https://github.com/rust-lang/rust/issues/42921
if self.sess().target.is_like_msvc {
DebugLoc { file, line, col: UNKNOWN_COLUMN_NUMBER }
} else {
DebugLoc { file, line, col }
}
}
}
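// Illustrative example (assumption, not part of the diff): both `line` and `col` in the
// DebugLoc returned above are 1-based, and `col` is UNKNOWN_COLUMN_NUMBER (0) on
// MSVC-like targets, so a simple renderer could be:
fn format_debug_loc(loc: &DebugLoc) -> String {
    format!("line {}, column {}", loc.line, loc.col)
}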
|
||||
|
||||
@ -51,13 +208,31 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
fn create_function_debug_context(
|
||||
&self,
|
||||
_instance: Instance<'tcx>,
|
||||
_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
|
||||
_llfn: RValue<'gcc>,
|
||||
_mir: &mir::Body<'tcx>,
|
||||
instance: Instance<'tcx>,
|
||||
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
|
||||
llfn: RValue<'gcc>,
|
||||
mir: &mir::Body<'tcx>,
|
||||
) -> Option<FunctionDebugContext<'tcx, Self::DIScope, Self::DILocation>> {
|
||||
// TODO(antoyo)
|
||||
None
|
||||
if self.sess().opts.debuginfo == DebugInfo::None {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Initialize fn debug context (including scopes).
|
||||
let empty_scope = DebugScope {
|
||||
dbg_scope: self.dbg_scope_fn(instance, fn_abi, Some(llfn)),
|
||||
inlined_at: None,
|
||||
file_start_pos: BytePos(0),
|
||||
file_end_pos: BytePos(0),
|
||||
};
|
||||
let mut fn_debug_context = FunctionDebugContext {
|
||||
scopes: IndexVec::from_elem(empty_scope, &mir.source_scopes.as_slice()),
|
||||
inlined_function_scopes: Default::default(),
|
||||
};
|
||||
|
||||
// Fill in all the scopes, with the information from the MIR body.
|
||||
compute_mir_scopes(self, instance, mir, &mut fn_debug_context);
|
||||
|
||||
Some(fn_debug_context)
|
||||
}
|
||||
|
||||
fn extend_scope_to_file(
|
||||
@ -65,11 +240,11 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
_scope_metadata: Self::DIScope,
|
||||
_file: &SourceFile,
|
||||
) -> Self::DIScope {
|
||||
unimplemented!();
|
||||
// TODO(antoyo): implement.
|
||||
}
|
||||
|
||||
fn debuginfo_finalize(&self) {
|
||||
// TODO(antoyo)
|
||||
self.context.set_debug_info(true)
|
||||
}
|
||||
|
||||
fn create_dbg_var(
|
||||
@ -80,7 +255,6 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
_variable_kind: VariableKind,
|
||||
_span: Span,
|
||||
) -> Self::DIVariable {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn dbg_scope_fn(
|
||||
@ -89,15 +263,40 @@ impl<'gcc, 'tcx> DebugInfoMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
_fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
|
||||
_maybe_definition_llfn: Option<RValue<'gcc>>,
|
||||
) -> Self::DIScope {
|
||||
unimplemented!();
|
||||
// TODO(antoyo): implement.
|
||||
}
|
||||
|
||||
fn dbg_loc(
|
||||
&self,
|
||||
_scope: Self::DIScope,
|
||||
_inlined_at: Option<Self::DILocation>,
|
||||
_span: Span,
|
||||
span: Span,
|
||||
) -> Self::DILocation {
|
||||
unimplemented!();
|
||||
let pos = span.lo();
|
||||
let DebugLoc { file, line, col } = self.lookup_debug_loc(pos);
|
||||
let loc = match &file.name {
|
||||
rustc_span::FileName::Real(name) => match name {
|
||||
rustc_span::RealFileName::LocalPath(name) => {
|
||||
if let Some(name) = name.to_str() {
|
||||
self.context.new_location(name, line as i32, col as i32)
|
||||
} else {
|
||||
Location::null()
|
||||
}
|
||||
}
|
||||
rustc_span::RealFileName::Remapped { local_path, virtual_name: _ } => {
|
||||
if let Some(name) = local_path.as_ref() {
|
||||
if let Some(name) = name.to_str() {
|
||||
self.context.new_location(name, line as i32, col as i32)
|
||||
} else {
|
||||
Location::null()
|
||||
}
|
||||
} else {
|
||||
Location::null()
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => Location::null(),
|
||||
};
|
||||
loc
|
||||
}
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
use gccjit::{Function, FunctionType, GlobalKind, LValue, RValue, Type};
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use gccjit::{FnAttribute, ToRValue};
|
||||
use gccjit::{Function, FunctionType, GlobalKind, LValue, RValue, Type};
|
||||
use rustc_codegen_ssa::traits::BaseTypeMethods;
|
||||
use rustc_middle::ty::Ty;
|
||||
use rustc_span::Symbol;
|
||||
@ -11,7 +11,13 @@ use crate::context::CodegenCx;
|
||||
use crate::intrinsic::llvm;
|
||||
|
||||
impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
pub fn get_or_insert_global(&self, name: &str, ty: Type<'gcc>, is_tls: bool, link_section: Option<Symbol>) -> LValue<'gcc> {
|
||||
pub fn get_or_insert_global(
|
||||
&self,
|
||||
name: &str,
|
||||
ty: Type<'gcc>,
|
||||
is_tls: bool,
|
||||
link_section: Option<Symbol>,
|
||||
) -> LValue<'gcc> {
|
||||
if self.globals.borrow().contains_key(name) {
|
||||
let typ = self.globals.borrow()[name].get_type();
|
||||
let global = self.context.new_global(None, GlobalKind::Imported, typ, name);
|
||||
@ -22,8 +28,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
global.set_link_section(link_section.as_str());
|
||||
}
|
||||
global
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.declare_global(name, ty, GlobalKind::Exported, is_tls, link_section)
|
||||
}
|
||||
}
|
||||
@ -33,19 +38,37 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
self.context.new_global(None, GlobalKind::Internal, ty, &name)
|
||||
}
|
||||
|
||||
pub fn declare_global_with_linkage(&self, name: &str, ty: Type<'gcc>, linkage: GlobalKind) -> LValue<'gcc> {
|
||||
pub fn declare_global_with_linkage(
|
||||
&self,
|
||||
name: &str,
|
||||
ty: Type<'gcc>,
|
||||
linkage: GlobalKind,
|
||||
) -> LValue<'gcc> {
|
||||
let global = self.context.new_global(None, linkage, ty, name);
|
||||
let global_address = global.get_address(None);
|
||||
self.globals.borrow_mut().insert(name.to_string(), global_address);
|
||||
global
|
||||
}
|
||||
|
||||
pub fn declare_func(&self, name: &str, return_type: Type<'gcc>, params: &[Type<'gcc>], variadic: bool) -> Function<'gcc> {
|
||||
pub fn declare_func(
|
||||
&self,
|
||||
name: &str,
|
||||
return_type: Type<'gcc>,
|
||||
params: &[Type<'gcc>],
|
||||
variadic: bool,
|
||||
) -> Function<'gcc> {
|
||||
self.linkage.set(FunctionType::Extern);
|
||||
declare_raw_fn(self, name, () /*llvm::CCallConv*/, return_type, params, variadic)
|
||||
}
|
||||
|
||||
pub fn declare_global(&self, name: &str, ty: Type<'gcc>, global_kind: GlobalKind, is_tls: bool, link_section: Option<Symbol>) -> LValue<'gcc> {
|
||||
pub fn declare_global(
|
||||
&self,
|
||||
name: &str,
|
||||
ty: Type<'gcc>,
|
||||
global_kind: GlobalKind,
|
||||
is_tls: bool,
|
||||
link_section: Option<Symbol>,
|
||||
) -> LValue<'gcc> {
|
||||
let global = self.context.new_global(None, global_kind, ty, name);
|
||||
if is_tls {
|
||||
global.set_tls_model(self.tls_model);
|
||||
@ -65,13 +88,25 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
global
|
||||
}
|
||||
|
||||
pub fn declare_entry_fn(&self, name: &str, _fn_type: Type<'gcc>, callconv: () /*llvm::CCallConv*/) -> RValue<'gcc> {
|
||||
pub fn declare_entry_fn(
|
||||
&self,
|
||||
name: &str,
|
||||
_fn_type: Type<'gcc>,
|
||||
callconv: (), /*llvm::CCallConv*/
|
||||
) -> RValue<'gcc> {
|
||||
// TODO(antoyo): use the fn_type parameter.
|
||||
let const_string = self.context.new_type::<u8>().make_pointer().make_pointer();
|
||||
let return_type = self.type_i32();
|
||||
let variadic = false;
|
||||
self.linkage.set(FunctionType::Exported);
|
||||
let func = declare_raw_fn(self, name, callconv, return_type, &[self.type_i32(), const_string], variadic);
|
||||
let func = declare_raw_fn(
|
||||
self,
|
||||
name,
|
||||
callconv,
|
||||
return_type,
|
||||
&[self.type_i32(), const_string],
|
||||
variadic,
|
||||
);
|
||||
// NOTE: it is needed to set the current_func here as well, because get_fn() is not called
|
||||
// for the main function.
|
||||
*self.current_func.borrow_mut() = Some(func);
|
||||
@ -85,19 +120,32 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
arguments_type,
|
||||
is_c_variadic,
|
||||
on_stack_param_indices,
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
fn_attributes,
|
||||
} = fn_abi.gcc_type(self);
|
||||
let func = declare_raw_fn(self, name, () /*fn_abi.llvm_cconv()*/, return_type, &arguments_type, is_c_variadic);
|
||||
let func = declare_raw_fn(
|
||||
self,
|
||||
name,
|
||||
(), /*fn_abi.llvm_cconv()*/
|
||||
return_type,
|
||||
&arguments_type,
|
||||
is_c_variadic,
|
||||
);
|
||||
self.on_stack_function_params.borrow_mut().insert(func, on_stack_param_indices);
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
for fn_attr in fn_attributes {
|
||||
func.add_attribute(fn_attr);
|
||||
}
|
||||
func
|
||||
}
|
||||
|
||||
pub fn define_global(&self, name: &str, ty: Type<'gcc>, is_tls: bool, link_section: Option<Symbol>) -> LValue<'gcc> {
|
||||
pub fn define_global(
|
||||
&self,
|
||||
name: &str,
|
||||
ty: Type<'gcc>,
|
||||
is_tls: bool,
|
||||
link_section: Option<Symbol>,
|
||||
) -> LValue<'gcc> {
|
||||
self.get_or_insert_global(name, ty, is_tls, link_section)
|
||||
}
|
||||
|
||||
@ -111,24 +159,36 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
///
|
||||
/// If there’s a value with the same name already declared, the function will
|
||||
/// update the declaration and return existing Value instead.
|
||||
fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*llvm::CallConv*/, return_type: Type<'gcc>, param_types: &[Type<'gcc>], variadic: bool) -> Function<'gcc> {
|
||||
fn declare_raw_fn<'gcc>(
|
||||
cx: &CodegenCx<'gcc, '_>,
|
||||
name: &str,
|
||||
_callconv: (), /*llvm::CallConv*/
|
||||
return_type: Type<'gcc>,
|
||||
param_types: &[Type<'gcc>],
|
||||
variadic: bool,
|
||||
) -> Function<'gcc> {
|
||||
if name.starts_with("llvm.") {
|
||||
let intrinsic = llvm::intrinsic(name, cx);
|
||||
cx.intrinsics.borrow_mut().insert(name.to_string(), intrinsic);
|
||||
return intrinsic;
|
||||
}
|
||||
let func =
|
||||
if cx.functions.borrow().contains_key(name) {
|
||||
let func = if cx.functions.borrow().contains_key(name) {
|
||||
cx.functions.borrow()[name]
|
||||
}
|
||||
else {
|
||||
let params: Vec<_> = param_types.into_iter().enumerate()
|
||||
.map(|(index, param)| cx.context.new_parameter(None, *param, &format!("param{}", index))) // TODO(antoyo): set name.
|
||||
} else {
|
||||
let params: Vec<_> = param_types
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, param)| {
|
||||
cx.context.new_parameter(None, *param, &format!("param{}", index))
|
||||
}) // TODO(antoyo): set name.
|
||||
.collect();
|
||||
let func = cx.context.new_function(None, cx.linkage.get(), return_type, &params, mangle_name(name), variadic);
|
||||
#[cfg(not(feature = "master"))]
|
||||
let name = mangle_name(name);
|
||||
let func =
|
||||
cx.context.new_function(None, cx.linkage.get(), return_type, &params, &name, variadic);
|
||||
cx.functions.borrow_mut().insert(name.to_string(), func);
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
if name == "rust_eh_personality" {
|
||||
// NOTE: GCC will sometimes change the personality function set on a function from
|
||||
// rust_eh_personality to __gcc_personality_v0 as an optimization.
|
||||
@ -139,10 +199,21 @@ fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*ll
|
||||
// Since aliases don't work (maybe because of a bug in LTO partitioning?), we
|
||||
// create a wrapper function that calls rust_eh_personality.
|
||||
|
||||
let params: Vec<_> = param_types.into_iter().enumerate()
|
||||
.map(|(index, param)| cx.context.new_parameter(None, *param, &format!("param{}", index))) // TODO(antoyo): set name.
|
||||
let params: Vec<_> = param_types
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, param)| {
|
||||
cx.context.new_parameter(None, *param, &format!("param{}", index))
|
||||
}) // TODO(antoyo): set name.
|
||||
.collect();
|
||||
let gcc_func = cx.context.new_function(None, FunctionType::Exported, return_type, &params, "__gcc_personality_v0", variadic);
|
||||
let gcc_func = cx.context.new_function(
|
||||
None,
|
||||
FunctionType::Exported,
|
||||
return_type,
|
||||
&params,
|
||||
"__gcc_personality_v0",
|
||||
variadic,
|
||||
);
|
||||
|
||||
// We need a normal extern function for the crates that access rust_eh_personality
|
||||
// without defining it, otherwise we'll get a compiler error.
|
||||
@ -159,8 +230,7 @@ fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*ll
|
||||
if return_type == cx.type_void() {
|
||||
block.add_eval(None, call);
|
||||
block.end_with_void_return(None);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
block.end_with_return(None, call);
|
||||
}
|
||||
}
|
||||
@ -179,15 +249,24 @@ fn declare_raw_fn<'gcc>(cx: &CodegenCx<'gcc, '_>, name: &str, _callconv: () /*ll
}

// FIXME(antoyo): this is a hack because libgccjit currently only supports alpha, num and _.
// Unsupported characters: `$` and `.`.
pub fn mangle_name(name: &str) -> String {
name.replace(|char: char| {
// Unsupported characters: `$`, `.` and `*`.
// FIXME(antoyo): `*` might not be expected: https://github.com/rust-lang/rust/issues/116979#issuecomment-1840926865
#[cfg(not(feature = "master"))]
fn mangle_name(name: &str) -> String {
name.replace(
|char: char| {
if !char.is_alphanumeric() && char != '_' {
debug_assert!("$.*".contains(char), "Unsupported char in function name {}: {}", name, char);
debug_assert!(
"$.*".contains(char),
"Unsupported char in function name {}: {}",
name,
char
);
true
}
else {
} else {
false
}
}, "_")
},
"_",
)
}

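For illustration only (not taken from the patch), the replacement rule described in the comments above amounts to mapping every character that is not alphanumeric or `_` to `_`. A minimal standalone sketch, with made-up input names:

    // Standalone sketch of the same replacement rule; `mangle_name_sketch` and the
    // sample names are illustrative, not part of rustc_codegen_gcc.
    fn mangle_name_sketch(name: &str) -> String {
        name.replace(|c: char| !c.is_alphanumeric() && c != '_', "_")
    }

    fn main() {
        assert_eq!(mangle_name_sketch("foo.bar$baz"), "foo_bar_baz");
        assert_eq!(mangle_name_sketch("already_fine_123"), "already_fine_123");
    }
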
@ -1,9 +1,6 @@
use rustc_errors::{
DiagCtxt, DiagArgValue, Diag, EmissionGuarantee, IntoDiagnostic, IntoDiagnosticArg, Level,
};
use rustc_errors::{Diag, DiagCtxt, EmissionGuarantee, IntoDiagnostic, Level};
use rustc_macros::{Diagnostic, Subdiagnostic};
use rustc_span::Span;
use std::borrow::Cow;

use crate::fluent_generated as fluent;

@ -31,18 +28,6 @@ pub(crate) enum PossibleFeature<'a> {
|
||||
None,
|
||||
}
|
||||
|
||||
struct ExitCode(Option<i32>);
|
||||
|
||||
impl IntoDiagnosticArg for ExitCode {
|
||||
fn into_diagnostic_arg(self) -> DiagArgValue {
|
||||
let ExitCode(exit_code) = self;
|
||||
match exit_code {
|
||||
Some(t) => t.into_diagnostic_arg(),
|
||||
None => DiagArgValue::Str(Cow::Borrowed("<signal>")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc_lto_not_supported)]
|
||||
pub(crate) struct LTONotSupported;
|
||||
@ -80,12 +65,6 @@ pub(crate) struct CopyBitcode {
|
||||
#[note]
|
||||
pub(crate) struct DynamicLinkingWithLTO;
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc_load_bitcode)]
|
||||
pub(crate) struct LoadBitcode {
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(codegen_gcc_lto_disallowed)]
|
||||
pub(crate) struct LtoDisallowed;
|
||||
|
@ -1,4 +1,4 @@
#[cfg(feature="master")]
#[cfg(feature = "master")]
use gccjit::Context;
use smallvec::{smallvec, SmallVec};

@ -7,7 +7,10 @@ use rustc_middle::bug;
use rustc_session::Session;
use rustc_target::target_features::RUSTC_SPECIFIC_FEATURES;

use crate::errors::{PossibleFeature, TargetFeatureDisableOrEnable, UnknownCTargetFeature, UnknownCTargetFeaturePrefix};
use crate::errors::{
PossibleFeature, TargetFeatureDisableOrEnable, UnknownCTargetFeature,
UnknownCTargetFeaturePrefix,
};

/// The list of GCC features computed from CLI flags (`-Ctarget-cpu`, `-Ctarget-feature`,
/// `--target` and similar).
@ -44,7 +47,10 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri
// -Ctarget-features
let supported_features = sess.target.supported_target_features();
let mut featsmap = FxHashMap::default();
let feats = sess.opts.cg.target_feature
let feats = sess
.opts
.cg
.target_feature
.split(',')
.filter_map(|s| {
let enable_disable = match s.chars().next() {
@ -69,14 +75,12 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri
|
||||
None
|
||||
}
|
||||
});
|
||||
let unknown_feature =
|
||||
if let Some(rust_feature) = rust_feature {
|
||||
let unknown_feature = if let Some(rust_feature) = rust_feature {
|
||||
UnknownCTargetFeature {
|
||||
feature,
|
||||
rust_feature: PossibleFeature::Some { rust_feature },
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
UnknownCTargetFeature { feature, rust_feature: PossibleFeature::None }
|
||||
};
|
||||
sess.dcx().emit_warn(unknown_feature);
|
||||
@ -95,14 +99,14 @@ pub(crate) fn global_gcc_features(sess: &Session, diagnostics: bool) -> Vec<Stri
|
||||
// passing requests down to GCC. This means that all in-language
|
||||
// features also work on the command line instead of having two
|
||||
// different names when the GCC name and the Rust name differ.
|
||||
Some(to_gcc_features(sess, feature)
|
||||
Some(
|
||||
to_gcc_features(sess, feature)
|
||||
.iter()
|
||||
.flat_map(|feat| to_gcc_features(sess, feat).into_iter())
|
||||
.map(|feature| {
|
||||
if enable_disable == '-' {
|
||||
format!("-{}", feature)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
feature.to_string()
|
||||
}
|
||||
})
|
||||
@ -184,7 +188,10 @@ pub fn to_gcc_features<'a>(sess: &Session, s: &'a str) -> SmallVec<[&'a str; 2]>
|
||||
|
||||
// Given a map from target_features to whether they are enabled or disabled,
|
||||
// ensure only valid combinations are allowed.
|
||||
pub fn check_tied_features(sess: &Session, features: &FxHashMap<&str, bool>) -> Option<&'static [&'static str]> {
|
||||
pub fn check_tied_features(
|
||||
sess: &Session,
|
||||
features: &FxHashMap<&str, bool>,
|
||||
) -> Option<&'static [&'static str]> {
|
||||
for tied in sess.target.tied_target_features() {
|
||||
// Tied features must be set to the same value, or not set at all
|
||||
let mut tied_iter = tied.iter();
|
||||
@ -208,15 +215,13 @@ fn handle_native(name: &str) -> &str {
return arch_to_gcc(name);
}

#[cfg(feature="master")]
#[cfg(feature = "master")]
{
// Get the native arch.
let context = Context::default();
context.get_target_info().arch().unwrap()
.to_str()
.unwrap()
context.get_target_info().arch().unwrap().to_str().unwrap()
}
#[cfg(not(feature="master"))]
#[cfg(not(feature = "master"))]
unimplemented!();
}

File diff suppressed because it is too large
@ -151,8 +151,10 @@ match name {
"llvm.amdgcn.msad.u8" => "__builtin_amdgcn_msad_u8",
"llvm.amdgcn.perm" => "__builtin_amdgcn_perm",
"llvm.amdgcn.permlane16" => "__builtin_amdgcn_permlane16",
"llvm.amdgcn.permlane16.var" => "__builtin_amdgcn_permlane16_var",
"llvm.amdgcn.permlane64" => "__builtin_amdgcn_permlane64",
"llvm.amdgcn.permlanex16" => "__builtin_amdgcn_permlanex16",
"llvm.amdgcn.permlanex16.var" => "__builtin_amdgcn_permlanex16_var",
"llvm.amdgcn.qsad.pk.u16.u8" => "__builtin_amdgcn_qsad_pk_u16_u8",
"llvm.amdgcn.queue.ptr" => "__builtin_amdgcn_queue_ptr",
"llvm.amdgcn.rcp.legacy" => "__builtin_amdgcn_rcp_legacy",
@ -160,11 +162,20 @@ match name {
|
||||
"llvm.amdgcn.readlane" => "__builtin_amdgcn_readlane",
|
||||
"llvm.amdgcn.rsq.legacy" => "__builtin_amdgcn_rsq_legacy",
|
||||
"llvm.amdgcn.s.barrier" => "__builtin_amdgcn_s_barrier",
|
||||
"llvm.amdgcn.s.barrier.init" => "__builtin_amdgcn_s_barrier_init",
|
||||
"llvm.amdgcn.s.barrier.join" => "__builtin_amdgcn_s_barrier_join",
|
||||
"llvm.amdgcn.s.barrier.leave" => "__builtin_amdgcn_s_barrier_leave",
|
||||
"llvm.amdgcn.s.barrier.signal" => "__builtin_amdgcn_s_barrier_signal",
|
||||
"llvm.amdgcn.s.barrier.signal.isfirst" => "__builtin_amdgcn_s_barrier_signal_isfirst",
|
||||
"llvm.amdgcn.s.barrier.signal.isfirst.var" => "__builtin_amdgcn_s_barrier_signal_isfirst_var",
|
||||
"llvm.amdgcn.s.barrier.signal.var" => "__builtin_amdgcn_s_barrier_signal_var",
|
||||
"llvm.amdgcn.s.barrier.wait" => "__builtin_amdgcn_s_barrier_wait",
|
||||
"llvm.amdgcn.s.dcache.inv" => "__builtin_amdgcn_s_dcache_inv",
|
||||
"llvm.amdgcn.s.dcache.inv.vol" => "__builtin_amdgcn_s_dcache_inv_vol",
|
||||
"llvm.amdgcn.s.dcache.wb" => "__builtin_amdgcn_s_dcache_wb",
|
||||
"llvm.amdgcn.s.dcache.wb.vol" => "__builtin_amdgcn_s_dcache_wb_vol",
|
||||
"llvm.amdgcn.s.decperflevel" => "__builtin_amdgcn_s_decperflevel",
|
||||
"llvm.amdgcn.s.get.barrier.state" => "__builtin_amdgcn_s_get_barrier_state",
|
||||
"llvm.amdgcn.s.get.waveid.in.workgroup" => "__builtin_amdgcn_s_get_waveid_in_workgroup",
|
||||
"llvm.amdgcn.s.getpc" => "__builtin_amdgcn_s_getpc",
|
||||
"llvm.amdgcn.s.getreg" => "__builtin_amdgcn_s_getreg",
|
||||
@ -176,8 +187,10 @@ match name {
|
||||
"llvm.amdgcn.s.setprio" => "__builtin_amdgcn_s_setprio",
|
||||
"llvm.amdgcn.s.setreg" => "__builtin_amdgcn_s_setreg",
|
||||
"llvm.amdgcn.s.sleep" => "__builtin_amdgcn_s_sleep",
|
||||
"llvm.amdgcn.s.sleep.var" => "__builtin_amdgcn_s_sleep_var",
|
||||
"llvm.amdgcn.s.wait.event.export.ready" => "__builtin_amdgcn_s_wait_event_export_ready",
|
||||
"llvm.amdgcn.s.waitcnt" => "__builtin_amdgcn_s_waitcnt",
|
||||
"llvm.amdgcn.s.wakeup.barrier" => "__builtin_amdgcn_s_wakeup_barrier",
|
||||
"llvm.amdgcn.sad.hi.u8" => "__builtin_amdgcn_sad_hi_u8",
|
||||
"llvm.amdgcn.sad.u16" => "__builtin_amdgcn_sad_u16",
|
||||
"llvm.amdgcn.sad.u8" => "__builtin_amdgcn_sad_u8",
|
||||
@ -314,6 +327,8 @@ match name {
|
||||
// bpf
|
||||
"llvm.bpf.btf.type.id" => "__builtin_bpf_btf_type_id",
|
||||
"llvm.bpf.compare" => "__builtin_bpf_compare",
|
||||
"llvm.bpf.getelementptr.and.load" => "__builtin_bpf_getelementptr_and_load",
|
||||
"llvm.bpf.getelementptr.and.store" => "__builtin_bpf_getelementptr_and_store",
|
||||
"llvm.bpf.load.byte" => "__builtin_bpf_load_byte",
|
||||
"llvm.bpf.load.half" => "__builtin_bpf_load_half",
|
||||
"llvm.bpf.load.word" => "__builtin_bpf_load_word",
|
||||
@ -5776,14 +5791,6 @@ match name {
|
||||
"llvm.s390.verimf" => "__builtin_s390_verimf",
|
||||
"llvm.s390.verimg" => "__builtin_s390_verimg",
|
||||
"llvm.s390.verimh" => "__builtin_s390_verimh",
|
||||
"llvm.s390.verllb" => "__builtin_s390_verllb",
|
||||
"llvm.s390.verllf" => "__builtin_s390_verllf",
|
||||
"llvm.s390.verllg" => "__builtin_s390_verllg",
|
||||
"llvm.s390.verllh" => "__builtin_s390_verllh",
|
||||
"llvm.s390.verllvb" => "__builtin_s390_verllvb",
|
||||
"llvm.s390.verllvf" => "__builtin_s390_verllvf",
|
||||
"llvm.s390.verllvg" => "__builtin_s390_verllvg",
|
||||
"llvm.s390.verllvh" => "__builtin_s390_verllvh",
|
||||
"llvm.s390.vfaeb" => "__builtin_s390_vfaeb",
|
||||
"llvm.s390.vfaef" => "__builtin_s390_vfaef",
|
||||
"llvm.s390.vfaeh" => "__builtin_s390_vfaeh",
|
||||
@ -5815,7 +5822,7 @@ match name {
|
||||
"llvm.s390.vistrh" => "__builtin_s390_vistrh",
|
||||
"llvm.s390.vlbb" => "__builtin_s390_vlbb",
|
||||
"llvm.s390.vll" => "__builtin_s390_vll",
|
||||
"llvm.s390.vlrl" => "__builtin_s390_vlrl",
|
||||
"llvm.s390.vlrl" => "__builtin_s390_vlrlr",
|
||||
"llvm.s390.vmaeb" => "__builtin_s390_vmaeb",
|
||||
"llvm.s390.vmaef" => "__builtin_s390_vmaef",
|
||||
"llvm.s390.vmaeh" => "__builtin_s390_vmaeh",
|
||||
@ -5885,7 +5892,7 @@ match name {
|
||||
"llvm.s390.vstrczb" => "__builtin_s390_vstrczb",
|
||||
"llvm.s390.vstrczf" => "__builtin_s390_vstrczf",
|
||||
"llvm.s390.vstrczh" => "__builtin_s390_vstrczh",
|
||||
"llvm.s390.vstrl" => "__builtin_s390_vstrl",
|
||||
"llvm.s390.vstrl" => "__builtin_s390_vstrlr",
|
||||
"llvm.s390.vsumb" => "__builtin_s390_vsumb",
|
||||
"llvm.s390.vsumgf" => "__builtin_s390_vsumgf",
|
||||
"llvm.s390.vsumgh" => "__builtin_s390_vsumgh",
|
||||
|
@ -3,94 +3,185 @@ use std::borrow::Cow;
|
||||
use gccjit::{Function, FunctionPtrType, RValue, ToRValue, UnaryOp};
|
||||
use rustc_codegen_ssa::traits::BuilderMethods;
|
||||
|
||||
use crate::{context::CodegenCx, builder::Builder};
|
||||
use crate::{builder::Builder, context::CodegenCx};
|
||||
|
||||
pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc, 'tcx>, gcc_func: FunctionPtrType<'gcc>, mut args: Cow<'b, [RValue<'gcc>]>, func_name: &str, original_function_name: Option<&String>) -> Cow<'b, [RValue<'gcc>]> {
|
||||
pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(
|
||||
builder: &Builder<'a, 'gcc, 'tcx>,
|
||||
gcc_func: FunctionPtrType<'gcc>,
|
||||
mut args: Cow<'b, [RValue<'gcc>]>,
|
||||
func_name: &str,
|
||||
original_function_name: Option<&String>,
|
||||
) -> Cow<'b, [RValue<'gcc>]> {
|
||||
// Some LLVM intrinsics do not map 1-to-1 to GCC intrinsics, so we add the missing
|
||||
// arguments here.
|
||||
if gcc_func.get_param_count() != args.len() {
|
||||
match &*func_name {
|
||||
// NOTE: the following intrinsics have a different number of parameters in LLVM and GCC.
|
||||
"__builtin_ia32_prold512_mask" | "__builtin_ia32_pmuldq512_mask" | "__builtin_ia32_pmuludq512_mask"
|
||||
| "__builtin_ia32_pmaxsd512_mask" | "__builtin_ia32_pmaxsq512_mask" | "__builtin_ia32_pmaxsq256_mask"
|
||||
| "__builtin_ia32_pmaxsq128_mask" | "__builtin_ia32_pmaxud512_mask" | "__builtin_ia32_pmaxuq512_mask"
|
||||
| "__builtin_ia32_pminsd512_mask" | "__builtin_ia32_pminsq512_mask" | "__builtin_ia32_pminsq256_mask"
|
||||
| "__builtin_ia32_pminsq128_mask" | "__builtin_ia32_pminud512_mask" | "__builtin_ia32_pminuq512_mask"
|
||||
| "__builtin_ia32_prolq512_mask" | "__builtin_ia32_prorq512_mask" | "__builtin_ia32_pslldi512_mask"
|
||||
| "__builtin_ia32_psrldi512_mask" | "__builtin_ia32_psllqi512_mask" | "__builtin_ia32_psrlqi512_mask"
|
||||
| "__builtin_ia32_pslld512_mask" | "__builtin_ia32_psrld512_mask" | "__builtin_ia32_psllq512_mask"
|
||||
| "__builtin_ia32_psrlq512_mask" | "__builtin_ia32_psrad512_mask" | "__builtin_ia32_psraq512_mask"
|
||||
| "__builtin_ia32_psradi512_mask" | "__builtin_ia32_psraqi512_mask" | "__builtin_ia32_psrav16si_mask"
|
||||
| "__builtin_ia32_psrav8di_mask" | "__builtin_ia32_prolvd512_mask" | "__builtin_ia32_prorvd512_mask"
|
||||
| "__builtin_ia32_prolvq512_mask" | "__builtin_ia32_prorvq512_mask" | "__builtin_ia32_psllv16si_mask"
|
||||
| "__builtin_ia32_psrlv16si_mask" | "__builtin_ia32_psllv8di_mask" | "__builtin_ia32_psrlv8di_mask"
|
||||
| "__builtin_ia32_permvarsi512_mask" | "__builtin_ia32_vpermilvarps512_mask"
|
||||
| "__builtin_ia32_vpermilvarpd512_mask" | "__builtin_ia32_permvardi512_mask"
|
||||
| "__builtin_ia32_permvarsf512_mask" | "__builtin_ia32_permvarqi512_mask"
|
||||
| "__builtin_ia32_permvarqi256_mask" | "__builtin_ia32_permvarqi128_mask"
|
||||
| "__builtin_ia32_vpmultishiftqb512_mask" | "__builtin_ia32_vpmultishiftqb256_mask"
|
||||
| "__builtin_ia32_vpmultishiftqb128_mask"
|
||||
=> {
|
||||
"__builtin_ia32_prold512_mask"
|
||||
| "__builtin_ia32_pmuldq512_mask"
|
||||
| "__builtin_ia32_pmuludq512_mask"
|
||||
| "__builtin_ia32_pmaxsd512_mask"
|
||||
| "__builtin_ia32_pmaxsq512_mask"
|
||||
| "__builtin_ia32_pmaxsq256_mask"
|
||||
| "__builtin_ia32_pmaxsq128_mask"
|
||||
| "__builtin_ia32_pmaxud512_mask"
|
||||
| "__builtin_ia32_pmaxuq512_mask"
|
||||
| "__builtin_ia32_pminsd512_mask"
|
||||
| "__builtin_ia32_pminsq512_mask"
|
||||
| "__builtin_ia32_pminsq256_mask"
|
||||
| "__builtin_ia32_pminsq128_mask"
|
||||
| "__builtin_ia32_pminud512_mask"
|
||||
| "__builtin_ia32_pminuq512_mask"
|
||||
| "__builtin_ia32_prolq512_mask"
|
||||
| "__builtin_ia32_prorq512_mask"
|
||||
| "__builtin_ia32_pslldi512_mask"
|
||||
| "__builtin_ia32_psrldi512_mask"
|
||||
| "__builtin_ia32_psllqi512_mask"
|
||||
| "__builtin_ia32_psrlqi512_mask"
|
||||
| "__builtin_ia32_pslld512_mask"
|
||||
| "__builtin_ia32_psrld512_mask"
|
||||
| "__builtin_ia32_psllq512_mask"
|
||||
| "__builtin_ia32_psrlq512_mask"
|
||||
| "__builtin_ia32_psrad512_mask"
|
||||
| "__builtin_ia32_psraq512_mask"
|
||||
| "__builtin_ia32_psradi512_mask"
|
||||
| "__builtin_ia32_psraqi512_mask"
|
||||
| "__builtin_ia32_psrav16si_mask"
|
||||
| "__builtin_ia32_psrav8di_mask"
|
||||
| "__builtin_ia32_prolvd512_mask"
|
||||
| "__builtin_ia32_prorvd512_mask"
|
||||
| "__builtin_ia32_prolvq512_mask"
|
||||
| "__builtin_ia32_prorvq512_mask"
|
||||
| "__builtin_ia32_psllv16si_mask"
|
||||
| "__builtin_ia32_psrlv16si_mask"
|
||||
| "__builtin_ia32_psllv8di_mask"
|
||||
| "__builtin_ia32_psrlv8di_mask"
|
||||
| "__builtin_ia32_permvarsi512_mask"
|
||||
| "__builtin_ia32_vpermilvarps512_mask"
|
||||
| "__builtin_ia32_vpermilvarpd512_mask"
|
||||
| "__builtin_ia32_permvardi512_mask"
|
||||
| "__builtin_ia32_permvarsf512_mask"
|
||||
| "__builtin_ia32_permvarqi512_mask"
|
||||
| "__builtin_ia32_permvarqi256_mask"
|
||||
| "__builtin_ia32_permvarqi128_mask"
|
||||
| "__builtin_ia32_vpmultishiftqb512_mask"
|
||||
| "__builtin_ia32_vpmultishiftqb256_mask"
|
||||
| "__builtin_ia32_vpmultishiftqb128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg3_type = gcc_func.get_param_type(2);
|
||||
let first_arg = builder.current_func().new_local(None, arg3_type, "undefined_for_intrinsic").to_rvalue();
|
||||
let first_arg = builder
|
||||
.current_func()
|
||||
.new_local(None, arg3_type, "undefined_for_intrinsic")
|
||||
.to_rvalue();
|
||||
new_args.push(first_arg);
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg4_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_pmaxuq256_mask" | "__builtin_ia32_pmaxuq128_mask" | "__builtin_ia32_pminuq256_mask"
|
||||
| "__builtin_ia32_pminuq128_mask" | "__builtin_ia32_prold256_mask" | "__builtin_ia32_prold128_mask"
|
||||
| "__builtin_ia32_prord512_mask" | "__builtin_ia32_prord256_mask" | "__builtin_ia32_prord128_mask"
|
||||
| "__builtin_ia32_prolq256_mask" | "__builtin_ia32_prolq128_mask" | "__builtin_ia32_prorq256_mask"
|
||||
| "__builtin_ia32_prorq128_mask" | "__builtin_ia32_psraq256_mask" | "__builtin_ia32_psraq128_mask"
|
||||
| "__builtin_ia32_psraqi256_mask" | "__builtin_ia32_psraqi128_mask" | "__builtin_ia32_psravq256_mask"
|
||||
| "__builtin_ia32_psravq128_mask" | "__builtin_ia32_prolvd256_mask" | "__builtin_ia32_prolvd128_mask"
|
||||
| "__builtin_ia32_prorvd256_mask" | "__builtin_ia32_prorvd128_mask" | "__builtin_ia32_prolvq256_mask"
|
||||
| "__builtin_ia32_prolvq128_mask" | "__builtin_ia32_prorvq256_mask" | "__builtin_ia32_prorvq128_mask"
|
||||
| "__builtin_ia32_permvardi256_mask" | "__builtin_ia32_permvardf512_mask" | "__builtin_ia32_permvardf256_mask"
|
||||
| "__builtin_ia32_pmulhuw512_mask" | "__builtin_ia32_pmulhw512_mask" | "__builtin_ia32_pmulhrsw512_mask"
|
||||
| "__builtin_ia32_pmaxuw512_mask" | "__builtin_ia32_pmaxub512_mask" | "__builtin_ia32_pmaxsw512_mask"
|
||||
| "__builtin_ia32_pmaxsb512_mask" | "__builtin_ia32_pminuw512_mask" | "__builtin_ia32_pminub512_mask"
|
||||
| "__builtin_ia32_pminsw512_mask" | "__builtin_ia32_pminsb512_mask"
|
||||
| "__builtin_ia32_pmaddwd512_mask" | "__builtin_ia32_pmaddubsw512_mask" | "__builtin_ia32_packssdw512_mask"
|
||||
| "__builtin_ia32_packsswb512_mask" | "__builtin_ia32_packusdw512_mask" | "__builtin_ia32_packuswb512_mask"
|
||||
| "__builtin_ia32_pavgw512_mask" | "__builtin_ia32_pavgb512_mask" | "__builtin_ia32_psllw512_mask"
|
||||
| "__builtin_ia32_psllwi512_mask" | "__builtin_ia32_psllv32hi_mask" | "__builtin_ia32_psrlw512_mask"
|
||||
| "__builtin_ia32_psrlwi512_mask" | "__builtin_ia32_psllv16hi_mask" | "__builtin_ia32_psllv8hi_mask"
|
||||
| "__builtin_ia32_psrlv32hi_mask" | "__builtin_ia32_psraw512_mask" | "__builtin_ia32_psrawi512_mask"
|
||||
| "__builtin_ia32_psrlv16hi_mask" | "__builtin_ia32_psrlv8hi_mask" | "__builtin_ia32_psrav32hi_mask"
|
||||
| "__builtin_ia32_permvarhi512_mask" | "__builtin_ia32_pshufb512_mask" | "__builtin_ia32_psrav16hi_mask"
|
||||
| "__builtin_ia32_psrav8hi_mask" | "__builtin_ia32_permvarhi256_mask" | "__builtin_ia32_permvarhi128_mask"
|
||||
=> {
|
||||
}
|
||||
"__builtin_ia32_pmaxuq256_mask"
|
||||
| "__builtin_ia32_pmaxuq128_mask"
|
||||
| "__builtin_ia32_pminuq256_mask"
|
||||
| "__builtin_ia32_pminuq128_mask"
|
||||
| "__builtin_ia32_prold256_mask"
|
||||
| "__builtin_ia32_prold128_mask"
|
||||
| "__builtin_ia32_prord512_mask"
|
||||
| "__builtin_ia32_prord256_mask"
|
||||
| "__builtin_ia32_prord128_mask"
|
||||
| "__builtin_ia32_prolq256_mask"
|
||||
| "__builtin_ia32_prolq128_mask"
|
||||
| "__builtin_ia32_prorq256_mask"
|
||||
| "__builtin_ia32_prorq128_mask"
|
||||
| "__builtin_ia32_psraq256_mask"
|
||||
| "__builtin_ia32_psraq128_mask"
|
||||
| "__builtin_ia32_psraqi256_mask"
|
||||
| "__builtin_ia32_psraqi128_mask"
|
||||
| "__builtin_ia32_psravq256_mask"
|
||||
| "__builtin_ia32_psravq128_mask"
|
||||
| "__builtin_ia32_prolvd256_mask"
|
||||
| "__builtin_ia32_prolvd128_mask"
|
||||
| "__builtin_ia32_prorvd256_mask"
|
||||
| "__builtin_ia32_prorvd128_mask"
|
||||
| "__builtin_ia32_prolvq256_mask"
|
||||
| "__builtin_ia32_prolvq128_mask"
|
||||
| "__builtin_ia32_prorvq256_mask"
|
||||
| "__builtin_ia32_prorvq128_mask"
|
||||
| "__builtin_ia32_permvardi256_mask"
|
||||
| "__builtin_ia32_permvardf512_mask"
|
||||
| "__builtin_ia32_permvardf256_mask"
|
||||
| "__builtin_ia32_pmulhuw512_mask"
|
||||
| "__builtin_ia32_pmulhw512_mask"
|
||||
| "__builtin_ia32_pmulhrsw512_mask"
|
||||
| "__builtin_ia32_pmaxuw512_mask"
|
||||
| "__builtin_ia32_pmaxub512_mask"
|
||||
| "__builtin_ia32_pmaxsw512_mask"
|
||||
| "__builtin_ia32_pmaxsb512_mask"
|
||||
| "__builtin_ia32_pminuw512_mask"
|
||||
| "__builtin_ia32_pminub512_mask"
|
||||
| "__builtin_ia32_pminsw512_mask"
|
||||
| "__builtin_ia32_pminsb512_mask"
|
||||
| "__builtin_ia32_pmaddwd512_mask"
|
||||
| "__builtin_ia32_pmaddubsw512_mask"
|
||||
| "__builtin_ia32_packssdw512_mask"
|
||||
| "__builtin_ia32_packsswb512_mask"
|
||||
| "__builtin_ia32_packusdw512_mask"
|
||||
| "__builtin_ia32_packuswb512_mask"
|
||||
| "__builtin_ia32_pavgw512_mask"
|
||||
| "__builtin_ia32_pavgb512_mask"
|
||||
| "__builtin_ia32_psllw512_mask"
|
||||
| "__builtin_ia32_psllwi512_mask"
|
||||
| "__builtin_ia32_psllv32hi_mask"
|
||||
| "__builtin_ia32_psrlw512_mask"
|
||||
| "__builtin_ia32_psrlwi512_mask"
|
||||
| "__builtin_ia32_psllv16hi_mask"
|
||||
| "__builtin_ia32_psllv8hi_mask"
|
||||
| "__builtin_ia32_psrlv32hi_mask"
|
||||
| "__builtin_ia32_psraw512_mask"
|
||||
| "__builtin_ia32_psrawi512_mask"
|
||||
| "__builtin_ia32_psrlv16hi_mask"
|
||||
| "__builtin_ia32_psrlv8hi_mask"
|
||||
| "__builtin_ia32_psrav32hi_mask"
|
||||
| "__builtin_ia32_permvarhi512_mask"
|
||||
| "__builtin_ia32_pshufb512_mask"
|
||||
| "__builtin_ia32_psrav16hi_mask"
|
||||
| "__builtin_ia32_psrav8hi_mask"
|
||||
| "__builtin_ia32_permvarhi256_mask"
|
||||
| "__builtin_ia32_permvarhi128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg3_type = gcc_func.get_param_type(2);
|
||||
let vector_type = arg3_type.dyncast_vector().expect("vector type");
|
||||
let zero = builder.context.new_rvalue_zero(vector_type.get_element_type());
|
||||
let num_units = vector_type.get_num_units();
|
||||
let first_arg = builder.context.new_rvalue_from_vector(None, arg3_type, &vec![zero; num_units]);
|
||||
let first_arg =
|
||||
builder.context.new_rvalue_from_vector(None, arg3_type, &vec![zero; num_units]);
|
||||
new_args.push(first_arg);
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg4_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_dbpsadbw512_mask" | "__builtin_ia32_dbpsadbw256_mask" | "__builtin_ia32_dbpsadbw128_mask" => {
|
||||
}
|
||||
"__builtin_ia32_dbpsadbw512_mask"
|
||||
| "__builtin_ia32_dbpsadbw256_mask"
|
||||
| "__builtin_ia32_dbpsadbw128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let vector_type = arg4_type.dyncast_vector().expect("vector type");
|
||||
let zero = builder.context.new_rvalue_zero(vector_type.get_element_type());
|
||||
let num_units = vector_type.get_num_units();
|
||||
let first_arg = builder.context.new_rvalue_from_vector(None, arg4_type, &vec![zero; num_units]);
|
||||
let first_arg =
|
||||
builder.context.new_rvalue_from_vector(None, arg4_type, &vec![zero; num_units]);
|
||||
new_args.push(first_arg);
|
||||
let arg5_type = gcc_func.get_param_type(4);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg5_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_vplzcntd_512_mask" | "__builtin_ia32_vplzcntd_256_mask" | "__builtin_ia32_vplzcntd_128_mask"
|
||||
| "__builtin_ia32_vplzcntq_512_mask" | "__builtin_ia32_vplzcntq_256_mask" | "__builtin_ia32_vplzcntq_128_mask" => {
|
||||
}
|
||||
"__builtin_ia32_vplzcntd_512_mask"
|
||||
| "__builtin_ia32_vplzcntd_256_mask"
|
||||
| "__builtin_ia32_vplzcntd_128_mask"
|
||||
| "__builtin_ia32_vplzcntq_512_mask"
|
||||
| "__builtin_ia32_vplzcntq_256_mask"
|
||||
| "__builtin_ia32_vplzcntq_128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
// Remove last arg as it doesn't seem to be used in GCC and is always false.
|
||||
new_args.pop();
|
||||
@ -98,37 +189,45 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
let vector_type = arg2_type.dyncast_vector().expect("vector type");
|
||||
let zero = builder.context.new_rvalue_zero(vector_type.get_element_type());
|
||||
let num_units = vector_type.get_num_units();
|
||||
let first_arg = builder.context.new_rvalue_from_vector(None, arg2_type, &vec![zero; num_units]);
|
||||
let first_arg =
|
||||
builder.context.new_rvalue_from_vector(None, arg2_type, &vec![zero; num_units]);
|
||||
new_args.push(first_arg);
|
||||
let arg3_type = gcc_func.get_param_type(2);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg3_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_vpconflictsi_512_mask" | "__builtin_ia32_vpconflictsi_256_mask"
|
||||
| "__builtin_ia32_vpconflictsi_128_mask" | "__builtin_ia32_vpconflictdi_512_mask"
|
||||
| "__builtin_ia32_vpconflictdi_256_mask" | "__builtin_ia32_vpconflictdi_128_mask" => {
|
||||
}
|
||||
"__builtin_ia32_vpconflictsi_512_mask"
|
||||
| "__builtin_ia32_vpconflictsi_256_mask"
|
||||
| "__builtin_ia32_vpconflictsi_128_mask"
|
||||
| "__builtin_ia32_vpconflictdi_512_mask"
|
||||
| "__builtin_ia32_vpconflictdi_256_mask"
|
||||
| "__builtin_ia32_vpconflictdi_128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg2_type = gcc_func.get_param_type(1);
|
||||
let vector_type = arg2_type.dyncast_vector().expect("vector type");
|
||||
let zero = builder.context.new_rvalue_zero(vector_type.get_element_type());
|
||||
let num_units = vector_type.get_num_units();
|
||||
let first_arg = builder.context.new_rvalue_from_vector(None, arg2_type, &vec![zero; num_units]);
|
||||
let first_arg =
|
||||
builder.context.new_rvalue_from_vector(None, arg2_type, &vec![zero; num_units]);
|
||||
new_args.push(first_arg);
|
||||
let arg3_type = gcc_func.get_param_type(2);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg3_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_pternlogd512_mask" | "__builtin_ia32_pternlogd256_mask"
|
||||
| "__builtin_ia32_pternlogd128_mask" | "__builtin_ia32_pternlogq512_mask"
|
||||
| "__builtin_ia32_pternlogq256_mask" | "__builtin_ia32_pternlogq128_mask" => {
|
||||
}
|
||||
"__builtin_ia32_pternlogd512_mask"
|
||||
| "__builtin_ia32_pternlogd256_mask"
|
||||
| "__builtin_ia32_pternlogd128_mask"
|
||||
| "__builtin_ia32_pternlogq512_mask"
|
||||
| "__builtin_ia32_pternlogq256_mask"
|
||||
| "__builtin_ia32_pternlogq128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg5_type = gcc_func.get_param_type(4);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg5_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_vfmaddps512_mask" | "__builtin_ia32_vfmaddpd512_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
|
||||
@ -154,24 +253,33 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
}
|
||||
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_addps512_mask" | "__builtin_ia32_addpd512_mask"
|
||||
| "__builtin_ia32_subps512_mask" | "__builtin_ia32_subpd512_mask"
|
||||
| "__builtin_ia32_mulps512_mask" | "__builtin_ia32_mulpd512_mask"
|
||||
| "__builtin_ia32_divps512_mask" | "__builtin_ia32_divpd512_mask"
|
||||
| "__builtin_ia32_maxps512_mask" | "__builtin_ia32_maxpd512_mask"
|
||||
| "__builtin_ia32_minps512_mask" | "__builtin_ia32_minpd512_mask" => {
|
||||
}
|
||||
"__builtin_ia32_addps512_mask"
|
||||
| "__builtin_ia32_addpd512_mask"
|
||||
| "__builtin_ia32_subps512_mask"
|
||||
| "__builtin_ia32_subpd512_mask"
|
||||
| "__builtin_ia32_mulps512_mask"
|
||||
| "__builtin_ia32_mulpd512_mask"
|
||||
| "__builtin_ia32_divps512_mask"
|
||||
| "__builtin_ia32_divpd512_mask"
|
||||
| "__builtin_ia32_maxps512_mask"
|
||||
| "__builtin_ia32_maxpd512_mask"
|
||||
| "__builtin_ia32_minps512_mask"
|
||||
| "__builtin_ia32_minpd512_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let last_arg = new_args.pop().expect("last arg");
|
||||
let arg3_type = gcc_func.get_param_type(2);
|
||||
let undefined = builder.current_func().new_local(None, arg3_type, "undefined_for_intrinsic").to_rvalue();
|
||||
let undefined = builder
|
||||
.current_func()
|
||||
.new_local(None, arg3_type, "undefined_for_intrinsic")
|
||||
.to_rvalue();
|
||||
new_args.push(undefined);
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg4_type, -1);
|
||||
new_args.push(minus_one);
|
||||
new_args.push(last_arg);
|
||||
args = new_args.into();
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_vfmaddsubps512_mask" | "__builtin_ia32_vfmaddsubpd512_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let last_arg = new_args.pop().expect("last arg");
|
||||
@ -180,54 +288,72 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
new_args.push(minus_one);
|
||||
new_args.push(last_arg);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_vpermi2vard512_mask" | "__builtin_ia32_vpermi2vard256_mask"
|
||||
| "__builtin_ia32_vpermi2vard128_mask" | "__builtin_ia32_vpermi2varq512_mask"
|
||||
| "__builtin_ia32_vpermi2varq256_mask" | "__builtin_ia32_vpermi2varq128_mask"
|
||||
| "__builtin_ia32_vpermi2varps512_mask" | "__builtin_ia32_vpermi2varps256_mask"
|
||||
| "__builtin_ia32_vpermi2varps128_mask" | "__builtin_ia32_vpermi2varpd512_mask"
|
||||
| "__builtin_ia32_vpermi2varpd256_mask" | "__builtin_ia32_vpermi2varpd128_mask" | "__builtin_ia32_vpmadd52huq512_mask"
|
||||
| "__builtin_ia32_vpmadd52luq512_mask" | "__builtin_ia32_vpmadd52huq256_mask" | "__builtin_ia32_vpmadd52luq256_mask"
|
||||
| "__builtin_ia32_vpmadd52huq128_mask"
|
||||
=> {
|
||||
}
|
||||
"__builtin_ia32_vpermi2vard512_mask"
|
||||
| "__builtin_ia32_vpermi2vard256_mask"
|
||||
| "__builtin_ia32_vpermi2vard128_mask"
|
||||
| "__builtin_ia32_vpermi2varq512_mask"
|
||||
| "__builtin_ia32_vpermi2varq256_mask"
|
||||
| "__builtin_ia32_vpermi2varq128_mask"
|
||||
| "__builtin_ia32_vpermi2varps512_mask"
|
||||
| "__builtin_ia32_vpermi2varps256_mask"
|
||||
| "__builtin_ia32_vpermi2varps128_mask"
|
||||
| "__builtin_ia32_vpermi2varpd512_mask"
|
||||
| "__builtin_ia32_vpermi2varpd256_mask"
|
||||
| "__builtin_ia32_vpermi2varpd128_mask"
|
||||
| "__builtin_ia32_vpmadd52huq512_mask"
|
||||
| "__builtin_ia32_vpmadd52luq512_mask"
|
||||
| "__builtin_ia32_vpmadd52huq256_mask"
|
||||
| "__builtin_ia32_vpmadd52luq256_mask"
|
||||
| "__builtin_ia32_vpmadd52huq128_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg4_type, -1);
|
||||
new_args.push(minus_one);
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_cvtdq2ps512_mask" | "__builtin_ia32_cvtudq2ps512_mask"
|
||||
| "__builtin_ia32_sqrtps512_mask" | "__builtin_ia32_sqrtpd512_mask" => {
|
||||
}
|
||||
"__builtin_ia32_cvtdq2ps512_mask"
|
||||
| "__builtin_ia32_cvtudq2ps512_mask"
|
||||
| "__builtin_ia32_sqrtps512_mask"
|
||||
| "__builtin_ia32_sqrtpd512_mask" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let last_arg = new_args.pop().expect("last arg");
|
||||
let arg2_type = gcc_func.get_param_type(1);
|
||||
let undefined = builder.current_func().new_local(None, arg2_type, "undefined_for_intrinsic").to_rvalue();
|
||||
let undefined = builder
|
||||
.current_func()
|
||||
.new_local(None, arg2_type, "undefined_for_intrinsic")
|
||||
.to_rvalue();
|
||||
new_args.push(undefined);
|
||||
let arg3_type = gcc_func.get_param_type(2);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg3_type, -1);
|
||||
new_args.push(minus_one);
|
||||
new_args.push(last_arg);
|
||||
args = new_args.into();
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_stmxcsr" => {
|
||||
args = vec![].into();
|
||||
},
|
||||
"__builtin_ia32_addcarryx_u64" | "__builtin_ia32_sbb_u64" | "__builtin_ia32_addcarryx_u32" | "__builtin_ia32_sbb_u32" => {
|
||||
}
|
||||
"__builtin_ia32_addcarryx_u64"
|
||||
| "__builtin_ia32_sbb_u64"
|
||||
| "__builtin_ia32_addcarryx_u32"
|
||||
| "__builtin_ia32_sbb_u32" => {
|
||||
let mut new_args = args.to_vec();
|
||||
let arg2_type = gcc_func.get_param_type(1);
|
||||
let variable = builder.current_func().new_local(None, arg2_type, "addcarryResult");
|
||||
new_args.push(variable.get_address(None));
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_vpermt2varqi512_mask" | "__builtin_ia32_vpermt2varqi256_mask"
|
||||
| "__builtin_ia32_vpermt2varqi128_mask" | "__builtin_ia32_vpermt2varhi512_mask"
|
||||
| "__builtin_ia32_vpermt2varhi256_mask" | "__builtin_ia32_vpermt2varhi128_mask"
|
||||
=> {
|
||||
}
|
||||
"__builtin_ia32_vpermt2varqi512_mask"
|
||||
| "__builtin_ia32_vpermt2varqi256_mask"
|
||||
| "__builtin_ia32_vpermt2varqi128_mask"
|
||||
| "__builtin_ia32_vpermt2varhi512_mask"
|
||||
| "__builtin_ia32_vpermt2varhi256_mask"
|
||||
| "__builtin_ia32_vpermt2varhi128_mask" => {
|
||||
let new_args = args.to_vec();
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let minus_one = builder.context.new_rvalue_from_int(arg4_type, -1);
|
||||
args = vec![new_args[1], new_args[0], new_args[2], minus_one].into();
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_xrstor" | "__builtin_ia32_xsavec" => {
|
||||
let new_args = args.to_vec();
|
||||
let thirty_two = builder.context.new_rvalue_from_int(new_args[1].get_type(), 32);
|
||||
@ -235,22 +361,25 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
let arg2_type = gcc_func.get_param_type(1);
|
||||
let arg2 = builder.context.new_cast(None, arg2, arg2_type);
|
||||
args = vec![new_args[0], arg2].into();
|
||||
},
|
||||
}
// These builtins are sent one more argument than needed.
"__builtin_prefetch" => {
let mut new_args = args.to_vec();
new_args.pop();
args = new_args.into();
},
}
// The GCC version returns one value of the tuple through a pointer.
"__builtin_ia32_rdrand64_step" => {
let arg = builder.current_func().new_local(None, builder.ulonglong_type, "return_rdrand_arg");
let arg = builder.current_func().new_local(
None,
builder.ulonglong_type,
"return_rdrand_arg",
);
args = vec![arg.get_address(None)].into();
},
}
_ => (),
}
}
else {
} else {
match &*func_name {
"__builtin_ia32_rndscaless_mask_round" | "__builtin_ia32_rndscalesd_mask_round" => {
|
||||
let new_args = args.to_vec();
|
||||
@ -259,10 +388,10 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
let arg4_type = gcc_func.get_param_type(3);
|
||||
let arg4 = builder.context.new_bitcast(None, new_args[2], arg4_type);
|
||||
args = vec![new_args[0], new_args[1], arg3, arg4, new_args[3], new_args[5]].into();
|
||||
},
|
||||
}
|
||||
// NOTE: the LLVM intrinsic receives 3 floats, but the GCC builtin requires 3 vectors.
|
||||
// FIXME: the intrinsics like _mm_mask_fmadd_sd should probably directly call the GCC
|
||||
// instrinsic to avoid this.
|
||||
// intrinsic to avoid this.
|
||||
"__builtin_ia32_vfmaddss3_round" => {
|
||||
let new_args = args.to_vec();
|
||||
let arg1_type = gcc_func.get_param_type(0);
|
||||
@ -272,7 +401,7 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
let b = builder.context.new_rvalue_from_vector(None, arg2_type, &[new_args[1]; 4]);
|
||||
let c = builder.context.new_rvalue_from_vector(None, arg3_type, &[new_args[2]; 4]);
|
||||
args = vec![a, b, c, new_args[3]].into();
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_vfmaddsd3_round" => {
|
||||
let new_args = args.to_vec();
|
||||
let arg1_type = gcc_func.get_param_type(0);
|
||||
@ -282,25 +411,34 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
let b = builder.context.new_rvalue_from_vector(None, arg2_type, &[new_args[1]; 2]);
|
||||
let c = builder.context.new_rvalue_from_vector(None, arg3_type, &[new_args[2]; 2]);
|
||||
args = vec![a, b, c, new_args[3]].into();
|
||||
},
|
||||
"__builtin_ia32_vfmaddsubpd256" | "__builtin_ia32_vfmaddsubps" | "__builtin_ia32_vfmaddsubps256"
|
||||
}
|
||||
"__builtin_ia32_vfmaddsubpd256"
|
||||
| "__builtin_ia32_vfmaddsubps"
|
||||
| "__builtin_ia32_vfmaddsubps256"
|
||||
| "__builtin_ia32_vfmaddsubpd" => {
|
||||
if let Some(original_function_name) = original_function_name {
|
||||
match &**original_function_name {
|
||||
"llvm.x86.fma.vfmsubadd.pd.256" | "llvm.x86.fma.vfmsubadd.ps" | "llvm.x86.fma.vfmsubadd.ps.256"
|
||||
"llvm.x86.fma.vfmsubadd.pd.256"
|
||||
| "llvm.x86.fma.vfmsubadd.ps"
|
||||
| "llvm.x86.fma.vfmsubadd.ps.256"
|
||||
| "llvm.x86.fma.vfmsubadd.pd" => {
|
||||
// NOTE: since both llvm.x86.fma.vfmsubadd.ps and llvm.x86.fma.vfmaddsub.ps maps to
|
||||
// __builtin_ia32_vfmaddsubps, only add minus if this comes from a
|
||||
// subadd LLVM intrinsic, e.g. _mm256_fmsubadd_pd.
|
||||
let mut new_args = args.to_vec();
|
||||
let arg3 = &mut new_args[2];
|
||||
*arg3 = builder.context.new_unary_op(None, UnaryOp::Minus, arg3.get_type(), *arg3);
|
||||
*arg3 = builder.context.new_unary_op(
|
||||
None,
|
||||
UnaryOp::Minus,
|
||||
arg3.get_type(),
|
||||
*arg3,
|
||||
);
|
||||
args = new_args.into();
|
||||
},
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_ldmxcsr" => {
|
||||
// The builtin __builtin_ia32_ldmxcsr takes an integer value while llvm.x86.sse.ldmxcsr takes a pointer,
|
||||
// so dereference the pointer.
|
||||
@ -309,23 +447,31 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
let arg1 = builder.context.new_cast(None, args[0], uint_ptr_type);
|
||||
new_args[0] = arg1.dereference(None).to_rvalue();
|
||||
args = new_args.into();
|
||||
},
|
||||
"__builtin_ia32_rcp14sd_mask" | "__builtin_ia32_rcp14ss_mask" | "__builtin_ia32_rsqrt14sd_mask"
|
||||
}
|
||||
"__builtin_ia32_rcp14sd_mask"
|
||||
| "__builtin_ia32_rcp14ss_mask"
|
||||
| "__builtin_ia32_rsqrt14sd_mask"
|
||||
| "__builtin_ia32_rsqrt14ss_mask" => {
|
||||
let new_args = args.to_vec();
|
||||
args = vec![new_args[1], new_args[0], new_args[2], new_args[3]].into();
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_sqrtsd_mask_round" | "__builtin_ia32_sqrtss_mask_round" => {
|
||||
let new_args = args.to_vec();
|
||||
args = vec![new_args[1], new_args[0], new_args[2], new_args[3], new_args[4]].into();
|
||||
},
|
||||
"__builtin_ia32_vpshrdv_v8di" | "__builtin_ia32_vpshrdv_v4di" | "__builtin_ia32_vpshrdv_v2di" |
|
||||
"__builtin_ia32_vpshrdv_v16si" | "__builtin_ia32_vpshrdv_v8si" | "__builtin_ia32_vpshrdv_v4si" |
|
||||
"__builtin_ia32_vpshrdv_v32hi" | "__builtin_ia32_vpshrdv_v16hi" | "__builtin_ia32_vpshrdv_v8hi" => {
|
||||
}
|
||||
"__builtin_ia32_vpshrdv_v8di"
|
||||
| "__builtin_ia32_vpshrdv_v4di"
|
||||
| "__builtin_ia32_vpshrdv_v2di"
|
||||
| "__builtin_ia32_vpshrdv_v16si"
|
||||
| "__builtin_ia32_vpshrdv_v8si"
|
||||
| "__builtin_ia32_vpshrdv_v4si"
|
||||
| "__builtin_ia32_vpshrdv_v32hi"
|
||||
| "__builtin_ia32_vpshrdv_v16hi"
|
||||
| "__builtin_ia32_vpshrdv_v8hi" => {
|
||||
// The first two arguments are reversed, compared to LLVM.
|
||||
let new_args = args.to_vec();
|
||||
args = vec![new_args[1], new_args[0], new_args[2]].into();
|
||||
},
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
@ -333,16 +479,27 @@ pub fn adjust_intrinsic_arguments<'a, 'b, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc
|
||||
args
|
||||
}
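To make the recurring pattern in the arms above easier to see: many of them simply append the extra operands the masked GCC builtin expects, namely a pass-through source value (never observed here) and an all-ones write mask, so the call behaves like the unmasked LLVM intrinsic. A rough standalone sketch with plain integers standing in for `RValue<'gcc>` (the helper name is illustrative only, not part of the patch):

    // Illustrative only: append a pass-through source operand and a -1 (all lanes) mask.
    fn pad_with_full_mask(mut args: Vec<i64>, undefined_source: i64) -> Vec<i64> {
        args.push(undefined_source); // merged in where mask bits are 0, so never observed
        args.push(-1); // -1 sets every mask bit, i.e. write all lanes
        args
    }

    fn main() {
        assert_eq!(pad_with_full_mask(vec![10, 20], 0), vec![10, 20, 0, -1]);
    }
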
|
||||
|
||||
pub fn adjust_intrinsic_return_value<'a, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc, 'tcx>, mut return_value: RValue<'gcc>, func_name: &str, args: &[RValue<'gcc>], args_adjusted: bool, orig_args: &[RValue<'gcc>]) -> RValue<'gcc> {
|
||||
pub fn adjust_intrinsic_return_value<'a, 'gcc, 'tcx>(
|
||||
builder: &Builder<'a, 'gcc, 'tcx>,
|
||||
mut return_value: RValue<'gcc>,
|
||||
func_name: &str,
|
||||
args: &[RValue<'gcc>],
|
||||
args_adjusted: bool,
|
||||
orig_args: &[RValue<'gcc>],
|
||||
) -> RValue<'gcc> {
|
||||
match func_name {
|
||||
"__builtin_ia32_vfmaddss3_round" | "__builtin_ia32_vfmaddsd3_round" => {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
{
|
||||
let zero = builder.context.new_rvalue_zero(builder.int_type);
|
||||
return_value = builder.context.new_vector_access(None, return_value, zero).to_rvalue();
|
||||
return_value =
|
||||
builder.context.new_vector_access(None, return_value, zero).to_rvalue();
|
||||
}
|
||||
},
|
||||
"__builtin_ia32_addcarryx_u64" | "__builtin_ia32_sbb_u64" | "__builtin_ia32_addcarryx_u32" | "__builtin_ia32_sbb_u32" => {
|
||||
}
|
||||
"__builtin_ia32_addcarryx_u64"
|
||||
| "__builtin_ia32_sbb_u64"
|
||||
| "__builtin_ia32_addcarryx_u32"
|
||||
| "__builtin_ia32_sbb_u32" => {
|
||||
// Both llvm.x86.addcarry.32 and llvm.x86.addcarryx.u32 points to the same GCC builtin,
|
||||
// but only the former requires adjusting the return value.
|
||||
// Those 2 LLVM intrinsics differ by their argument count, that's why we check if the
|
||||
@ -351,10 +508,16 @@ pub fn adjust_intrinsic_return_value<'a, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc,
|
||||
let last_arg = args.last().expect("last arg");
|
||||
let field1 = builder.context.new_field(None, builder.u8_type, "carryFlag");
|
||||
let field2 = builder.context.new_field(None, args[1].get_type(), "carryResult");
|
||||
let struct_type = builder.context.new_struct_type(None, "addcarryResult", &[field1, field2]);
|
||||
return_value = builder.context.new_struct_constructor(None, struct_type.as_type(), None, &[return_value, last_arg.dereference(None).to_rvalue()]);
|
||||
let struct_type =
|
||||
builder.context.new_struct_type(None, "addcarryResult", &[field1, field2]);
|
||||
return_value = builder.context.new_struct_constructor(
|
||||
None,
|
||||
struct_type.as_type(),
|
||||
None,
|
||||
&[return_value, last_arg.dereference(None).to_rvalue()],
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
"__builtin_ia32_stmxcsr" => {
|
||||
// The builtin __builtin_ia32_stmxcsr returns a value while llvm.x86.sse.stmxcsr writes
|
||||
// the result in its pointer argument.
|
||||
@ -366,20 +529,24 @@ pub fn adjust_intrinsic_return_value<'a, 'gcc, 'tcx>(builder: &Builder<'a, 'gcc,
|
||||
// The return value was assigned to the result pointer above. In order to not call the
|
||||
// builtin twice, we overwrite the return value with a dummy value.
|
||||
return_value = builder.context.new_rvalue_zero(builder.int_type);
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_rdrand64_step" => {
|
||||
let random_number = args[0].dereference(None).to_rvalue();
|
||||
let success_variable = builder.current_func().new_local(None, return_value.get_type(), "success");
|
||||
let success_variable =
|
||||
builder.current_func().new_local(None, return_value.get_type(), "success");
|
||||
builder.llbb().add_assignment(None, success_variable, return_value);
|
||||
|
||||
let field1 = builder.context.new_field(None, random_number.get_type(), "random_number");
|
||||
let field2 = builder.context.new_field(None, return_value.get_type(), "success");
|
||||
let struct_type = builder.context.new_struct_type(None, "rdrand_result", &[field1, field2]);
|
||||
return_value = builder.context.new_struct_constructor(None, struct_type.as_type(), None, &[
|
||||
random_number,
|
||||
success_variable.to_rvalue(),
|
||||
]);
|
||||
},
|
||||
let struct_type =
|
||||
builder.context.new_struct_type(None, "rdrand_result", &[field1, field2]);
|
||||
return_value = builder.context.new_struct_constructor(
|
||||
None,
|
||||
struct_type.as_type(),
|
||||
None,
|
||||
&[random_number, success_variable.to_rvalue()],
|
||||
);
|
||||
}
|
||||
_ => (),
|
||||
}
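As context for the `addcarryx`/`sbb` arm above: the GCC builtin returns only the carry flag and writes the arithmetic result through its pointer argument, while the LLVM intrinsic yields both values at once, hence the struct constructor. A plain-Rust sketch of that repacking (types and the helper name are stand-ins, not the real GCC API):

    // Illustrative only: combine the flag returned by the builtin with the value it
    // wrote through the out-pointer, mirroring the (carryFlag, carryResult) struct above.
    fn repack_addcarry(carry_flag: u8, result_out: &u32) -> (u8, u32) {
        (carry_flag, *result_out)
    }

    fn main() {
        let result = 7u32;
        assert_eq!(repack_addcarry(1, &result), (1, 7));
    }
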
|
||||
|
||||
@ -391,23 +558,33 @@ pub fn ignore_arg_cast(func_name: &str, index: usize, args_len: usize) -> bool {
|
||||
match func_name {
|
||||
// NOTE: these intrinsics have missing parameters before the last one, so ignore the
|
||||
// last argument type check.
|
||||
"__builtin_ia32_maxps512_mask" | "__builtin_ia32_maxpd512_mask"
|
||||
| "__builtin_ia32_minps512_mask" | "__builtin_ia32_minpd512_mask" | "__builtin_ia32_sqrtps512_mask"
|
||||
| "__builtin_ia32_sqrtpd512_mask" | "__builtin_ia32_addps512_mask" | "__builtin_ia32_addpd512_mask"
|
||||
| "__builtin_ia32_subps512_mask" | "__builtin_ia32_subpd512_mask"
|
||||
| "__builtin_ia32_mulps512_mask" | "__builtin_ia32_mulpd512_mask"
|
||||
| "__builtin_ia32_divps512_mask" | "__builtin_ia32_divpd512_mask"
|
||||
| "__builtin_ia32_vfmaddsubps512_mask" | "__builtin_ia32_vfmaddsubpd512_mask"
|
||||
| "__builtin_ia32_cvtdq2ps512_mask" | "__builtin_ia32_cvtudq2ps512_mask" => {
|
||||
"__builtin_ia32_maxps512_mask"
|
||||
| "__builtin_ia32_maxpd512_mask"
|
||||
| "__builtin_ia32_minps512_mask"
|
||||
| "__builtin_ia32_minpd512_mask"
|
||||
| "__builtin_ia32_sqrtps512_mask"
|
||||
| "__builtin_ia32_sqrtpd512_mask"
|
||||
| "__builtin_ia32_addps512_mask"
|
||||
| "__builtin_ia32_addpd512_mask"
|
||||
| "__builtin_ia32_subps512_mask"
|
||||
| "__builtin_ia32_subpd512_mask"
|
||||
| "__builtin_ia32_mulps512_mask"
|
||||
| "__builtin_ia32_mulpd512_mask"
|
||||
| "__builtin_ia32_divps512_mask"
|
||||
| "__builtin_ia32_divpd512_mask"
|
||||
| "__builtin_ia32_vfmaddsubps512_mask"
|
||||
| "__builtin_ia32_vfmaddsubpd512_mask"
|
||||
| "__builtin_ia32_cvtdq2ps512_mask"
|
||||
| "__builtin_ia32_cvtudq2ps512_mask" => {
|
||||
if index == args_len - 1 {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_rndscaless_mask_round" | "__builtin_ia32_rndscalesd_mask_round" => {
|
||||
if index == 2 || index == 3 {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
}
|
||||
"__builtin_ia32_vfmaddps512_mask" | "__builtin_ia32_vfmaddpd512_mask" => {
|
||||
// Since there are two LLVM intrinsics that map to each of these GCC builtins and only
|
||||
// one of them has a missing parameter before the last one, we check the number of
|
||||
@ -415,33 +592,34 @@ pub fn ignore_arg_cast(func_name: &str, index: usize, args_len: usize) -> bool {
|
||||
if args_len == 4 && index == args_len - 1 {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
}
|
||||
// NOTE: the LLVM intrinsic receives 3 floats, but the GCC builtin requires 3 vectors.
|
||||
"__builtin_ia32_vfmaddss3_round" | "__builtin_ia32_vfmaddsd3_round" => return true,
|
||||
"__builtin_ia32_vplzcntd_512_mask" | "__builtin_ia32_vplzcntd_256_mask" | "__builtin_ia32_vplzcntd_128_mask"
|
||||
| "__builtin_ia32_vplzcntq_512_mask" | "__builtin_ia32_vplzcntq_256_mask" | "__builtin_ia32_vplzcntq_128_mask" => {
|
||||
"__builtin_ia32_vplzcntd_512_mask"
|
||||
| "__builtin_ia32_vplzcntd_256_mask"
|
||||
| "__builtin_ia32_vplzcntd_128_mask"
|
||||
| "__builtin_ia32_vplzcntq_512_mask"
|
||||
| "__builtin_ia32_vplzcntq_256_mask"
|
||||
| "__builtin_ia32_vplzcntq_128_mask" => {
|
||||
if index == args_len - 1 {
|
||||
return true;
|
||||
}
|
||||
},
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function<'gcc> {
|
||||
let gcc_name =
|
||||
match name {
|
||||
let gcc_name = match name {
|
||||
"llvm.x86.sse2.pause" => {
|
||||
// NOTE: pause is only a hint, so we use a dummy built-in because target built-ins
|
||||
// are not supported in libgccjit 12.
|
||||
"__builtin_inff"
|
||||
},
|
||||
"llvm.x86.xgetbv" => {
|
||||
"__builtin_trap"
|
||||
},
|
||||
}
|
||||
"llvm.x86.xgetbv" => "__builtin_trap",
|
||||
_ => unimplemented!("unsupported LLVM intrinsic {}", name),
|
||||
};
|
||||
let func = cx.context.get_builtin_function(gcc_name);
|
||||
@ -449,15 +627,15 @@ pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function
|
||||
return func;
|
||||
}
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
pub fn intrinsic<'gcc, 'tcx>(name: &str, cx: &CodegenCx<'gcc, 'tcx>) -> Function<'gcc> {
|
||||
match name {
|
||||
"llvm.prefetch" => {
|
||||
let gcc_name = "__builtin_prefetch";
|
||||
let func = cx.context.get_builtin_function(gcc_name);
|
||||
cx.functions.borrow_mut().insert(gcc_name.to_string(), func);
|
||||
return func
|
||||
},
|
||||
return func;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
|
@ -1,43 +1,48 @@
pub mod llvm;
mod simd;

#[cfg(feature="master")]
#[cfg(feature = "master")]
use std::iter;

#[cfg(feature="master")]
#[cfg(feature = "master")]
use gccjit::FunctionType;
use gccjit::{ComparisonOp, Function, RValue, ToRValue, Type, UnaryOp};
use rustc_codegen_ssa::MemFlags;
|
||||
use rustc_codegen_ssa::base::wants_msvc_seh;
|
||||
use rustc_codegen_ssa::common::IntPredicate;
|
||||
use rustc_codegen_ssa::errors::InvalidMonomorphization;
|
||||
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
|
||||
use rustc_codegen_ssa::mir::place::PlaceRef;
|
||||
use rustc_codegen_ssa::traits::{ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods};
|
||||
#[cfg(feature="master")]
|
||||
use rustc_codegen_ssa::traits::{
|
||||
ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods,
|
||||
};
|
||||
#[cfg(feature = "master")]
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, MiscMethods};
|
||||
use rustc_codegen_ssa::errors::InvalidMonomorphization;
|
||||
use rustc_codegen_ssa::MemFlags;
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_middle::ty::layout::LayoutOf;
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use rustc_middle::ty::layout::{FnAbiOf, HasTyCtxt};
|
||||
use rustc_span::{Span, Symbol, sym};
|
||||
use rustc_target::abi::HasDataLayout;
|
||||
use rustc_middle::ty::{self, Instance, Ty};
|
||||
use rustc_span::{sym, Span, Symbol};
|
||||
use rustc_target::abi::call::{ArgAbi, FnAbi, PassMode};
|
||||
use rustc_target::spec::PanicStrategy;
|
||||
#[cfg(feature="master")]
|
||||
use rustc_target::abi::HasDataLayout;
|
||||
#[cfg(feature = "master")]
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use rustc_target::spec::PanicStrategy;
|
||||
|
||||
use crate::abi::GccType;
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
use crate::abi::FnAbiGccExt;
|
||||
use crate::abi::GccType;
|
||||
use crate::builder::Builder;
|
||||
use crate::common::{SignType, TypeReflection};
|
||||
use crate::context::CodegenCx;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
use crate::intrinsic::simd::generic_simd_intrinsic;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
|
||||
fn get_simple_intrinsic<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, name: Symbol) -> Option<Function<'gcc>> {
fn get_simple_intrinsic<'gcc, 'tcx>(
cx: &CodegenCx<'gcc, 'tcx>,
name: Symbol,
) -> Option<Function<'gcc>> {
let gcc_name = match name {
sym::sqrtf32 => "sqrtf",
sym::sqrtf64 => "sqrt",
@ -90,7 +95,14 @@ fn get_simple_intrinsic<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, name: Symbol) ->
}

impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
fn codegen_intrinsic_call(&mut self, instance: Instance<'tcx>, fn_abi: &FnAbi<'tcx, Ty<'tcx>>, args: &[OperandRef<'tcx, RValue<'gcc>>], llresult: RValue<'gcc>, span: Span) -> Result<(), Instance<'tcx>> {
|
||||
fn codegen_intrinsic_call(
|
||||
&mut self,
|
||||
instance: Instance<'tcx>,
|
||||
fn_abi: &FnAbi<'tcx, Ty<'tcx>>,
|
||||
args: &[OperandRef<'tcx, RValue<'gcc>>],
|
||||
llresult: RValue<'gcc>,
|
||||
span: Span,
|
||||
) -> Result<(), Instance<'tcx>> {
|
||||
let tcx = self.tcx;
|
||||
let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
|
||||
|
||||
@ -110,19 +122,21 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
|
||||
|
||||
let simple = get_simple_intrinsic(self, name);
|
||||
let llval =
|
||||
match name {
|
||||
let llval = match name {
|
||||
_ if simple.is_some() => {
|
||||
// FIXME(antoyo): remove this cast when the API supports function.
|
||||
let func = unsafe { std::mem::transmute(simple.expect("simple")) };
|
||||
self.call(self.type_void(), None, None, func, &args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(), None)
|
||||
},
|
||||
sym::likely => {
|
||||
self.expect(args[0].immediate(), true)
|
||||
}
|
||||
sym::unlikely => {
|
||||
self.expect(args[0].immediate(), false)
|
||||
self.call(
|
||||
self.type_void(),
|
||||
None,
|
||||
None,
|
||||
func,
|
||||
&args.iter().map(|arg| arg.immediate()).collect::<Vec<_>>(),
|
||||
None,
|
||||
)
|
||||
}
|
||||
sym::likely => self.expect(args[0].immediate(), true),
|
||||
sym::unlikely => self.expect(args[0].immediate(), false),
|
||||
sym::is_val_statically_known => {
|
||||
let a = args[0].immediate();
|
||||
let builtin = self.context.get_builtin_function("__builtin_constant_p");
|
||||
@ -152,12 +166,10 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
sym::volatile_load | sym::unaligned_volatile_load => {
|
||||
let tp_ty = fn_args.type_at(0);
|
||||
let ptr = args[0].immediate();
|
||||
let load =
|
||||
if let PassMode::Cast { cast: ty, pad_i32: _ } = &fn_abi.ret.mode {
|
||||
let load = if let PassMode::Cast { cast: ty, pad_i32: _ } = &fn_abi.ret.mode {
|
||||
let gcc_ty = ty.gcc_type(self);
|
||||
self.volatile_load(gcc_ty, ptr)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.volatile_load(self.layout_of(tp_ty).gcc_type(self), ptr)
|
||||
};
|
||||
// TODO(antoyo): set alignment.
|
||||
@ -214,8 +226,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
// in the state needs to be updated.
|
||||
self.switch_to_block(else_block);
|
||||
|
||||
let zeros =
|
||||
match name {
|
||||
let zeros = match name {
|
||||
sym::ctlz => self.count_leading_zeroes(width, arg),
|
||||
sym::cttz => self.count_trailing_zeroes(width, arg),
|
||||
_ => unreachable!(),
|
||||
@ -229,21 +240,16 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
result.to_rvalue()
|
||||
}
|
||||
sym::ctlz_nonzero => {
|
||||
self.count_leading_zeroes(width, args[0].immediate())
|
||||
},
|
||||
sym::cttz_nonzero => {
|
||||
self.count_trailing_zeroes(width, args[0].immediate())
|
||||
}
|
||||
sym::ctlz_nonzero => self.count_leading_zeroes(width, args[0].immediate()),
|
||||
sym::cttz_nonzero => self.count_trailing_zeroes(width, args[0].immediate()),
|
||||
sym::ctpop => self.pop_count(args[0].immediate()),
|
||||
sym::bswap => {
|
||||
if width == 8 {
|
||||
args[0].immediate() // byte-swapping a u8/i8 is just a no-op
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.gcc_bswap(args[0].immediate(), width)
|
||||
}
|
||||
},
|
||||
}
|
||||
sym::bitreverse => self.bit_reverse(width, args[0].immediate()),
|
||||
sym::rotate_left | sym::rotate_right => {
|
||||
// TODO(antoyo): implement using algorithm from:
|
||||
@ -254,21 +260,30 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
let raw_shift = args[1].immediate();
|
||||
if is_left {
|
||||
self.rotate_left(val, raw_shift, width)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
self.rotate_right(val, raw_shift, width)
|
||||
}
|
||||
},
|
||||
sym::saturating_add => {
|
||||
self.saturating_add(args[0].immediate(), args[1].immediate(), signed, width)
|
||||
},
|
||||
sym::saturating_sub => {
|
||||
self.saturating_sub(args[0].immediate(), args[1].immediate(), signed, width)
|
||||
},
|
||||
}
|
||||
sym::saturating_add => self.saturating_add(
|
||||
args[0].immediate(),
|
||||
args[1].immediate(),
|
||||
signed,
|
||||
width,
|
||||
),
|
||||
sym::saturating_sub => self.saturating_sub(
|
||||
args[0].immediate(),
|
||||
args[1].immediate(),
|
||||
signed,
|
||||
width,
|
||||
),
|
||||
_ => bug!(),
|
||||
},
|
||||
None => {
|
||||
tcx.dcx().emit_err(InvalidMonomorphization::BasicIntegerType { span, name, ty });
|
||||
tcx.dcx().emit_err(InvalidMonomorphization::BasicIntegerType {
|
||||
span,
|
||||
name,
|
||||
ty,
|
||||
});
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
@ -307,7 +322,11 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
let void_ptr_type = self.context.new_type::<*const ()>();
|
||||
let a_ptr = self.bitcast(a, void_ptr_type);
|
||||
let b_ptr = self.bitcast(b, void_ptr_type);
|
||||
let n = self.context.new_cast(None, self.const_usize(layout.size().bytes()), self.sizet_type);
|
||||
let n = self.context.new_cast(
|
||||
None,
|
||||
self.const_usize(layout.size().bytes()),
|
||||
self.sizet_type,
|
||||
);
|
||||
let builtin = self.context.get_builtin_function("memcmp");
|
||||
let cmp = self.context.new_call(None, builtin, &[a_ptr, b_ptr, n]);
|
||||
self.icmp(IntPredicate::IntEQ, cmp, self.const_i32(0))
|
||||
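The hunk above turns the comparison into a memcmp over the operands' bytes and then tests the result against 0. As a reading aid, here is a minimal Rust sketch of that check, assuming this is the raw_eq-style byte comparison (the helper name and the padding caveat are mine, not part of the diff):

use std::slice;

// Byte-wise equality, i.e. `memcmp(a, b, size_of::<T>()) == 0`.
// Only meaningful for types without padding bytes.
unsafe fn bytes_eq<T>(a: &T, b: &T) -> bool {
    let size = std::mem::size_of::<T>();
    let a_bytes = slice::from_raw_parts(a as *const T as *const u8, size);
    let b_bytes = slice::from_raw_parts(b as *const T as *const u8, size);
    a_bytes == b_bytes
}

fn main() {
    assert!(unsafe { bytes_eq(&42u64, &42u64) });
    assert!(!unsafe { bytes_eq(&1u64, &2u64) });
}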
@ -352,7 +371,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
let addr = self.bitcast(ptr, usize_type);
|
||||
let masked = self.and(addr, mask);
|
||||
self.bitcast(masked, void_ptr_type)
|
||||
},
|
||||
}
|
||||
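The address masking just above casts the pointer to an integer, ANDs it with the mask, and casts the result back to a pointer. A plain-Rust sketch of the same operation, assuming this hunk lowers the pointer-masking intrinsic (the function name is illustrative):

fn mask_ptr<T>(ptr: *const T, mask: usize) -> *const T {
    ((ptr as usize) & mask) as *const T
}

fn main() {
    let x = 0u64;
    let p: *const u64 = &x;
    // Round the address down to a 16-byte boundary.
    let aligned = mask_ptr(p, usize::MAX << 4);
    assert_eq!(aligned as usize % 16, 0);
}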
|
||||
_ if name_str.starts_with("simd_") => {
|
||||
match generic_simd_intrinsic(self, name, callee_ty, args, ret_ty, llret_ty, span) {
|
||||
@ -370,8 +389,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
let ptr_llty = self.type_ptr_to(ty.gcc_type(self));
|
||||
let ptr = self.pointercast(result.llval, ptr_llty);
|
||||
self.store(llval, ptr, result.align);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
OperandRef::from_immediate_or_packed_pair(self, llval, result.layout)
|
||||
.val
|
||||
.store(self, result);
|
||||
@ -423,11 +441,21 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'a, 'gcc, 'tcx> ArgAbiMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
fn store_fn_arg(&mut self, arg_abi: &ArgAbi<'tcx, Ty<'tcx>>, idx: &mut usize, dst: PlaceRef<'tcx, Self::Value>) {
|
||||
fn store_fn_arg(
|
||||
&mut self,
|
||||
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
|
||||
idx: &mut usize,
|
||||
dst: PlaceRef<'tcx, Self::Value>,
|
||||
) {
|
||||
arg_abi.store_fn_arg(self, idx, dst)
|
||||
}
|
||||
|
||||
fn store_arg(&mut self, arg_abi: &ArgAbi<'tcx, Ty<'tcx>>, val: RValue<'gcc>, dst: PlaceRef<'tcx, RValue<'gcc>>) {
|
||||
fn store_arg(
|
||||
&mut self,
|
||||
arg_abi: &ArgAbi<'tcx, Ty<'tcx>>,
|
||||
val: RValue<'gcc>,
|
||||
dst: PlaceRef<'tcx, RValue<'gcc>>,
|
||||
) {
|
||||
arg_abi.store(self, val, dst)
|
||||
}
|
||||
|
||||
@ -438,8 +466,18 @@ impl<'a, 'gcc, 'tcx> ArgAbiMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
pub trait ArgAbiExt<'gcc, 'tcx> {
|
||||
fn memory_ty(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
|
||||
fn store(&self, bx: &mut Builder<'_, 'gcc, 'tcx>, val: RValue<'gcc>, dst: PlaceRef<'tcx, RValue<'gcc>>);
|
||||
fn store_fn_arg(&self, bx: &mut Builder<'_, 'gcc, 'tcx>, idx: &mut usize, dst: PlaceRef<'tcx, RValue<'gcc>>);
|
||||
fn store(
|
||||
&self,
|
||||
bx: &mut Builder<'_, 'gcc, 'tcx>,
|
||||
val: RValue<'gcc>,
|
||||
dst: PlaceRef<'tcx, RValue<'gcc>>,
|
||||
);
|
||||
fn store_fn_arg(
|
||||
&self,
|
||||
bx: &mut Builder<'_, 'gcc, 'tcx>,
|
||||
idx: &mut usize,
|
||||
dst: PlaceRef<'tcx, RValue<'gcc>>,
|
||||
);
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
@ -453,17 +491,20 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
/// place for the original Rust type of this argument/return.
|
||||
/// Can be used for both storing formal arguments into Rust variables
|
||||
/// or results of call/invoke instructions into their destinations.
|
||||
fn store(&self, bx: &mut Builder<'_, 'gcc, 'tcx>, val: RValue<'gcc>, dst: PlaceRef<'tcx, RValue<'gcc>>) {
|
||||
fn store(
|
||||
&self,
|
||||
bx: &mut Builder<'_, 'gcc, 'tcx>,
|
||||
val: RValue<'gcc>,
|
||||
dst: PlaceRef<'tcx, RValue<'gcc>>,
|
||||
) {
|
||||
if self.is_ignore() {
|
||||
return;
|
||||
}
|
||||
if self.is_sized_indirect() {
|
||||
OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
|
||||
}
|
||||
else if self.is_unsized_indirect() {
|
||||
} else if self.is_unsized_indirect() {
|
||||
bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
|
||||
}
|
||||
else if let PassMode::Cast { ref cast, .. } = self.mode {
|
||||
} else if let PassMode::Cast { ref cast, .. } = self.mode {
|
||||
// FIXME(eddyb): Figure out when the simpler Store is safe, clang
|
||||
// uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}.
|
||||
let can_store_through_cast_ptr = false;
|
||||
@ -471,8 +512,7 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
let cast_ptr_llty = bx.type_ptr_to(cast.gcc_type(bx));
|
||||
let cast_dst = bx.pointercast(dst.llval, cast_ptr_llty);
|
||||
bx.store(val, cast_dst, self.layout.align.abi);
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// The actual return type is a struct, but the ABI
|
||||
// adaptation code has cast it into some scalar type. The
|
||||
// code that follows is the only reliable way I have
|
||||
@ -508,35 +548,44 @@ impl<'gcc, 'tcx> ArgAbiExt<'gcc, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
|
||||
|
||||
bx.lifetime_end(llscratch, scratch_size);
|
||||
}
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
OperandValue::Immediate(val).store(bx, dst);
|
||||
}
|
||||
}
|
||||
|
||||
fn store_fn_arg<'a>(&self, bx: &mut Builder<'a, 'gcc, 'tcx>, idx: &mut usize, dst: PlaceRef<'tcx, RValue<'gcc>>) {
|
||||
fn store_fn_arg<'a>(
|
||||
&self,
|
||||
bx: &mut Builder<'a, 'gcc, 'tcx>,
|
||||
idx: &mut usize,
|
||||
dst: PlaceRef<'tcx, RValue<'gcc>>,
|
||||
) {
|
||||
let mut next = || {
|
||||
let val = bx.current_func().get_param(*idx as i32);
|
||||
*idx += 1;
|
||||
val.to_rvalue()
|
||||
};
|
||||
match self.mode {
|
||||
PassMode::Ignore => {},
|
||||
PassMode::Ignore => {}
|
||||
PassMode::Pair(..) => {
|
||||
OperandValue::Pair(next(), next()).store(bx, dst);
|
||||
},
|
||||
}
|
||||
PassMode::Indirect { meta_attrs: Some(_), .. } => {
|
||||
OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
|
||||
},
|
||||
PassMode::Direct(_) | PassMode::Indirect { meta_attrs: None, .. } | PassMode::Cast { .. } => {
|
||||
}
|
||||
PassMode::Direct(_)
|
||||
| PassMode::Indirect { meta_attrs: None, .. }
|
||||
| PassMode::Cast { .. } => {
|
||||
let next_arg = next();
|
||||
self.store(bx, next_arg, dst);
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn int_type_width_signed<'gcc, 'tcx>(ty: Ty<'tcx>, cx: &CodegenCx<'gcc, 'tcx>) -> Option<(u64, bool)> {
|
||||
fn int_type_width_signed<'gcc, 'tcx>(
|
||||
ty: Ty<'tcx>,
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
) -> Option<(u64, bool)> {
|
||||
match ty.kind() {
|
||||
ty::Int(t) => Some((
|
||||
match t {
|
||||
@ -570,16 +619,10 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
let typ = result_type.to_unsigned(self.cx);
|
||||
|
||||
let value =
|
||||
if result_type.is_signed(self.cx) {
|
||||
self.gcc_int_cast(value, typ)
|
||||
}
|
||||
else {
|
||||
value
|
||||
};
|
||||
if result_type.is_signed(self.cx) { self.gcc_int_cast(value, typ) } else { value };
|
||||
|
||||
let context = &self.cx.context;
|
||||
let result =
|
||||
match width {
|
||||
let result = match width {
|
||||
8 | 16 | 32 | 64 => {
|
||||
let mask = ((1u128 << width) - 1) as u64;
|
||||
let (m0, m1, m2) = if width > 16 {
|
||||
@ -626,7 +669,7 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
} else {
|
||||
self.gcc_bswap(step3, width)
|
||||
}
|
||||
},
|
||||
}
|
||||
128 => {
|
||||
// TODO(antoyo): find a more efficient implementation?
|
||||
let sixty_four = self.gcc_int(typ, 64);
|
||||
@ -640,11 +683,11 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
let new_low = self.gcc_int_cast(reversed_high, typ);
|
||||
let new_high = self.shl(self.gcc_int_cast(reversed_low, typ), sixty_four);
|
||||
|
||||
self.gcc_or(new_low, new_high)
|
||||
},
|
||||
self.gcc_or(new_low, new_high, self.location)
|
||||
}
|
||||
_ => {
|
||||
panic!("cannot bit reverse with width = {}", width);
|
||||
},
|
||||
}
|
||||
};
|
||||
|
||||
self.gcc_int_cast(result, result_type)
|
||||
@ -685,54 +728,52 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
let first_elem = self.context.new_array_access(None, result, zero);
|
||||
let first_value = self.gcc_int_cast(self.context.new_call(None, clzll, &[high]), arg_type);
|
||||
self.llbb()
|
||||
.add_assignment(None, first_elem, first_value);
|
||||
.add_assignment(self.location, first_elem, first_value);
|
||||
|
||||
let second_elem = self.context.new_array_access(None, result, one);
|
||||
let cast = self.gcc_int_cast(self.context.new_call(None, clzll, &[low]), arg_type);
|
||||
let second_elem = self.context.new_array_access(self.location, result, one);
|
||||
let cast = self.gcc_int_cast(self.context.new_call(self.location, clzll, &[low]), arg_type);
|
||||
let second_value = self.add(cast, sixty_four);
|
||||
self.llbb()
|
||||
.add_assignment(None, second_elem, second_value);
|
||||
.add_assignment(self.location, second_elem, second_value);
|
||||
|
||||
let third_elem = self.context.new_array_access(None, result, two);
|
||||
let third_elem = self.context.new_array_access(self.location, result, two);
|
||||
let third_value = self.const_uint(arg_type, 128);
|
||||
self.llbb()
|
||||
.add_assignment(None, third_elem, third_value);
|
||||
.add_assignment(self.location, third_elem, third_value);
|
||||
|
||||
let not_high = self.context.new_unary_op(None, UnaryOp::LogicalNegate, self.u64_type, high);
|
||||
let not_low = self.context.new_unary_op(None, UnaryOp::LogicalNegate, self.u64_type, low);
|
||||
let not_high = self.context.new_unary_op(self.location, UnaryOp::LogicalNegate, self.u64_type, high);
|
||||
let not_low = self.context.new_unary_op(self.location, UnaryOp::LogicalNegate, self.u64_type, low);
|
||||
let not_low_and_not_high = not_low & not_high;
|
||||
let index = not_high + not_low_and_not_high;
|
||||
// NOTE: the following cast is necessary to avoid a GIMPLE verification failure in
|
||||
// gcc.
|
||||
// TODO(antoyo): do the correct verification in libgccjit to avoid an error at the
|
||||
// compilation stage.
|
||||
let index = self.context.new_cast(None, index, self.i32_type);
|
||||
let index = self.context.new_cast(self.location, index, self.i32_type);
|
||||
|
||||
let res = self.context.new_array_access(None, result, index);
|
||||
let res = self.context.new_array_access(self.location, result, index);
|
||||
|
||||
return self.gcc_int_cast(res.to_rvalue(), arg_type);
|
||||
}
|
||||
else {
|
||||
let count_leading_zeroes = self.context.get_builtin_function("__builtin_clzll");
|
||||
let arg = self.context.new_cast(None, arg, self.ulonglong_type);
|
||||
let arg = self.context.new_cast(self.location, arg, self.ulonglong_type);
|
||||
let diff = self.ulonglong_type.get_size() as i64 - arg_type.get_size() as i64;
|
||||
let diff = self.context.new_rvalue_from_long(self.int_type, diff * 8);
|
||||
let res = self.context.new_call(None, count_leading_zeroes, &[arg]) - diff;
|
||||
return self.context.new_cast(None, res, arg_type);
|
||||
let res = self.context.new_call(self.location, count_leading_zeroes, &[arg]) - diff;
|
||||
return self.context.new_cast(self.location, res, arg_type);
|
||||
};
|
||||
let count_leading_zeroes = self.context.get_builtin_function(count_leading_zeroes);
|
||||
let res = self.context.new_call(None, count_leading_zeroes, &[arg]);
|
||||
self.context.new_cast(None, res, arg_type)
|
||||
let res = self.context.new_call(self.location, count_leading_zeroes, &[arg]);
|
||||
self.context.new_cast(self.location, res, arg_type)
|
||||
}
|
||||
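The 128-bit branch above splits the value into two 64-bit halves, takes __builtin_clzll of each, and then selects between clz(high), 64 + clz(low) and 128 through a small array indexed without branches. A scalar Rust sketch of that selection, with u64::leading_zeros standing in for the builtin (a sketch only, not the backend code):

fn clz_u128(value: u128) -> u32 {
    let high = (value >> 64) as u64;
    let low = value as u64;
    // Candidate results, indexed like the `result` array in the code above.
    let candidates = [high.leading_zeros(), 64 + low.leading_zeros(), 128];
    let not_high = (high == 0) as usize;
    let not_low = (low == 0) as usize;
    // 0 => high half non-zero, 1 => only low half non-zero, 2 => value is zero.
    let index = not_high + (not_low & not_high);
    candidates[index]
}

fn main() {
    assert_eq!(clz_u128(0), 128);
    assert_eq!(clz_u128(1), 127);
    assert_eq!(clz_u128(1u128 << 64), 63);
}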
|
||||
fn count_trailing_zeroes(&mut self, _width: u64, arg: RValue<'gcc>) -> RValue<'gcc> {
|
||||
let result_type = arg.get_type();
|
||||
let arg =
|
||||
if result_type.is_signed(self.cx) {
|
||||
let arg = if result_type.is_signed(self.cx) {
|
||||
let new_type = result_type.to_unsigned(self.cx);
|
||||
self.gcc_int_cast(arg, new_type)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
arg
|
||||
};
|
||||
let arg_type = arg.get_type();
|
||||
@ -766,58 +807,56 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
let ctzll = self.context.get_builtin_function("__builtin_ctzll");
|
||||
|
||||
let first_elem = self.context.new_array_access(None, result, zero);
|
||||
let first_value = self.gcc_int_cast(self.context.new_call(None, ctzll, &[low]), arg_type);
|
||||
let first_elem = self.context.new_array_access(self.location, result, zero);
|
||||
let first_value = self.gcc_int_cast(self.context.new_call(self.location, ctzll, &[low]), arg_type);
|
||||
self.llbb()
|
||||
.add_assignment(None, first_elem, first_value);
|
||||
.add_assignment(self.location, first_elem, first_value);
|
||||
|
||||
let second_elem = self.context.new_array_access(None, result, one);
|
||||
let second_value = self.gcc_add(self.gcc_int_cast(self.context.new_call(None, ctzll, &[high]), arg_type), sixty_four);
|
||||
let second_elem = self.context.new_array_access(self.location, result, one);
|
||||
let second_value = self.gcc_add(self.gcc_int_cast(self.context.new_call(self.location, ctzll, &[high]), arg_type), sixty_four);
|
||||
self.llbb()
|
||||
.add_assignment(None, second_elem, second_value);
|
||||
.add_assignment(self.location, second_elem, second_value);
|
||||
|
||||
let third_elem = self.context.new_array_access(None, result, two);
|
||||
let third_elem = self.context.new_array_access(self.location, result, two);
|
||||
let third_value = self.gcc_int(arg_type, 128);
|
||||
self.llbb()
|
||||
.add_assignment(None, third_elem, third_value);
|
||||
.add_assignment(self.location, third_elem, third_value);
|
||||
|
||||
let not_low = self.context.new_unary_op(None, UnaryOp::LogicalNegate, self.u64_type, low);
|
||||
let not_high = self.context.new_unary_op(None, UnaryOp::LogicalNegate, self.u64_type, high);
|
||||
let not_low = self.context.new_unary_op(self.location, UnaryOp::LogicalNegate, self.u64_type, low);
|
||||
let not_high = self.context.new_unary_op(self.location, UnaryOp::LogicalNegate, self.u64_type, high);
|
||||
let not_low_and_not_high = not_low & not_high;
|
||||
let index = not_low + not_low_and_not_high;
|
||||
// NOTE: the following cast is necessary to avoid a GIMPLE verification failure in
|
||||
// gcc.
|
||||
// TODO(antoyo): do the correct verification in libgccjit to avoid an error at the
|
||||
// compilation stage.
|
||||
let index = self.context.new_cast(None, index, self.i32_type);
|
||||
let index = self.context.new_cast(self.location, index, self.i32_type);
|
||||
|
||||
let res = self.context.new_array_access(None, result, index);
|
||||
let res = self.context.new_array_access(self.location, result, index);
|
||||
|
||||
return self.gcc_int_cast(res.to_rvalue(), result_type);
|
||||
}
|
||||
else {
|
||||
let count_trailing_zeroes = self.context.get_builtin_function("__builtin_ctzll");
|
||||
let arg_size = arg_type.get_size();
|
||||
let casted_arg = self.context.new_cast(None, arg, self.ulonglong_type);
|
||||
let casted_arg = self.context.new_cast(self.location, arg, self.ulonglong_type);
|
||||
let byte_diff = self.ulonglong_type.get_size() as i64 - arg_size as i64;
|
||||
let diff = self.context.new_rvalue_from_long(self.int_type, byte_diff * 8);
|
||||
let mask = self.context.new_rvalue_from_long(arg_type, -1); // To get the value with all bits set.
|
||||
let masked = mask & self.context.new_unary_op(None, UnaryOp::BitwiseNegate, arg_type, arg);
|
||||
let cond = self.context.new_comparison(None, ComparisonOp::Equals, masked, mask);
|
||||
let diff = diff * self.context.new_cast(None, cond, self.int_type);
|
||||
let res = self.context.new_call(None, count_trailing_zeroes, &[casted_arg]) - diff;
|
||||
return self.context.new_cast(None, res, result_type);
|
||||
let masked = mask & self.context.new_unary_op(self.location, UnaryOp::BitwiseNegate, arg_type, arg);
|
||||
let cond = self.context.new_comparison(self.location, ComparisonOp::Equals, masked, mask);
|
||||
let diff = diff * self.context.new_cast(self.location, cond, self.int_type);
|
||||
let res = self.context.new_call(self.location, count_trailing_zeroes, &[casted_arg]) - diff;
|
||||
return self.context.new_cast(self.location, res, result_type);
|
||||
};
|
||||
let count_trailing_zeroes = self.context.get_builtin_function(count_trailing_zeroes);
|
||||
let arg =
|
||||
if arg_type != expected_type {
|
||||
self.context.new_cast(None, arg, expected_type)
|
||||
}
|
||||
else {
|
||||
let arg = if arg_type != expected_type {
|
||||
self.context.new_cast(self.location, arg, expected_type)
|
||||
} else {
|
||||
arg
|
||||
};
|
||||
let res = self.context.new_call(None, count_trailing_zeroes, &[arg]);
|
||||
self.context.new_cast(None, res, result_type)
|
||||
let res = self.context.new_call(self.location, count_trailing_zeroes, &[arg]);
|
||||
self.context.new_cast(self.location, res, result_type)
|
||||
}
|
||||
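count_trailing_zeroes uses the mirror image of the leading-zero selection: ctz(low), 64 + ctz(high) or 128, indexed by which halves are zero. The same kind of sketch with the halves swapped, u64::trailing_zeros standing in for __builtin_ctzll:

fn ctz_u128(value: u128) -> u32 {
    let low = value as u64;
    let high = (value >> 64) as u64;
    let candidates = [low.trailing_zeros(), 64 + high.trailing_zeros(), 128];
    let not_low = (low == 0) as usize;
    let not_high = (high == 0) as usize;
    // 0 => low half non-zero, 1 => only high half non-zero, 2 => value is zero.
    candidates[not_low + (not_low & not_high)]
}

fn main() {
    assert_eq!(ctz_u128(0), 128);
    assert_eq!(ctz_u128(2), 1);
    assert_eq!(ctz_u128(1u128 << 64), 64);
}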
|
||||
fn pop_count(&mut self, value: RValue<'gcc>) -> RValue<'gcc> {
|
||||
@ -825,11 +864,9 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
let result_type = value.get_type();
|
||||
let value_type = result_type.to_unsigned(self.cx);
|
||||
|
||||
let value =
|
||||
if result_type.is_signed(self.cx) {
|
||||
let value = if result_type.is_signed(self.cx) {
|
||||
self.gcc_int_cast(value, value_type)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
value
|
||||
};
|
||||
|
||||
@ -859,8 +896,8 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
let counter = self.current_func().new_local(None, counter_type, "popcount_counter");
|
||||
let val = self.current_func().new_local(None, value_type, "popcount_value");
|
||||
let zero = self.gcc_zero(counter_type);
|
||||
self.llbb().add_assignment(None, counter, zero);
|
||||
self.llbb().add_assignment(None, val, value);
|
||||
self.llbb().add_assignment(self.location, counter, zero);
|
||||
self.llbb().add_assignment(self.location, val, value);
|
||||
self.br(loop_head);
|
||||
|
||||
// check if value isn't zero
|
||||
@ -874,12 +911,12 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
let one = self.gcc_int(value_type, 1);
|
||||
let sub = self.gcc_sub(val.to_rvalue(), one);
|
||||
let op = self.gcc_and(val.to_rvalue(), sub);
|
||||
loop_body.add_assignment(None, val, op);
|
||||
loop_body.add_assignment(self.location, val, op);
|
||||
|
||||
// counter += 1
|
||||
let one = self.gcc_int(counter_type, 1);
|
||||
let op = self.gcc_add(counter.to_rvalue(), one);
|
||||
loop_body.add_assignment(None, counter, op);
|
||||
loop_body.add_assignment(self.location, counter, op);
|
||||
self.br(loop_head);
|
||||
|
||||
// end of loop
|
||||
@ -888,46 +925,52 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
}
|
||||
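The loop emitted above is the classic clear-the-lowest-set-bit popcount: while the value is non-zero, AND it with itself minus one and bump a counter. In plain Rust (a sketch of the same loop, not the emitted GIMPLE):

fn pop_count(mut value: u128) -> u32 {
    let mut counter = 0;
    while value != 0 {
        value &= value - 1; // drop the lowest set bit
        counter += 1;
    }
    counter
}

fn main() {
    assert_eq!(pop_count(0), 0);
    assert_eq!(pop_count(0b1011), 3);
    assert_eq!(pop_count(u128::MAX), 128);
}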
|
||||
// Algorithm from: https://blog.regehr.org/archives/1063
|
||||
fn rotate_left(&mut self, value: RValue<'gcc>, shift: RValue<'gcc>, width: u64) -> RValue<'gcc> {
|
||||
fn rotate_left(
|
||||
&mut self,
|
||||
value: RValue<'gcc>,
|
||||
shift: RValue<'gcc>,
|
||||
width: u64,
|
||||
) -> RValue<'gcc> {
|
||||
let max = self.const_uint(shift.get_type(), width);
|
||||
let shift = self.urem(shift, max);
|
||||
let lhs = self.shl(value, shift);
|
||||
let result_neg = self.neg(shift);
|
||||
let result_and =
|
||||
self.and(
|
||||
result_neg,
|
||||
self.const_uint(shift.get_type(), width - 1),
|
||||
);
|
||||
let result_and = self.and(result_neg, self.const_uint(shift.get_type(), width - 1));
|
||||
let rhs = self.lshr(value, result_and);
|
||||
self.or(lhs, rhs)
|
||||
}
|
||||
|
||||
// Algorithm from: https://blog.regehr.org/archives/1063
|
||||
fn rotate_right(&mut self, value: RValue<'gcc>, shift: RValue<'gcc>, width: u64) -> RValue<'gcc> {
|
||||
fn rotate_right(
|
||||
&mut self,
|
||||
value: RValue<'gcc>,
|
||||
shift: RValue<'gcc>,
|
||||
width: u64,
|
||||
) -> RValue<'gcc> {
|
||||
let max = self.const_uint(shift.get_type(), width);
|
||||
let shift = self.urem(shift, max);
|
||||
let lhs = self.lshr(value, shift);
|
||||
let result_neg = self.neg(shift);
|
||||
let result_and =
|
||||
self.and(
|
||||
result_neg,
|
||||
self.const_uint(shift.get_type(), width - 1),
|
||||
);
|
||||
let result_and = self.and(result_neg, self.const_uint(shift.get_type(), width - 1));
|
||||
let rhs = self.shl(value, result_and);
|
||||
self.or(lhs, rhs)
|
||||
}
|
||||
|
||||
fn saturating_add(&mut self, lhs: RValue<'gcc>, rhs: RValue<'gcc>, signed: bool, width: u64) -> RValue<'gcc> {
|
||||
fn saturating_add(
|
||||
&mut self,
|
||||
lhs: RValue<'gcc>,
|
||||
rhs: RValue<'gcc>,
|
||||
signed: bool,
|
||||
width: u64,
|
||||
) -> RValue<'gcc> {
|
||||
let result_type = lhs.get_type();
|
||||
if signed {
|
||||
// Based on algorithm from: https://stackoverflow.com/a/56531252/389119
|
||||
let func = self.current_func.borrow().expect("func");
|
||||
let res = func.new_local(None, result_type, "saturating_sum");
|
||||
let res = func.new_local(self.location, result_type, "saturating_sum");
|
||||
let supports_native_type = self.is_native_int_type(result_type);
|
||||
let overflow =
|
||||
if supports_native_type {
|
||||
let func_name =
|
||||
match width {
|
||||
let overflow = if supports_native_type {
|
||||
let func_name = match width {
|
||||
8 => "__builtin_add_overflow",
|
||||
16 => "__builtin_add_overflow",
|
||||
32 => "__builtin_sadd_overflow",
|
||||
@ -936,16 +979,14 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let overflow_func = self.context.get_builtin_function(func_name);
|
||||
self.overflow_call(overflow_func, &[lhs, rhs, res.get_address(None)], None)
|
||||
}
|
||||
else {
|
||||
let func_name =
|
||||
match width {
|
||||
self.overflow_call(overflow_func, &[lhs, rhs, res.get_address(self.location)], None)
|
||||
} else {
|
||||
let func_name = match width {
|
||||
128 => "__rust_i128_addo",
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs);
|
||||
self.llbb().add_assignment(None, res, int_result);
|
||||
self.llbb().add_assignment(self.location, res, int_result);
|
||||
overflow
|
||||
};
|
||||
|
||||
@ -955,41 +996,51 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
// Return `result_type`'s maximum or minimum value on overflow
|
||||
// NOTE: convert the type to unsigned to have an unsigned shift.
|
||||
let unsigned_type = result_type.to_unsigned(&self.cx);
|
||||
let shifted = self.gcc_lshr(self.gcc_int_cast(lhs, unsigned_type), self.gcc_int(unsigned_type, width as i64 - 1));
|
||||
let shifted = self.gcc_lshr(
|
||||
self.gcc_int_cast(lhs, unsigned_type),
|
||||
self.gcc_int(unsigned_type, width as i64 - 1),
|
||||
);
|
||||
let uint_max = self.gcc_not(self.gcc_int(unsigned_type, 0));
|
||||
let int_max = self.gcc_lshr(uint_max, self.gcc_int(unsigned_type, 1));
|
||||
then_block.add_assignment(None, res, self.gcc_int_cast(self.gcc_add(shifted, int_max), result_type));
|
||||
then_block.end_with_jump(None, after_block);
|
||||
then_block.add_assignment(
|
||||
self.location,
|
||||
res,
|
||||
self.gcc_int_cast(self.gcc_add(shifted, int_max), result_type),
|
||||
);
|
||||
then_block.end_with_jump(self.location, after_block);
|
||||
|
||||
self.llbb().end_with_conditional(None, overflow, then_block, after_block);
|
||||
self.llbb().end_with_conditional(self.location, overflow, then_block, after_block);
|
||||
|
||||
// NOTE: since jumps were added in a place rustc does not
|
||||
// expect, the current block in the state needs to be updated.
|
||||
self.switch_to_block(after_block);
|
||||
|
||||
res.to_rvalue()
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// Algorithm from: http://locklessinc.com/articles/sat_arithmetic/
|
||||
let res = self.gcc_add(lhs, rhs);
|
||||
let cond = self.gcc_icmp(IntPredicate::IntULT, res, lhs);
|
||||
let value = self.gcc_neg(self.gcc_int_cast(cond, result_type));
|
||||
self.gcc_or(res, value)
|
||||
self.gcc_or(res, value, self.location)
|
||||
}
|
||||
}
|
||||
|
||||
// Algorithm from: https://locklessinc.com/articles/sat_arithmetic/
|
||||
fn saturating_sub(&mut self, lhs: RValue<'gcc>, rhs: RValue<'gcc>, signed: bool, width: u64) -> RValue<'gcc> {
|
||||
fn saturating_sub(
|
||||
&mut self,
|
||||
lhs: RValue<'gcc>,
|
||||
rhs: RValue<'gcc>,
|
||||
signed: bool,
|
||||
width: u64,
|
||||
) -> RValue<'gcc> {
|
||||
let result_type = lhs.get_type();
|
||||
if signed {
|
||||
// Based on algorithm from: https://stackoverflow.com/a/56531252/389119
|
||||
let func = self.current_func.borrow().expect("func");
|
||||
let res = func.new_local(None, result_type, "saturating_diff");
|
||||
let res = func.new_local(self.location, result_type, "saturating_diff");
|
||||
let supports_native_type = self.is_native_int_type(result_type);
|
||||
let overflow =
|
||||
if supports_native_type {
|
||||
let func_name =
|
||||
match width {
|
||||
let overflow = if supports_native_type {
|
||||
let func_name = match width {
|
||||
8 => "__builtin_sub_overflow",
|
||||
16 => "__builtin_sub_overflow",
|
||||
32 => "__builtin_ssub_overflow",
|
||||
@ -998,16 +1049,14 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let overflow_func = self.context.get_builtin_function(func_name);
|
||||
self.overflow_call(overflow_func, &[lhs, rhs, res.get_address(None)], None)
|
||||
}
|
||||
else {
|
||||
let func_name =
|
||||
match width {
|
||||
self.overflow_call(overflow_func, &[lhs, rhs, res.get_address(self.location)], None)
|
||||
} else {
|
||||
let func_name = match width {
|
||||
128 => "__rust_i128_subo",
|
||||
_ => unreachable!(),
|
||||
};
|
||||
let (int_result, overflow) = self.operation_with_overflow(func_name, lhs, rhs);
|
||||
self.llbb().add_assignment(None, res, int_result);
|
||||
self.llbb().add_assignment(self.location, res, int_result);
|
||||
overflow
|
||||
};
|
||||
|
||||
@ -1016,22 +1065,28 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
|
||||
// Return `result_type`'s maximum or minimum value on overflow
|
||||
// NOTE: convert the type to unsigned to have an unsigned shift.
|
||||
let unsigned_type = result_type.to_unsigned(&self.cx);
|
||||
let shifted = self.gcc_lshr(self.gcc_int_cast(lhs, unsigned_type), self.gcc_int(unsigned_type, width as i64 - 1));
|
||||
let unsigned_type = result_type.to_unsigned(self.cx);
|
||||
let shifted = self.gcc_lshr(
|
||||
self.gcc_int_cast(lhs, unsigned_type),
|
||||
self.gcc_int(unsigned_type, width as i64 - 1),
|
||||
);
|
||||
let uint_max = self.gcc_not(self.gcc_int(unsigned_type, 0));
|
||||
let int_max = self.gcc_lshr(uint_max, self.gcc_int(unsigned_type, 1));
|
||||
then_block.add_assignment(None, res, self.gcc_int_cast(self.gcc_add(shifted, int_max), result_type));
|
||||
then_block.end_with_jump(None, after_block);
|
||||
then_block.add_assignment(
|
||||
self.location,
|
||||
res,
|
||||
self.gcc_int_cast(self.gcc_add(shifted, int_max), result_type),
|
||||
);
|
||||
then_block.end_with_jump(self.location, after_block);
|
||||
|
||||
self.llbb().end_with_conditional(None, overflow, then_block, after_block);
|
||||
self.llbb().end_with_conditional(self.location, overflow, then_block, after_block);
|
||||
|
||||
// NOTE: since jumps were added in a place rustc does not
|
||||
// expect, the current block in the state needs to be updated.
|
||||
self.switch_to_block(after_block);
|
||||
|
||||
res.to_rvalue()
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
let res = self.gcc_sub(lhs, rhs);
|
||||
let comparison = self.gcc_icmp(IntPredicate::IntULE, res, lhs);
|
||||
let value = self.gcc_neg(self.gcc_int_cast(comparison, result_type));
|
||||
@ -1040,21 +1095,26 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
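The unsigned branches of saturating_add and saturating_sub use the branchless clamp from the linked locklessinc article: compute the wrapping result, turn the carry or borrow condition into an all-ones or all-zero mask via negation, then OR it in (add) or AND it in (sub). A u32 sketch of both:

fn saturating_add_u32(lhs: u32, rhs: u32) -> u32 {
    let res = lhs.wrapping_add(rhs);
    // -(res < lhs) is all ones exactly when the addition wrapped.
    res | ((res < lhs) as u32).wrapping_neg()
}

fn saturating_sub_u32(lhs: u32, rhs: u32) -> u32 {
    let res = lhs.wrapping_sub(rhs);
    // -(res <= lhs) is all ones exactly when the subtraction did not wrap.
    res & ((res <= lhs) as u32).wrapping_neg()
}

fn main() {
    assert_eq!(saturating_add_u32(u32::MAX - 1, 5), u32::MAX);
    assert_eq!(saturating_add_u32(2, 3), 5);
    assert_eq!(saturating_sub_u32(3, 5), 0);
    assert_eq!(saturating_sub_u32(5, 3), 2);
}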
|
||||
fn try_intrinsic<'a, 'b, 'gcc, 'tcx>(bx: &'b mut Builder<'a, 'gcc, 'tcx>, try_func: RValue<'gcc>, data: RValue<'gcc>, _catch_func: RValue<'gcc>, dest: RValue<'gcc>) {
|
||||
fn try_intrinsic<'a, 'b, 'gcc, 'tcx>(
|
||||
bx: &'b mut Builder<'a, 'gcc, 'tcx>,
|
||||
try_func: RValue<'gcc>,
|
||||
data: RValue<'gcc>,
|
||||
_catch_func: RValue<'gcc>,
|
||||
dest: RValue<'gcc>,
|
||||
) {
|
||||
if bx.sess().panic_strategy() == PanicStrategy::Abort {
|
||||
bx.call(bx.type_void(), None, None, try_func, &[data], None);
|
||||
// Return 0 unconditionally from the intrinsic call;
|
||||
// we can never unwind.
|
||||
let ret_align = bx.tcx.data_layout.i32_align.abi;
|
||||
bx.store(bx.const_i32(0), dest, ret_align);
|
||||
}
|
||||
else if wants_msvc_seh(bx.sess()) {
|
||||
} else {
|
||||
if wants_msvc_seh(bx.sess()) {
|
||||
unimplemented!();
|
||||
}
|
||||
else {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
codegen_gnu_try(bx, try_func, data, _catch_func, dest);
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
unimplemented!();
|
||||
}
|
||||
}
|
||||
@ -1070,8 +1130,14 @@ fn try_intrinsic<'a, 'b, 'gcc, 'tcx>(bx: &'b mut Builder<'a, 'gcc, 'tcx>, try_fu
|
||||
// function calling it, and that function may already have other personality
|
||||
// functions in play. By calling a shim we're guaranteed that our shim will have
|
||||
// the right personality function.
|
||||
#[cfg(feature="master")]
|
||||
fn codegen_gnu_try<'gcc>(bx: &mut Builder<'_, 'gcc, '_>, try_func: RValue<'gcc>, data: RValue<'gcc>, catch_func: RValue<'gcc>, dest: RValue<'gcc>) {
|
||||
#[cfg(feature = "master")]
|
||||
fn codegen_gnu_try<'gcc>(
|
||||
bx: &mut Builder<'_, 'gcc, '_>,
|
||||
try_func: RValue<'gcc>,
|
||||
data: RValue<'gcc>,
|
||||
catch_func: RValue<'gcc>,
|
||||
dest: RValue<'gcc>,
|
||||
) {
|
||||
let cx: &CodegenCx<'gcc, '_> = bx.cx;
|
||||
let (llty, func) = get_rust_try_fn(cx, &mut |mut bx| {
|
||||
// Codegens the shims described above:
|
||||
@ -1095,7 +1161,7 @@ fn codegen_gnu_try<'gcc>(bx: &mut Builder<'_, 'gcc, '_>, try_func: RValue<'gcc>,
|
||||
let catch_func = func.get_param(2).to_rvalue();
|
||||
let try_func_ty = bx.type_func(&[bx.type_i8p()], bx.type_void());
|
||||
|
||||
let current_block = bx.block.clone();
|
||||
let current_block = bx.block;
|
||||
|
||||
bx.switch_to_block(then);
|
||||
bx.ret(bx.const_i32(0));
|
||||
@ -1130,36 +1196,44 @@ fn codegen_gnu_try<'gcc>(bx: &mut Builder<'_, 'gcc, '_>, try_func: RValue<'gcc>,
|
||||
bx.store(ret, dest, i32_align);
|
||||
}
|
||||
|
||||
|
||||
// Helper function used to get a handle to the `__rust_try` function used to
|
||||
// catch exceptions.
|
||||
//
|
||||
// This function is only generated once and is then cached.
|
||||
#[cfg(feature="master")]
|
||||
fn get_rust_try_fn<'a, 'gcc, 'tcx>(cx: &'a CodegenCx<'gcc, 'tcx>, codegen: &mut dyn FnMut(Builder<'a, 'gcc, 'tcx>)) -> (Type<'gcc>, Function<'gcc>) {
|
||||
#[cfg(feature = "master")]
|
||||
fn get_rust_try_fn<'a, 'gcc, 'tcx>(
|
||||
cx: &'a CodegenCx<'gcc, 'tcx>,
|
||||
codegen: &mut dyn FnMut(Builder<'a, 'gcc, 'tcx>),
|
||||
) -> (Type<'gcc>, Function<'gcc>) {
|
||||
if let Some(llfn) = cx.rust_try_fn.get() {
|
||||
return llfn;
|
||||
}
|
||||
|
||||
// Define the type up front for the signature of the rust_try function.
|
||||
let tcx = cx.tcx;
|
||||
let i8p = Ty::new_mut_ptr(tcx,tcx.types.i8);
|
||||
let i8p = Ty::new_mut_ptr(tcx, tcx.types.i8);
|
||||
// `unsafe fn(*mut i8) -> ()`
|
||||
let try_fn_ty = Ty::new_fn_ptr(tcx,ty::Binder::dummy(tcx.mk_fn_sig(
|
||||
let try_fn_ty = Ty::new_fn_ptr(
|
||||
tcx,
|
||||
ty::Binder::dummy(tcx.mk_fn_sig(
|
||||
iter::once(i8p),
|
||||
Ty::new_unit(tcx,),
|
||||
Ty::new_unit(tcx),
|
||||
false,
|
||||
rustc_hir::Unsafety::Unsafe,
|
||||
Abi::Rust,
|
||||
)));
|
||||
)),
|
||||
);
|
||||
// `unsafe fn(*mut i8, *mut i8) -> ()`
|
||||
let catch_fn_ty = Ty::new_fn_ptr(tcx,ty::Binder::dummy(tcx.mk_fn_sig(
|
||||
let catch_fn_ty = Ty::new_fn_ptr(
|
||||
tcx,
|
||||
ty::Binder::dummy(tcx.mk_fn_sig(
|
||||
[i8p, i8p].iter().cloned(),
|
||||
Ty::new_unit(tcx,),
|
||||
Ty::new_unit(tcx),
|
||||
false,
|
||||
rustc_hir::Unsafety::Unsafe,
|
||||
Abi::Rust,
|
||||
)));
|
||||
)),
|
||||
);
|
||||
// `unsafe fn(unsafe fn(*mut i8) -> (), *mut i8, unsafe fn(*mut i8, *mut i8) -> ()) -> i32`
|
||||
let rust_fn_sig = ty::Binder::dummy(cx.tcx.mk_fn_sig(
|
||||
[try_fn_ty, i8p, catch_fn_ty],
|
||||
@ -1175,8 +1249,13 @@ fn get_rust_try_fn<'a, 'gcc, 'tcx>(cx: &'a CodegenCx<'gcc, 'tcx>, codegen: &mut
|
||||
|
||||
// Helper function to give a Block to a closure to codegen a shim function.
|
||||
// This is currently primarily used for the `try` intrinsic functions above.
|
||||
#[cfg(feature="master")]
|
||||
fn gen_fn<'a, 'gcc, 'tcx>(cx: &'a CodegenCx<'gcc, 'tcx>, name: &str, rust_fn_sig: ty::PolyFnSig<'tcx>, codegen: &mut dyn FnMut(Builder<'a, 'gcc, 'tcx>)) -> (Type<'gcc>, Function<'gcc>) {
|
||||
#[cfg(feature = "master")]
|
||||
fn gen_fn<'a, 'gcc, 'tcx>(
|
||||
cx: &'a CodegenCx<'gcc, 'tcx>,
|
||||
name: &str,
|
||||
rust_fn_sig: ty::PolyFnSig<'tcx>,
|
||||
codegen: &mut dyn FnMut(Builder<'a, 'gcc, 'tcx>),
|
||||
) -> (Type<'gcc>, Function<'gcc>) {
|
||||
let fn_abi = cx.fn_abi_of_fn_ptr(rust_fn_sig, ty::List::empty());
|
||||
let return_type = fn_abi.gcc_type(cx).return_type;
|
||||
// FIXME(eddyb) find a nicer way to do this.
|
||||
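As a reading aid for the fn_sig values built in this file, the three signatures have the following Rust-level shape. This is a sketch with illustrative alias names, not generated code; per the usual __rust_try contract the shim returns 0 when try_func returns normally and 1 after catch_func handles an unwind:

type TryFn = unsafe fn(*mut i8);            // unsafe fn(*mut i8) -> ()
type CatchFn = unsafe fn(*mut i8, *mut i8); // unsafe fn(*mut i8, *mut i8) -> ()
type RustTry = unsafe fn(TryFn, *mut i8, CatchFn) -> i32;

fn main() {
    // All three are thin function pointers.
    assert_eq!(std::mem::size_of::<RustTry>(), std::mem::size_of::<usize>());
}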
|
@ -1,3 +1,5 @@
|
||||
use std::iter::FromIterator;
|
||||
|
||||
use gccjit::ToRValue;
|
||||
use gccjit::{BinaryOp, RValue, Type};
|
||||
#[cfg(feature = "master")]
|
||||
@ -19,6 +21,8 @@ use rustc_span::{sym, Span, Symbol};
|
||||
use rustc_target::abi::Align;
|
||||
|
||||
use crate::builder::Builder;
|
||||
#[cfg(not(feature = "master"))]
|
||||
use crate::common::SignType;
|
||||
#[cfg(feature = "master")]
|
||||
use crate::context::CodegenCx;
|
||||
|
||||
@ -156,6 +160,197 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
|
||||
return Ok(compare_simd_types(bx, arg1, arg2, in_elem, llret_ty, cmp_op));
|
||||
}
|
||||
|
||||
let simd_bswap = |bx: &mut Builder<'a, 'gcc, 'tcx>, vector: RValue<'gcc>| -> RValue<'gcc> {
|
||||
let v_type = vector.get_type();
|
||||
let vector_type = v_type.unqualified().dyncast_vector().expect("vector type");
|
||||
let elem_type = vector_type.get_element_type();
|
||||
let elem_size_bytes = elem_type.get_size();
|
||||
if elem_size_bytes == 1 {
|
||||
return vector;
|
||||
}
|
||||
|
||||
let type_size_bytes = elem_size_bytes as u64 * in_len;
|
||||
let shuffle_indices = Vec::from_iter(0..type_size_bytes);
|
||||
let byte_vector_type = bx.context.new_vector_type(bx.type_u8(), type_size_bytes);
|
||||
let byte_vector = bx.context.new_bitcast(None, args[0].immediate(), byte_vector_type);
|
||||
|
||||
#[cfg(not(feature = "master"))]
|
||||
let shuffled = {
|
||||
let new_elements: Vec<_> = shuffle_indices
|
||||
.chunks_exact(elem_size_bytes as _)
|
||||
.flat_map(|x| x.iter().rev())
|
||||
.map(|&i| {
|
||||
let index = bx.context.new_rvalue_from_long(bx.u64_type, i as _);
|
||||
bx.extract_element(byte_vector, index)
|
||||
})
|
||||
.collect();
|
||||
|
||||
bx.context.new_rvalue_from_vector(None, byte_vector_type, &new_elements)
|
||||
};
|
||||
#[cfg(feature = "master")]
|
||||
let shuffled = {
|
||||
let indices: Vec<_> = shuffle_indices
|
||||
.chunks_exact(elem_size_bytes as _)
|
||||
.flat_map(|x| x.iter().rev())
|
||||
.map(|&i| bx.context.new_rvalue_from_int(bx.u8_type, i as _))
|
||||
.collect();
|
||||
|
||||
let mask = bx.context.new_rvalue_from_vector(None, byte_vector_type, &indices);
|
||||
bx.context.new_rvalue_vector_perm(None, byte_vector, byte_vector, mask)
|
||||
};
|
||||
bx.context.new_bitcast(None, shuffled, v_type)
|
||||
};
|
||||
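The shuffle mask built by the simd_bswap closure is just the byte indices 0..size, chunked per element and reversed within each chunk, so the permutation reverses each lane's bytes. A scalar sketch of the same effect over a plain byte slice (the helper name is illustrative):

fn bswap_bytes(bytes: &mut [u8], elem_size: usize) {
    assert_eq!(bytes.len() % elem_size, 0);
    // Reverse each element's bytes, matching the reversed chunks of shuffle_indices.
    for chunk in bytes.chunks_exact_mut(elem_size) {
        chunk.reverse();
    }
}

fn main() {
    // Two little-endian u32 lanes: [0x11223344, 0xAABBCCDD].
    let mut bytes = [0x44, 0x33, 0x22, 0x11, 0xDD, 0xCC, 0xBB, 0xAA];
    bswap_bytes(&mut bytes, 4);
    assert_eq!(bytes, [0x11, 0x22, 0x33, 0x44, 0xAA, 0xBB, 0xCC, 0xDD]);
}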
|
||||
if name == sym::simd_bswap || name == sym::simd_bitreverse {
|
||||
require!(
|
||||
bx.type_kind(bx.element_type(llret_ty)) == TypeKind::Integer,
|
||||
InvalidMonomorphization::UnsupportedOperation { span, name, in_ty, in_elem }
|
||||
);
|
||||
}
|
||||
|
||||
if name == sym::simd_bswap {
|
||||
return Ok(simd_bswap(bx, args[0].immediate()));
|
||||
}
|
||||
|
||||
// We use a different algorithm from non-vector bitreverse to take advantage of most
|
||||
// processors' vector shuffle units. It works like this:
|
||||
// 1. Generate pre-reversed low and high nibbles as a vector.
|
||||
// 2. Byte-swap the input.
|
||||
// 3. Mask off the low and high nibbles of each byte in the byte-swapped input.
|
||||
// 4. Shuffle the pre-reversed low and high-nibbles using the masked nibbles as a shuffle mask.
|
||||
// 5. Combine the results of the shuffle back together and cast back to the original type.
|
||||
#[cfg(feature = "master")]
|
||||
if name == sym::simd_bitreverse {
|
||||
let vector = args[0].immediate();
|
||||
let v_type = vector.get_type();
|
||||
let vector_type = v_type.unqualified().dyncast_vector().expect("vector type");
|
||||
let elem_type = vector_type.get_element_type();
|
||||
let elem_size_bytes = elem_type.get_size();
|
||||
|
||||
let type_size_bytes = elem_size_bytes as u64 * in_len;
|
||||
// We need to ensure at least 16 entries in our vector type, since the pre-reversed vectors
|
||||
// we generate below have 16 entries in them. `new_rvalue_vector_perm` requires the mask
|
||||
// vector to be of the same length as the source vectors.
|
||||
let byte_vector_type_size = type_size_bytes.max(16);
|
||||
|
||||
let byte_vector_type = bx.context.new_vector_type(bx.u8_type, type_size_bytes);
|
||||
let long_byte_vector_type = bx.context.new_vector_type(bx.u8_type, byte_vector_type_size);
|
||||
|
||||
// Step 1: Generate pre-reversed low and high nibbles as a vector.
|
||||
let zero_byte = bx.context.new_rvalue_zero(bx.u8_type);
|
||||
let hi_nibble_elements: Vec<_> = (0u8..16)
|
||||
.map(|x| bx.context.new_rvalue_from_int(bx.u8_type, x.reverse_bits() as _))
|
||||
.chain((16..byte_vector_type_size).map(|_| zero_byte))
|
||||
.collect();
|
||||
let hi_nibble =
|
||||
bx.context.new_rvalue_from_vector(None, long_byte_vector_type, &hi_nibble_elements);
|
||||
|
||||
let lo_nibble_elements: Vec<_> = (0u8..16)
|
||||
.map(|x| bx.context.new_rvalue_from_int(bx.u8_type, (x.reverse_bits() >> 4) as _))
|
||||
.chain((16..byte_vector_type_size).map(|_| zero_byte))
|
||||
.collect();
|
||||
let lo_nibble =
|
||||
bx.context.new_rvalue_from_vector(None, long_byte_vector_type, &lo_nibble_elements);
|
||||
|
||||
let mask = bx.context.new_rvalue_from_vector(
|
||||
None,
|
||||
long_byte_vector_type,
|
||||
&vec![bx.context.new_rvalue_from_int(bx.u8_type, 0x0f); byte_vector_type_size as _],
|
||||
);
|
||||
|
||||
let four_vec = bx.context.new_rvalue_from_vector(
|
||||
None,
|
||||
long_byte_vector_type,
|
||||
&vec![bx.context.new_rvalue_from_int(bx.u8_type, 4); byte_vector_type_size as _],
|
||||
);
|
||||
|
||||
// Step 2: Byte-swap the input.
|
||||
let swapped = simd_bswap(bx, args[0].immediate());
|
||||
let byte_vector = bx.context.new_bitcast(None, swapped, byte_vector_type);
|
||||
|
||||
// We're going to need to extend the vector with zeros to make sure that the types are the
|
||||
// same, since that's what new_rvalue_vector_perm expects.
|
||||
let byte_vector = if byte_vector_type_size > type_size_bytes {
|
||||
let mut byte_vector_elements = Vec::with_capacity(byte_vector_type_size as _);
|
||||
for i in 0..type_size_bytes {
|
||||
let idx = bx.context.new_rvalue_from_int(bx.u32_type, i as _);
|
||||
let val = bx.extract_element(byte_vector, idx);
|
||||
byte_vector_elements.push(val);
|
||||
}
|
||||
for _ in type_size_bytes..byte_vector_type_size {
|
||||
byte_vector_elements.push(zero_byte);
|
||||
}
|
||||
bx.context.new_rvalue_from_vector(None, long_byte_vector_type, &byte_vector_elements)
|
||||
} else {
|
||||
bx.context.new_bitcast(None, byte_vector, long_byte_vector_type)
|
||||
};
|
||||
|
||||
// Step 3: Mask off the low and high nibbles of each byte in the byte-swapped input.
|
||||
let masked_hi = (byte_vector >> four_vec) & mask;
|
||||
let masked_lo = byte_vector & mask;
|
||||
|
||||
// Step 4: Shuffle the pre-reversed low and high-nibbles using the masked nibbles as a shuffle mask.
|
||||
let hi = bx.context.new_rvalue_vector_perm(None, hi_nibble, hi_nibble, masked_lo);
|
||||
let lo = bx.context.new_rvalue_vector_perm(None, lo_nibble, lo_nibble, masked_hi);
|
||||
|
||||
// Step 5: Combine the results of the shuffle back together and cast back to the original type.
|
||||
let result = hi | lo;
|
||||
let cast_ty =
|
||||
bx.context.new_vector_type(elem_type, byte_vector_type_size / (elem_size_bytes as u64));
|
||||
|
||||
// we might need to truncate if sizeof(v_type) < sizeof(cast_type)
|
||||
if type_size_bytes < byte_vector_type_size {
|
||||
let cast_result = bx.context.new_bitcast(None, result, cast_ty);
|
||||
let elems: Vec<_> = (0..in_len)
|
||||
.map(|i| {
|
||||
let idx = bx.context.new_rvalue_from_int(bx.u32_type, i as _);
|
||||
bx.extract_element(cast_result, idx)
|
||||
})
|
||||
.collect();
|
||||
return Ok(bx.context.new_rvalue_from_vector(None, v_type, &elems));
|
||||
} else {
|
||||
// avoid the unnecessary truncation as an optimization.
|
||||
return Ok(bx.context.new_bitcast(None, result, v_type));
|
||||
}
|
||||
}
|
||||
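Per byte, steps 1 through 5 above amount to: byte-swap first, then look up the low nibble in a table of pre-reversed nibbles placed in the high half, look up the high nibble in a table placed in the low half, and OR the two. A scalar sketch of that per-byte step, with table construction mirroring hi_nibble_elements and lo_nibble_elements:

fn reverse_byte(b: u8) -> u8 {
    // hi[n] holds nibble n reversed into the high 4 bits, lo[n] into the low 4 bits.
    let mut hi = [0u8; 16];
    let mut lo = [0u8; 16];
    for n in 0u8..16 {
        hi[n as usize] = n.reverse_bits();
        lo[n as usize] = n.reverse_bits() >> 4;
    }
    hi[(b & 0x0f) as usize] | lo[(b >> 4) as usize]
}

fn main() {
    assert_eq!(reverse_byte(0b0000_0001), 0b1000_0000);
    assert_eq!(reverse_byte(0b1100_0000), 0b0000_0011);
    // Matches the standard library for every byte.
    assert!((0u8..=255).all(|b| reverse_byte(b) == b.reverse_bits()));
}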
// since gcc doesn't have vector shuffle methods available in non-patched builds, fall back to
|
||||
// component-wise bitreverses if they're not available.
|
||||
#[cfg(not(feature = "master"))]
|
||||
if name == sym::simd_bitreverse {
|
||||
let vector = args[0].immediate();
|
||||
let vector_ty = vector.get_type();
|
||||
let vector_type = vector_ty.unqualified().dyncast_vector().expect("vector type");
|
||||
let num_elements = vector_type.get_num_units();
|
||||
|
||||
let elem_type = vector_type.get_element_type();
|
||||
let elem_size_bytes = elem_type.get_size();
|
||||
let num_type = elem_type.to_unsigned(bx.cx);
|
||||
let new_elements: Vec<_> = (0..num_elements)
|
||||
.map(|idx| {
|
||||
let index = bx.context.new_rvalue_from_long(num_type, idx as _);
|
||||
let extracted_value = bx.extract_element(vector, index).to_rvalue();
|
||||
bx.bit_reverse(elem_size_bytes as u64 * 8, extracted_value)
|
||||
})
|
||||
.collect();
|
||||
return Ok(bx.context.new_rvalue_from_vector(None, vector_ty, &new_elements));
|
||||
}
|
||||
|
||||
if name == sym::simd_ctlz || name == sym::simd_cttz {
|
||||
let vector = args[0].immediate();
|
||||
let elements: Vec<_> = (0..in_len)
|
||||
.map(|i| {
|
||||
let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
|
||||
let value = bx.extract_element(vector, index).to_rvalue();
|
||||
if name == sym::simd_ctlz {
|
||||
bx.count_leading_zeroes(value.get_type().get_size() as u64 * 8, value)
|
||||
} else {
|
||||
bx.count_trailing_zeroes(value.get_type().get_size() as u64 * 8, value)
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
return Ok(bx.context.new_rvalue_from_vector(None, vector.get_type(), &elements));
|
||||
}
|
||||
|
||||
if name == sym::simd_shuffle {
|
||||
// Make sure this is actually an array, since typeck only checks the length-suffixed
|
||||
// version of this intrinsic.
|
||||
@ -504,20 +699,15 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
|
||||
default: RValue<'gcc>,
|
||||
pointers: RValue<'gcc>,
|
||||
mask: RValue<'gcc>,
|
||||
pointer_count: usize,
|
||||
bx: &mut Builder<'a, 'gcc, 'tcx>,
|
||||
in_len: u64,
|
||||
underlying_ty: Ty<'tcx>,
|
||||
invert: bool,
|
||||
) -> RValue<'gcc> {
|
||||
let vector_type = if pointer_count > 1 {
|
||||
bx.context.new_vector_type(bx.usize_type, in_len)
|
||||
} else {
|
||||
vector_ty(bx, underlying_ty, in_len)
|
||||
};
|
||||
let elem_type = vector_type.dyncast_vector().expect("vector type").get_element_type();
|
||||
let vector_type = default.get_type();
|
||||
let elem_type =
|
||||
vector_type.unqualified().dyncast_vector().expect("vector type").get_element_type();
|
||||
|
||||
let mut values = vec![];
|
||||
let mut values = Vec::with_capacity(in_len as usize);
|
||||
for i in 0..in_len {
|
||||
let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
|
||||
let int = bx.context.new_vector_access(None, pointers, index).to_rvalue();
|
||||
@ -530,13 +720,15 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
|
||||
|
||||
let vector = bx.context.new_rvalue_from_vector(None, vector_type, &values);
|
||||
|
||||
let mut mask_types = vec![];
|
||||
let mut mask_values = vec![];
|
||||
let mut mask_types = Vec::with_capacity(in_len as usize);
|
||||
let mut mask_values = Vec::with_capacity(in_len as usize);
|
||||
for i in 0..in_len {
|
||||
let index = bx.context.new_rvalue_from_long(bx.i32_type, i as i64);
|
||||
mask_types.push(bx.context.new_field(None, bx.i32_type, "m"));
|
||||
let mask_value = bx.context.new_vector_access(None, mask, index).to_rvalue();
|
||||
let masked = bx.context.new_rvalue_from_int(bx.i32_type, in_len as i32) & mask_value;
|
||||
let mask_value_cast = bx.context.new_cast(None, mask_value, bx.i32_type);
|
||||
let masked =
|
||||
bx.context.new_rvalue_from_int(bx.i32_type, in_len as i32) & mask_value_cast;
|
||||
let value = index + masked;
|
||||
mask_values.push(value);
|
||||
}
|
||||
@ -665,10 +857,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
|
||||
args[0].immediate(),
|
||||
args[1].immediate(),
|
||||
args[2].immediate(),
|
||||
pointer_count,
|
||||
bx,
|
||||
in_len,
|
||||
underlying_ty,
|
||||
false,
|
||||
));
|
||||
}
|
||||
@ -779,16 +969,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
|
||||
}
|
||||
}
|
||||
|
||||
let result = gather(
|
||||
args[0].immediate(),
|
||||
args[1].immediate(),
|
||||
args[2].immediate(),
|
||||
pointer_count,
|
||||
bx,
|
||||
in_len,
|
||||
underlying_ty,
|
||||
true,
|
||||
);
|
||||
let result =
|
||||
gather(args[0].immediate(), args[1].immediate(), args[2].immediate(), bx, in_len, true);
|
||||
|
||||
let pointers = args[1].immediate();
|
||||
|
||||
|
@ -4,6 +4,7 @@
|
||||
* TODO(antoyo): support LTO (gcc's equivalent to Full LTO is -flto -flto-partition=one — https://documentation.suse.com/sbp/all/html/SBP-GCC-10/index.html).
|
||||
* For Thin LTO, this might be helpful:
|
||||
* In gcc 4.6 -fwhopr was removed and became default with -flto. The non-whopr path can still be executed via -flto-partition=none.
|
||||
* Or the new incremental LTO?
|
||||
*
|
||||
* Maybe some missing optimizations enabled by rustc's LTO are in there: https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
|
||||
* Like -fipa-icf (should be already enabled) and maybe -fdevirtualize-at-ltrans.
|
||||
@ -24,9 +25,10 @@
|
||||
hash_raw_entry
|
||||
)]
|
||||
#![allow(broken_intra_doc_links)]
|
||||
#![recursion_limit="256"]
|
||||
#![recursion_limit = "256"]
|
||||
#![warn(rust_2018_idioms)]
|
||||
#![warn(unused_lifetimes)]
|
||||
#![deny(clippy::pattern_type_mismatch)]
|
||||
|
||||
extern crate rustc_apfloat;
|
||||
extern crate rustc_ast;
|
||||
@ -37,7 +39,8 @@ extern crate rustc_errors;
|
||||
extern crate rustc_fluent_macro;
|
||||
extern crate rustc_fs_util;
|
||||
extern crate rustc_hir;
|
||||
#[cfg(feature="master")]
|
||||
extern crate rustc_index;
|
||||
#[cfg(feature = "master")]
|
||||
extern crate rustc_interface;
|
||||
extern crate rustc_macros;
|
||||
extern crate rustc_metadata;
|
||||
@ -77,36 +80,40 @@ mod type_of;
|
||||
|
||||
use std::any::Any;
|
||||
use std::fmt::Debug;
|
||||
#[cfg(not(feature = "master"))]
|
||||
use std::sync::atomic::AtomicBool;
|
||||
#[cfg(not(feature = "master"))]
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::sync::Arc;
|
||||
use std::sync::Mutex;
|
||||
#[cfg(not(feature="master"))]
|
||||
use std::sync::atomic::AtomicBool;
|
||||
#[cfg(not(feature="master"))]
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
use gccjit::{Context, OptimizationLevel};
|
||||
#[cfg(feature="master")]
|
||||
use gccjit::{TargetInfo, Version};
|
||||
#[cfg(not(feature="master"))]
|
||||
use gccjit::CType;
|
||||
use errors::LTONotSupported;
|
||||
#[cfg(not(feature = "master"))]
|
||||
use gccjit::CType;
|
||||
use gccjit::{Context, OptimizationLevel};
|
||||
#[cfg(feature = "master")]
|
||||
use gccjit::{TargetInfo, Version};
|
||||
use rustc_ast::expand::allocator::AllocatorKind;
|
||||
use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
|
||||
use rustc_codegen_ssa::base::codegen_crate;
|
||||
use rustc_codegen_ssa::back::write::{CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn};
|
||||
use rustc_codegen_ssa::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
|
||||
use rustc_codegen_ssa::back::write::{
|
||||
CodegenContext, FatLtoInput, ModuleConfig, TargetMachineFactoryFn,
|
||||
};
|
||||
use rustc_codegen_ssa::base::codegen_crate;
|
||||
use rustc_codegen_ssa::traits::{
|
||||
CodegenBackend, ExtraBackendMethods, ThinBufferMethods, WriteBackendMethods,
|
||||
};
|
||||
use rustc_codegen_ssa::{CodegenResults, CompiledModule, ModuleCodegen};
|
||||
use rustc_data_structures::fx::FxIndexMap;
|
||||
use rustc_data_structures::sync::IntoDynSyncSend;
|
||||
use rustc_codegen_ssa::traits::{CodegenBackend, ExtraBackendMethods, ThinBufferMethods, WriteBackendMethods};
|
||||
use rustc_errors::{ErrorGuaranteed, DiagCtxt};
|
||||
use rustc_errors::{DiagCtxt, ErrorGuaranteed};
|
||||
use rustc_metadata::EncodedMetadata;
|
||||
use rustc_middle::dep_graph::{WorkProduct, WorkProductId};
|
||||
use rustc_middle::util::Providers;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_middle::util::Providers;
|
||||
use rustc_session::config::{Lto, OptLevel, OutputFilenames};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::Symbol;
|
||||
use rustc_span::fatal_error::FatalError;
|
||||
use rustc_span::Symbol;
|
||||
use tempfile::TempDir;
|
||||
|
||||
use crate::back::lto::ModuleBuffer;
|
||||
@ -124,13 +131,13 @@ impl<F: Fn() -> String> Drop for PrintOnPanic<F> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
#[derive(Debug)]
|
||||
pub struct TargetInfo {
|
||||
supports_128bit_integers: AtomicBool,
|
||||
}
|
||||
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
impl TargetInfo {
|
||||
fn cpu_supports(&self, _feature: &str) -> bool {
|
||||
false
|
||||
@ -173,26 +180,26 @@ impl CodegenBackend for GccCodegenBackend {
|
||||
}
|
||||
|
||||
fn init(&self, sess: &Session) {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
{
|
||||
let target_cpu = target_cpu(sess);
|
||||
|
||||
// Get the second TargetInfo with the correct CPU features by setting the arch.
|
||||
let context = Context::default();
|
||||
if target_cpu != "generic" {
|
||||
context.add_command_line_option(&format!("-march={}", target_cpu));
|
||||
context.add_command_line_option(format!("-march={}", target_cpu));
|
||||
}
|
||||
|
||||
**self.target_info.info.lock().expect("lock") = context.get_target_info();
|
||||
}
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
gccjit::set_global_personality_function_name(b"rust_eh_personality\0");
|
||||
if sess.lto() == Lto::Thin {
|
||||
sess.dcx().emit_warn(LTONotSupported {});
|
||||
}
|
||||
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
{
|
||||
let temp_dir = TempDir::new().expect("cannot create temporary directory");
|
||||
let temp_file = temp_dir.into_path().join("result.asm");
|
||||
@ -200,39 +207,62 @@ impl CodegenBackend for GccCodegenBackend {
|
||||
check_context.set_print_errors_to_stderr(false);
|
||||
let _int128_ty = check_context.new_c_type(CType::UInt128t);
|
||||
// NOTE: we cannot just call compile() as this would require other files than libgccjit.so.
|
||||
check_context.compile_to_file(gccjit::OutputKind::Assembler, temp_file.to_str().expect("path to str"));
|
||||
self.target_info.info.lock().expect("lock").supports_128bit_integers.store(check_context.get_last_error() == Ok(None), Ordering::SeqCst);
|
||||
check_context.compile_to_file(
|
||||
gccjit::OutputKind::Assembler,
|
||||
temp_file.to_str().expect("path to str"),
|
||||
);
|
||||
self.target_info
|
||||
.info
|
||||
.lock()
|
||||
.expect("lock")
|
||||
.supports_128bit_integers
|
||||
.store(check_context.get_last_error() == Ok(None), Ordering::SeqCst);
|
||||
}
|
||||
}
|
||||
|
||||
fn provide(&self, providers: &mut Providers) {
|
||||
providers.global_backend_features =
|
||||
|tcx, ()| gcc_util::global_gcc_features(tcx.sess, true)
|
||||
providers.global_backend_features = |tcx, ()| gcc_util::global_gcc_features(tcx.sess, true)
|
||||
}
|
||||
|
||||
fn codegen_crate<'tcx>(&self, tcx: TyCtxt<'tcx>, metadata: EncodedMetadata, need_metadata_module: bool) -> Box<dyn Any> {
|
||||
fn codegen_crate(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
metadata: EncodedMetadata,
|
||||
need_metadata_module: bool,
|
||||
) -> Box<dyn Any> {
|
||||
let target_cpu = target_cpu(tcx.sess);
|
||||
let res = codegen_crate(self.clone(), tcx, target_cpu.to_string(), metadata, need_metadata_module);
|
||||
let res = codegen_crate(
|
||||
self.clone(),
|
||||
tcx,
|
||||
target_cpu.to_string(),
|
||||
metadata,
|
||||
need_metadata_module,
|
||||
);
|
||||
|
||||
Box::new(res)
|
||||
}
|
||||
|
||||
fn join_codegen(&self, ongoing_codegen: Box<dyn Any>, sess: &Session, _outputs: &OutputFilenames) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
|
||||
fn join_codegen(
|
||||
&self,
|
||||
ongoing_codegen: Box<dyn Any>,
|
||||
sess: &Session,
|
||||
_outputs: &OutputFilenames,
|
||||
) -> (CodegenResults, FxIndexMap<WorkProductId, WorkProduct>) {
|
||||
ongoing_codegen
|
||||
.downcast::<rustc_codegen_ssa::back::write::OngoingCodegen<GccCodegenBackend>>()
|
||||
.expect("Expected GccCodegenBackend's OngoingCodegen, found Box<Any>")
|
||||
.join(sess)
|
||||
}
|
||||
|
||||
fn link(&self, sess: &Session, codegen_results: CodegenResults, outputs: &OutputFilenames) -> Result<(), ErrorGuaranteed> {
|
||||
fn link(
|
||||
&self,
|
||||
sess: &Session,
|
||||
codegen_results: CodegenResults,
|
||||
outputs: &OutputFilenames,
|
||||
) -> Result<(), ErrorGuaranteed> {
|
||||
use rustc_codegen_ssa::back::link::link_binary;
|
||||
|
||||
link_binary(
|
||||
sess,
|
||||
&crate::archive::ArArchiveBuilderBuilder,
|
||||
&codegen_results,
|
||||
outputs,
|
||||
)
|
||||
link_binary(sess, &crate::archive::ArArchiveBuilderBuilder, &codegen_results, outputs)
|
||||
}
|
||||
|
||||
fn target_features(&self, sess: &Session, allow_unstable: bool) -> Vec<Symbol> {
|
||||
@ -245,11 +275,13 @@ fn new_context<'gcc, 'tcx>(tcx: TyCtxt<'tcx>) -> Context<'gcc> {
|
||||
if tcx.sess.target.arch == "x86" || tcx.sess.target.arch == "x86_64" {
|
||||
context.add_command_line_option("-masm=intel");
|
||||
}
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
{
|
||||
context.set_special_chars_allowed_in_func_names("$.*");
|
||||
let version = Version::get();
|
||||
let version = format!("{}.{}.{}", version.major, version.minor, version.patch);
|
||||
context.set_output_ident(&format!("rustc version {} with libgccjit {}",
|
||||
context.set_output_ident(&format!(
|
||||
"rustc version {} with libgccjit {}",
|
||||
rustc_interface::util::rustc_version_str().unwrap_or("unknown version"),
|
||||
version,
|
||||
));
|
||||
@ -260,26 +292,41 @@ fn new_context<'gcc, 'tcx>(tcx: TyCtxt<'tcx>) -> Context<'gcc> {
|
||||
}
|
||||
|
||||
impl ExtraBackendMethods for GccCodegenBackend {
|
||||
fn codegen_allocator<'tcx>(&self, tcx: TyCtxt<'tcx>, module_name: &str, kind: AllocatorKind, alloc_error_handler_kind: AllocatorKind) -> Self::Module {
|
||||
fn codegen_allocator(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
module_name: &str,
|
||||
kind: AllocatorKind,
|
||||
alloc_error_handler_kind: AllocatorKind,
|
||||
) -> Self::Module {
|
||||
let mut mods = GccContext {
|
||||
context: new_context(tcx),
|
||||
should_combine_object_files: false,
|
||||
temp_dir: None,
|
||||
};
|
||||
|
||||
unsafe { allocator::codegen(tcx, &mut mods, module_name, kind, alloc_error_handler_kind); }
|
||||
unsafe {
|
||||
allocator::codegen(tcx, &mut mods, module_name, kind, alloc_error_handler_kind);
|
||||
}
|
||||
mods
|
||||
}
|
||||
|
||||
fn compile_codegen_unit(&self, tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen<Self::Module>, u64) {
|
||||
fn compile_codegen_unit(
|
||||
&self,
|
||||
tcx: TyCtxt<'_>,
|
||||
cgu_name: Symbol,
|
||||
) -> (ModuleCodegen<Self::Module>, u64) {
|
||||
base::compile_codegen_unit(tcx, cgu_name, self.target_info.clone())
|
||||
}
|
||||
|
||||
fn target_machine_factory(&self, _sess: &Session, _opt_level: OptLevel, _features: &[String]) -> TargetMachineFactoryFn<Self> {
|
||||
fn target_machine_factory(
|
||||
&self,
|
||||
_sess: &Session,
|
||||
_opt_level: OptLevel,
|
||||
_features: &[String],
|
||||
) -> TargetMachineFactoryFn<Self> {
|
||||
// TODO(antoyo): set opt level.
|
||||
Arc::new(|_| {
|
||||
Ok(())
|
||||
})
|
||||
Arc::new(|_| Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
@ -310,11 +357,19 @@ impl WriteBackendMethods for GccCodegenBackend {
|
||||
type ThinData = ();
|
||||
type ThinBuffer = ThinBuffer;
|
||||
|
||||
fn run_fat_lto(cgcx: &CodegenContext<Self>, modules: Vec<FatLtoInput<Self>>, cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>) -> Result<LtoModuleCodegen<Self>, FatalError> {
|
||||
fn run_fat_lto(
|
||||
cgcx: &CodegenContext<Self>,
|
||||
modules: Vec<FatLtoInput<Self>>,
|
||||
cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
|
||||
) -> Result<LtoModuleCodegen<Self>, FatalError> {
|
||||
back::lto::run_fat(cgcx, modules, cached_modules)
|
||||
}
|
||||
|
||||
fn run_thin_lto(_cgcx: &CodegenContext<Self>, _modules: Vec<(String, Self::ThinBuffer)>, _cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>) -> Result<(Vec<LtoModuleCodegen<Self>>, Vec<WorkProduct>), FatalError> {
|
||||
fn run_thin_lto(
|
||||
_cgcx: &CodegenContext<Self>,
|
||||
_modules: Vec<(String, Self::ThinBuffer)>,
|
||||
_cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
|
||||
) -> Result<(Vec<LtoModuleCodegen<Self>>, Vec<WorkProduct>), FatalError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
@ -326,21 +381,37 @@ impl WriteBackendMethods for GccCodegenBackend {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
unsafe fn optimize(_cgcx: &CodegenContext<Self>, _dcx: &DiagCtxt, module: &ModuleCodegen<Self::Module>, config: &ModuleConfig) -> Result<(), FatalError> {
|
||||
unsafe fn optimize(
|
||||
_cgcx: &CodegenContext<Self>,
|
||||
_dcx: &DiagCtxt,
|
||||
module: &ModuleCodegen<Self::Module>,
|
||||
config: &ModuleConfig,
|
||||
) -> Result<(), FatalError> {
|
||||
module.module_llvm.context.set_optimization_level(to_gcc_opt_level(config.opt_level));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn optimize_fat(_cgcx: &CodegenContext<Self>, _module: &mut ModuleCodegen<Self::Module>) -> Result<(), FatalError> {
|
||||
fn optimize_fat(
|
||||
_cgcx: &CodegenContext<Self>,
|
||||
_module: &mut ModuleCodegen<Self::Module>,
|
||||
) -> Result<(), FatalError> {
|
||||
// TODO(antoyo)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
unsafe fn optimize_thin(_cgcx: &CodegenContext<Self>, _thin: ThinModule<Self>) -> Result<ModuleCodegen<Self::Module>, FatalError> {
|
||||
unsafe fn optimize_thin(
|
||||
_cgcx: &CodegenContext<Self>,
|
||||
_thin: ThinModule<Self>,
|
||||
) -> Result<ModuleCodegen<Self::Module>, FatalError> {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
unsafe fn codegen(cgcx: &CodegenContext<Self>, dcx: &DiagCtxt, module: ModuleCodegen<Self::Module>, config: &ModuleConfig) -> Result<CompiledModule, FatalError> {
|
||||
unsafe fn codegen(
|
||||
cgcx: &CodegenContext<Self>,
|
||||
dcx: &DiagCtxt,
|
||||
module: ModuleCodegen<Self::Module>,
|
||||
config: &ModuleConfig,
|
||||
) -> Result<CompiledModule, FatalError> {
|
||||
back::write::codegen(cgcx, dcx, module, config)
|
||||
}
|
||||
|
||||
@ -352,7 +423,11 @@ impl WriteBackendMethods for GccCodegenBackend {
|
||||
unimplemented!();
|
||||
}
|
||||
|
||||
fn run_link(cgcx: &CodegenContext<Self>, dcx: &DiagCtxt, modules: Vec<ModuleCodegen<Self::Module>>) -> Result<ModuleCodegen<Self::Module>, FatalError> {
|
||||
fn run_link(
|
||||
cgcx: &CodegenContext<Self>,
|
||||
dcx: &DiagCtxt,
|
||||
modules: Vec<ModuleCodegen<Self::Module>>,
|
||||
) -> Result<ModuleCodegen<Self::Module>, FatalError> {
|
||||
back::write::link(cgcx, dcx, modules)
|
||||
}
|
||||
}
|
||||
@ -360,47 +435,48 @@ impl WriteBackendMethods for GccCodegenBackend {
/// This is the entrypoint for a hot plugged rustc_codegen_gccjit
#[no_mangle]
pub fn __rustc_codegen_backend() -> Box<dyn CodegenBackend> {
#[cfg(feature="master")]
#[cfg(feature = "master")]
let info = {
// Check whether the target supports 128-bit integers.
let context = Context::default();
Arc::new(Mutex::new(IntoDynSyncSend(context.get_target_info())))
};
#[cfg(not(feature="master"))]
#[cfg(not(feature = "master"))]
let info = Arc::new(Mutex::new(IntoDynSyncSend(TargetInfo {
supports_128bit_integers: AtomicBool::new(false),
})));

Box::new(GccCodegenBackend {
target_info: LockedTargetInfo { info },
})
Box::new(GccCodegenBackend { target_info: LockedTargetInfo { info } })
}

fn to_gcc_opt_level(optlevel: Option<OptLevel>) -> OptimizationLevel {
match optlevel {
None => OptimizationLevel::None,
Some(level) => {
match level {
Some(level) => match level {
OptLevel::No => OptimizationLevel::None,
OptLevel::Less => OptimizationLevel::Limited,
OptLevel::Default => OptimizationLevel::Standard,
OptLevel::Aggressive => OptimizationLevel::Aggressive,
OptLevel::Size | OptLevel::SizeMin => OptimizationLevel::Limited,
}
},
}
}

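For quick reference while reading the diff, the reformatted `to_gcc_opt_level` keeps exactly the mapping it had before rustfmt; a minimal, hypothetical sanity check of that mapping (assuming `OptLevel` from `rustc_session` and gccjit's `OptimizationLevel` are in scope, as they are in this file) might read:

// Illustrative sketch only, not part of the commit: size-optimised builds fall back to
// gccjit's Limited level, everything else maps one-to-one.
assert!(matches!(to_gcc_opt_level(None), OptimizationLevel::None));
assert!(matches!(to_gcc_opt_level(Some(OptLevel::Default)), OptimizationLevel::Standard));
assert!(matches!(to_gcc_opt_level(Some(OptLevel::SizeMin)), OptimizationLevel::Limited));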
pub fn target_features(sess: &Session, allow_unstable: bool, target_info: &LockedTargetInfo) -> Vec<Symbol> {
|
||||
sess
|
||||
.target
|
||||
pub fn target_features(
|
||||
sess: &Session,
|
||||
allow_unstable: bool,
|
||||
target_info: &LockedTargetInfo,
|
||||
) -> Vec<Symbol> {
|
||||
sess.target
|
||||
.supported_target_features()
|
||||
.iter()
|
||||
.filter_map(
|
||||
|&(feature, gate)| {
|
||||
if sess.is_nightly_build() || allow_unstable || gate.is_stable() { Some(feature) } else { None }
|
||||
},
|
||||
)
|
||||
.filter_map(|&(feature, gate)| {
|
||||
if sess.is_nightly_build() || allow_unstable || gate.is_stable() {
|
||||
Some(feature)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.filter(|_feature| {
|
||||
target_info.cpu_supports(_feature)
|
||||
/*
|
||||
@ -410,6 +486,6 @@ pub fn target_features(sess: &Session, allow_unstable: bool, target_info: &Locke
|
||||
sha, sse, sse2, sse3, sse4.1, sse4.2, sse4a, ssse3, tbm, vaes, vpclmulqdq, xsave, xsavec, xsaveopt, xsaves
|
||||
*/
|
||||
})
|
||||
.map(|feature| Symbol::intern(feature))
|
||||
.map(Symbol::intern)
|
||||
.collect()
|
||||
}
|
||||
|
@ -1,11 +1,11 @@
|
||||
#[cfg(feature="master")]
|
||||
use gccjit::{VarAttribute, FnAttribute};
|
||||
#[cfg(feature = "master")]
|
||||
use gccjit::{FnAttribute, VarAttribute};
|
||||
use rustc_codegen_ssa::traits::PreDefineMethods;
|
||||
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
|
||||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::mir::mono::{Linkage, Visibility};
|
||||
use rustc_middle::ty::{self, Instance, TypeVisitableExt};
|
||||
use rustc_middle::ty::layout::{FnAbiOf, LayoutOf};
|
||||
use rustc_middle::ty::{self, Instance, TypeVisitableExt};
|
||||
|
||||
use crate::attributes;
|
||||
use crate::base;
|
||||
@ -13,8 +13,14 @@ use crate::context::CodegenCx;
|
||||
use crate::type_of::LayoutGccExt;
|
||||
|
||||
impl<'gcc, 'tcx> PreDefineMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
#[cfg_attr(not(feature="master"), allow(unused_variables))]
|
||||
fn predefine_static(&self, def_id: DefId, _linkage: Linkage, visibility: Visibility, symbol_name: &str) {
|
||||
#[cfg_attr(not(feature = "master"), allow(unused_variables))]
|
||||
fn predefine_static(
|
||||
&self,
|
||||
def_id: DefId,
|
||||
_linkage: Linkage,
|
||||
visibility: Visibility,
|
||||
symbol_name: &str,
|
||||
) {
|
||||
let attrs = self.tcx.codegen_fn_attrs(def_id);
|
||||
let instance = Instance::mono(self.tcx, def_id);
|
||||
let ty = instance.ty(self.tcx, ty::ParamEnv::reveal_all());
|
||||
@ -22,20 +28,26 @@ impl<'gcc, 'tcx> PreDefineMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
let is_tls = attrs.flags.contains(CodegenFnAttrFlags::THREAD_LOCAL);
|
||||
let global = self.define_global(symbol_name, gcc_type, is_tls, attrs.link_section);
|
||||
#[cfg(feature="master")]
|
||||
global.add_attribute(VarAttribute::Visibility(base::visibility_to_gcc(visibility)));
|
||||
#[cfg(feature = "master")]
|
||||
global.add_string_attribute(VarAttribute::Visibility(base::visibility_to_gcc(visibility)));
|
||||
|
||||
// TODO(antoyo): set linkage.
|
||||
self.instances.borrow_mut().insert(instance, global);
|
||||
}
|
||||
|
||||
#[cfg_attr(not(feature="master"), allow(unused_variables))]
|
||||
fn predefine_fn(&self, instance: Instance<'tcx>, linkage: Linkage, visibility: Visibility, symbol_name: &str) {
|
||||
#[cfg_attr(not(feature = "master"), allow(unused_variables))]
|
||||
fn predefine_fn(
|
||||
&self,
|
||||
instance: Instance<'tcx>,
|
||||
linkage: Linkage,
|
||||
visibility: Visibility,
|
||||
symbol_name: &str,
|
||||
) {
|
||||
assert!(!instance.args.has_infer());
|
||||
|
||||
let fn_abi = self.fn_abi_of_instance(instance, ty::List::empty());
|
||||
self.linkage.set(base::linkage_to_gcc(linkage));
|
||||
let decl = self.declare_fn(symbol_name, &fn_abi);
|
||||
let decl = self.declare_fn(symbol_name, fn_abi);
|
||||
//let attrs = self.tcx.codegen_fn_attrs(instance.def_id());
|
||||
|
||||
attributes::from_fn_attrs(self, decl, instance);
|
||||
@ -48,11 +60,10 @@ impl<'gcc, 'tcx> PreDefineMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
&& linkage != Linkage::Private
|
||||
&& self.tcx.is_compiler_builtins(LOCAL_CRATE)
|
||||
{
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
decl.add_attribute(FnAttribute::Visibility(gccjit::Visibility::Hidden));
|
||||
}
|
||||
else {
|
||||
#[cfg(feature="master")]
|
||||
} else {
|
||||
#[cfg(feature = "master")]
|
||||
decl.add_attribute(FnAttribute::Visibility(base::visibility_to_gcc(visibility)));
|
||||
}
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
use gccjit::{RValue, Struct, Type};
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, TypeMembershipMethods};
|
||||
use rustc_codegen_ssa::common::TypeKind;
|
||||
use rustc_middle::{bug, ty};
|
||||
use rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, TypeMembershipMethods};
|
||||
use rustc_middle::ty::layout::TyAndLayout;
|
||||
use rustc_middle::{bug, ty};
|
||||
use rustc_target::abi::{AddressSpace, Align, Integer, Size};
|
||||
|
||||
use crate::common::TypeReflection;
|
||||
@ -136,6 +136,10 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
unimplemented!("f16_f128")
|
||||
}
|
||||
|
||||
fn type_f128(&self) -> Type<'gcc> {
|
||||
unimplemented!("f16_f128")
|
||||
}
|
||||
|
||||
fn type_func(&self, params: &[Type<'gcc>], return_type: Type<'gcc>) -> Type<'gcc> {
|
||||
self.context.new_function_pointer_type(None, return_type, params, false)
|
||||
}
|
||||
@ -143,14 +147,18 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn type_struct(&self, fields: &[Type<'gcc>], packed: bool) -> Type<'gcc> {
|
||||
let types = fields.to_vec();
|
||||
if let Some(typ) = self.struct_types.borrow().get(fields) {
|
||||
return typ.clone();
|
||||
return *typ;
|
||||
}
|
||||
let fields: Vec<_> = fields.iter().enumerate()
|
||||
.map(|(index, field)| self.context.new_field(None, *field, &format!("field{}_TODO", index)))
|
||||
let fields: Vec<_> = fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, field)| {
|
||||
self.context.new_field(None, *field, format!("field{}_TODO", index))
|
||||
})
|
||||
.collect();
|
||||
let typ = self.context.new_struct_type(None, "struct", &fields).as_type();
|
||||
if packed {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
typ.set_packed();
|
||||
}
|
||||
self.struct_types.borrow_mut().insert(types, typ);
|
||||
@ -160,17 +168,13 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn type_kind(&self, typ: Type<'gcc>) -> TypeKind {
|
||||
if self.is_int_type_or_bool(typ) {
|
||||
TypeKind::Integer
|
||||
}
|
||||
else if typ.is_compatible_with(self.float_type) {
|
||||
} else if typ.is_compatible_with(self.float_type) {
|
||||
TypeKind::Float
|
||||
}
|
||||
else if typ.is_compatible_with(self.double_type) {
|
||||
} else if typ.is_compatible_with(self.double_type) {
|
||||
TypeKind::Double
|
||||
}
|
||||
else if typ.is_vector() {
|
||||
} else if typ.is_vector() {
|
||||
TypeKind::Vector
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
// TODO(antoyo): support other types.
|
||||
TypeKind::Void
|
||||
}
|
||||
@ -187,14 +191,11 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
fn element_type(&self, ty: Type<'gcc>) -> Type<'gcc> {
|
||||
if let Some(typ) = ty.dyncast_array() {
|
||||
typ
|
||||
}
|
||||
else if let Some(vector_type) = ty.dyncast_vector() {
|
||||
} else if let Some(vector_type) = ty.dyncast_vector() {
|
||||
vector_type.get_element_type()
|
||||
}
|
||||
else if let Some(typ) = ty.get_pointee() {
|
||||
} else if let Some(typ) = ty.get_pointee() {
|
||||
typ
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
unreachable!()
|
||||
}
|
||||
}
|
||||
@ -208,11 +209,9 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
let f64 = self.context.new_type::<f64>();
|
||||
if typ.is_compatible_with(f32) {
|
||||
32
|
||||
}
|
||||
else if typ.is_compatible_with(f64) {
|
||||
} else if typ.is_compatible_with(f64) {
|
||||
64
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
panic!("Cannot get width of float type {:?}", typ);
|
||||
}
|
||||
// TODO(antoyo): support other sizes.
|
||||
@ -226,9 +225,9 @@ impl<'gcc, 'tcx> BaseTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
value.get_type()
|
||||
}
|
||||
|
||||
#[cfg_attr(feature="master", allow(unused_mut))]
|
||||
#[cfg_attr(feature = "master", allow(unused_mut))]
|
||||
fn type_array(&self, ty: Type<'gcc>, mut len: u64) -> Type<'gcc> {
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
if let Some(struct_type) = ty.is_struct() {
|
||||
if struct_type.get_field_count() == 0 {
|
||||
// NOTE: since gccjit only supports i32 for the array size and libcore's tests use a
|
||||
@ -252,12 +251,14 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn set_struct_body(&self, typ: Struct<'gcc>, fields: &[Type<'gcc>], packed: bool) {
|
||||
let fields: Vec<_> = fields.iter().enumerate()
|
||||
.map(|(index, field)| self.context.new_field(None, *field, &format!("field_{}", index)))
|
||||
let fields: Vec<_> = fields
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, field)| self.context.new_field(None, *field, format!("field_{}", index)))
|
||||
.collect();
|
||||
typ.set_fields(None, &fields);
|
||||
if packed {
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
typ.as_type().set_packed();
|
||||
}
|
||||
}
|
||||
@ -267,7 +268,10 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn struct_fields<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout<'tcx>) -> (Vec<Type<'gcc>>, bool) {
|
||||
pub fn struct_fields<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
layout: TyAndLayout<'tcx>,
|
||||
) -> (Vec<Type<'gcc>>, bool) {
|
||||
let field_count = layout.fields.count();
|
||||
|
||||
let mut packed = false;
|
||||
@ -275,7 +279,7 @@ pub fn struct_fields<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
|
||||
let mut prev_effective_align = layout.align.abi;
|
||||
let mut result: Vec<_> = Vec::with_capacity(1 + field_count * 2);
|
||||
for i in layout.fields.index_by_increasing_offset() {
|
||||
let target_offset = layout.fields.offset(i as usize);
|
||||
let target_offset = layout.fields.offset(i);
|
||||
let field = layout.field(cx, i);
|
||||
let effective_field_align =
|
||||
layout.align.abi.min(field.align.abi).restrict_for_offset(target_offset);
|
||||
@ -305,5 +309,4 @@ pub fn struct_fields<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
|
||||
(result, packed)
|
||||
}
|
||||
|
||||
impl<'gcc, 'tcx> TypeMembershipMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
impl<'gcc, 'tcx> TypeMembershipMethods<'tcx> for CodegenCx<'gcc, 'tcx> {}
|
||||
|
@ -1,13 +1,16 @@
|
||||
use std::fmt::Write;
|
||||
|
||||
use gccjit::{Struct, Type};
|
||||
use crate::rustc_codegen_ssa::traits::{BaseTypeMethods, DerivedTypeMethods, LayoutTypeMethods};
|
||||
use gccjit::{Struct, Type};
|
||||
use rustc_middle::bug;
|
||||
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
|
||||
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
|
||||
use rustc_middle::ty::print::with_no_trimmed_paths;
|
||||
use rustc_target::abi::{self, Abi, Align, F16, F128, F32, F64, FieldsShape, Int, Integer, Pointer, PointeeInfo, Size, TyAbiInterface, Variants};
|
||||
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
|
||||
use rustc_target::abi::call::{CastTarget, FnAbi, Reg};
|
||||
use rustc_target::abi::{
|
||||
self, Abi, Align, FieldsShape, Int, Integer, PointeeInfo, Pointer, Size, TyAbiInterface,
|
||||
Variants, F128, F16, F32, F64,
|
||||
};
|
||||
|
||||
use crate::abi::{FnAbiGcc, FnAbiGccExt, GccType};
|
||||
use crate::context::CodegenCx;
|
||||
@ -25,7 +28,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
pub fn type_int_from_ty(&self, t: ty::IntTy) -> Type<'gcc> {
|
||||
match t {
|
||||
ty::IntTy::Isize => self.type_isize(),
|
||||
@ -37,7 +40,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
pub fn type_uint_from_ty(&self, t: ty::UintTy) -> Type<'gcc> {
|
||||
match t {
|
||||
ty::UintTy::Usize => self.type_isize(),
|
||||
@ -56,7 +59,11 @@ impl<'a, 'tcx> CodegenCx<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout<'tcx>, defer: &mut Option<(Struct<'gcc>, TyAndLayout<'tcx>)>) -> Type<'gcc> {
|
||||
fn uncached_gcc_type<'gcc, 'tcx>(
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
layout: TyAndLayout<'tcx>,
|
||||
defer: &mut Option<(Struct<'gcc>, TyAndLayout<'tcx>)>,
|
||||
) -> Type<'gcc> {
|
||||
match layout.abi {
|
||||
Abi::Scalar(_) => bug!("handled elsewhere"),
|
||||
Abi::Vector { ref element, count } => {
|
||||
@ -70,7 +77,7 @@ fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
|
||||
element
|
||||
};
|
||||
return cx.context.new_vector_type(element, count);
|
||||
},
|
||||
}
|
||||
Abi::ScalarPair(..) => {
|
||||
return cx.type_struct(
|
||||
&[
|
||||
@ -87,7 +94,12 @@ fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
|
||||
// FIXME(eddyb) producing readable type names for trait objects can result
|
||||
// in problematically distinct types due to HRTB and subtyping (see #47638).
|
||||
// ty::Dynamic(..) |
|
||||
ty::Adt(..) | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Foreign(..) | ty::Coroutine(..) | ty::Str
|
||||
ty::Adt(..)
|
||||
| ty::Closure(..)
|
||||
| ty::CoroutineClosure(..)
|
||||
| ty::Foreign(..)
|
||||
| ty::Coroutine(..)
|
||||
| ty::Str
|
||||
if !cx.sess().fewer_names() =>
|
||||
{
|
||||
let mut name = with_no_trimmed_paths!(layout.ty.to_string());
|
||||
@ -125,21 +137,20 @@ fn uncached_gcc_type<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, layout: TyAndLayout
|
||||
let gcc_type = cx.type_named_struct(name);
|
||||
cx.set_struct_body(gcc_type, &[fill], packed);
|
||||
gcc_type.as_type()
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
FieldsShape::Array { count, .. } => cx.type_array(layout.field(cx, 0).gcc_type(cx), count),
|
||||
FieldsShape::Arbitrary { .. } =>
|
||||
match name {
|
||||
FieldsShape::Arbitrary { .. } => match name {
|
||||
None => {
|
||||
let (gcc_fields, packed) = struct_fields(cx, layout);
|
||||
cx.type_struct(&gcc_fields, packed)
|
||||
},
|
||||
}
|
||||
Some(ref name) => {
|
||||
let gcc_type = cx.type_named_struct(name);
|
||||
*defer = Some((gcc_type, layout));
|
||||
gcc_type.as_type()
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
@ -149,9 +160,22 @@ pub trait LayoutGccExt<'tcx> {
|
||||
fn is_gcc_scalar_pair(&self) -> bool;
|
||||
fn gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
|
||||
fn immediate_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>) -> Type<'gcc>;
|
||||
fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc>;
|
||||
fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize) -> Type<'gcc>;
|
||||
fn pointee_info_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, offset: Size) -> Option<PointeeInfo>;
|
||||
fn scalar_gcc_type_at<'gcc>(
|
||||
&self,
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
scalar: &abi::Scalar,
|
||||
offset: Size,
|
||||
) -> Type<'gcc>;
|
||||
fn scalar_pair_element_gcc_type<'gcc>(
|
||||
&self,
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
index: usize,
|
||||
) -> Type<'gcc>;
|
||||
fn pointee_info_at<'gcc>(
|
||||
&self,
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
offset: Size,
|
||||
) -> Option<PointeeInfo>;
|
||||
}
|
||||
|
||||
impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
@ -191,12 +215,13 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
if let Some(&ty) = cx.scalar_types.borrow().get(&self.ty) {
|
||||
return ty;
|
||||
}
|
||||
let ty =
|
||||
match *self.ty.kind() {
|
||||
let ty = match *self.ty.kind() {
|
||||
// NOTE: we cannot remove this match like in the LLVM codegen because the call
|
||||
// to fn_ptr_backend_type handles the on-stack attribute.
|
||||
// TODO(antoyo): find a less hackish way to handle the on-stack attribute.
|
||||
ty::FnPtr(sig) => cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty())),
|
||||
ty::FnPtr(sig) => {
|
||||
cx.fn_ptr_backend_type(&cx.fn_abi_of_fn_ptr(sig, ty::List::empty()))
|
||||
}
|
||||
_ => self.scalar_gcc_type_at(cx, scalar, Size::ZERO),
|
||||
};
|
||||
cx.scalar_types.borrow_mut().insert(self.ty, ty);
|
||||
@ -204,8 +229,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
}
|
||||
|
||||
// Check the cache.
|
||||
let variant_index =
|
||||
match self.variants {
|
||||
let variant_index = match self.variants {
|
||||
Variants::Single { index } => Some(index),
|
||||
_ => None,
|
||||
};
|
||||
@ -221,15 +245,13 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
let normal_ty = cx.tcx.erase_regions(self.ty);
|
||||
|
||||
let mut defer = None;
|
||||
let ty =
|
||||
if self.ty != normal_ty {
|
||||
let ty = if self.ty != normal_ty {
|
||||
let mut layout = cx.layout_of(normal_ty);
|
||||
if let Some(v) = variant_index {
|
||||
layout = layout.for_variant(cx, v);
|
||||
}
|
||||
layout.gcc_type(cx)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
uncached_gcc_type(cx, *self, &mut defer)
|
||||
};
|
||||
|
||||
@ -252,7 +274,12 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
self.gcc_type(cx)
|
||||
}
|
||||
|
||||
fn scalar_gcc_type_at<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, scalar: &abi::Scalar, offset: Size) -> Type<'gcc> {
|
||||
fn scalar_gcc_type_at<'gcc>(
|
||||
&self,
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
scalar: &abi::Scalar,
|
||||
offset: Size,
|
||||
) -> Type<'gcc> {
|
||||
match scalar.primitive() {
|
||||
Int(i, true) => cx.type_from_integer(i),
|
||||
Int(i, false) => cx.type_from_unsigned_integer(i),
|
||||
@ -262,11 +289,9 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
F128 => cx.type_f128(),
|
||||
Pointer(address_space) => {
|
||||
// If we know the alignment, pick something better than i8.
|
||||
let pointee =
|
||||
if let Some(pointee) = self.pointee_info_at(cx, offset) {
|
||||
let pointee = if let Some(pointee) = self.pointee_info_at(cx, offset) {
|
||||
cx.type_pointee_for_align(pointee.align)
|
||||
}
|
||||
else {
|
||||
} else {
|
||||
cx.type_i8()
|
||||
};
|
||||
cx.type_ptr_to_ext(pointee, address_space)
|
||||
@ -274,7 +299,11 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn scalar_pair_element_gcc_type<'gcc>(&self, cx: &CodegenCx<'gcc, 'tcx>, index: usize) -> Type<'gcc> {
|
||||
fn scalar_pair_element_gcc_type<'gcc>(
|
||||
&self,
|
||||
cx: &CodegenCx<'gcc, 'tcx>,
|
||||
index: usize,
|
||||
) -> Type<'gcc> {
|
||||
// This must produce the same result for `repr(transparent)` wrappers as for the inner type!
|
||||
// In other words, this should generally not look at the type at all, but only at the
|
||||
// layout.
|
||||
@ -295,13 +324,7 @@ impl<'tcx> LayoutGccExt<'tcx> for TyAndLayout<'tcx> {
|
||||
return cx.type_i1();
|
||||
}
|
||||
|
||||
let offset =
|
||||
if index == 0 {
|
||||
Size::ZERO
|
||||
}
|
||||
else {
|
||||
a.size(cx).align_to(b.align(cx).abi)
|
||||
};
|
||||
let offset = if index == 0 { Size::ZERO } else { a.size(cx).align_to(b.align(cx).abi) };
|
||||
self.scalar_gcc_type_at(cx, scalar, offset)
|
||||
}
|
||||
|
||||
@ -334,7 +357,12 @@ impl<'gcc, 'tcx> LayoutTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
layout.is_gcc_scalar_pair()
|
||||
}
|
||||
|
||||
fn scalar_pair_element_backend_type(&self, layout: TyAndLayout<'tcx>, index: usize, _immediate: bool) -> Type<'gcc> {
|
||||
fn scalar_pair_element_backend_type(
|
||||
&self,
|
||||
layout: TyAndLayout<'tcx>,
|
||||
index: usize,
|
||||
_immediate: bool,
|
||||
) -> Type<'gcc> {
|
||||
layout.scalar_pair_element_gcc_type(self, index)
|
||||
}
|
||||
|
||||
@ -352,12 +380,7 @@ impl<'gcc, 'tcx> LayoutTypeMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
|
||||
|
||||
fn fn_decl_backend_type(&self, fn_abi: &FnAbi<'tcx, Ty<'tcx>>) -> Type<'gcc> {
|
||||
// FIXME(antoyo): Should we do something with `FnAbiGcc::fn_attributes`?
|
||||
let FnAbiGcc {
|
||||
return_type,
|
||||
arguments_type,
|
||||
is_c_variadic,
|
||||
..
|
||||
} = fn_abi.gcc_type(self);
|
||||
let FnAbiGcc { return_type, arguments_type, is_c_variadic, .. } = fn_abi.gcc_type(self);
|
||||
self.context.new_function_pointer_type(None, return_type, &arguments_type, is_c_variadic)
|
||||
}
|
||||
}
|
||||
|
@ -1,479 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
# TODO(antoyo): rewrite to cargo-make (or just) or something like that to only rebuild the sysroot when needed?
|
||||
|
||||
set -e
|
||||
#set -x
|
||||
|
||||
flags=
|
||||
gcc_master_branch=1
|
||||
channel="debug"
|
||||
funcs=()
|
||||
build_only=0
|
||||
nb_parts=0
|
||||
current_part=0
|
||||
use_system_gcc=0
|
||||
use_backend=0
|
||||
cargo_target_dir=""
|
||||
|
||||
export CHANNEL='debug'
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--release)
|
||||
codegen_channel=release
|
||||
channel="release"
|
||||
export CHANNEL='release'
|
||||
shift
|
||||
;;
|
||||
--release-sysroot)
|
||||
sysroot_channel="--release"
|
||||
shift
|
||||
;;
|
||||
--no-default-features)
|
||||
gcc_master_branch=0
|
||||
flags="$flags --no-default-features"
|
||||
shift
|
||||
;;
|
||||
--features)
|
||||
shift
|
||||
flags="$flags --features $1"
|
||||
shift
|
||||
;;
|
||||
"--test-rustc")
|
||||
funcs+=(test_rustc)
|
||||
shift
|
||||
;;
|
||||
"--test-successful-rustc")
|
||||
funcs+=(test_successful_rustc)
|
||||
shift
|
||||
;;
|
||||
"--test-failing-rustc")
|
||||
funcs+=(test_failing_rustc)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--test-libcore")
|
||||
funcs+=(test_libcore)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--clean-ui-tests")
|
||||
funcs+=(clean_ui_tests)
|
||||
shift
|
||||
;;
|
||||
"--clean")
|
||||
funcs+=(clean)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--std-tests")
|
||||
funcs+=(std_tests)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--asm-tests")
|
||||
funcs+=(asm_tests)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--extended-tests")
|
||||
funcs+=(extended_sysroot_tests)
|
||||
shift
|
||||
;;
|
||||
"--extended-rand-tests")
|
||||
funcs+=(extended_rand_tests)
|
||||
shift
|
||||
;;
|
||||
"--extended-regex-example-tests")
|
||||
funcs+=(extended_regex_example_tests)
|
||||
shift
|
||||
;;
|
||||
"--extended-regex-tests")
|
||||
funcs+=(extended_regex_tests)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--mini-tests")
|
||||
funcs+=(mini_tests)
|
||||
shift
|
||||
;;
|
||||
|
||||
"--build-sysroot")
|
||||
funcs+=(build_sysroot)
|
||||
shift
|
||||
;;
|
||||
"--build")
|
||||
build_only=1
|
||||
shift
|
||||
;;
|
||||
"--use-system-gcc")
|
||||
use_system_gcc=1
|
||||
shift
|
||||
;;
|
||||
"--use-backend")
|
||||
use_backend=1
|
||||
shift
|
||||
export BUILTIN_BACKEND=$1
|
||||
shift
|
||||
;;
|
||||
"--out-dir")
|
||||
shift
|
||||
export CARGO_TARGET_DIR=$1
|
||||
cargo_target_dir=$1
|
||||
shift
|
||||
;;
|
||||
"--nb-parts")
|
||||
shift
|
||||
nb_parts=$1
|
||||
shift
|
||||
;;
|
||||
"--current-part")
|
||||
shift
|
||||
current_part=$1
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
echo "Unknown option $1"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
if [ -f ./gcc_path ]; then
|
||||
export GCC_PATH=$(cat gcc_path)
|
||||
elif (( $use_system_gcc == 1 )); then
|
||||
echo 'Using system GCC'
|
||||
else
|
||||
echo 'Please put the path to your custom build of libgccjit in the file `gcc_path`, see Readme.md for details'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
export LD_LIBRARY_PATH="$GCC_PATH"
|
||||
export LIBRARY_PATH="$GCC_PATH"
|
||||
|
||||
if [[ $use_backend == 0 ]]; then
|
||||
if [[ $channel == "release" ]]; then
|
||||
CARGO_INCREMENTAL=1 cargo rustc --release $flags
|
||||
else
|
||||
echo $LD_LIBRARY_PATH
|
||||
cargo rustc $flags
|
||||
fi
|
||||
fi
|
||||
|
||||
if (( $build_only == 1 )); then
|
||||
echo "Since it's 'build-only', exiting..."
|
||||
exit
|
||||
fi
|
||||
|
||||
source config.sh
|
||||
|
||||
function clean() {
|
||||
rm -r $cargo_target_dir || true
|
||||
mkdir -p $cargo_target_dir/gccjit
|
||||
}
|
||||
|
||||
function mini_tests() {
|
||||
echo "[BUILD] mini_core"
|
||||
crate_types="lib,dylib"
|
||||
|
||||
if [[ "$HOST_TRIPLE" != "$TARGET_TRIPLE" ]]; then
|
||||
crate_types="lib"
|
||||
fi
|
||||
|
||||
$RUST_CMD example/mini_core.rs --crate-name mini_core --crate-type $crate_types --target $TARGET_TRIPLE
|
||||
|
||||
echo "[BUILD] example"
|
||||
$RUST_CMD example/example.rs --crate-type lib --target $TARGET_TRIPLE
|
||||
|
||||
echo "[AOT] mini_core_hello_world"
|
||||
$RUST_CMD example/mini_core_hello_world.rs --crate-name mini_core_hello_world --crate-type bin -g --target $TARGET_TRIPLE
|
||||
$RUN_WRAPPER $cargo_target_dir/mini_core_hello_world abc bcd
|
||||
}
|
||||
|
||||
function build_sysroot() {
|
||||
echo "[BUILD] sysroot"
|
||||
time ./build_sysroot/build_sysroot.sh $sysroot_channel
|
||||
}
|
||||
|
||||
# TODO(GuillaumeGomez): when rewriting in Rust, refactor with the code in tests/lang_tests_common.rs if possible.
function run_in_vm() {
vm_parent_dir=${CG_GCC_VM_DIR:-$(pwd)}
vm_dir=vm
exe=$1
exe_filename=$(basename $exe)
vm_home_dir=$vm_parent_dir/$vm_dir/home
vm_exe_path=$vm_home_dir/$exe_filename
inside_vm_exe_path=/home/$exe_filename
sudo cp $exe $vm_exe_path

shift
pushd $vm_parent_dir
sudo chroot $vm_dir qemu-m68k-static $inside_vm_exe_path $@
popd
}
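A hypothetical invocation of the helper above, for readers unfamiliar with the removed script (it assumes a prepared m68k chroot under $CG_GCC_VM_DIR/vm with qemu-m68k-static installed, and reuses the script's own $cargo_target_dir variable for the binary path):

# Illustrative only: copy a cross-built test binary into the chroot and run it under qemu.
export CG_GCC_VM_DIR=$HOME/vms
run_in_vm $cargo_target_dir/mini_core_hello_world abc bcd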
|
||||
function std_tests() {
|
||||
echo "[AOT] arbitrary_self_types_pointers_and_wrappers"
|
||||
$RUST_CMD example/arbitrary_self_types_pointers_and_wrappers.rs --crate-name arbitrary_self_types_pointers_and_wrappers --crate-type bin --target $TARGET_TRIPLE
|
||||
$RUN_WRAPPER $cargo_target_dir/arbitrary_self_types_pointers_and_wrappers
|
||||
|
||||
echo "[AOT] alloc_system"
|
||||
$RUST_CMD example/alloc_system.rs --crate-type lib --target "$TARGET_TRIPLE"
|
||||
|
||||
# FIXME: doesn't work on m68k.
|
||||
if [[ "$HOST_TRIPLE" == "$TARGET_TRIPLE" ]]; then
|
||||
echo "[AOT] alloc_example"
|
||||
$RUST_CMD example/alloc_example.rs --crate-type bin --target $TARGET_TRIPLE
|
||||
$RUN_WRAPPER $cargo_target_dir/alloc_example
|
||||
fi
|
||||
|
||||
echo "[AOT] dst_field_align"
|
||||
# FIXME(antoyo): Re-add -Zmir-opt-level=2 once rust-lang/rust#67529 is fixed.
|
||||
$RUST_CMD example/dst-field-align.rs --crate-name dst_field_align --crate-type bin --target $TARGET_TRIPLE
|
||||
$RUN_WRAPPER $cargo_target_dir/dst_field_align || (echo $?; false)
|
||||
|
||||
echo "[AOT] std_example"
|
||||
std_flags="--cfg feature=\"master\""
|
||||
if (( $gcc_master_branch == 0 )); then
|
||||
std_flags=""
|
||||
fi
|
||||
$RUST_CMD example/std_example.rs --crate-type bin --target $TARGET_TRIPLE $std_flags
|
||||
$RUN_WRAPPER $cargo_target_dir/std_example --target $TARGET_TRIPLE
|
||||
|
||||
echo "[AOT] subslice-patterns-const-eval"
|
||||
$RUST_CMD example/subslice-patterns-const-eval.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE
|
||||
$RUN_WRAPPER $cargo_target_dir/subslice-patterns-const-eval
|
||||
|
||||
echo "[AOT] track-caller-attribute"
|
||||
$RUST_CMD example/track-caller-attribute.rs --crate-type bin $TEST_FLAGS --target $TARGET_TRIPLE
|
||||
$RUN_WRAPPER $cargo_target_dir/track-caller-attribute
|
||||
|
||||
echo "[BUILD] mod_bench"
|
||||
$RUST_CMD example/mod_bench.rs --crate-type bin --target $TARGET_TRIPLE
|
||||
}
|
||||
|
||||
function setup_rustc() {
|
||||
rust_toolchain=$(cat rust-toolchain | grep channel | sed 's/channel = "\(.*\)"/\1/')
|
||||
|
||||
git clone https://github.com/rust-lang/rust.git || true
|
||||
cd rust
|
||||
git fetch
|
||||
git checkout $($RUSTC -V | cut -d' ' -f3 | tr -d '(')
|
||||
export RUSTFLAGS=
|
||||
|
||||
rm config.toml || true
|
||||
|
||||
cat > config.toml <<EOF
|
||||
change-id = 115898
|
||||
|
||||
[rust]
|
||||
codegen-backends = []
|
||||
deny-warnings = false
|
||||
verbose-tests = true
|
||||
|
||||
[build]
|
||||
cargo = "$(rustup which cargo)"
|
||||
local-rebuild = true
|
||||
rustc = "$HOME/.rustup/toolchains/$rust_toolchain-$HOST_TRIPLE/bin/rustc"
|
||||
|
||||
[target.x86_64-unknown-linux-gnu]
|
||||
llvm-filecheck = "`which FileCheck-10 || which FileCheck-11 || which FileCheck-12 || which FileCheck-13 || which FileCheck-14`"
|
||||
|
||||
[llvm]
|
||||
download-ci-llvm = false
|
||||
EOF
|
||||
|
||||
$RUSTC -V | cut -d' ' -f3 | tr -d '('
|
||||
git checkout $($RUSTC -V | cut -d' ' -f3 | tr -d '(') tests
|
||||
}
|
||||
|
||||
function asm_tests() {
|
||||
setup_rustc
|
||||
|
||||
echo "[TEST] rustc asm test suite"
|
||||
RUSTC_ARGS="-Zpanic-abort-tests -Csymbol-mangling-version=v0 -Zcodegen-backend="$(pwd)"/../target/"$CHANNEL"/librustc_codegen_gcc."$dylib_ext" --sysroot "$(pwd)"/../build_sysroot/sysroot -Cpanic=abort"
|
||||
COMPILETEST_FORCE_STAGE0=1 ./x.py test --run always --stage 0 tests/assembly/asm --rustc-args "$RUSTC_ARGS"
|
||||
}
|
||||
|
||||
# FIXME(antoyo): linker gives multiple definitions error on Linux
|
||||
#echo "[BUILD] sysroot in release mode"
|
||||
#./build_sysroot/build_sysroot.sh --release
|
||||
|
||||
function test_libcore() {
|
||||
pushd build_sysroot/sysroot_src/library/core/tests
|
||||
echo "[TEST] libcore"
|
||||
rm -r ./target || true
|
||||
../../../../../cargo.sh test
|
||||
popd
|
||||
}
|
||||
|
||||
#echo
|
||||
#echo "[BENCH COMPILE] mod_bench"
|
||||
|
||||
#COMPILE_MOD_BENCH_INLINE="$RUSTC example/mod_bench.rs --crate-type bin -Zmir-opt-level=3 -O --crate-name mod_bench_inline"
|
||||
#COMPILE_MOD_BENCH_LLVM_0="rustc example/mod_bench.rs --crate-type bin -Copt-level=0 -o $cargo_target_dir/mod_bench_llvm_0 -Cpanic=abort"
|
||||
#COMPILE_MOD_BENCH_LLVM_1="rustc example/mod_bench.rs --crate-type bin -Copt-level=1 -o $cargo_target_dir/mod_bench_llvm_1 -Cpanic=abort"
|
||||
#COMPILE_MOD_BENCH_LLVM_2="rustc example/mod_bench.rs --crate-type bin -Copt-level=2 -o $cargo_target_dir/mod_bench_llvm_2 -Cpanic=abort"
|
||||
#COMPILE_MOD_BENCH_LLVM_3="rustc example/mod_bench.rs --crate-type bin -Copt-level=3 -o $cargo_target_dir/mod_bench_llvm_3 -Cpanic=abort"
|
||||
|
||||
## Use 100 runs, because a single compilation doesn't take more than ~150ms, so it isn't very slow
|
||||
#hyperfine --runs ${COMPILE_RUNS:-100} "$COMPILE_MOD_BENCH_INLINE" "$COMPILE_MOD_BENCH_LLVM_0" "$COMPILE_MOD_BENCH_LLVM_1" "$COMPILE_MOD_BENCH_LLVM_2" "$COMPILE_MOD_BENCH_LLVM_3"
|
||||
|
||||
#echo
|
||||
#echo "[BENCH RUN] mod_bench"
|
||||
#hyperfine --runs ${RUN_RUNS:-10} $cargo_target_dir/mod_bench{,_inline} $cargo_target_dir/mod_bench_llvm_*
|
||||
|
||||
function extended_rand_tests() {
|
||||
if (( $gcc_master_branch == 0 )); then
|
||||
return
|
||||
fi
|
||||
|
||||
pushd rand
|
||||
cargo clean
|
||||
echo "[TEST] rust-random/rand"
|
||||
../cargo.sh test --workspace
|
||||
popd
|
||||
}
|
||||
|
||||
function extended_regex_example_tests() {
|
||||
if (( $gcc_master_branch == 0 )); then
|
||||
return
|
||||
fi
|
||||
|
||||
pushd regex
|
||||
echo "[TEST] rust-lang/regex example shootout-regex-dna"
|
||||
cargo clean
|
||||
export CG_RUSTFLAGS="--cap-lints warn" # newer aho_corasick versions throw a deprecation warning
|
||||
# Make sure `[codegen mono items] start` doesn't poison the diff
|
||||
../cargo.sh build --example shootout-regex-dna
|
||||
cat examples/regexdna-input.txt \
|
||||
| ../cargo.sh run --example shootout-regex-dna \
|
||||
| grep -v "Spawned thread" > res.txt
|
||||
diff -u res.txt examples/regexdna-output.txt
|
||||
popd
|
||||
}
|
||||
|
||||
function extended_regex_tests() {
|
||||
if (( $gcc_master_branch == 0 )); then
|
||||
return
|
||||
fi
|
||||
|
||||
pushd regex
|
||||
echo "[TEST] rust-lang/regex tests"
|
||||
export CG_RUSTFLAGS="--cap-lints warn" # newer aho_corasick versions throw a deprecation warning
|
||||
../cargo.sh test --tests -- --exclude-should-panic --test-threads 1 -Zunstable-options -q
|
||||
popd
|
||||
}
|
||||
|
||||
function extended_sysroot_tests() {
|
||||
#pushd simple-raytracer
|
||||
#echo "[BENCH COMPILE] ebobby/simple-raytracer"
|
||||
#hyperfine --runs "${RUN_RUNS:-10}" --warmup 1 --prepare "cargo clean" \
|
||||
#"RUSTC=rustc RUSTFLAGS='' cargo build" \
|
||||
#"../cargo.sh build"
|
||||
|
||||
#echo "[BENCH RUN] ebobby/simple-raytracer"
|
||||
#cp ./target/debug/main ./raytracer_cg_gcc
|
||||
#hyperfine --runs "${RUN_RUNS:-10}" ./raytracer_cg_llvm ./raytracer_cg_gcc
|
||||
#popd
|
||||
|
||||
extended_rand_tests
|
||||
extended_regex_example_tests
|
||||
extended_regex_tests
|
||||
}
|
||||
|
||||
function test_rustc() {
|
||||
echo
|
||||
echo "[TEST] rust-lang/rust"
|
||||
|
||||
setup_rustc
|
||||
|
||||
for test in $(rg -i --files-with-matches "//(\[\w+\])?~|// error-pattern:|// build-fail|// run-fail|-Cllvm-args" tests/ui); do
|
||||
rm $test
|
||||
done
|
||||
rm tests/ui/consts/const_cmp_type_id.rs
|
||||
rm tests/ui/consts/issue-73976-monomorphic.rs
|
||||
|
||||
git checkout -- tests/ui/issues/auxiliary/issue-3136-a.rs # contains //~ERROR, but shouldn't be removed
|
||||
|
||||
rm -r tests/ui/{abi*,extern/,unsized-locals/,proc-macro/,threads-sendsync/,borrowck/,test*,consts/issue-miri-1910.rs} || true
|
||||
rm tests/ui/mir/mir_heavy_promoted.rs # this test is oom-killed in the CI.
|
||||
# Tests generating errors.
|
||||
rm tests/ui/consts/issue-94675.rs
|
||||
for test in $(rg --files-with-matches "thread" tests/ui); do
|
||||
rm $test
|
||||
done
|
||||
git checkout tests/ui/type-alias-impl-trait/auxiliary/cross_crate_ice.rs
|
||||
git checkout tests/ui/type-alias-impl-trait/auxiliary/cross_crate_ice2.rs
|
||||
git checkout tests/ui/macros/rfc-2011-nicer-assert-messages/auxiliary/common.rs
|
||||
git checkout tests/ui/imports/ambiguous-1.rs
|
||||
git checkout tests/ui/imports/ambiguous-4-extern.rs
|
||||
git checkout tests/ui/entry-point/auxiliary/bad_main_functions.rs
|
||||
|
||||
RUSTC_ARGS="$TEST_FLAGS -Csymbol-mangling-version=v0 -Zcodegen-backend="$(pwd)"/../target/"$CHANNEL"/librustc_codegen_gcc."$dylib_ext" --sysroot "$(pwd)"/../build_sysroot/sysroot"
|
||||
|
||||
if [ $# -eq 0 ]; then
|
||||
# No argument supplied to the function. Doing nothing.
|
||||
echo "No argument provided. Keeping all UI tests"
|
||||
elif [ $1 = "0" ]; then
|
||||
# Removing the failing tests.
|
||||
xargs -a ../failing-ui-tests.txt -d'\n' rm
|
||||
else
|
||||
# Removing all tests.
|
||||
find tests/ui -type f -name '*.rs' -not -path '*/auxiliary/*' -delete
|
||||
# Putting back only the failing ones.
|
||||
xargs -a ../failing-ui-tests.txt -d'\n' git checkout --
|
||||
fi
|
||||
|
||||
if [ $nb_parts -gt 0 ]; then
|
||||
echo "Splitting ui_test into $nb_parts parts (and running part $current_part)"
|
||||
find tests/ui -type f -name '*.rs' -not -path "*/auxiliary/*" > ui_tests
|
||||
# To ensure it'll always be the same sub-files, we sort the content.
|
||||
sort ui_tests -o ui_tests
|
||||
count=$((`wc -l < ui_tests` / $nb_parts))
|
||||
# We increment the number of tests by one because if this is an odd number, we would skip
|
||||
# one test.
|
||||
count=$((count + 1))
|
||||
split -d -l $count -a 1 ui_tests ui_tests.split
|
||||
# Removing all tests.
|
||||
find tests/ui -type f -name '*.rs' -not -path "*/auxiliary/*" -delete
|
||||
# Putting back only the ones we want to test.
|
||||
xargs -a "ui_tests.split$current_part" -d'\n' git checkout --
|
||||
fi
|
||||
|
||||
echo "[TEST] rustc test suite"
|
||||
COMPILETEST_FORCE_STAGE0=1 ./x.py test --run always --stage 0 tests/ui/ --rustc-args "$RUSTC_ARGS" # --target $TARGET_TRIPLE
|
||||
}
|
||||
|
||||
function test_failing_rustc() {
|
||||
test_rustc "1"
|
||||
}
|
||||
|
||||
function test_successful_rustc() {
|
||||
test_rustc "0"
|
||||
}
|
||||
|
||||
function clean_ui_tests() {
|
||||
find rust/build/x86_64-unknown-linux-gnu/test/ui/ -name stamp -delete
|
||||
}
|
||||
|
||||
function all() {
clean
mini_tests
build_sysroot
std_tests
#asm_tests
test_libcore
extended_sysroot_tests
test_rustc
}

if [ ${#funcs[@]} -eq 0 ]; then
echo "No command passed, running '--all'..."
all
else
for t in ${funcs[@]}; do
$t
done
fi
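Since this commit deletes the whole script, a short illustrative reminder of how it was driven may help; the flags come from the option parser above, while the script name and this particular combination are assumptions not shown in the hunk:

# Illustrative only: build the backend in release mode, build the sysroot, then run the libcore tests.
./test.sh --release --build-sysroot --test-libcore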
@ -1,6 +1,6 @@
|
||||
tests/ui/lint/unsafe_code/forge_unsafe_block.rs
|
||||
tests/ui/lint/unused-qualification-in-derive-expansion.rs
|
||||
tests/ui/macro-quote-test.rs
|
||||
tests/ui/macros/macro-quote-test.rs
|
||||
tests/ui/macros/proc_macro.rs
|
||||
tests/ui/panic-runtime/lto-unwind.rs
|
||||
tests/ui/resolve/derive-macro-1.rs
|
||||
@ -21,3 +21,12 @@ tests/ui/fmt/format-args-capture-issue-106408.rs
|
||||
tests/ui/fmt/indoc-issue-106408.rs
|
||||
tests/ui/hygiene/issue-77523-def-site-async-await.rs
|
||||
tests/ui/inherent-impls-overlap-check/no-overlap.rs
|
||||
tests/ui/enum-discriminant/issue-46519.rs
|
||||
tests/ui/issues/issue-45731.rs
|
||||
tests/ui/lint/test-allow-dead-extern-static-no-warning.rs
|
||||
tests/ui/macros/macro-comma-behavior-rpass.rs
|
||||
tests/ui/macros/rfc-2011-nicer-assert-messages/assert-with-custom-errors-does-not-create-unnecessary-code.rs
|
||||
tests/ui/macros/rfc-2011-nicer-assert-messages/feature-gate-generic_assert.rs
|
||||
tests/ui/macros/stringify.rs
|
||||
tests/ui/reexport-test-harness-main.rs
|
||||
tests/ui/rfcs/rfc-1937-termination-trait/termination-trait-in-test.rs
|
@ -5,7 +5,7 @@ tests/ui/lto/lto-many-codegen-units.rs
|
||||
tests/ui/lto/issue-100772.rs
|
||||
tests/ui/lto/lto-rustc-loads-linker-plugin.rs
|
||||
tests/ui/panic-runtime/lto-unwind.rs
|
||||
tests/ui/sanitize/issue-111184-coroutine-witness.rs
|
||||
tests/ui/sanitizer/issue-111184-cfi-coroutine-witness.rs
|
||||
tests/ui/sepcomp/sepcomp-lib-lto.rs
|
||||
tests/ui/lto/lto-opt-level-s.rs
|
||||
tests/ui/lto/lto-opt-level-z.rs
|
@ -13,7 +13,6 @@ tests/ui/sepcomp/sepcomp-extern.rs
|
||||
tests/ui/sepcomp/sepcomp-fns-backwards.rs
|
||||
tests/ui/sepcomp/sepcomp-fns.rs
|
||||
tests/ui/sepcomp/sepcomp-statics.rs
|
||||
tests/ui/simd/intrinsic/generic-arithmetic-pass.rs
|
||||
tests/ui/asm/x86_64/may_unwind.rs
|
||||
tests/ui/backtrace.rs
|
||||
tests/ui/catch-unwind-bang.rs
|
||||
@ -49,7 +48,6 @@ tests/ui/rfcs/rfc-1857-stabilize-drop-order/drop-order.rs
|
||||
tests/ui/rfcs/rfc-2091-track-caller/std-panic-locations.rs
|
||||
tests/ui/simd/issue-17170.rs
|
||||
tests/ui/simd/issue-39720.rs
|
||||
tests/ui/simd/issue-89193.rs
|
||||
tests/ui/statics/issue-91050-1.rs
|
||||
tests/ui/statics/issue-91050-2.rs
|
||||
tests/ui/alloc-error/default-alloc-error-hook.rs
|
||||
@ -57,7 +55,6 @@ tests/ui/coroutine/panic-safe.rs
|
||||
tests/ui/issues/issue-14875.rs
|
||||
tests/ui/issues/issue-29948.rs
|
||||
tests/ui/panics/nested_panic_caught.rs
|
||||
tests/ui/simd/intrinsic/generic-bswap-byte.rs
|
||||
tests/ui/const_prop/ice-issue-111353.rs
|
||||
tests/ui/process/println-with-broken-pipe.rs
|
||||
tests/ui/panic-runtime/lto-abort.rs
|
||||
@ -72,3 +69,8 @@ tests/ui/async-await/deep-futures-are-freeze.rs
|
||||
tests/ui/closures/capture-unsized-by-ref.rs
|
||||
tests/ui/coroutine/resume-after-return.rs
|
||||
tests/ui/macros/rfc-2011-nicer-assert-messages/all-expr-kinds.rs
|
||||
tests/ui/simd/masked-load-store.rs
|
||||
tests/ui/simd/repr_packed.rs
|
||||
tests/ui/async-await/in-trait/dont-project-to-specializable-projection.rs
|
||||
tests/ui/consts/try-operator.rs
|
||||
tests/ui/coroutine/unwind-abort-mix.rs
|
@ -9,6 +9,7 @@ tests/ui/packed/packed-struct-vec.rs
|
||||
tests/ui/packed/packed-tuple-struct-layout.rs
|
||||
tests/ui/simd/array-type.rs
|
||||
tests/ui/simd/intrinsic/float-minmax-pass.rs
|
||||
tests/ui/simd/intrinsic/generic-arithmetic-pass.rs
|
||||
tests/ui/simd/intrinsic/generic-arithmetic-saturating-pass.rs
|
||||
tests/ui/simd/intrinsic/generic-as.rs
|
||||
tests/ui/simd/intrinsic/generic-cast-pass.rs
|
||||
@ -32,11 +33,16 @@ tests/ui/coroutine/size-moved-locals.rs
|
||||
tests/ui/macros/rfc-2011-nicer-assert-messages/all-not-available-cases.rs
|
||||
tests/ui/simd/intrinsic/generic-gather-pass.rs
|
||||
tests/ui/simd/issue-85915-simd-ptrs.rs
|
||||
tests/ui/simd/issue-89193.rs
|
||||
tests/ui/issues/issue-68010-large-zst-consts.rs
|
||||
tests/ui/rust-2018/proc-macro-crate-in-paths.rs
|
||||
tests/ui/target-feature/missing-plusminus.rs
|
||||
tests/ui/sse2.rs
|
||||
tests/ui/codegen/issue-79865-llvm-miscompile.rs
|
||||
tests/ui/intrinsics/intrinsics-integer.rs
|
||||
tests/ui/std-backtrace.rs
|
||||
tests/ui/mir/alignment/packed.rs
|
||||
tests/ui/intrinsics/intrinsics-integer.rs
|
||||
tests/ui/asm/x86_64/evex512-implicit-feature.rs
|
||||
tests/ui/packed/dyn-trait.rs
|
||||
tests/ui/packed/issue-118537-field-offset-ice.rs
|
||||
tests/ui/stable-mir-print/basic_function.rs
|
@ -5,6 +5,7 @@ use std::{
|
||||
process::Command,
|
||||
};
|
||||
|
||||
use boml::Toml;
|
||||
use lang_tester::LangTester;
|
||||
use tempfile::TempDir;
|
||||
|
||||
@ -20,20 +21,32 @@ pub fn main_inner(profile: Profile) {
|
||||
let tempdir = TempDir::new().expect("temp dir");
|
||||
let current_dir = current_dir().expect("current dir");
|
||||
let current_dir = current_dir.to_str().expect("current dir").to_string();
|
||||
let gcc_path = include_str!("../gcc_path");
|
||||
let gcc_path = gcc_path.trim();
|
||||
let toml = Toml::parse(include_str!("../config.toml")).expect("Failed to parse `config.toml`");
|
||||
let gcc_path = if let Ok(gcc_path) = toml.get_string("gcc-path") {
|
||||
PathBuf::from(gcc_path.to_string())
|
||||
} else {
|
||||
// then we try to retrieve it from the `target` folder.
|
||||
let commit = include_str!("../libgccjit.version").trim();
|
||||
Path::new("build/libgccjit").join(commit)
|
||||
};
|
||||
|
||||
let gcc_path = Path::new(&gcc_path)
|
||||
.canonicalize()
|
||||
.expect("failed to get absolute path of `gcc-path`")
|
||||
.display()
|
||||
.to_string();
|
||||
env::set_var("LD_LIBRARY_PATH", gcc_path);
|
||||
|
||||
fn rust_filter(filename: &Path) -> bool {
|
||||
filename.extension().expect("extension").to_str().expect("to_str") == "rs"
|
||||
fn rust_filter(path: &Path) -> bool {
|
||||
path.is_file() && path.extension().expect("extension").to_str().expect("to_str") == "rs"
|
||||
}
|
||||
|
||||
#[cfg(feature="master")]
|
||||
#[cfg(feature = "master")]
|
||||
fn filter(filename: &Path) -> bool {
|
||||
rust_filter(filename)
|
||||
}
|
||||
|
||||
#[cfg(not(feature="master"))]
|
||||
#[cfg(not(feature = "master"))]
|
||||
fn filter(filename: &Path) -> bool {
|
||||
if let Some(filename) = filename.to_str() {
|
||||
if filename.ends_with("gep.rs") {
|
||||
@ -45,16 +58,17 @@ pub fn main_inner(profile: Profile) {
|
||||
|
||||
LangTester::new()
|
||||
.test_dir("tests/run")
|
||||
.test_file_filter(filter)
|
||||
.test_extract(|source| {
|
||||
let lines =
|
||||
source.lines()
|
||||
.test_path_filter(filter)
|
||||
.test_extract(|path| {
|
||||
let lines = std::fs::read_to_string(path)
|
||||
.expect("read file")
|
||||
.lines()
|
||||
.skip_while(|l| !l.starts_with("//"))
|
||||
.take_while(|l| l.starts_with("//"))
|
||||
.map(|l| &l[2..])
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
Some(lines)
|
||||
lines
|
||||
})
|
||||
.test_cmds(move |path| {
|
||||
// Test command 1: Compile `x.rs` into `tempdir/x`.
|
||||
@ -62,19 +76,22 @@ pub fn main_inner(profile: Profile) {
|
||||
exe.push(&tempdir);
|
||||
exe.push(path.file_stem().expect("file_stem"));
|
||||
let mut compiler = Command::new("rustc");
|
||||
compiler.args(&[
|
||||
compiler.args([
|
||||
&format!("-Zcodegen-backend={}/target/debug/librustc_codegen_gcc.so", current_dir),
|
||||
"--sysroot", &format!("{}/build_sysroot/sysroot/", current_dir),
|
||||
"--sysroot",
|
||||
&format!("{}/build_sysroot/sysroot/", current_dir),
|
||||
"-Zno-parallel-llvm",
|
||||
"-C", "link-arg=-lc",
|
||||
"-o", exe.to_str().expect("to_str"),
|
||||
"-C",
|
||||
"link-arg=-lc",
|
||||
"-o",
|
||||
exe.to_str().expect("to_str"),
|
||||
path.to_str().expect("to_str"),
|
||||
]);
|
||||
|
||||
// TODO(antoyo): find a way to send this via a cli argument.
|
||||
let test_target = std::env::var("CG_GCC_TEST_TARGET");
|
||||
if let Ok(ref target) = test_target {
|
||||
compiler.args(&["--target", &target]);
|
||||
compiler.args(["--target", target]);
|
||||
let linker = format!("{}-gcc", target);
|
||||
compiler.args(&[format!("-Clinker={}", linker)]);
|
||||
let mut env_path = std::env::var("PATH").unwrap_or_default();
|
||||
@ -85,49 +102,38 @@ pub fn main_inner(profile: Profile) {
|
||||
|
||||
if let Some(flags) = option_env!("TEST_FLAGS") {
|
||||
for flag in flags.split_whitespace() {
|
||||
compiler.arg(&flag);
|
||||
compiler.arg(flag);
|
||||
}
|
||||
}
|
||||
match profile {
|
||||
Profile::Debug => {}
|
||||
Profile::Release => {
|
||||
compiler.args(&[
|
||||
"-C", "opt-level=3",
|
||||
"-C", "lto=no",
|
||||
]);
|
||||
compiler.args(["-C", "opt-level=3", "-C", "lto=no"]);
|
||||
}
|
||||
}
|
||||
// Test command 2: run `tempdir/x`.
|
||||
if test_target.is_ok() {
|
||||
let vm_parent_dir = std::env::var("CG_GCC_VM_DIR")
|
||||
.map(|dir| PathBuf::from(dir))
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|_| std::env::current_dir().unwrap());
|
||||
let vm_dir = "vm";
|
||||
let exe_filename = exe.file_name().unwrap();
|
||||
let vm_home_dir = vm_parent_dir.join(vm_dir).join("home");
|
||||
let vm_exe_path = vm_home_dir.join(exe_filename);
|
||||
// FIXME(antoyo): panicking here makes the test pass.
|
||||
let inside_vm_exe_path = PathBuf::from("/home").join(&exe_filename);
|
||||
let inside_vm_exe_path = PathBuf::from("/home").join(exe_filename);
|
||||
let mut copy = Command::new("sudo");
|
||||
copy.arg("cp");
|
||||
copy.args(&[&exe, &vm_exe_path]);
|
||||
copy.args([&exe, &vm_exe_path]);
|
||||
|
||||
let mut runtime = Command::new("sudo");
|
||||
runtime.args(&["chroot", vm_dir, "qemu-m68k-static"]);
|
||||
runtime.args(["chroot", vm_dir, "qemu-m68k-static"]);
|
||||
runtime.arg(inside_vm_exe_path);
|
||||
runtime.current_dir(vm_parent_dir);
|
||||
vec![
|
||||
("Compiler", compiler),
|
||||
("Copy", copy),
|
||||
("Run-time", runtime),
|
||||
]
|
||||
}
|
||||
else {
|
||||
vec![("Compiler", compiler), ("Copy", copy), ("Run-time", runtime)]
|
||||
} else {
|
||||
let runtime = Command::new(exe);
|
||||
vec![
|
||||
("Compiler", compiler),
|
||||
("Run-time", runtime),
|
||||
]
|
||||
vec![("Compiler", compiler), ("Run-time", runtime)]
|
||||
}
|
||||
})
|
||||
.run();
|
||||
|
@ -2,7 +2,7 @@

set -e
echo "[BUILD] build system" 1>&2
cd build_system
pushd $(dirname "$0")/build_system > /dev/null
cargo build --release
cd ..
./build_system/target/release/y $@
popd > /dev/null
$(dirname "$0")/build_system/target/release/y $@