//! Code that is useful in various codegen modules.

use libc::{c_char, c_uint};
use rustc_abi as abi;
use rustc_abi::Primitive::Pointer;
use rustc_abi::{AddressSpace, HasDataLayout};
use rustc_ast::Mutability;
use rustc_codegen_ssa::common::TypeKind;
use rustc_codegen_ssa::traits::*;
use rustc_data_structures::stable_hasher::{Hash128, HashStable, StableHasher};
use rustc_hir::def_id::DefId;
use rustc_middle::bug;
use rustc_middle::mir::interpret::{ConstAllocation, GlobalAlloc, Scalar};
use rustc_middle::ty::TyCtxt;
use rustc_session::cstore::DllImport;
use tracing::debug;

use crate::consts::const_alloc_to_llvm;
pub(crate) use crate::context::CodegenCx;
use crate::llvm::{self, BasicBlock, Bool, ConstantInt, False, Metadata, True};
use crate::type_::Type;
use crate::value::Value;

/*
* A note on nomenclature of linking: "extern", "foreign", and "upcall".
*
* An "extern" is an LLVM symbol we wind up emitting an undefined external
* reference to. This means "we don't have the thing in this compilation unit,
* please make sure you link it in at runtime". This could be a reference to
* C code found in a C library, or rust code found in a rust crate.
*
* Most "externs" are implicitly declared (automatically) as a result of a
* user declaring an extern _module_ dependency; this causes the rust driver
* to locate an extern crate, scan its compilation metadata, and emit extern
* declarations for any symbols used by the declaring crate.
*
* A "foreign" is an extern that references C (or other non-rust ABI) code.
* There is no metadata to scan for extern references so in these cases either
* a header-digester like bindgen, or manual function prototypes, have to
* serve as declarators. So these are usually given explicitly as prototype
* declarations, in rust code, with ABI attributes on them noting which ABI to
* link via.
*
* An "upcall" is a foreign call generated by the compiler (not corresponding
* to any user-written call in the code) into the runtime library, to perform
* some helper task such as bringing a task to life, allocating memory, etc.
*
*/

/// A structure representing an active landing pad for the duration of a basic
/// block.
///
/// Each `Block` may contain an instance of this, indicating whether the block
/// is part of a landing pad or not. This is used to make decisions about
/// whether to emit `invoke` instructions (e.g., in a landing pad we don't
/// continue to use `invoke`) and also about various function call metadata.
///
/// For GNU exceptions (`landingpad` + `resume` instructions) this structure is
/// just a bunch of `None` instances (not too interesting), but for MSVC
/// exceptions (`cleanuppad` + `cleanupret` instructions) this contains data.
/// When inside of a landing pad, each function call in LLVM IR needs to be
/// annotated with which landing pad it's a part of. This is accomplished via
/// the `OperandBundleDef` value created for MSVC landing pads.
pub(crate) struct Funclet<'ll> {
    cleanuppad: &'ll Value,
    operand: llvm::OperandBundleOwned<'ll>,
}

impl<'ll> Funclet<'ll> {
    pub(crate) fn new(cleanuppad: &'ll Value) -> Self {
        Funclet { cleanuppad, operand: llvm::OperandBundleOwned::new("funclet", &[cleanuppad]) }
    }

    pub(crate) fn cleanuppad(&self) -> &'ll Value {
        self.cleanuppad
    }

    pub(crate) fn bundle(&self) -> &llvm::OperandBundle<'ll> {
        &self.operand
    }
}
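
// Sketch of how a `Funclet` is consumed (the call-site plumbing lives in the
// builder, not in this module): one `Funclet` is created per `cleanuppad`,
// and `Funclet::bundle()` is passed along with every call or invoke emitted
// while that funclet is active, so that LLVM attaches the "funclet" operand
// bundle to the call site.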

impl<'ll> BackendTypes for CodegenCx<'ll, '_> {
    type Value = &'ll Value;
    type Metadata = &'ll Metadata;
    // FIXME(eddyb) replace this with a `Function` "subclass" of `Value`.
    type Function = &'ll Value;

    type BasicBlock = &'ll BasicBlock;
    type Type = &'ll Type;
    type Funclet = Funclet<'ll>;

    type DIScope = &'ll llvm::debuginfo::DIScope;
    type DILocation = &'ll llvm::debuginfo::DILocation;
    type DIVariable = &'ll llvm::debuginfo::DIVariable;
}

impl<'ll> CodegenCx<'ll, '_> {
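    /// Builds a constant array of element type `ty` from the constants `elts`.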
    pub(crate) fn const_array(&self, ty: &'ll Type, elts: &[&'ll Value]) -> &'ll Value {
        let len = u64::try_from(elts.len()).expect("LLVMConstArray2 elements len overflow");
        unsafe { llvm::LLVMConstArray2(ty, elts.as_ptr(), len) }
    }

    pub(crate) fn const_bytes(&self, bytes: &[u8]) -> &'ll Value {
        bytes_in_context(self.llcx, bytes)
    }
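
    /// Returns element `idx` of the constant aggregate `v` (a struct, array,
    /// or vector); `idx` is assumed to be in bounds.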
    pub(crate) fn const_get_elt(&self, v: &'ll Value, idx: u64) -> &'ll Value {
        unsafe {
            let idx = c_uint::try_from(idx).expect("LLVMGetAggregateElement index overflow");
            let r = llvm::LLVMGetAggregateElement(v, idx).unwrap();

            debug!("const_get_elt(v={:?}, idx={}, r={:?})", v, idx, r);

            r
        }
    }
}

impl<'ll, 'tcx> ConstCodegenMethods<'tcx> for CodegenCx<'ll, 'tcx> {
    fn const_null(&self, t: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMConstNull(t) }
    }

    fn const_undef(&self, t: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMGetUndef(t) }
    }

    fn is_undef(&self, v: &'ll Value) -> bool {
        unsafe { llvm::LLVMIsUndef(v) == True }
    }
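
    // Note: `poison` is stronger than `undef`. An `undef` value may resolve
    // differently at each use, while `poison` propagates through most
    // operations; see the LLVM LangRef for the precise semantics.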
    fn const_poison(&self, t: &'ll Type) -> &'ll Value {
        unsafe { llvm::LLVMGetPoison(t) }
    }

    fn const_bool(&self, val: bool) -> &'ll Value {
        self.const_uint(self.type_i1(), val as u64)
    }

    fn const_i8(&self, i: i8) -> &'ll Value {
        self.const_int(self.type_i8(), i as i64)
    }

    fn const_i16(&self, i: i16) -> &'ll Value {
        self.const_int(self.type_i16(), i as i64)
    }

    fn const_i32(&self, i: i32) -> &'ll Value {
        self.const_int(self.type_i32(), i as i64)
    }

    fn const_int(&self, t: &'ll Type, i: i64) -> &'ll Value {
        debug_assert!(
            self.type_kind(t) == TypeKind::Integer,
            "only allows integer types in const_int"
        );
        unsafe { llvm::LLVMConstInt(t, i as u64, True) }
    }

    fn const_u8(&self, i: u8) -> &'ll Value {
        self.const_uint(self.type_i8(), i as u64)
    }

    fn const_u32(&self, i: u32) -> &'ll Value {
        self.const_uint(self.type_i32(), i as u64)
    }

    fn const_u64(&self, i: u64) -> &'ll Value {
        self.const_uint(self.type_i64(), i)
    }

    fn const_u128(&self, i: u128) -> &'ll Value {
        self.const_uint_big(self.type_i128(), i)
    }

    fn const_usize(&self, i: u64) -> &'ll Value {
        let bit_size = self.data_layout().pointer_size.bits();
        if bit_size < 64 {
            // make sure it doesn't overflow
            assert!(i < (1 << bit_size));
        }

        self.const_uint(self.isize_ty, i)
    }

    fn const_uint(&self, t: &'ll Type, i: u64) -> &'ll Value {
        debug_assert!(
            self.type_kind(t) == TypeKind::Integer,
            "only allows integer types in const_uint"
        );
        unsafe { llvm::LLVMConstInt(t, i, False) }
    }

    fn const_uint_big(&self, t: &'ll Type, u: u128) -> &'ll Value {
        debug_assert!(
            self.type_kind(t) == TypeKind::Integer,
            "only allows integer types in const_uint_big"
        );
        unsafe {
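            // `LLVMConstIntOfArbitraryPrecision` expects 64-bit words in
            // little-endian order, so the low half of `u` goes first.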
            let words = [u as u64, (u >> 64) as u64];
            llvm::LLVMConstIntOfArbitraryPrecision(t, 2, words.as_ptr())
        }
    }
|
2017-06-25 09:42:55 +00:00
|
|
|
|
2019-07-07 17:08:40 +00:00
|
|
|
fn const_real(&self, t: &'ll Type, val: f64) -> &'ll Value {
|
|
|
|
unsafe { llvm::LLVMConstReal(t, val) }
|
|
|
|
}
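
    // Identical strings are deduplicated per codegen unit: `const_str_cache`
    // maps the string's contents to the internal, `unnamed_addr` global that
    // holds it, and the returned pair is (pointer to the data, length).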
    fn const_str(&self, s: &str) -> (&'ll Value, &'ll Value) {
        let str_global = *self
            .const_str_cache
            .borrow_mut()
            .raw_entry_mut()
            .from_key(s)
            .or_insert_with(|| {
                let sc = self.const_bytes(s.as_bytes());
                let sym = self.generate_local_symbol_name("str");
                let g = self.define_global(&sym, self.val_ty(sc)).unwrap_or_else(|| {
                    bug!("symbol `{}` is already defined", sym);
                });
                unsafe {
                    llvm::LLVMSetInitializer(g, sc);
                    llvm::LLVMSetGlobalConstant(g, True);
                    llvm::LLVMSetUnnamedAddress(g, llvm::UnnamedAddr::Global);
                }
                llvm::set_linkage(g, llvm::Linkage::InternalLinkage);
                // Cast to default address space if globals are in a different addrspace
                let g = self.const_pointercast(g, self.type_ptr());
                (s.to_owned(), g)
            })
            .1;
        let len = s.len();
        (str_global, self.const_usize(len as u64))
    }

    fn const_struct(&self, elts: &[&'ll Value], packed: bool) -> &'ll Value {
        struct_in_context(self.llcx, elts, packed)
    }

    fn const_vector(&self, elts: &[&'ll Value]) -> &'ll Value {
        let len = c_uint::try_from(elts.len()).expect("LLVMConstVector elements len overflow");
        unsafe { llvm::LLVMConstVector(elts.as_ptr(), len) }
    }

    fn const_to_opt_uint(&self, v: &'ll Value) -> Option<u64> {
        try_as_const_integral(v).and_then(|v| unsafe {
            let mut i = 0u64;
            let success = llvm::LLVMRustConstIntGetZExtValue(v, &mut i);
            success.then_some(i)
        })
    }

    fn const_to_opt_u128(&self, v: &'ll Value, sign_ext: bool) -> Option<u128> {
        try_as_const_integral(v).and_then(|v| unsafe {
            let (mut lo, mut hi) = (0u64, 0u64);
            let success = llvm::LLVMRustConstInt128Get(v, sign_ext, &mut hi, &mut lo);
            success.then_some(hi_lo_to_u128(lo, hi))
        })
    }
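
    // Lowers an interpreter `Scalar` (raw bits, or a pointer into some
    // allocation) to an LLVM immediate of type `llty`; the pointer case emits
    // a byte-offset GEP from the allocation's base address.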
    fn scalar_to_backend(&self, cv: Scalar, layout: abi::Scalar, llty: &'ll Type) -> &'ll Value {
        let bitsize = if layout.is_bool() { 1 } else { layout.size(self).bits() };
        match cv {
            Scalar::Int(int) => {
                let data = int.to_bits(layout.size(self));
                let llval = self.const_uint_big(self.type_ix(bitsize), data);
                if matches!(layout.primitive(), Pointer(_)) {
                    unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
                } else {
                    self.const_bitcast(llval, llty)
                }
            }
            Scalar::Ptr(ptr, _size) => {
                let (prov, offset) = ptr.into_parts();
                let (base_addr, base_addr_space) = match self.tcx.global_alloc(prov.alloc_id()) {
                    GlobalAlloc::Memory(alloc) => {
                        // For ZSTs directly codegen an aligned pointer.
                        // This avoids generating a zero-sized constant value and actually needing a
                        // real address at runtime.
                        if alloc.inner().len() == 0 {
                            assert_eq!(offset.bytes(), 0);
                            let llval = self.const_usize(alloc.inner().align.bytes());
                            return if matches!(layout.primitive(), Pointer(_)) {
                                unsafe { llvm::LLVMConstIntToPtr(llval, llty) }
                            } else {
                                self.const_bitcast(llval, llty)
                            };
                        } else {
                            let init = const_alloc_to_llvm(self, alloc, /*static*/ false);
                            let alloc = alloc.inner();
                            let value = match alloc.mutability {
                                Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None),
                                _ => self.static_addr_of_impl(init, alloc.align, None),
                            };
                            if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty()
                            {
                                let hash = self.tcx.with_stable_hashing_context(|mut hcx| {
                                    let mut hasher = StableHasher::new();
                                    alloc.hash_stable(&mut hcx, &mut hasher);
                                    hasher.finish::<Hash128>()
                                });
                                llvm::set_value_name(
                                    value,
                                    format!("alloc_{hash:032x}").as_bytes(),
                                );
                            }
                            (value, AddressSpace::DATA)
                        }
                    }
                    GlobalAlloc::Function { instance, .. } => {
                        (self.get_fn_addr(instance), self.data_layout().instruction_address_space)
                    }
                    GlobalAlloc::VTable(ty, dyn_ty) => {
                        let alloc = self
                            .tcx
                            .global_alloc(self.tcx.vtable_allocation((
                                ty,
                                dyn_ty.principal().map(|principal| {
                                    self.tcx.instantiate_bound_regions_with_erased(principal)
                                }),
                            )))
                            .unwrap_memory();
                        let init = const_alloc_to_llvm(self, alloc, /*static*/ false);
                        let value = self.static_addr_of_impl(init, alloc.inner().align, None);
                        (value, AddressSpace::DATA)
                    }
                    GlobalAlloc::Static(def_id) => {
                        assert!(self.tcx.is_static(def_id));
                        assert!(!self.tcx.is_thread_local_static(def_id));
                        (self.get_static(def_id), AddressSpace::DATA)
                    }
                };
                let llval = unsafe {
                    llvm::LLVMConstInBoundsGEP2(
                        self.type_i8(),
                        // Cast to the required address space if necessary
                        self.const_pointercast(base_addr, self.type_ptr_ext(base_addr_space)),
                        &self.const_usize(offset.bytes()),
                        1,
                    )
                };
                if !matches!(layout.primitive(), Pointer(_)) {
                    unsafe { llvm::LLVMConstPtrToInt(llval, llty) }
                } else {
                    self.const_bitcast(llval, llty)
                }
            }
        }
    }

    fn const_data_from_alloc(&self, alloc: ConstAllocation<'tcx>) -> Self::Value {
        const_alloc_to_llvm(self, alloc, /*static*/ false)
    }

    fn const_ptr_byte_offset(&self, base_addr: Self::Value, offset: abi::Size) -> Self::Value {
        unsafe {
            llvm::LLVMConstInBoundsGEP2(
                self.type_i8(),
                base_addr,
                &self.const_usize(offset.bytes()),
                1,
            )
        }
    }
}

/// Get the [LLVM type][Type] of a [`Value`].
pub(crate) fn val_ty(v: &Value) -> &Type {
    unsafe { llvm::LLVMTypeOf(v) }
}
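
// The final `True` passed to `LLVMConstStringInContext2` means "do not
// NUL-terminate": the resulting `[N x i8]` constant holds exactly `bytes`.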
pub(crate) fn bytes_in_context<'ll>(llcx: &'ll llvm::Context, bytes: &[u8]) -> &'ll Value {
    unsafe {
        let ptr = bytes.as_ptr() as *const c_char;
        llvm::LLVMConstStringInContext2(llcx, ptr, bytes.len(), True)
    }
}

fn struct_in_context<'ll>(
    llcx: &'ll llvm::Context,
    elts: &[&'ll Value],
    packed: bool,
) -> &'ll Value {
    let len = c_uint::try_from(elts.len()).expect("LLVMConstStructInContext elements len overflow");
    unsafe { llvm::LLVMConstStructInContext(llcx, elts.as_ptr(), len, packed as Bool) }
}
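
/// Reassembles a `u128` from the two halves written by `LLVMRustConstInt128Get`,
/// e.g. `hi_lo_to_u128(2, 1) == (1 << 64) | 2`.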
#[inline]
fn hi_lo_to_u128(lo: u64, hi: u64) -> u128 {
    ((hi as u128) << 64) | (lo as u128)
}

fn try_as_const_integral(v: &Value) -> Option<&ConstantInt> {
    unsafe { llvm::LLVMIsAConstantInt(v) }
}
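
/// Finds the `DllImport` entry named `name` in the native library attached to
/// `id`, if there is one.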
pub(crate) fn get_dllimport<'tcx>(
    tcx: TyCtxt<'tcx>,
    id: DefId,
    name: &str,
) -> Option<&'tcx DllImport> {
    tcx.native_library(id)
        .and_then(|lib| lib.dll_imports.iter().find(|di| di.name.as_str() == name))
}

/// Extension trait for explicit casts to `*const c_char`.
pub(crate) trait AsCCharPtr {
    /// Equivalent to `self.as_ptr().cast()`, but only casts to `*const c_char`.
    fn as_c_char_ptr(&self) -> *const c_char;
}

impl AsCCharPtr for str {
    fn as_c_char_ptr(&self) -> *const c_char {
        self.as_ptr().cast()
    }
}

impl AsCCharPtr for [u8] {
    fn as_c_char_ptr(&self) -> *const c_char {
        self.as_ptr().cast()
    }
}