Mirror of https://github.com/rust-lang/rust.git
Auto merge of #129225 - jieyouxu:rollup-xwtkwgr, r=jieyouxu
Rollup of 5 pull requests

Successful merges:

 - #129164 (Use `ar_archive_writer` for writing COFF import libs on all backends)
 - #129173 (Fix `is_val_statically_known` for floats)
 - #129185 (Port `run-make/libtest-json/validate_json.py` to Rust)
 - #129203 (Use cnum for extern crate data key)
 - #129221 (Remove JohnTitor from review rotation)

r? `@ghost`
`@rustbot` modify labels: rollup
Commit 7521bdaf5b
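For context on #129173, the intrinsic being fixed is used as in the minimal sketch below (illustrative only, not part of this commit; nightly-only because of `core_intrinsics`, and written without `unsafe` to mirror the updated codegen test further down):

#![feature(core_intrinsics)]

use std::intrinsics::is_val_statically_known;

// With this change, float arguments lower to the `llvm.is.constant.f16/f32/f64/f128`
// intrinsics (see the intrinsic-lowering hunk below); previously f16/f128 fell back to
// a constant `false` and f32/f64 used a mismatched intrinsic name.
#[inline]
pub fn describe(x: f32) -> &'static str {
    if is_val_statically_known(x) { "known at compile time" } else { "only known at runtime" }
}

fn main() {
    // Inlined with a literal argument, the branch can fold at compile time.
    println!("{}", describe(1.0));
    // With a runtime value, it cannot.
    println!("{}", describe(std::hint::black_box(2.0)));
}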
@@ -205,9 +205,9 @@ dependencies = [

 [[package]]
 name = "ar_archive_writer"
-version = "0.3.3"
+version = "0.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3f2bcb7cf51decfbbfc7ef476e28b0775b13e5eb1190f8b7df145cd53d4f4374"
+checksum = "de11a9d32db3327f981143bdf699ade4d637c6887b13b97e6e91a9154666963c"
 dependencies = [
  "object 0.36.2",
 ]
@@ -1,13 +1,6 @@
-use std::borrow::Borrow;
-use std::fs;
-use std::path::Path;
-
-use ar_archive_writer::{COFFShortExport, MachineTypes};
 use rustc_codegen_ssa::back::archive::{
-    create_mingw_dll_import_lib, ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder,
-    DEFAULT_OBJECT_READER,
+    ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder, DEFAULT_OBJECT_READER,
 };
-use rustc_codegen_ssa::common::is_mingw_gnu_toolchain;
 use rustc_session::Session;

 pub(crate) struct ArArchiveBuilderBuilder;
@@ -16,78 +9,4 @@ impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
     fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a> {
         Box::new(ArArchiveBuilder::new(sess, &DEFAULT_OBJECT_READER))
     }
-
-    fn create_dll_import_lib(
-        &self,
-        sess: &Session,
-        lib_name: &str,
-        import_name_and_ordinal_vector: Vec<(String, Option<u16>)>,
-        output_path: &Path,
-    ) {
-        if is_mingw_gnu_toolchain(&sess.target) {
-            // The binutils linker used on -windows-gnu targets cannot read the import
-            // libraries generated by LLVM: in our attempts, the linker produced an .EXE
-            // that loaded but crashed with an AV upon calling one of the imported
-            // functions. Therefore, use binutils to create the import library instead,
-            // by writing a .DEF file to the temp dir and calling binutils's dlltool.
-            create_mingw_dll_import_lib(
-                sess,
-                lib_name,
-                import_name_and_ordinal_vector,
-                output_path,
-            );
-        } else {
-            let mut file =
-                match fs::OpenOptions::new().write(true).create_new(true).open(&output_path) {
-                    Ok(file) => file,
-                    Err(error) => {
-                        sess.dcx().fatal(format!(
-                            "failed to create import library file `{path}`: {error}",
-                            path = output_path.display(),
-                        ));
-                    }
-                };
-
-            let machine = match sess.target.arch.borrow() {
-                "x86" => MachineTypes::I386,
-                "x86_64" => MachineTypes::AMD64,
-                "arm" => MachineTypes::ARMNT,
-                "aarch64" => MachineTypes::ARM64,
-                _ => {
-                    sess.dcx().fatal(format!(
-                        "unsupported target architecture `{arch}`",
-                        arch = sess.target.arch,
-                    ));
-                }
-            };
-
-            let exports = import_name_and_ordinal_vector
-                .iter()
-                .map(|(name, ordinal)| COFFShortExport {
-                    name: name.to_string(),
-                    ext_name: None,
-                    symbol_name: None,
-                    alias_target: None,
-                    ordinal: ordinal.unwrap_or(0),
-                    noname: ordinal.is_some(),
-                    data: false,
-                    private: false,
-                    constant: false,
-                })
-                .collect::<Vec<_>>();
-
-            if let Err(error) = ar_archive_writer::write_import_library(
-                &mut file,
-                lib_name,
-                &exports,
-                machine,
-                !sess.target.is_like_msvc,
-            ) {
-                sess.dcx().fatal(format!(
-                    "failed to create import library `{path}`: `{error}`",
-                    path = output_path.display(),
-                ));
-            }
-        }
-    }
 }
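The two hunks above delete this backend's hand-written `create_dll_import_lib` and keep only the archive-builder constructor; the import-library logic moves into a provided default method on the `ArchiveBuilderBuilder` trait (see the `rustc_codegen_ssa` archive hunk later in this commit). What remains in this file after the change is essentially the sketch below (taken from the new side of the diff; it depends on rustc-internal crates, so it is not a standalone program):

use rustc_codegen_ssa::back::archive::{
    ArArchiveBuilder, ArchiveBuilder, ArchiveBuilderBuilder, DEFAULT_OBJECT_READER,
};
use rustc_session::Session;

pub(crate) struct ArArchiveBuilderBuilder;

impl ArchiveBuilderBuilder for ArArchiveBuilderBuilder {
    // The only method a backend still has to supply; `create_dll_import_lib`
    // now comes from the trait's default implementation.
    fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a> {
        Box::new(ArArchiveBuilder::new(sess, &DEFAULT_OBJECT_READER))
    }
}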
@@ -12,7 +12,6 @@
 #![warn(unused_lifetimes)]
 // tidy-alphabetical-end

-extern crate ar_archive_writer;
 extern crate jobserver;
 #[macro_use]
 extern crate rustc_middle;
@@ -5,9 +5,6 @@ codegen_llvm_dynamic_linking_with_lto =
     .note = only 'staticlib', 'bin', and 'cdylib' outputs are supported with LTO


-codegen_llvm_error_creating_import_library =
-    Error creating import library for {$lib_name}: {$error}
-
 codegen_llvm_fixed_x18_invalid_arch = the `-Zfixed-x18` flag is not supported on the `{$arch}` architecture

 codegen_llvm_from_llvm_diag = {$message}
@@ -5,17 +5,13 @@ use std::path::{Path, PathBuf};
 use std::{io, mem, ptr, str};

 use rustc_codegen_ssa::back::archive::{
-    create_mingw_dll_import_lib, try_extract_macho_fat_archive, ArArchiveBuilder,
-    ArchiveBuildFailure, ArchiveBuilder, ArchiveBuilderBuilder, ObjectReader, UnknownArchiveKind,
-    DEFAULT_OBJECT_READER,
+    try_extract_macho_fat_archive, ArArchiveBuilder, ArchiveBuildFailure, ArchiveBuilder,
+    ArchiveBuilderBuilder, ObjectReader, UnknownArchiveKind, DEFAULT_OBJECT_READER,
 };
-use rustc_codegen_ssa::common;
 use rustc_session::Session;
-use tracing::trace;

-use crate::errors::ErrorCreatingImportLibrary;
 use crate::llvm::archive_ro::{ArchiveRO, Child};
-use crate::llvm::{self, ArchiveKind, LLVMMachineType, LLVMRustCOFFShortExport};
+use crate::llvm::{self, ArchiveKind};

 /// Helper for adding many files to an archive.
 #[must_use = "must call build() to finish building the archive"]
@@ -44,18 +40,6 @@ fn is_relevant_child(c: &Child<'_>) -> bool {
     }
 }

-/// Map machine type strings to values of LLVM's MachineTypes enum.
-fn llvm_machine_type(cpu: &str) -> LLVMMachineType {
-    match cpu {
-        "x86_64" => LLVMMachineType::AMD64,
-        "x86" => LLVMMachineType::I386,
-        "aarch64" => LLVMMachineType::ARM64,
-        "arm64ec" => LLVMMachineType::ARM64EC,
-        "arm" => LLVMMachineType::ARM,
-        _ => panic!("unsupported cpu type {cpu}"),
-    }
-}
-
 impl<'a> ArchiveBuilder for LlvmArchiveBuilder<'a> {
     fn add_archive(
         &mut self,
@@ -116,78 +100,6 @@ impl ArchiveBuilderBuilder for LlvmArchiveBuilderBuilder {
             Box::new(ArArchiveBuilder::new(sess, &LLVM_OBJECT_READER))
         }
     }
-
-    fn create_dll_import_lib(
-        &self,
-        sess: &Session,
-        lib_name: &str,
-        import_name_and_ordinal_vector: Vec<(String, Option<u16>)>,
-        output_path: &Path,
-    ) {
-        if common::is_mingw_gnu_toolchain(&sess.target) {
-            // The binutils linker used on -windows-gnu targets cannot read the import
-            // libraries generated by LLVM: in our attempts, the linker produced an .EXE
-            // that loaded but crashed with an AV upon calling one of the imported
-            // functions. Therefore, use binutils to create the import library instead,
-            // by writing a .DEF file to the temp dir and calling binutils's dlltool.
-            create_mingw_dll_import_lib(
-                sess,
-                lib_name,
-                import_name_and_ordinal_vector,
-                output_path,
-            );
-        } else {
-            // we've checked for \0 characters in the library name already
-            let dll_name_z = CString::new(lib_name).unwrap();
-
-            let output_path_z = rustc_fs_util::path_to_c_string(&output_path);
-
-            trace!("invoking LLVMRustWriteImportLibrary");
-            trace!(" dll_name {:#?}", dll_name_z);
-            trace!(" output_path {}", output_path.display());
-            trace!(
-                " import names: {}",
-                import_name_and_ordinal_vector
-                    .iter()
-                    .map(|(name, _ordinal)| name.clone())
-                    .collect::<Vec<_>>()
-                    .join(", "),
-            );
-
-            // All import names are Rust identifiers and therefore cannot contain \0 characters.
-            // FIXME: when support for #[link_name] is implemented, ensure that the import names
-            // still don't contain any \0 characters. Also need to check that the names don't
-            // contain substrings like " @" or "NONAME" that are keywords or otherwise reserved
-            // in definition files.
-            let cstring_import_name_and_ordinal_vector: Vec<(CString, Option<u16>)> =
-                import_name_and_ordinal_vector
-                    .into_iter()
-                    .map(|(name, ordinal)| (CString::new(name).unwrap(), ordinal))
-                    .collect();
-
-            let ffi_exports: Vec<LLVMRustCOFFShortExport> = cstring_import_name_and_ordinal_vector
-                .iter()
-                .map(|(name_z, ordinal)| LLVMRustCOFFShortExport::new(name_z.as_ptr(), *ordinal))
-                .collect();
-            let result = unsafe {
-                crate::llvm::LLVMRustWriteImportLibrary(
-                    dll_name_z.as_ptr(),
-                    output_path_z.as_ptr(),
-                    ffi_exports.as_ptr(),
-                    ffi_exports.len(),
-                    llvm_machine_type(&sess.target.arch) as u16,
-                    !sess.target.is_like_msvc,
-                )
-            };
-
-            if result == crate::llvm::LLVMRustResult::Failure {
-                sess.dcx().emit_fatal(ErrorCreatingImportLibrary {
-                    lib_name,
-                    error: llvm::last_error().unwrap_or("unknown LLVM error".to_string()),
-                });
-            }
-        }
-    }
 }

 // The object crate doesn't know how to get symbols for LLVM bitcode and COFF bigobj files.
@@ -1000,8 +1000,10 @@ impl<'ll> CodegenCx<'ll, '_> {
         ifn!("llvm.is.constant.i64", fn(t_i64) -> i1);
         ifn!("llvm.is.constant.i128", fn(t_i128) -> i1);
         ifn!("llvm.is.constant.isize", fn(t_isize) -> i1);
+        ifn!("llvm.is.constant.f16", fn(t_f16) -> i1);
         ifn!("llvm.is.constant.f32", fn(t_f32) -> i1);
         ifn!("llvm.is.constant.f64", fn(t_f64) -> i1);
+        ifn!("llvm.is.constant.f128", fn(t_f128) -> i1);
         ifn!("llvm.is.constant.ptr", fn(ptr) -> i1);

         ifn!("llvm.expect.i1", fn(i1, i1) -> i1);
@@ -39,13 +39,6 @@ pub(crate) enum PossibleFeature<'a> {
     None,
 }

-#[derive(Diagnostic)]
-#[diag(codegen_llvm_error_creating_import_library)]
-pub(crate) struct ErrorCreatingImportLibrary<'a> {
-    pub lib_name: &'a str,
-    pub error: String,
-}
-
 #[derive(Diagnostic)]
 #[diag(codegen_llvm_symbol_already_defined)]
 pub(crate) struct SymbolAlreadyDefined<'a> {
@@ -192,14 +192,22 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
             }
             sym::is_val_statically_known => {
                 let intrinsic_type = args[0].layout.immediate_llvm_type(self.cx);
-                match self.type_kind(intrinsic_type) {
-                    TypeKind::Pointer | TypeKind::Integer | TypeKind::Float | TypeKind::Double => {
-                        self.call_intrinsic(
-                            &format!("llvm.is.constant.{:?}", intrinsic_type),
-                            &[args[0].immediate()],
-                        )
+                let kind = self.type_kind(intrinsic_type);
+                let intrinsic_name = match kind {
+                    TypeKind::Pointer | TypeKind::Integer => {
+                        Some(format!("llvm.is.constant.{intrinsic_type:?}"))
                     }
-                    _ => self.const_bool(false),
+                    // LLVM float types' intrinsic names differ from their type names.
+                    TypeKind::Half => Some(format!("llvm.is.constant.f16")),
+                    TypeKind::Float => Some(format!("llvm.is.constant.f32")),
+                    TypeKind::Double => Some(format!("llvm.is.constant.f64")),
+                    TypeKind::FP128 => Some(format!("llvm.is.constant.f128")),
+                    _ => None,
+                };
+                if let Some(intrinsic_name) = intrinsic_name {
+                    self.call_intrinsic(&intrinsic_name, &[args[0].immediate()])
+                } else {
+                    self.const_bool(false)
                 }
             }
             sym::unlikely => self
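The comment added in this hunk is the heart of the fix: for integers and pointers the LLVM type name (`i64`, `ptr`, ...) happens to match the `llvm.is.constant.*` suffix, but for floats it does not (`half`/`float`/`double`/`fp128` versus `f16`/`f32`/`f64`/`f128`). A standalone sketch of that mapping, as a hypothetical helper for illustration only (not code from the compiler):

/// Maps a Debug-printed LLVM type name to the matching `llvm.is.constant.*`
/// intrinsic name, mirroring the match added above.
fn is_constant_intrinsic_name(llvm_type_name: &str) -> Option<String> {
    let suffix = match llvm_type_name {
        // LLVM float types' intrinsic names differ from their type names.
        "half" => "f16",
        "float" => "f32",
        "double" => "f64",
        "fp128" => "f128",
        // Integer and pointer type names already match the intrinsic suffix.
        s if s.starts_with('i') || s == "ptr" => s,
        // Anything else has no `llvm.is.constant.*` lowering here.
        _ => return None,
    };
    Some(format!("llvm.is.constant.{suffix}"))
}

fn main() {
    assert_eq!(is_constant_intrinsic_name("i64").as_deref(), Some("llvm.is.constant.i64"));
    assert_eq!(is_constant_intrinsic_name("double").as_deref(), Some("llvm.is.constant.f64"));
    assert_eq!(is_constant_intrinsic_name("void"), None);
}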
@@ -5,7 +5,7 @@ edition = "2021"

 [dependencies]
 # tidy-alphabetical-start
-ar_archive_writer = "0.3.3"
+ar_archive_writer = "0.4.0"
 arrayvec = { version = "0.7", default-features = false }
 bitflags = "2.4.1"
 cc = "1.0.90"
@@ -32,6 +32,9 @@ codegen_ssa_dlltool_fail_import_library =
 codegen_ssa_error_calling_dlltool =
     Error calling dlltool '{$dlltool_path}': {$error}

+codegen_ssa_error_creating_import_library =
+    Error creating import library for {$lib_name}: {$error}
+
 codegen_ssa_error_creating_remark_dir = failed to create remark directory: {$error}

 codegen_ssa_error_writing_def_file =
@@ -5,7 +5,9 @@ use std::fs::{self, File};
 use std::io::{self, Write};
 use std::path::{Path, PathBuf};

-use ar_archive_writer::{write_archive_to_stream, ArchiveKind, NewArchiveMember};
+use ar_archive_writer::{
+    write_archive_to_stream, ArchiveKind, COFFShortExport, MachineTypes, NewArchiveMember,
+};
 pub use ar_archive_writer::{ObjectReader, DEFAULT_OBJECT_READER};
 use object::read::archive::ArchiveFile;
 use object::read::macho::FatArch;
@@ -14,11 +16,15 @@ use rustc_data_structures::memmap::Mmap;
 use rustc_session::Session;
 use rustc_span::symbol::Symbol;
 use tempfile::Builder as TempFileBuilder;
+use tracing::trace;

 use super::metadata::search_for_section;
+use crate::common;
 // Re-exporting for rustc_codegen_llvm::back::archive
 pub use crate::errors::{ArchiveBuildFailure, ExtractBundledLibsError, UnknownArchiveKind};
-use crate::errors::{DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorWritingDEFFile};
+use crate::errors::{
+    DlltoolFailImportLibrary, ErrorCallingDllTool, ErrorCreatingImportLibrary, ErrorWritingDEFFile,
+};

 pub trait ArchiveBuilderBuilder {
     fn new_archive_builder<'a>(&self, sess: &'a Session) -> Box<dyn ArchiveBuilder + 'a>;
@@ -34,7 +40,81 @@ pub trait ArchiveBuilderBuilder {
         lib_name: &str,
         import_name_and_ordinal_vector: Vec<(String, Option<u16>)>,
         output_path: &Path,
-    );
+    ) {
+        if common::is_mingw_gnu_toolchain(&sess.target) {
+            // The binutils linker used on -windows-gnu targets cannot read the import
+            // libraries generated by LLVM: in our attempts, the linker produced an .EXE
+            // that loaded but crashed with an AV upon calling one of the imported
+            // functions. Therefore, use binutils to create the import library instead,
+            // by writing a .DEF file to the temp dir and calling binutils's dlltool.
+            create_mingw_dll_import_lib(
+                sess,
+                lib_name,
+                import_name_and_ordinal_vector,
+                output_path,
+            );
+        } else {
+            trace!("creating import library");
+            trace!(" dll_name {:#?}", lib_name);
+            trace!(" output_path {}", output_path.display());
+            trace!(
+                " import names: {}",
+                import_name_and_ordinal_vector
+                    .iter()
+                    .map(|(name, _ordinal)| name.clone())
+                    .collect::<Vec<_>>()
+                    .join(", "),
+            );

+            // All import names are Rust identifiers and therefore cannot contain \0 characters.
+            // FIXME: when support for #[link_name] is implemented, ensure that the import names
+            // still don't contain any \0 characters. Also need to check that the names don't
+            // contain substrings like " @" or "NONAME" that are keywords or otherwise reserved
+            // in definition files.

+            let mut file = match fs::File::create_new(&output_path) {
+                Ok(file) => file,
+                Err(error) => sess
+                    .dcx()
+                    .emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() }),
+            };

+            let exports = import_name_and_ordinal_vector
+                .iter()
+                .map(|(name, ordinal)| COFFShortExport {
+                    name: name.to_string(),
+                    ext_name: None,
+                    symbol_name: None,
+                    alias_target: None,
+                    ordinal: ordinal.unwrap_or(0),
+                    noname: ordinal.is_some(),
+                    data: false,
+                    private: false,
+                    constant: false,
+                })
+                .collect::<Vec<_>>();
+            let machine = match &*sess.target.arch {
+                "x86_64" => MachineTypes::AMD64,
+                "x86" => MachineTypes::I386,
+                "aarch64" => MachineTypes::ARM64,
+                "arm64ec" => MachineTypes::ARM64EC,
+                "arm" => MachineTypes::ARMNT,
+                cpu => panic!("unsupported cpu type {cpu}"),
+            };

+            if let Err(error) = ar_archive_writer::write_import_library(
+                &mut file,
+                lib_name,
+                &exports,
+                machine,
+                !sess.target.is_like_msvc,
+                /*comdat=*/ false,
+            ) {
+                sess.dcx()
+                    .emit_fatal(ErrorCreatingImportLibrary { lib_name, error: error.to_string() });
+            }
+        }
+    }

     fn extract_bundled_libs<'a>(
         &'a self,
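Since the default body above is what every backend now shares, here is a rough standalone sketch of the non-mingw path using the `ar_archive_writer` crate directly (assumptions: `ar_archive_writer = "0.4"` as a dependency, a single export named `my_function`, and an x86_64 MSVC-style target; the parameter names of `write_import_library` are not shown in the diff, so the two trailing booleans are annotated by intent only):

use std::fs::File;
use std::path::Path;

use ar_archive_writer::{COFFShortExport, MachineTypes};

fn write_example_import_lib(output_path: &Path) {
    // Mirrors `fs::File::create_new` in the default implementation above.
    let mut file = File::create_new(output_path).expect("failed to create import library file");

    let exports = vec![COFFShortExport {
        name: "my_function".to_string(),
        ext_name: None,
        symbol_name: None,
        alias_target: None,
        ordinal: 0,    // `ordinal.unwrap_or(0)` above
        noname: false, // `ordinal.is_some()` above
        data: false,
        private: false,
        constant: false,
    }];

    if let Err(error) = ar_archive_writer::write_import_library(
        &mut file,
        "example.dll",
        &exports,
        MachineTypes::AMD64,
        false, // `!sess.target.is_like_msvc` in the compiler; false for an MSVC-style target
        false, // `/*comdat=*/ false`, as in the diff
    ) {
        eprintln!("failed to create import library: {error}");
    }
}

fn main() {
    write_example_import_lib(Path::new("example.dll.lib"));
}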
@@ -1060,3 +1060,10 @@ pub struct CompilerBuiltinsCannotCall {
     pub caller: String,
     pub callee: String,
 }
+
+#[derive(Diagnostic)]
+#[diag(codegen_ssa_error_creating_import_library)]
+pub(crate) struct ErrorCreatingImportLibrary<'a> {
+    pub lib_name: &'a str,
+    pub error: String,
+}
@@ -1520,7 +1520,7 @@ rustc_queries! {
         separate_provide_extern
     }

-    query extern_crate(def_id: DefId) -> Option<&'tcx ExternCrate> {
+    query extern_crate(def_id: CrateNum) -> Option<&'tcx ExternCrate> {
         eval_always
         desc { "getting crate's ExternCrateData" }
         separate_provide_extern
@@ -451,7 +451,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
         // 2. For an extern inferred from a path or an indirect crate,
         //    where there is no explicit `extern crate`, we just prepend
         //    the crate name.
-        match self.tcx().extern_crate(def_id) {
+        match self.tcx().extern_crate(cnum) {
             Some(&ExternCrate { src, dependency_of, span, .. }) => match (src, dependency_of) {
                 (ExternCrateSource::Extern(def_id), LOCAL_CRATE) => {
                     // NOTE(eddyb) the only reason `span` might be dummy,
@@ -3247,10 +3247,8 @@ fn for_each_def(tcx: TyCtxt<'_>, mut collect_fn: impl for<'b> FnMut(&'b Ident, N
     let mut seen_defs: DefIdSet = Default::default();

     for &cnum in tcx.crates(()).iter() {
-        let def_id = cnum.as_def_id();
-
         // Ignore crates that are not direct dependencies.
-        match tcx.extern_crate(def_id) {
+        match tcx.extern_crate(cnum) {
             None => continue,
             Some(extern_crate) => {
                 if !extern_crate.is_direct() {
@@ -3259,7 +3257,7 @@ fn for_each_def(tcx: TyCtxt<'_>, mut collect_fn: impl for<'b> FnMut(&'b Ident, N
             }
         }

-        queue.push(def_id);
+        queue.push(cnum.as_def_id());
     }

     // Iterate external crate defs but be mindful about visibility
@@ -859,7 +859,7 @@ impl<'tcx> TyCtxt<'tcx> {
             // If `extern_crate` is `None`, then the crate was injected (e.g., by the allocator).
             // Treat that kind of crate as "indirect", since it's an implementation detail of
            // the language.
-            || self.extern_crate(key.as_def_id()).is_some_and(|e| e.is_direct())
+            || self.extern_crate(key).is_some_and(|e| e.is_direct())
    }

    /// Whether the item has a host effect param. This is different from `TyCtxt::is_const`,
@@ -130,7 +130,7 @@ impl<'ast, 'tcx> LanguageItemCollector<'ast, 'tcx> {
                 if first_defined_span.is_none() {
                     orig_crate_name = self.tcx.crate_name(original_def_id.krate);
                     if let Some(ExternCrate { dependency_of: inner_dependency_of, .. }) =
-                        self.tcx.extern_crate(original_def_id)
+                        self.tcx.extern_crate(original_def_id.krate)
                     {
                         orig_dependency_of = self.tcx.crate_name(*inner_dependency_of);
                     }
@@ -139,7 +139,7 @@ impl<'ast, 'tcx> LanguageItemCollector<'ast, 'tcx> {
                 let duplicate = if item_span.is_some() {
                     Duplicate::Plain
                 } else {
-                    match self.tcx.extern_crate(item_def_id) {
+                    match self.tcx.extern_crate(item_def_id.krate) {
                         Some(ExternCrate { dependency_of: inner_dependency_of, .. }) => {
                             dependency_of = self.tcx.crate_name(*inner_dependency_of);
                             Duplicate::CrateDepends
@@ -1668,7 +1668,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
         let name = self.tcx.crate_name(trait_def_id.krate);
         let spans: Vec<_> = [trait_def_id, found_type]
             .into_iter()
-            .filter_map(|def_id| self.tcx.extern_crate(def_id))
+            .filter_map(|def_id| self.tcx.extern_crate(def_id.krate))
             .map(|data| {
                 let dependency = if data.dependency_of == LOCAL_CRATE {
                     "direct dependency of the current crate".to_string()
@@ -1,6 +1,7 @@
 //@ compile-flags: --crate-type=lib -Zmerge-functions=disabled -O

 #![feature(core_intrinsics)]
+#![feature(f16, f128)]

 use std::intrinsics::is_val_statically_known;

@@ -49,7 +50,7 @@ pub fn _bool_false(b: bool) -> i32 {

 #[inline]
 pub fn _iref(a: &u8) -> i32 {
-    if unsafe { is_val_statically_known(a) } { 5 } else { 4 }
+    if is_val_statically_known(a) { 5 } else { 4 }
 }

 // CHECK-LABEL: @_iref_borrow(
@@ -68,7 +69,7 @@ pub fn _iref_arg(a: &u8) -> i32 {

 #[inline]
 pub fn _slice_ref(a: &[u8]) -> i32 {
-    if unsafe { is_val_statically_known(a) } { 7 } else { 6 }
+    if is_val_statically_known(a) { 7 } else { 6 }
 }

 // CHECK-LABEL: @_slice_ref_borrow(
@@ -84,3 +85,79 @@ pub fn _slice_ref_arg(a: &[u8]) -> i32 {
     // CHECK: ret i32 6
     _slice_ref(a)
 }
+
+#[inline]
+pub fn _f16(a: f16) -> i32 {
+    if is_val_statically_known(a) { 1 } else { 0 }
+}
+
+// CHECK-LABEL: @_f16_true(
+#[no_mangle]
+pub fn _f16_true() -> i32 {
+    // CHECK: ret i32 1
+    _f16(1.0)
+}
+
+// CHECK-LABEL: @_f16_false(
+#[no_mangle]
+pub fn _f16_false(a: f16) -> i32 {
+    // CHECK: ret i32 0
+    _f16(a)
+}
+
+#[inline]
+pub fn _f32(a: f32) -> i32 {
+    if is_val_statically_known(a) { 1 } else { 0 }
+}
+
+// CHECK-LABEL: @_f32_true(
+#[no_mangle]
+pub fn _f32_true() -> i32 {
+    // CHECK: ret i32 1
+    _f32(1.0)
+}
+
+// CHECK-LABEL: @_f32_false(
+#[no_mangle]
+pub fn _f32_false(a: f32) -> i32 {
+    // CHECK: ret i32 0
+    _f32(a)
+}
+
+#[inline]
+pub fn _f64(a: f64) -> i32 {
+    if is_val_statically_known(a) { 1 } else { 0 }
+}
+
+// CHECK-LABEL: @_f64_true(
+#[no_mangle]
+pub fn _f64_true() -> i32 {
+    // CHECK: ret i32 1
+    _f64(1.0)
+}
+
+// CHECK-LABEL: @_f64_false(
+#[no_mangle]
+pub fn _f64_false(a: f64) -> i32 {
+    // CHECK: ret i32 0
+    _f64(a)
+}
+
+#[inline]
+pub fn _f128(a: f128) -> i32 {
+    if is_val_statically_known(a) { 1 } else { 0 }
+}
+
+// CHECK-LABEL: @_f128_true(
+#[no_mangle]
+pub fn _f128_true() -> i32 {
+    // CHECK: ret i32 1
+    _f128(1.0)
+}
+
+// CHECK-LABEL: @_f128_false(
+#[no_mangle]
+pub fn _f128_false(a: f128) -> i32 {
+    // CHECK: ret i32 0
+    _f128(a)
+}
@@ -3,7 +3,7 @@
 //@ ignore-cross-compile
 //@ needs-unwind (test file contains #[should_panic] test)

-use run_make_support::{cmd, diff, python_command, rustc};
+use run_make_support::{cmd, diff, rustc, serde_json};

 fn main() {
     rustc().arg("--test").input("f.rs").run();
@@ -21,7 +21,18 @@ fn run_tests(extra_args: &[&str], expected_file: &str) {
         .run_fail();
     let test_stdout = &cmd_out.stdout_utf8();

-    python_command().arg("validate_json.py").stdin(test_stdout).run();
+    // Verify that the test process output is JSON Lines, i.e. each line is valid JSON.
+    for (line, n) in test_stdout.lines().zip(1..) {
+        if let Err(e) = serde_json::from_str::<serde_json::Value>(line) {
+            panic!(
+                "could not parse JSON on line {n}: {e}\n\
+                \n\
+                === STDOUT ===\n\
+                {test_stdout}\
+                =============="
+            );
+        }
+    }

     diff()
         .expected_file(expected_file)
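The inline loop above is the replacement for validate_json.py. As a standalone illustration (assuming `serde_json` is available as a dependency, as it is via `run_make_support` in the test; the sample lines are made up for the example), the same JSON Lines check looks like this:

// Checks that every line of `output` parses as a standalone JSON document.
fn assert_json_lines(output: &str) {
    for (line, n) in output.lines().zip(1..) {
        if let Err(e) = serde_json::from_str::<serde_json::Value>(line) {
            panic!("could not parse JSON on line {n}: {e}");
        }
    }
}

fn main() {
    let sample = "{\"type\":\"suite\",\"event\":\"started\"}\n{\"type\":\"test\",\"event\":\"ok\"}";
    assert_json_lines(sample);
    println!("all lines are valid JSON");
}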
@@ -1,8 +0,0 @@
-#!/usr/bin/env python
-
-import sys
-import json
-
-# Try to decode line in order to ensure it is a valid JSON document
-for line in sys.stdin:
-    json.loads(line)
@@ -970,7 +970,6 @@ rustdoc = [
 docs = [
     "@ehuss",
     "@GuillaumeGomez",
-    "@JohnTitor",
 ]
 query-system = [
     "@cjgillot",