Auto merge of #74235 - Manishearth:rollup-bgs3q14, r=Manishearth

Rollup of 19 pull requests

Successful merges:

 - #71322 (Accept tuple.0.0 as tuple indexing (take 2))
 - #72303 (Add core::future::{poll_fn, PollFn})
 - #73862 (Stabilize casts and coercions to `&[T]` in const fn)
 - #73887 (stabilize const mem::forget)
 - #73989 (adjust ub-enum test to be endianess-independent)
 - #74045 (Explain effects of debugging options from config.toml)
 - #74076 (Add `read_exact_at` and `write_all_at` to WASI's `FileExt`)
 - #74099 (Add VecDeque::range* methods)
 - #74100 (Use str::strip* in bootstrap)
 - #74103 (Only add CFGuard on `windows-msvc` targets)
 - #74109 (Only allow `repr(i128/u128)` on enum)
 - #74122 (Start-up clean-up)
 - #74125 (Correctly mark the ending span of a match arm)
 - #74127 (Avoid "whitelist")
 - #74129 (⬆️ rust-analyzer)
 - #74135 (Update books)
 - #74145 (Update rust-installer to latest version)
 - #74161 (Fix disabled dockerfiles)
 - #74162 (take self by value in ToPredicate)

Failed merges:

r? @ghost
bors, 2020-07-11 06:28:04 +00:00, commit 346aec9b02
130 changed files with 1600 additions and 597 deletions


@@ -1366,8 +1366,8 @@ checksum = "7e81a7c05f79578dbc15793d8b619db9ba32b4577003ef3af1a91c416798c58d"
 name = "installer"
 version = "0.0.0"
 dependencies = [
+ "anyhow",
  "clap",
- "failure",
  "flate2",
  "lazy_static",
  "num_cpus",


@@ -318,7 +318,9 @@
 #codegen-units-std = 1

 # Whether or not debug assertions are enabled for the compiler and standard
-# library.
+# library. Debug assertions control the maximum log level used by rustc. When
+# enabled, calls to `trace!` and `debug!` macros are preserved in the compiled
+# binary, otherwise they are omitted.
 #
 # Defaults to rust.debug value
 #debug-assertions = false

@@ -331,7 +333,9 @@
 # Debuginfo level for most of Rust code, corresponds to the `-C debuginfo=N` option of `rustc`.
 # `0` - no debug info
-# `1` - line tables only
+# `1` - line tables only - sufficient to generate backtraces that include line
+#       information and inlined functions, set breakpoints at source code
+#       locations, and step through execution in a debugger.
 # `2` - full debug info with variable and type information
 # Can be overridden for specific subsets of Rust code (rustc, std or tools).
 # Debuginfo for tests run with compiletest is not controlled by this option


@@ -963,10 +963,11 @@ pub fn run_cargo(
         .collect::<Vec<_>>();
     for (prefix, extension, expected_len) in toplevel {
         let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| {
-            filename.starts_with(&prefix[..])
-                && filename[prefix.len()..].starts_with('-')
-                && filename.ends_with(&extension[..])
-                && meta.len() == expected_len
+            meta.len() == expected_len
+                && filename
+                    .strip_prefix(&prefix[..])
+                    .map(|s| s.starts_with('-') && s.ends_with(&extension[..]))
+                    .unwrap_or(false)
         });
         let max = candidates
             .max_by_key(|&&(_, _, ref metadata)| FileTime::from_last_modification_time(metadata));
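
For reference, a minimal standalone sketch of the `str::strip_prefix` pattern adopted above (stable since Rust 1.45); the function and file names here are illustrative only, not bootstrap's actual types:

// Returns true when `filename` looks like "<prefix>-<hash><extension>".
fn matches_artifact(filename: &str, prefix: &str, extension: &str) -> bool {
    // strip_prefix returns Option<&str>, so the follow-up checks only run
    // when the prefix was actually present; no manual index slicing needed.
    filename
        .strip_prefix(prefix)
        .map(|rest| rest.starts_with('-') && rest.ends_with(extension))
        .unwrap_or(false)
}

fn main() {
    assert!(matches_artifact("libstd-0f3c9a.rlib", "libstd", ".rlib"));
    assert!(!matches_artifact("libcore-0f3c9a.rlib", "libstd", ".rlib"));
}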


@@ -439,8 +439,6 @@ impl Step for Std {
         builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
         compile::std_cargo(builder, target, compiler.stage, &mut cargo);
-        // Keep a whitelist so we do not build internal stdlib crates, these will be
-        // build by the rustc step later if enabled.
         cargo.arg("-p").arg(package);
         // Create all crate output directories first to make sure rustdoc uses
         // relative links.

@@ -460,6 +458,10 @@ impl Step for Std {
             builder.run(&mut cargo.into());
         };
+        // Only build the following crates. While we could just iterate over the
+        // folder structure, that would also build internal crates that we do
+        // not want to show in documentation. These crates will later be visited
+        // by the rustc step, so internal documentation will show them.
         let krates = ["alloc", "core", "std", "proc_macro", "test"];
         for krate in &krates {
             run_cargo_rustdoc_for(krate);


@@ -436,10 +436,9 @@ impl Build {
         output(Command::new(&build.initial_rustc).arg("--version").arg("--verbose"));
         let local_release = local_version_verbose
             .lines()
-            .filter(|x| x.starts_with("release:"))
+            .filter_map(|x| x.strip_prefix("release:"))
             .next()
             .unwrap()
-            .trim_start_matches("release:")
             .trim();
         let my_version = channel::CFG_RELEASE_NUM;
         if local_release.split('.').take(2).eq(my_version.split('.').take(2)) {

@@ -1089,10 +1088,10 @@ impl Build {
         let toml_file_name = self.src.join(&format!("src/tools/{}/Cargo.toml", package));
         let toml = t!(fs::read_to_string(&toml_file_name));
         for line in toml.lines() {
-            let prefix = "version = \"";
-            let suffix = "\"";
-            if line.starts_with(prefix) && line.ends_with(suffix) {
-                return line[prefix.len()..line.len() - suffix.len()].to_string();
+            if let Some(stripped) =
+                line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\""))
+            {
+                return stripped.to_owned();
             }
         }
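
A similar sketch for the prefix/suffix case above: chaining `strip_prefix` with `strip_suffix` replaces the old starts_with/ends_with checks plus manual slicing. The input here is a made-up Cargo.toml fragment, not bootstrap's real data:

fn find_version(toml: &str) -> Option<String> {
    for line in toml.lines() {
        // Both strips must succeed for the line to count as a version line.
        if let Some(stripped) =
            line.strip_prefix("version = \"").and_then(|s| s.strip_suffix('"'))
        {
            return Some(stripped.to_owned());
        }
    }
    None
}

fn main() {
    let toml = "name = \"demo\"\nversion = \"0.1.0\"\n";
    assert_eq!(find_version(toml).as_deref(), Some("0.1.0"));
}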


@@ -40,9 +40,9 @@ RUN curl https://cdn.kernel.org/pub/linux/kernel/v5.x/linux-5.6.16.tar.xz | tar
     cp linux.config linux-5.6.16/.config && \
     cd /build/linux-5.6.16 && \
     make olddefconfig && \
-    make -j$(nproc) vmlinux
-RUN cp linux-5.6.16/vmlinux /tmp
-RUN rm -rf linux-5.6.16
+    make -j$(nproc) vmlinux && \
+    cp vmlinux /tmp && \
+    rm -rf linux-5.6.16

 # Compile an instance of busybox as this provides a lightweight system and init
 # binary which we will boot into. Only trick here is configuring busybox to


@@ -119,11 +119,11 @@ elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
         exit 1
     fi
     # Transform changes the context of disabled Dockerfiles to match the enabled ones
-    tar --transform 's#^./disabled/#./#' -C $docker_dir -c . | docker \
+    tar --transform 's#disabled/#./#' -C $script_dir -c . | docker \
         build \
         --rm \
         -t rust-ci \
-        -f "$image/Dockerfile" \
+        -f "host-$(uname -m)/$image/Dockerfile" \
         -
 else
     echo Invalid image: $image

@@ -1 +1 @@
-Subproject commit 4e7c00bece1544d409312ec93467beb62b5bd0cb
+Subproject commit 84a31397b34f9d405df44f2899ff17a4828dba18

@@ -1 +1 @@
-Subproject commit 616962ad0dd80f34d8b802da038d0aed9dd691bb
+Subproject commit 94d9ea8460bcbbbfef1877b47cb930260b5849a7

@@ -1 +1 @@
-Subproject commit 04d5d5d7ba624b6f5016298451f3a63d557f3260
+Subproject commit 0ea7bc494f1289234d8800bb9185021e0ad946f0

@@ -1 +1 @@
-Subproject commit 6f94ccb48da6fa4ed0031290f21411cf789f7d5e
+Subproject commit 229c6945a26a53a751ffa4f9cb418388c00029d3


@@ -195,9 +195,9 @@ def main():
     global MAILBOX
     tests = [os.path.splitext(f)[0] for f in glob('*.rs')
              if not f.startswith('_')]
-    whitelist = sys.argv[1:]
-    if whitelist:
-        tests = [test for test in tests if test in whitelist]
+    listed = sys.argv[1:]
+    if listed:
+        tests = [test for test in tests if test in listed]
     if not tests:
         print("Error: No tests to run")
         sys.exit(1)

@@ -210,8 +210,6 @@ def main():
     mailman.daemon = True
     mailman.start()
     for test in tests:
-        if whitelist and test not in whitelist:
-            continue
         run(test)
     MAILBOX.put(None)
     mailman.join()


@@ -1084,6 +1084,108 @@ impl<T> VecDeque<T> {
         self.tail == self.head
     }

fn range_start_end<R>(&self, range: R) -> (usize, usize)
where
R: RangeBounds<usize>,
{
let len = self.len();
let start = match range.start_bound() {
Included(&n) => n,
Excluded(&n) => n + 1,
Unbounded => 0,
};
let end = match range.end_bound() {
Included(&n) => n + 1,
Excluded(&n) => n,
Unbounded => len,
};
assert!(start <= end, "lower bound was too large");
assert!(end <= len, "upper bound was too large");
(start, end)
}
/// Creates an iterator that covers the specified range in the `VecDeque`.
///
/// # Panics
///
/// Panics if the starting point is greater than the end point or if
/// the end point is greater than the length of the vector.
///
/// # Examples
///
/// ```
/// #![feature(deque_range)]
///
/// use std::collections::VecDeque;
///
/// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// let range = v.range(2..).copied().collect::<VecDeque<_>>();
/// assert_eq!(range, [3]);
///
/// // A full range covers all contents
/// let all = v.range(..);
/// assert_eq!(all.len(), 3);
/// ```
#[inline]
#[unstable(feature = "deque_range", issue = "74217")]
pub fn range<R>(&self, range: R) -> Iter<'_, T>
where
R: RangeBounds<usize>,
{
let (start, end) = self.range_start_end(range);
let tail = self.wrap_add(self.tail, start);
let head = self.wrap_add(self.tail, end);
Iter {
tail,
head,
// The shared reference we have in &self is maintained in the '_ of Iter.
ring: unsafe { self.buffer_as_slice() },
}
}
/// Creates an iterator that covers the specified mutable range in the `VecDeque`.
///
/// # Panics
///
/// Panics if the starting point is greater than the end point or if
/// the end point is greater than the length of the vector.
///
/// # Examples
///
/// ```
/// #![feature(deque_range)]
///
/// use std::collections::VecDeque;
///
/// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// for v in v.range_mut(2..) {
/// *v *= 2;
/// }
/// assert_eq!(v, vec![1, 2, 6]);
///
/// // A full range covers all contents
/// for v in v.range_mut(..) {
/// *v *= 2;
/// }
/// assert_eq!(v, vec![2, 4, 12]);
/// ```
#[inline]
#[unstable(feature = "deque_range", issue = "74217")]
pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T>
where
R: RangeBounds<usize>,
{
let (start, end) = self.range_start_end(range);
let tail = self.wrap_add(self.tail, start);
let head = self.wrap_add(self.tail, end);
IterMut {
tail,
head,
// The shared reference we have in &mut self is maintained in the '_ of IterMut.
ring: unsafe { self.buffer_as_mut_slice() },
}
}
    /// Creates a draining iterator that removes the specified range in the
    /// `VecDeque` and yields the removed items.
    ///

@@ -1129,19 +1231,7 @@
         // When finished, the remaining data will be copied back to cover the hole,
         // and the head/tail values will be restored correctly.
         //
-        let len = self.len();
-        let start = match range.start_bound() {
-            Included(&n) => n,
-            Excluded(&n) => n + 1,
-            Unbounded => 0,
-        };
-        let end = match range.end_bound() {
-            Included(&n) => n + 1,
-            Excluded(&n) => n,
-            Unbounded => len,
-        };
-        assert!(start <= end, "drain lower bound was too large");
-        assert!(end <= len, "drain upper bound was too large");
+        let (start, end) = self.range_start_end(range);

         // The deque's elements are parted into three segments:
         // * self.tail -> drain_tail
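
The new `range_start_end` helper above resolves any `RangeBounds<usize>` into a concrete half-open `(start, end)` pair and is now shared by `range`, `range_mut`, and `drain`. A small free-standing sketch of that resolution logic (a hypothetical helper, not the private method itself):

use std::ops::Bound::{Excluded, Included, Unbounded};
use std::ops::RangeBounds;

fn resolve<R: RangeBounds<usize>>(range: R, len: usize) -> (usize, usize) {
    let start = match range.start_bound() {
        Included(&n) => n,
        Excluded(&n) => n + 1,
        Unbounded => 0,
    };
    let end = match range.end_bound() {
        Included(&n) => n + 1,
        Excluded(&n) => n,
        Unbounded => len,
    };
    assert!(start <= end && end <= len, "range out of bounds");
    (start, end)
}

fn main() {
    assert_eq!(resolve(1..3, 5), (1, 3));  // half-open range is used as-is
    assert_eq!(resolve(1..=3, 5), (1, 4)); // inclusive end becomes n + 1
    assert_eq!(resolve(.., 5), (0, 5));    // unbounded covers the whole deque
}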


@@ -246,6 +246,65 @@ fn test_remove() {
     }
 }

#[test]
fn test_range() {
let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
let cap = tester.capacity();
for len in 0..=cap {
for tail in 0..=cap {
for start in 0..=len {
for end in start..=len {
tester.tail = tail;
tester.head = tail;
for i in 0..len {
tester.push_back(i);
}
// Check that we iterate over the correct values
let range: VecDeque<_> = tester.range(start..end).copied().collect();
let expected: VecDeque<_> = (start..end).collect();
assert_eq!(range, expected);
}
}
}
}
}
#[test]
fn test_range_mut() {
let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
let cap = tester.capacity();
for len in 0..=cap {
for tail in 0..=cap {
for start in 0..=len {
for end in start..=len {
tester.tail = tail;
tester.head = tail;
for i in 0..len {
tester.push_back(i);
}
let head_was = tester.head;
let tail_was = tester.tail;
// Check that we iterate over the correct values
let range: VecDeque<_> = tester.range_mut(start..end).map(|v| *v).collect();
let expected: VecDeque<_> = (start..end).collect();
assert_eq!(range, expected);
// We shouldn't have changed the capacity or made the
// head or tail out of bounds
assert_eq!(tester.capacity(), cap);
assert_eq!(tester.tail, tail_was);
assert_eq!(tester.head, head_was);
}
}
}
}
}

#[test]
fn test_drain() {
    let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);


@@ -89,6 +89,7 @@
 #![feature(const_in_array_repeat_expressions)]
 #![cfg_attr(bootstrap, feature(const_if_match))]
 #![feature(cow_is_borrowed)]
+#![feature(deque_range)]
 #![feature(dispatch_from_dyn)]
 #![feature(core_intrinsics)]
 #![feature(container_error_extra)]


@@ -280,7 +280,7 @@ impl<'a, 'f: 'a> DerefMut for VaList<'a, 'f> {
 // within a private module. Once RFC 2145 has been implemented look into
 // improving this.
 mod sealed_trait {
-    /// Trait which whitelists the allowed types to be used with [VaList::arg]
+    /// Trait which permits the allowed types to be used with [VaList::arg].
     ///
     /// [VaList::arg]: ../struct.VaList.html#method.arg
     #[unstable(


@@ -12,6 +12,7 @@ use crate::{
 mod future;
 mod into_future;
 mod pending;
+mod poll_fn;
 mod ready;

 #[stable(feature = "futures_api", since = "1.36.0")]

@@ -25,6 +26,9 @@ pub use pending::{pending, Pending};
 #[unstable(feature = "future_readiness_fns", issue = "70921")]
 pub use ready::{ready, Ready};

+#[unstable(feature = "future_poll_fn", issue = "72302")]
+pub use poll_fn::{poll_fn, PollFn};
+
 /// This type is needed because:
 ///
 /// a) Generators cannot implement `for<'a, 'b> Generator<&'a mut Context<'b>>`, so we need to pass


@ -0,0 +1,66 @@
use crate::fmt;
use crate::future::Future;
use crate::pin::Pin;
use crate::task::{Context, Poll};
/// Creates a future that wraps a function returning `Poll`.
///
/// Polling the future delegates to the wrapped function.
///
/// # Examples
///
/// ```
/// #![feature(future_poll_fn)]
/// # async fn run() {
/// use core::future::poll_fn;
/// use core::task::{Context, Poll};
///
/// fn read_line(_cx: &mut Context<'_>) -> Poll<String> {
/// Poll::Ready("Hello, World!".into())
/// }
///
/// let read_future = poll_fn(read_line);
/// assert_eq!(read_future.await, "Hello, World!".to_owned());
/// # };
/// ```
#[unstable(feature = "future_poll_fn", issue = "72302")]
pub fn poll_fn<T, F>(f: F) -> PollFn<F>
where
F: FnMut(&mut Context<'_>) -> Poll<T>,
{
PollFn { f }
}
/// A Future that wraps a function returning `Poll`.
///
/// This `struct` is created by the [`poll_fn`] function. See its
/// documentation for more.
///
/// [`poll_fn`]: fn.poll_fn.html
#[must_use = "futures do nothing unless you `.await` or poll them"]
#[unstable(feature = "future_poll_fn", issue = "72302")]
pub struct PollFn<F> {
f: F,
}
#[unstable(feature = "future_poll_fn", issue = "72302")]
impl<F> Unpin for PollFn<F> {}
#[unstable(feature = "future_poll_fn", issue = "72302")]
impl<F> fmt::Debug for PollFn<F> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("PollFn").finish()
}
}
#[unstable(feature = "future_poll_fn", issue = "72302")]
impl<T, F> Future for PollFn<F>
where
F: FnMut(&mut Context<'_>) -> Poll<T>,
{
type Output = T;
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {
(&mut self.f)(cx)
}
}
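
As a usage sketch (assuming a nightly toolchain with the `future_poll_fn` feature, and a hand-rolled no-op waker so the future can be polled without a real executor), the closure passed to `poll_fn` is `FnMut`, so it can carry state across polls:

#![feature(future_poll_fn)]
use core::future::poll_fn;
use std::future::Future;
use std::pin::Pin;
use std::task::{Context, Poll, RawWaker, RawWakerVTable, Waker};

// A waker that does nothing; just enough to drive a future by hand.
fn noop_waker() -> Waker {
    fn clone(_: *const ()) -> RawWaker {
        RawWaker::new(std::ptr::null(), &VTABLE)
    }
    fn noop(_: *const ()) {}
    static VTABLE: RawWakerVTable = RawWakerVTable::new(clone, noop, noop, noop);
    unsafe { Waker::from_raw(RawWaker::new(std::ptr::null(), &VTABLE)) }
}

fn main() {
    // Report Pending twice, then complete: the closure keeps its own state.
    let mut remaining = 2;
    let mut fut = poll_fn(move |_cx: &mut Context<'_>| {
        if remaining == 0 {
            Poll::Ready("done")
        } else {
            remaining -= 1;
            Poll::Pending
        }
    });

    let waker = noop_waker();
    let mut cx = Context::from_waker(&waker);
    // PollFn is Unpin, so Pin::new on a mutable reference is fine.
    let mut pinned = Pin::new(&mut fut);
    assert_eq!(pinned.as_mut().poll(&mut cx), Poll::Pending);
    assert_eq!(pinned.as_mut().poll(&mut cx), Poll::Pending);
    assert_eq!(pinned.as_mut().poll(&mut cx), Poll::Ready("done"));
}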


@@ -142,7 +142,7 @@ pub use crate::intrinsics::transmute;
 /// [ub]: ../../reference/behavior-considered-undefined.html
 /// [`ManuallyDrop`]: struct.ManuallyDrop.html
 #[inline]
-#[rustc_const_unstable(feature = "const_forget", issue = "69616")]
+#[rustc_const_stable(feature = "const_forget", since = "1.46.0")]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub const fn forget<T>(t: T) {
     ManuallyDrop::new(t);
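
A small illustration of what the attribute swap above enables: with `forget` const-stable, it can be evaluated in const contexts on the stated release (1.46) and later, without any feature gate:

use std::mem::forget;

// Evaluated entirely at compile time; nothing to drop for a Copy type.
const _: () = forget(0u32);

fn main() {
    // Runtime behaviour is unchanged: the String is leaked, its destructor never runs.
    forget(String::from("leaked on purpose"));
}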


@@ -40,7 +40,6 @@
 #![feature(const_raw_ptr_deref)]
 #![feature(never_type)]
 #![feature(unwrap_infallible)]
-#![feature(const_forget)]
 #![feature(option_unwrap_none)]
 #![feature(peekable_next_if)]
 #![feature(partition_point)]


@@ -263,7 +263,7 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
     // Windows we end up still needing the `uwtable` attribute even if the `-C
     // panic=abort` flag is passed.
     //
-    // You can also find more info on why Windows is whitelisted here in:
+    // You can also find more info on why Windows always requires uwtables here:
     // https://bugzilla.mozilla.org/show_bug.cgi?id=1302078
     if cx.sess().must_emit_unwind_tables() {
         attributes::emit_uwtable(llfn, true);

@@ -343,14 +343,14 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
 }

 pub fn provide(providers: &mut Providers) {
-    providers.target_features_whitelist = |tcx, cnum| {
+    providers.supported_target_features = |tcx, cnum| {
         assert_eq!(cnum, LOCAL_CRATE);
         if tcx.sess.opts.actually_rustdoc {
             // rustdoc needs to be able to document functions that use all the features, so
-            // whitelist them all
+            // provide them all.
             llvm_util::all_known_features().map(|(a, b)| (a.to_string(), b)).collect()
         } else {
-            llvm_util::target_feature_whitelist(tcx.sess)
+            llvm_util::supported_target_features(tcx.sess)
                 .iter()
                 .map(|&(a, b)| (a.to_string(), b))
                 .collect()


@@ -62,11 +62,11 @@ fn prepare_lto(
         }
     };
     let exported_symbols = cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO");
-    let mut symbol_white_list = {
-        let _timer = cgcx.prof.generic_activity("LLVM_lto_generate_symbol_white_list");
+    let mut symbols_below_threshold = {
+        let _timer = cgcx.prof.generic_activity("LLVM_lto_generate_symbols_below_threshold");
         exported_symbols[&LOCAL_CRATE].iter().filter_map(symbol_filter).collect::<Vec<CString>>()
     };
-    info!("{} symbols to preserve in this crate", symbol_white_list.len());
+    info!("{} symbols to preserve in this crate", symbols_below_threshold.len());

     // If we're performing LTO for the entire crate graph, then for each of our
     // upstream dependencies, find the corresponding rlib and load the bitcode

@@ -102,8 +102,10 @@ fn prepare_lto(
             let exported_symbols =
                 cgcx.exported_symbols.as_ref().expect("needs exported symbols for LTO");
             {
-                let _timer = cgcx.prof.generic_activity("LLVM_lto_generate_symbol_white_list");
-                symbol_white_list.extend(exported_symbols[&cnum].iter().filter_map(symbol_filter));
+                let _timer =
+                    cgcx.prof.generic_activity("LLVM_lto_generate_symbols_below_threshold");
+                symbols_below_threshold
+                    .extend(exported_symbols[&cnum].iter().filter_map(symbol_filter));
             }

             let archive = ArchiveRO::open(&path).expect("wanted an rlib");

@@ -124,7 +126,7 @@ fn prepare_lto(
         }
     }

-    Ok((symbol_white_list, upstream_modules))
+    Ok((symbols_below_threshold, upstream_modules))
 }

 fn get_bitcode_slice_from_object_data(obj: &[u8]) -> Result<&[u8], String> {

@@ -155,9 +157,17 @@ pub(crate) fn run_fat(
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
 ) -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError> {
     let diag_handler = cgcx.create_diag_handler();
-    let (symbol_white_list, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
-    let symbol_white_list = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
-    fat_lto(cgcx, &diag_handler, modules, cached_modules, upstream_modules, &symbol_white_list)
+    let (symbols_below_threshold, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
+    let symbols_below_threshold =
+        symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
+    fat_lto(
+        cgcx,
+        &diag_handler,
+        modules,
+        cached_modules,
+        upstream_modules,
+        &symbols_below_threshold,
+    )
 }

 /// Performs thin LTO by performing necessary global analysis and returning two

@@ -169,15 +179,23 @@ pub(crate) fn run_thin(
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
 ) -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
     let diag_handler = cgcx.create_diag_handler();
-    let (symbol_white_list, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
-    let symbol_white_list = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
+    let (symbols_below_threshold, upstream_modules) = prepare_lto(cgcx, &diag_handler)?;
+    let symbols_below_threshold =
+        symbols_below_threshold.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
     if cgcx.opts.cg.linker_plugin_lto.enabled() {
         unreachable!(
             "We should never reach this case if the LTO step \
             is deferred to the linker"
         );
     }
-    thin_lto(cgcx, &diag_handler, modules, upstream_modules, cached_modules, &symbol_white_list)
+    thin_lto(
+        cgcx,
+        &diag_handler,
+        modules,
+        upstream_modules,
+        cached_modules,
+        &symbols_below_threshold,
+    )
 }

 pub(crate) fn prepare_thin(module: ModuleCodegen<ModuleLlvm>) -> (String, ThinBuffer) {

@@ -192,7 +210,7 @@ fn fat_lto(
     modules: Vec<FatLTOInput<LlvmCodegenBackend>>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
     mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
-    symbol_white_list: &[*const libc::c_char],
+    symbols_below_threshold: &[*const libc::c_char],
 ) -> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError> {
     let _timer = cgcx.prof.generic_activity("LLVM_fat_lto_build_monolithic_module");
     info!("going for a fat lto");

@@ -306,14 +324,13 @@ fn fat_lto(
         drop(linker);
         save_temp_bitcode(&cgcx, &module, "lto.input");

-        // Internalize everything that *isn't* in our whitelist to help strip out
-        // more modules and such
+        // Internalize everything below threshold to help strip out more modules and such.
         unsafe {
-            let ptr = symbol_white_list.as_ptr();
+            let ptr = symbols_below_threshold.as_ptr();
             llvm::LLVMRustRunRestrictionPass(
                 llmod,
                 ptr as *const *const libc::c_char,
-                symbol_white_list.len() as libc::size_t,
+                symbols_below_threshold.len() as libc::size_t,
             );
             save_temp_bitcode(&cgcx, &module, "lto.after-restriction");
         }

@@ -395,7 +412,7 @@ fn thin_lto(
     modules: Vec<(String, ThinBuffer)>,
     serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
     cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
-    symbol_white_list: &[*const libc::c_char],
+    symbols_below_threshold: &[*const libc::c_char],
 ) -> Result<(Vec<LtoModuleCodegen<LlvmCodegenBackend>>, Vec<WorkProduct>), FatalError> {
     let _timer = cgcx.prof.generic_activity("LLVM_thin_lto_global_analysis");
     unsafe {

@@ -463,8 +480,8 @@ fn thin_lto(
         let data = llvm::LLVMRustCreateThinLTOData(
             thin_modules.as_ptr(),
             thin_modules.len() as u32,
-            symbol_white_list.as_ptr(),
-            symbol_white_list.len() as u32,
+            symbols_below_threshold.as_ptr(),
+            symbols_below_threshold.len() as u32,
         )
         .ok_or_else(|| write::llvm_err(&diag_handler, "failed to prepare thin LTO context"))?;


@@ -188,14 +188,19 @@ pub unsafe fn create_module(
         llvm::LLVMRustAddModuleFlag(llmod, avoid_plt, 1);
     }

-    // Set module flags to enable Windows Control Flow Guard (/guard:cf) metadata
-    // only (`cfguard=1`) or metadata and checks (`cfguard=2`).
-    match sess.opts.debugging_opts.control_flow_guard {
-        CFGuard::Disabled => {}
-        CFGuard::NoChecks => {
-            llvm::LLVMRustAddModuleFlag(llmod, "cfguard\0".as_ptr() as *const _, 1)
-        }
-        CFGuard::Checks => llvm::LLVMRustAddModuleFlag(llmod, "cfguard\0".as_ptr() as *const _, 2),
-    }
+    // Control Flow Guard is currently only supported by the MSVC linker on Windows.
+    if sess.target.target.options.is_like_msvc {
+        match sess.opts.debugging_opts.control_flow_guard {
+            CFGuard::Disabled => {}
+            CFGuard::NoChecks => {
+                // Set `cfguard=1` module flag to emit metadata only.
+                llvm::LLVMRustAddModuleFlag(llmod, "cfguard\0".as_ptr() as *const _, 1)
+            }
+            CFGuard::Checks => {
+                // Set `cfguard=2` module flag to emit metadata and checks.
+                llvm::LLVMRustAddModuleFlag(llmod, "cfguard\0".as_ptr() as *const _, 2)
+            }
+        }
+    }

     llmod


@@ -139,7 +139,7 @@ pub fn time_trace_profiler_finish(file_name: &str) {
 // to LLVM or the feature detection code will walk past the end of the feature
 // array, leading to crashes.

-const ARM_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const ARM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("aclass", Some(sym::arm_target_feature)),
     ("mclass", Some(sym::arm_target_feature)),
     ("rclass", Some(sym::arm_target_feature)),

@@ -162,7 +162,7 @@ const ARM_WHITELIST: &[(&str, Option<Symbol>)] = &[
     ("thumb-mode", Some(sym::arm_target_feature)),
 ];

-const AARCH64_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const AARCH64_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("fp", Some(sym::aarch64_target_feature)),
     ("neon", Some(sym::aarch64_target_feature)),
     ("sve", Some(sym::aarch64_target_feature)),

@@ -180,7 +180,7 @@ const AARCH64_WHITELIST: &[(&str, Option<Symbol>)] = &[
     ("v8.3a", Some(sym::aarch64_target_feature)),
 ];

-const X86_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const X86_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("adx", Some(sym::adx_target_feature)),
     ("aes", None),
     ("avx", None),

@@ -224,12 +224,12 @@ const X86_WHITELIST: &[(&str, Option<Symbol>)] = &[
     ("xsaves", None),
 ];

-const HEXAGON_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const HEXAGON_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("hvx", Some(sym::hexagon_target_feature)),
     ("hvx-length128b", Some(sym::hexagon_target_feature)),
 ];

-const POWERPC_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const POWERPC_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("altivec", Some(sym::powerpc_target_feature)),
     ("power8-altivec", Some(sym::powerpc_target_feature)),
     ("power9-altivec", Some(sym::powerpc_target_feature)),

@@ -238,10 +238,10 @@ const POWERPC_WHITELIST: &[(&str, Option<Symbol>)] = &[
     ("vsx", Some(sym::powerpc_target_feature)),
 ];

-const MIPS_WHITELIST: &[(&str, Option<Symbol>)] =
+const MIPS_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] =
     &[("fp64", Some(sym::mips_target_feature)), ("msa", Some(sym::mips_target_feature))];

-const RISCV_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const RISCV_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("m", Some(sym::riscv_target_feature)),
     ("a", Some(sym::riscv_target_feature)),
     ("c", Some(sym::riscv_target_feature)),

@@ -250,7 +250,7 @@ const RISCV_WHITELIST: &[(&str, Option<Symbol>)] = &[
     ("e", Some(sym::riscv_target_feature)),
 ];

-const WASM_WHITELIST: &[(&str, Option<Symbol>)] = &[
+const WASM_ALLOWED_FEATURES: &[(&str, Option<Symbol>)] = &[
     ("simd128", Some(sym::wasm_target_feature)),
     ("atomics", Some(sym::wasm_target_feature)),
     ("nontrapping-fptoint", Some(sym::wasm_target_feature)),

@@ -259,19 +259,18 @@ const WASM_WHITELIST: &[(&str, Option<Symbol>)] = &[
 /// When rustdoc is running, provide a list of all known features so that all their respective
 /// primitives may be documented.
 ///
-/// IMPORTANT: If you're adding another whitelist to the above lists, make sure to add it to this
-/// iterator!
+/// IMPORTANT: If you're adding another feature list above, make sure to add it to this iterator!
 pub fn all_known_features() -> impl Iterator<Item = (&'static str, Option<Symbol>)> {
-    ARM_WHITELIST
-        .iter()
+    std::iter::empty()
+        .chain(ARM_ALLOWED_FEATURES.iter())
+        .chain(AARCH64_ALLOWED_FEATURES.iter())
+        .chain(X86_ALLOWED_FEATURES.iter())
+        .chain(HEXAGON_ALLOWED_FEATURES.iter())
+        .chain(POWERPC_ALLOWED_FEATURES.iter())
+        .chain(MIPS_ALLOWED_FEATURES.iter())
+        .chain(RISCV_ALLOWED_FEATURES.iter())
+        .chain(WASM_ALLOWED_FEATURES.iter())
         .cloned()
-        .chain(AARCH64_WHITELIST.iter().cloned())
-        .chain(X86_WHITELIST.iter().cloned())
-        .chain(HEXAGON_WHITELIST.iter().cloned())
-        .chain(POWERPC_WHITELIST.iter().cloned())
-        .chain(MIPS_WHITELIST.iter().cloned())
-        .chain(RISCV_WHITELIST.iter().cloned())
-        .chain(WASM_WHITELIST.iter().cloned())
 }

@@ -289,7 +288,7 @@ pub fn to_llvm_feature<'a>(sess: &Session, s: &'a str) -> &'a str {
 pub fn target_features(sess: &Session) -> Vec<Symbol> {
     let target_machine = create_informational_target_machine(sess);
-    target_feature_whitelist(sess)
+    supported_target_features(sess)
         .iter()
         .filter_map(|&(feature, gate)| {
             if UnstableFeatures::from_environment().is_nightly_build() || gate.is_none() {

@@ -307,16 +306,16 @@ pub fn target_features(sess: &Session) -> Vec<Symbol> {
         .collect()
 }

-pub fn target_feature_whitelist(sess: &Session) -> &'static [(&'static str, Option<Symbol>)] {
+pub fn supported_target_features(sess: &Session) -> &'static [(&'static str, Option<Symbol>)] {
     match &*sess.target.target.arch {
-        "arm" => ARM_WHITELIST,
-        "aarch64" => AARCH64_WHITELIST,
-        "x86" | "x86_64" => X86_WHITELIST,
-        "hexagon" => HEXAGON_WHITELIST,
-        "mips" | "mips64" => MIPS_WHITELIST,
-        "powerpc" | "powerpc64" => POWERPC_WHITELIST,
-        "riscv32" | "riscv64" => RISCV_WHITELIST,
-        "wasm32" => WASM_WHITELIST,
+        "arm" => ARM_ALLOWED_FEATURES,
+        "aarch64" => AARCH64_ALLOWED_FEATURES,
+        "x86" | "x86_64" => X86_ALLOWED_FEATURES,
+        "hexagon" => HEXAGON_ALLOWED_FEATURES,
+        "mips" | "mips64" => MIPS_ALLOWED_FEATURES,
+        "powerpc" | "powerpc64" => POWERPC_ALLOWED_FEATURES,
+        "riscv32" | "riscv64" => RISCV_ALLOWED_FEATURES,
+        "wasm32" => WASM_ALLOWED_FEATURES,
         _ => &[],
     }
 }


@@ -475,9 +475,7 @@ impl<'a> Linker for GccLinker<'a> {
         self.cmd.arg("__llvm_profile_runtime");
     }

-    fn control_flow_guard(&mut self) {
-        self.sess.warn("Windows Control Flow Guard is not supported by this linker.");
-    }
+    fn control_flow_guard(&mut self) {}

     fn debuginfo(&mut self, strip: Strip) {
         match strip {

@@ -959,9 +957,7 @@ impl<'a> Linker for EmLinker<'a> {
         // noop, but maybe we need something like the gnu linker?
     }

-    fn control_flow_guard(&mut self) {
-        self.sess.warn("Windows Control Flow Guard is not supported by this linker.");
-    }
+    fn control_flow_guard(&mut self) {}

     fn debuginfo(&mut self, _strip: Strip) {
         // Preserve names or generate source maps depending on debug info

@@ -1163,9 +1159,7 @@ impl<'a> Linker for WasmLd<'a> {
         }
     }

-    fn control_flow_guard(&mut self) {
-        self.sess.warn("Windows Control Flow Guard is not supported by this linker.");
-    }
+    fn control_flow_guard(&mut self) {}

     fn no_crt_objects(&mut self) {}

@@ -1176,10 +1170,10 @@ impl<'a> Linker for WasmLd<'a> {
             self.cmd.arg("--export").arg(&sym);
         }

-        // LLD will hide these otherwise-internal symbols since our `--export`
-        // list above is a whitelist of what to export. Various bits and pieces
-        // of tooling use this, so be sure these symbols make their way out of
-        // the linker as well.
+        // LLD will hide these otherwise-internal symbols since it only exports
+        // symbols explicitly passed via the `--export` flags above and hides all
+        // others. Various bits and pieces of tooling use this, so be sure these
+        // symbols make their way out of the linker as well.
         self.cmd.arg("--export=__heap_base");
         self.cmd.arg("--export=__data_end");
     }

@@ -1330,9 +1324,7 @@ impl<'a> Linker for PtxLinker<'a> {
     fn no_default_libraries(&mut self) {}

-    fn control_flow_guard(&mut self) {
-        self.sess.warn("Windows Control Flow Guard is not supported by this linker.");
-    }
+    fn control_flow_guard(&mut self) {}

     fn export_symbols(&mut self, _tmpdir: &Path, _crate_type: CrateType) {}


@@ -842,10 +842,9 @@ impl CrateInfo {
             }
         }

-        // No need to look for lang items that are whitelisted and don't
-        // actually need to exist.
+        // No need to look for lang items that don't actually need to exist.
         let missing =
-            missing.iter().cloned().filter(|&l| !lang_items::whitelisted(tcx, l)).collect();
+            missing.iter().cloned().filter(|&l| lang_items::required(tcx, l)).collect();
         info.missing_lang_items.insert(cnum, missing);
     }


@@ -735,7 +735,7 @@ pub struct SyntaxExtension {
     pub kind: SyntaxExtensionKind,
     /// Span of the macro definition.
     pub span: Span,
-    /// Whitelist of unstable features that are treated as stable inside this macro.
+    /// List of unstable features that are treated as stable inside this macro.
     pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
     /// Suppresses the `unsafe_code` lint for code produced by this macro.
     pub allow_internal_unsafe: bool,


@@ -47,7 +47,7 @@ pub enum AttributeType {
     /// Builtin attribute that may not be consumed by the compiler
     /// before the unused_attribute check. These attributes
     /// will be ignored by the unused_attribute lint
-    Whitelisted,
+    AssumedUsed,

     /// Builtin attribute that is only allowed at the crate level
     CrateLevel,
@ -202,7 +202,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
ungated!(allow, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)), ungated!(allow, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(forbid, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)), ungated!(forbid, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(deny, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)), ungated!(deny, Normal, template!(List: r#"lint1, lint2, ..., /*opt*/ reason = "...""#)),
ungated!(must_use, Whitelisted, template!(Word, NameValueStr: "reason")), ungated!(must_use, AssumedUsed, template!(Word, NameValueStr: "reason")),
// FIXME(#14407) // FIXME(#14407)
ungated!( ungated!(
deprecated, Normal, deprecated, Normal,
@ -220,16 +220,16 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// ABI, linking, symbols, and FFI // ABI, linking, symbols, and FFI
ungated!( ungated!(
link, Whitelisted, link, AssumedUsed,
template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...", /*opt*/ wasm_import_module = "...""#), template!(List: r#"name = "...", /*opt*/ kind = "dylib|static|...", /*opt*/ wasm_import_module = "...""#),
), ),
ungated!(link_name, Whitelisted, template!(NameValueStr: "name")), ungated!(link_name, AssumedUsed, template!(NameValueStr: "name")),
ungated!(no_link, Normal, template!(Word)), ungated!(no_link, Normal, template!(Word)),
ungated!(repr, Normal, template!(List: "C")), ungated!(repr, Normal, template!(List: "C")),
ungated!(export_name, Whitelisted, template!(NameValueStr: "name")), ungated!(export_name, AssumedUsed, template!(NameValueStr: "name")),
ungated!(link_section, Whitelisted, template!(NameValueStr: "name")), ungated!(link_section, AssumedUsed, template!(NameValueStr: "name")),
ungated!(no_mangle, Whitelisted, template!(Word)), ungated!(no_mangle, AssumedUsed, template!(Word)),
ungated!(used, Whitelisted, template!(Word)), ungated!(used, AssumedUsed, template!(Word)),
// Limits: // Limits:
ungated!(recursion_limit, CrateLevel, template!(NameValueStr: "N")), ungated!(recursion_limit, CrateLevel, template!(NameValueStr: "N")),
@ -249,40 +249,40 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
ungated!(path, Normal, template!(NameValueStr: "file")), ungated!(path, Normal, template!(NameValueStr: "file")),
ungated!(no_std, CrateLevel, template!(Word)), ungated!(no_std, CrateLevel, template!(Word)),
ungated!(no_implicit_prelude, Normal, template!(Word)), ungated!(no_implicit_prelude, Normal, template!(Word)),
ungated!(non_exhaustive, Whitelisted, template!(Word)), ungated!(non_exhaustive, AssumedUsed, template!(Word)),
// Runtime // Runtime
ungated!(windows_subsystem, Whitelisted, template!(NameValueStr: "windows|console")), ungated!(windows_subsystem, AssumedUsed, template!(NameValueStr: "windows|console")),
ungated!(panic_handler, Normal, template!(Word)), // RFC 2070 ungated!(panic_handler, Normal, template!(Word)), // RFC 2070
// Code generation: // Code generation:
ungated!(inline, Whitelisted, template!(Word, List: "always|never")), ungated!(inline, AssumedUsed, template!(Word, List: "always|never")),
ungated!(cold, Whitelisted, template!(Word)), ungated!(cold, AssumedUsed, template!(Word)),
ungated!(no_builtins, Whitelisted, template!(Word)), ungated!(no_builtins, AssumedUsed, template!(Word)),
ungated!(target_feature, Whitelisted, template!(List: r#"enable = "name""#)), ungated!(target_feature, AssumedUsed, template!(List: r#"enable = "name""#)),
ungated!(track_caller, Whitelisted, template!(Word)), ungated!(track_caller, AssumedUsed, template!(Word)),
gated!( gated!(
no_sanitize, Whitelisted, no_sanitize, AssumedUsed,
template!(List: "address, memory, thread"), template!(List: "address, memory, thread"),
experimental!(no_sanitize) experimental!(no_sanitize)
), ),
// FIXME: #14408 whitelist docs since rustdoc looks at them // FIXME: #14408 assume docs are used since rustdoc looks at them.
ungated!(doc, Whitelisted, template!(List: "hidden|inline|...", NameValueStr: "string")), ungated!(doc, AssumedUsed, template!(List: "hidden|inline|...", NameValueStr: "string")),
// ========================================================================== // ==========================================================================
// Unstable attributes: // Unstable attributes:
// ========================================================================== // ==========================================================================
// Linking: // Linking:
gated!(naked, Whitelisted, template!(Word), naked_functions, experimental!(naked)), gated!(naked, AssumedUsed, template!(Word), naked_functions, experimental!(naked)),
gated!( gated!(
link_args, Normal, template!(NameValueStr: "args"), link_args, Normal, template!(NameValueStr: "args"),
"the `link_args` attribute is experimental and not portable across platforms, \ "the `link_args` attribute is experimental and not portable across platforms, \
it is recommended to use `#[link(name = \"foo\")] instead", it is recommended to use `#[link(name = \"foo\")] instead",
), ),
gated!( gated!(
link_ordinal, Whitelisted, template!(List: "ordinal"), raw_dylib, link_ordinal, AssumedUsed, template!(List: "ordinal"), raw_dylib,
experimental!(link_ordinal) experimental!(link_ordinal)
), ),
@ -321,19 +321,19 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// RFC #1268 // RFC #1268
gated!(marker, Normal, template!(Word), marker_trait_attr, experimental!(marker)), gated!(marker, Normal, template!(Word), marker_trait_attr, experimental!(marker)),
gated!( gated!(
thread_local, Whitelisted, template!(Word), thread_local, AssumedUsed, template!(Word),
"`#[thread_local]` is an experimental feature, and does not currently handle destructors", "`#[thread_local]` is an experimental feature, and does not currently handle destructors",
), ),
gated!(no_core, CrateLevel, template!(Word), experimental!(no_core)), gated!(no_core, CrateLevel, template!(Word), experimental!(no_core)),
// RFC 2412 // RFC 2412
gated!( gated!(
optimize, Whitelisted, template!(List: "size|speed"), optimize_attribute, optimize, AssumedUsed, template!(List: "size|speed"), optimize_attribute,
experimental!(optimize), experimental!(optimize),
), ),
gated!(ffi_returns_twice, Whitelisted, template!(Word), experimental!(ffi_returns_twice)), gated!(ffi_returns_twice, AssumedUsed, template!(Word), experimental!(ffi_returns_twice)),
gated!(ffi_pure, Whitelisted, template!(Word), experimental!(ffi_pure)), gated!(ffi_pure, AssumedUsed, template!(Word), experimental!(ffi_pure)),
gated!(ffi_const, Whitelisted, template!(Word), experimental!(ffi_const)), gated!(ffi_const, AssumedUsed, template!(Word), experimental!(ffi_const)),
gated!( gated!(
register_attr, CrateLevel, template!(List: "attr1, attr2, ..."), register_attr, CrateLevel, template!(List: "attr1, attr2, ..."),
experimental!(register_attr), experimental!(register_attr),
@ -351,22 +351,22 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// FIXME(#14407) -- only looked at on-demand so we can't // FIXME(#14407) -- only looked at on-demand so we can't
// guarantee they'll have already been checked. // guarantee they'll have already been checked.
ungated!( ungated!(
rustc_deprecated, Whitelisted, rustc_deprecated, AssumedUsed,
template!(List: r#"since = "version", reason = "...""#) template!(List: r#"since = "version", reason = "...""#)
), ),
// FIXME(#14407) // FIXME(#14407)
ungated!(stable, Whitelisted, template!(List: r#"feature = "name", since = "version""#)), ungated!(stable, AssumedUsed, template!(List: r#"feature = "name", since = "version""#)),
// FIXME(#14407) // FIXME(#14407)
ungated!( ungated!(
unstable, Whitelisted, unstable, AssumedUsed,
template!(List: r#"feature = "name", reason = "...", issue = "N""#), template!(List: r#"feature = "name", reason = "...", issue = "N""#),
), ),
// FIXME(#14407) // FIXME(#14407)
ungated!(rustc_const_unstable, Whitelisted, template!(List: r#"feature = "name""#)), ungated!(rustc_const_unstable, AssumedUsed, template!(List: r#"feature = "name""#)),
// FIXME(#14407) // FIXME(#14407)
ungated!(rustc_const_stable, Whitelisted, template!(List: r#"feature = "name""#)), ungated!(rustc_const_stable, AssumedUsed, template!(List: r#"feature = "name""#)),
gated!( gated!(
allow_internal_unstable, Whitelisted, template!(Word, List: "feat1, feat2, ..."), allow_internal_unstable, AssumedUsed, template!(Word, List: "feat1, feat2, ..."),
"allow_internal_unstable side-steps feature gating and stability checks", "allow_internal_unstable side-steps feature gating and stability checks",
), ),
gated!( gated!(
@ -378,7 +378,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// Internal attributes: Type system related: // Internal attributes: Type system related:
// ========================================================================== // ==========================================================================
gated!(fundamental, Whitelisted, template!(Word), experimental!(fundamental)), gated!(fundamental, AssumedUsed, template!(Word), experimental!(fundamental)),
gated!( gated!(
may_dangle, Normal, template!(Word), dropck_eyepatch, may_dangle, Normal, template!(Word), dropck_eyepatch,
"`may_dangle` has unstable semantics and may be removed in the future", "`may_dangle` has unstable semantics and may be removed in the future",
@ -388,30 +388,30 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// Internal attributes: Runtime related: // Internal attributes: Runtime related:
// ========================================================================== // ==========================================================================
rustc_attr!(rustc_allocator, Whitelisted, template!(Word), IMPL_DETAIL), rustc_attr!(rustc_allocator, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_allocator_nounwind, Whitelisted, template!(Word), IMPL_DETAIL), rustc_attr!(rustc_allocator_nounwind, AssumedUsed, template!(Word), IMPL_DETAIL),
gated!(alloc_error_handler, Normal, template!(Word), experimental!(alloc_error_handler)), gated!(alloc_error_handler, Normal, template!(Word), experimental!(alloc_error_handler)),
gated!( gated!(
default_lib_allocator, Whitelisted, template!(Word), allocator_internals, default_lib_allocator, AssumedUsed, template!(Word), allocator_internals,
experimental!(default_lib_allocator), experimental!(default_lib_allocator),
), ),
gated!( gated!(
needs_allocator, Normal, template!(Word), allocator_internals, needs_allocator, Normal, template!(Word), allocator_internals,
experimental!(needs_allocator), experimental!(needs_allocator),
), ),
gated!(panic_runtime, Whitelisted, template!(Word), experimental!(panic_runtime)), gated!(panic_runtime, AssumedUsed, template!(Word), experimental!(panic_runtime)),
gated!(needs_panic_runtime, Whitelisted, template!(Word), experimental!(needs_panic_runtime)), gated!(needs_panic_runtime, AssumedUsed, template!(Word), experimental!(needs_panic_runtime)),
gated!( gated!(
unwind, Whitelisted, template!(List: "allowed|aborts"), unwind_attributes, unwind, AssumedUsed, template!(List: "allowed|aborts"), unwind_attributes,
experimental!(unwind), experimental!(unwind),
), ),
gated!( gated!(
compiler_builtins, Whitelisted, template!(Word), compiler_builtins, AssumedUsed, template!(Word),
"the `#[compiler_builtins]` attribute is used to identify the `compiler_builtins` crate \ "the `#[compiler_builtins]` attribute is used to identify the `compiler_builtins` crate \
which contains compiler-rt intrinsics and will never be stable", which contains compiler-rt intrinsics and will never be stable",
), ),
gated!( gated!(
profiler_runtime, Whitelisted, template!(Word), profiler_runtime, AssumedUsed, template!(Word),
"the `#[profiler_runtime]` attribute is used to identify the `profiler_builtins` crate \ "the `#[profiler_runtime]` attribute is used to identify the `profiler_builtins` crate \
which contains the profiler runtime and will never be stable", which contains the profiler runtime and will never be stable",
), ),
@ -421,19 +421,19 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// ========================================================================== // ==========================================================================
gated!( gated!(
linkage, Whitelisted, template!(NameValueStr: "external|internal|..."), linkage, AssumedUsed, template!(NameValueStr: "external|internal|..."),
"the `linkage` attribute is experimental and not portable across platforms", "the `linkage` attribute is experimental and not portable across platforms",
), ),
rustc_attr!(rustc_std_internal_symbol, Whitelisted, template!(Word), INTERNAL_UNSTABLE), rustc_attr!(rustc_std_internal_symbol, AssumedUsed, template!(Word), INTERNAL_UNSTABLE),
// ========================================================================== // ==========================================================================
// Internal attributes, Macro related: // Internal attributes, Macro related:
// ========================================================================== // ==========================================================================
rustc_attr!(rustc_builtin_macro, Whitelisted, template!(Word), IMPL_DETAIL), rustc_attr!(rustc_builtin_macro, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_proc_macro_decls, Normal, template!(Word), INTERNAL_UNSTABLE), rustc_attr!(rustc_proc_macro_decls, Normal, template!(Word), INTERNAL_UNSTABLE),
rustc_attr!( rustc_attr!(
rustc_macro_transparency, Whitelisted, rustc_macro_transparency, AssumedUsed,
template!(NameValueStr: "transparent|semitransparent|opaque"), template!(NameValueStr: "transparent|semitransparent|opaque"),
"used internally for testing macro hygiene", "used internally for testing macro hygiene",
), ),
@ -443,40 +443,40 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
// ========================================================================== // ==========================================================================
rustc_attr!( rustc_attr!(
rustc_on_unimplemented, Whitelisted, rustc_on_unimplemented, AssumedUsed,
template!( template!(
List: r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#, List: r#"/*opt*/ message = "...", /*opt*/ label = "...", /*opt*/ note = "...""#,
NameValueStr: "message" NameValueStr: "message"
), ),
INTERNAL_UNSTABLE INTERNAL_UNSTABLE
), ),
// Whitelists "identity-like" conversion methods to suggest on type mismatch. // Enumerates "identity-like" conversion methods to suggest on type mismatch.
rustc_attr!(rustc_conversion_suggestion, Whitelisted, template!(Word), INTERNAL_UNSTABLE), rustc_attr!(rustc_conversion_suggestion, AssumedUsed, template!(Word), INTERNAL_UNSTABLE),
// ========================================================================== // ==========================================================================
// Internal attributes, Const related: // Internal attributes, Const related:
// ========================================================================== // ==========================================================================
rustc_attr!(rustc_promotable, Whitelisted, template!(Word), IMPL_DETAIL), rustc_attr!(rustc_promotable, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_allow_const_fn_ptr, Whitelisted, template!(Word), IMPL_DETAIL), rustc_attr!(rustc_allow_const_fn_ptr, AssumedUsed, template!(Word), IMPL_DETAIL),
rustc_attr!(rustc_args_required_const, Whitelisted, template!(List: "N"), INTERNAL_UNSTABLE), rustc_attr!(rustc_args_required_const, AssumedUsed, template!(List: "N"), INTERNAL_UNSTABLE),
// ========================================================================== // ==========================================================================
// Internal attributes, Layout related: // Internal attributes, Layout related:
// ========================================================================== // ==========================================================================
rustc_attr!( rustc_attr!(
rustc_layout_scalar_valid_range_start, Whitelisted, template!(List: "value"), rustc_layout_scalar_valid_range_start, AssumedUsed, template!(List: "value"),
"the `#[rustc_layout_scalar_valid_range_start]` attribute is just used to enable \ "the `#[rustc_layout_scalar_valid_range_start]` attribute is just used to enable \
niche optimizations in libcore and will never be stable", niche optimizations in libcore and will never be stable",
), ),
rustc_attr!( rustc_attr!(
rustc_layout_scalar_valid_range_end, Whitelisted, template!(List: "value"), rustc_layout_scalar_valid_range_end, AssumedUsed, template!(List: "value"),
"the `#[rustc_layout_scalar_valid_range_end]` attribute is just used to enable \ "the `#[rustc_layout_scalar_valid_range_end]` attribute is just used to enable \
niche optimizations in libcore and will never be stable", niche optimizations in libcore and will never be stable",
), ),
rustc_attr!( rustc_attr!(
rustc_nonnull_optimization_guaranteed, Whitelisted, template!(Word), rustc_nonnull_optimization_guaranteed, AssumedUsed, template!(Word),
"the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable \ "the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable \
niche optimizations in libcore and will never be stable", niche optimizations in libcore and will never be stable",
), ),
@ -501,7 +501,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
), ),
gated!( gated!(
// Used in resolve: // Used in resolve:
prelude_import, Whitelisted, template!(Word), prelude_import, AssumedUsed, template!(Word),
"`#[prelude_import]` is for use by rustc only", "`#[prelude_import]` is for use by rustc only",
), ),
gated!( gated!(
@ -509,7 +509,7 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
"unboxed_closures are still evolving", "unboxed_closures are still evolving",
), ),
rustc_attr!( rustc_attr!(
rustc_inherit_overflow_checks, Whitelisted, template!(Word), rustc_inherit_overflow_checks, AssumedUsed, template!(Word),
"the `#[rustc_inherit_overflow_checks]` attribute is just used to control \ "the `#[rustc_inherit_overflow_checks]` attribute is just used to control \
overflow checking behavior of several libcore functions that are inlined \ overflow checking behavior of several libcore functions that are inlined \
across crates and will never be stable", across crates and will never be stable",
@ -540,42 +540,42 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ...")), rustc_attr!(TEST, rustc_layout, Normal, template!(List: "field1, field2, ...")),
rustc_attr!(TEST, rustc_regions, Normal, template!(Word)), rustc_attr!(TEST, rustc_regions, Normal, template!(Word)),
rustc_attr!( rustc_attr!(
TEST, rustc_error, Whitelisted, TEST, rustc_error, AssumedUsed,
template!(Word, List: "delay_span_bug_from_inside_query") template!(Word, List: "delay_span_bug_from_inside_query")
), ),
rustc_attr!(TEST, rustc_dump_user_substs, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_dump_user_substs, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_if_this_changed, Whitelisted, template!(Word, List: "DepNode")), rustc_attr!(TEST, rustc_if_this_changed, AssumedUsed, template!(Word, List: "DepNode")),
rustc_attr!(TEST, rustc_then_this_would_need, Whitelisted, template!(List: "DepNode")), rustc_attr!(TEST, rustc_then_this_would_need, AssumedUsed, template!(List: "DepNode")),
rustc_attr!( rustc_attr!(
TEST, rustc_dirty, Whitelisted, TEST, rustc_dirty, AssumedUsed,
template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#), template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#),
), ),
rustc_attr!( rustc_attr!(
TEST, rustc_clean, Whitelisted, TEST, rustc_clean, AssumedUsed,
template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#), template!(List: r#"cfg = "...", /*opt*/ label = "...", /*opt*/ except = "...""#),
), ),
rustc_attr!( rustc_attr!(
TEST, rustc_partition_reused, Whitelisted, TEST, rustc_partition_reused, AssumedUsed,
template!(List: r#"cfg = "...", module = "...""#), template!(List: r#"cfg = "...", module = "...""#),
), ),
rustc_attr!( rustc_attr!(
TEST, rustc_partition_codegened, Whitelisted, TEST, rustc_partition_codegened, AssumedUsed,
template!(List: r#"cfg = "...", module = "...""#), template!(List: r#"cfg = "...", module = "...""#),
), ),
rustc_attr!( rustc_attr!(
TEST, rustc_expected_cgu_reuse, Whitelisted, TEST, rustc_expected_cgu_reuse, AssumedUsed,
template!(List: r#"cfg = "...", module = "...", kind = "...""#), template!(List: r#"cfg = "...", module = "...", kind = "...""#),
), ),
rustc_attr!(TEST, rustc_synthetic, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_synthetic, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_symbol_name, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_symbol_name, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_def_path, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_def_path, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_mir, Whitelisted, template!(List: "arg1, arg2, ...")), rustc_attr!(TEST, rustc_mir, AssumedUsed, template!(List: "arg1, arg2, ...")),
rustc_attr!(TEST, rustc_dump_program_clauses, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_dump_program_clauses, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_dump_env_program_clauses, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_dump_env_program_clauses, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_object_lifetime_default, Whitelisted, template!(Word)), rustc_attr!(TEST, rustc_object_lifetime_default, AssumedUsed, template!(Word)),
rustc_attr!(TEST, rustc_dummy, Normal, template!(Word /* doesn't matter*/)), rustc_attr!(TEST, rustc_dummy, Normal, template!(Word /* doesn't matter*/)),
gated!( gated!(
omit_gdb_pretty_printer_section, Whitelisted, template!(Word), omit_gdb_pretty_printer_section, AssumedUsed, template!(Word),
"the `#[omit_gdb_pretty_printer_section]` attribute is just used for the Rust test suite", "the `#[omit_gdb_pretty_printer_section]` attribute is just used for the Rust test suite",
), ),
]; ];

View File

@ -168,7 +168,7 @@ pub fn check_dirty_clean_annotations(tcx: TyCtxt<'_>) {
// Note that we cannot use the existing "unused attribute"-infrastructure // Note that we cannot use the existing "unused attribute"-infrastructure
// here, since that is running before codegen. This is also the reason why // here, since that is running before codegen. This is also the reason why
// all codegen-specific attributes are `Whitelisted` in rustc_ast::feature_gate. // all codegen-specific attributes are `AssumedUsed` in rustc_ast::feature_gate.
all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs); all_attrs.report_unchecked_attrs(&dirty_clean_visitor.checked_attrs);
}) })
} }

View File

@ -159,10 +159,7 @@ pub struct Config {
pub registry: Registry, pub registry: Registry,
} }
pub fn run_compiler_in_existing_thread_pool<R>( pub fn create_compiler_and_run<R>(config: Config, f: impl FnOnce(&Compiler) -> R) -> R {
config: Config,
f: impl FnOnce(&Compiler) -> R,
) -> R {
let registry = &config.registry; let registry = &config.registry;
let (sess, codegen_backend) = util::create_session( let (sess, codegen_backend) = util::create_session(
config.opts, config.opts,
@ -204,17 +201,20 @@ pub fn run_compiler_in_existing_thread_pool<R>(
pub fn run_compiler<R: Send>(mut config: Config, f: impl FnOnce(&Compiler) -> R + Send) -> R { pub fn run_compiler<R: Send>(mut config: Config, f: impl FnOnce(&Compiler) -> R + Send) -> R {
log::trace!("run_compiler"); log::trace!("run_compiler");
let stderr = config.stderr.take(); let stderr = config.stderr.take();
util::spawn_thread_pool( util::setup_callbacks_and_run_in_thread_pool_with_globals(
config.opts.edition, config.opts.edition,
config.opts.debugging_opts.threads, config.opts.debugging_opts.threads,
&stderr, &stderr,
|| run_compiler_in_existing_thread_pool(config, f), || create_compiler_and_run(config, f),
) )
} }
pub fn default_thread_pool<R: Send>(edition: edition::Edition, f: impl FnOnce() -> R + Send) -> R { pub fn setup_callbacks_and_run_in_default_thread_pool_with_globals<R: Send>(
edition: edition::Edition,
f: impl FnOnce() -> R + Send,
) -> R {
// the 1 here is duplicating code in config.opts.debugging_opts.threads // the 1 here is duplicating code in config.opts.debugging_opts.threads
// which also defaults to 1; it ultimately doesn't matter as the default // which also defaults to 1; it ultimately doesn't matter as the default
// isn't threaded, and just ignores this parameter // isn't threaded, and just ignores this parameter
util::spawn_thread_pool(edition, 1, &None, f) util::setup_callbacks_and_run_in_thread_pool_with_globals(edition, 1, &None, f)
} }

View File

@ -38,8 +38,8 @@ use std::{panic, thread};
/// Adds `target_feature = "..."` cfgs for a variety of platform /// Adds `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.). /// specific features (SSE, NEON etc.).
/// ///
/// This is performed by checking whether a whitelisted set of /// This is performed by checking whether a set of permitted features
/// features is available on the target machine, by querying LLVM. /// is available on the target machine, by querying LLVM.
pub fn add_configuration( pub fn add_configuration(
cfg: &mut CrateConfig, cfg: &mut CrateConfig,
sess: &mut Session, sess: &mut Session,
@ -102,6 +102,8 @@ impl Write for Sink {
} }
} }
/// Like a `thread::Builder::spawn` followed by a `join()`, but avoids the need
/// for `'static` bounds.
#[cfg(not(parallel_compiler))] #[cfg(not(parallel_compiler))]
pub fn scoped_thread<F: FnOnce() -> R + Send, R: Send>(cfg: thread::Builder, f: F) -> R { pub fn scoped_thread<F: FnOnce() -> R + Send, R: Send>(cfg: thread::Builder, f: F) -> R {
struct Ptr(*mut ()); struct Ptr(*mut ());
@ -126,7 +128,7 @@ pub fn scoped_thread<F: FnOnce() -> R + Send, R: Send>(cfg: thread::Builder, f:
} }
#[cfg(not(parallel_compiler))] #[cfg(not(parallel_compiler))]
pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>( pub fn setup_callbacks_and_run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
edition: Edition, edition: Edition,
_threads: usize, _threads: usize,
stderr: &Option<Arc<Mutex<Vec<u8>>>>, stderr: &Option<Arc<Mutex<Vec<u8>>>>,
@ -140,7 +142,7 @@ pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
crate::callbacks::setup_callbacks(); crate::callbacks::setup_callbacks();
scoped_thread(cfg, || { let main_handler = move || {
rustc_ast::with_session_globals(edition, || { rustc_ast::with_session_globals(edition, || {
ty::tls::GCX_PTR.set(&Lock::new(0), || { ty::tls::GCX_PTR.set(&Lock::new(0), || {
if let Some(stderr) = stderr { if let Some(stderr) = stderr {
@ -149,22 +151,21 @@ pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
f() f()
}) })
}) })
}) };
scoped_thread(cfg, main_handler)
} }
#[cfg(parallel_compiler)] #[cfg(parallel_compiler)]
pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>( pub fn setup_callbacks_and_run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
edition: Edition, edition: Edition,
threads: usize, threads: usize,
stderr: &Option<Arc<Mutex<Vec<u8>>>>, stderr: &Option<Arc<Mutex<Vec<u8>>>>,
f: F, f: F,
) -> R { ) -> R {
use rayon::{ThreadBuilder, ThreadPool, ThreadPoolBuilder};
let gcx_ptr = &Lock::new(0);
crate::callbacks::setup_callbacks(); crate::callbacks::setup_callbacks();
let mut config = ThreadPoolBuilder::new() let mut config = rayon::ThreadPoolBuilder::new()
.thread_name(|_| "rustc".to_string()) .thread_name(|_| "rustc".to_string())
.acquire_thread_handler(jobserver::acquire_thread) .acquire_thread_handler(jobserver::acquire_thread)
.release_thread_handler(jobserver::release_thread) .release_thread_handler(jobserver::release_thread)
@ -175,7 +176,7 @@ pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
config = config.stack_size(size); config = config.stack_size(size);
} }
let with_pool = move |pool: &ThreadPool| pool.install(move || f()); let with_pool = move |pool: &rayon::ThreadPool| pool.install(move || f());
rustc_ast::with_session_globals(edition, || { rustc_ast::with_session_globals(edition, || {
rustc_ast::SESSION_GLOBALS.with(|ast_session_globals| { rustc_ast::SESSION_GLOBALS.with(|ast_session_globals| {
@ -185,13 +186,15 @@ pub fn spawn_thread_pool<F: FnOnce() -> R + Send, R: Send>(
// span_session_globals are captured and set on the new // span_session_globals are captured and set on the new
// threads. ty::tls::with_thread_locals sets up thread local // threads. ty::tls::with_thread_locals sets up thread local
// callbacks from librustc_ast. // callbacks from librustc_ast.
let main_handler = move |thread: ThreadBuilder| { let main_handler = move |thread: rayon::ThreadBuilder| {
rustc_ast::SESSION_GLOBALS.set(ast_session_globals, || { rustc_ast::SESSION_GLOBALS.set(ast_session_globals, || {
rustc_span::SESSION_GLOBALS.set(span_session_globals, || { rustc_span::SESSION_GLOBALS.set(span_session_globals, || {
ty::tls::GCX_PTR.set(&Lock::new(0), || {
if let Some(stderr) = stderr { if let Some(stderr) = stderr {
io::set_panic(Some(box Sink(stderr.clone()))); io::set_panic(Some(box Sink(stderr.clone())));
} }
ty::tls::GCX_PTR.set(gcx_ptr, || thread.run()) thread.run()
})
}) })
}) })
}; };
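The new doc comment on `scoped_thread` above describes it as a `thread::Builder::spawn` followed by a `join()` that avoids `'static` bounds. For contrast, here is a minimal sketch (illustrative names, not part of this change) of the plain std pattern, where the `'static` bounds are exactly what the compiler-internal helper works around:

use std::thread;

// Plain spawn-then-join: the closure and its result must be `'static`,
// so borrowed data cannot cross into the worker thread.
fn spawn_and_join<F, R>(cfg: thread::Builder, f: F) -> R
where
    F: FnOnce() -> R + Send + 'static,
    R: Send + 'static,
{
    cfg.spawn(f).expect("failed to spawn thread").join().expect("worker thread panicked")
}

fn main() {
    let answer = spawn_and_join(thread::Builder::new().name("worker".into()), || 21 * 2);
    assert_eq!(answer, 42);
}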

View File

@ -287,8 +287,8 @@ impl<'tcx> LateLintPass<'tcx> for UnusedAttributes {
let attr_info = attr.ident().and_then(|ident| self.builtin_attributes.get(&ident.name)); let attr_info = attr.ident().and_then(|ident| self.builtin_attributes.get(&ident.name));
if let Some(&&(name, ty, ..)) = attr_info { if let Some(&&(name, ty, ..)) = attr_info {
if let AttributeType::Whitelisted = ty { if let AttributeType::AssumedUsed = ty {
debug!("{:?} is Whitelisted", name); debug!("{:?} is AssumedUsed", name);
return; return;
} }
} }

View File

@ -1386,9 +1386,6 @@ impl<'a, 'tcx> CrateMetadataRef<'a> {
let constness = match self.kind(id) { let constness = match self.kind(id) {
EntryKind::AssocFn(data) => data.decode(self).fn_data.constness, EntryKind::AssocFn(data) => data.decode(self).fn_data.constness,
EntryKind::Fn(data) => data.decode(self).constness, EntryKind::Fn(data) => data.decode(self).constness,
// Some intrinsics can be const fn. While we could recompute this (at least until we
// stop having hardcoded whitelists and move to stability attributes), it seems cleaner
// to treat all const fns equally.
EntryKind::ForeignFn(data) => data.decode(self).constness, EntryKind::ForeignFn(data) => data.decode(self).constness,
EntryKind::Variant(..) | EntryKind::Struct(..) => hir::Constness::Const, EntryKind::Variant(..) | EntryKind::Struct(..) => hir::Constness::Const,
_ => hir::Constness::NotConst, _ => hir::Constness::NotConst,

View File

@ -230,8 +230,9 @@ pub fn struct_lint_level<'s, 'd>(
err.allow_suggestions(false); err.allow_suggestions(false);
// If this is a future incompatible lint it'll become a hard error, so // If this is a future incompatible lint it'll become a hard error, so
// we have to emit *something*. Also allow lints to whitelist themselves // we have to emit *something*. Also, if this lint occurs in the
// on a case-by-case basis for emission in a foreign macro. // expansion of a macro from an external crate, allow individual lints
// to opt out of being reported.
if future_incompatible.is_none() && !lint.report_in_external_macro { if future_incompatible.is_none() && !lint.report_in_external_macro {
err.cancel(); err.cancel();
// Don't continue further, since we don't want to have // Don't continue further, since we don't want to have

View File

@ -42,19 +42,18 @@ impl<'tcx> TyCtxt<'tcx> {
} }
} }
/// Returns `true` if the specified `lang_item` doesn't actually need to be /// Returns `true` if the specified `lang_item` must be present for this
/// present for this compilation. /// compilation.
/// ///
/// Not all lang items are always required for each compilation, particularly in /// Not all lang items are always required for each compilation, particularly in
/// the case of panic=abort. In these situations some lang items are injected by /// the case of panic=abort. In these situations some lang items are injected by
/// crates and don't actually need to be defined in libstd. /// crates and don't actually need to be defined in libstd.
pub fn whitelisted(tcx: TyCtxt<'_>, lang_item: LangItem) -> bool { pub fn required(tcx: TyCtxt<'_>, lang_item: LangItem) -> bool {
// If we're not compiling with unwinding, we won't actually need these // If we're not compiling with unwinding, we won't actually need these
// symbols. Other panic runtimes ensure that the relevant symbols are // symbols. Other panic runtimes ensure that the relevant symbols are
// available to link things together, but they're never exercised. // available to link things together, but they're never exercised.
if tcx.sess.panic_strategy() != PanicStrategy::Unwind { match tcx.sess.panic_strategy() {
return lang_item == LangItem::EhPersonalityLangItem; PanicStrategy::Abort => lang_item != LangItem::EhPersonalityLangItem,
PanicStrategy::Unwind => true,
} }
false
} }

View File

@ -1413,10 +1413,10 @@ rustc_queries! {
} }
Other { Other {
query target_features_whitelist(_: CrateNum) -> FxHashMap<String, Option<Symbol>> { query supported_target_features(_: CrateNum) -> FxHashMap<String, Option<Symbol>> {
storage(ArenaCacheSelector<'tcx>) storage(ArenaCacheSelector<'tcx>)
eval_always eval_always
desc { "looking up the whitelist of target features" } desc { "looking up supported target features" }
} }
// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning. // Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.

View File

@ -527,7 +527,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
size: Size::ZERO, size: Size::ZERO,
}), }),
// Potentially-fat pointers. // Potentially-wide pointers.
ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
let mut data_ptr = scalar_unit(Pointer); let mut data_ptr = scalar_unit(Pointer);
if !ty.is_unsafe_ptr() { if !ty.is_unsafe_ptr() {

View File

@ -1337,18 +1337,18 @@ impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> {
} }
pub trait ToPredicate<'tcx> { pub trait ToPredicate<'tcx> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx>; fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx>;
} }
impl ToPredicate<'tcx> for PredicateKind<'tcx> { impl ToPredicate<'tcx> for PredicateKind<'tcx> {
#[inline(always)] #[inline(always)]
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
tcx.mk_predicate(*self) tcx.mk_predicate(self)
} }
} }
impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<TraitRef<'tcx>> { impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<TraitRef<'tcx>> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
ty::PredicateKind::Trait( ty::PredicateKind::Trait(
ty::Binder::dummy(ty::TraitPredicate { trait_ref: self.value }), ty::Binder::dummy(ty::TraitPredicate { trait_ref: self.value }),
self.constness, self.constness,
@ -1358,7 +1358,7 @@ impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<TraitRef<'tcx>> {
} }
impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<&TraitRef<'tcx>> { impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<&TraitRef<'tcx>> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
ty::PredicateKind::Trait( ty::PredicateKind::Trait(
ty::Binder::dummy(ty::TraitPredicate { trait_ref: *self.value }), ty::Binder::dummy(ty::TraitPredicate { trait_ref: *self.value }),
self.constness, self.constness,
@ -1368,34 +1368,34 @@ impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<&TraitRef<'tcx>> {
} }
impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<PolyTraitRef<'tcx>> { impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<PolyTraitRef<'tcx>> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
ty::PredicateKind::Trait(self.value.to_poly_trait_predicate(), self.constness) ty::PredicateKind::Trait(self.value.to_poly_trait_predicate(), self.constness)
.to_predicate(tcx) .to_predicate(tcx)
} }
} }
impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<&PolyTraitRef<'tcx>> { impl<'tcx> ToPredicate<'tcx> for ConstnessAnd<&PolyTraitRef<'tcx>> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
ty::PredicateKind::Trait(self.value.to_poly_trait_predicate(), self.constness) ty::PredicateKind::Trait(self.value.to_poly_trait_predicate(), self.constness)
.to_predicate(tcx) .to_predicate(tcx)
} }
} }
impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> { impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
PredicateKind::RegionOutlives(*self).to_predicate(tcx) PredicateKind::RegionOutlives(self).to_predicate(tcx)
} }
} }
impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> { impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
PredicateKind::TypeOutlives(*self).to_predicate(tcx) PredicateKind::TypeOutlives(self).to_predicate(tcx)
} }
} }
impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> { impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> {
fn to_predicate(&self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> {
PredicateKind::Projection(*self).to_predicate(tcx) PredicateKind::Projection(self).to_predicate(tcx)
} }
} }
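The `ToPredicate` change above switches the trait to take `self` by value, so the `Copy` wrapper types no longer need the `*self` dereferences. A generic sketch of that design choice, with hypothetical types standing in for the compiler's own:

#[derive(Clone, Copy)]
struct Meters(f64);

struct Feet(f64);

// Taking `self` by value: for `Copy` types the caller hands over a cheap copy,
// and the impl consumes it directly instead of copying out of `&self`.
trait ToFeet {
    fn to_feet(self) -> Feet;
}

impl ToFeet for Meters {
    fn to_feet(self) -> Feet {
        Feet(self.0 * 3.28084)
    }
}

fn main() {
    let height = Meters(2.0).to_feet();
    assert!((height.0 - 6.56168).abs() < 1e-9);
}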

View File

@ -88,7 +88,7 @@ pub fn is_parent_const_impl_raw(tcx: TyCtxt<'_>, hir_id: hir::HirId) -> bool {
} }
/// Checks whether the function has a `const` modifier or, in case it is an intrinsic, whether /// Checks whether the function has a `const` modifier or, in case it is an intrinsic, whether
/// said intrinsic is on the whitelist for being const callable. /// said intrinsic has a `rustc_const_{un,}stable` attribute.
fn is_const_fn_raw(tcx: TyCtxt<'_>, def_id: DefId) -> bool { fn is_const_fn_raw(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
let hir_id = tcx.hir().as_local_hir_id(def_id.expect_local()); let hir_id = tcx.hir().as_local_hir_id(def_id.expect_local());

View File

@ -45,7 +45,7 @@ macro_rules! throw_validation_failure {
/// If $e throws an error matching the pattern, throw a validation failure. /// If $e throws an error matching the pattern, throw a validation failure.
/// Other errors are passed back to the caller, unchanged -- and if they reach the root of /// Other errors are passed back to the caller, unchanged -- and if they reach the root of
/// the visitor, we make sure only validation errors and `InvalidProgram` errors are left. /// the visitor, we make sure only validation errors and `InvalidProgram` errors are left.
/// This lets you use the patterns as a kind of validation whitelist, asserting which errors /// This lets you use the patterns as a kind of validation list, asserting which errors
/// can possibly happen: /// can possibly happen:
/// ///
/// ``` /// ```

View File

@ -191,9 +191,18 @@ fn check_rvalue(
_, _,
_, _,
) => Err((span, "function pointer casts are not allowed in const fn".into())), ) => Err((span, "function pointer casts are not allowed in const fn".into())),
Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), _, _) => { Rvalue::Cast(CastKind::Pointer(PointerCast::Unsize), op, cast_ty) => {
let pointee_ty = cast_ty.builtin_deref(true).unwrap().ty;
let unsized_ty = tcx.struct_tail_erasing_lifetimes(pointee_ty, tcx.param_env(def_id));
if let ty::Slice(_) | ty::Str = unsized_ty.kind {
check_operand(tcx, op, span, def_id, body)?;
// Casting/coercing things to slices is fine.
Ok(())
} else {
// We just can't allow trait objects until we have figured out trait method calls.
Err((span, "unsizing casts are not allowed in const fn".into())) Err((span, "unsizing casts are not allowed in const fn".into()))
} }
}
// binops are fine on integers // binops are fine on integers
Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => { Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => {
check_operand(tcx, lhs, span, def_id, body)?; check_operand(tcx, lhs, span, def_id, body)?;

View File

@ -770,10 +770,10 @@ impl<'a> Parser<'a> {
match self.token.uninterpolate().kind { match self.token.uninterpolate().kind {
token::Ident(..) => self.parse_dot_suffix(base, lo), token::Ident(..) => self.parse_dot_suffix(base, lo),
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => { token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix)) Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None))
} }
token::Literal(token::Lit { kind: token::Float, symbol, .. }) => { token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
self.recover_field_access_by_float_lit(lo, base, symbol) Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix))
} }
_ => { _ => {
self.error_unexpected_after_dot(); self.error_unexpected_after_dot();
@ -788,45 +788,84 @@ impl<'a> Parser<'a> {
self.struct_span_err(self.token.span, &format!("unexpected token: `{}`", actual)).emit(); self.struct_span_err(self.token.span, &format!("unexpected token: `{}`", actual)).emit();
} }
fn recover_field_access_by_float_lit( // We need an identifier or integer, but the next token is a float.
// Break the float into components to extract the identifier or integer.
// FIXME: With current `TokenCursor` it's hard to break tokens into more than 2
// parts unless those parts are processed immediately. `TokenCursor` should either
// support pushing "future tokens" (would be also helpful to `break_and_eat`), or
// we should break everything including floats into more basic proc-macro style
// tokens in the lexer (probably preferable).
fn parse_tuple_field_access_expr_float(
&mut self, &mut self,
lo: Span, lo: Span,
base: P<Expr>, base: P<Expr>,
sym: Symbol, float: Symbol,
) -> PResult<'a, P<Expr>> { suffix: Option<Symbol>,
self.bump(); ) -> P<Expr> {
#[derive(Debug)]
let fstr = sym.as_str(); enum FloatComponent {
let msg = format!("unexpected token: `{}`", sym); IdentLike(String),
Punct(char),
let mut err = self.struct_span_err(self.prev_token.span, &msg);
err.span_label(self.prev_token.span, "unexpected token");
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>() {
Ok(f) => f,
Err(_) => {
err.emit();
return Ok(base);
} }
}; use FloatComponent::*;
let sugg = pprust::to_string(|s| {
s.popen(); let mut components = Vec::new();
s.print_expr(&base); let mut ident_like = String::new();
s.s.word("."); for c in float.as_str().chars() {
s.print_usize(float.trunc() as usize); if c == '_' || c.is_ascii_alphanumeric() {
s.pclose(); ident_like.push(c);
s.s.word("."); } else if matches!(c, '.' | '+' | '-') {
s.s.word(fstr.splitn(2, '.').last().unwrap().to_string()) if !ident_like.is_empty() {
}); components.push(IdentLike(mem::take(&mut ident_like)));
err.span_suggestion( }
lo.to(self.prev_token.span), components.push(Punct(c));
"try parenthesizing the first index", } else {
sugg, panic!("unexpected character in a float token: {:?}", c)
Applicability::MachineApplicable, }
); }
if !ident_like.is_empty() {
components.push(IdentLike(ident_like));
}
// FIXME: Make the span more precise.
let span = self.token.span;
match &*components {
// 1e2
[IdentLike(i)] => {
self.parse_tuple_field_access_expr(lo, base, Symbol::intern(&i), suffix, None)
}
// 1.
[IdentLike(i), Punct('.')] => {
assert!(suffix.is_none());
let symbol = Symbol::intern(&i);
self.token = Token::new(token::Ident(symbol, false), span);
let next_token = Token::new(token::Dot, span);
self.parse_tuple_field_access_expr(lo, base, symbol, None, Some(next_token))
}
// 1.2 | 1.2e3
[IdentLike(i1), Punct('.'), IdentLike(i2)] => {
let symbol1 = Symbol::intern(&i1);
self.token = Token::new(token::Ident(symbol1, false), span);
let next_token1 = Token::new(token::Dot, span);
let base1 =
self.parse_tuple_field_access_expr(lo, base, symbol1, None, Some(next_token1));
let symbol2 = Symbol::intern(&i2);
let next_token2 = Token::new(token::Ident(symbol2, false), span);
self.bump_with(next_token2); // `.`
self.parse_tuple_field_access_expr(lo, base1, symbol2, suffix, None)
}
// 1e+ | 1e- (recovered)
[IdentLike(_), Punct('+' | '-')] |
// 1e+2 | 1e-2
[IdentLike(_), Punct('+' | '-'), IdentLike(_)] |
// 1.2e+3 | 1.2e-3
[IdentLike(_), Punct('.'), IdentLike(_), Punct('+' | '-'), IdentLike(_)] => {
// See the FIXME about `TokenCursor` above.
self.error_unexpected_after_dot();
base
}
_ => panic!("unexpected components in a float token: {:?}", components),
} }
Err(err)
} }
fn parse_tuple_field_access_expr( fn parse_tuple_field_access_expr(
@ -835,8 +874,12 @@ impl<'a> Parser<'a> {
base: P<Expr>, base: P<Expr>,
field: Symbol, field: Symbol,
suffix: Option<Symbol>, suffix: Option<Symbol>,
next_token: Option<Token>,
) -> P<Expr> { ) -> P<Expr> {
self.bump(); match next_token {
Some(next_token) => self.bump_with(next_token),
None => self.bump(),
}
let span = self.prev_token.span; let span = self.prev_token.span;
let field = ExprKind::Field(base, Ident::new(field, span)); let field = ExprKind::Field(base, Ident::new(field, span));
self.expect_no_suffix(span, "a tuple index", suffix); self.expect_no_suffix(span, "a tuple index", suffix);
@ -1790,7 +1833,7 @@ impl<'a> Parser<'a> {
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr) let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& self.token != token::CloseDelim(token::Brace); && self.token != token::CloseDelim(token::Brace);
let hi = self.token.span; let hi = self.prev_token.span;
if require_comma { if require_comma {
let sm = self.sess.source_map(); let sm = self.sess.source_map();
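The tuple-field parsing changes earlier in this file split float tokens back into their components, which is what lets nested tuple indexing be written directly: `t.0.0` is lexed as a single float token and broken back into `0 . 0`. A small sketch of the syntax that becomes accepted (assuming a compiler with this change):

fn main() {
    let t = ((1u32, 2u32), 3u32);
    // Previously this had to be written `(t.0).0`; the float token `0.0`
    // is now split back into two tuple-index components by the parser.
    assert_eq!(t.0.0, 1);
    assert_eq!(t.0.1, 2);
    assert_eq!(t.1, 3);
}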

View File

@ -292,6 +292,8 @@ impl CheckAttrVisitor<'tcx> {
| sym::u32 | sym::u32
| sym::i64 | sym::i64
| sym::u64 | sym::u64
| sym::i128
| sym::u128
| sym::isize | sym::isize
| sym::usize => { | sym::usize => {
int_reprs += 1; int_reprs += 1;
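The two symbols added above extend the repr-hint check to the 128-bit integer representations, which are only permitted on enums. A hedged sketch of the enum form in question; note that `repr(u128)` is nightly-only behind the `repr128` feature gate:

// Nightly-only: 128-bit discriminant representations are gated behind `repr128`.
#![feature(repr128)]
#![allow(incomplete_features)]

#[allow(dead_code)]
#[repr(u128)]
enum Big {
    Zero = 0,
    Huge = 170_141_183_460_469_231_731_687_303_715_884_105_727,
}

fn main() {
    assert_eq!(Big::Zero as u128, 0);
}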

View File

@ -7,7 +7,7 @@ use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::lang_items; use rustc_hir::lang_items;
use rustc_hir::lang_items::ITEM_REFS; use rustc_hir::lang_items::ITEM_REFS;
use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS; use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS;
use rustc_middle::middle::lang_items::whitelisted; use rustc_middle::middle::lang_items::required;
use rustc_middle::ty::TyCtxt; use rustc_middle::ty::TyCtxt;
use rustc_session::config::CrateType; use rustc_session::config::CrateType;
use rustc_span::symbol::sym; use rustc_span::symbol::sym;
@ -59,7 +59,7 @@ fn verify<'tcx>(tcx: TyCtxt<'tcx>, items: &lang_items::LanguageItems) {
} }
for (name, &item) in WEAK_ITEMS_REFS.iter() { for (name, &item) in WEAK_ITEMS_REFS.iter() {
if missing.contains(&item) && !whitelisted(tcx, item) && items.require(item).is_err() { if missing.contains(&item) && required(tcx, item) && items.require(item).is_err() {
if item == lang_items::PanicImplLangItem { if item == lang_items::PanicImplLangItem {
tcx.sess.err("`#[panic_handler]` function required, but not found"); tcx.sess.err("`#[panic_handler]` function required, but not found");
} else if item == lang_items::OomLangItem { } else if item == lang_items::OomLangItem {

View File

@ -2122,7 +2122,7 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
self.impl_self self.impl_self
{ {
match path.res { match path.res {
// Whitelist the types that unambiguously always // Permit the types that unambiguously always
// result in the same type constructor being used // result in the same type constructor being used
// (it can't differ between `Self` and `self`). // (it can't differ between `Self` and `self`).
Res::Def(DefKind::Struct | DefKind::Union | DefKind::Enum, _) Res::Def(DefKind::Struct | DefKind::Union | DefKind::Enum, _)

View File

@ -16,8 +16,8 @@ pub(super) fn test_target(target: TargetResult) {
impl Target { impl Target {
fn check_consistency(&self) { fn check_consistency(&self) {
// Check that LLD with the given flavor is treated identically to the linker it emulates. // Check that LLD with the given flavor is treated identically to the linker it emulates.
// If you target really needs to deviate from the rules below, whitelist it // If your target really needs to deviate from the rules below, make an exception for it
// and document the reasons. // and document the reasons.
assert_eq!( assert_eq!(
self.linker_flavor == LinkerFlavor::Msvc self.linker_flavor == LinkerFlavor::Msvc
|| self.linker_flavor == LinkerFlavor::Lld(LldFlavor::Link), || self.linker_flavor == LinkerFlavor::Lld(LldFlavor::Link),

View File

@ -40,14 +40,14 @@ pub fn options() -> TargetOptions {
// corrupting static data. // corrupting static data.
arg("--stack-first"); arg("--stack-first");
// FIXME we probably shouldn't pass this but instead pass an explicit // FIXME we probably shouldn't pass this but instead pass an explicit list
// whitelist of symbols we'll allow to be undefined. We don't currently have // of symbols we'll allow to be undefined. We don't currently have a
// a mechanism of knowing, however, which symbols are intended to be // mechanism of knowing, however, which symbols are intended to be imported
// imported from the environment and which are intended to be imported from // from the environment and which are intended to be imported from other
// other objects linked elsewhere. This is a coarse approximation but is // objects linked elsewhere. This is a coarse approximation but is sure to
// sure to hide some bugs and frustrate someone at some point, so we should // hide some bugs and frustrate someone at some point, so we should ideally
// ideally work towards a world where we can explicitly list symbols that // work towards a world where we can explicitly list symbols that are
// are supposed to be imported and have all other symbols generate errors if // supposed to be imported and have all other symbols generate errors if
// they remain undefined. // they remain undefined.
arg("--allow-undefined"); arg("--allow-undefined");

View File

@ -2139,7 +2139,7 @@ pub trait NextTypeParamName {
impl NextTypeParamName for &[hir::GenericParam<'_>] { impl NextTypeParamName for &[hir::GenericParam<'_>] {
fn next_type_param_name(&self, name: Option<&str>) -> String { fn next_type_param_name(&self, name: Option<&str>) -> String {
// This is the whitelist of possible parameter names that we might suggest. // This is the list of possible parameter names that we might suggest.
let name = name.and_then(|n| n.chars().next()).map(|c| c.to_string().to_uppercase()); let name = name.and_then(|n| n.chars().next()).map(|c| c.to_string().to_uppercase());
let name = name.as_deref(); let name = name.as_deref();
let possible_names = [name.unwrap_or("T"), "T", "U", "V", "X", "Y", "Z", "A", "B", "C"]; let possible_names = [name.unwrap_or("T"), "T", "U", "V", "X", "Y", "Z", "A", "B", "C"];

View File

@ -236,7 +236,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
.tcx .tcx
.get_attrs(m.def_id) .get_attrs(m.def_id)
.iter() .iter()
// This special internal attribute is used to whitelist // This special internal attribute is used to permit
// "identity-like" conversion methods to be suggested here. // "identity-like" conversion methods to be suggested here.
// //
// FIXME (#46459 and #46460): ideally // FIXME (#46459 and #46460): ideally

View File

@ -913,7 +913,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if let ty::Adt(..) = rcvr_t.kind { if let ty::Adt(..) = rcvr_t.kind {
// Try alternative arbitrary self types that could fulfill this call. // Try alternative arbitrary self types that could fulfill this call.
// FIXME: probe for all types that *could* be arbitrary self-types, not // FIXME: probe for all types that *could* be arbitrary self-types, not
// just this whitelist. // just this list.
try_alt_rcvr(&mut err, self.tcx.mk_lang_item(rcvr_t, lang_items::OwnedBoxLangItem)); try_alt_rcvr(&mut err, self.tcx.mk_lang_item(rcvr_t, lang_items::OwnedBoxLangItem));
try_alt_rcvr(&mut err, self.tcx.mk_lang_item(rcvr_t, lang_items::PinTypeLangItem)); try_alt_rcvr(&mut err, self.tcx.mk_lang_item(rcvr_t, lang_items::PinTypeLangItem));
try_alt_rcvr(&mut err, self.tcx.mk_diagnostic_item(rcvr_t, sym::Arc)); try_alt_rcvr(&mut err, self.tcx.mk_diagnostic_item(rcvr_t, sym::Arc));
@ -1806,7 +1806,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// If this is an input value, we require its type to be fully resolved // If this is an input value, we require its type to be fully resolved
// at this point. This allows us to provide helpful coercions which help // at this point. This allows us to provide helpful coercions which help
// pass the type whitelist in a later pass. // pass the type candidate list in a later pass.
// //
// We don't require output types to be resolved at this point, which // We don't require output types to be resolved at this point, which
// allows them to be inferred based on how they are used later in the // allows them to be inferred based on how they are used later in the

View File

@ -2150,7 +2150,7 @@ fn from_target_feature(
tcx: TyCtxt<'_>, tcx: TyCtxt<'_>,
id: DefId, id: DefId,
attr: &ast::Attribute, attr: &ast::Attribute,
whitelist: &FxHashMap<String, Option<Symbol>>, supported_target_features: &FxHashMap<String, Option<Symbol>>,
target_features: &mut Vec<Symbol>, target_features: &mut Vec<Symbol>,
) { ) {
let list = match attr.meta_item_list() { let list = match attr.meta_item_list() {
@ -2184,8 +2184,7 @@ fn from_target_feature(
// We allow comma separation to enable multiple features. // We allow comma separation to enable multiple features.
target_features.extend(value.as_str().split(',').filter_map(|feature| { target_features.extend(value.as_str().split(',').filter_map(|feature| {
// Only allow whitelisted features per platform. let feature_gate = match supported_target_features.get(feature) {
let feature_gate = match whitelist.get(feature) {
Some(g) => g, Some(g) => g,
None => { None => {
let msg = let msg =
@ -2196,7 +2195,7 @@ fn from_target_feature(
format!("`{}` is not valid for this target", feature), format!("`{}` is not valid for this target", feature),
); );
if feature.starts_with('+') { if feature.starts_with('+') {
let valid = whitelist.contains_key(&feature[1..]); let valid = supported_target_features.contains_key(&feature[1..]);
if valid { if valid {
err.help("consider removing the leading `+` in the feature name"); err.help("consider removing the leading `+` in the feature name");
} }
@ -2246,9 +2245,9 @@ fn linkage_by_name(tcx: TyCtxt<'_>, def_id: DefId, name: &str) -> Linkage {
// Use the names from src/llvm/docs/LangRef.rst here. Most types are only // Use the names from src/llvm/docs/LangRef.rst here. Most types are only
// applicable to variable declarations and may not really make sense for // applicable to variable declarations and may not really make sense for
// Rust code in the first place but whitelist them anyway and trust that // Rust code in the first place but allow them anyway and trust that the
// the user knows what s/he's doing. Who knows, unanticipated use cases // user knows what s/he's doing. Who knows, unanticipated use cases may pop
// may pop up in the future. // up in the future.
// //
// ghost, dllimport, dllexport and linkonce_odr_autohide are not supported // ghost, dllimport, dllexport and linkonce_odr_autohide are not supported
// and don't have to be, LLVM treats them as no-ops. // and don't have to be, LLVM treats them as no-ops.
@ -2283,7 +2282,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER; codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER;
} }
let whitelist = tcx.target_features_whitelist(LOCAL_CRATE); let supported_target_features = tcx.supported_target_features(LOCAL_CRATE);
let mut inline_span = None; let mut inline_span = None;
let mut link_ordinal_span = None; let mut link_ordinal_span = None;
@ -2386,7 +2385,13 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
check_target_feature_trait_unsafe(tcx, local_id, attr.span); check_target_feature_trait_unsafe(tcx, local_id, attr.span);
} }
} }
from_target_feature(tcx, id, attr, &whitelist, &mut codegen_fn_attrs.target_features); from_target_feature(
tcx,
id,
attr,
&supported_target_features,
&mut codegen_fn_attrs.target_features,
);
} else if attr.check_name(sym::linkage) { } else if attr.check_name(sym::linkage) {
if let Some(val) = attr.value_str() { if let Some(val) = attr.value_str() {
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, &val.as_str())); codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, &val.as_str()));
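The renamed query and parameter above feed the validation of `#[target_feature]` values against the per-target list of supported features. A hedged, user-facing sketch of the attribute whose arguments get checked here (x86_64-only, illustrative function names):

#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "avx2")]
unsafe fn sum_avx2(xs: &[f32]) -> f32 {
    // The body is ordinary Rust; the attribute only changes codegen for this function.
    xs.iter().sum()
}

#[cfg(target_arch = "x86_64")]
fn main() {
    // Only call the function after checking that the CPU actually supports the feature.
    if is_x86_feature_detected!("avx2") {
        assert_eq!(unsafe { sum_avx2(&[1.0, 2.0, 3.0]) }, 6.0);
    }
}

#[cfg(not(target_arch = "x86_64"))]
fn main() {}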

View File

@ -225,7 +225,7 @@ pub fn new_handler(
/// * Vector of tuples of lints' name and their associated "max" level /// * Vector of tuples of lints' name and their associated "max" level
/// * HashMap of lint id with their associated "max" level /// * HashMap of lint id with their associated "max" level
pub fn init_lints<F>( pub fn init_lints<F>(
mut whitelisted_lints: Vec<String>, mut allowed_lints: Vec<String>,
lint_opts: Vec<(String, lint::Level)>, lint_opts: Vec<(String, lint::Level)>,
filter_call: F, filter_call: F,
) -> (Vec<(String, lint::Level)>, FxHashMap<lint::LintId, lint::Level>) ) -> (Vec<(String, lint::Level)>, FxHashMap<lint::LintId, lint::Level>)
@ -234,8 +234,8 @@ where
{ {
let warnings_lint_name = lint::builtin::WARNINGS.name; let warnings_lint_name = lint::builtin::WARNINGS.name;
whitelisted_lints.push(warnings_lint_name.to_owned()); allowed_lints.push(warnings_lint_name.to_owned());
whitelisted_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned()); allowed_lints.extend(lint_opts.iter().map(|(lint, _)| lint).cloned());
let lints = || { let lints = || {
lint::builtin::HardwiredLints::get_lints() lint::builtin::HardwiredLints::get_lints()
@ -245,7 +245,7 @@ where
let lint_opts = lints() let lint_opts = lints()
.filter_map(|lint| { .filter_map(|lint| {
// Whitelist feature-gated lints to avoid feature errors when trying to // Permit feature-gated lints to avoid feature errors when trying to
// allow all lints. // allow all lints.
if lint.name == warnings_lint_name || lint.feature_gate.is_some() { if lint.name == warnings_lint_name || lint.feature_gate.is_some() {
None None
@ -258,9 +258,9 @@ where
let lint_caps = lints() let lint_caps = lints()
.filter_map(|lint| { .filter_map(|lint| {
// We don't want to whitelist *all* lints so let's // We don't want to allow *all* lints so let's ignore
// ignore those ones. // those ones.
if whitelisted_lints.iter().any(|l| lint.name == l) { if allowed_lints.iter().any(|l| lint.name == l) {
None None
} else { } else {
Some((lint::LintId::of(lint), lint::Allow)) Some((lint::LintId::of(lint), lint::Allow))
@ -317,9 +317,9 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
let no_crate_level_docs = rustc_lint::builtin::MISSING_CRATE_LEVEL_DOCS.name; let no_crate_level_docs = rustc_lint::builtin::MISSING_CRATE_LEVEL_DOCS.name;
let invalid_codeblock_attribute_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name; let invalid_codeblock_attribute_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name;
// In addition to those specific lints, we also need to whitelist those given through // In addition to those specific lints, we also need to allow those given through
// command line, otherwise they'll get ignored and we don't want that. // command line, otherwise they'll get ignored and we don't want that.
let whitelisted_lints = vec![ let allowed_lints = vec![
intra_link_resolution_failure_name.to_owned(), intra_link_resolution_failure_name.to_owned(),
missing_docs.to_owned(), missing_docs.to_owned(),
missing_doc_example.to_owned(), missing_doc_example.to_owned(),
@ -328,7 +328,7 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
invalid_codeblock_attribute_name.to_owned(), invalid_codeblock_attribute_name.to_owned(),
]; ];
let (lint_opts, lint_caps) = init_lints(whitelisted_lints, lint_opts, |lint| { let (lint_opts, lint_caps) = init_lints(allowed_lints, lint_opts, |lint| {
if lint.name == intra_link_resolution_failure_name if lint.name == intra_link_resolution_failure_name
|| lint.name == invalid_codeblock_attribute_name || lint.name == invalid_codeblock_attribute_name
{ {
@ -376,7 +376,7 @@ pub fn run_core(options: RustdocOptions) -> (clean::Crate, RenderInfo, RenderOpt
registry: rustc_driver::diagnostics_registry(), registry: rustc_driver::diagnostics_registry(),
}; };
interface::run_compiler_in_existing_thread_pool(config, |compiler| { interface::create_compiler_and_run(config, |compiler| {
compiler.enter(|queries| { compiler.enter(|queries| {
let sess = compiler.session(); let sess = compiler.session();

View File

@ -3151,7 +3151,7 @@ fn item_enum(w: &mut Buffer, cx: &Context, it: &clean::Item, e: &clean::Enum) {
render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All) render_assoc_items(w, cx, it, it.def_id, AssocItemRender::All)
} }
const ATTRIBUTE_WHITELIST: &[Symbol] = &[ const ALLOWED_ATTRIBUTES: &[Symbol] = &[
sym::export_name, sym::export_name,
sym::lang, sym::lang,
sym::link_section, sym::link_section,
@ -3173,7 +3173,7 @@ fn render_attributes(w: &mut Buffer, it: &clean::Item, top: bool) {
let mut attrs = String::new(); let mut attrs = String::new();
for attr in &it.attrs.other_attrs { for attr in &it.attrs.other_attrs {
if !ATTRIBUTE_WHITELIST.contains(&attr.name_or_empty()) { if !ALLOWED_ATTRIBUTES.contains(&attr.name_or_empty()) {
continue; continue;
} }

View File

@ -437,7 +437,10 @@ fn main_args(args: &[String]) -> i32 {
Ok(opts) => opts, Ok(opts) => opts,
Err(code) => return code, Err(code) => return code,
}; };
rustc_interface::interface::default_thread_pool(options.edition, move || main_options(options)) rustc_interface::interface::setup_callbacks_and_run_in_default_thread_pool_with_globals(
options.edition,
move || main_options(options),
)
} }
fn wrap_return(diag: &rustc_errors::Handler, res: Result<(), String>) -> i32 { fn wrap_return(diag: &rustc_errors::Handler, res: Result<(), String>) -> i32 {
@ -471,7 +474,29 @@ fn main_options(options: config::Options) -> i32 {
// but we can't create the Handler ahead of time because it's not Send // but we can't create the Handler ahead of time because it's not Send
let diag_opts = (options.error_format, options.edition, options.debugging_options.clone()); let diag_opts = (options.error_format, options.edition, options.debugging_options.clone());
let show_coverage = options.show_coverage; let show_coverage = options.show_coverage;
rust_input(options, move |out| {
// First, parse the crate and extract all relevant information.
info!("starting to run rustc");
// Interpret the input file as a rust source file, passing it through the
// compiler all the way through the analysis passes. The rustdoc output is
// then generated from the cleaned AST of the crate. This runs all the
// plug/cleaning passes.
let result = rustc_driver::catch_fatal_errors(move || {
let crate_name = options.crate_name.clone();
let crate_version = options.crate_version.clone();
let (mut krate, renderinfo, renderopts) = core::run_core(options);
info!("finished with rustc");
if let Some(name) = crate_name {
krate.name = name
}
krate.version = crate_version;
let out = Output { krate, renderinfo, renderopts };
if show_coverage { if show_coverage {
// if we ran coverage, bail early, we don't need to also generate docs at this point // if we ran coverage, bail early, we don't need to also generate docs at this point
// (also we didn't load in any of the useful passes) // (also we didn't load in any of the useful passes)
@ -491,36 +516,6 @@ fn main_options(options: config::Options) -> i32 {
rustc_driver::EXIT_FAILURE rustc_driver::EXIT_FAILURE
} }
} }
})
}
/// Interprets the input file as a rust source file, passing it through the
/// compiler all the way through the analysis passes. The rustdoc output is then
/// generated from the cleaned AST of the crate.
///
/// This form of input will run all of the plug/cleaning passes
fn rust_input<R, F>(options: config::Options, f: F) -> R
where
R: 'static + Send,
F: 'static + Send + FnOnce(Output) -> R,
{
// First, parse the crate and extract all relevant information.
info!("starting to run rustc");
let result = rustc_driver::catch_fatal_errors(move || {
let crate_name = options.crate_name.clone();
let crate_version = options.crate_version.clone();
let (mut krate, renderinfo, renderopts) = core::run_core(options);
info!("finished with rustc");
if let Some(name) = crate_name {
krate.name = name
}
krate.version = crate_version;
f(Output { krate, renderinfo, renderopts })
}); });
match result { match result {

View File

@ -47,11 +47,11 @@ pub fn run(options: Options) -> Result<(), String> {
let invalid_codeblock_attribute_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name; let invalid_codeblock_attribute_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name;
// In addition to those specific lints, we also need to whitelist those given through // In addition to those specific lints, we also need to allow those given through
// command line, otherwise they'll get ignored and we don't want that. // command line, otherwise they'll get ignored and we don't want that.
let whitelisted_lints = vec![invalid_codeblock_attribute_name.to_owned()]; let allowed_lints = vec![invalid_codeblock_attribute_name.to_owned()];
let (lint_opts, lint_caps) = init_lints(whitelisted_lints, options.lint_opts.clone(), |lint| { let (lint_opts, lint_caps) = init_lints(allowed_lints, options.lint_opts.clone(), |lint| {
if lint.name == invalid_codeblock_attribute_name { if lint.name == invalid_codeblock_attribute_name {
None None
} else { } else {

View File

@ -12,6 +12,24 @@ use crate::sys_common::{AsInner, AsInnerMut, FromInner};
/// ///
/// [`File`]: ../../../../std/fs/struct.File.html /// [`File`]: ../../../../std/fs/struct.File.html
pub trait FileExt { pub trait FileExt {
/// Reads a number of bytes starting from a given offset.
///
/// Returns the number of bytes read.
///
/// The offset is relative to the start of the file and thus independent
/// from the current cursor.
///
/// The current file cursor is not affected by this function.
///
/// Note that similar to [`File::read`], it is not an error to return with a
/// short read.
///
/// [`File::read`]: ../../../../std/fs/struct.File.html#method.read
fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result<usize> {
let bufs = &mut [IoSliceMut::new(buf)];
self.read_vectored_at(bufs, offset)
}
/// Reads a number of bytes starting from a given offset. /// Reads a number of bytes starting from a given offset.
/// ///
/// Returns the number of bytes read. /// Returns the number of bytes read.
@ -25,7 +43,80 @@ pub trait FileExt {
/// return with a short read. /// return with a short read.
/// ///
/// [`File::read`]: ../../../../std/fs/struct.File.html#method.read_vectored /// [`File::read`]: ../../../../std/fs/struct.File.html#method.read_vectored
fn read_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize>; fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize>;
/// Reads the exact number of bytes required to fill `buf` from the given offset.
///
/// The offset is relative to the start of the file and thus independent
/// from the current cursor.
///
/// The current file cursor is not affected by this function.
///
/// Similar to [`Read::read_exact`] but uses [`read_at`] instead of `read`.
///
/// [`Read::read_exact`]: ../../../../std/io/trait.Read.html#method.read_exact
/// [`read_at`]: #tymethod.read_at
///
/// # Errors
///
/// If this function encounters an error of the kind
/// [`ErrorKind::Interrupted`] then the error is ignored and the operation
/// will continue.
///
/// If this function encounters an "end of file" before completely filling
/// the buffer, it returns an error of the kind [`ErrorKind::UnexpectedEof`].
/// The contents of `buf` are unspecified in this case.
///
/// If any other read error is encountered then this function immediately
/// returns. The contents of `buf` are unspecified in this case.
///
/// If this function returns an error, it is unspecified how many bytes it
/// has read, but it will never read more than would be necessary to
/// completely fill the buffer.
///
/// [`ErrorKind::Interrupted`]: ../../../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`ErrorKind::UnexpectedEof`]: ../../../../std/io/enum.ErrorKind.html#variant.UnexpectedEof
#[stable(feature = "rw_exact_all_at", since = "1.33.0")]
fn read_exact_at(&self, mut buf: &mut [u8], mut offset: u64) -> io::Result<()> {
while !buf.is_empty() {
match self.read_at(buf, offset) {
Ok(0) => break,
Ok(n) => {
let tmp = buf;
buf = &mut tmp[n..];
offset += n as u64;
}
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(e) => return Err(e),
}
}
if !buf.is_empty() {
Err(io::Error::new(io::ErrorKind::UnexpectedEof, "failed to fill whole buffer"))
} else {
Ok(())
}
}
/// Writes a number of bytes starting from a given offset.
///
/// Returns the number of bytes written.
///
/// The offset is relative to the start of the file and thus independent
/// from the current cursor.
///
/// The current file cursor is not affected by this function.
///
/// When writing beyond the end of the file, the file is appropriately
/// extended and the intermediate bytes are initialized with the value 0.
///
/// Note that similar to [`File::write`], it is not an error to return a
/// short write.
///
/// [`File::write`]: ../../../../std/fs/struct.File.html#method.write
fn write_at(&self, buf: &[u8], offset: u64) -> io::Result<usize> {
let bufs = &[IoSlice::new(buf)];
self.write_vectored_at(bufs, offset)
}
/// Writes a number of bytes starting from a given offset. /// Writes a number of bytes starting from a given offset.
/// ///
@ -43,7 +134,49 @@ pub trait FileExt {
/// short write. /// short write.
/// ///
/// [`File::write`]: ../../../../std/fs/struct.File.html#method.write_vectored /// [`File::write`]: ../../../../std/fs/struct.File.html#method.write_vectored
fn write_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize>; fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize>;
/// Attempts to write an entire buffer starting from a given offset.
///
/// The offset is relative to the start of the file and thus independent
/// from the current cursor.
///
/// The current file cursor is not affected by this function.
///
/// This method will continuously call [`write_at`] until there is no more data
/// to be written or an error of non-[`ErrorKind::Interrupted`] kind is
/// returned. This method will not return until the entire buffer has been
/// successfully written or such an error occurs. The first error that is
/// not of [`ErrorKind::Interrupted`] kind generated from this method will be
/// returned.
///
/// # Errors
///
/// This function will return the first error of
/// non-[`ErrorKind::Interrupted`] kind that [`write_at`] returns.
///
/// [`ErrorKind::Interrupted`]: ../../../../std/io/enum.ErrorKind.html#variant.Interrupted
/// [`write_at`]: #tymethod.write_at
#[stable(feature = "rw_exact_all_at", since = "1.33.0")]
fn write_all_at(&self, mut buf: &[u8], mut offset: u64) -> io::Result<()> {
while !buf.is_empty() {
match self.write_at(buf, offset) {
Ok(0) => {
return Err(io::Error::new(
io::ErrorKind::WriteZero,
"failed to write whole buffer",
));
}
Ok(n) => {
buf = &buf[n..];
offset += n as u64
}
Err(ref e) if e.kind() == io::ErrorKind::Interrupted => {}
Err(e) => return Err(e),
}
}
Ok(())
}
/// Returns the current position within the file. /// Returns the current position within the file.
/// ///
@ -105,11 +238,11 @@ pub trait FileExt {
// FIXME: bind random_get maybe? - on crates.io for unix // FIXME: bind random_get maybe? - on crates.io for unix
impl FileExt for fs::File { impl FileExt for fs::File {
fn read_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize> { fn read_vectored_at(&self, bufs: &mut [IoSliceMut<'_>], offset: u64) -> io::Result<usize> {
self.as_inner().fd().pread(bufs, offset) self.as_inner().fd().pread(bufs, offset)
} }
fn write_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize> { fn write_vectored_at(&self, bufs: &[IoSlice<'_>], offset: u64) -> io::Result<usize> {
self.as_inner().fd().pwrite(bufs, offset) self.as_inner().fd().pwrite(bufs, offset)
} }

View File

@ -241,7 +241,7 @@ impl Instant {
// returned instead of what the OS says if the OS goes backwards. // returned instead of what the OS says if the OS goes backwards.
// //
// To hopefully mitigate the impact of this, a few platforms are // To hopefully mitigate the impact of this, a few platforms are
// whitelisted as "these at least haven't gone backwards yet". // excluded as "these at least haven't gone backwards yet".
if time::Instant::actually_monotonic() { if time::Instant::actually_monotonic() {
return Instant(os_now); return Instant(os_now);
} }

View File

@ -1,4 +1,5 @@
// compile-flags: -Z control-flow-guard=checks // compile-flags: -Z control-flow-guard=checks
// only-msvc
#![crate_type = "lib"] #![crate_type = "lib"]

View File

@ -1,4 +1,5 @@
// compile-flags: -Z control-flow-guard=no // compile-flags: -Z control-flow-guard=no
// only-msvc
#![crate_type = "lib"] #![crate_type = "lib"]

View File

@ -1,4 +1,5 @@
// compile-flags: -Z control-flow-guard=nochecks // compile-flags: -Z control-flow-guard=nochecks
// only-msvc
#![crate_type = "lib"] #![crate_type = "lib"]

View File

@ -0,0 +1,11 @@
// compile-flags: -Z control-flow-guard
// ignore-msvc
#![crate_type = "lib"]
// A basic test function.
pub fn test() {
}
// Ensure the cfguard module flag is not added for non-MSVC targets.
// CHECK-NOT: !"cfguard"

View File

@ -102,8 +102,8 @@ fn match_tuple(_1: (u32, bool, std::option::Option<i32>, u32)) -> u32 {
_0 = BitXor(move _9, move _10); // scope 1 at $DIR/exponential-or.rs:8:83: 8:88 _0 = BitXor(move _9, move _10); // scope 1 at $DIR/exponential-or.rs:8:83: 8:88
StorageDead(_10); // scope 1 at $DIR/exponential-or.rs:8:87: 8:88 StorageDead(_10); // scope 1 at $DIR/exponential-or.rs:8:87: 8:88
StorageDead(_9); // scope 1 at $DIR/exponential-or.rs:8:87: 8:88 StorageDead(_9); // scope 1 at $DIR/exponential-or.rs:8:87: 8:88
StorageDead(_8); // scope 0 at $DIR/exponential-or.rs:8:88: 8:89 StorageDead(_8); // scope 0 at $DIR/exponential-or.rs:8:87: 8:88
StorageDead(_7); // scope 0 at $DIR/exponential-or.rs:8:88: 8:89 StorageDead(_7); // scope 0 at $DIR/exponential-or.rs:8:87: 8:88
goto -> bb10; // scope 0 at $DIR/exponential-or.rs:7:5: 10:6 goto -> bb10; // scope 0 at $DIR/exponential-or.rs:7:5: 10:6
} }

View File

@ -137,7 +137,7 @@
StorageLive(_4); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15 StorageLive(_4); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15
_4 = ((_2 as Some).0: i32); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15 _4 = ((_2 as Some).0: i32); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15
_1 = _4; // scope 2 at $DIR/issue-73223.rs:3:20: 3:21 _1 = _4; // scope 2 at $DIR/issue-73223.rs:3:20: 3:21
StorageDead(_4); // scope 0 at $DIR/issue-73223.rs:3:21: 3:22 StorageDead(_4); // scope 0 at $DIR/issue-73223.rs:3:20: 3:21
StorageDead(_2); // scope 0 at $DIR/issue-73223.rs:5:6: 5:7 StorageDead(_2); // scope 0 at $DIR/issue-73223.rs:5:6: 5:7
StorageLive(_6); // scope 1 at $DIR/issue-73223.rs:7:9: 7:14 StorageLive(_6); // scope 1 at $DIR/issue-73223.rs:7:9: 7:14
StorageLive(_7); // scope 1 at $DIR/issue-73223.rs:7:22: 7:27 StorageLive(_7); // scope 1 at $DIR/issue-73223.rs:7:22: 7:27

View File

@ -137,7 +137,7 @@
StorageLive(_4); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15 StorageLive(_4); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15
_4 = ((_2 as Some).0: i32); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15 _4 = ((_2 as Some).0: i32); // scope 0 at $DIR/issue-73223.rs:3:14: 3:15
_1 = _4; // scope 2 at $DIR/issue-73223.rs:3:20: 3:21 _1 = _4; // scope 2 at $DIR/issue-73223.rs:3:20: 3:21
StorageDead(_4); // scope 0 at $DIR/issue-73223.rs:3:21: 3:22 StorageDead(_4); // scope 0 at $DIR/issue-73223.rs:3:20: 3:21
StorageDead(_2); // scope 0 at $DIR/issue-73223.rs:5:6: 5:7 StorageDead(_2); // scope 0 at $DIR/issue-73223.rs:5:6: 5:7
StorageLive(_6); // scope 1 at $DIR/issue-73223.rs:7:9: 7:14 StorageLive(_6); // scope 1 at $DIR/issue-73223.rs:7:9: 7:14
StorageLive(_7); // scope 1 at $DIR/issue-73223.rs:7:22: 7:27 StorageLive(_7); // scope 1 at $DIR/issue-73223.rs:7:22: 7:27

View File

@ -61,7 +61,7 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// mir::Constant // mir::Constant
// + span: $DIR/match-arm-scopes.rs:16:77: 16:78 // + span: $DIR/match-arm-scopes.rs:16:77: 16:78
// + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) }
drop(_7) -> [return: bb19, unwind: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 drop(_7) -> [return: bb19, unwind: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
} }
bb6: { bb6: {
@ -90,9 +90,9 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// + span: $DIR/match-arm-scopes.rs:16:59: 16:60 // + span: $DIR/match-arm-scopes.rs:16:59: 16:60
// + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) }
StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb11; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 goto -> bb11; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60
} }
@ -109,7 +109,7 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb12: { bb12: {
StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageLive(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 StorageLive(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18
_5 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18 _5 = (_2.1: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:17: 16:18
StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21 StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:20: 16:21
@ -118,9 +118,9 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb13: { bb13: {
StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb2; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 goto -> bb2; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73
} }
@ -150,14 +150,14 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// + span: $DIR/match-arm-scopes.rs:16:59: 16:60 // + span: $DIR/match-arm-scopes.rs:16:59: 16:60
// + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) }
StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb11; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 goto -> bb11; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60
} }
bb17: { bb17: {
StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageLive(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 StorageLive(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27
_5 = (_2.0: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27 _5 = (_2.0: bool); // scope 0 at $DIR/match-arm-scopes.rs:16:26: 16:27
StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37 StorageLive(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:36: 16:37
@ -166,17 +166,17 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb18: { bb18: {
StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb3; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 goto -> bb3; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73
} }
bb19: { bb19: {
StorageDead(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6
} }
@ -188,7 +188,7 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// mir::Constant // mir::Constant
// + span: $DIR/match-arm-scopes.rs:17:41: 17:42 // + span: $DIR/match-arm-scopes.rs:17:41: 17:42
// + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) }
drop(_16) -> [return: bb22, unwind: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 drop(_16) -> [return: bb22, unwind: bb10]; // scope 0 at $DIR/match-arm-scopes.rs:17:41: 17:42
} }
bb21: { bb21: {
@ -200,8 +200,8 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb22: { bb22: {
StorageDead(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 StorageDead(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:41: 17:42
StorageDead(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 StorageDead(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:41: 17:42
goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 goto -> bb23; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6
} }

View File

@ -74,7 +74,7 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// mir::Constant // mir::Constant
// + span: $DIR/match-arm-scopes.rs:16:77: 16:78 // + span: $DIR/match-arm-scopes.rs:16:77: 16:78
// + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) }
drop(_7) -> [return: bb24, unwind: bb14]; // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 drop(_7) -> [return: bb24, unwind: bb14]; // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
} }
bb9: { bb9: {
@ -110,9 +110,9 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// + span: $DIR/match-arm-scopes.rs:16:59: 16:60 // + span: $DIR/match-arm-scopes.rs:16:59: 16:60
// + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) }
StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_10); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb15; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 goto -> bb15; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60
} }
@ -129,7 +129,7 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb16: { bb16: {
StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
FakeRead(ForMatchGuard, _3); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForMatchGuard, _3); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
@ -142,9 +142,9 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb17: { bb17: {
StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_9); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
falseEdge -> [real: bb3, imaginary: bb4]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 falseEdge -> [real: bb3, imaginary: bb4]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73
} }
@ -181,14 +181,14 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// + span: $DIR/match-arm-scopes.rs:16:59: 16:60 // + span: $DIR/match-arm-scopes.rs:16:59: 16:60
// + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) }
StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 StorageDead(_13); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb15; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60 goto -> bb15; // scope 0 at $DIR/match-arm-scopes.rs:16:52: 16:60
} }
bb22: { bb22: {
StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
FakeRead(ForMatchGuard, _3); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForMatchGuard, _3); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73 FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match-arm-scopes.rs:16:72: 16:73
@ -201,17 +201,17 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb23: { bb23: {
StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_12); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
falseEdge -> [real: bb5, imaginary: bb6]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73 falseEdge -> [real: bb5, imaginary: bb6]; // scope 0 at $DIR/match-arm-scopes.rs:16:42: 16:73
} }
bb24: { bb24: {
StorageDead(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_7); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_5); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_8); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:78: 16:79 StorageDead(_6); // scope 0 at $DIR/match-arm-scopes.rs:16:77: 16:78
goto -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 goto -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6
} }
@ -223,7 +223,7 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
// mir::Constant // mir::Constant
// + span: $DIR/match-arm-scopes.rs:17:41: 17:42 // + span: $DIR/match-arm-scopes.rs:17:41: 17:42
// + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) }
drop(_16) -> [return: bb27, unwind: bb14]; // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 drop(_16) -> [return: bb27, unwind: bb14]; // scope 0 at $DIR/match-arm-scopes.rs:17:41: 17:42
} }
bb26: { bb26: {
@ -235,8 +235,8 @@ fn complicated_match(_1: bool, _2: (bool, bool, std::string::String)) -> i32 {
} }
bb27: { bb27: {
StorageDead(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 StorageDead(_16); // scope 0 at $DIR/match-arm-scopes.rs:17:41: 17:42
StorageDead(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:42: 17:43 StorageDead(_15); // scope 0 at $DIR/match-arm-scopes.rs:17:41: 17:42
goto -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6 goto -> bb28; // scope 0 at $DIR/match-arm-scopes.rs:15:5: 18:6
} }

View File

@ -97,7 +97,7 @@ fn full_tested_match() -> () {
} }
bb8: { bb8: {
StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:16:36: 16:37
FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match_false_edges.rs:16:26: 16:27 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match_false_edges.rs:16:26: 16:27
FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match_false_edges.rs:16:26: 16:27 FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match_false_edges.rs:16:26: 16:27
StorageLive(_5); // scope 0 at $DIR/match_false_edges.rs:16:14: 16:15 StorageLive(_5); // scope 0 at $DIR/match_false_edges.rs:16:14: 16:15
@ -112,14 +112,14 @@ fn full_tested_match() -> () {
// + span: $DIR/match_false_edges.rs:16:32: 16:33 // + span: $DIR/match_false_edges.rs:16:32: 16:33
// + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) }
StorageDead(_8); // scope 2 at $DIR/match_false_edges.rs:16:36: 16:37 StorageDead(_8); // scope 2 at $DIR/match_false_edges.rs:16:36: 16:37
StorageDead(_5); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_5); // scope 0 at $DIR/match_false_edges.rs:16:36: 16:37
StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:16:36: 16:37
goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6
} }
bb9: { bb9: {
StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:16:36: 16:37
StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:16:37: 16:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:16:36: 16:37
goto -> bb4; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27 goto -> bb4; // scope 0 at $DIR/match_false_edges.rs:16:20: 16:27
} }
@ -136,7 +136,7 @@ fn full_tested_match() -> () {
// + span: $DIR/match_false_edges.rs:17:21: 17:22 // + span: $DIR/match_false_edges.rs:17:21: 17:22
// + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) }
StorageDead(_10); // scope 3 at $DIR/match_false_edges.rs:17:25: 17:26 StorageDead(_10); // scope 3 at $DIR/match_false_edges.rs:17:25: 17:26
StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:17:26: 17:27 StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:17:25: 17:26
goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6 goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:15:13: 19:6
} }

View File

@ -62,7 +62,7 @@ fn full_tested_match2() -> () {
// + span: $DIR/match_false_edges.rs:29:21: 29:22 // + span: $DIR/match_false_edges.rs:29:21: 29:22
// + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) }
StorageDead(_10); // scope 3 at $DIR/match_false_edges.rs:29:25: 29:26 StorageDead(_10); // scope 3 at $DIR/match_false_edges.rs:29:25: 29:26
StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:29:26: 29:27 StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:29:25: 29:26
goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6
} }
@ -89,7 +89,7 @@ fn full_tested_match2() -> () {
} }
bb8: { bb8: {
StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:27:36: 27:37
FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match_false_edges.rs:27:26: 27:27 FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/match_false_edges.rs:27:26: 27:27
FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match_false_edges.rs:27:26: 27:27 FakeRead(ForGuardBinding, _6); // scope 0 at $DIR/match_false_edges.rs:27:26: 27:27
StorageLive(_5); // scope 0 at $DIR/match_false_edges.rs:27:14: 27:15 StorageLive(_5); // scope 0 at $DIR/match_false_edges.rs:27:14: 27:15
@ -104,14 +104,14 @@ fn full_tested_match2() -> () {
// + span: $DIR/match_false_edges.rs:27:32: 27:33 // + span: $DIR/match_false_edges.rs:27:32: 27:33
// + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) }
StorageDead(_8); // scope 2 at $DIR/match_false_edges.rs:27:36: 27:37 StorageDead(_8); // scope 2 at $DIR/match_false_edges.rs:27:36: 27:37
StorageDead(_5); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_5); // scope 0 at $DIR/match_false_edges.rs:27:36: 27:37
StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:27:36: 27:37
goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6 goto -> bb11; // scope 0 at $DIR/match_false_edges.rs:26:13: 30:6
} }
bb9: { bb9: {
StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:27:36: 27:37
StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:27:37: 27:38 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:27:36: 27:37
falseEdge -> [real: bb4, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27 falseEdge -> [real: bb4, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:27:20: 27:27
} }

View File

@ -70,7 +70,7 @@ fn main() -> () {
// mir::Constant // mir::Constant
// + span: $DIR/match_false_edges.rs:39:15: 39:16 // + span: $DIR/match_false_edges.rs:39:15: 39:16
// + literal: Const { ty: i32, val: Value(Scalar(0x00000004)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000004)) }
StorageDead(_14); // scope 0 at $DIR/match_false_edges.rs:39:16: 39:17 StorageDead(_14); // scope 0 at $DIR/match_false_edges.rs:39:15: 39:16
goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6
} }
@ -97,7 +97,7 @@ fn main() -> () {
} }
bb8: { bb8: {
StorageDead(_8); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_8); // scope 0 at $DIR/match_false_edges.rs:36:32: 36:33
FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/match_false_edges.rs:36:27: 36:28 FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/match_false_edges.rs:36:27: 36:28
FakeRead(ForGuardBinding, _7); // scope 0 at $DIR/match_false_edges.rs:36:27: 36:28 FakeRead(ForGuardBinding, _7); // scope 0 at $DIR/match_false_edges.rs:36:27: 36:28
StorageLive(_6); // scope 0 at $DIR/match_false_edges.rs:36:14: 36:16 StorageLive(_6); // scope 0 at $DIR/match_false_edges.rs:36:14: 36:16
@ -109,14 +109,14 @@ fn main() -> () {
// mir::Constant // mir::Constant
// + span: $DIR/match_false_edges.rs:36:32: 36:33 // + span: $DIR/match_false_edges.rs:36:32: 36:33
// + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000001)) }
StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_6); // scope 0 at $DIR/match_false_edges.rs:36:32: 36:33
StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:36:32: 36:33
goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6
} }
bb9: { bb9: {
StorageDead(_8); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_8); // scope 0 at $DIR/match_false_edges.rs:36:32: 36:33
StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:36:33: 36:34 StorageDead(_7); // scope 0 at $DIR/match_false_edges.rs:36:32: 36:33
falseEdge -> [real: bb2, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28 falseEdge -> [real: bb2, imaginary: bb2]; // scope 0 at $DIR/match_false_edges.rs:36:21: 36:28
} }
@ -130,7 +130,7 @@ fn main() -> () {
// mir::Constant // mir::Constant
// + span: $DIR/match_false_edges.rs:37:15: 37:16 // + span: $DIR/match_false_edges.rs:37:15: 37:16
// + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000002)) }
StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:37:16: 37:17 StorageDead(_9); // scope 0 at $DIR/match_false_edges.rs:37:15: 37:16
goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6
} }
@ -156,7 +156,7 @@ fn main() -> () {
} }
bb13: { bb13: {
StorageDead(_12); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_12); // scope 0 at $DIR/match_false_edges.rs:38:33: 38:34
FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29 FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29
FakeRead(ForGuardBinding, _11); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29 FakeRead(ForGuardBinding, _11); // scope 0 at $DIR/match_false_edges.rs:38:28: 38:29
StorageLive(_10); // scope 0 at $DIR/match_false_edges.rs:38:14: 38:15 StorageLive(_10); // scope 0 at $DIR/match_false_edges.rs:38:14: 38:15
@ -168,14 +168,14 @@ fn main() -> () {
// mir::Constant // mir::Constant
// + span: $DIR/match_false_edges.rs:38:33: 38:34 // + span: $DIR/match_false_edges.rs:38:33: 38:34
// + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) } // + literal: Const { ty: i32, val: Value(Scalar(0x00000003)) }
StorageDead(_10); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_10); // scope 0 at $DIR/match_false_edges.rs:38:33: 38:34
StorageDead(_11); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_11); // scope 0 at $DIR/match_false_edges.rs:38:33: 38:34
goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6 goto -> bb15; // scope 0 at $DIR/match_false_edges.rs:35:13: 40:6
} }
bb14: { bb14: {
StorageDead(_12); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_12); // scope 0 at $DIR/match_false_edges.rs:38:33: 38:34
StorageDead(_11); // scope 0 at $DIR/match_false_edges.rs:38:34: 38:35 StorageDead(_11); // scope 0 at $DIR/match_false_edges.rs:38:33: 38:34
falseEdge -> [real: bb4, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29 falseEdge -> [real: bb4, imaginary: bb4]; // scope 0 at $DIR/match_false_edges.rs:38:20: 38:29
} }

View File

@ -117,7 +117,7 @@ fn main() -> () {
} }
bb10: { bb10: {
StorageDead(_9); // scope 2 at $DIR/match_test.rs:13:24: 13:25 StorageDead(_9); // scope 2 at $DIR/match_test.rs:13:23: 13:24
FakeRead(ForMatchGuard, _8); // scope 2 at $DIR/match_test.rs:13:18: 13:19 FakeRead(ForMatchGuard, _8); // scope 2 at $DIR/match_test.rs:13:18: 13:19
_3 = const 0_i32; // scope 2 at $DIR/match_test.rs:13:23: 13:24 _3 = const 0_i32; // scope 2 at $DIR/match_test.rs:13:23: 13:24
// ty::Const // ty::Const
@ -130,7 +130,7 @@ fn main() -> () {
} }
bb11: { bb11: {
StorageDead(_9); // scope 2 at $DIR/match_test.rs:13:24: 13:25 StorageDead(_9); // scope 2 at $DIR/match_test.rs:13:23: 13:24
falseEdge -> [real: bb3, imaginary: bb6]; // scope 2 at $DIR/match_test.rs:13:18: 13:19 falseEdge -> [real: bb3, imaginary: bb6]; // scope 2 at $DIR/match_test.rs:13:18: 13:19
} }

View File

@ -43,7 +43,7 @@ fn unwrap(_1: std::option::Option<T>) -> T {
StorageLive(_3); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:14: 9:15 StorageLive(_3); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:14: 9:15
_3 = move ((_1 as Some).0: T); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:14: 9:15 _3 = move ((_1 as Some).0: T); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:14: 9:15
_0 = move _3; // scope 1 at $DIR/no-drop-for-inactive-variant.rs:9:20: 9:21 _0 = move _3; // scope 1 at $DIR/no-drop-for-inactive-variant.rs:9:20: 9:21
StorageDead(_3); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:21: 9:22 StorageDead(_3); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:9:20: 9:21
_6 = discriminant(_1); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:1: 12:2 _6 = discriminant(_1); // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:1: 12:2
return; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:2: 12:2 return; // scope 0 at $DIR/no-drop-for-inactive-variant.rs:12:2: 12:2
} }

View File

@ -53,7 +53,7 @@
} }
bb5: { bb5: {
StorageDead(_8); // scope 0 at $DIR/remove_fake_borrows.rs:8:26: 8:27 StorageDead(_8); // scope 0 at $DIR/remove_fake_borrows.rs:8:25: 8:26
- FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21 - FakeRead(ForMatchGuard, _4); // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21
- FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21 - FakeRead(ForMatchGuard, _5); // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21
- FakeRead(ForMatchGuard, _6); // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21 - FakeRead(ForMatchGuard, _6); // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21
@ -73,7 +73,7 @@
} }
bb6: { bb6: {
StorageDead(_8); // scope 0 at $DIR/remove_fake_borrows.rs:8:26: 8:27 StorageDead(_8); // scope 0 at $DIR/remove_fake_borrows.rs:8:25: 8:26
goto -> bb1; // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21 goto -> bb1; // scope 0 at $DIR/remove_fake_borrows.rs:8:20: 8:21
} }

View File

@ -61,7 +61,7 @@
((_2 as Foo).0: u8) = move _5; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35 ((_2 as Foo).0: u8) = move _5; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35
discriminant(_2) = 0; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35 discriminant(_2) = 0; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35
StorageDead(_5); // scope 3 at $DIR/simplify-arm-identity.rs:20:34: 20:35 StorageDead(_5); // scope 3 at $DIR/simplify-arm-identity.rs:20:34: 20:35
StorageDead(_4); // scope 1 at $DIR/simplify-arm-identity.rs:20:35: 20:36 StorageDead(_4); // scope 1 at $DIR/simplify-arm-identity.rs:20:34: 20:35
goto -> bb4; // scope 1 at $DIR/simplify-arm-identity.rs:19:18: 22:6 goto -> bb4; // scope 1 at $DIR/simplify-arm-identity.rs:19:18: 22:6
} }

View File

@ -61,7 +61,7 @@
((_2 as Foo).0: u8) = move _5; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35 ((_2 as Foo).0: u8) = move _5; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35
discriminant(_2) = 0; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35 discriminant(_2) = 0; // scope 3 at $DIR/simplify-arm-identity.rs:20:24: 20:35
StorageDead(_5); // scope 3 at $DIR/simplify-arm-identity.rs:20:34: 20:35 StorageDead(_5); // scope 3 at $DIR/simplify-arm-identity.rs:20:34: 20:35
StorageDead(_4); // scope 1 at $DIR/simplify-arm-identity.rs:20:35: 20:36 StorageDead(_4); // scope 1 at $DIR/simplify-arm-identity.rs:20:34: 20:35
goto -> bb4; // scope 1 at $DIR/simplify-arm-identity.rs:19:18: 22:6 goto -> bb4; // scope 1 at $DIR/simplify-arm-identity.rs:19:18: 22:6
} }

View File

@ -33,7 +33,7 @@
((_0 as Some).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27 ((_0 as Some).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27
discriminant(_0) = 1; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27 discriminant(_0) = 1; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27
StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:11:26: 11:27 StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:11:26: 11:27
StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:11:27: 11:28 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:11:26: 11:27
goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6
} }

View File

@ -33,7 +33,7 @@
((_0 as Some).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27 ((_0 as Some).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27
discriminant(_0) = 1; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27 discriminant(_0) = 1; // scope 1 at $DIR/simplify-arm.rs:11:20: 11:27
StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:11:26: 11:27 StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:11:26: 11:27
StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:11:27: 11:28 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:11:26: 11:27
goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6 goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:10:5: 13:6
} }

View File

@ -29,7 +29,7 @@
((_0 as Err).0: i32) = move _6; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25 ((_0 as Err).0: i32) = move _6; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25
discriminant(_0) = 1; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25 discriminant(_0) = 1; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25
StorageDead(_6); // scope 2 at $DIR/simplify-arm.rs:19:24: 19:25 StorageDead(_6); // scope 2 at $DIR/simplify-arm.rs:19:24: 19:25
StorageDead(_5); // scope 0 at $DIR/simplify-arm.rs:19:25: 19:26 StorageDead(_5); // scope 0 at $DIR/simplify-arm.rs:19:24: 19:25
goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6 goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6
} }
@ -45,7 +45,7 @@
((_0 as Ok).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23 ((_0 as Ok).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23
discriminant(_0) = 0; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23 discriminant(_0) = 0; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23
StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:18:22: 18:23 StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:18:22: 18:23
StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:18:23: 18:24 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:18:22: 18:23
goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6 goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6
} }

View File

@ -29,7 +29,7 @@
((_0 as Err).0: i32) = move _6; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25 ((_0 as Err).0: i32) = move _6; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25
discriminant(_0) = 1; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25 discriminant(_0) = 1; // scope 2 at $DIR/simplify-arm.rs:19:19: 19:25
StorageDead(_6); // scope 2 at $DIR/simplify-arm.rs:19:24: 19:25 StorageDead(_6); // scope 2 at $DIR/simplify-arm.rs:19:24: 19:25
StorageDead(_5); // scope 0 at $DIR/simplify-arm.rs:19:25: 19:26 StorageDead(_5); // scope 0 at $DIR/simplify-arm.rs:19:24: 19:25
goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6 goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6
} }
@ -45,7 +45,7 @@
((_0 as Ok).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23 ((_0 as Ok).0: u8) = move _4; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23
discriminant(_0) = 0; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23 discriminant(_0) = 0; // scope 1 at $DIR/simplify-arm.rs:18:18: 18:23
StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:18:22: 18:23 StorageDead(_4); // scope 1 at $DIR/simplify-arm.rs:18:22: 18:23
StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:18:23: 18:24 StorageDead(_3); // scope 0 at $DIR/simplify-arm.rs:18:22: 18:23
goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6 goto -> bb4; // scope 0 at $DIR/simplify-arm.rs:17:5: 20:6
} }

View File

@ -115,7 +115,7 @@
bb8: { bb8: {
StorageDead(_5); // scope 1 at $DIR/simplify_try_if_let.rs:31:13: 31:14 StorageDead(_5); // scope 1 at $DIR/simplify_try_if_let.rs:31:13: 31:14
StorageDead(_4); // scope 0 at $DIR/simplify_try_if_let.rs:32:9: 32:10 StorageDead(_4); // scope 0 at $DIR/simplify_try_if_let.rs:31:13: 31:14
goto -> bb9; // scope 0 at $DIR/simplify_try_if_let.rs:21:9: 32:10 goto -> bb9; // scope 0 at $DIR/simplify_try_if_let.rs:21:9: 32:10
} }

View File

@ -52,7 +52,7 @@ impl CodegenBackend for TheBackend {
fn provide(&self, providers: &mut Providers) { fn provide(&self, providers: &mut Providers) {
rustc_symbol_mangling::provide(providers); rustc_symbol_mangling::provide(providers);
providers.target_features_whitelist = |tcx, _cnum| { providers.supported_target_features = |tcx, _cnum| {
Default::default() // Just a dummy Default::default() // Just a dummy
}; };
providers.is_reachable_non_generic = |_tcx, _defid| true; providers.is_reachable_non_generic = |_tcx, _defid| true;

View File

@ -5,7 +5,7 @@ from os.path import isfile, join
from subprocess import PIPE, Popen from subprocess import PIPE, Popen
# This is a whitelist of files which are stable crates or simply are not crates, # This is a list of files which are stable crates or simply are not crates,
# we don't check for the instability of these crates as they're all stable! # we don't check for the instability of these crates as they're all stable!
STABLE_CRATES = ['std', 'alloc', 'core', 'proc_macro', STABLE_CRATES = ['std', 'alloc', 'core', 'proc_macro',
'rsbegin.o', 'rsend.o', 'dllcrt2.o', 'crt2.o', 'clang_rt'] 'rsbegin.o', 'rsend.o', 'dllcrt2.o', 'crt2.o', 'clang_rt']

View File

@ -21,19 +21,19 @@ use rustc_span::source_map;
#[plugin_registrar] #[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) { pub fn plugin_registrar(reg: &mut Registry) {
reg.lint_store.register_lints(&[&MISSING_WHITELISTED_ATTR]); reg.lint_store.register_lints(&[&MISSING_ALLOWED_ATTR]);
reg.lint_store.register_late_pass(|| box MissingWhitelistedAttrPass); reg.lint_store.register_late_pass(|| box MissingAllowedAttrPass);
} }
declare_lint! { declare_lint! {
MISSING_WHITELISTED_ATTR, MISSING_ALLOWED_ATTR,
Deny, Deny,
"Checks for missing `whitelisted_attr` attribute" "Checks for missing `allowed_attr` attribute"
} }
declare_lint_pass!(MissingWhitelistedAttrPass => [MISSING_WHITELISTED_ATTR]); declare_lint_pass!(MissingAllowedAttrPass => [MISSING_ALLOWED_ATTR]);
impl<'tcx> LateLintPass<'tcx> for MissingWhitelistedAttrPass { impl<'tcx> LateLintPass<'tcx> for MissingAllowedAttrPass {
fn check_fn( fn check_fn(
&mut self, &mut self,
cx: &LateContext<'tcx>, cx: &LateContext<'tcx>,
@ -48,10 +48,10 @@ impl<'tcx> LateLintPass<'tcx> for MissingWhitelistedAttrPass {
_ => cx.tcx.hir().expect_item(cx.tcx.hir().get_parent_item(id)), _ => cx.tcx.hir().expect_item(cx.tcx.hir().get_parent_item(id)),
}; };
let whitelisted = |attr| pprust::attribute_to_string(attr).contains("whitelisted_attr"); let allowed = |attr| pprust::attribute_to_string(attr).contains("allowed_attr");
if !item.attrs.iter().any(whitelisted) { if !item.attrs.iter().any(allowed) {
cx.lint(MISSING_WHITELISTED_ATTR, |lint| { cx.lint(MISSING_ALLOWED_ATTR, |lint| {
lint.build("Missing 'whitelisted_attr' attribute").set_span(span).emit() lint.build("Missing 'allowed_attr' attribute").set_span(span).emit()
}); });
} }
} }

View File

@ -6,5 +6,5 @@
#![plugin(issue_40001_plugin)] //~ WARNING compiler plugins are deprecated #![plugin(issue_40001_plugin)] //~ WARNING compiler plugins are deprecated
#![register_tool(plugin)] #![register_tool(plugin)]
#[plugin::whitelisted_attr] #[plugin::allowed_attr]
fn main() {} fn main() {}

View File

@ -7,7 +7,7 @@ use std::arch::x86_64::{_mm256_setzero_ps, _mm_setzero_ps};
fn main() { fn main() {
unsafe { unsafe {
// Types must be in the whitelist for the register class // Types must be listed in the register class.
asm!("{}", in(reg) 0i128); asm!("{}", in(reg) 0i128);
//~^ ERROR type `i128` cannot be used with this register class //~^ ERROR type `i128` cannot be used with this register class

View File

@ -0,0 +1,6 @@
// run-pass
// compile-flags: -Z control-flow-guard
pub fn main() {
println!("hello, world");
}

View File

@ -0,0 +1,13 @@
// check-pass
fn main() {}
const fn foo() {
let x = [1, 2, 3, 4, 5];
let y: &[_] = &x;
struct Foo<T: ?Sized>(bool, T);
let x: Foo<[u8; 3]> = Foo(true, [1, 2, 3]);
let y: &Foo<[u8]> = &x;
}

View File

@ -88,9 +88,10 @@ const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::transmute
//~^ ERROR is undefined behavior //~^ ERROR is undefined behavior
// All variants are uninhabited but also have data. // All variants are uninhabited but also have data.
const BAD_UNINHABITED_WITH_DATA1: Result<(i32, Never), (i32, !)> = unsafe { mem::transmute(1u64) }; // Use `0` as constant to make behavior endianness-independent.
const BAD_UNINHABITED_WITH_DATA1: Result<(i32, Never), (i32, !)> = unsafe { mem::transmute(0u64) };
//~^ ERROR is undefined behavior //~^ ERROR is undefined behavior
const BAD_UNINHABITED_WITH_DATA2: Result<(i32, !), (i32, Never)> = unsafe { mem::transmute(1u64) }; const BAD_UNINHABITED_WITH_DATA2: Result<(i32, !), (i32, Never)> = unsafe { mem::transmute(0u64) };
//~^ ERROR is undefined behavior //~^ ERROR is undefined behavior
fn main() { fn main() {

View File

@ -87,18 +87,18 @@ LL | const BAD_OPTION_CHAR: Option<(char, char)> = Some(('x', unsafe { mem::tran
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
error[E0080]: it is undefined behavior to use this value error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:91:1 --> $DIR/ub-enum.rs:92:1
| |
LL | const BAD_UNINHABITED_WITH_DATA1: Result<(i32, Never), (i32, !)> = unsafe { mem::transmute(1u64) }; LL | const BAD_UNINHABITED_WITH_DATA1: Result<(i32, Never), (i32, !)> = unsafe { mem::transmute(0u64) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a value of the never type `!` at .<enum-variant(Err)>.0.1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a value of uninhabited type Never at .<enum-variant(Ok)>.0.1
| |
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
error[E0080]: it is undefined behavior to use this value error[E0080]: it is undefined behavior to use this value
--> $DIR/ub-enum.rs:93:1 --> $DIR/ub-enum.rs:94:1
| |
LL | const BAD_UNINHABITED_WITH_DATA2: Result<(i32, !), (i32, Never)> = unsafe { mem::transmute(1u64) }; LL | const BAD_UNINHABITED_WITH_DATA2: Result<(i32, !), (i32, Never)> = unsafe { mem::transmute(0u64) };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a value of uninhabited type Never at .<enum-variant(Err)>.0.1 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type validation failed: encountered a value of the never type `!` at .<enum-variant(Ok)>.0.1
| |
= note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior. = note: The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.

View File

@ -1,7 +1,6 @@
#![feature(const_extern_fn)] #![feature(const_extern_fn)]
const extern fn unsize(x: &[u8; 3]) -> &[u8] { x } const extern fn unsize(x: &[u8; 3]) -> &[u8] { x }
//~^ ERROR unsizing casts are not allowed in const fn
const unsafe extern "C" fn closure() -> fn() { || {} } const unsafe extern "C" fn closure() -> fn() { || {} }
//~^ ERROR function pointers in const fn are unstable //~^ ERROR function pointers in const fn are unstable
const unsafe extern fn use_float() { 1.0 + 1.0; } const unsafe extern fn use_float() { 1.0 + 1.0; }

View File

@ -1,14 +1,5 @@
error[E0723]: unsizing casts are not allowed in const fn
--> $DIR/const-extern-fn-min-const-fn.rs:3:48
|
LL | const extern fn unsize(x: &[u8; 3]) -> &[u8] { x }
| ^
|
= note: see issue #57563 <https://github.com/rust-lang/rust/issues/57563> for more information
= help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: function pointers in const fn are unstable error[E0723]: function pointers in const fn are unstable
--> $DIR/const-extern-fn-min-const-fn.rs:5:41 --> $DIR/const-extern-fn-min-const-fn.rs:4:41
| |
LL | const unsafe extern "C" fn closure() -> fn() { || {} } LL | const unsafe extern "C" fn closure() -> fn() { || {} }
| ^^^^ | ^^^^
@ -17,7 +8,7 @@ LL | const unsafe extern "C" fn closure() -> fn() { || {} }
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: only int, `bool` and `char` operations are stable in const fn error[E0723]: only int, `bool` and `char` operations are stable in const fn
--> $DIR/const-extern-fn-min-const-fn.rs:7:38 --> $DIR/const-extern-fn-min-const-fn.rs:6:38
| |
LL | const unsafe extern fn use_float() { 1.0 + 1.0; } LL | const unsafe extern fn use_float() { 1.0 + 1.0; }
| ^^^^^^^^^ | ^^^^^^^^^
@ -26,7 +17,7 @@ LL | const unsafe extern fn use_float() { 1.0 + 1.0; }
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: casting pointers to ints is unstable in const fn error[E0723]: casting pointers to ints is unstable in const fn
--> $DIR/const-extern-fn-min-const-fn.rs:9:48 --> $DIR/const-extern-fn-min-const-fn.rs:8:48
| |
LL | const extern "C" fn ptr_cast(val: *const u8) { val as usize; } LL | const extern "C" fn ptr_cast(val: *const u8) { val as usize; }
| ^^^^^^^^^^^^ | ^^^^^^^^^^^^
@ -34,6 +25,6 @@ LL | const extern "C" fn ptr_cast(val: *const u8) { val as usize; }
= note: see issue #57563 <https://github.com/rust-lang/rust/issues/57563> for more information = note: see issue #57563 <https://github.com/rust-lang/rust/issues/57563> for more information
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error: aborting due to 4 previous errors error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0723`. For more information about this error, try `rustc --explain E0723`.

View File

@ -1,7 +1,5 @@
// check-pass // check-pass
#![feature(const_forget)]
use std::mem::forget; use std::mem::forget;
const _: () = forget(0i32); const _: () = forget(0i32);

View File

@ -1,7 +1,6 @@
fn main() {} fn main() {}
const fn unsize(x: &[u8; 3]) -> &[u8] { x } const fn unsize(x: &[u8; 3]) -> &[u8] { x }
//~^ ERROR unsizing casts are not allowed in const fn
const fn closure() -> fn() { || {} } const fn closure() -> fn() { || {} }
//~^ ERROR function pointers in const fn are unstable //~^ ERROR function pointers in const fn are unstable
const fn closure2() { const fn closure2() {

View File

@ -1,14 +1,5 @@
error[E0723]: unsizing casts are not allowed in const fn
--> $DIR/cast_errors.rs:3:41
|
LL | const fn unsize(x: &[u8; 3]) -> &[u8] { x }
| ^
|
= note: see issue #57563 <https://github.com/rust-lang/rust/issues/57563> for more information
= help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: function pointers in const fn are unstable error[E0723]: function pointers in const fn are unstable
--> $DIR/cast_errors.rs:5:23 --> $DIR/cast_errors.rs:4:23
| |
LL | const fn closure() -> fn() { || {} } LL | const fn closure() -> fn() { || {} }
| ^^^^ | ^^^^
@ -17,7 +8,7 @@ LL | const fn closure() -> fn() { || {} }
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: function pointers in const fn are unstable error[E0723]: function pointers in const fn are unstable
--> $DIR/cast_errors.rs:8:5 --> $DIR/cast_errors.rs:7:5
| |
LL | (|| {}) as fn(); LL | (|| {}) as fn();
| ^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^
@ -26,7 +17,7 @@ LL | (|| {}) as fn();
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: function pointers in const fn are unstable error[E0723]: function pointers in const fn are unstable
--> $DIR/cast_errors.rs:11:28 --> $DIR/cast_errors.rs:10:28
| |
LL | const fn reify(f: fn()) -> unsafe fn() { f } LL | const fn reify(f: fn()) -> unsafe fn() { f }
| ^^^^^^^^^^^ | ^^^^^^^^^^^
@ -35,7 +26,7 @@ LL | const fn reify(f: fn()) -> unsafe fn() { f }
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error[E0723]: function pointers in const fn are unstable error[E0723]: function pointers in const fn are unstable
--> $DIR/cast_errors.rs:13:21 --> $DIR/cast_errors.rs:12:21
| |
LL | const fn reify2() { main as unsafe fn(); } LL | const fn reify2() { main as unsafe fn(); }
| ^^^^^^^^^^^^^^^^^^^ | ^^^^^^^^^^^^^^^^^^^
@ -43,6 +34,6 @@ LL | const fn reify2() { main as unsafe fn(); }
= note: see issue #57563 <https://github.com/rust-lang/rust/issues/57563> for more information = note: see issue #57563 <https://github.com/rust-lang/rust/issues/57563> for more information
= help: add `#![feature(const_fn)]` to the crate attributes to enable = help: add `#![feature(const_fn)]` to the crate attributes to enable
error: aborting due to 5 previous errors error: aborting due to 4 previous errors
For more information about this error, try `rustc --explain E0723`. For more information about this error, try `rustc --explain E0723`.

View File

@ -0,0 +1,9 @@
#![allow(dead_code)]
#[repr(i128)] //~ ERROR: attribute should be applied to enum
struct Foo;
#[repr(u128)] //~ ERROR: attribute should be applied to enum
struct Bar;
fn main() {}

View File

@ -0,0 +1,19 @@
error[E0517]: attribute should be applied to enum
--> $DIR/issue-74082.rs:3:8
|
LL | #[repr(i128)]
| ^^^^
LL | struct Foo;
| ----------- not an enum
error[E0517]: attribute should be applied to enum
--> $DIR/issue-74082.rs:6:8
|
LL | #[repr(u128)]
| ^^^^
LL | struct Bar;
| ----------- not an enum
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0517`.

View File

@ -0,0 +1,13 @@
fn main() {
let mut args = std::env::args_os();
let _arg = match args.next() {
Some(arg) => {
match arg.to_str() {
//~^ ERROR `arg` does not live long enough
Some(s) => s,
None => return,
}
}
None => return,
};
}

View File

@ -0,0 +1,15 @@
error[E0597]: `arg` does not live long enough
--> $DIR/issue-74050-end-span.rs:5:19
|
LL | let _arg = match args.next() {
| ---- borrow later stored here
LL | Some(arg) => {
LL | match arg.to_str() {
| ^^^ borrowed value does not live long enough
...
LL | }
| - `arg` dropped here while still borrowed
error: aborting due to previous error
For more information about this error, try `rustc --explain E0597`.

View File

@ -0,0 +1,17 @@
struct S(u8, (u8, u8));
macro_rules! generate_field_accesses {
($a:tt, $b:literal, $c:expr) => {
let s = S(0, (0, 0));
s.$a; // OK
{ s.$b; } //~ ERROR unexpected token: `1.1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
{ s.$c; } //~ ERROR unexpected token: `1.1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
};
}
fn main() {
generate_field_accesses!(1.1, 1.1, 1.1);
}

View File

@ -0,0 +1,46 @@
error: unexpected token: `1.1`
--> $DIR/float-field-interpolated.rs:8:13
|
LL | { s.$b; }
| ^^
...
LL | generate_field_accesses!(1.1, 1.1, 1.1);
| ---------------------------------------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
--> $DIR/float-field-interpolated.rs:8:13
|
LL | { s.$b; }
| ^^ expected one of `.`, `;`, `?`, `}`, or an operator
...
LL | generate_field_accesses!(1.1, 1.1, 1.1);
| ---------------------------------------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: unexpected token: `1.1`
--> $DIR/float-field-interpolated.rs:10:13
|
LL | { s.$c; }
| ^^
...
LL | generate_field_accesses!(1.1, 1.1, 1.1);
| ---------------------------------------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1`
--> $DIR/float-field-interpolated.rs:10:13
|
LL | { s.$c; }
| ^^ expected one of `.`, `;`, `?`, `}`, or an operator
...
LL | generate_field_accesses!(1.1, 1.1, 1.1);
| ---------------------------------------- in this macro invocation
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 4 previous errors

View File

@ -0,0 +1,62 @@
struct S(u8, (u8, u8));
fn main() {
let s = S(0, (0, 0));
s.1e1; //~ ERROR no field `1e1` on type `S`
s.1.; //~ ERROR unexpected token: `;`
s.1.1;
s.1.1e1; //~ ERROR no field `1e1` on type `(u8, u8)`
{ s.1e+; } //~ ERROR unexpected token: `1e+`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e+`
//~| ERROR expected at least one digit in exponent
{ s.1e-; } //~ ERROR unexpected token: `1e-`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e-`
//~| ERROR expected at least one digit in exponent
{ s.1e+1; } //~ ERROR unexpected token: `1e+1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e+1`
{ s.1e-1; } //~ ERROR unexpected token: `1e-1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e-1`
{ s.1.1e+1; } //~ ERROR unexpected token: `1.1e+1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1e+1`
{ s.1.1e-1; } //~ ERROR unexpected token: `1.1e-1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1e-1`
s.0x1e1; //~ ERROR no field `0x1e1` on type `S`
s.0x1.; //~ ERROR no field `0x1` on type `S`
//~| ERROR hexadecimal float literal is not supported
//~| ERROR unexpected token: `;`
s.0x1.1; //~ ERROR no field `0x1` on type `S`
//~| ERROR hexadecimal float literal is not supported
s.0x1.1e1; //~ ERROR no field `0x1` on type `S`
//~| ERROR hexadecimal float literal is not supported
{ s.0x1e+; } //~ ERROR expected expression, found `;`
{ s.0x1e-; } //~ ERROR expected expression, found `;`
s.0x1e+1; //~ ERROR no field `0x1e` on type `S`
s.0x1e-1; //~ ERROR no field `0x1e` on type `S`
{ s.0x1.1e+1; } //~ ERROR unexpected token: `0x1.1e+1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `0x1.1e+1`
//~| ERROR hexadecimal float literal is not supported
{ s.0x1.1e-1; } //~ ERROR unexpected token: `0x1.1e-1`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `0x1.1e-1`
//~| ERROR hexadecimal float literal is not supported
s.1e1f32; //~ ERROR no field `1e1` on type `S`
//~| ERROR suffixes on a tuple index are invalid
s.1.f32; //~ ERROR no field `f32` on type `(u8, u8)`
s.1.1f32; //~ ERROR suffixes on a tuple index are invalid
s.1.1e1f32; //~ ERROR no field `1e1` on type `(u8, u8)`
//~| ERROR suffixes on a tuple index are invalid
{ s.1e+f32; } //~ ERROR unexpected token: `1e+f32`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e+f32`
//~| ERROR expected at least one digit in exponent
{ s.1e-f32; } //~ ERROR unexpected token: `1e-f32`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e-f32`
//~| ERROR expected at least one digit in exponent
{ s.1e+1f32; } //~ ERROR unexpected token: `1e+1f32`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e+1f32`
{ s.1e-1f32; } //~ ERROR unexpected token: `1e-1f32`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1e-1f32`
{ s.1.1e+1f32; } //~ ERROR unexpected token: `1.1e+1f32`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1e+1f32`
{ s.1.1e-1f32; } //~ ERROR unexpected token: `1.1e-1f32`
//~| ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `1.1e-1f32`
}

Some files were not shown because too many files have changed in this diff