Auto merge of #128253 - tgross35:rollup-rpmoebz, r=tgross35

Rollup of 9 pull requests

Successful merges:

 - #124941 (Stabilize const `{integer}::from_str_radix` i.e. `const_int_from_str`)
 - #128201 (Implement `Copy`/`Clone` for async closures)
 - #128210 (rustdoc: change title of search results)
 - #128223 (Refactor complex conditions in `collect_tokens_trailing_token`)
 - #128224 (Remove unnecessary range replacements)
 - #128226 (Remove redundant option that was just encoding that a slice was empty)
 - #128227 (CI: do not respect custom try jobs for unrolled perf builds)
 - #128229 (Improve `extern "<abi>" unsafe fn()` error message)
 - #128235 (Fix `Iterator::filter` docs)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-07-27 00:01:28 +00:00
commit 8b6b8574f6
32 changed files with 265 additions and 133 deletions

View File

@ -172,7 +172,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
id: NodeId,
hir_id: hir::HirId,
ident: &mut Ident,
attrs: Option<&'hir [Attribute]>,
attrs: &'hir [Attribute],
vis_span: Span,
i: &ItemKind,
) -> hir::ItemKind<'hir> {
@ -488,7 +488,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
id: NodeId,
vis_span: Span,
ident: &mut Ident,
attrs: Option<&'hir [Attribute]>,
attrs: &'hir [Attribute],
) -> hir::ItemKind<'hir> {
let path = &tree.prefix;
let segments = prefix.segments.iter().chain(path.segments.iter()).cloned().collect();
@ -566,7 +566,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
// `ItemLocalId` and the new owner. (See `lower_node_id`)
let kind =
this.lower_use_tree(use_tree, &prefix, id, vis_span, &mut ident, attrs);
if let Some(attrs) = attrs {
if !attrs.is_empty() {
this.attrs.insert(hir::ItemLocalId::ZERO, attrs);
}

View File

@ -913,15 +913,15 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
ret
}
fn lower_attrs(&mut self, id: HirId, attrs: &[Attribute]) -> Option<&'hir [Attribute]> {
fn lower_attrs(&mut self, id: HirId, attrs: &[Attribute]) -> &'hir [Attribute] {
if attrs.is_empty() {
None
&[]
} else {
debug_assert_eq!(id.owner, self.current_hir_id_owner);
let ret = self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a)));
debug_assert!(!ret.is_empty());
self.attrs.insert(id.local_id, ret);
Some(ret)
ret
}
}

View File

@ -1442,9 +1442,14 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> {
// See `tests/ui/moves/needs-clone-through-deref.rs`
return false;
}
// We don't want to suggest `.clone()` in a move closure, since the value has already been captured.
if self.in_move_closure(expr) {
return false;
}
// We also don't want to suggest cloning a closure itself, since the value has already been captured.
if let hir::ExprKind::Closure(_) = expr.kind {
return false;
}
// Try to find predicates on *generic params* that would allow copying `ty`
let mut suggestion =
if let Some(symbol) = tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) {
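
The two new early returns above suppress the `.clone()` suggestion where it could not actually fix the error. A minimal sketch of the first case (hypothetical user code, not part of this diff): inside a `move` closure the capture has already happened, so cloning at the use site changes nothing.

```rust
fn main() {
    let s = String::from("hello");
    // A `move` closure captures `s` by value the moment it is created.
    let print = move || println!("{s}");
    print();
    // ERROR: borrow of moved value: `s`. Writing `s.clone()` inside the
    // closure body would not help, because the move is the capture itself,
    // so the compiler no longer suggests it there.
    // println!("{s}");
}
```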

View File

@ -435,6 +435,9 @@ fn build_clone_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, self_ty: Ty<'tcx>) -
match self_ty.kind() {
ty::FnDef(..) | ty::FnPtr(_) => builder.copy_shim(),
ty::Closure(_, args) => builder.tuple_like_shim(dest, src, args.as_closure().upvar_tys()),
ty::CoroutineClosure(_, args) => {
builder.tuple_like_shim(dest, src, args.as_coroutine_closure().upvar_tys())
}
ty::Tuple(..) => builder.tuple_like_shim(dest, src, self_ty.tuple_fields()),
ty::Coroutine(coroutine_def_id, args) => {
assert_eq!(tcx.coroutine_movability(*coroutine_def_id), hir::Movability::Movable);
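
With this shim arm (and the trait-solver changes further down), an async closure is `Copy`/`Clone` whenever its captured upvars are, mirroring ordinary closures. A minimal sketch of the user-facing effect, assuming a nightly toolchain with `#![feature(async_closure)]`, as in the tests added below:

```rust
#![feature(async_closure)]

fn main() {
    let greeting = String::from("hello");
    // The async closure captures `greeting`; since `String: Clone`, the
    // closure itself is now `Clone` (its shim clones each upvar in turn).
    let f = async move || println!("{greeting}");
    let _g = f.clone();
}
```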

View File

@ -217,7 +217,10 @@ where
// impl Copy/Clone for Closure where Self::TupledUpvars: Copy/Clone
ty::Closure(_, args) => Ok(vec![ty::Binder::dummy(args.as_closure().tupled_upvars_ty())]),
ty::CoroutineClosure(..) => Err(NoSolution),
// impl Copy/Clone for CoroutineClosure where Self::TupledUpvars: Copy/Clone
ty::CoroutineClosure(_, args) => {
Ok(vec![ty::Binder::dummy(args.as_coroutine_closure().tupled_upvars_ty())])
}
// only when `coroutine_clone` is enabled and the coroutine is movable
// impl Copy/Clone for Coroutine where T: Copy/Clone forall T in (upvars, witnesses)

View File

@ -457,14 +457,3 @@ impl<'a> Parser<'a> {
Err(self.dcx().create_err(err))
}
}
/// The attributes are complete if all attributes are either a doc comment or a
/// builtin attribute other than `cfg_attr`.
pub fn is_complete(attrs: &[ast::Attribute]) -> bool {
attrs.iter().all(|attr| {
attr.is_doc_comment()
|| attr.ident().is_some_and(|ident| {
ident.name != sym::cfg_attr && rustc_feature::is_builtin_attr_name(ident.name)
})
})
}

View File

@ -60,10 +60,6 @@ impl AttrWrapper {
pub fn is_empty(&self) -> bool {
self.attrs.is_empty()
}
pub fn is_complete(&self) -> bool {
crate::parser::attr::is_complete(&self.attrs)
}
}
/// Returns `true` if `attrs` contains a `cfg` or `cfg_attr` attribute
@ -114,17 +110,15 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
replace_ranges.sort_by_key(|(range, _)| range.start);
#[cfg(debug_assertions)]
{
for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
assert!(
range.end <= next_range.start || range.end >= next_range.end,
"Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
range,
tokens,
next_range,
next_tokens,
);
}
for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
assert!(
range.end <= next_range.start || range.end >= next_range.end,
"Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
range,
tokens,
next_range,
next_tokens,
);
}
// Process the replace ranges, starting from the highest start
@ -137,9 +131,9 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
//
// By starting processing from the replace range with the greatest
// start position, we ensure that any replace range which encloses
// another replace range will capture the *replaced* tokens for the inner
// range, not the original tokens.
// start position, we ensure that any (outer) replace range which
// encloses another (inner) replace range will fully overwrite the
// inner range's replacement.
for (range, target) in replace_ranges.into_iter().rev() {
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
@ -199,20 +193,20 @@ impl<'a> Parser<'a> {
force_collect: ForceCollect,
f: impl FnOnce(&mut Self, ast::AttrVec) -> PResult<'a, (R, bool)>,
) -> PResult<'a, R> {
// Skip collection when nothing could observe the collected tokens, i.e.
// all of the following conditions hold.
// - We are not force collecting tokens (because force collection
// requires tokens by definition).
if matches!(force_collect, ForceCollect::No)
// - None of our outer attributes require tokens.
&& attrs.is_complete()
// - Our target doesn't support custom inner attributes (custom
// We must collect if anything could observe the collected tokens, i.e.
// if any of the following conditions hold.
// - We are force collecting tokens (because force collection requires
// tokens by definition).
let needs_collection = matches!(force_collect, ForceCollect::Yes)
// - Any of our outer attributes require tokens.
|| needs_tokens(&attrs.attrs)
// - Our target supports custom inner attributes (custom
// inner attribute invocation might require token capturing).
&& !R::SUPPORTS_CUSTOM_INNER_ATTRS
// - We are not in `capture_cfg` mode (which requires tokens if
|| R::SUPPORTS_CUSTOM_INNER_ATTRS
// - We are in `capture_cfg` mode (which requires tokens if
// the parsed node has `#[cfg]` or `#[cfg_attr]` attributes).
&& !self.capture_cfg
{
|| self.capture_cfg;
if !needs_collection {
return Ok(f(self, attrs.attrs)?.0);
}
@ -250,28 +244,28 @@ impl<'a> Parser<'a> {
return Ok(ret);
}
// This is similar to the "skip collection" check at the start of this
// function, but now that we've parsed an AST node we have more
// This is similar to the `needs_collection` check at the start of this
// function, but now that we've parsed an AST node we have complete
// information available. (If we return early here that means the
// setup, such as cloning the token cursor, was unnecessary. That's
// hard to avoid.)
//
// Skip collection when nothing could observe the collected tokens, i.e.
// all of the following conditions hold.
// - We are not force collecting tokens.
if matches!(force_collect, ForceCollect::No)
// - None of our outer *or* inner attributes require tokens.
// (`attrs` was just outer attributes, but `ret.attrs()` is outer
// and inner attributes. That makes this check more precise than
// `attrs.is_complete()` at the start of the function, and we can
// skip the subsequent check on `R::SUPPORTS_CUSTOM_INNER_ATTRS`.
&& crate::parser::attr::is_complete(ret.attrs())
// - We are not in `capture_cfg` mode, or we are but there are no
// `#[cfg]` or `#[cfg_attr]` attributes. (During normal
// non-`capture_cfg` parsing, we don't need any special capturing
// for those attributes, because they're builtin.)
&& (!self.capture_cfg || !has_cfg_or_cfg_attr(ret.attrs()))
{
// We must collect if anything could observe the collected tokens, i.e.
// if any of the following conditions hold.
// - We are force collecting tokens.
let needs_collection = matches!(force_collect, ForceCollect::Yes)
// - Any of our outer *or* inner attributes require tokens.
// (`attrs.attrs` was just outer attributes, but `ret.attrs()` is
// outer and inner attributes. So this check is more precise than
// the earlier `needs_tokens` check, and we don't need to
// check `R::SUPPORTS_CUSTOM_INNER_ATTRS`.)
|| needs_tokens(ret.attrs())
// - We are in `capture_cfg` mode and there are `#[cfg]` or
// `#[cfg_attr]` attributes. (During normal non-`capture_cfg`
// parsing, we don't need any special capturing for those
// attributes, because they're builtin.)
|| (self.capture_cfg && has_cfg_or_cfg_attr(ret.attrs()));
if !needs_collection {
return Ok(ret);
}
@ -297,11 +291,13 @@ impl<'a> Parser<'a> {
// with `None`, which means the relevant tokens will be removed. (More
// details below.)
let mut inner_attr_replace_ranges = Vec::new();
for inner_attr in ret.attrs().iter().filter(|a| a.style == ast::AttrStyle::Inner) {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&inner_attr.id) {
inner_attr_replace_ranges.push((attr_range, None));
} else {
self.dcx().span_delayed_bug(inner_attr.span, "Missing token range for attribute");
for attr in ret.attrs() {
if attr.style == ast::AttrStyle::Inner {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&attr.id) {
inner_attr_replace_ranges.push((attr_range, None));
} else {
self.dcx().span_delayed_bug(attr.span, "Missing token range for attribute");
}
}
}
@ -337,8 +333,7 @@ impl<'a> Parser<'a> {
// When parsing `m`:
// - `start_pos..end_pos` is `0..34` (`mod m`, excluding the `#[cfg_eval]` attribute).
// - `inner_attr_replace_ranges` is empty.
// - `replace_range_start..replace_ranges_end` has two entries.
// - One to delete the inner attribute (`17..27`), obtained when parsing `g` (see above).
// - `replace_range_start..replace_ranges_end` has one entry.
// - One `AttrsTarget` (added below when parsing `g`) to replace all of `g` (`3..33`,
// including its outer attribute), with:
// - `attrs`: includes the outer and the inner attr.
@ -369,12 +364,10 @@ impl<'a> Parser<'a> {
// What is the status here when parsing the example code at the top of this method?
//
// When parsing `g`, we add two entries:
// When parsing `g`, we add one entry:
// - The `start_pos..end_pos` (`3..33`) entry has a new `AttrsTarget` with:
// - `attrs`: includes the outer and the inner attr.
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
// - `inner_attr_replace_ranges` contains the one entry to delete the inner attr's
// tokens (`17..27`).
//
// When parsing `m`, we do nothing here.
@ -384,7 +377,6 @@ impl<'a> Parser<'a> {
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target)));
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
} else if matches!(self.capture_state.capturing, Capturing::No) {
// Only clear the ranges once we've finished capturing entirely, i.e. we've finished
// the outermost call to this method.
@ -461,6 +453,19 @@ fn make_attr_token_stream(
AttrTokenStream::new(stack_top.inner)
}
/// Tokens are needed if:
/// - any non-single-segment attributes (other than doc comments) are present; or
/// - any `cfg_attr` attributes are present; or
/// - any single-segment, non-builtin attributes are present.
fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
attrs.iter().any(|attr| match attr.ident() {
None => !attr.is_doc_comment(),
Some(ident) => {
ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
}
})
}
// Some types are used a lot. Make sure they don't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
mod size_asserts {
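
The refactor replaces the negated `is_complete` checks with a positive `needs_collection`/`needs_tokens` condition. A self-contained sketch of the classification rule (simplified stand-ins, not rustc's real `Attribute` type):

```rust
// Simplified model: an attribute is (path, is_doc_comment). Tokens must be
// collected if any attribute is a multi-segment non-doc attribute, a
// `cfg_attr`, or a single-segment attribute that is not a builtin.
fn needs_tokens_sketch(attrs: &[(&str, bool)]) -> bool {
    const BUILTINS: &[&str] = &["inline", "allow", "derive"]; // illustrative subset
    attrs.iter().any(|&(path, is_doc)| {
        if path.contains("::") || is_doc {
            // Multi-segment attributes need tokens; doc comments never do.
            !is_doc
        } else {
            path == "cfg_attr" || !BUILTINS.contains(&path)
        }
    })
}

fn main() {
    assert!(!needs_tokens_sketch(&[("inline", false), ("doc", true)])); // skip collection
    assert!(needs_tokens_sketch(&[("cfg_attr", false)])); // must collect
    assert!(needs_tokens_sketch(&[("my_macro_attr", false)])); // custom attr: must collect
    assert!(needs_tokens_sketch(&[("rustfmt::skip", false)])); // multi-segment: must collect
}
```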

View File

@ -2483,12 +2483,15 @@ impl<'a> Parser<'a> {
/// `check_pub` adds additional `pub` to the checks in case users place it
/// wrongly, can be used to ensure `pub` never comes after `default`.
pub(super) fn check_fn_front_matter(&mut self, check_pub: bool, case: Case) -> bool {
const ALL_QUALS: &[Symbol] =
&[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern];
// We use an over-approximation here.
// `const const`, `fn const` won't parse, but we're not stepping over other syntax either.
// `pub` is added in case users got confused with the ordering like `async pub fn`,
// only if it wasn't preceded by `default` as `default pub` is invalid.
let quals: &[Symbol] = if check_pub {
&[kw::Pub, kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
ALL_QUALS
} else {
&[kw::Gen, kw::Const, kw::Async, kw::Unsafe, kw::Safe, kw::Extern]
};
@ -2518,9 +2521,9 @@ impl<'a> Parser<'a> {
|| self.check_keyword_case(kw::Extern, case)
&& self.look_ahead(1, |t| t.can_begin_string_literal())
&& (self.look_ahead(2, |t| t.is_keyword_case(kw::Fn, case)) ||
// this branch is only for better diagnostic in later, `pub` is not allowed here
// this branch is only for better diagnostics; `pub`, `unsafe`, etc. are not allowed here
(self.may_recover()
&& self.look_ahead(2, |t| t.is_keyword(kw::Pub))
&& self.look_ahead(2, |t| ALL_QUALS.iter().any(|&kw| t.is_keyword(kw)))
&& self.look_ahead(3, |t| t.is_keyword_case(kw::Fn, case))))
}

View File

@ -2262,8 +2262,21 @@ impl<'tcx> SelectionContext<'_, 'tcx> {
}
}
// FIXME(async_closures): These are never clone, for now.
ty::CoroutineClosure(_, _) => None,
ty::CoroutineClosure(_, args) => {
// (*) binder moved here
let ty = self.infcx.shallow_resolve(args.as_coroutine_closure().tupled_upvars_ty());
if let ty::Infer(ty::TyVar(_)) = ty.kind() {
// Not yet resolved.
Ambiguous
} else {
Where(
obligation
.predicate
.rebind(args.as_coroutine_closure().upvar_tys().to_vec()),
)
}
}
// `Copy` and `Clone` are automatically implemented for an anonymous adt
// if all of its fields are `Copy` and `Clone`
ty::Adt(adt, args) if adt.is_anonymous() => {

View File

@ -823,7 +823,7 @@ pub trait Iterator {
///
/// Given an element the closure must return `true` or `false`. The returned
/// iterator will yield only the elements for which the closure returns
/// true.
/// `true`.
///
/// # Examples
///

View File

@ -127,7 +127,6 @@
#![feature(const_hash)]
#![feature(const_heap)]
#![feature(const_index_range_slice_index)]
#![feature(const_int_from_str)]
#![feature(const_intrinsic_copy)]
#![feature(const_intrinsic_forget)]
#![feature(const_ipv4)]

View File

@ -113,7 +113,7 @@ pub enum IntErrorKind {
impl ParseIntError {
/// Outputs the detailed cause of parsing an integer failing.
#[must_use]
#[rustc_const_unstable(feature = "const_int_from_str", issue = "59133")]
#[rustc_const_stable(feature = "const_int_from_str", since = "CURRENT_RUSTC_VERSION")]
#[stable(feature = "int_error_matching", since = "1.55.0")]
pub const fn kind(&self) -> &IntErrorKind {
&self.kind

View File

@ -1386,6 +1386,7 @@ from_str_radix_int_impl! { isize i8 i16 i32 i64 i128 usize u8 u16 u32 u64 u128 }
#[doc(hidden)]
#[inline(always)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_const_stable(feature = "const_int_from_str", since = "CURRENT_RUSTC_VERSION")]
pub const fn can_not_overflow<T>(radix: u32, is_signed_ty: bool, digits: &[u8]) -> bool {
radix <= 16 && digits.len() <= mem::size_of::<T>() * 2 - is_signed_ty as usize
}
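
The bound in `can_not_overflow` works because with radix at most 16 each digit contributes at most 4 bits, so `size_of::<T>() * 2` digits never exceed the type's width (signed types give up one digit to stay below the sign bit). A quick check of the arithmetic, not part of the diff:

```rust
fn main() {
    // Unsigned: 4-byte u32 allows up to 2 * 4 = 8 digits in radix 16, and
    // 16^8 - 1 is exactly u32::MAX, so the accumulated value cannot overflow.
    assert_eq!(16u64.pow(2 * 4) - 1, u32::MAX as u64);
    // Signed: i32 allows only 2 * 4 - 1 = 7 digits, and 16^7 - 1 < i32::MAX,
    // leaving room for the sign.
    assert!(16i64.pow(2 * 4 - 1) - 1 < i32::MAX as i64);
}
```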
@ -1435,7 +1436,7 @@ macro_rules! from_str_radix {
#[doc = concat!("assert_eq!(", stringify!($int_ty), "::from_str_radix(\"A\", 16), Ok(10));")]
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_int_from_str", issue = "59133")]
#[rustc_const_stable(feature = "const_int_from_str", since = "CURRENT_RUSTC_VERSION")]
pub const fn from_str_radix(src: &str, radix: u32) -> Result<$int_ty, ParseIntError> {
use self::IntErrorKind::*;
use self::ParseIntError as PIE;
@ -1565,7 +1566,7 @@ macro_rules! from_str_radix_size_impl {
#[doc = concat!("assert_eq!(", stringify!($size), "::from_str_radix(\"A\", 16), Ok(10));")]
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_unstable(feature = "const_int_from_str", issue = "59133")]
#[rustc_const_stable(feature = "const_int_from_str", since = "CURRENT_RUSTC_VERSION")]
pub const fn from_str_radix(src: &str, radix: u32) -> Result<$size, ParseIntError> {
match <$t>::from_str_radix(src, radix) {
Ok(x) => Ok(x as $size),
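
With the attribute switched to `rustc_const_stable`, `from_str_radix` can be called in const contexts on stable toolchains that include this change. A minimal sketch of the kind of usage this unlocks (hypothetical constant name):

```rust
// Parse a numeric literal at compile time; a parse error becomes a
// compile-time error via `panic!` in const evaluation.
const PORT: u16 = match u16::from_str_radix("1f90", 16) {
    Ok(p) => p,
    Err(_) => panic!("invalid port literal"),
};

fn main() {
    assert_eq!(PORT, 0x1f90);
}
```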

View File

@ -16,7 +16,6 @@
#![feature(const_hash)]
#![feature(const_heap)]
#![feature(const_intrinsic_copy)]
#![feature(const_int_from_str)]
#![feature(const_maybe_uninit_as_mut_ptr)]
#![feature(const_nonnull_new)]
#![feature(const_pointer_is_aligned)]

View File

@ -97,9 +97,15 @@ def find_run_type(ctx: GitHubCtx) -> Optional[WorkflowRunType]:
"refs/heads/automation/bors/try"
)
# Unrolled branch from a rollup for testing perf
# This should **not** allow custom try jobs
is_unrolled_perf_build = ctx.ref == "refs/heads/try-perf"
if try_build:
jobs = get_custom_jobs(ctx)
return TryRunType(custom_jobs=jobs)
custom_jobs = []
if not is_unrolled_perf_build:
custom_jobs = get_custom_jobs(ctx)
return TryRunType(custom_jobs=custom_jobs)
if ctx.ref == "refs/heads/auto":
return AutoRunType()

View File

@ -2932,7 +2932,7 @@ ${item.displayPath}<span class="${type}">${name}</span>\
}
// Update document title to maintain a meaningful browser history
searchState.title = "Results for " + query.original + " - Rust";
searchState.title = "\"" + query.original + "\" Search - Rust";
// Because searching is incremental by character, only the most
// recent search query is added to the browser history.

View File

@ -1,4 +1,3 @@
#![feature(const_int_from_str)]
#![warn(clippy::from_str_radix_10)]
mod some_mod {
@ -61,7 +60,8 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
Ok(())
}
fn issue_12732() {
// https://github.com/rust-lang/rust-clippy/issues/12731
fn issue_12731() {
const A: Result<u32, std::num::ParseIntError> = u32::from_str_radix("123", 10);
const B: () = {
let _ = u32::from_str_radix("123", 10);

View File

@ -1,4 +1,3 @@
#![feature(const_int_from_str)]
#![warn(clippy::from_str_radix_10)]
mod some_mod {
@ -61,7 +60,8 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
Ok(())
}
fn issue_12732() {
// https://github.com/rust-lang/rust-clippy/issues/12731
fn issue_12731() {
const A: Result<u32, std::num::ParseIntError> = u32::from_str_radix("123", 10);
const B: () = {
let _ = u32::from_str_radix("123", 10);

View File

@ -1,5 +1,5 @@
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:29:5
--> tests/ui/from_str_radix_10.rs:28:5
|
LL | u32::from_str_radix("30", 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"30".parse::<u32>()`
@ -8,43 +8,43 @@ LL | u32::from_str_radix("30", 10)?;
= help: to override `-D warnings` add `#[allow(clippy::from_str_radix_10)]`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:32:5
--> tests/ui/from_str_radix_10.rs:31:5
|
LL | i64::from_str_radix("24", 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"24".parse::<i64>()`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:34:5
--> tests/ui/from_str_radix_10.rs:33:5
|
LL | isize::from_str_radix("100", 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"100".parse::<isize>()`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:36:5
--> tests/ui/from_str_radix_10.rs:35:5
|
LL | u8::from_str_radix("7", 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"7".parse::<u8>()`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:38:5
--> tests/ui/from_str_radix_10.rs:37:5
|
LL | u16::from_str_radix(&("10".to_owned() + "5"), 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `("10".to_owned() + "5").parse::<u16>()`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:40:5
--> tests/ui/from_str_radix_10.rs:39:5
|
LL | i128::from_str_radix(Test + Test, 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(Test + Test).parse::<i128>()`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:44:5
--> tests/ui/from_str_radix_10.rs:43:5
|
LL | i32::from_str_radix(string, 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `string.parse::<i32>()`
error: this call to `from_str_radix` can be replaced with a call to `str::parse`
--> tests/ui/from_str_radix_10.rs:48:5
--> tests/ui/from_str_radix_10.rs:47:5
|
LL | i32::from_str_radix(&stringier, 10)?;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `stringier.parse::<i32>()`

View File

@ -0,0 +1,24 @@
//@ aux-build:block-on.rs
//@ edition:2021
//@ run-pass
//@ check-run-results
#![feature(async_closure)]
extern crate block_on;
async fn for_each(f: impl async FnOnce(&str) + Clone) {
f.clone()("world").await;
f.clone()("world2").await;
}
fn main() {
block_on::block_on(async_main());
}
async fn async_main() {
let x = String::from("Hello,");
for_each(async move |s| {
println!("{x} {s}");
}).await;
}

View File

@ -0,0 +1,2 @@
Hello, world
Hello, world2

View File

@ -11,6 +11,15 @@ LL | x().await;
|
note: `async_call_once` takes ownership of the receiver `self`, which moves `x`
--> $SRC_DIR/core/src/ops/async_function.rs:LL:COL
help: you could `clone` the value and consume it, if the `NoCopy: Clone` trait bound could be satisfied
|
LL | x.clone()().await;
| ++++++++
help: consider annotating `NoCopy` with `#[derive(Clone)]`
|
LL + #[derive(Clone)]
LL | struct NoCopy;
|
error: aborting due to 1 previous error

View File

@ -0,0 +1,36 @@
//@ edition: 2021
#![feature(async_closure)]
struct NotClonableArg;
#[derive(Default)]
struct NotClonableReturnType;
// Verify that the only components that we care about are the upvars, not the signature.
fn we_are_okay_with_not_clonable_signature() {
let x = async |x: NotClonableArg| -> NotClonableReturnType { Default::default() };
x.clone(); // Okay
}
#[derive(Debug)]
struct NotClonableUpvar;
fn we_only_care_about_clonable_upvars() {
let x = NotClonableUpvar;
// Notably, this is Clone because we capture `&x`.
let yes_clone = async || {
println!("{x:?}");
};
yes_clone.clone(); // Okay
let z = NotClonableUpvar;
// However, this is not Clone, because the closure captures `z` by move.
// (Even though the future that is lent out captures `z` by ref!)
let not_clone = async move || {
println!("{z:?}");
};
not_clone.clone();
//~^ ERROR the trait bound `NotClonableUpvar: Clone` is not satisfied
}
fn main() {}

View File

@ -0,0 +1,20 @@
error[E0277]: the trait bound `NotClonableUpvar: Clone` is not satisfied in `{async closure@$DIR/not-clone-closure.rs:29:21: 29:34}`
--> $DIR/not-clone-closure.rs:32:15
|
LL | not_clone.clone();
| ^^^^^ within `{async closure@$DIR/not-clone-closure.rs:29:21: 29:34}`, the trait `Clone` is not implemented for `NotClonableUpvar`, which is required by `{async closure@$DIR/not-clone-closure.rs:29:21: 29:34}: Clone`
|
note: required because it's used within this closure
--> $DIR/not-clone-closure.rs:29:21
|
LL | let not_clone = async move || {
| ^^^^^^^^^^^^^
help: consider annotating `NotClonableUpvar` with `#[derive(Clone)]`
|
LL + #[derive(Clone)]
LL | struct NotClonableUpvar;
|
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0277`.

View File

@ -19,7 +19,7 @@ fn simple<'a>(x: &'a i32) {
let c = async move || { println!("{}", *x); };
outlives::<'a>(c()); //~ ERROR `c` does not live long enough
outlives::<'a>(call_once(c)); //~ ERROR cannot move out of `c`
outlives::<'a>(call_once(c));
}
struct S<'a>(&'a i32);

View File

@ -29,22 +29,6 @@ LL | outlives::<'a>(call_once(c));
LL | }
| - `c` dropped here while still borrowed
error[E0505]: cannot move out of `c` because it is borrowed
--> $DIR/without-precise-captures-we-are-powerless.rs:22:30
|
LL | fn simple<'a>(x: &'a i32) {
| -- lifetime `'a` defined here
...
LL | let c = async move || { println!("{}", *x); };
| - binding `c` declared here
LL | outlives::<'a>(c());
| ---
| |
| borrow of `c` occurs here
| argument requires that `c` is borrowed for `'a`
LL | outlives::<'a>(call_once(c));
| ^ move out of `c` occurs here
error[E0597]: `x` does not live long enough
--> $DIR/without-precise-captures-we-are-powerless.rs:28:13
|
@ -146,7 +130,7 @@ LL | // outlives::<'a>(call_once(c)); // FIXME(async_closures): Figure out w
LL | }
| - `c` dropped here while still borrowed
error: aborting due to 10 previous errors
error: aborting due to 9 previous errors
Some errors have detailed explanations: E0505, E0597, E0621.
For more information about an error, try `rustc --explain E0505`.

View File

@ -1,5 +1,3 @@
#![feature(const_int_from_str)]
const _OK: () = match i32::from_str_radix("-1234", 10) {
Ok(x) => assert!(x == -1234),
Err(_) => panic!(),

View File

@ -6,7 +6,7 @@ error[E0080]: evaluation of constant value failed
note: inside `core::num::<impl u64>::from_str_radix`
--> $SRC_DIR/core/src/num/mod.rs:LL:COL
note: inside `_TOO_LOW`
--> $DIR/parse_ints.rs:7:24
--> $DIR/parse_ints.rs:5:24
|
LL | const _TOO_LOW: () = { u64::from_str_radix("12345ABCD", 1); };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@ -20,7 +20,7 @@ error[E0080]: evaluation of constant value failed
note: inside `core::num::<impl u64>::from_str_radix`
--> $SRC_DIR/core/src/num/mod.rs:LL:COL
note: inside `_TOO_HIGH`
--> $DIR/parse_ints.rs:8:25
--> $DIR/parse_ints.rs:6:25
|
LL | const _TOO_HIGH: () = { u64::from_str_radix("12345ABCD", 37); };
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

View File

@ -1,6 +1,10 @@
trait T {
extern "Rust" unsafe fn foo();
//~^ ERROR expected `{`, found keyword `unsafe`
//~^ ERROR expected `fn`, found keyword `unsafe`
//~| NOTE expected `fn`
//~| HELP `unsafe` must come before `extern "Rust"`
//~| SUGGESTION unsafe extern "Rust"
//~| NOTE keyword order for functions declaration is `pub`, `default`, `const`, `async`, `unsafe`, `extern`
}
fn main() {}

View File

@ -1,13 +1,13 @@
error: expected `{`, found keyword `unsafe`
error: expected `fn`, found keyword `unsafe`
--> $DIR/issue-19398.rs:2:19
|
LL | trait T {
| - while parsing this item list starting here
LL | extern "Rust" unsafe fn foo();
| ^^^^^^ expected `{`
LL |
LL | }
| - the item list ends here
| --------------^^^^^^
| | |
| | expected `fn`
| help: `unsafe` must come before `extern "Rust"`: `unsafe extern "Rust"`
|
= note: keyword order for functions declaration is `pub`, `default`, `const`, `async`, `unsafe`, `extern`
error: aborting due to 1 previous error

View File

@ -0,0 +1,16 @@
//@ edition:2018
// There is an order to respect for keywords before a function:
// `<visibility>, const, async, unsafe, extern, "<ABI>"`
//
// This test ensures the compiler is helpful about them being misplaced.
// Visibilities are tested elsewhere.
extern "C" unsafe fn test() {}
//~^ ERROR expected `fn`, found keyword `unsafe`
//~| NOTE expected `fn`
//~| HELP `unsafe` must come before `extern "C"`
//~| SUGGESTION unsafe extern "C"
//~| NOTE keyword order for functions declaration is `pub`, `default`, `const`, `async`, `unsafe`, `extern`
fn main() {}

View File

@ -0,0 +1,13 @@
error: expected `fn`, found keyword `unsafe`
--> $DIR/wrong-unsafe-abi.rs:9:12
|
LL | extern "C" unsafe fn test() {}
| -----------^^^^^^
| | |
| | expected `fn`
| help: `unsafe` must come before `extern "C"`: `unsafe extern "C"`
|
= note: keyword order for functions declaration is `pub`, `default`, `const`, `async`, `unsafe`, `extern`
error: aborting due to 1 previous error