Auto merge of #81660 - jonas-schievink:rollup-fz2lh78, r=jonas-schievink

Rollup of 11 pull requests

Successful merges:

 - #80629 (Add lint for 2229 migrations)
 - #81022 (Add Frames Iterator for Backtrace)
 - #81481 (move some tests)
 - #81485 (Add some tests for associated-type-bounds issues)
 - #81492 (rustdoc: Note why `rustdoc::html::markdown` is public)
 - #81577 (const_evaluatable: consider sub-expressions to be evaluatable)
 - #81599 (Implement `TrustedLen` for `Fuse<I: TrustedLen>`)
 - #81608 (Improve handling of spans around macro result parse errors)
 - #81609 (Remove the remains of query categories)
 - #81630 (Fix overflowing text on mobile when sidebar is displayed)
 - #81631 (Remove unneeded `mut` variable)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2021-02-02 12:02:36 +00:00
commit a3ed564c13
42 changed files with 2644 additions and 1797 deletions


@ -896,7 +896,9 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
fragment
}
Err(mut err) => {
err.set_span(span);
if err.span.is_dummy() {
err.set_span(span);
}
annotate_err_with_kind(&mut err, kind, span);
err.emit();
self.cx.trace_macros_diag();


@ -2968,6 +2968,7 @@ declare_lint_pass! {
UNSUPPORTED_NAKED_FUNCTIONS,
MISSING_ABI,
SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
DISJOINT_CAPTURE_DROP_REORDER,
]
}
@ -2994,6 +2995,51 @@ declare_lint! {
"detects doc comments that aren't used by rustdoc"
}
declare_lint! {
/// The `disjoint_capture_drop_reorder` lint detects variables that aren't completely
/// captured by a closure when the feature `capture_disjoint_fields` is enabled, where that
/// affects the drop order of at least one path starting at the variable.
///
/// ### Example
///
/// ```rust,compile_fail
/// # #![deny(disjoint_capture_drop_reorder)]
/// # #![allow(unused)]
/// struct FancyInteger(i32);
///
/// impl Drop for FancyInteger {
/// fn drop(&mut self) {
/// println!("Just dropped {}", self.0);
/// }
/// }
///
/// struct Point { x: FancyInteger, y: FancyInteger }
///
/// fn main() {
/// let p = Point { x: FancyInteger(10), y: FancyInteger(20) };
///
/// let c = || {
/// let x = p.x;
/// };
///
/// c();
///
/// // ... More code ...
/// }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// In the above example, `p.y` will be dropped at the end of `main` instead of with `c` if
/// the feature `capture_disjoint_fields` is enabled.
pub DISJOINT_CAPTURE_DROP_REORDER,
Allow,
"Drop reorder because of `capture_disjoint_fields`"
}
declare_lint_pass!(UnusedDocComment => [UNUSED_DOC_COMMENTS]);
declare_lint! {


@ -189,25 +189,6 @@ impl<T: Parse> Parse for List<T> {
}
}
/// A named group containing queries.
///
/// For now, the name is not used any more, but the capability remains interesting for future
/// developments of the query system.
struct Group {
#[allow(unused)]
name: Ident,
queries: List<Query>,
}
impl Parse for Group {
fn parse(input: ParseStream<'_>) -> Result<Self> {
let name: Ident = input.parse()?;
let content;
braced!(content in input);
Ok(Group { name, queries: content.parse()? })
}
}
struct QueryModifiers {
/// The description of the query.
desc: (Option<Ident>, Punctuated<Expr, Token![,]>),
@ -450,72 +431,70 @@ fn add_query_description_impl(
}
pub fn rustc_queries(input: TokenStream) -> TokenStream {
let groups = parse_macro_input!(input as List<Group>);
let queries = parse_macro_input!(input as List<Query>);
let mut query_stream = quote! {};
let mut query_description_stream = quote! {};
let mut dep_node_def_stream = quote! {};
let mut cached_queries = quote! {};
for group in groups.0 {
for mut query in group.queries.0 {
let modifiers = process_modifiers(&mut query);
let name = &query.name;
let arg = &query.arg;
let result_full = &query.result;
let result = match query.result {
ReturnType::Default => quote! { -> () },
_ => quote! { #result_full },
};
for mut query in queries.0 {
let modifiers = process_modifiers(&mut query);
let name = &query.name;
let arg = &query.arg;
let result_full = &query.result;
let result = match query.result {
ReturnType::Default => quote! { -> () },
_ => quote! { #result_full },
};
if modifiers.cache.is_some() {
cached_queries.extend(quote! {
#name,
});
}
let mut attributes = Vec::new();
// Pass on the fatal_cycle modifier
if modifiers.fatal_cycle {
attributes.push(quote! { fatal_cycle });
};
// Pass on the storage modifier
if let Some(ref ty) = modifiers.storage {
attributes.push(quote! { storage(#ty) });
};
// Pass on the cycle_delay_bug modifier
if modifiers.cycle_delay_bug {
attributes.push(quote! { cycle_delay_bug });
};
// Pass on the no_hash modifier
if modifiers.no_hash {
attributes.push(quote! { no_hash });
};
// Pass on the anon modifier
if modifiers.anon {
attributes.push(quote! { anon });
};
// Pass on the eval_always modifier
if modifiers.eval_always {
attributes.push(quote! { eval_always });
};
let attribute_stream = quote! {#(#attributes),*};
let doc_comments = query.doc_comments.iter();
// Add the query to the group
query_stream.extend(quote! {
#(#doc_comments)*
[#attribute_stream] fn #name(#arg) #result,
if modifiers.cache.is_some() {
cached_queries.extend(quote! {
#name,
});
// Create a dep node for the query
dep_node_def_stream.extend(quote! {
[#attribute_stream] #name(#arg),
});
add_query_description_impl(&query, modifiers, &mut query_description_stream);
}
let mut attributes = Vec::new();
// Pass on the fatal_cycle modifier
if modifiers.fatal_cycle {
attributes.push(quote! { fatal_cycle });
};
// Pass on the storage modifier
if let Some(ref ty) = modifiers.storage {
attributes.push(quote! { storage(#ty) });
};
// Pass on the cycle_delay_bug modifier
if modifiers.cycle_delay_bug {
attributes.push(quote! { cycle_delay_bug });
};
// Pass on the no_hash modifier
if modifiers.no_hash {
attributes.push(quote! { no_hash });
};
// Pass on the anon modifier
if modifiers.anon {
attributes.push(quote! { anon });
};
// Pass on the eval_always modifier
if modifiers.eval_always {
attributes.push(quote! { eval_always });
};
let attribute_stream = quote! {#(#attributes),*};
let doc_comments = query.doc_comments.iter();
// Add the query to the group
query_stream.extend(quote! {
#(#doc_comments)*
[#attribute_stream] fn #name(#arg) #result,
});
// Create a dep node for the query
dep_node_def_stream.extend(quote! {
[#attribute_stream] #name(#arg),
});
add_query_description_impl(&query, modifiers, &mut query_description_stream);
}
TokenStream::from(quote! {

File diff suppressed because it is too large.


@ -1104,7 +1104,7 @@ impl<'a> Parser<'a> {
let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) {
// Point at the end of the macro call when reaching end of macro arguments.
(token::Eof, Some(_)) => {
let sp = self.sess.source_map().next_point(self.token.span);
let sp = self.sess.source_map().next_point(self.prev_token.span);
(sp, sp)
}
// We don't want to point at the following span after DUMMY_SP.
@ -1721,7 +1721,7 @@ impl<'a> Parser<'a> {
pub(super) fn expected_expression_found(&self) -> DiagnosticBuilder<'a> {
let (span, msg) = match (&self.token.kind, self.subparser_name) {
(&token::Eof, Some(origin)) => {
let sp = self.sess.source_map().next_point(self.token.span);
let sp = self.sess.source_map().next_point(self.prev_token.span);
(sp, format!("expected expression, found end of {}", origin))
}
_ => (


@ -132,7 +132,7 @@ where
tcx: TyCtxt<'tcx>,
ct: AbstractConst<'tcx>,
) -> ControlFlow<V::BreakTy> {
const_evaluatable::walk_abstract_const(tcx, ct, |node| match node {
const_evaluatable::walk_abstract_const(tcx, ct, |node| match node.root() {
ACNode::Leaf(leaf) => {
let leaf = leaf.subst(tcx, ct.substs);
self.visit_const(leaf)


@ -2083,18 +2083,11 @@ impl<'a, 'tcx> LifetimeContext<'a, 'tcx> {
output: Option<&'tcx hir::Ty<'tcx>>,
) {
debug!("visit_fn_like_elision: enter");
let mut arg_elide = Elide::FreshLateAnon(Cell::new(0));
let arg_scope = Scope::Elision { elide: arg_elide.clone(), s: self.scope };
let arg_scope = Scope::Elision { elide: Elide::FreshLateAnon(Cell::new(0)), s: self.scope };
self.with(arg_scope, |_, this| {
for input in inputs {
this.visit_ty(input);
}
match *this.scope {
Scope::Elision { ref elide, .. } => {
arg_elide = elide.clone();
}
_ => bug!(),
}
});
let output = match output {


@ -799,6 +799,9 @@ impl SourceMap {
/// Returns a new span representing the next character after the end-point of this span.
pub fn next_point(&self, sp: Span) -> Span {
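// Dummy spans don't point at any real source, so there is no next character to move to;
// return the span unchanged.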
if sp.is_dummy() {
return sp;
}
let start_of_next_point = sp.hi().0;
let width = self.find_width_of_character_at_span(sp.shrink_to_hi(), true);


@ -50,11 +50,24 @@ pub fn is_const_evaluatable<'cx, 'tcx>(
if b_def == def && b_substs == substs {
debug!("is_const_evaluatable: caller_bound ~~> ok");
return Ok(());
} else if AbstractConst::new(tcx, b_def, b_substs)?
.map_or(false, |b_ct| try_unify(tcx, ct, b_ct))
{
debug!("is_const_evaluatable: abstract_const ~~> ok");
return Ok(());
}
if let Some(b_ct) = AbstractConst::new(tcx, b_def, b_substs)? {
// Try to unify with each subtree in the AbstractConst to allow for
// `N + 1` being const evaluatable even if there's only a `ConstEvaluatable`
// predicate for `(N + 1) * 2`.
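// For example, a caller bound `where [(); (N + 1) * 2]:` yields an abstract const for
// `(N + 1) * 2`; walking its subtrees lets a use that only needs `N + 1` unify with the
// `N + 1` subtree and be accepted.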
let result =
walk_abstract_const(tcx, b_ct, |b_ct| {
match try_unify(tcx, ct, b_ct) {
true => ControlFlow::BREAK,
false => ControlFlow::CONTINUE,
}
});
if let ControlFlow::Break(()) = result {
debug!("is_const_evaluatable: abstract_const ~~> ok");
return Ok(());
}
}
}
_ => {} // don't care
@ -78,7 +91,7 @@ pub fn is_const_evaluatable<'cx, 'tcx>(
Concrete,
}
let mut failure_kind = FailureKind::Concrete;
walk_abstract_const::<!, _>(tcx, ct, |node| match node {
walk_abstract_const::<!, _>(tcx, ct, |node| match node.root() {
Node::Leaf(leaf) => {
let leaf = leaf.subst(tcx, ct.substs);
if leaf.has_infer_types_or_consts() {
@ -580,15 +593,15 @@ pub fn walk_abstract_const<'tcx, R, F>(
mut f: F,
) -> ControlFlow<R>
where
F: FnMut(Node<'tcx>) -> ControlFlow<R>,
F: FnMut(AbstractConst<'tcx>) -> ControlFlow<R>,
{
fn recurse<'tcx, R>(
tcx: TyCtxt<'tcx>,
ct: AbstractConst<'tcx>,
f: &mut dyn FnMut(Node<'tcx>) -> ControlFlow<R>,
f: &mut dyn FnMut(AbstractConst<'tcx>) -> ControlFlow<R>,
) -> ControlFlow<R> {
f(ct)?;
let root = ct.root();
f(root)?;
match root {
Node::Leaf(_) => ControlFlow::CONTINUE,
Node::Binop(_, l, r) => {


@ -828,7 +828,7 @@ fn contains_illegal_self_type_reference<'tcx, T: TypeFoldable<'tcx>>(
// constants which are not considered const evaluatable.
use rustc_middle::mir::abstract_const::Node;
if let Ok(Some(ct)) = AbstractConst::from_const(self.tcx, ct) {
const_evaluatable::walk_abstract_const(self.tcx, ct, |node| match node {
const_evaluatable::walk_abstract_const(self.tcx, ct, |node| match node.root() {
Node::Leaf(leaf) => {
let leaf = leaf.subst(self.tcx, ct.substs);
self.visit_const(leaf)
@ -849,7 +849,7 @@ fn contains_illegal_self_type_reference<'tcx, T: TypeFoldable<'tcx>>(
// take a `ty::Const` instead.
use rustc_middle::mir::abstract_const::Node;
if let Ok(Some(ct)) = AbstractConst::new(self.tcx, def, substs) {
const_evaluatable::walk_abstract_const(self.tcx, ct, |node| match node {
const_evaluatable::walk_abstract_const(self.tcx, ct, |node| match node.root() {
Node::Leaf(leaf) => {
let leaf = leaf.subst(self.tcx, ct.substs);
self.visit_const(leaf)


@ -30,6 +30,7 @@
//! then mean that all later passes would have to check for these figments
//! and report an error, and it just seems like more mess in the end.)
use super::writeback::Resolver;
use super::FnCtxt;
use crate::expr_use_visitor as euv;
@ -40,7 +41,9 @@ use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_infer::infer::UpvarRegion;
use rustc_middle::hir::place::{Place, PlaceBase, PlaceWithHirId, ProjectionKind};
use rustc_middle::ty::{self, Ty, TyCtxt, UpvarSubsts};
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::{self, Ty, TyCtxt, TypeckResults, UpvarSubsts};
use rustc_session::lint;
use rustc_span::sym;
use rustc_span::{MultiSpan, Span, Symbol};
@ -55,6 +58,11 @@ enum PlaceAncestryRelation {
Divergent,
}
/// Intermediate format to store a captured `Place` and associated `ty::CaptureInfo`
/// during capture analysis. Information in this map feeds into the minimum capture
/// analysis pass.
type InferredCaptureInformation<'tcx> = FxIndexMap<Place<'tcx>, ty::CaptureInfo<'tcx>>;
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
pub fn closure_analyze(&self, body: &'tcx hir::Body<'tcx>) {
InferBorrowKindVisitor { fcx: self }.visit_body(body);
@ -92,7 +100,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&self,
closure_hir_id: hir::HirId,
span: Span,
body: &hir::Body<'_>,
body: &'tcx hir::Body<'tcx>,
capture_clause: hir::CaptureBy,
) {
debug!("analyze_closure(id={:?}, body.id={:?})", closure_hir_id, body.id());
@ -124,28 +132,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let local_def_id = closure_def_id.expect_local();
let mut capture_information: FxIndexMap<Place<'tcx>, ty::CaptureInfo<'tcx>> =
Default::default();
if !self.tcx.features().capture_disjoint_fields {
if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
for (&var_hir_id, _) in upvars.iter() {
let place = self.place_for_root_variable(local_def_id, var_hir_id);
debug!("seed place {:?}", place);
let upvar_id = ty::UpvarId::new(var_hir_id, local_def_id);
let capture_kind = self.init_capture_kind(capture_clause, upvar_id, span);
let info = ty::CaptureInfo {
capture_kind_expr_id: None,
path_expr_id: None,
capture_kind,
};
capture_information.insert(place, info);
}
}
}
let body_owner_def_id = self.tcx.hir().body_owner_def_id(body.id());
assert_eq!(body_owner_def_id.to_def_id(), closure_def_id);
let mut delegate = InferBorrowKind {
@ -155,7 +141,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
capture_clause,
current_closure_kind: ty::ClosureKind::LATTICE_BOTTOM,
current_origin: None,
capture_information,
capture_information: Default::default(),
};
euv::ExprUseVisitor::new(
&mut delegate,
@ -172,6 +158,40 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
);
self.log_capture_analysis_first_pass(closure_def_id, &delegate.capture_information, span);
self.compute_min_captures(closure_def_id, delegate.capture_information);
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
if should_do_migration_analysis(self.tcx, closure_hir_id) {
self.perform_2229_migration_analysis(closure_def_id, capture_clause, span, body);
}
// We now fake capture information for all variables that are mentioned within the closure.
// We do this after handling migrations so that the migration analysis sees `min_captures`
// computed from the real captures only, before the fake ones are added below.
if !self.tcx.features().capture_disjoint_fields {
let mut capture_information: InferredCaptureInformation<'tcx> = Default::default();
if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
for var_hir_id in upvars.keys() {
let place = self.place_for_root_variable(local_def_id, *var_hir_id);
debug!("seed place {:?}", place);
let upvar_id = ty::UpvarId::new(*var_hir_id, local_def_id);
let capture_kind = self.init_capture_kind(capture_clause, upvar_id, span);
let fake_info = ty::CaptureInfo {
capture_kind_expr_id: None,
path_expr_id: None,
capture_kind,
};
capture_information.insert(place, fake_info);
}
}
// This will update the min captures based on this new fake information.
self.compute_min_captures(closure_def_id, capture_information);
}
if let Some(closure_substs) = infer_kind {
// Unify the (as yet unbound) type variable in the closure
// substs with the kind we inferred.
@ -197,7 +217,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}
self.compute_min_captures(closure_def_id, delegate);
self.log_closure_min_capture_info(closure_def_id, span);
self.min_captures_to_closure_captures_bridge(closure_def_id);
@ -344,6 +363,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Places (and corresponding capture kind) that we need to keep track of to support all
/// the required captured paths.
///
/// Note: If this function is called multiple times for the same closure, it will update
/// the existing min_capture map that is stored in TypeckResults.
///
/// Eg:
/// ```rust,no_run
/// struct Point { x: i32, y: i32 }
@ -408,11 +431,18 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn compute_min_captures(
&self,
closure_def_id: DefId,
inferred_info: InferBorrowKind<'_, 'tcx>,
capture_information: InferredCaptureInformation<'tcx>,
) {
let mut root_var_min_capture_list: ty::RootVariableMinCaptureList<'_> = Default::default();
if capture_information.is_empty() {
return;
}
for (place, capture_info) in inferred_info.capture_information.into_iter() {
let mut typeck_results = self.typeck_results.borrow_mut();
let mut root_var_min_capture_list =
typeck_results.closure_min_captures.remove(&closure_def_id).unwrap_or_default();
for (place, capture_info) in capture_information.into_iter() {
let var_hir_id = match place.base {
PlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id,
base => bug!("Expected upvar, found={:?}", base),
@ -422,7 +452,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let min_cap_list = match root_var_min_capture_list.get_mut(&var_hir_id) {
None => {
let mutability = self.determine_capture_mutability(&place);
let mutability = self.determine_capture_mutability(&typeck_results, &place);
let min_cap_list =
vec![ty::CapturedPlace { place, info: capture_info, mutability }];
root_var_min_capture_list.insert(var_hir_id, min_cap_list);
@ -487,7 +517,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Only need to insert when we don't have an ancestor in the existing min capture list
if !ancestor_found {
let mutability = self.determine_capture_mutability(&place);
let mutability = self.determine_capture_mutability(&typeck_results, &place);
let captured_place =
ty::CapturedPlace { place, info: updated_capture_info, mutability };
min_cap_list.push(captured_place);
@ -495,15 +525,123 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
debug!("For closure={:?}, min_captures={:#?}", closure_def_id, root_var_min_capture_list);
typeck_results.closure_min_captures.insert(closure_def_id, root_var_min_capture_list);
}
if !root_var_min_capture_list.is_empty() {
self.typeck_results
.borrow_mut()
.closure_min_captures
.insert(closure_def_id, root_var_min_capture_list);
/// Perform the migration analysis for RFC 2229, and emit lint
/// `disjoint_capture_drop_reorder` if needed.
fn perform_2229_migration_analysis(
&self,
closure_def_id: DefId,
capture_clause: hir::CaptureBy,
span: Span,
body: &'tcx hir::Body<'tcx>,
) {
let need_migrations = self.compute_2229_migrations_first_pass(
closure_def_id,
span,
capture_clause,
body,
self.typeck_results.borrow().closure_min_captures.get(&closure_def_id),
);
if !need_migrations.is_empty() {
let need_migrations_hir_id = need_migrations.iter().map(|m| m.0).collect::<Vec<_>>();
let migrations_text = migration_suggestion_for_2229(self.tcx, &need_migrations_hir_id);
let local_def_id = closure_def_id.expect_local();
let closure_hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
self.tcx.struct_span_lint_hir(
lint::builtin::DISJOINT_CAPTURE_DROP_REORDER,
closure_hir_id,
span,
|lint| {
let mut diagnostics_builder = lint.build(
"drop order affected for closure because of `capture_disjoint_fields`",
);
diagnostics_builder.note(&migrations_text);
diagnostics_builder.emit();
},
);
}
}
/// Figures out the list of root variables (and their types) that aren't completely
/// captured by the closure when `capture_disjoint_fields` is enabled, and the drop order of
/// some path starting at that root variable **might** be affected.
///
/// The output list would include a root variable if:
/// - It would have been moved into the closure when `capture_disjoint_fields` wasn't
/// enabled, **and**
/// - It wasn't completely captured by the closure, **and**
/// - The type of the root variable needs Drop.
fn compute_2229_migrations_first_pass(
&self,
closure_def_id: DefId,
closure_span: Span,
closure_clause: hir::CaptureBy,
body: &'tcx hir::Body<'tcx>,
min_captures: Option<&ty::RootVariableMinCaptureList<'tcx>>,
) -> Vec<(hir::HirId, Ty<'tcx>)> {
fn resolve_ty<T: TypeFoldable<'tcx>>(
fcx: &FnCtxt<'_, 'tcx>,
span: Span,
body: &'tcx hir::Body<'tcx>,
ty: T,
) -> T {
let mut resolver = Resolver::new(fcx, &span, body);
ty.fold_with(&mut resolver)
}
let upvars = if let Some(upvars) = self.tcx.upvars_mentioned(closure_def_id) {
upvars
} else {
return vec![];
};
let mut need_migrations = Vec::new();
for (&var_hir_id, _) in upvars.iter() {
let ty = resolve_ty(self, closure_span, body, self.node_ty(var_hir_id));
if !ty.needs_drop(self.tcx, self.tcx.param_env(closure_def_id.expect_local())) {
continue;
}
let root_var_min_capture_list = if let Some(root_var_min_capture_list) =
min_captures.and_then(|m| m.get(&var_hir_id))
{
root_var_min_capture_list
} else {
// The upvar is mentioned within the closure but no path starting from it is
// used.
match closure_clause {
// Only migrate if closure is a move closure
hir::CaptureBy::Value => need_migrations.push((var_hir_id, ty)),
hir::CaptureBy::Ref => {}
}
continue;
};
let is_moved = root_var_min_capture_list
.iter()
.any(|capture| matches!(capture.info.capture_kind, ty::UpvarCapture::ByValue(_)));
let is_not_completely_captured =
root_var_min_capture_list.iter().any(|capture| capture.place.projections.len() > 0);
if is_moved && is_not_completely_captured {
need_migrations.push((var_hir_id, ty));
}
}
need_migrations
}
fn init_capture_kind(
&self,
capture_clause: hir::CaptureBy,
@ -613,18 +751,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// A captured place is mutable if
/// 1. Projections don't include a Deref of an immut-borrow, **and**
/// 2. PlaceBase is mut or projections include a Deref of a mut-borrow.
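///
/// For example, a capture of `(*x).0` where `x: &mut (String, String)` is mutable, since its
/// projections include a Deref of a mut-borrow, while the same capture with
/// `x: &(String, String)` is immutable, since its projections include a Deref of an immut-borrow.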
fn determine_capture_mutability(&self, place: &Place<'tcx>) -> hir::Mutability {
fn determine_capture_mutability(
&self,
typeck_results: &'a TypeckResults<'tcx>,
place: &Place<'tcx>,
) -> hir::Mutability {
let var_hir_id = match place.base {
PlaceBase::Upvar(upvar_id) => upvar_id.var_path.hir_id,
_ => unreachable!(),
};
let bm = *self
.typeck_results
.borrow()
.pat_binding_modes()
.get(var_hir_id)
.expect("missing binding mode");
let bm = *typeck_results.pat_binding_modes().get(var_hir_id).expect("missing binding mode");
let mut is_mutbl = match bm {
ty::BindByValue(mutability) => mutability,
@ -698,9 +835,11 @@ struct InferBorrowKind<'a, 'tcx> {
///
/// For closure `fix_s`, (at a high level) the map contains
///
/// ```
/// Place { V1, [ProjectionKind::Field(Index=0, Variant=0)] } : CaptureKind { E1, ImmutableBorrow }
/// Place { V1, [ProjectionKind::Field(Index=1, Variant=0)] } : CaptureKind { E2, MutableBorrow }
capture_information: FxIndexMap<Place<'tcx>, ty::CaptureInfo<'tcx>>,
/// ```
capture_information: InferredCaptureInformation<'tcx>,
}
impl<'a, 'tcx> InferBorrowKind<'a, 'tcx> {
@ -1119,6 +1258,21 @@ fn var_name(tcx: TyCtxt<'_>, var_hir_id: hir::HirId) -> Symbol {
tcx.hir().name(var_hir_id)
}
fn should_do_migration_analysis(tcx: TyCtxt<'_>, closure_id: hir::HirId) -> bool {
let (level, _) =
tcx.lint_level_at_node(lint::builtin::DISJOINT_CAPTURE_DROP_REORDER, closure_id);
!matches!(level, lint::Level::Allow)
}
fn migration_suggestion_for_2229(tcx: TyCtxt<'_>, need_migrations: &Vec<hir::HirId>) -> String {
let need_migrations_strings =
need_migrations.iter().map(|v| format!("{}", var_name(tcx, *v))).collect::<Vec<_>>();
let migrations_list_concat = need_migrations_strings.join(", ");
format!("drop(&({}));", migrations_list_concat)
}
/// Helper function to determine if we need to escalate CaptureKind from
/// CaptureInfo A to B and returns the escalated CaptureInfo.
/// (Note: CaptureInfo contains CaptureKind and an expression that led to capture it in that way)


@ -650,7 +650,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
}
}
trait Locatable {
crate trait Locatable {
fn to_span(&self, tcx: TyCtxt<'_>) -> Span;
}
@ -668,7 +668,7 @@ impl Locatable for hir::HirId {
/// The Resolver. This is the type folding engine that detects
/// unresolved types and so forth.
struct Resolver<'cx, 'tcx> {
crate struct Resolver<'cx, 'tcx> {
tcx: TyCtxt<'tcx>,
infcx: &'cx InferCtxt<'cx, 'tcx>,
span: &'cx dyn Locatable,
@ -679,7 +679,7 @@ struct Resolver<'cx, 'tcx> {
}
impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
fn new(
crate fn new(
fcx: &'cx FnCtxt<'cx, 'tcx>,
span: &'cx dyn Locatable,
body: &'tcx hir::Body<'tcx>,


@ -1,6 +1,8 @@
use crate::intrinsics;
use crate::iter::adapters::{zip::try_get_unchecked, InPlaceIterable, SourceIter};
use crate::iter::{DoubleEndedIterator, ExactSizeIterator, FusedIterator, TrustedRandomAccess};
use crate::iter::{
DoubleEndedIterator, ExactSizeIterator, FusedIterator, TrustedLen, TrustedRandomAccess,
};
use crate::ops::Try;
/// An iterator that yields `None` forever after the underlying iterator
@ -182,8 +184,19 @@ where
}
}
#[unstable(feature = "trusted_len", issue = "37572")]
// SAFETY: `TrustedLen` requires that an accurate length is reported via `size_hint()`. As `Fuse`
// is just forwarding this to the wrapped iterator `I`, this property is preserved and it is safe to
// implement `TrustedLen` here.
unsafe impl<I> TrustedLen for Fuse<I> where I: TrustedLen {}
#[doc(hidden)]
#[unstable(feature = "trusted_random_access", issue = "none")]
// SAFETY: `TrustedRandomAccess` requires that `size_hint()` must be exact and cheap to call, and
// `Iterator::__iterator_get_unchecked()` must be implemented accordingly.
//
// This is safe to implement as `Fuse` is just forwarding these to the wrapped iterator `I`, which
// preserves these properties.
unsafe impl<I> TrustedRandomAccess for Fuse<I>
where
I: TrustedRandomAccess,
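A small user-level sketch of what the new `TrustedLen` impl for `Fuse` means in practice; this example is an editor's illustration rather than part of the diff, and the claim about exact pre-allocation assumes the standard library's existing `TrustedLen` specialization for collecting into a `Vec`:

```rust
fn main() {
    // `0..10u32` implements `TrustedLen`; with this change, fusing it preserves that
    // guarantee, so the reported size hint stays exact.
    let it = (0..10u32).fuse();
    assert_eq!(it.size_hint(), (10, Some(10)));

    // Collecting can therefore reserve the exact capacity up front.
    let v: Vec<u32> = it.collect();
    assert_eq!(v.len(), 10);
}
```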


@ -147,11 +147,14 @@ fn _assert_send_sync() {
_assert::<Backtrace>();
}
struct BacktraceFrame {
/// A single frame of a backtrace.
#[unstable(feature = "backtrace_frames", issue = "79676")]
pub struct BacktraceFrame {
frame: RawFrame,
symbols: Vec<BacktraceSymbol>,
}
#[derive(Debug)]
enum RawFrame {
Actual(backtrace_rs::Frame),
#[cfg(test)]
@ -196,6 +199,14 @@ impl fmt::Debug for Backtrace {
}
}
impl fmt::Debug for BacktraceFrame {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut dbg = fmt.debug_list();
dbg.entries(&self.symbols);
dbg.finish()
}
}
impl fmt::Debug for BacktraceSymbol {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
// FIXME: improve formatting: https://github.com/rust-lang/rust/issues/65280
@ -353,6 +364,14 @@ impl Backtrace {
}
}
impl<'a> Backtrace {
/// Returns an iterator over the backtrace frames.
#[unstable(feature = "backtrace_frames", issue = "79676")]
pub fn frames(&'a self) -> &'a [BacktraceFrame] {
if let Inner::Captured(c) = &self.inner { &c.force().frames } else { &[] }
}
}
impl fmt::Display for Backtrace {
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
let capture = match &self.inner {
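A minimal nightly sketch of using the new frames accessor; this is the editor's illustration, not part of the diff, and assumes the `backtrace_frames` gate shown above (the `Backtrace` type itself may require its own feature gate on older toolchains):

```rust
#![feature(backtrace_frames)]

use std::backtrace::Backtrace;

fn main() {
    // Capture a backtrace regardless of the RUST_BACKTRACE environment variable.
    let bt = Backtrace::force_capture();

    // The new `frames` accessor exposes the captured frames; each frame's Debug impl
    // lists its resolved symbols.
    for frame in bt.frames() {
        println!("{:#?}", frame);
    }
}
```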


@ -1,48 +1,52 @@
use super::*;
fn generate_fake_frames() -> Vec<BacktraceFrame> {
vec![
BacktraceFrame {
frame: RawFrame::Fake,
symbols: vec![BacktraceSymbol {
name: Some(b"std::backtrace::Backtrace::create".to_vec()),
filename: Some(BytesOrWide::Bytes(b"rust/backtrace.rs".to_vec())),
lineno: Some(100),
colno: None,
}],
},
BacktraceFrame {
frame: RawFrame::Fake,
symbols: vec![BacktraceSymbol {
name: Some(b"__rust_maybe_catch_panic".to_vec()),
filename: None,
lineno: None,
colno: None,
}],
},
BacktraceFrame {
frame: RawFrame::Fake,
symbols: vec![
BacktraceSymbol {
name: Some(b"std::rt::lang_start_internal".to_vec()),
filename: Some(BytesOrWide::Bytes(b"rust/rt.rs".to_vec())),
lineno: Some(300),
colno: Some(5),
},
BacktraceSymbol {
name: Some(b"std::rt::lang_start".to_vec()),
filename: Some(BytesOrWide::Bytes(b"rust/rt.rs".to_vec())),
lineno: Some(400),
colno: None,
},
],
},
]
}
#[test]
fn test_debug() {
let backtrace = Backtrace {
inner: Inner::Captured(LazilyResolvedCapture::new(Capture {
actual_start: 1,
resolved: true,
frames: vec![
BacktraceFrame {
frame: RawFrame::Fake,
symbols: vec![BacktraceSymbol {
name: Some(b"std::backtrace::Backtrace::create".to_vec()),
filename: Some(BytesOrWide::Bytes(b"rust/backtrace.rs".to_vec())),
lineno: Some(100),
colno: None,
}],
},
BacktraceFrame {
frame: RawFrame::Fake,
symbols: vec![BacktraceSymbol {
name: Some(b"__rust_maybe_catch_panic".to_vec()),
filename: None,
lineno: None,
colno: None,
}],
},
BacktraceFrame {
frame: RawFrame::Fake,
symbols: vec![
BacktraceSymbol {
name: Some(b"std::rt::lang_start_internal".to_vec()),
filename: Some(BytesOrWide::Bytes(b"rust/rt.rs".to_vec())),
lineno: Some(300),
colno: Some(5),
},
BacktraceSymbol {
name: Some(b"std::rt::lang_start".to_vec()),
filename: Some(BytesOrWide::Bytes(b"rust/rt.rs".to_vec())),
lineno: Some(400),
colno: None,
},
],
},
],
frames: generate_fake_frames(),
})),
};
@ -58,3 +62,34 @@ fn test_debug() {
// Format the backtrace a second time, just to make sure lazily resolved state is stable
assert_eq!(format!("{:#?}", backtrace), expected);
}
#[test]
fn test_frames() {
let backtrace = Backtrace {
inner: Inner::Captured(LazilyResolvedCapture::new(Capture {
actual_start: 1,
resolved: true,
frames: generate_fake_frames(),
})),
};
let frames = backtrace.frames();
#[rustfmt::skip]
let expected = vec![
"[
{ fn: \"std::backtrace::Backtrace::create\", file: \"rust/backtrace.rs\", line: 100 },
]",
"[
{ fn: \"__rust_maybe_catch_panic\" },
]",
"[
{ fn: \"std::rt::lang_start_internal\", file: \"rust/rt.rs\", line: 300 },
{ fn: \"std::rt::lang_start\", file: \"rust/rt.rs\", line: 400 },
]"
];
let mut iter = frames.iter().zip(expected.iter());
assert!(iter.all(|(f, e)| format!("{:#?}", f) == *e));
}


@ -2,6 +2,7 @@ crate mod escape;
crate mod format;
crate mod highlight;
crate mod layout;
// used by the error-index generator, so it needs to be public
pub mod markdown;
crate mod render;
crate mod sources;


@ -1490,6 +1490,14 @@ h4 > .notable-traits {
background-color: rgba(0,0,0,0);
height: 100%;
}
/*
This prevents the version text from overflowing the sidebar title in mobile mode when the
sidebar is displayed (after clicking on the "hamburger" button).
*/
.sidebar.mobile > div.version {
overflow: hidden;
max-height: 33px;
}
.sidebar {
width: calc(100% + 30px);
}


@ -83,7 +83,8 @@ mod doctree;
mod error;
mod doctest;
mod fold;
crate mod formats;
mod formats;
// used by the error-index generator, so it needs to be public
pub mod html;
mod json;
mod markdown;


@ -0,0 +1,25 @@
// check-pass
use std::borrow::Borrow;
trait TNode: Sized {
type ConcreteElement: TElement<ConcreteNode = Self>;
}
trait TElement: Sized {
type ConcreteNode: TNode<ConcreteElement = Self>;
}
trait DomTraversal<N: TNode> {
type BorrowElement: Borrow<N::ConcreteElement>;
}
#[allow(dead_code)]
fn recalc_style_at<E, D>()
where
E: TElement,
D: DomTraversal<E::ConcreteNode>,
{
}
fn main() {}


@ -0,0 +1,14 @@
// check-pass
pub trait Test {
type Item;
type Bundle: From<Self::Item>;
}
fn fails<T>()
where
T: Test<Item = String>,
{
}
fn main() {}


@ -0,0 +1,10 @@
// check-pass
trait Foo { type FooT: Foo; }
impl Foo for () { type FooT = (); }
trait Bar<T: Foo> { type BarT: Bar<T::FooT>; }
impl Bar<()> for () { type BarT = (); }
#[allow(dead_code)]
fn test<C: Bar<()>>() { }
fn main() { }


@ -0,0 +1,24 @@
// check-pass
#![feature(associated_type_bounds)]
#![feature(type_alias_impl_trait)]
fn main() {}
trait Bar { type Assoc; }
trait Thing {
type Out;
fn func() -> Self::Out;
}
struct AssocIsCopy;
impl Bar for AssocIsCopy { type Assoc = u8; }
impl Thing for AssocIsCopy {
type Out = impl Bar<Assoc: Copy>;
fn func() -> Self::Out {
AssocIsCopy
}
}


@ -0,0 +1,5 @@
struct A;
fn main() {
println!("{:?}", 1.0 as *const A); //~ERROR casting `f64` as `*const A` is invalid
}


@ -1,7 +1,7 @@
error[E0606]: casting `f64` as `*const A` is invalid
--> $DIR/unsupported-cast.rs:6:20
--> $DIR/unsupported-cast.rs:4:20
|
LL | println!("{:?}", 1.0 as *const A); // Can't cast float to foreign.
LL | println!("{:?}", 1.0 as *const A);
| ^^^^^^^^^^^^^^^
error: aborting due to previous error


@ -0,0 +1,130 @@
#![deny(disjoint_capture_drop_reorder)]
//~^ NOTE: the lint level is defined here
// Test cases for types that implement an insignificant drop (stdlib defined)
// `t` needs Drop because one of its elements needs drop,
// therefore precise capture might affect drop ordering
fn test1_all_need_migration() {
let t = (String::new(), String::new());
let t1 = (String::new(), String::new());
let t2 = (String::new(), String::new());
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t, t1, t2));
let _t = t.0;
let _t1 = t1.0;
let _t2 = t2.0;
};
c();
}
// String implements Drop and therefore should be migrated.
// But in this test case, `t2` is completely captured, so its drop order won't be affected.
fn test2_only_precise_paths_need_migration() {
let t = (String::new(), String::new());
let t1 = (String::new(), String::new());
let t2 = (String::new(), String::new());
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t, t1));
let _t = t.0;
let _t1 = t1.0;
let _t2 = t2;
};
c();
}
// If a variable wouldn't have been captured by value, then it wouldn't have been
// dropped with the closure and therefore doesn't need migration.
fn test3_only_by_value_need_migration() {
let t = (String::new(), String::new());
let t1 = (String::new(), String::new());
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
println!("{}", t1.1);
};
c();
}
// Copy types get copied into the closure instead of moved. Therefore we don't need to
// migrate them, as their drop order isn't tied to the closure.
fn test4_only_non_copy_types_need_migration() {
let t = (String::new(), String::new());
// `t1` is Copy because all of its elements are Copy
let t1 = (0i32, 0i32);
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
let _t1 = t1.0;
};
c();
}
fn test5_only_drop_types_need_migration() {
struct S(i32, i32);
let t = (String::new(), String::new());
// Neither `s` nor any of its elements implement Drop, so it doesn't need migration
let s = S(0i32, 0i32);
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
let _s = s.0;
};
c();
}
// Since we are using a move closure here, both `t` and `t1` get moved
// even though they are being used by ref inside the closure.
fn test6_move_closures_non_copy_types_might_need_migration() {
let t = (String::new(), String::new());
let t1 = (String::new(), String::new());
let c = move || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t1, t));
println!("{} {}", t1.1, t.1);
};
c();
}
// Test migration analysis in case of Drop + Non Drop aggregates.
// Note we need migration here only because the non-Copy (i.e. Drop) part is captured;
// otherwise we wouldn't need to, since we could get away with just a by-ref capture in that case.
fn test7_drop_non_drop_aggregate_need_migration() {
let t = (String::new(), String::new(), 0i32);
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
};
c();
}
fn main() {
test1_all_need_migration();
test2_only_precise_paths_need_migration();
test3_only_by_value_need_migration();
test4_only_non_copy_types_need_migration();
test5_only_drop_types_need_migration();
test6_move_closures_non_copy_types_might_need_migration();
test7_drop_non_drop_aggregate_need_migration();
}


@ -0,0 +1,105 @@
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:13:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _t1 = t1.0;
LL | | let _t2 = t2.0;
LL | | };
| |_____^
|
note: the lint level is defined here
--> $DIR/insignificant_drop.rs:1:9
|
LL | #![deny(disjoint_capture_drop_reorder)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: drop(&(t, t1, t2));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:31:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _t1 = t1.0;
LL | | let _t2 = t2;
LL | | };
| |_____^
|
= note: drop(&(t, t1));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:47:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | println!("{}", t1.1);
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:65:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _t1 = t1.0;
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:83:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _s = s.0;
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:98:13
|
LL | let c = move || {
| _____________^
LL | |
LL | |
LL | | println!("{} {}", t1.1, t.1);
LL | | };
| |_____^
|
= note: drop(&(t1, t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/insignificant_drop.rs:113:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | };
| |_____^
|
= note: drop(&(t));
error: aborting due to 7 previous errors


@ -0,0 +1,84 @@
// run-pass
// Set of test cases that don't need migrations
#![deny(disjoint_capture_drop_reorder)]
// Copy types are copied by the closure instead of being moved into it.
// Therefore their drop order isn't tied to the closure and they won't require any
// migrations.
fn test1_only_copy_types() {
let t = (0i32, 0i32);
let c = || {
let _t = t.0;
};
c();
}
// Same as test1 but using a move closure
fn test2_only_copy_types_move_closure() {
let t = (0i32, 0i32);
let c = move || {
println!("{}", t.0);
};
c();
}
// Don't need to migrate if captured by ref
fn test3_only_copy_types_move_closure() {
let t = (String::new(), String::new());
let c = || {
println!("{}", t.0);
};
c();
}
// Test migration analysis in case of Insignificant Drop + Non Drop aggregates.
// Note in this test the closure captures a non Drop type and therefore the variable
// is only captured by ref.
fn test4_insignificant_drop_non_drop_aggregate() {
let t = (String::new(), 0i32);
let c = || {
let _t = t.1;
};
c();
}
struct Foo(i32);
impl Drop for Foo {
fn drop(&mut self) {
println!("{:?} dropped", self.0);
}
}
// Test migration analysis in case of Significant Drop + Non Drop aggregates.
// Note in this test the closure captures a non Drop type and therefore the variable
// is only captured by ref.
fn test5_significant_drop_non_drop_aggregate() {
let t = (Foo(0), 0i32);
let c = || {
let _t = t.1;
};
c();
}
fn main() {
test1_only_copy_types();
test2_only_copy_types_move_closure();
test3_only_copy_types_move_closure();
test4_insignificant_drop_non_drop_aggregate();
test5_significant_drop_non_drop_aggregate();
}


@ -0,0 +1,137 @@
#![deny(disjoint_capture_drop_reorder)]
//~^ NOTE: the lint level is defined here
// Test cases for types that implement a significant drop (user defined)
#[derive(Debug)]
struct Foo(i32);
impl Drop for Foo {
fn drop(&mut self) {
println!("{:?} dropped", self.0);
}
}
#[derive(Debug)]
struct ContainsDropField(Foo, Foo);
// `t` needs Drop because one of its elements needs drop,
// therefore precise capture might affect drop ordering
fn test1_all_need_migration() {
let t = (Foo(0), Foo(0));
let t1 = (Foo(0), Foo(0));
let t2 = (Foo(0), Foo(0));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t, t1, t2));
let _t = t.0;
let _t1 = t1.0;
let _t2 = t2.0;
};
c();
}
// `Foo` implements Drop and therefore should be migrated.
// But in this test case, `t2` is completely captured, so its drop order won't be affected.
fn test2_only_precise_paths_need_migration() {
let t = (Foo(0), Foo(0));
let t1 = (Foo(0), Foo(0));
let t2 = (Foo(0), Foo(0));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t, t1));
let _t = t.0;
let _t1 = t1.0;
let _t2 = t2;
};
c();
}
// If a variable wouldn't have been captured by value, then it wouldn't have been
// dropped with the closure and therefore doesn't need migration.
fn test3_only_by_value_need_migration() {
let t = (Foo(0), Foo(0));
let t1 = (Foo(0), Foo(0));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
println!("{:?}", t1.1);
};
c();
}
// The root variable might not implement Drop itself, but some path starting
// at the root variable might.
//
// If this path isn't captured, we need to migrate the root variable.
fn test4_type_contains_drop_need_migration() {
let t = ContainsDropField(Foo(0), Foo(0));
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
};
c();
}
// Test migration analysis in case of Drop + Non Drop aggregates.
// Note we need migration here only because the non-Copy (i.e. Drop) part is captured;
// otherwise we wouldn't need to, since we could get away with just a by-ref capture in that case.
fn test5_drop_non_drop_aggregate_need_migration() {
let t = (Foo(0), Foo(0), 0i32);
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.0;
};
c();
}
// Test migration analysis in case of Significant and Insignificant Drop aggregates.
fn test6_significant_insignificant_drop_aggregate_need_migration() {
struct S(i32, i32);
let t = (Foo(0), String::new());
let c = || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t));
let _t = t.1;
};
c();
}
// Since we are using a move closure here, both `t` and `t1` get moved
// even though they are being used by ref inside the closure.
fn test7_move_closures_non_copy_types_might_need_migration() {
let t = (Foo(0), Foo(0));
let t1 = (Foo(0), Foo(0), Foo(0));
let c = move || {
//~^ERROR: drop order affected for closure because of `capture_disjoint_fields`
//~| NOTE: drop(&(t1, t));
println!("{:?} {:?}", t1.1, t.1);
};
c();
}
fn main() {
test1_all_need_migration();
test2_only_precise_paths_need_migration();
test3_only_by_value_need_migration();
test4_type_contains_drop_need_migration();
test5_drop_non_drop_aggregate_need_migration();
test6_significant_insignificant_drop_aggregate_need_migration();
test7_move_closures_non_copy_types_might_need_migration();
}


@ -0,0 +1,103 @@
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:24:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _t1 = t1.0;
LL | | let _t2 = t2.0;
LL | | };
| |_____^
|
note: the lint level is defined here
--> $DIR/significant_drop.rs:1:9
|
LL | #![deny(disjoint_capture_drop_reorder)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: drop(&(t, t1, t2));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:42:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | let _t1 = t1.0;
LL | | let _t2 = t2;
LL | | };
| |_____^
|
= note: drop(&(t, t1));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:58:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | println!("{:?}", t1.1);
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:75:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:90:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.0;
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:105:13
|
LL | let c = || {
| _____________^
LL | |
LL | |
LL | | let _t = t.1;
LL | | };
| |_____^
|
= note: drop(&(t));
error: drop order affected for closure because of `capture_disjoint_fields`
--> $DIR/significant_drop.rs:120:13
|
LL | let c = move || {
| _____________^
LL | |
LL | |
LL | | println!("{:?} {:?}", t1.1, t.1);
LL | | };
| |_____^
|
= note: drop(&(t1, t));
error: aborting due to 7 previous errors

View File

@ -21,7 +21,6 @@ where
fn substs3<const L: usize>() -> Substs1<{ (L - 1) * 2 }>
where
[(); (L - 1)]: ,
[(); (L - 1) * 2 + 1]: ,
{
substs2::<{ L - 1 }>()


@ -0,0 +1,17 @@
// run-pass
#![feature(const_generics, const_evaluatable_checked)]
#![allow(incomplete_features)]
fn make_array<const M: usize>() -> [(); M + 1] {
[(); M + 1]
}
fn foo<const N: usize>() -> [(); (N * 2) + 1] {
make_array::<{ N * 2 }>()
}
fn main() {
assert_eq!(foo::<10>(), [(); 10 * 2 + 1])
}
// Tests that N * 2 is considered const_evaluatable by appearing as part of the (N * 2) + 1 const


@ -7,10 +7,10 @@ LL | let mut closure1 = || p = &y;
= note: defining type: test::{closure#0}::{closure#0} with closure substs [
i16,
extern "rust-call" fn(()),
(&'_#1r i32, &'_#2r mut &'_#3r i32),
(&'_#1r mut &'_#2r i32, &'_#3r i32),
]
= note: number of external vids: 4
= note: where '_#1r: '_#3r
= note: where '_#3r: '_#2r
note: external requirements
--> $DIR/escape-upvar-nested.rs:20:27
@ -25,10 +25,10 @@ LL | | };
= note: defining type: test::{closure#0} with closure substs [
i16,
extern "rust-call" fn(()),
(&'_#1r i32, &'_#2r mut &'_#3r i32),
(&'_#1r mut &'_#2r i32, &'_#3r i32),
]
= note: number of external vids: 4
= note: where '_#1r: '_#3r
= note: where '_#3r: '_#2r
note: no external requirements
--> $DIR/escape-upvar-nested.rs:13:1


@ -7,10 +7,10 @@ LL | let mut closure = || p = &y;
= note: defining type: test::{closure#0} with closure substs [
i16,
extern "rust-call" fn(()),
(&'_#1r i32, &'_#2r mut &'_#3r i32),
(&'_#1r mut &'_#2r i32, &'_#3r i32),
]
= note: number of external vids: 4
= note: where '_#1r: '_#3r
= note: where '_#3r: '_#2r
note: no external requirements
--> $DIR/escape-upvar-ref.rs:17:1


@ -0,0 +1,14 @@
// aux-build:test-macros.rs
// Regression test for issue #81543
// Tests that we emit a properly spanned error
// when the output of a proc-macro cannot be parsed
// as the expected AST node kind
extern crate test_macros;
test_macros::identity! {
fn 32() {} //~ ERROR expected identifier
}
fn main() {}


@ -0,0 +1,8 @@
error: expected identifier, found `32`
--> $DIR/issue-81543-item-parse-err.rs:11:8
|
LL | fn 32() {}
| ^^ expected identifier
error: aborting due to previous error


@ -2,7 +2,12 @@ error: expected type, found `'`
--> $DIR/lifetimes.rs:7:10
|
LL | type A = single_quote_alone!();
| ^^^^^^^^^^^^^^^^^^^^^ this macro call doesn't expand to a type
| ^^^^^^^^^^^^^^^^^^^^^
| |
| expected type
| this macro call doesn't expand to a type
|
= note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to previous error


@ -1,7 +0,0 @@
// error-pattern:casting
struct A;
fn main() {
println!("{:?}", 1.0 as *const A); // Can't cast float to foreign.
}