From 26edcee09309dad251532a7e1c1d96bac1562a57 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Fri, 2 Nov 2018 16:14:24 +0100 Subject: [PATCH 1/6] Prevent stack overflow for deeply recursive code --- Cargo.lock | 23 ++ src/librustc_ast_lowering/expr.rs | 373 +++++++++--------- src/librustc_ast_lowering/pat.rs | 144 +++---- src/librustc_interface/util.rs | 9 +- src/librustc_middle/Cargo.toml | 1 + src/librustc_middle/middle/limits.rs | 18 + src/librustc_middle/ty/inhabitedness/mod.rs | 5 +- src/librustc_middle/ty/query/plumbing.rs | 4 +- src/librustc_mir/monomorphize/collector.rs | 8 +- src/librustc_mir_build/build/expr/as_temp.rs | 7 +- .../traits/project.rs | 3 +- .../traits/query/normalize.rs | 3 +- src/librustc_trait_selection/traits/select.rs | 190 +++++---- src/librustc_traits/dropck_outlives.rs | 14 +- 14 files changed, 448 insertions(+), 354 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8bd1e5f3308..7231bedb6ae 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2630,6 +2630,15 @@ dependencies = [ "core", ] +[[package]] +name = "psm" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b14fc68b454f875abc8354c2555e1d56596f74833ddc0f77f87f4871ed6a30e0" +dependencies = [ + "cc", +] + [[package]] name = "publicsuffix" version = "1.5.3" @@ -3152,6 +3161,7 @@ checksum = "81dfcfbb0ddfd533abf8c076e3b49d1e5042d1962526a12ce2c66d514b24cca3" dependencies = [ "rustc-ap-rustc_data_structures", "smallvec 1.0.0", + "stacker", ] [[package]] @@ -4657,6 +4667,19 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa" +[[package]] +name = "stacker" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a" +dependencies = [ + "cc", + "cfg-if", + "libc", + "psm", + "winapi 0.3.8", +] + [[package]] name = "std" version = "0.0.0" diff --git a/src/librustc_ast_lowering/expr.rs b/src/librustc_ast_lowering/expr.rs index cbe192d35e5..b295da8ecb9 100644 --- a/src/librustc_ast_lowering/expr.rs +++ b/src/librustc_ast_lowering/expr.rs @@ -7,6 +7,7 @@ use rustc_data_structures::thin_vec::ThinVec; use rustc_errors::struct_span_err; use rustc_hir as hir; use rustc_hir::def::Res; +use rustc_middle::limits::ensure_sufficient_stack; use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned}; use rustc_span::symbol::{sym, Symbol}; @@ -20,192 +21,206 @@ impl<'hir> LoweringContext<'_, 'hir> { } pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> { - let kind = match e.kind { - ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)), - ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), - ExprKind::Repeat(ref expr, ref count) => { - let expr = self.lower_expr(expr); - let count = self.lower_anon_const(count); - hir::ExprKind::Repeat(expr, count) - } - ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)), - ExprKind::Call(ref f, ref args) => { - let f = self.lower_expr(f); - hir::ExprKind::Call(f, self.lower_exprs(args)) - } - ExprKind::MethodCall(ref seg, ref args) => { - let hir_seg = self.arena.alloc(self.lower_path_segment( - e.span, - seg, - ParamMode::Optional, - 0, - ParenthesizedGenericArgs::Err, - ImplTraitContext::disallowed(), - None, - )); - let args = self.lower_exprs(args); - hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args) - } - ExprKind::Binary(binop, ref lhs, 
ref rhs) => { - let binop = self.lower_binop(binop); - let lhs = self.lower_expr(lhs); - let rhs = self.lower_expr(rhs); - hir::ExprKind::Binary(binop, lhs, rhs) - } - ExprKind::Unary(op, ref ohs) => { - let op = self.lower_unop(op); - let ohs = self.lower_expr(ohs); - hir::ExprKind::Unary(op, ohs) - } - ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())), - ExprKind::Cast(ref expr, ref ty) => { - let expr = self.lower_expr(expr); - let ty = self.lower_ty(ty, ImplTraitContext::disallowed()); - hir::ExprKind::Cast(expr, ty) - } - ExprKind::Type(ref expr, ref ty) => { - let expr = self.lower_expr(expr); - let ty = self.lower_ty(ty, ImplTraitContext::disallowed()); - hir::ExprKind::Type(expr, ty) - } - ExprKind::AddrOf(k, m, ref ohs) => { - let ohs = self.lower_expr(ohs); - hir::ExprKind::AddrOf(k, m, ohs) - } - ExprKind::Let(ref pat, ref scrutinee) => self.lower_expr_let(e.span, pat, scrutinee), - ExprKind::If(ref cond, ref then, ref else_opt) => { - self.lower_expr_if(e.span, cond, then, else_opt.as_deref()) - } - ExprKind::While(ref cond, ref body, opt_label) => self.with_loop_scope(e.id, |this| { - this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label) - }), - ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| { - hir::ExprKind::Loop(this.lower_block(body, false), opt_label, hir::LoopSource::Loop) - }), - ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body), - ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match( - self.lower_expr(expr), - self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))), - hir::MatchSource::Normal, - ), - ExprKind::Async(capture_clause, closure_node_id, ref block) => self.make_async_expr( - capture_clause, - closure_node_id, - None, - block.span, - hir::AsyncGeneratorKind::Block, - |this| this.with_new_scopes(|this| this.lower_block_expr(block)), - ), - ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr), - ExprKind::Closure( - capture_clause, - asyncness, - movability, - ref decl, - ref body, - fn_decl_span, - ) => { - if let Async::Yes { closure_id, .. 
} = asyncness { - self.lower_expr_async_closure( - capture_clause, - closure_id, - decl, - body, - fn_decl_span, - ) - } else { - self.lower_expr_closure(capture_clause, movability, decl, body, fn_decl_span) + ensure_sufficient_stack(|| { + let kind = match e.kind { + ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)), + ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)), + ExprKind::Repeat(ref expr, ref count) => { + let expr = self.lower_expr(expr); + let count = self.lower_anon_const(count); + hir::ExprKind::Repeat(expr, count) } - } - ExprKind::Block(ref blk, opt_label) => { - hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label) - } - ExprKind::Assign(ref el, ref er, span) => { - hir::ExprKind::Assign(self.lower_expr(el), self.lower_expr(er), span) - } - ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp( - self.lower_binop(op), - self.lower_expr(el), - self.lower_expr(er), - ), - ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident), - ExprKind::Index(ref el, ref er) => { - hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er)) - } - ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => { - self.lower_expr_range_closed(e.span, e1, e2) - } - ExprKind::Range(ref e1, ref e2, lims) => { - self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims) - } - ExprKind::Path(ref qself, ref path) => { - let qpath = self.lower_qpath( - e.id, - qself, - path, - ParamMode::Optional, - ImplTraitContext::disallowed(), - ); - hir::ExprKind::Path(qpath) - } - ExprKind::Break(opt_label, ref opt_expr) => { - let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x)); - hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr) - } - ExprKind::Continue(opt_label) => { - hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label)) - } - ExprKind::Ret(ref e) => { - let e = e.as_ref().map(|x| self.lower_expr(x)); - hir::ExprKind::Ret(e) - } - ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_asm(asm), - ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { - let maybe_expr = maybe_expr.as_ref().map(|x| self.lower_expr(x)); - hir::ExprKind::Struct( - self.arena.alloc(self.lower_qpath( + ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)), + ExprKind::Call(ref f, ref args) => { + let f = self.lower_expr(f); + hir::ExprKind::Call(f, self.lower_exprs(args)) + } + ExprKind::MethodCall(ref seg, ref args) => { + let hir_seg = self.arena.alloc(self.lower_path_segment( + e.span, + seg, + ParamMode::Optional, + 0, + ParenthesizedGenericArgs::Err, + ImplTraitContext::disallowed(), + None, + )); + let args = self.lower_exprs(args); + hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args) + } + ExprKind::Binary(binop, ref lhs, ref rhs) => { + let binop = self.lower_binop(binop); + let lhs = self.lower_expr(lhs); + let rhs = self.lower_expr(rhs); + hir::ExprKind::Binary(binop, lhs, rhs) + } + ExprKind::Unary(op, ref ohs) => { + let op = self.lower_unop(op); + let ohs = self.lower_expr(ohs); + hir::ExprKind::Unary(op, ohs) + } + ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())), + ExprKind::Cast(ref expr, ref ty) => { + let expr = self.lower_expr(expr); + let ty = self.lower_ty(ty, ImplTraitContext::disallowed()); + hir::ExprKind::Cast(expr, ty) + } + ExprKind::Type(ref expr, ref ty) => { + let expr = self.lower_expr(expr); + let ty = self.lower_ty(ty, ImplTraitContext::disallowed()); + 
hir::ExprKind::Type(expr, ty) + } + ExprKind::AddrOf(k, m, ref ohs) => { + let ohs = self.lower_expr(ohs); + hir::ExprKind::AddrOf(k, m, ohs) + } + ExprKind::Let(ref pat, ref scrutinee) => { + self.lower_expr_let(e.span, pat, scrutinee) + } + ExprKind::If(ref cond, ref then, ref else_opt) => { + self.lower_expr_if(e.span, cond, then, else_opt.as_deref()) + } + ExprKind::While(ref cond, ref body, opt_label) => self + .with_loop_scope(e.id, |this| { + this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label) + }), + ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| { + hir::ExprKind::Loop( + this.lower_block(body, false), + opt_label, + hir::LoopSource::Loop, + ) + }), + ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body), + ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match( + self.lower_expr(expr), + self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))), + hir::MatchSource::Normal, + ), + ExprKind::Async(capture_clause, closure_node_id, ref block) => self + .make_async_expr( + capture_clause, + closure_node_id, + None, + block.span, + hir::AsyncGeneratorKind::Block, + |this| this.with_new_scopes(|this| this.lower_block_expr(block)), + ), + ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr), + ExprKind::Closure( + capture_clause, + asyncness, + movability, + ref decl, + ref body, + fn_decl_span, + ) => { + if let Async::Yes { closure_id, .. } = asyncness { + self.lower_expr_async_closure( + capture_clause, + closure_id, + decl, + body, + fn_decl_span, + ) + } else { + self.lower_expr_closure( + capture_clause, + movability, + decl, + body, + fn_decl_span, + ) + } + } + ExprKind::Block(ref blk, opt_label) => { + hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label) + } + ExprKind::Assign(ref el, ref er, span) => { + hir::ExprKind::Assign(self.lower_expr(el), self.lower_expr(er), span) + } + ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp( + self.lower_binop(op), + self.lower_expr(el), + self.lower_expr(er), + ), + ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident), + ExprKind::Index(ref el, ref er) => { + hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er)) + } + ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => { + self.lower_expr_range_closed(e.span, e1, e2) + } + ExprKind::Range(ref e1, ref e2, lims) => { + self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims) + } + ExprKind::Path(ref qself, ref path) => { + let qpath = self.lower_qpath( e.id, - &None, + qself, path, ParamMode::Optional, ImplTraitContext::disallowed(), - )), - self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))), - maybe_expr, - ) - } - ExprKind::Paren(ref ex) => { - let mut ex = self.lower_expr_mut(ex); - // Include parens in span, but only if it is a super-span. - if e.span.contains(ex.span) { - ex.span = e.span; + ); + hir::ExprKind::Path(qpath) } - // Merge attributes into the inner expression. 
- let mut attrs = e.attrs.clone(); - attrs.extend::>(ex.attrs.into()); - ex.attrs = attrs; - return ex; + ExprKind::Break(opt_label, ref opt_expr) => { + let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x)); + hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr) + } + ExprKind::Continue(opt_label) => { + hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label)) + } + ExprKind::Ret(ref e) => { + let e = e.as_ref().map(|x| self.lower_expr(x)); + hir::ExprKind::Ret(e) + } + ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_asm(asm), + ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { + let maybe_expr = maybe_expr.as_ref().map(|x| self.lower_expr(x)); + hir::ExprKind::Struct( + self.arena.alloc(self.lower_qpath( + e.id, + &None, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + )), + self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))), + maybe_expr, + ) + } + ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()), + ExprKind::Err => hir::ExprKind::Err, + ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr), + ExprKind::Paren(ref ex) => { + let mut ex = self.lower_expr_mut(ex); + // Include parens in span, but only if it is a super-span. + if e.span.contains(ex.span) { + ex.span = e.span; + } + // Merge attributes into the inner expression. + let mut attrs = e.attrs.clone(); + attrs.extend::>(ex.attrs.into()); + ex.attrs = attrs; + return ex; + } + + // Desugar `ExprForLoop` + // from: `[opt_ident]: for in ` + ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => { + return self.lower_expr_for(e, pat, head, body, opt_label); + } + ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span), + }; + + hir::Expr { + hir_id: self.lower_node_id(e.id), + kind, + span: e.span, + attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::>().into(), } - - ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()), - - ExprKind::Err => hir::ExprKind::Err, - - // Desugar `ExprForLoop` - // from: `[opt_ident]: for in ` - ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => { - return self.lower_expr_for(e, pat, head, body, opt_label); - } - ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr), - ExprKind::MacCall(_) => panic!("Shouldn't exist here"), - }; - - hir::Expr { - hir_id: self.lower_node_id(e.id), - kind, - span: e.span, - attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::>().into(), - } + }) } fn lower_unop(&mut self, u: UnOp) -> hir::UnOp { diff --git a/src/librustc_ast_lowering/pat.rs b/src/librustc_ast_lowering/pat.rs index 28469dc5367..614450ed7cf 100644 --- a/src/librustc_ast_lowering/pat.rs +++ b/src/librustc_ast_lowering/pat.rs @@ -4,81 +4,87 @@ use rustc_ast::ast::*; use rustc_ast::ptr::P; use rustc_hir as hir; use rustc_hir::def::Res; +use rustc_middle::limits::ensure_sufficient_stack; use rustc_span::{source_map::Spanned, Span}; impl<'a, 'hir> LoweringContext<'a, 'hir> { crate fn lower_pat(&mut self, p: &Pat) -> &'hir hir::Pat<'hir> { - let node = match p.kind { - PatKind::Wild => hir::PatKind::Wild, - PatKind::Ident(ref binding_mode, ident, ref sub) => { - let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s)); - let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub); - node - } - PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)), - PatKind::TupleStruct(ref path, ref pats) => { - let qpath = self.lower_qpath( - p.id, - &None, - path, - 
ParamMode::Optional, - ImplTraitContext::disallowed(), - ); - let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct"); - hir::PatKind::TupleStruct(qpath, pats, ddpos) - } - PatKind::Or(ref pats) => { - hir::PatKind::Or(self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x)))) - } - PatKind::Path(ref qself, ref path) => { - let qpath = self.lower_qpath( - p.id, - qself, - path, - ParamMode::Optional, - ImplTraitContext::disallowed(), - ); - hir::PatKind::Path(qpath) - } - PatKind::Struct(ref path, ref fields, etc) => { - let qpath = self.lower_qpath( - p.id, - &None, - path, - ParamMode::Optional, - ImplTraitContext::disallowed(), - ); + ensure_sufficient_stack(|| { + let node = match p.kind { + PatKind::Wild => hir::PatKind::Wild, + PatKind::Ident(ref binding_mode, ident, ref sub) => { + let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(&*s)); + let node = self.lower_pat_ident(p, binding_mode, ident, lower_sub); + node + } + PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)), + PatKind::TupleStruct(ref path, ref pats) => { + let qpath = self.lower_qpath( + p.id, + &None, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + ); + let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple struct"); + hir::PatKind::TupleStruct(qpath, pats, ddpos) + } + PatKind::Or(ref pats) => hir::PatKind::Or( + self.arena.alloc_from_iter(pats.iter().map(|x| self.lower_pat(x))), + ), + PatKind::Path(ref qself, ref path) => { + let qpath = self.lower_qpath( + p.id, + qself, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + ); + hir::PatKind::Path(qpath) + } + PatKind::Struct(ref path, ref fields, etc) => { + let qpath = self.lower_qpath( + p.id, + &None, + path, + ParamMode::Optional, + ImplTraitContext::disallowed(), + ); - let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat { - hir_id: self.next_id(), - ident: f.ident, - pat: self.lower_pat(&f.pat), - is_shorthand: f.is_shorthand, - span: f.span, - })); - hir::PatKind::Struct(qpath, fs, etc) - } - PatKind::Tuple(ref pats) => { - let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple"); - hir::PatKind::Tuple(pats, ddpos) - } - PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)), - PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl), - PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => hir::PatKind::Range( - e1.as_deref().map(|e| self.lower_expr(e)), - e2.as_deref().map(|e| self.lower_expr(e)), - self.lower_range_end(end, e2.is_some()), - ), - PatKind::Slice(ref pats) => self.lower_pat_slice(pats), - PatKind::Rest => { - // If we reach here the `..` pattern is not semantically allowed. - self.ban_illegal_rest_pat(p.span) - } - PatKind::Paren(ref inner) => return self.lower_pat(inner), - PatKind::MacCall(_) => panic!("Shouldn't exist here"), - }; + let fs = self.arena.alloc_from_iter(fields.iter().map(|f| hir::FieldPat { + hir_id: self.next_id(), + ident: f.ident, + pat: self.lower_pat(&f.pat), + is_shorthand: f.is_shorthand, + span: f.span, + })); + hir::PatKind::Struct(qpath, fs, etc) + } + PatKind::Tuple(ref pats) => { + let (pats, ddpos) = self.lower_pat_tuple(pats, "tuple"); + hir::PatKind::Tuple(pats, ddpos) + } + PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)), + PatKind::Ref(ref inner, mutbl) => hir::PatKind::Ref(self.lower_pat(inner), mutbl), + PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. 
}) => { + hir::PatKind::Range( + e1.as_deref().map(|e| self.lower_expr(e)), + e2.as_deref().map(|e| self.lower_expr(e)), + self.lower_range_end(end, e2.is_some()), + ) + } + PatKind::Slice(ref pats) => self.lower_pat_slice(pats), + PatKind::Rest => { + // If we reach here the `..` pattern is not semantically allowed. + self.ban_illegal_rest_pat(p.span) + } + // FIXME: consider not using recursion to lower this. + PatKind::Paren(ref inner) => return self.lower_pat(inner), + PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", p.span), + }; - self.pat_with_node_id_of(p, node) + self.pat_with_node_id_of(p, node) + }) } fn lower_pat_tuple( diff --git a/src/librustc_interface/util.rs b/src/librustc_interface/util.rs index 72c25270a5d..01d92f3d869 100644 --- a/src/librustc_interface/util.rs +++ b/src/librustc_interface/util.rs @@ -80,14 +80,7 @@ pub fn create_session( (Lrc::new(sess), Lrc::new(codegen_backend), source_map) } -// Temporarily have stack size set to 32MB to deal with various crates with long method -// chains or deep syntax trees, except when on Haiku. -// FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 the finish line -#[cfg(not(target_os = "haiku"))] -const STACK_SIZE: usize = 32 * 1024 * 1024; - -#[cfg(target_os = "haiku")] -const STACK_SIZE: usize = 16 * 1024 * 1024; +const STACK_SIZE: usize = 2 * 1024 * 1024; fn get_stack_size() -> Option<usize> { // FIXME: Hacks on hacks. If the env is trying to override the stack size diff --git a/src/librustc_middle/Cargo.toml b/src/librustc_middle/Cargo.toml index 398ba4d72d4..6d3980ef689 100644 --- a/src/librustc_middle/Cargo.toml +++ b/src/librustc_middle/Cargo.toml @@ -34,3 +34,4 @@ byteorder = { version = "1.3" } smallvec = { version = "1.0", features = ["union", "may_dangle"] } measureme = "0.7.1" rustc_session = { path = "../librustc_session" } +stacker = "0.1.6" diff --git a/src/librustc_middle/middle/limits.rs b/src/librustc_middle/middle/limits.rs index c43c22cd61b..8704c4c46aa 100644 --- a/src/librustc_middle/middle/limits.rs +++ b/src/librustc_middle/middle/limits.rs @@ -13,6 +13,24 @@ use rustc_span::symbol::{sym, Symbol}; use std::num::IntErrorKind; +// This is the amount of bytes that need to be left on the stack before increasing the size. +// It must be at least as large as the stack required by any code that does not call +// `ensure_sufficient_stack`. +const RED_ZONE: usize = 100 * 1024; // 100k + +// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then +// on. This flag has performance relevant characteristics. Don't set it too high. +const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB + +/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations +/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit +/// from this. +/// +/// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
+pub fn ensure_sufficient_stack<R, F: FnOnce() -> R>(f: F) -> R { + stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f) +} + pub fn update_limits(sess: &Session, krate: &ast::Crate) { update_limit(sess, krate, &sess.recursion_limit, sym::recursion_limit, 128); update_limit(sess, krate, &sess.type_length_limit, sym::type_length_limit, 1048576); diff --git a/src/librustc_middle/ty/inhabitedness/mod.rs b/src/librustc_middle/ty/inhabitedness/mod.rs index b166c4dea0c..35b5bde7c9e 100644 --- a/src/librustc_middle/ty/inhabitedness/mod.rs +++ b/src/librustc_middle/ty/inhabitedness/mod.rs @@ -1,5 +1,6 @@ pub use self::def_id_forest::DefIdForest; +use crate::middle::limits::ensure_sufficient_stack; use crate::ty; use crate::ty::context::TyCtxt; use crate::ty::TyKind::*; @@ -196,7 +197,9 @@ impl<'tcx> TyS<'tcx> { /// Calculates the forest of `DefId`s from which this type is visibly uninhabited. fn uninhabited_from(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> DefIdForest { match self.kind { - Adt(def, substs) => def.uninhabited_from(tcx, substs, param_env), + Adt(def, substs) => { + ensure_sufficient_stack(|| def.uninhabited_from(tcx, substs, param_env)) + } Never => DefIdForest::full(tcx), diff --git a/src/librustc_middle/ty/query/plumbing.rs b/src/librustc_middle/ty/query/plumbing.rs index d6d4335e938..76e5a65472e 100644 --- a/src/librustc_middle/ty/query/plumbing.rs +++ b/src/librustc_middle/ty/query/plumbing.rs @@ -68,7 +68,9 @@ impl QueryContext for TyCtxt<'tcx> { }; // Use the `ImplicitCtxt` while we execute the query. - tls::enter_context(&new_icx, |_| compute(*self)) + tls::enter_context(&new_icx, |_| { + crate::middle::limits::ensure_sufficient_stack(|| compute(*self)) + }) }) } } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index dfcd2c3c936..6c844dd407b 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -369,7 +369,9 @@ fn collect_items_rec<'tcx>( recursion_depth_reset = Some(check_recursion_limit(tcx, instance, recursion_depths)); check_type_length_limit(tcx, instance); - collect_neighbours(tcx, instance, &mut neighbors); + rustc::middle::limits::ensure_sufficient_stack(|| { + collect_neighbours(tcx, instance, &mut neighbors); + }); } MonoItem::GlobalAsm(..) => { recursion_depth_reset = None; @@ -1146,7 +1148,9 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<MonoItem<'tcx>>) { Some(GlobalAlloc::Memory(alloc)) => { trace!("collecting {:?} with {:#?}", alloc_id, alloc); for &((), inner) in alloc.relocations().values() { - collect_miri(tcx, inner, output); + rustc_middle::limits::ensure_sufficient_stack(|| { + collect_miri(tcx, inner, output); + }); } } Some(GlobalAlloc::Function(fn_instance)) => { diff --git a/src/librustc_mir_build/build/expr/as_temp.rs b/src/librustc_mir_build/build/expr/as_temp.rs index b4ef536afb7..ccd6829b653 100644 --- a/src/librustc_mir_build/build/expr/as_temp.rs +++ b/src/librustc_mir_build/build/expr/as_temp.rs @@ -4,6 +4,7 @@ use crate::build::scope::DropKind; use crate::build::{BlockAnd, BlockAndExtension, Builder}; use crate::hair::*; use rustc_hir as hir; +use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::middle::region; use rustc_middle::mir::*; @@ -21,7 +22,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { M: Mirror<'tcx, Output = Expr<'tcx>>, { let expr = self.hir.mirror(expr); - self.expr_as_temp(block, temp_lifetime, expr, mutability) + // + // this is the only place in mir building where we truly need to worry about + // infinite recursion.
Everything else does recurse, too, but it always gets broken up + // at some point by inserting an intermediate temporary + ensure_sufficient_stack(|| self.expr_as_temp(block, temp_lifetime, expr, mutability)) } fn expr_as_temp( diff --git a/src/librustc_trait_selection/traits/project.rs b/src/librustc_trait_selection/traits/project.rs index 2b4a0409fd1..7621516a732 100644 --- a/src/librustc_trait_selection/traits/project.rs +++ b/src/librustc_trait_selection/traits/project.rs @@ -20,6 +20,7 @@ use crate::traits::error_reporting::InferCtxtExt; use rustc_ast::ast::Ident; use rustc_errors::ErrorReported; use rustc_hir::def_id::DefId; +use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; use rustc_middle::ty::subst::{InternalSubsts, Subst}; use rustc_middle::ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, WithConstness}; @@ -261,7 +262,7 @@ where { debug!("normalize_with_depth(depth={}, value={:?})", depth, value); let mut normalizer = AssocTypeNormalizer::new(selcx, param_env, cause, depth, obligations); - let result = normalizer.fold(value); + let result = ensure_sufficient_stack(|| normalizer.fold(value)); debug!( "normalize_with_depth: depth={} result={:?} with {} obligations", depth, diff --git a/src/librustc_trait_selection/traits/query/normalize.rs b/src/librustc_trait_selection/traits/query/normalize.rs index 0da26abc330..98693d21d7f 100644 --- a/src/librustc_trait_selection/traits/query/normalize.rs +++ b/src/librustc_trait_selection/traits/query/normalize.rs @@ -8,6 +8,7 @@ use crate::infer::{InferCtxt, InferOk}; use crate::traits::error_reporting::InferCtxtExt; use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal}; use rustc_infer::traits::Normalized; +use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; use rustc_middle::ty::subst::Subst; use rustc_middle::ty::{self, Ty, TyCtxt}; @@ -131,7 +132,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> { ty ); } - let folded_ty = self.fold_ty(concrete_ty); + let folded_ty = ensure_sufficient_stack(|| self.fold_ty(concrete_ty)); self.anon_depth -= 1; folded_ty } diff --git a/src/librustc_trait_selection/traits/select.rs b/src/librustc_trait_selection/traits/select.rs index dfbb0742448..adb1997502d 100644 --- a/src/librustc_trait_selection/traits/select.rs +++ b/src/librustc_trait_selection/traits/select.rs @@ -42,6 +42,7 @@ use rustc_hir::def_id::DefId; use rustc_hir::lang_items; use rustc_index::bit_set::GrowableBitSet; use rustc_middle::dep_graph::{DepKind, DepNodeIndex}; +use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::ty::fast_reject; use rustc_middle::ty::relate::TypeRelation; use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst, SubstsRef}; @@ -2365,13 +2366,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.infcx.commit_unconditionally(|_| { let (skol_ty, _) = self.infcx.replace_bound_vars_with_placeholders(&ty); let Normalized { value: normalized_ty, mut obligations } = - project::normalize_with_depth( - self, - param_env, - cause.clone(), - recursion_depth, - &skol_ty, - ); + ensure_sufficient_stack(|| { + project::normalize_with_depth( + self, + param_env, + cause.clone(), + recursion_depth, + &skol_ty, + ) + }); let skol_obligation = predicate_for_trait_def( self.tcx(), param_env, @@ -2525,13 +2528,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { }; let cause = obligation.derived_cause(BuiltinDerivedObligation); - 
self.collect_predicates_for_types( - obligation.param_env, - cause, - obligation.recursion_depth + 1, - trait_def, - nested, - ) + ensure_sufficient_stack(|| { + self.collect_predicates_for_types( + obligation.param_env, + cause, + obligation.recursion_depth + 1, + trait_def, + nested, + ) + }) } else { vec![] }; @@ -2568,38 +2573,39 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { nested: ty::Binder<Vec<Ty<'tcx>>>, ) -> VtableAutoImplData<PredicateObligation<'tcx>> { debug!("vtable_auto_impl: nested={:?}", nested); + ensure_sufficient_stack(|| { + let cause = obligation.derived_cause(BuiltinDerivedObligation); + let mut obligations = self.collect_predicates_for_types( + obligation.param_env, + cause, + obligation.recursion_depth + 1, + trait_def_id, + nested, + ); - let cause = obligation.derived_cause(BuiltinDerivedObligation); - let mut obligations = self.collect_predicates_for_types( - obligation.param_env, - cause, - obligation.recursion_depth + 1, - trait_def_id, - nested, - ); + let trait_obligations: Vec<PredicateObligation<'tcx>> = + self.infcx.commit_unconditionally(|_| { + let poly_trait_ref = obligation.predicate.to_poly_trait_ref(); + let (trait_ref, _) = + self.infcx.replace_bound_vars_with_placeholders(&poly_trait_ref); + let cause = obligation.derived_cause(ImplDerivedObligation); + self.impl_or_trait_obligations( + cause, + obligation.recursion_depth + 1, + obligation.param_env, + trait_def_id, + &trait_ref.substs, + ) + }); - let trait_obligations: Vec<PredicateObligation<'tcx>> = - self.infcx.commit_unconditionally(|_| { - let poly_trait_ref = obligation.predicate.to_poly_trait_ref(); - let (trait_ref, _) = - self.infcx.replace_bound_vars_with_placeholders(&poly_trait_ref); - let cause = obligation.derived_cause(ImplDerivedObligation); - self.impl_or_trait_obligations( - cause, - obligation.recursion_depth + 1, - obligation.param_env, - trait_def_id, - &trait_ref.substs, - ) - }); + // Adds the predicates from the trait. Note that this contains a `Self: Trait` + // predicate as usual. It won't have any effect since auto traits are coinductive.
- obligations.extend(trait_obligations); + debug!("vtable_auto_impl: obligations={:?}", obligations); - debug!("vtable_auto_impl: obligations={:?}", obligations); - - VtableAutoImplData { trait_def_id, nested: obligations } + VtableAutoImplData { trait_def_id, nested: obligations } + }) } fn confirm_impl_candidate( @@ -2615,13 +2621,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let substs = self.rematch_impl(impl_def_id, obligation, snapshot); debug!("confirm_impl_candidate: substs={:?}", substs); let cause = obligation.derived_cause(ImplDerivedObligation); - self.vtable_impl( - impl_def_id, - substs, - cause, - obligation.recursion_depth + 1, - obligation.param_env, - ) + ensure_sufficient_stack(|| { + self.vtable_impl( + impl_def_id, + substs, + cause, + obligation.recursion_depth + 1, + obligation.param_env, + ) + }) }) } @@ -2734,13 +2742,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ) .map_bound(|(trait_ref, _)| trait_ref); - let Normalized { value: trait_ref, obligations } = project::normalize_with_depth( - self, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth + 1, - &trait_ref, - ); + let Normalized { value: trait_ref, obligations } = ensure_sufficient_stack(|| { + project::normalize_with_depth( + self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &trait_ref, + ) + }); self.confirm_poly_trait_refs( obligation.cause.clone(), @@ -2798,13 +2808,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!("confirm_generator_candidate({:?},{:?},{:?})", obligation, generator_def_id, substs); let trait_ref = self.generator_trait_ref_unnormalized(obligation, substs); - let Normalized { value: trait_ref, mut obligations } = normalize_with_depth( - self, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth + 1, - &trait_ref, - ); + let Normalized { value: trait_ref, mut obligations } = ensure_sufficient_stack(|| { + normalize_with_depth( + self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &trait_ref, + ) + }); debug!( "confirm_generator_candidate(generator_def_id={:?}, \ @@ -2843,13 +2855,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { }; let trait_ref = self.closure_trait_ref_unnormalized(obligation, substs); - let Normalized { value: trait_ref, mut obligations } = normalize_with_depth( - self, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth + 1, - &trait_ref, - ); + let Normalized { value: trait_ref, mut obligations } = ensure_sufficient_stack(|| { + normalize_with_depth( + self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &trait_ref, + ) + }); debug!( "confirm_closure_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})", @@ -3139,15 +3153,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { nested.extend(obligations); // Construct the nested `T: Unsize` predicate. 
- nested.push(predicate_for_trait_def( - tcx, - obligation.param_env, - obligation.cause.clone(), - obligation.predicate.def_id(), - obligation.recursion_depth + 1, - a_last.expect_ty(), - &[b_last], - )); + nested.push(ensure_sufficient_stack(|| { + predicate_for_trait_def( + tcx, + obligation.param_env, + obligation.cause.clone(), + obligation.predicate.def_id(), + obligation.recursion_depth + 1, + a_last.expect_ty(), + &[b_last], + ) + })); } _ => bug!(), @@ -3208,13 +3224,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let impl_trait_ref = impl_trait_ref.subst(self.tcx(), impl_substs); let Normalized { value: impl_trait_ref, obligations: mut nested_obligations } = - project::normalize_with_depth( - self, - obligation.param_env, - obligation.cause.clone(), - obligation.recursion_depth + 1, - &impl_trait_ref, - ); + ensure_sufficient_stack(|| { + project::normalize_with_depth( + self, + obligation.param_env, + obligation.cause.clone(), + obligation.recursion_depth + 1, + &impl_trait_ref, + ) + }); debug!( "match_impl(impl_def_id={:?}, obligation={:?}, \ diff --git a/src/librustc_traits/dropck_outlives.rs b/src/librustc_traits/dropck_outlives.rs index 93b15e146ec..460b84e6326 100644 --- a/src/librustc_traits/dropck_outlives.rs +++ b/src/librustc_traits/dropck_outlives.rs @@ -191,10 +191,12 @@ fn dtorck_constraint_for_ty<'tcx>( ty::Array(ety, _) | ty::Slice(ety) => { // single-element containers, behave like their element - dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints)?; + rustc_middle::limits::ensure_sufficient_stack(|| { + dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints) + })?; } - ty::Tuple(tys) => { + ty::Tuple(tys) => rustc_middle::limits::ensure_sufficient_stack(|| { for ty in tys.iter() { dtorck_constraint_for_ty( tcx, @@ -205,13 +207,15 @@ fn dtorck_constraint_for_ty<'tcx>( constraints, )?; } - } + Ok::<_, NoSolution>(()) + })?, - ty::Closure(_, substs) => { + ty::Closure(_, substs) => rustc_middle::limits::ensure_sufficient_stack(|| { for ty in substs.as_closure().upvar_tys() { dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?; } - } + Ok::<_, NoSolution>(()) + })?, ty::Generator(_, substs, _movability) => { // rust-lang/rust#49918: types can be constructed, stored From 96c5012b57c4688a8a0a076302706bb6a1e95588 Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Tue, 26 Nov 2019 14:12:59 +0100 Subject: [PATCH 2/6] Add `psm` to the crate whitelist --- src/tools/tidy/src/deps.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/tools/tidy/src/deps.rs b/src/tools/tidy/src/deps.rs index 8d38404d31b..1423f1db385 100644 --- a/src/tools/tidy/src/deps.rs +++ b/src/tools/tidy/src/deps.rs @@ -131,6 +131,7 @@ const WHITELIST: &[&str] = &[ "polonius-engine", "ppv-lite86", "proc-macro2", + "psm", "punycode", "quick-error", "quote", @@ -160,6 +161,7 @@ const WHITELIST: &[&str] = &[ "sha-1", "smallvec", "stable_deref_trait", + "stacker", "syn", "synstructure", "tempfile", From 968f442c7ce591e9cb03b57bb0dac30df4909b50 Mon Sep 17 00:00:00 2001 From: Simonas Kazlauskas Date: Sun, 23 Feb 2020 22:23:45 +0200 Subject: [PATCH 3/6] Set the default stack size to 8MB This was the value used before we originally started raising the stack size to infinity. 
--- src/librustc_interface/util.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/librustc_interface/util.rs b/src/librustc_interface/util.rs index 01d92f3d869..ecc336337f8 100644 --- a/src/librustc_interface/util.rs +++ b/src/librustc_interface/util.rs @@ -80,7 +80,7 @@ pub fn create_session( (Lrc::new(sess), Lrc::new(codegen_backend), source_map) } -const STACK_SIZE: usize = 2 * 1024 * 1024; +const STACK_SIZE: usize = 8 * 1024 * 1024; fn get_stack_size() -> Option { // FIXME: Hacks on hacks. If the env is trying to override the stack size From a5c5365031ce8ec3926e06e207e16e58c4bf3e8b Mon Sep 17 00:00:00 2001 From: Simonas Kazlauskas Date: Sat, 14 Mar 2020 20:13:55 +0200 Subject: [PATCH 4/6] Move ensure_sufficient_stack to data_structures We anticipate this to have uses in all sorts of crates and keeping it in `rustc_data_structures` enables access to it from more locations without necessarily pulling in the large `librustc` crate. --- Cargo.lock | 2 +- src/librustc_ast_lowering/expr.rs | 2 +- src/librustc_ast_lowering/pat.rs | 2 +- src/librustc_data_structures/Cargo.toml | 1 + src/librustc_data_structures/lib.rs | 1 + src/librustc_data_structures/stack.rs | 17 +++++++++++++++++ src/librustc_middle/Cargo.toml | 1 - src/librustc_middle/middle/limits.rs | 18 ------------------ src/librustc_middle/ty/inhabitedness/mod.rs | 2 +- src/librustc_middle/ty/query/plumbing.rs | 2 +- src/librustc_mir/monomorphize/collector.rs | 4 ++-- src/librustc_mir_build/build/expr/as_temp.rs | 2 +- src/librustc_trait_selection/traits/project.rs | 2 +- .../traits/query/normalize.rs | 2 +- src/librustc_trait_selection/traits/select.rs | 2 +- src/librustc_traits/dropck_outlives.rs | 6 +++--- 16 files changed, 33 insertions(+), 33 deletions(-) create mode 100644 src/librustc_data_structures/stack.rs diff --git a/Cargo.lock b/Cargo.lock index 7231bedb6ae..ba92fcc8308 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3161,7 +3161,6 @@ checksum = "81dfcfbb0ddfd533abf8c076e3b49d1e5042d1962526a12ce2c66d514b24cca3" dependencies = [ "rustc-ap-rustc_data_structures", "smallvec 1.0.0", - "stacker", ] [[package]] @@ -3706,6 +3705,7 @@ dependencies = [ "serialize", "smallvec 1.0.0", "stable_deref_trait", + "stacker", "winapi 0.3.8", ] diff --git a/src/librustc_ast_lowering/expr.rs b/src/librustc_ast_lowering/expr.rs index b295da8ecb9..251faf6af00 100644 --- a/src/librustc_ast_lowering/expr.rs +++ b/src/librustc_ast_lowering/expr.rs @@ -3,11 +3,11 @@ use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericAr use rustc_ast::ast::*; use rustc_ast::attr; use rustc_ast::ptr::P as AstP; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_data_structures::thin_vec::ThinVec; use rustc_errors::struct_span_err; use rustc_hir as hir; use rustc_hir::def::Res; -use rustc_middle::limits::ensure_sufficient_stack; use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned}; use rustc_span::symbol::{sym, Symbol}; diff --git a/src/librustc_ast_lowering/pat.rs b/src/librustc_ast_lowering/pat.rs index 614450ed7cf..d54ad2036d5 100644 --- a/src/librustc_ast_lowering/pat.rs +++ b/src/librustc_ast_lowering/pat.rs @@ -2,9 +2,9 @@ use super::{ImplTraitContext, LoweringContext, ParamMode}; use rustc_ast::ast::*; use rustc_ast::ptr::P; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir as hir; use rustc_hir::def::Res; -use rustc_middle::limits::ensure_sufficient_stack; use rustc_span::{source_map::Spanned, Span}; impl<'a, 'hir> LoweringContext<'a, 'hir> { diff --git 
a/src/librustc_data_structures/Cargo.toml b/src/librustc_data_structures/Cargo.toml index 6d7022acc78..fbabd1ab541 100644 --- a/src/librustc_data_structures/Cargo.toml +++ b/src/librustc_data_structures/Cargo.toml @@ -28,6 +28,7 @@ rustc_index = { path = "../librustc_index", package = "rustc_index" } bitflags = "1.2.1" measureme = "0.7.1" libc = "0.2" +stacker = "0.1.6" [dependencies.parking_lot] version = "0.10" diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index bc2da535fd3..a23181befc1 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -80,6 +80,7 @@ pub mod stable_set; #[macro_use] pub mod stable_hasher; pub mod sharded; +pub mod stack; pub mod sync; pub mod thin_vec; pub mod tiny_list; diff --git a/src/librustc_data_structures/stack.rs b/src/librustc_data_structures/stack.rs new file mode 100644 index 00000000000..a4964b7aa0c --- /dev/null +++ b/src/librustc_data_structures/stack.rs @@ -0,0 +1,17 @@ +// This is the amount of bytes that need to be left on the stack before increasing the size. +// It must be at least as large as the stack required by any code that does not call +// `ensure_sufficient_stack`. +const RED_ZONE: usize = 100 * 1024; // 100k + +// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then +// on. This flag has performance relevant characteristics. Don't set it too high. +const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB + +/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit +/// from this. +/// +/// Should not be sprinkled around carelessly, as it causes a little bit of overhead. +pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R { + stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f) +} diff --git a/src/librustc_middle/Cargo.toml b/src/librustc_middle/Cargo.toml index 6d3980ef689..398ba4d72d4 100644 --- a/src/librustc_middle/Cargo.toml +++ b/src/librustc_middle/Cargo.toml @@ -34,4 +34,3 @@ byteorder = { version = "1.3" } smallvec = { version = "1.0", features = ["union", "may_dangle"] } measureme = "0.7.1" rustc_session = { path = "../librustc_session" } -stacker = "0.1.6" diff --git a/src/librustc_middle/middle/limits.rs b/src/librustc_middle/middle/limits.rs index 8704c4c46aa..c43c22cd61b 100644 --- a/src/librustc_middle/middle/limits.rs +++ b/src/librustc_middle/middle/limits.rs @@ -13,24 +13,6 @@ use rustc_span::symbol::{sym, Symbol}; use std::num::IntErrorKind; -// This is the amount of bytes that need to be left on the stack before increasing the size. -// It must be at least as large as the stack required by any code that does not call -// `ensure_sufficient_stack`. -const RED_ZONE: usize = 100 * 1024; // 100k - -// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then -// on. This flag has performance relevant characteristics. Don't set it too high. -const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB - -/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations -/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit -/// from this. -/// -/// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
-pub fn ensure_sufficient_stack R>(f: F) -> R { - stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f) -} - pub fn update_limits(sess: &Session, krate: &ast::Crate) { update_limit(sess, krate, &sess.recursion_limit, sym::recursion_limit, 128); update_limit(sess, krate, &sess.type_length_limit, sym::type_length_limit, 1048576); diff --git a/src/librustc_middle/ty/inhabitedness/mod.rs b/src/librustc_middle/ty/inhabitedness/mod.rs index 35b5bde7c9e..d1b5eed921b 100644 --- a/src/librustc_middle/ty/inhabitedness/mod.rs +++ b/src/librustc_middle/ty/inhabitedness/mod.rs @@ -1,12 +1,12 @@ pub use self::def_id_forest::DefIdForest; -use crate::middle::limits::ensure_sufficient_stack; use crate::ty; use crate::ty::context::TyCtxt; use crate::ty::TyKind::*; use crate::ty::{AdtDef, FieldDef, Ty, TyS, VariantDef}; use crate::ty::{AdtKind, Visibility}; use crate::ty::{DefId, SubstsRef}; +use rustc_data_structures::stack::ensure_sufficient_stack; mod def_id_forest; diff --git a/src/librustc_middle/ty/query/plumbing.rs b/src/librustc_middle/ty/query/plumbing.rs index 76e5a65472e..e62a9efaee3 100644 --- a/src/librustc_middle/ty/query/plumbing.rs +++ b/src/librustc_middle/ty/query/plumbing.rs @@ -69,7 +69,7 @@ impl QueryContext for TyCtxt<'tcx> { // Use the `ImplicitCtxt` while we execute the query. tls::enter_context(&new_icx, |_| { - crate::middle::limits::ensure_sufficient_stack(|| compute(*self)) + rustc_data_structures::stack::ensure_sufficient_stack(|| compute(*self)) }) }) } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index 6c844dd407b..3d798254735 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -369,7 +369,7 @@ fn collect_items_rec<'tcx>( recursion_depth_reset = Some(check_recursion_limit(tcx, instance, recursion_depths)); check_type_length_limit(tcx, instance); - rustc::middle::limits::ensure_sufficient_stack(|| { + rustc_data_structures::stack::ensure_sufficient_stack(|| { collect_neighbours(tcx, instance, &mut neighbors); }); } @@ -1148,7 +1148,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec { trace!("collecting {:?} with {:#?}", alloc_id, alloc); for &((), inner) in alloc.relocations().values() { - rustc_middle::limits::ensure_sufficient_stack(|| { + rustc_data_structures::stack::ensure_sufficient_stack(|| { collect_miri(tcx, inner, output); }); } diff --git a/src/librustc_mir_build/build/expr/as_temp.rs b/src/librustc_mir_build/build/expr/as_temp.rs index ccd6829b653..d3304c71e61 100644 --- a/src/librustc_mir_build/build/expr/as_temp.rs +++ b/src/librustc_mir_build/build/expr/as_temp.rs @@ -3,8 +3,8 @@ use crate::build::scope::DropKind; use crate::build::{BlockAnd, BlockAndExtension, Builder}; use crate::hair::*; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir as hir; -use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::middle::region; use rustc_middle::mir::*; diff --git a/src/librustc_trait_selection/traits/project.rs b/src/librustc_trait_selection/traits/project.rs index 7621516a732..591a41f12bf 100644 --- a/src/librustc_trait_selection/traits/project.rs +++ b/src/librustc_trait_selection/traits/project.rs @@ -18,9 +18,9 @@ use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use crate::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime}; use crate::traits::error_reporting::InferCtxtExt; use rustc_ast::ast::Ident; +use 
rustc_data_structures::stack::ensure_sufficient_stack; use rustc_errors::ErrorReported; use rustc_hir::def_id::DefId; -use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; use rustc_middle::ty::subst::{InternalSubsts, Subst}; use rustc_middle::ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, WithConstness}; diff --git a/src/librustc_trait_selection/traits/query/normalize.rs b/src/librustc_trait_selection/traits/query/normalize.rs index 98693d21d7f..3b985a4b150 100644 --- a/src/librustc_trait_selection/traits/query/normalize.rs +++ b/src/librustc_trait_selection/traits/query/normalize.rs @@ -7,8 +7,8 @@ use crate::infer::canonical::OriginalQueryValues; use crate::infer::{InferCtxt, InferOk}; use crate::traits::error_reporting::InferCtxtExt; use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal}; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_infer::traits::Normalized; -use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; use rustc_middle::ty::subst::Subst; use rustc_middle::ty::{self, Ty, TyCtxt}; diff --git a/src/librustc_trait_selection/traits/select.rs b/src/librustc_trait_selection/traits/select.rs index adb1997502d..6cc2efc2306 100644 --- a/src/librustc_trait_selection/traits/select.rs +++ b/src/librustc_trait_selection/traits/select.rs @@ -37,12 +37,12 @@ use crate::traits::error_reporting::InferCtxtExt; use crate::traits::project::ProjectionCacheKeyExt; use rustc_ast::attr; use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_data_structures::stack::ensure_sufficient_stack; use rustc_hir as hir; use rustc_hir::def_id::DefId; use rustc_hir::lang_items; use rustc_index::bit_set::GrowableBitSet; use rustc_middle::dep_graph::{DepKind, DepNodeIndex}; -use rustc_middle::limits::ensure_sufficient_stack; use rustc_middle::ty::fast_reject; use rustc_middle::ty::relate::TypeRelation; use rustc_middle::ty::subst::{GenericArg, GenericArgKind, Subst, SubstsRef}; diff --git a/src/librustc_traits/dropck_outlives.rs b/src/librustc_traits/dropck_outlives.rs index 460b84e6326..76ff58d61a2 100644 --- a/src/librustc_traits/dropck_outlives.rs +++ b/src/librustc_traits/dropck_outlives.rs @@ -191,12 +191,12 @@ fn dtorck_constraint_for_ty<'tcx>( ty::Array(ety, _) | ty::Slice(ety) => { // single-element containers, behave like their element - rustc_middle::limits::ensure_sufficient_stack(|| { + rustc_data_structures::stack::ensure_sufficient_stack(|| { dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints) })?; } - ty::Tuple(tys) => rustc_middle::limits::ensure_sufficient_stack(|| { + ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| { for ty in tys.iter() { dtorck_constraint_for_ty( tcx, @@ -210,7 +210,7 @@ fn dtorck_constraint_for_ty<'tcx>( Ok::<_, NoSolution>(()) })?, - ty::Closure(_, substs) => rustc_middle::limits::ensure_sufficient_stack(|| { + ty::Closure(_, substs) => rustc_data_structures::stack::ensure_sufficient_stack(|| { for ty in substs.as_closure().upvar_tys() { dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?; } From a569f2970af629d012b08cf5869e2d3a300ff500 Mon Sep 17 00:00:00 2001 From: Simonas Kazlauskas Date: Sun, 22 Mar 2020 04:07:06 +0200 Subject: [PATCH 5/6] Update psm and stacker to 0.1.7 --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ba92fcc8308..3aa073c6802 100644 --- a/Cargo.lock 
+++ b/Cargo.lock @@ -2632,9 +2632,9 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b14fc68b454f875abc8354c2555e1d56596f74833ddc0f77f87f4871ed6a30e0" +checksum = "586f7889555eb729b6074a71cabd2aec657406aa89a76734c8d69f428fbbac42" dependencies = [ "cc", ] @@ -4669,9 +4669,9 @@ checksum = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa" [[package]] name = "stacker" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d96fc4f13a0ac088e9a3cd9af1cc8c5cc1ab5deb2145cef661267dfc9c542f8a" +checksum = "a89b851ac6cb6bcd07a2693a552cad1dc743d3f822df2a484d44209209be3133" dependencies = [ "cc", "cfg-if", From 935a05f1beaa5ed872e66e521e510bba61509b48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateusz=20Miku=C5=82a?= Date: Wed, 6 May 2020 11:29:30 +0200 Subject: [PATCH 6/6] Update stacker and psm to 0.1.8 --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3aa073c6802..18476d7a60d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2632,9 +2632,9 @@ dependencies = [ [[package]] name = "psm" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "586f7889555eb729b6074a71cabd2aec657406aa89a76734c8d69f428fbbac42" +checksum = "659ecfea2142a458893bb7673134bad50b752fea932349c213d6a23874ce3aa7" dependencies = [ "cc", ] @@ -4669,9 +4669,9 @@ checksum = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa" [[package]] name = "stacker" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89b851ac6cb6bcd07a2693a552cad1dc743d3f822df2a484d44209209be3133" +checksum = "32c2467b8abbb417e4e62fd62229719b9c9d77714a7fa989f1afad16ba9c9743" dependencies = [ "cc", "cfg-if",