Auto merge of #55617 - oli-obk:stacker, r=nagisa,oli-obk

Prevent compiler stack overflow for deeply recursive code

I was unable to write a test that

1. runs in under 1s
2. overflows on my machine without this patch

The following reproduces the issue, but I don't think it's sensible to include a test that takes 30s to compile. We can now easily squash newly appearing overflows by strategically inserting calls to `ensure_sufficient_stack`; a minimal standalone sketch of that pattern follows the repro below.

```rust
// compile-pass

#![recursion_limit="1000000"]

macro_rules! chain {
    (EE $e:expr) => {$e.sin()};
    (RECURSE $i:ident $e:expr) => {chain!($i chain!($i chain!($i chain!($i $e))))};
    (Z $e:expr) => {chain!(RECURSE EE $e)};
    (Y $e:expr) => {chain!(RECURSE Z $e)};
    (X $e:expr) => {chain!(RECURSE Y $e)};
    (A $e:expr) => {chain!(RECURSE X $e)};
    (B $e:expr) => {chain!(RECURSE A $e)};
    (C $e:expr) => {chain!(RECURSE B $e)};
    // causes overflow on x86_64 linux
    // less than 1 second until overflow on test machine
    // after overflow has been fixed, takes 30s to compile :/
    (D $e:expr) => {chain!(RECURSE C $e)};
    (E $e:expr) => {chain!(RECURSE D $e)};
    (F $e:expr) => {chain!(RECURSE E $e)};
    // more than 10 seconds
    (G $e:expr) => {chain!(RECURSE F $e)};
    (H $e:expr) => {chain!(RECURSE G $e)};
    (I $e:expr) => {chain!(RECURSE H $e)};
    (J $e:expr) => {chain!(RECURSE I $e)};
    (K $e:expr) => {chain!(RECURSE J $e)};
    (L $e:expr) => {chain!(RECURSE K $e)};
}

fn main() {
    let x = chain!(D 42.0_f32);
}
```
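
For reference, here is a minimal standalone sketch of the pattern this PR enables. It is not part of the PR itself; it drives the `stacker` crate directly, and the constants simply mirror the ones the PR adds to `librustc_data_structures` (see the new `stack.rs` below):

```rust
// Demo only: `stacker = "0.1"` must be declared in Cargo.toml.
const RED_ZONE: usize = 100 * 1024; // 100 KiB
const STACK_PER_RECURSION: usize = 1024 * 1024; // 1 MiB

fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
    // Grow the stack by STACK_PER_RECURSION bytes if fewer than RED_ZONE
    // bytes remain, then run `f` on the (possibly fresh) stack segment.
    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
}

fn depth(n: u64) -> u64 {
    ensure_sufficient_stack(|| {
        // A fat stack frame, so the recursion would overflow quickly
        // without the wrapper.
        let pad = [0u8; 4096];
        std::hint::black_box(&pad);
        if n == 0 { 0 } else { 1 + depth(n - 1) }
    })
}

fn main() {
    // ~4 KiB * 10_000 frames is roughly 40 MiB of stack, well past the
    // default 8 MiB main-thread stack; `maybe_grow` keeps it alive.
    println!("{}", depth(10_000));
}
```

Without the wrapper the recursion blows the default stack; with it, `stacker` allocates a fresh 1 MiB segment whenever fewer than `RED_ZONE` bytes remain.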

fixes #55471
fixes #41884
fixes #40161
fixes #34844
fixes #32594

cc @alexcrichton @rust-lang/compiler

I looked at all code that checks the recursion limit and inserted stack growth calls where appropriate.
bors 2020-05-07 00:03:23 +00:00
commit 97f3eeec82
16 changed files with 450 additions and 354 deletions

View File

@@ -2639,6 +2639,15 @@ dependencies = [
  "core",
 ]
 
+[[package]]
+name = "psm"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "659ecfea2142a458893bb7673134bad50b752fea932349c213d6a23874ce3aa7"
+dependencies = [
+ "cc",
+]
+
 [[package]]
 name = "publicsuffix"
 version = "1.5.3"
@@ -3708,6 +3717,7 @@ dependencies = [
  "serialize",
  "smallvec 1.4.0",
  "stable_deref_trait",
+ "stacker",
  "winapi 0.3.8",
 ]
@@ -4669,6 +4679,19 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ffbc596e092fe5f598b12ef46cc03754085ac2f4d8c739ad61c4ae266cc3b3fa"
 
+[[package]]
+name = "stacker"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32c2467b8abbb417e4e62fd62229719b9c9d77714a7fa989f1afad16ba9c9743"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "libc",
+ "psm",
+ "winapi 0.3.8",
+]
+
 [[package]]
 name = "std"
 version = "0.0.0"

View File

@@ -3,6 +3,7 @@ use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
 use rustc_ast::ast::*;
 use rustc_ast::attr;
 use rustc_ast::ptr::P as AstP;
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::thin_vec::ThinVec;
 use rustc_errors::struct_span_err;
 use rustc_hir as hir;
@@ -20,192 +21,206 @@ impl<'hir> LoweringContext<'_, 'hir> {
     }
 
     pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
-        let kind = match e.kind {
-            ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
-            ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
-            // ... one arm per remaining `ExprKind` variant ...
-        };
-        hir::Expr {
-            hir_id: self.lower_node_id(e.id),
-            kind,
-            span: e.span,
-            attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
-        }
+        ensure_sufficient_stack(|| {
+            let kind = match e.kind {
+                ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
+                ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
+                // ... the same arms, re-indented one level inside the closure ...
+            };
+            hir::Expr {
+                hir_id: self.lower_node_id(e.id),
+                kind,
+                span: e.span,
+                attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
+            }
+        })
     }
 
     fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {

View File

@@ -2,83 +2,89 @@ use super::{ImplTraitContext, LoweringContext, ParamMode};
 use rustc_ast::ast::*;
 use rustc_ast::ptr::P;
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir as hir;
 use rustc_hir::def::Res;
 use rustc_span::{source_map::Spanned, Span};
 
 impl<'a, 'hir> LoweringContext<'a, 'hir> {
     crate fn lower_pat(&mut self, p: &Pat) -> &'hir hir::Pat<'hir> {
-        let node = match p.kind {
-            PatKind::Wild => hir::PatKind::Wild,
-            // ... one arm per remaining `PatKind` variant ...
-            PatKind::Paren(ref inner) => return self.lower_pat(inner),
-            PatKind::MacCall(_) => panic!("Shouldn't exist here"),
-        };
-        self.pat_with_node_id_of(p, node)
+        ensure_sufficient_stack(|| {
+            let node = match p.kind {
+                PatKind::Wild => hir::PatKind::Wild,
+                // ... the same arms, re-indented one level inside the closure ...
+                // FIXME: consider not using recursion to lower this.
+                PatKind::Paren(ref inner) => return self.lower_pat(inner),
+                PatKind::MacCall(_) => panic!("{:?} shouldn't exist here", p.span),
+            };
+            self.pat_with_node_id_of(p, node)
+        })
     }
 
     fn lower_pat_tuple(

View File

@@ -28,6 +28,7 @@ rustc_index = { path = "../librustc_index", package = "rustc_index" }
 bitflags = "1.2.1"
 measureme = "0.7.1"
 libc = "0.2"
+stacker = "0.1.6"
 
 [dependencies.parking_lot]
 version = "0.10"

View File

@@ -80,6 +80,7 @@ pub mod stable_set;
 #[macro_use]
 pub mod stable_hasher;
 pub mod sharded;
+pub mod stack;
 pub mod sync;
 pub mod thin_vec;
 pub mod tiny_list;

View File

@@ -0,0 +1,17 @@
+// This is the amount of bytes that need to be left on the stack before increasing the size.
+// It must be at least as large as the stack required by any code that does not call
+// `ensure_sufficient_stack`.
+const RED_ZONE: usize = 100 * 1024; // 100k
+
+// Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
+// on. This flag has performance relevant characteristics. Don't set it too high.
+const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
+
+/// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
+/// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
+/// from this.
+///
+/// Should not be sprinkled around carelessly, as it causes a little bit of overhead.
+pub fn ensure_sufficient_stack<R>(f: impl FnOnce() -> R) -> R {
+    stacker::maybe_grow(RED_ZONE, STACK_PER_RECURSION, f)
+}
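
As a hedged illustration of what `stacker::maybe_grow` does at each call (assuming the `stacker` 0.1 API; this is not code from the commit): it compares the remaining stack against the red zone and only switches to a freshly allocated segment when the check fails, which `stacker::remaining_stack` lets us watch:

```rust
// Demo only: requires `stacker = "0.1"` as a dependency.
fn recurse(n: u32) {
    stacker::maybe_grow(100 * 1024, 1024 * 1024, || {
        if n % 5_000 == 0 {
            // remaining_stack() reports how many bytes are left on the
            // current stack segment, i.e. how close we are to the red zone.
            println!("depth {:>6}: {:?} bytes left", n, stacker::remaining_stack());
        }
        if n > 0 {
            recurse(n - 1);
        }
    });
}

fn main() {
    recurse(50_000);
}
```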

View File

@@ -81,14 +81,7 @@ pub fn create_session(
     (Lrc::new(sess), Lrc::new(codegen_backend), source_map)
 }
 
-// Temporarily have stack size set to 32MB to deal with various crates with long method
-// chains or deep syntax trees, except when on Haiku.
-// FIXME(oli-obk): get https://github.com/rust-lang/rust/pull/55617 the finish line
-#[cfg(not(target_os = "haiku"))]
-const STACK_SIZE: usize = 32 * 1024 * 1024;
-#[cfg(target_os = "haiku")]
-const STACK_SIZE: usize = 16 * 1024 * 1024;
+const STACK_SIZE: usize = 8 * 1024 * 1024;
 
 fn get_stack_size() -> Option<usize> {
     // FIXME: Hacks on hacks. If the env is trying to override the stack size
View File

@@ -6,6 +6,7 @@ use crate::ty::TyKind::*;
 use crate::ty::{AdtDef, FieldDef, Ty, TyS, VariantDef};
 use crate::ty::{AdtKind, Visibility};
 use crate::ty::{DefId, SubstsRef};
+use rustc_data_structures::stack::ensure_sufficient_stack;
 
 mod def_id_forest;
 
@@ -196,7 +197,9 @@ impl<'tcx> TyS<'tcx> {
     /// Calculates the forest of `DefId`s from which this type is visibly uninhabited.
     fn uninhabited_from(&self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> DefIdForest {
         match self.kind {
-            Adt(def, substs) => def.uninhabited_from(tcx, substs, param_env),
+            Adt(def, substs) => {
+                ensure_sufficient_stack(|| def.uninhabited_from(tcx, substs, param_env))
+            }
 
             Never => DefIdForest::full(tcx),
View File

@@ -68,7 +68,9 @@ impl QueryContext for TyCtxt<'tcx> {
             };
 
             // Use the `ImplicitCtxt` while we execute the query.
-            tls::enter_context(&new_icx, |_| compute(*self))
+            tls::enter_context(&new_icx, |_| {
+                rustc_data_structures::stack::ensure_sufficient_stack(|| compute(*self))
+            })
         })
     }
 }

View File

@@ -369,7 +369,9 @@ fn collect_items_rec<'tcx>(
             recursion_depth_reset = Some(check_recursion_limit(tcx, instance, recursion_depths));
             check_type_length_limit(tcx, instance);
 
-            collect_neighbours(tcx, instance, &mut neighbors);
+            rustc_data_structures::stack::ensure_sufficient_stack(|| {
+                collect_neighbours(tcx, instance, &mut neighbors);
+            });
         }
         MonoItem::GlobalAsm(..) => {
             recursion_depth_reset = None;
@@ -1146,7 +1148,9 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut Vec<Mon
         Some(GlobalAlloc::Memory(alloc)) => {
             trace!("collecting {:?} with {:#?}", alloc_id, alloc);
             for &((), inner) in alloc.relocations().values() {
-                collect_miri(tcx, inner, output);
+                rustc_data_structures::stack::ensure_sufficient_stack(|| {
+                    collect_miri(tcx, inner, output);
+                });
             }
         }
         Some(GlobalAlloc::Function(fn_instance)) => {

View File

@@ -3,6 +3,7 @@
 use crate::build::scope::DropKind;
 use crate::build::{BlockAnd, BlockAndExtension, Builder};
 use crate::hair::*;
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir as hir;
 use rustc_middle::middle::region;
 use rustc_middle::mir::*;
@@ -21,7 +22,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         M: Mirror<'tcx, Output = Expr<'tcx>>,
     {
         let expr = self.hir.mirror(expr);
-        self.expr_as_temp(block, temp_lifetime, expr, mutability)
+        //
+        // this is the only place in mir building where we truly need to worry about
+        // infinite recursion. Everything else does recurse, too, but it always gets broken up
+        // at some point by inserting an intermediate temporary
+        ensure_sufficient_stack(|| self.expr_as_temp(block, temp_lifetime, expr, mutability))
     }
 
     fn expr_as_temp(

View File

@@ -18,6 +18,7 @@ use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
 use crate::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
 use crate::traits::error_reporting::InferCtxtExt;
 use rustc_ast::ast::Ident;
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_errors::ErrorReported;
 use rustc_hir::def_id::DefId;
 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
@@ -261,7 +262,7 @@ where
 {
     debug!("normalize_with_depth(depth={}, value={:?})", depth, value);
     let mut normalizer = AssocTypeNormalizer::new(selcx, param_env, cause, depth, obligations);
-    let result = normalizer.fold(value);
+    let result = ensure_sufficient_stack(|| normalizer.fold(value));
     debug!(
         "normalize_with_depth: depth={} result={:?} with {} obligations",
         depth,

View File

@@ -7,6 +7,7 @@ use crate::infer::canonical::OriginalQueryValues;
 use crate::infer::{InferCtxt, InferOk};
 use crate::traits::error_reporting::InferCtxtExt;
 use crate::traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_infer::traits::Normalized;
 use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
 use rustc_middle::ty::subst::Subst;
@@ -131,7 +132,7 @@ impl<'cx, 'tcx> TypeFolder<'tcx> for QueryNormalizer<'cx, 'tcx> {
                         ty
                     );
                 }
-                let folded_ty = self.fold_ty(concrete_ty);
+                let folded_ty = ensure_sufficient_stack(|| self.fold_ty(concrete_ty));
                 self.anon_depth -= 1;
                 folded_ty
             }

View File

@@ -37,6 +37,7 @@ use crate::traits::error_reporting::InferCtxtExt;
 use crate::traits::project::ProjectionCacheKeyExt;
 use rustc_ast::attr;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_hir as hir;
 use rustc_hir::def_id::DefId;
 use rustc_hir::lang_items;
@@ -2365,13 +2366,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         self.infcx.commit_unconditionally(|_| {
             let (skol_ty, _) = self.infcx.replace_bound_vars_with_placeholders(&ty);
             let Normalized { value: normalized_ty, mut obligations } =
-                project::normalize_with_depth(
-                    self,
-                    param_env,
-                    cause.clone(),
-                    recursion_depth,
-                    &skol_ty,
-                );
+                ensure_sufficient_stack(|| {
+                    project::normalize_with_depth(
+                        self,
+                        param_env,
+                        cause.clone(),
+                        recursion_depth,
+                        &skol_ty,
+                    )
+                });
             let skol_obligation = predicate_for_trait_def(
                 self.tcx(),
                 param_env,
@@ -2525,13 +2528,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             };
 
             let cause = obligation.derived_cause(BuiltinDerivedObligation);
-            self.collect_predicates_for_types(
-                obligation.param_env,
-                cause,
-                obligation.recursion_depth + 1,
-                trait_def,
-                nested,
-            )
+            ensure_sufficient_stack(|| {
+                self.collect_predicates_for_types(
+                    obligation.param_env,
+                    cause,
+                    obligation.recursion_depth + 1,
+                    trait_def,
+                    nested,
+                )
+            })
         } else {
             vec![]
         };
@@ -2568,38 +2573,39 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         nested: ty::Binder<Vec<Ty<'tcx>>>,
     ) -> VtableAutoImplData<PredicateObligation<'tcx>> {
         debug!("vtable_auto_impl: nested={:?}", nested);
 
-        let cause = obligation.derived_cause(BuiltinDerivedObligation);
-        let mut obligations = self.collect_predicates_for_types(
-            obligation.param_env,
-            cause,
-            obligation.recursion_depth + 1,
-            trait_def_id,
-            nested,
-        );
-
-        let trait_obligations: Vec<PredicateObligation<'_>> =
-            self.infcx.commit_unconditionally(|_| {
-                let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
-                let (trait_ref, _) =
-                    self.infcx.replace_bound_vars_with_placeholders(&poly_trait_ref);
-                let cause = obligation.derived_cause(ImplDerivedObligation);
-                self.impl_or_trait_obligations(
-                    cause,
-                    obligation.recursion_depth + 1,
-                    obligation.param_env,
-                    trait_def_id,
-                    &trait_ref.substs,
-                )
-            });
-
-        // Adds the predicates from the trait. Note that this contains a `Self: Trait`
-        // predicate as usual. It won't have any effect since auto traits are coinductive.
-        obligations.extend(trait_obligations);
-
-        debug!("vtable_auto_impl: obligations={:?}", obligations);
-
-        VtableAutoImplData { trait_def_id, nested: obligations }
+        ensure_sufficient_stack(|| {
+            let cause = obligation.derived_cause(BuiltinDerivedObligation);
+            let mut obligations = self.collect_predicates_for_types(
+                obligation.param_env,
+                cause,
+                obligation.recursion_depth + 1,
+                trait_def_id,
+                nested,
+            );
+
+            let trait_obligations: Vec<PredicateObligation<'_>> =
+                self.infcx.commit_unconditionally(|_| {
+                    let poly_trait_ref = obligation.predicate.to_poly_trait_ref();
+                    let (trait_ref, _) =
+                        self.infcx.replace_bound_vars_with_placeholders(&poly_trait_ref);
+                    let cause = obligation.derived_cause(ImplDerivedObligation);
+                    self.impl_or_trait_obligations(
+                        cause,
+                        obligation.recursion_depth + 1,
+                        obligation.param_env,
+                        trait_def_id,
+                        &trait_ref.substs,
+                    )
+                });
+
+            // Adds the predicates from the trait. Note that this contains a `Self: Trait`
+            // predicate as usual. It won't have any effect since auto traits are coinductive.
+            obligations.extend(trait_obligations);
+
+            debug!("vtable_auto_impl: obligations={:?}", obligations);
+
+            VtableAutoImplData { trait_def_id, nested: obligations }
+        })
     }
 
     fn confirm_impl_candidate(
@@ -2615,13 +2621,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             let substs = self.rematch_impl(impl_def_id, obligation, snapshot);
             debug!("confirm_impl_candidate: substs={:?}", substs);
             let cause = obligation.derived_cause(ImplDerivedObligation);
-            self.vtable_impl(
-                impl_def_id,
-                substs,
-                cause,
-                obligation.recursion_depth + 1,
-                obligation.param_env,
-            )
+            ensure_sufficient_stack(|| {
+                self.vtable_impl(
+                    impl_def_id,
+                    substs,
+                    cause,
+                    obligation.recursion_depth + 1,
+                    obligation.param_env,
+                )
+            })
         })
     }
@@ -2734,13 +2742,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
             )
             .map_bound(|(trait_ref, _)| trait_ref);
 
-        let Normalized { value: trait_ref, obligations } = project::normalize_with_depth(
-            self,
-            obligation.param_env,
-            obligation.cause.clone(),
-            obligation.recursion_depth + 1,
-            &trait_ref,
-        );
+        let Normalized { value: trait_ref, obligations } = ensure_sufficient_stack(|| {
+            project::normalize_with_depth(
+                self,
+                obligation.param_env,
+                obligation.cause.clone(),
+                obligation.recursion_depth + 1,
+                &trait_ref,
+            )
+        });
 
         self.confirm_poly_trait_refs(
             obligation.cause.clone(),
@@ -2798,13 +2808,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         debug!("confirm_generator_candidate({:?},{:?},{:?})", obligation, generator_def_id, substs);
 
         let trait_ref = self.generator_trait_ref_unnormalized(obligation, substs);
-        let Normalized { value: trait_ref, mut obligations } = normalize_with_depth(
-            self,
-            obligation.param_env,
-            obligation.cause.clone(),
-            obligation.recursion_depth + 1,
-            &trait_ref,
-        );
+        let Normalized { value: trait_ref, mut obligations } = ensure_sufficient_stack(|| {
+            normalize_with_depth(
+                self,
+                obligation.param_env,
+                obligation.cause.clone(),
+                obligation.recursion_depth + 1,
+                &trait_ref,
+            )
+        });
 
         debug!(
             "confirm_generator_candidate(generator_def_id={:?}, \
@@ -2843,13 +2855,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         };
 
         let trait_ref = self.closure_trait_ref_unnormalized(obligation, substs);
-        let Normalized { value: trait_ref, mut obligations } = normalize_with_depth(
-            self,
-            obligation.param_env,
-            obligation.cause.clone(),
-            obligation.recursion_depth + 1,
-            &trait_ref,
-        );
+        let Normalized { value: trait_ref, mut obligations } = ensure_sufficient_stack(|| {
+            normalize_with_depth(
+                self,
+                obligation.param_env,
+                obligation.cause.clone(),
+                obligation.recursion_depth + 1,
+                &trait_ref,
+            )
+        });
 
         debug!(
             "confirm_closure_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})",
@@ -3139,15 +3153,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 nested.extend(obligations);
 
                 // Construct the nested `T: Unsize<U>` predicate.
-                nested.push(predicate_for_trait_def(
-                    tcx,
-                    obligation.param_env,
-                    obligation.cause.clone(),
-                    obligation.predicate.def_id(),
-                    obligation.recursion_depth + 1,
-                    a_last.expect_ty(),
-                    &[b_last],
-                ));
+                nested.push(ensure_sufficient_stack(|| {
+                    predicate_for_trait_def(
+                        tcx,
+                        obligation.param_env,
+                        obligation.cause.clone(),
+                        obligation.predicate.def_id(),
+                        obligation.recursion_depth + 1,
+                        a_last.expect_ty(),
+                        &[b_last],
+                    )
+                }));
             }
 
             _ => bug!(),
@@ -3208,13 +3224,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         let impl_trait_ref = impl_trait_ref.subst(self.tcx(), impl_substs);
 
         let Normalized { value: impl_trait_ref, obligations: mut nested_obligations } =
-            project::normalize_with_depth(
-                self,
-                obligation.param_env,
-                obligation.cause.clone(),
-                obligation.recursion_depth + 1,
-                &impl_trait_ref,
-            );
+            ensure_sufficient_stack(|| {
+                project::normalize_with_depth(
+                    self,
+                    obligation.param_env,
+                    obligation.cause.clone(),
+                    obligation.recursion_depth + 1,
+                    &impl_trait_ref,
+                )
+            });
 
         debug!(
             "match_impl(impl_def_id={:?}, obligation={:?}, \

View File

@@ -191,10 +191,12 @@ fn dtorck_constraint_for_ty<'tcx>(
         ty::Array(ety, _) | ty::Slice(ety) => {
             // single-element containers, behave like their element
-            dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints)?;
+            rustc_data_structures::stack::ensure_sufficient_stack(|| {
+                dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety, constraints)
+            })?;
         }
 
-        ty::Tuple(tys) => {
+        ty::Tuple(tys) => rustc_data_structures::stack::ensure_sufficient_stack(|| {
             for ty in tys.iter() {
                 dtorck_constraint_for_ty(
                     tcx,
@@ -205,13 +207,15 @@ fn dtorck_constraint_for_ty<'tcx>(
                     constraints,
                 )?;
             }
-        }
+            Ok::<_, NoSolution>(())
+        })?,
 
-        ty::Closure(_, substs) => {
+        ty::Closure(_, substs) => rustc_data_structures::stack::ensure_sufficient_stack(|| {
             for ty in substs.as_closure().upvar_tys() {
                 dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty, constraints)?;
             }
-        }
+            Ok::<_, NoSolution>(())
+        })?,
 
         ty::Generator(_, substs, _movability) => {
             // rust-lang/rust#49918: types can be constructed, stored

View File

@@ -131,6 +131,7 @@ const WHITELIST: &[&str] = &[
     "polonius-engine",
    "ppv-lite86",
    "proc-macro2",
+    "psm",
    "punycode",
    "quick-error",
    "quote",
@@ -160,6 +161,7 @@ const WHITELIST: &[&str] = &[
    "sha-1",
    "smallvec",
    "stable_deref_trait",
+    "stacker",
    "syn",
    "synstructure",
    "tempfile",