Auto merge of #3731 - rust-lang:rustup-2024-07-04, r=saethlin
Automatic Rustup
commit 4a4a81aec4

Cargo.lock (23 lines changed)
@@ -3141,7 +3141,19 @@ dependencies = [
"bitflags 2.5.0",
"getopts",
"memchr",
"pulldown-cmark-escape",
"pulldown-cmark-escape 0.10.1",
"unicase",
]

[[package]]
name = "pulldown-cmark"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8746739f11d39ce5ad5c2520a9b75285310dbfe78c541ccf832d38615765aec0"
dependencies = [
"bitflags 2.5.0",
"memchr",
"pulldown-cmark-escape 0.11.0",
"unicase",
]

@@ -3151,6 +3163,12 @@ version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd348ff538bc9caeda7ee8cad2d1d48236a1f443c1fa3913c6a02fe0043b1dd3"

[[package]]
name = "pulldown-cmark-escape"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "007d8adb5ddab6f8e3f491ac63566a7d5002cc7ed73901f72057943fa71ae1ae"

[[package]]
name = "pulldown-cmark-to-cmark"
version = "13.0.0"

@@ -4604,7 +4622,7 @@ name = "rustc_resolve"
version = "0.0.0"
dependencies = [
"bitflags 2.5.0",
"pulldown-cmark 0.9.6",
"pulldown-cmark 0.11.0",
"rustc_arena",
"rustc_ast",
"rustc_ast_pretty",

@@ -4883,6 +4901,7 @@ dependencies = [
"indexmap",
"itertools",
"minifier",
"pulldown-cmark 0.9.6",
"regex",
"rustdoc-json-types",
"serde",
@@ -176,6 +176,8 @@ pub enum GenericArgs {
AngleBracketed(AngleBracketedArgs),
/// The `(A, B)` and `C` in `Foo(A, B) -> C`.
Parenthesized(ParenthesizedArgs),
/// `(..)` in return type notation
ParenthesizedElided(Span),
}

impl GenericArgs {

@@ -187,6 +189,7 @@ impl GenericArgs {
match self {
AngleBracketed(data) => data.span,
Parenthesized(data) => data.span,
ParenthesizedElided(span) => *span,
}
}
}
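The new `ParenthesizedElided` variant models return type notation bounds written as `(..)`. A minimal sketch of the surface syntax involved (illustrative only; assumes the nightly `return_type_notation` feature and hypothetical trait/function names):

#![feature(return_type_notation)]

trait Database {
    // A method returning an opaque type; RTN constrains that return type.
    fn query(&self) -> impl Sized;
}

// `query(..)` elides all argument types; the bound applies to the return
// type of `query` for every possible choice of arguments.
fn process<D: Database<query(..): Send>>(db: D) {
    let _ = &db;
}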
@@ -2051,7 +2054,7 @@ impl UintTy {
/// * the `A: Bound` in `Trait<A: Bound>`
/// * the `RetTy` in `Trait(ArgTy, ArgTy) -> RetTy`
/// * the `C = { Ct }` in `Trait<C = { Ct }>` (feature `associated_const_equality`)
/// * the `f(): Bound` in `Trait<f(): Bound>` (feature `return_type_notation`)
/// * the `f(..): Bound` in `Trait<f(..): Bound>` (feature `return_type_notation`)
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct AssocItemConstraint {
pub id: NodeId,
@@ -204,12 +204,14 @@ impl Attribute {

pub fn tokens(&self) -> TokenStream {
match &self.kind {
AttrKind::Normal(normal) => normal
.tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
.to_attr_token_stream()
.to_tokenstream(),
AttrKind::Normal(normal) => TokenStream::new(
normal
.tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {self:?}"))
.to_attr_token_stream()
.to_token_trees(),
),
&AttrKind::DocComment(comment_kind, data) => TokenStream::token_alone(
token::DocComment(comment_kind, self.style, data),
self.span,
@@ -582,6 +582,7 @@ fn noop_visit_generic_args<T: MutVisitor>(generic_args: &mut GenericArgs, vis: &
match generic_args {
GenericArgs::AngleBracketed(data) => vis.visit_angle_bracketed_parameter_data(data),
GenericArgs::Parenthesized(data) => vis.visit_parenthesized_parameter_data(data),
GenericArgs::ParenthesizedElided(span) => vis.visit_span(span),
}
}
@@ -23,7 +23,6 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
use rustc_serialize::{Decodable, Encodable};
use rustc_span::{sym, Span, SpanDecoder, SpanEncoder, Symbol, DUMMY_SP};
use smallvec::{smallvec, SmallVec};

use std::borrow::Cow;
use std::{cmp, fmt, iter};

@@ -180,27 +179,25 @@ impl AttrTokenStream {
AttrTokenStream(Lrc::new(tokens))
}

/// Converts this `AttrTokenStream` to a plain `TokenStream`.
/// Converts this `AttrTokenStream` to a plain `Vec<TokenTree>`.
/// During conversion, `AttrTokenTree::Attributes` get 'flattened'
/// back to a `TokenStream` of the form `outer_attr attr_target`.
/// If there are inner attributes, they are inserted into the proper
/// place in the attribute target tokens.
pub fn to_tokenstream(&self) -> TokenStream {
let trees: Vec<_> = self
.0
.iter()
.flat_map(|tree| match &tree {
pub fn to_token_trees(&self) -> Vec<TokenTree> {
let mut res = Vec::with_capacity(self.0.len());
for tree in self.0.iter() {
match tree {
AttrTokenTree::Token(inner, spacing) => {
smallvec![TokenTree::Token(inner.clone(), *spacing)].into_iter()
res.push(TokenTree::Token(inner.clone(), *spacing));
}
AttrTokenTree::Delimited(span, spacing, delim, stream) => {
smallvec![TokenTree::Delimited(
res.push(TokenTree::Delimited(
*span,
*spacing,
*delim,
stream.to_tokenstream()
),]
.into_iter()
TokenStream::new(stream.to_token_trees()),
))
}
AttrTokenTree::Attributes(data) => {
let idx = data

@@ -208,14 +205,7 @@ impl AttrTokenStream {
.partition_point(|attr| matches!(attr.style, crate::AttrStyle::Outer));
let (outer_attrs, inner_attrs) = data.attrs.split_at(idx);

let mut target_tokens: Vec<_> = data
.tokens
.to_attr_token_stream()
.to_tokenstream()
.0
.iter()
.cloned()
.collect();
let mut target_tokens = data.tokens.to_attr_token_stream().to_token_trees();
if !inner_attrs.is_empty() {
let mut found = false;
// Check the last two trees (to account for a trailing semi)

@@ -251,17 +241,14 @@ impl AttrTokenStream {
"Failed to find trailing delimited group in: {target_tokens:?}"
);
}
let mut flat: SmallVec<[_; 1]> =
SmallVec::with_capacity(target_tokens.len() + outer_attrs.len());
for attr in outer_attrs {
flat.extend(attr.tokens().0.iter().cloned());
res.extend(attr.tokens().0.iter().cloned());
}
flat.extend(target_tokens);
flat.into_iter()
res.extend(target_tokens);
}
})
.collect();
TokenStream::new(trees)
}
}
res
}
}

@@ -409,8 +396,8 @@ impl PartialEq<TokenStream> for TokenStream {
}

impl TokenStream {
pub fn new(streams: Vec<TokenTree>) -> TokenStream {
TokenStream(Lrc::new(streams))
pub fn new(tts: Vec<TokenTree>) -> TokenStream {
TokenStream(Lrc::new(tts))
}

pub fn is_empty(&self) -> bool {

@@ -461,7 +448,7 @@ impl TokenStream {
AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };
AttrTokenStream::new(vec![AttrTokenTree::Attributes(attr_data)])
};
attr_stream.to_tokenstream()
TokenStream::new(attr_stream.to_token_trees())
}

pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
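A rough standalone sketch of the shape this refactor moves to (illustrative only; `Tree` is a stand-in for rustc's `AttrTokenTree`/`TokenTree`, not the real types): one pre-sized `Vec` pushed into directly replaces the earlier `flat_map` over `SmallVec`s.

#[derive(Clone)]
enum Tree {
    Token(char),          // stand-in for TokenTree::Token
    Delimited(Vec<Tree>), // stand-in for TokenTree::Delimited
}

fn to_trees(input: &[Tree]) -> Vec<Tree> {
    // Push into one Vec instead of flat_map(|t| smallvec![...]).collect().
    let mut res = Vec::with_capacity(input.len());
    for tree in input {
        match tree {
            Tree::Token(c) => res.push(Tree::Token(*c)),
            Tree::Delimited(inner) => res.push(Tree::Delimited(to_trees(inner))),
        }
    }
    res
}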
@@ -1,7 +1,8 @@
//! Routines the parser and pretty-printer use to classify AST nodes.

use crate::ast::ExprKind::*;
use crate::{ast, token::Delimiter};
use crate::ast::{self, MatchKind};
use crate::token::Delimiter;

/// This classification determines whether various syntactic positions break out
/// of parsing the current expression (true) or continue parsing more of the

@@ -81,6 +82,82 @@ pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool {
}
}

/// Returns whether the leftmost token of the given expression is the label of a
/// labeled loop or block, such as in `'inner: loop { break 'inner 1 } + 1`.
///
/// Such expressions are not allowed as the value of an unlabeled break.
///
/// ```ignore (illustrative)
/// 'outer: {
/// break 'inner: loop { break 'inner 1 } + 1; // invalid syntax
///
/// break 'outer 'inner: loop { break 'inner 1 } + 1; // okay
///
/// break ('inner: loop { break 'inner 1 } + 1); // okay
///
/// break ('inner: loop { break 'inner 1 }) + 1; // okay
/// }
/// ```
pub fn leading_labeled_expr(mut expr: &ast::Expr) -> bool {
loop {
match &expr.kind {
Block(_, label) | ForLoop { label, .. } | Loop(_, label, _) | While(_, _, label) => {
return label.is_some();
}

Assign(e, _, _)
| AssignOp(_, e, _)
| Await(e, _)
| Binary(_, e, _)
| Call(e, _)
| Cast(e, _)
| Field(e, _)
| Index(e, _, _)
| Match(e, _, MatchKind::Postfix)
| Range(Some(e), _, _)
| Try(e) => {
expr = e;
}
MethodCall(method_call) => {
expr = &method_call.receiver;
}

AddrOf(..)
| Array(..)
| Become(..)
| Break(..)
| Closure(..)
| ConstBlock(..)
| Continue(..)
| FormatArgs(..)
| Gen(..)
| If(..)
| IncludedBytes(..)
| InlineAsm(..)
| Let(..)
| Lit(..)
| MacCall(..)
| Match(_, _, MatchKind::Prefix)
| OffsetOf(..)
| Paren(..)
| Path(..)
| Range(None, _, _)
| Repeat(..)
| Ret(..)
| Struct(..)
| TryBlock(..)
| Tup(..)
| Type(..)
| Unary(..)
| Underscore
| Yeet(..)
| Yield(..)
| Err(..)
| Dummy => return false,
}
}
}

pub enum TrailingBrace<'a> {
/// Trailing brace in a macro call, like the one in `x as *const brace! {}`.
/// We will suggest changing the macro call to a different delimiter.

@@ -234,6 +311,6 @@ fn path_return_type(path: &ast::Path) -> Option<&ast::Ty> {
ast::FnRetTy::Default(_) => None,
ast::FnRetTy::Ty(ret) => Some(ret),
},
ast::GenericArgs::AngleBracketed(_) => None,
ast::GenericArgs::AngleBracketed(_) | ast::GenericArgs::ParenthesizedElided(_) => None,
}
}
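As a usage note, the pretty-printer hunk later in this diff consults `classify::leading_labeled_expr` to decide when a `break` operand needs parentheses. A small compilable illustration of one accepted form from the doc comment above:

fn main() {
    let n = 'outer: {
        // Without the parentheses, `'inner` would attach to `break`
        // itself instead of starting the operand expression.
        break 'outer ('inner: loop { break 'inner 1 } + 1);
    };
    assert_eq!(n, 2);
}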
@@ -609,6 +609,7 @@ where
walk_list!(visitor, visit_ty, inputs);
try_visit!(visitor.visit_fn_ret_ty(output));
}
GenericArgs::ParenthesizedElided(_span) => {}
}
V::Result::output()
}
@@ -36,10 +36,15 @@ ast_lowering_bad_return_type_notation_inputs =
argument types not allowed with return type notation
.suggestion = remove the input types

ast_lowering_bad_return_type_notation_needs_dots = return type notation arguments must be elided with `..`
.suggestion = add `..`

ast_lowering_bad_return_type_notation_output =
return type not allowed with return type notation
.suggestion = remove the return type

ast_lowering_bad_return_type_notation_position = return type notation not allowed in this position yet

ast_lowering_base_expression_double_dot =
base expression required after `..`
.suggestion = add a base expression here
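For reference, a sketch of source forms that would trigger each new message (illustrative only; hypothetical trait and bound names, nightly `return_type_notation` feature assumed):

#![feature(return_type_notation)]

trait Foo {
    fn method(&self) -> impl Sized;
}

// argument types not allowed with return type notation:
//     fn a<T: Foo<method(i32): Send>>() {}
// return type notation arguments must be elided with `..`:
//     fn b<T: Foo<method(): Send>>() {}
// return type not allowed with return type notation:
//     fn c<T: Foo<method() -> i32: Send>>() {}

// Accepted form:
fn ok<T: Foo<method(..): Send>>() {}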
@@ -393,6 +393,17 @@ pub enum BadReturnTypeNotation {
#[suggestion(code = "", applicability = "maybe-incorrect")]
span: Span,
},
#[diag(ast_lowering_bad_return_type_notation_needs_dots)]
NeedsDots {
#[primary_span]
#[suggestion(code = "(..)", applicability = "maybe-incorrect")]
span: Span,
},
#[diag(ast_lowering_bad_return_type_notation_position)]
Position {
#[primary_span]
span: Span,
},
}

#[derive(Diagnostic)]
@@ -985,20 +985,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.lower_angle_bracketed_parameter_data(data, ParamMode::Explicit, itctx).0
}
GenericArgs::Parenthesized(data) => {
if data.inputs.is_empty() && matches!(data.output, FnRetTy::Default(..)) {
let parenthesized = if self.tcx.features().return_type_notation {
hir::GenericArgsParentheses::ReturnTypeNotation
} else {
self.emit_bad_parenthesized_trait_in_assoc_ty(data);
hir::GenericArgsParentheses::No
};
GenericArgsCtor {
args: Default::default(),
constraints: &[],
parenthesized,
span: data.inputs_span,
}
} else if let Some(first_char) = constraint.ident.as_str().chars().next()
if let Some(first_char) = constraint.ident.as_str().chars().next()
&& first_char.is_ascii_lowercase()
{
let mut err = if !data.inputs.is_empty() {

@@ -1010,7 +997,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
span: data.inputs_span.shrink_to_hi().to(ty.span),
})
} else {
unreachable!("inputs are empty and return type is not provided")
self.dcx().create_err(errors::BadReturnTypeNotation::NeedsDots {
span: data.inputs_span,
})
};
if !self.tcx.features().return_type_notation
&& self.tcx.sess.is_nightly_build()

@@ -1040,6 +1029,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
.0
}
}
GenericArgs::ParenthesizedElided(span) => GenericArgsCtor {
args: Default::default(),
constraints: &[],
parenthesized: hir::GenericArgsParentheses::ReturnTypeNotation,
span: *span,
},
};
gen_args_ctor.into_generic_args(self)
} else {
@@ -1,7 +1,8 @@
use crate::ImplTraitPosition;

use super::errors::{
AsyncBoundNotOnTrait, AsyncBoundOnlyForFnTraits, GenericTypeWithParentheses, UseAngleBrackets,
AsyncBoundNotOnTrait, AsyncBoundOnlyForFnTraits, BadReturnTypeNotation,
GenericTypeWithParentheses, UseAngleBrackets,
};
use super::ResolverAstLoweringExt;
use super::{GenericArgsCtor, LifetimeRes, ParenthesizedGenericArgs};

@@ -271,6 +272,18 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
)
}
},
GenericArgs::ParenthesizedElided(span) => {
self.dcx().emit_err(BadReturnTypeNotation::Position { span: *span });
(
GenericArgsCtor {
args: Default::default(),
constraints: &[],
parenthesized: hir::GenericArgsParentheses::ReturnTypeNotation,
span: *span,
},
false,
)
}
}
} else {
(
@@ -1312,6 +1312,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
self.with_impl_trait(None, |this| this.visit_ty(ty));
}
}
GenericArgs::ParenthesizedElided(_span) => {}
}
}

@@ -1468,7 +1469,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
span: args.span,
});
}
None => {}
Some(ast::GenericArgs::ParenthesizedElided(_)) | None => {}
}
}
}

@@ -1716,7 +1717,9 @@ fn deny_equality_constraints(
// Add `<Bar = RhsTy>` to `Foo`.
match &mut assoc_path.segments[len].args {
Some(args) => match args.deref_mut() {
GenericArgs::Parenthesized(_) => continue,
GenericArgs::Parenthesized(_) | GenericArgs::ParenthesizedElided(..) => {
continue;
}
GenericArgs::AngleBracketed(args) => {
args.args.push(arg);
}
@@ -1,6 +1,6 @@
use rustc_ast as ast;
use rustc_ast::visit::{self, AssocCtxt, FnCtxt, FnKind, Visitor};
use rustc_ast::{attr, AssocItemConstraint, AssocItemConstraintKind, NodeId};
use rustc_ast::{attr, NodeId};
use rustc_ast::{token, PatKind};
use rustc_feature::{AttributeGate, BuiltinAttribute, Features, GateIssue, BUILTIN_ATTRIBUTE_MAP};
use rustc_session::parse::{feature_err, feature_err_issue, feature_warn};

@@ -445,23 +445,6 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
visit::walk_fn(self, fn_kind)
}

fn visit_assoc_item_constraint(&mut self, constraint: &'a AssocItemConstraint) {
if let AssocItemConstraintKind::Bound { .. } = constraint.kind
&& let Some(ast::GenericArgs::Parenthesized(args)) = constraint.gen_args.as_ref()
&& args.inputs.is_empty()
&& let ast::FnRetTy::Default(..) = args.output
{
gate!(
&self,
return_type_notation,
constraint.span,
"return type notation is experimental"
);
}

visit::walk_assoc_item_constraint(self, constraint)
}

fn visit_assoc_item(&mut self, i: &'a ast::AssocItem, ctxt: AssocCtxt) {
let is_fn = match &i.kind {
ast::AssocItemKind::Fn(_) => true,

@@ -566,6 +549,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
unsafe_extern_blocks,
"`unsafe extern {}` blocks and `safe` keyword are experimental"
);
gate_all!(return_type_notation, "return type notation is experimental");

if !visitor.features.never_patterns {
if let Some(spans) = spans.get(&sym::never_patterns) {

@@ -611,10 +595,6 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {

gate_all_legacy_dont_use!(box_patterns, "box pattern syntax is experimental");
gate_all_legacy_dont_use!(trait_alias, "trait aliases are experimental");
// Despite being a new feature, `where T: Trait<Assoc(): Sized>`, which is RTN syntax now,
// used to be gated under associated_type_bounds, which are right above, so RTN needs to
// be too.
gate_all_legacy_dont_use!(return_type_notation, "return type notation is experimental");
gate_all_legacy_dont_use!(decl_macro, "`macro` is experimental");
gate_all_legacy_dont_use!(try_blocks, "`try` blocks are unstable");
gate_all_legacy_dont_use!(auto_traits, "`auto` traits are unstable");
@@ -1060,6 +1060,11 @@ impl<'a> PrintState<'a> for State<'a> {
self.word(")");
self.print_fn_ret_ty(&data.output);
}
ast::GenericArgs::ParenthesizedElided(_) => {
self.word("(");
self.word("..");
self.word(")");
}
}
}
}
@@ -5,6 +5,7 @@ use ast::{ForLoopKind, MatchKind};
use itertools::{Itertools, Position};
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::util::classify;
use rustc_ast::util::literal::escape_byte_str_symbol;
use rustc_ast::util::parser::{self, AssocOp, Fixity};
use rustc_ast::{self as ast, BlockCheckMode};

@@ -610,9 +611,12 @@ impl<'a> State<'a> {
}
if let Some(expr) = opt_expr {
self.space();
self.print_expr_maybe_paren(
self.print_expr_cond_paren(
expr,
parser::PREC_JUMP,
// Parenthesize if required by precedence, or in the
// case of `break 'inner: loop { break 'inner 1 } + 1`
expr.precedence().order() < parser::PREC_JUMP
|| (opt_label.is_none() && classify::leading_labeled_expr(expr)),
fixup.subsequent_subexpression(),
);
}
@@ -1,4 +1,6 @@
use crate::region_infer::{ConstraintSccs, RegionDefinition, RegionTracker};
use crate::type_check::Locations;
use crate::universal_regions::UniversalRegions;
use rustc_index::{IndexSlice, IndexVec};
use rustc_middle::mir::ConstraintCategory;
use rustc_middle::ty::{RegionVid, TyCtxt, VarianceDiagInfo};

@@ -48,6 +50,110 @@ impl<'tcx> OutlivesConstraintSet<'tcx> {
) -> &IndexSlice<OutlivesConstraintIndex, OutlivesConstraint<'tcx>> {
&self.outlives
}

/// Computes cycles (SCCs) in the graph of regions. In particular,
/// find all regions R1, R2 such that R1: R2 and R2: R1 and group
/// them into an SCC, and find the relationships between SCCs.
pub(crate) fn compute_sccs(
&self,
static_region: RegionVid,
definitions: &IndexVec<RegionVid, RegionDefinition<'tcx>>,
) -> ConstraintSccs {
let constraint_graph = self.graph(definitions.len());
let region_graph = &constraint_graph.region_graph(self, static_region);
ConstraintSccs::new_with_annotation(&region_graph, |r| {
RegionTracker::new(r, &definitions[r])
})
}

/// This method handles Universe errors by rewriting the constraint
/// graph. For each strongly connected component in the constraint
/// graph such that there is a series of constraints
/// A: B: C: ... : X where
/// A's universe is smaller than X's and A is a placeholder,
/// add a constraint that A: 'static. This is a safe upper bound
/// in the face of borrow checker/trait solver limitations that will
/// eventually go away.
///
/// For a more precise definition, see the documentation for
/// [`RegionTracker::has_incompatible_universes()`].
///
/// This edge case used to be handled during constraint propagation
/// by iterating over the strongly connected components in the constraint
/// graph while maintaining a set of bookkeeping mappings similar
/// to what is stored in `RegionTracker` and manually adding 'static as
/// needed.
///
/// It was rewritten as part of the Polonius project with the goal of moving
/// higher-kindedness concerns out of the path of the borrow checker,
/// for two reasons:
///
/// 1. Implementing Polonius is difficult enough without also
/// handling them.
/// 2. The long-term goal is to handle higher-kinded concerns
/// in the trait solver, where they belong. This avoids
/// logic duplication and allows future trait solvers
/// to compute better bounds than for example our
/// "must outlive 'static" here.
///
/// This code is a stop-gap measure in preparation for the future trait solver.
///
/// Every constraint added by this method is an
/// internal `IllegalUniverse` constraint.
#[instrument(skip(self, universal_regions, definitions))]
pub(crate) fn add_outlives_static(
&mut self,
universal_regions: &UniversalRegions<'tcx>,
definitions: &IndexVec<RegionVid, RegionDefinition<'tcx>>,
) -> ConstraintSccs {
let fr_static = universal_regions.fr_static;
let sccs = self.compute_sccs(fr_static, definitions);

// Changed to `true` if we added any constraints to `self` and need to
// recompute SCCs.
let mut added_constraints = false;

for scc in sccs.all_sccs() {
// No point in adding 'static: 'static!
// This micro-optimisation makes some sense
// because static outlives *everything*.
if scc == sccs.scc(fr_static) {
continue;
}

let annotation = sccs.annotation(scc);

// If this SCC participates in a universe violation,
// e.g. if it reaches a region with a universe smaller than
// the largest region reached, add a requirement that it must
// outlive `'static`.
if annotation.has_incompatible_universes() {
// Optimisation opportunity: this will add more constraints than
// needed for correctness, since an SCC upstream of another with
// a universe violation will "infect" its downstream SCCs to also
// outlive static.
added_constraints = true;
let scc_representative_outlives_static = OutlivesConstraint {
sup: annotation.representative,
sub: fr_static,
category: ConstraintCategory::IllegalUniverse,
locations: Locations::All(rustc_span::DUMMY_SP),
span: rustc_span::DUMMY_SP,
variance_info: VarianceDiagInfo::None,
from_closure: false,
};
self.push(scc_representative_outlives_static);
}
}

if added_constraints {
// We changed the constraint set and so must recompute SCCs.
self.compute_sccs(fr_static, definitions)
} else {
// If we didn't add any back-edges, no more work needs doing
sccs
}
}
}

impl<'tcx> Index<OutlivesConstraintIndex> for OutlivesConstraintSet<'tcx> {
@@ -3733,7 +3733,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> {
if tcx.is_diagnostic_item(sym::deref_method, method_did) {
let deref_target =
tcx.get_diagnostic_item(sym::deref_target).and_then(|deref_target| {
Instance::resolve(tcx, self.param_env, deref_target, method_args)
Instance::try_resolve(tcx, self.param_env, deref_target, method_args)
.transpose()
});
if let Some(Ok(instance)) = deref_target {
@@ -66,7 +66,8 @@ impl<'tcx> ConstraintDescription for ConstraintCategory<'tcx> {
ConstraintCategory::Predicate(_)
| ConstraintCategory::Boring
| ConstraintCategory::BoringNoLocation
| ConstraintCategory::Internal => "",
| ConstraintCategory::Internal
| ConstraintCategory::IllegalUniverse => "",
}
}
}

@@ -948,7 +949,7 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> {
return;
}

if let Ok(Some(instance)) = ty::Instance::resolve(
if let Ok(Some(instance)) = ty::Instance::try_resolve(
tcx,
self.param_env,
*fn_did,
@@ -62,7 +62,7 @@ pub struct RegionTracker {
/// The representative Region Variable Id for this SCC. We prefer
/// placeholders over existentially quantified variables, otherwise
/// it's the one with the smallest Region Variable ID.
representative: RegionVid,
pub(crate) representative: RegionVid,

/// Is the current representative a placeholder?
representative_is_placeholder: bool,

@@ -97,7 +97,7 @@ impl scc::Annotation for RegionTracker {
}

impl RegionTracker {
fn new(rvid: RegionVid, definition: &RegionDefinition<'_>) -> Self {
pub(crate) fn new(rvid: RegionVid, definition: &RegionDefinition<'_>) -> Self {
let (representative_is_placeholder, representative_is_existential) = match definition.origin
{
rustc_infer::infer::NllRegionVariableOrigin::FreeRegion => (false, false),

@@ -116,7 +116,9 @@ impl RegionTracker {
representative_is_existential,
}
}
fn universe(self) -> UniverseIndex {

/// The smallest-indexed universe reachable from and/or in this SCC.
fn min_universe(self) -> UniverseIndex {
self.min_reachable_universe
}

@@ -132,8 +134,8 @@ impl RegionTracker {

/// Returns `true` if the annotated SCC reaches a placeholder
/// with a universe larger than the smallest reachable one, `false` otherwise.
pub fn has_incompatible_universes(&self) -> bool {
self.universe().cannot_name(self.max_placeholder_universe_reached)
pub(crate) fn has_incompatible_universes(&self) -> bool {
self.min_universe().cannot_name(self.max_placeholder_universe_reached)
}
}

@@ -163,7 +165,7 @@ pub struct RegionInferenceContext<'tcx> {
/// The SCC computed from `constraints` and the constraint
/// graph. We have an edge from SCC A to SCC B if `A: B`. Used to
/// compute the values of each region.
constraint_sccs: Rc<ConstraintSccs>,
constraint_sccs: ConstraintSccs,

/// Reverse of the SCC constraint graph -- i.e., an edge `A -> B` exists if
/// `B: A`. This is used to compute the universal regions that are required

@@ -401,7 +403,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
universal_regions: Rc<UniversalRegions<'tcx>>,
placeholder_indices: Rc<PlaceholderIndices>,
universal_region_relations: Frozen<UniversalRegionRelations<'tcx>>,
outlives_constraints: OutlivesConstraintSet<'tcx>,
mut outlives_constraints: OutlivesConstraintSet<'tcx>,
member_constraints_in: MemberConstraintSet<'tcx, RegionVid>,
universe_causes: FxIndexMap<ty::UniverseIndex, UniverseInfo<'tcx>>,
type_tests: Vec<TypeTest<'tcx>>,

@@ -419,17 +421,10 @@ impl<'tcx> RegionInferenceContext<'tcx> {
.map(|info| RegionDefinition::new(info.universe, info.origin))
.collect();

let fr_static = universal_regions.fr_static;
let constraint_sccs =
outlives_constraints.add_outlives_static(&universal_regions, &definitions);
let constraints = Frozen::freeze(outlives_constraints);
let constraint_graph = Frozen::freeze(constraints.graph(definitions.len()));
let constraint_sccs = {
let constraint_graph = constraints.graph(definitions.len());
let region_graph = &constraint_graph.region_graph(&constraints, fr_static);
let sccs = ConstraintSccs::new_with_annotation(&region_graph, |r| {
RegionTracker::new(r, &definitions[r])
});
Rc::new(sccs)
};

if cfg!(debug_assertions) {
sccs_info(infcx, &constraint_sccs);

@@ -548,21 +543,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
}

NllRegionVariableOrigin::Placeholder(placeholder) => {
// Each placeholder region is only visible from
// its universe `ui` and its extensions. So we
// can't just add it into `scc` unless the
// universe of the scc can name this region.
let scc_universe = self.scc_universe(scc);
if scc_universe.can_name(placeholder.universe) {
self.scc_values.add_element(scc, placeholder);
} else {
debug!(
"init_free_and_bound_regions: placeholder {:?} is \
not compatible with universe {:?} of its SCC {:?}",
placeholder, scc_universe, scc,
);
self.add_incompatible_universe(scc);
}
self.scc_values.add_element(scc, placeholder);
}

NllRegionVariableOrigin::Existential { .. } => {

@@ -744,23 +725,10 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// (which is assured by iterating over SCCs in dependency order).
#[instrument(skip(self), level = "debug")]
fn compute_value_for_scc(&mut self, scc_a: ConstraintSccIndex) {
let constraint_sccs = self.constraint_sccs.clone();

// Walk each SCC `B` such that `A: B`...
for &scc_b in constraint_sccs.successors(scc_a) {
for &scc_b in self.constraint_sccs.successors(scc_a) {
debug!(?scc_b);

// ...and add elements from `B` into `A`. One complication
// arises because of universes: If `B` contains something
// that `A` cannot name, then `A` can only contain `B` if
// it outlives static.
if self.universe_compatible(scc_b, scc_a) {
// `A` can name everything that is in `B`, so just
// merge the bits.
self.scc_values.add_region(scc_a, scc_b);
} else {
self.add_incompatible_universe(scc_a);
}
self.scc_values.add_region(scc_a, scc_b);
}

// Now take member constraints into account.

@@ -814,7 +782,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// If the member region lives in a higher universe, we currently choose
// the most conservative option by leaving it unchanged.

if !self.constraint_sccs().annotation(scc).universe().is_root() {
if !self.constraint_sccs().annotation(scc).min_universe().is_root() {
return;
}

@@ -886,35 +854,20 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// in `scc_a`. Used during constraint propagation, and only once
/// the value of `scc_b` has been computed.
fn universe_compatible(&self, scc_b: ConstraintSccIndex, scc_a: ConstraintSccIndex) -> bool {
let universe_a = self.constraint_sccs().annotation(scc_a).universe();
let universe_b = self.constraint_sccs().annotation(scc_b).universe();
let a_annotation = self.constraint_sccs().annotation(scc_a);
let b_annotation = self.constraint_sccs().annotation(scc_b);
let a_universe = a_annotation.min_universe();

// Quick check: if scc_b's declared universe is a subset of
// If scc_b's declared universe is a subset of
// scc_a's declared universe (typically, both are ROOT), then
// it cannot contain any problematic universe elements.
if universe_a.can_name(universe_b) {
if a_universe.can_name(b_annotation.min_universe()) {
return true;
}

// Otherwise, we have to iterate over the universe elements in
// B's value, and check whether all of them are nameable
// from universe_a
self.scc_values.placeholders_contained_in(scc_b).all(|p| universe_a.can_name(p.universe))
}

/// Extend `scc` so that it can outlive some placeholder region
/// from a universe it can't name; at present, the only way for
/// this to be true is if `scc` outlives `'static`. This is
/// actually stricter than necessary: ideally, we'd support bounds
/// like `for<'a: 'b>` that might then allow us to approximate
/// `'a` with `'b` and not `'static`. But it will have to do for
/// now.
fn add_incompatible_universe(&mut self, scc: ConstraintSccIndex) {
debug!("add_incompatible_universe(scc={:?})", scc);

let fr_static = self.universal_regions.fr_static;
self.scc_values.add_all_points(scc);
self.scc_values.add_element(scc, fr_static);
// Otherwise, there can be no placeholder in `b` with a too high
// universe index to name from `a`.
a_universe.can_name(b_annotation.max_placeholder_universe_reached)
}

/// Once regions have been propagated, this method is used to see

@@ -1022,7 +975,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
"lower_bound = {:?} r_scc={:?} universe={:?}",
lower_bound,
r_scc,
self.constraint_sccs.annotation(r_scc).universe()
self.constraint_sccs.annotation(r_scc).min_universe()
);

// If the type test requires that `T: 'a` where `'a` is a

@@ -1539,7 +1492,7 @@ impl<'tcx> RegionInferenceContext<'tcx> {
/// The minimum universe of any variable reachable from this
/// SCC, inside or outside of it.
fn scc_universe(&self, scc: ConstraintSccIndex) -> UniverseIndex {
self.constraint_sccs().annotation(scc).universe()
self.constraint_sccs().annotation(scc).min_universe()
}
/// Checks the final value for the free region `fr` to see if it
/// grew too large. In particular, examine what `end(X)` points

@@ -1896,6 +1849,10 @@ impl<'tcx> RegionInferenceContext<'tcx> {

// This loop can be hot.
for constraint in outgoing_edges_from_graph {
if matches!(constraint.category, ConstraintCategory::IllegalUniverse) {
debug!("Ignoring illegal universe constraint: {constraint:?}");
continue;
}
handle_constraint(constraint);
}
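A rough model of the universe check these hunks consolidate around `min_universe()` (stand-in types, not rustc's actual `ty::UniverseIndex` API): a universe can only name placeholders from universes no newer than itself.

#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct UniverseIndex(u32); // stand-in for rustc's ty::UniverseIndex

impl UniverseIndex {
    // Universe `self` can name anything declared in `other` only if
    // `other` is not a newer (larger-indexed) universe.
    fn can_name(self, other: UniverseIndex) -> bool {
        self.0 >= other.0
    }
    fn cannot_name(self, other: UniverseIndex) -> bool {
        !self.can_name(other)
    }
}

// Mirrors RegionTracker::has_incompatible_universes: an SCC is in
// violation when the smallest universe it reaches cannot name the
// largest placeholder universe it reaches.
fn has_incompatible_universes(min_reachable: UniverseIndex, max_placeholder: UniverseIndex) -> bool {
    min_reachable.cannot_name(max_placeholder)
}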
@@ -38,16 +38,14 @@ pub(crate) fn cfg_eval(
lint_node_id: NodeId,
) -> Annotatable {
let features = Some(features);
CfgEval { cfg: &mut StripUnconfigured { sess, features, config_tokens: true, lint_node_id } }
CfgEval(StripUnconfigured { sess, features, config_tokens: true, lint_node_id })
.configure_annotatable(annotatable)
// Since the item itself has already been configured by the `InvocationCollector`,
// we know that the fold result vector will contain exactly one element.
.unwrap()
}

struct CfgEval<'a, 'b> {
cfg: &'a mut StripUnconfigured<'b>,
}
struct CfgEval<'a>(StripUnconfigured<'a>);

fn flat_map_annotatable(
vis: &mut impl MutVisitor,

@@ -125,9 +123,9 @@ fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
res.is_break()
}

impl CfgEval<'_, '_> {
impl CfgEval<'_> {
fn configure<T: HasAttrs + HasTokens>(&mut self, node: T) -> Option<T> {
self.cfg.configure(node)
self.0.configure(node)
}

fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Option<Annotatable> {

@@ -196,7 +194,7 @@ impl CfgEval<'_, '_> {
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrTokenStream` (specifically, we capture
// `AttrTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
let mut parser = Parser::new(&self.cfg.sess.psess, orig_tokens, None);
let mut parser = Parser::new(&self.0.sess.psess, orig_tokens, None);
parser.capture_cfg = true;
match parse_annotatable_with(&mut parser) {
Ok(a) => annotatable = a,

@@ -212,16 +210,16 @@ impl CfgEval<'_, '_> {
}
}

impl MutVisitor for CfgEval<'_, '_> {
impl MutVisitor for CfgEval<'_> {
#[instrument(level = "trace", skip(self))]
fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
self.cfg.configure_expr(expr, false);
self.0.configure_expr(expr, false);
mut_visit::noop_visit_expr(expr, self);
}

#[instrument(level = "trace", skip(self))]
fn visit_method_receiver_expr(&mut self, expr: &mut P<ast::Expr>) {
self.cfg.configure_expr(expr, true);
self.0.configure_expr(expr, true);
mut_visit::noop_visit_expr(expr, self);
}
@@ -371,9 +371,14 @@ pub(crate) fn codegen_terminator_call<'tcx>(

// Handle special calls like intrinsics and empty drop glue.
let instance = if let ty::FnDef(def_id, fn_args) = *func.layout().ty.kind() {
let instance =
ty::Instance::expect_resolve(fx.tcx, ty::ParamEnv::reveal_all(), def_id, fn_args)
.polymorphize(fx.tcx);
let instance = ty::Instance::expect_resolve(
fx.tcx,
ty::ParamEnv::reveal_all(),
def_id,
fn_args,
source_info.span,
)
.polymorphize(fx.tcx);

if is_call_from_compiler_builtins_to_upstream_monomorphization(fx.tcx, instance) {
if target.is_some() {
@@ -4,6 +4,7 @@ use rustc_middle::ty::AssocKind;
use rustc_middle::ty::GenericArg;
use rustc_session::config::{sigpipe, EntryFnType};
use rustc_span::symbol::Ident;
use rustc_span::DUMMY_SP;

use crate::prelude::*;

@@ -119,6 +120,7 @@ pub(crate) fn maybe_create_entry_wrapper(
ParamEnv::reveal_all(),
report.def_id,
tcx.mk_args(&[GenericArg::from(main_ret_ty)]),
DUMMY_SP,
)
.polymorphize(tcx);

@@ -144,6 +146,7 @@ pub(crate) fn maybe_create_entry_wrapper(
ParamEnv::reveal_all(),
start_def_id,
tcx.mk_args(&[main_ret_ty.into()]),
DUMMY_SP,
)
.polymorphize(tcx);
let start_func_id = import_function(tcx, m, start_instance);
@@ -17,7 +17,7 @@ use rustc_middle::ty::layout::{
};
use rustc_middle::ty::{self, Instance, ParamEnv, PolyExistentialTraitRef, Ty, TyCtxt};
use rustc_session::Session;
use rustc_span::{source_map::respan, Span};
use rustc_span::{source_map::respan, Span, DUMMY_SP};
use rustc_target::abi::{
call::FnAbi, HasDataLayout, PointeeInfo, Size, TargetDataLayout, VariantIdx,
};

@@ -479,6 +479,7 @@ impl<'gcc, 'tcx> MiscMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
ty::ParamEnv::reveal_all(),
def_id,
ty::List::empty(),
DUMMY_SP,
);

let symbol_name = tcx.symbol_name(instance).name;
@@ -226,7 +226,8 @@ impl<'ll, 'tcx> ArgAbiExt<'ll, 'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
// when passed by value, making it smaller.
// - On some ABIs, the Rust layout { u16, u16, u16 } may be padded up to 8 bytes
// when passed by value, making it larger.
let copy_bytes = cmp::min(scratch_size.bytes(), self.layout.size.bytes());
let copy_bytes =
cmp::min(cast.unaligned_size(bx).bytes(), self.layout.size.bytes());
// Allocate some scratch space...
let llscratch = bx.alloca(scratch_size, scratch_align);
bx.lifetime_start(llscratch, scratch_size);
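The `min` in the new `copy_bytes` computation follows from the two comment cases above: either representation may be the smaller one, so only the bytes both actually have can be copied. A toy restatement (hypothetical byte counts, not real ABI queries):

fn safe_copy_len(cast_size_bytes: u64, rust_layout_bytes: u64) -> u64 {
    // cast smaller (e.g. { i64, i64 } passed as { i64, i32 }) -> cast size wins;
    // cast larger  (e.g. { u16, u16, u16 } padded to 8 bytes) -> layout size wins.
    cast_size_bytes.min(rust_layout_bytes)
}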
@@ -32,7 +32,7 @@ impl OwnedTargetMachine {
unique_section_names: bool,
trap_unreachable: bool,
singlethread: bool,
asm_comments: bool,
verbose_asm: bool,
emit_stack_size_section: bool,
relax_elf_relocations: bool,
use_init_array: bool,

@@ -64,7 +64,7 @@ impl OwnedTargetMachine {
unique_section_names,
trap_unreachable,
singlethread,
asm_comments,
verbose_asm,
emit_stack_size_section,
relax_elf_relocations,
use_init_array,
@@ -214,7 +214,7 @@ pub fn target_machine_factory(
sess.opts.unstable_opts.trap_unreachable.unwrap_or(sess.target.trap_unreachable);
let emit_stack_size_section = sess.opts.unstable_opts.emit_stack_sizes;

let asm_comments = sess.opts.unstable_opts.asm_comments;
let verbose_asm = sess.opts.unstable_opts.verbose_asm;
let relax_elf_relocations =
sess.opts.unstable_opts.relax_elf_relocations.unwrap_or(sess.target.relax_elf_relocations);

@@ -289,7 +289,7 @@ pub fn target_machine_factory(
unique_section_names,
trap_unreachable,
singlethread,
asm_comments,
verbose_asm,
emit_stack_size_section,
relax_elf_relocations,
use_init_array,
@@ -28,7 +28,7 @@ use rustc_session::config::{BranchProtection, CFGuard, CFProtection};
use rustc_session::config::{CrateType, DebugInfo, PAuthKey, PacRet};
use rustc_session::Session;
use rustc_span::source_map::Spanned;
use rustc_span::Span;
use rustc_span::{Span, DUMMY_SP};
use rustc_target::abi::{call::FnAbi, HasDataLayout, TargetDataLayout, VariantIdx};
use rustc_target::spec::{HasTargetSpec, RelocModel, Target, TlsModel};
use smallvec::SmallVec;

@@ -580,6 +580,7 @@ impl<'ll, 'tcx> MiscMethods<'tcx> for CodegenCx<'ll, 'tcx> {
ty::ParamEnv::reveal_all(),
def_id,
ty::List::empty(),
DUMMY_SP,
)),
_ => {
let name = name.unwrap_or("rust_eh_personality");
@@ -2185,7 +2185,7 @@ extern "C" {
UniqueSectionNames: bool,
TrapUnreachable: bool,
Singlethread: bool,
AsmComments: bool,
VerboseAsm: bool,
EmitStackSizeSection: bool,
RelaxELFRelocations: bool,
UseInitArray: bool,
@@ -2817,6 +2817,15 @@ fn rehome_sysroot_lib_dir(sess: &Session, lib_dir: &Path) -> PathBuf {
}
}

fn rehome_lib_path(sess: &Session, path: &Path) -> PathBuf {
if let Some(dir) = path.parent() {
let file_name = path.file_name().expect("library path has no file name component");
rehome_sysroot_lib_dir(sess, dir).join(file_name)
} else {
fix_windows_verbatim_for_gcc(path)
}
}

// Adds the static "rlib" versions of all crates to the command line.
// There's a bit of magic which happens here specifically related to LTO,
// namely that we remove upstream object files.

@@ -2847,15 +2856,8 @@ fn add_static_crate(
let src = &codegen_results.crate_info.used_crate_source[&cnum];
let cratepath = &src.rlib.as_ref().unwrap().0;

let mut link_upstream = |path: &Path| {
let rlib_path = if let Some(dir) = path.parent() {
let file_name = path.file_name().expect("rlib path has no file name path component");
rehome_sysroot_lib_dir(sess, dir).join(file_name)
} else {
fix_windows_verbatim_for_gcc(path)
};
cmd.link_staticlib_by_path(&rlib_path, false);
};
let mut link_upstream =
|path: &Path| cmd.link_staticlib_by_path(&rehome_lib_path(sess, path), false);

if !are_upstream_rust_objects_already_included(sess)
|| ignored_for_lto(sess, &codegen_results.crate_info, cnum)

@@ -2919,27 +2921,7 @@ fn add_static_crate(

// Same thing as above, but for dynamic crates instead of static crates.
fn add_dynamic_crate(cmd: &mut dyn Linker, sess: &Session, cratepath: &Path) {
// Just need to tell the linker about where the library lives and
// what its name is
let parent = cratepath.parent();
// When producing a dll, the MSVC linker may not actually emit a
// `foo.lib` file if the dll doesn't actually export any symbols, so we
// check to see if the file is there and just omit linking to it if it's
// not present.
if sess.target.is_like_msvc && !cratepath.with_extension("dll.lib").exists() {
return;
}
if let Some(dir) = parent {
cmd.include_path(&rehome_sysroot_lib_dir(sess, dir));
}
// "<dir>/name.dll -> name.dll" on windows-msvc
// "<dir>/name.dll -> name" on windows-gnu
// "<dir>/libname.<ext> -> name" elsewhere
let stem = if sess.target.is_like_msvc { cratepath.file_name() } else { cratepath.file_stem() };
let stem = stem.unwrap().to_str().unwrap();
// Convert library file-stem into a cc -l argument.
let prefix = if stem.starts_with("lib") && !sess.target.is_like_windows { 3 } else { 0 };
cmd.link_dylib_by_name(&stem[prefix..], false, true);
cmd.link_dylib_by_path(&rehome_lib_path(sess, cratepath), true);
}

fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool {
@@ -268,7 +268,12 @@ pub trait Linker {
false
}
fn set_output_kind(&mut self, output_kind: LinkOutputKind, out_filename: &Path);
fn link_dylib_by_name(&mut self, name: &str, verbatim: bool, as_needed: bool);
fn link_dylib_by_name(&mut self, _name: &str, _verbatim: bool, _as_needed: bool) {
bug!("dylib linked with unsupported linker")
}
fn link_dylib_by_path(&mut self, _path: &Path, _as_needed: bool) {
bug!("dylib linked with unsupported linker")
}
fn link_framework_by_name(&mut self, _name: &str, _verbatim: bool, _as_needed: bool) {
bug!("framework linked with unsupported linker")
}

@@ -403,28 +408,53 @@ impl<'a> GccLinker<'a> {
}
} else {
self.link_or_cc_arg("-shared");
if self.sess.target.is_like_windows {
// The output filename already contains `dll_suffix` so
// the resulting import library will have a name in the
// form of libfoo.dll.a
let implib_name =
out_filename.file_name().and_then(|file| file.to_str()).map(|file| {
format!(
"{}{}{}",
self.sess.target.staticlib_prefix,
file,
self.sess.target.staticlib_suffix
)
});
if let Some(implib_name) = implib_name {
let implib = out_filename.parent().map(|dir| dir.join(&implib_name));
if let Some(implib) = implib {
self.link_arg(&format!("--out-implib={}", (*implib).to_str().unwrap()));
}
if let Some(name) = out_filename.file_name() {
if self.sess.target.is_like_windows {
// The output filename already contains `dll_suffix` so
// the resulting import library will have a name in the
// form of libfoo.dll.a
let mut implib_name = OsString::from(&*self.sess.target.staticlib_prefix);
implib_name.push(name);
implib_name.push(&*self.sess.target.staticlib_suffix);
let mut out_implib = OsString::from("--out-implib=");
out_implib.push(out_filename.with_file_name(implib_name));
self.link_arg(out_implib);
} else {
// When dylibs are linked by a full path this value will get into `DT_NEEDED`
// instead of the full path, so the library can be later found in some other
// location than that specific path.
let mut soname = OsString::from("-soname=");
soname.push(name);
self.link_arg(soname);
}
}
}
}

fn with_as_needed(&mut self, as_needed: bool, f: impl FnOnce(&mut Self)) {
if !as_needed {
if self.sess.target.is_like_osx {
// FIXME(81490): ld64 doesn't support these flags but macOS 11
// has -needed-l{} / -needed_library {}
// but we have no way to detect that here.
self.sess.dcx().emit_warn(errors::Ld64UnimplementedModifier);
} else if self.is_gnu && !self.sess.target.is_like_windows {
self.link_arg("--no-as-needed");
} else {
self.sess.dcx().emit_warn(errors::LinkerUnsupportedModifier);
}
}

f(self);

if !as_needed {
if self.sess.target.is_like_osx {
// See above FIXME comment
} else if self.is_gnu && !self.sess.target.is_like_windows {
self.link_arg("--as-needed");
}
}
}
}

impl<'a> Linker for GccLinker<'a> {

@@ -506,27 +536,18 @@ impl<'a> Linker for GccLinker<'a> {
// to the linker.
return;
}
if !as_needed {
if self.sess.target.is_like_osx {
// FIXME(81490): ld64 doesn't support these flags but macOS 11
// has -needed-l{} / -needed_library {}
// but we have no way to detect that here.
self.sess.dcx().emit_warn(errors::Ld64UnimplementedModifier);
} else if self.is_gnu && !self.sess.target.is_like_windows {
self.link_arg("--no-as-needed");
} else {
self.sess.dcx().emit_warn(errors::LinkerUnsupportedModifier);
}
}
self.hint_dynamic();
self.link_or_cc_arg(format!("-l{}{name}", if verbatim && self.is_gnu { ":" } else { "" },));
if !as_needed {
if self.sess.target.is_like_osx {
// See above FIXME comment
} else if self.is_gnu && !self.sess.target.is_like_windows {
self.link_arg("--as-needed");
}
}
self.with_as_needed(as_needed, |this| {
let colon = if verbatim && this.is_gnu { ":" } else { "" };
this.link_or_cc_arg(format!("-l{colon}{name}"));
});
}

fn link_dylib_by_path(&mut self, path: &Path, as_needed: bool) {
self.hint_dynamic();
self.with_as_needed(as_needed, |this| {
this.link_or_cc_arg(path);
})
}

fn link_framework_by_name(&mut self, name: &str, _verbatim: bool, as_needed: bool) {

@@ -861,6 +882,15 @@ impl<'a> Linker for MsvcLinker<'a> {
self.link_arg(format!("{}{}", name, if verbatim { "" } else { ".lib" }));
}

fn link_dylib_by_path(&mut self, path: &Path, _as_needed: bool) {
// When producing a dll, MSVC linker may not emit an implib file if the dll doesn't export
// any symbols, so we skip linking if the implib file is not present.
let implib_path = path.with_extension("dll.lib");
if implib_path.exists() {
self.link_or_cc_arg(implib_path);
}
}

fn link_staticlib_by_name(&mut self, name: &str, verbatim: bool, whole_archive: bool) {
let prefix = if whole_archive { "/WHOLEARCHIVE:" } else { "" };
let suffix = if verbatim { "" } else { ".lib" };

@@ -1083,6 +1113,10 @@ impl<'a> Linker for EmLinker<'a> {
self.link_or_cc_args(&["-l", name]);
}

fn link_dylib_by_path(&mut self, path: &Path, _as_needed: bool) {
self.link_or_cc_arg(path);
}

fn link_staticlib_by_name(&mut self, name: &str, _verbatim: bool, _whole_archive: bool) {
self.link_or_cc_args(&["-l", name]);
}

@@ -1240,6 +1274,10 @@ impl<'a> Linker for WasmLd<'a> {
self.link_or_cc_args(&["-l", name]);
}

fn link_dylib_by_path(&mut self, path: &Path, _as_needed: bool) {
self.link_or_cc_arg(path);
}

fn link_staticlib_by_name(&mut self, name: &str, _verbatim: bool, whole_archive: bool) {
if !whole_archive {
self.link_or_cc_args(&["-l", name]);

@@ -1368,10 +1406,6 @@ impl<'a> Linker for L4Bender<'a> {

fn set_output_kind(&mut self, _output_kind: LinkOutputKind, _out_filename: &Path) {}

fn link_dylib_by_name(&mut self, _name: &str, _verbatim: bool, _as_needed: bool) {
bug!("dylibs are not supported on L4Re");
}

fn link_staticlib_by_name(&mut self, name: &str, _verbatim: bool, whole_archive: bool) {
self.hint_static();
if !whole_archive {

@@ -1536,6 +1570,11 @@ impl<'a> Linker for AixLinker<'a> {
self.link_or_cc_arg(format!("-l{name}"));
}

fn link_dylib_by_path(&mut self, path: &Path, _as_needed: bool) {
self.hint_dynamic();
self.link_or_cc_arg(path);
}

fn link_staticlib_by_name(&mut self, name: &str, verbatim: bool, whole_archive: bool) {
self.hint_static();
if !whole_archive {

@@ -1721,10 +1760,6 @@ impl<'a> Linker for PtxLinker<'a> {

fn set_output_kind(&mut self, _output_kind: LinkOutputKind, _out_filename: &Path) {}

fn link_dylib_by_name(&mut self, _name: &str, _verbatim: bool, _as_needed: bool) {
panic!("external dylibs not supported")
}

fn link_staticlib_by_name(&mut self, _name: &str, _verbatim: bool, _whole_archive: bool) {
panic!("staticlibs not supported")
}

@@ -1791,10 +1826,6 @@ impl<'a> Linker for LlbcLinker<'a> {

fn set_output_kind(&mut self, _output_kind: LinkOutputKind, _out_filename: &Path) {}

fn link_dylib_by_name(&mut self, _name: &str, _verbatim: bool, _as_needed: bool) {
panic!("external dylibs not supported")
}

fn link_staticlib_by_name(&mut self, _name: &str, _verbatim: bool, _whole_archive: bool) {
panic!("staticlibs not supported")
}

@@ -1866,10 +1897,6 @@ impl<'a> Linker for BpfLinker<'a> {

fn set_output_kind(&mut self, _output_kind: LinkOutputKind, _out_filename: &Path) {}

fn link_dylib_by_name(&mut self, _name: &str, _verbatim: bool, _as_needed: bool) {
panic!("external dylibs not supported")
}

fn link_staticlib_by_name(&mut self, _name: &str, _verbatim: bool, _whole_archive: bool) {
panic!("staticlibs not supported")
}
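The `Linker` trait hunk above replaces per-backend required methods with defaults that raise an internal bug, so only backends that actually support dylibs override them. A minimal sketch of that pattern (stand-in types; rustc's `bug!` macro approximated with `unreachable!`):

trait Linker {
    // Default body: reaching this on a linker that never links dylibs
    // indicates a compiler bug, not a user error.
    fn link_dylib_by_path(&mut self, _path: &std::path::Path, _as_needed: bool) {
        unreachable!("dylib linked with unsupported linker")
    }
}

struct PtxLinker; // a target whose linker has no dylib concept
impl Linker for PtxLinker {} // inherits the "bug" default

struct GccLinker { args: Vec<String> }
impl Linker for GccLinker {
    fn link_dylib_by_path(&mut self, path: &std::path::Path, _as_needed: bool) {
        self.args.push(path.display().to_string());
    }
}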
@@ -37,7 +37,7 @@ use rustc_middle::ty::{self, Instance, Ty, TyCtxt};
use rustc_session::config::{self, CrateType, EntryFnType, OptLevel, OutputType};
use rustc_session::Session;
use rustc_span::symbol::sym;
use rustc_span::Symbol;
use rustc_span::{Symbol, DUMMY_SP};
use rustc_target::abi::FIRST_VARIANT;

use std::cmp;

@@ -467,6 +467,7 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
ty::ParamEnv::reveal_all(),
start_def_id,
cx.tcx().mk_args(&[main_ret_ty.into()]),
DUMMY_SP,
);
let start_fn = cx.get_fn_addr(start_instance);
@@ -403,7 +403,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
//
// Why only in unoptimized builds?
// - In unoptimized builds LLVM uses FastISel which does not support switches, so it
// must fall back to the to the slower SelectionDAG isel. Therefore, using `br` gives
// must fall back to the slower SelectionDAG isel. Therefore, using `br` gives
// significant compile time speedups for unoptimized builds.
// - In optimized builds the above doesn't hold, and using `br` sometimes results in
// worse generated code because LLVM can no longer tell that the value being switched

@@ -842,6 +842,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
ty::ParamEnv::reveal_all(),
def_id,
args,
fn_span,
)
.polymorphize(bx.tcx()),
),

@@ -1521,7 +1522,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
// when passed by value, making it smaller.
// - On some ABIs, the Rust layout { u16, u16, u16 } may be padded up to 8 bytes
// when passed by value, making it larger.
let copy_bytes = cmp::min(scratch_size.bytes(), arg.layout.size.bytes());
let copy_bytes = cmp::min(cast.unaligned_size(bx).bytes(), arg.layout.size.bytes());
// Allocate some scratch space...
let llscratch = bx.alloca(scratch_size, scratch_align);
bx.lifetime_start(llscratch, scratch_size);
@ -47,7 +47,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let expected_ty = self.monomorphize(self.mir.local_decls[local].ty);
if expected_ty != op.layout.ty {
warn!(
"Unexpected initial operand type: expected {expected_ty:?}, found {:?}.\
"Unexpected initial operand type:\nexpected {expected_ty:?},\nfound {:?}.\n\
See <https://github.com/rust-lang/rust/issues/114858>.",
op.layout.ty
);
@ -230,10 +230,20 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
let layout = start_bx.layout_of(fx.monomorphize(decl.ty));
assert!(!layout.ty.has_erasable_regions());

if local == mir::RETURN_PLACE && fx.fn_abi.ret.is_indirect() {
debug!("alloc: {:?} (return place) -> place", local);
let llretptr = start_bx.get_param(0);
return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
if local == mir::RETURN_PLACE {
match fx.fn_abi.ret.mode {
PassMode::Indirect { .. } => {
debug!("alloc: {:?} (return place) -> place", local);
let llretptr = start_bx.get_param(0);
return LocalRef::Place(PlaceRef::new_sized(llretptr, layout));
}
PassMode::Cast { ref cast, .. } => {
debug!("alloc: {:?} (return place) -> place", local);
let size = cast.size(&start_bx);
return LocalRef::Place(PlaceRef::alloca_size(&mut start_bx, size, layout));
}
_ => {}
};
}

if memory_locals.contains(local) {
@ -108,9 +108,17 @@ impl<'a, 'tcx, V: CodegenObject> PlaceRef<'tcx, V> {
pub fn alloca<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
bx: &mut Bx,
layout: TyAndLayout<'tcx>,
) -> Self {
Self::alloca_size(bx, layout.size, layout)
}

pub fn alloca_size<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
bx: &mut Bx,
size: Size,
layout: TyAndLayout<'tcx>,
) -> Self {
assert!(layout.is_sized(), "tried to statically allocate unsized place");
PlaceValue::alloca(bx, layout.size, layout.align.abi).with_type(layout)
PlaceValue::alloca(bx, size, layout.align.abi).with_type(layout)
}

/// Returns a place for an indirect reference to an unsized place.
@ -768,7 +768,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
is_trait = true;

if let Ok(Some(instance)) =
Instance::resolve(tcx, param_env, callee, fn_args)
Instance::try_resolve(tcx, param_env, callee, fn_args)
&& let InstanceKind::Item(def) = instance.def
{
// Resolve a trait method call to its concrete implementation, which may be in a
@ -253,6 +253,7 @@ impl<'tcx> CompileTimeInterpCx<'tcx> {
ty::ParamEnv::reveal_all(),
const_def_id,
instance.args,
self.cur_span(),
);

return Ok(Some(new_instance));
@ -245,7 +245,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
// The tag of a `Single` enum is like the tag of the niched
// variant: there's no tag as the discriminant is encoded
// entirely implicitly. If `write_discriminant` ever hits this
// case, we do a "validation read" to ensure the the right
// case, we do a "validation read" to ensure the right
// discriminant is encoded implicitly, so any attempt to write
// the wrong discriminant for a `Single` enum will reliably
// result in UB.
@ -618,7 +618,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
trace!("resolve: {:?}, {:#?}", def, args);
trace!("param_env: {:#?}", self.param_env);
trace!("args: {:#?}", args);
match ty::Instance::resolve(*self.tcx, self.param_env, def, args) {
match ty::Instance::try_resolve(*self.tcx, self.param_env, def, args) {
Ok(Some(instance)) => Ok(instance),
Ok(None) => throw_inval!(TooGeneric),

@ -883,13 +883,13 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
ty::ExistentialTraitRef::erase_self_ty(tcx, virtual_trait_ref);
let concrete_trait_ref = existential_trait_ref.with_self_ty(tcx, dyn_ty);

let concrete_method = Instance::resolve_for_vtable(
let concrete_method = Instance::expect_resolve_for_vtable(
tcx,
self.param_env,
def_id,
instance.args.rebase_onto(tcx, trait_def_id, concrete_trait_ref.args),
)
.unwrap();
self.cur_span(),
);
assert_eq!(fn_inst, concrete_method);
}

@ -35,11 +35,11 @@
//! | | | |
//! | `ParallelIterator` | `Iterator` | `rayon::iter::ParallelIterator` |
//!
//! [^1] `MTLock` is similar to `Lock`, but the serial version avoids the cost
//! [^1]: `MTLock` is similar to `Lock`, but the serial version avoids the cost
//! of a `RefCell`. This is appropriate when interior mutability is not
//! required.
//!
//! [^2] `MTRef`, `MTLockRef` are type aliases.
//! [^2]: `MTRef`, `MTLockRef` are type aliases.

pub use crate::marker::*;
use std::collections::HashMap;
@ -30,7 +30,7 @@ use rustc_errors::{
};
use rustc_feature::find_gated_cfg;
use rustc_interface::util::{self, get_codegen_backend};
use rustc_interface::{interface, passes, Queries};
use rustc_interface::{interface, passes, Linker, Queries};
use rustc_lint::unerased_lint_store;
use rustc_metadata::creader::MetadataLoader;
use rustc_metadata::locator;
@ -41,7 +41,6 @@ use rustc_session::getopts::{self, Matches};
use rustc_session::lint::{Lint, LintId};
use rustc_session::output::collect_crate_types;
use rustc_session::{config, filesearch, EarlyDiagCtxt, Session};
use rustc_span::def_id::LOCAL_CRATE;
use rustc_span::source_map::FileLoader;
use rustc_span::symbol::sym;
use rustc_span::FileName;
@ -448,21 +447,9 @@ fn run_compiler(
return early_exit();
}

let linker = queries.codegen_and_build_linker()?;

// This must run after monomorphization so that all generic types
// have been instantiated.
if sess.opts.unstable_opts.print_type_sizes {
sess.code_stats.print_type_sizes();
}

if sess.opts.unstable_opts.print_vtable_sizes {
let crate_name = queries.global_ctxt()?.enter(|tcx| tcx.crate_name(LOCAL_CRATE));

sess.code_stats.print_vtable_sizes(crate_name);
}

Ok(Some(linker))
queries.global_ctxt()?.enter(|tcx| {
Ok(Some(Linker::codegen_and_build_linker(tcx, &*compiler.codegen_backend)?))
})
})?;

// Linking is done outside the `compiler.enter()` so that the
@ -120,21 +120,21 @@ struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> {

struct BestFailure {
token: Token,
position_in_tokenstream: usize,
position_in_tokenstream: u32,
msg: &'static str,
remaining_matcher: MatcherLoc,
}

impl BestFailure {
fn is_better_position(&self, position: usize) -> bool {
fn is_better_position(&self, position: u32) -> bool {
position > self.position_in_tokenstream
}
}

impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> {
type Failure = (Token, usize, &'static str);
type Failure = (Token, u32, &'static str);

fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
(tok, position, msg)
}

@ -211,9 +211,9 @@ impl<'matcher> FailureForwarder<'matcher> {
}

impl<'matcher> Tracker<'matcher> for FailureForwarder<'matcher> {
type Failure = (Token, usize, &'static str);
type Failure = (Token, u32, &'static str);

fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure {
fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure {
(tok, position, msg)
}

@ -452,7 +452,7 @@ impl TtParser {
&mut self,
matcher: &'matcher [MatcherLoc],
token: &Token,
approx_position: usize,
approx_position: u32,
track: &mut T,
) -> Option<NamedParseResult<T::Failure>> {
// Matcher positions that would be valid if the macro invocation was over now. Only
@ -153,7 +153,7 @@ pub(super) trait Tracker<'matcher> {
/// Arm failed to match. If the token is `token::Eof`, it indicates an unexpected
/// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
/// The usize is the approximate position of the token in the input token stream.
fn build_failure(tok: Token, position: usize, msg: &'static str) -> Self::Failure;
fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

/// This is called before trying to match next MatcherLoc on the current token.
fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}
@ -182,7 +182,7 @@ pub(super) struct NoopTracker;
impl<'matcher> Tracker<'matcher> for NoopTracker {
type Failure = ();

fn build_failure(_tok: Token, _position: usize, _msg: &'static str) -> Self::Failure {}
fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

fn description() -> &'static str {
"none"
@ -2414,7 +2414,7 @@ pub enum ImplItemKind<'hir> {
/// * the `A: Bound` in `Trait<A: Bound>`
/// * the `RetTy` in `Trait(ArgTy, ArgTy) -> RetTy`
/// * the `C = { Ct }` in `Trait<C = { Ct }>` (feature `associated_const_equality`)
/// * the `f(): Bound` in `Trait<f(): Bound>` (feature `return_type_notation`)
/// * the `f(..): Bound` in `Trait<f(..): Bound>` (feature `return_type_notation`)
#[derive(Debug, Clone, Copy, HashStable_Generic)]
pub struct AssocItemConstraint<'hir> {
pub hir_id: HirId,
@ -11,6 +11,7 @@ use crate::def_id::DefId;
use crate::{MethodKind, Target};

use rustc_ast as ast;
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
use rustc_span::symbol::{kw, sym, Symbol};
@ -23,6 +24,7 @@ pub struct LanguageItems {
/// Mappings from lang items to their possibly found [`DefId`]s.
/// The index corresponds to the order in [`LangItem`].
items: [Option<DefId>; std::mem::variant_count::<LangItem>()],
reverse_items: FxIndexMap<DefId, LangItem>,
/// Lang items that were not found during collection.
pub missing: Vec<LangItem>,
}
@ -30,7 +32,11 @@ pub struct LanguageItems {
impl LanguageItems {
/// Construct an empty collection of lang items and no missing ones.
pub fn new() -> Self {
Self { items: [None; std::mem::variant_count::<LangItem>()], missing: Vec::new() }
Self {
items: [None; std::mem::variant_count::<LangItem>()],
reverse_items: FxIndexMap::default(),
missing: Vec::new(),
}
}

pub fn get(&self, item: LangItem) -> Option<DefId> {
@ -39,6 +45,11 @@ impl LanguageItems {

pub fn set(&mut self, item: LangItem, def_id: DefId) {
self.items[item as usize] = Some(def_id);
self.reverse_items.insert(def_id, item);
}

pub fn from_def_id(&self, def_id: DefId) -> Option<LangItem> {
self.reverse_items.get(&def_id).copied()
}

pub fn iter(&self) -> impl Iterator<Item = (LangItem, DefId)> + '_ {
@ -708,7 +708,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// else an error would have been flagged by the
// `loops` pass for using break with an expression
// where you are not supposed to.
assert!(expr_opt.is_none() || self.dcx().has_errors().is_some());
assert!(expr_opt.is_none() || self.tainted_by_errors().is_some());
}

// If we encountered a `break`, then (no surprise) it may be possible to break from the
@ -734,9 +734,7 @@ impl<'tcx, Cx: TypeInformationCtxt<'tcx>, D: Delegate<'tcx>> ExprUseVisitor<'tcx
// struct; however, when EUV is run during typeck, it
// may not. This will generate an error earlier in typeck,
// so we can just ignore it.
if self.cx.tcx().dcx().has_errors().is_none() {
span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
}
span_bug!(with_expr.span, "with expression doesn't evaluate to a struct");
}
}

@ -1678,7 +1678,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

self.warn_if_unreachable(stmt.hir_id, stmt.span, "statement");

// Hide the outer diverging and `has_errors` flags.
// Hide the outer diverging flags.
let old_diverges = self.diverges.replace(Diverges::Maybe);

match stmt.kind {
@ -510,9 +510,12 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> {
.report_mismatched_types(&cause, method_self_ty, self_ty, terr)
.emit();
} else {
error!("{self_ty} was a subtype of {method_self_ty} but now is not?");
// This must already have errored elsewhere.
self.dcx().has_errors().unwrap();
// This has/will have errored in wfcheck, which we cannot depend on from here, as typeck on functions
// may run before wfcheck if the function is used in const eval.
self.dcx().span_delayed_bug(
cause.span(),
format!("{self_ty} was a subtype of {method_self_ty} but now is not?"),
);
}
}
}
@ -499,7 +499,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}
}

// If the shadowed binding has an an itializer expression,
// If the shadowed binding has an itializer expression,
// use the initializer expression'ty to try to find the method again.
// For example like: `let mut x = Vec::new();`,
// `Vec::new()` is the itializer expression.
@ -968,7 +968,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}

// Make sure that, if any traits other than the found ones were involved,
// we don't don't report an unimplemented trait.
// we don't report an unimplemented trait.
// We don't want to say that `iter::Cloned` is not an iterator, just
// because of some non-Clone item being iterated over.
for (predicate, _parent_pred, _cause) in unsatisfied_predicates {
@ -2129,7 +2129,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let target_ty = self
.autoderef(sugg_span, rcvr_ty)
.find(|(rcvr_ty, _)| {
DeepRejectCtxt { treat_obligation_params: TreatParams::AsCandidateKey }
DeepRejectCtxt::new(self.tcx, TreatParams::ForLookup)
.types_may_unify(*rcvr_ty, impl_ty)
})
.map_or(impl_ty, |(ty, _)| ty)
@ -219,28 +219,9 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
if let hir::ExprKind::Index(ref base, ref index, _) = e.kind {
// All valid indexing looks like this; might encounter non-valid indexes at this point.
let base_ty = self.typeck_results.expr_ty_adjusted_opt(base);
if base_ty.is_none() {
// When encountering `return [0][0]` outside of a `fn` body we can encounter a base
// that isn't in the type table. We assume more relevant errors have already been
// emitted. (#64638)
assert!(self.tcx().dcx().has_errors().is_some(), "bad base: `{base:?}`");
}
if let Some(base_ty) = base_ty
&& let ty::Ref(_, base_ty_inner, _) = *base_ty.kind()
{
let index_ty =
self.typeck_results.expr_ty_adjusted_opt(index).unwrap_or_else(|| {
// When encountering `return [0][0]` outside of a `fn` body we would attempt
// to access an nonexistent index. We assume that more relevant errors will
// already have been emitted, so we only gate on this with an ICE if no
// error has been emitted. (#64638)
Ty::new_error_with_message(
self.fcx.tcx,
e.span,
format!("bad index {index:?} for base: `{base:?}`"),
)
});
let base_ty = self.typeck_results.expr_ty_adjusted(base);
if let ty::Ref(_, base_ty_inner, _) = *base_ty.kind() {
let index_ty = self.typeck_results.expr_ty_adjusted(index);
if self.is_builtin_index(e, base_ty_inner, index_ty) {
// Remove the method call record
self.typeck_results.type_dependent_defs_mut().remove(e.hir_id);
@ -534,7 +534,7 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
let tcx = self.tcx();

// Find the method being called.
let Ok(Some(instance)) = ty::Instance::resolve(
let Ok(Some(instance)) = ty::Instance::try_resolve(
tcx,
ctxt.param_env,
ctxt.assoc_item.def_id,
@ -16,7 +16,7 @@ pub mod util;
pub use callbacks::setup_callbacks;
pub use interface::{run_compiler, Config};
pub use passes::DEFAULT_QUERY_PROVIDERS;
pub use queries::Queries;
pub use queries::{Linker, Queries};

#[cfg(test)]
mod tests;
@ -65,12 +65,6 @@ impl<'a, 'tcx> QueryResult<'a, &'tcx GlobalCtxt<'tcx>> {
}
}

impl<T> Default for Query<T> {
fn default() -> Self {
Query { result: RefCell::new(None) }
}
}

pub struct Queries<'tcx> {
compiler: &'tcx Compiler,
gcx_cell: OnceLock<GlobalCtxt<'tcx>>,
@ -90,8 +84,8 @@ impl<'tcx> Queries<'tcx> {
gcx_cell: OnceLock::new(),
arena: WorkerLocal::new(|_| Arena::default()),
hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
parse: Default::default(),
gcx: Default::default(),
parse: Query { result: RefCell::new(None) },
gcx: Query { result: RefCell::new(None) },
}
}

@ -116,23 +110,6 @@ impl<'tcx> Queries<'tcx> {
)
})
}

pub fn codegen_and_build_linker(&'tcx self) -> Result<Linker> {
self.global_ctxt()?.enter(|tcx| {
let ongoing_codegen = passes::start_codegen(&*self.compiler.codegen_backend, tcx)?;

Ok(Linker {
dep_graph: tcx.dep_graph.clone(),
output_filenames: tcx.output_filenames(()).clone(),
crate_hash: if tcx.needs_crate_hash() {
Some(tcx.crate_hash(LOCAL_CRATE))
} else {
None
},
ongoing_codegen,
})
})
}
}

pub struct Linker {
@ -144,6 +121,36 @@ pub struct Linker {
}

impl Linker {
pub fn codegen_and_build_linker(
tcx: TyCtxt<'_>,
codegen_backend: &dyn CodegenBackend,
) -> Result<Linker> {
let ongoing_codegen = passes::start_codegen(codegen_backend, tcx)?;

// This must run after monomorphization so that all generic types
// have been instantiated.
if tcx.sess.opts.unstable_opts.print_type_sizes {
tcx.sess.code_stats.print_type_sizes();
}

if tcx.sess.opts.unstable_opts.print_vtable_sizes {
let crate_name = tcx.crate_name(LOCAL_CRATE);

tcx.sess.code_stats.print_vtable_sizes(crate_name);
}

Ok(Linker {
dep_graph: tcx.dep_graph.clone(),
output_filenames: tcx.output_filenames(()).clone(),
crate_hash: if tcx.needs_crate_hash() {
Some(tcx.crate_hash(LOCAL_CRATE))
} else {
None
},
ongoing_codegen,
})
}

pub fn link(self, sess: &Session, codegen_backend: &dyn CodegenBackend) -> Result<()> {
let (codegen_results, work_products) =
codegen_backend.join_codegen(self.ongoing_codegen, sess, &self.output_filenames);
@ -197,7 +204,7 @@ impl Compiler {
F: for<'tcx> FnOnce(&'tcx Queries<'tcx>) -> T,
{
// Must declare `_timer` first so that it is dropped after `queries`.
let mut _timer = None;
let _timer;
let queries = Queries::new(self);
let ret = f(&queries);

@ -220,7 +227,7 @@ impl Compiler {

// The timer's lifetime spans the dropping of `queries`, which contains
// the global context.
_timer = Some(self.sess.timer("free_global_ctxt"));
_timer = self.sess.timer("free_global_ctxt");
if let Err((path, error)) = queries.finish() {
self.sess.dcx().emit_fatal(errors::FailedWritingFile { path: &path, error });
}
@ -757,7 +757,6 @@ fn test_unstable_options_tracking_hash() {
// tidy-alphabetical-start
tracked!(allow_features, Some(vec![String::from("lang_items")]));
tracked!(always_encode_mir, true);
tracked!(asm_comments, true);
tracked!(assume_incomplete_release, true);
tracked!(binary_dep_depinfo, true);
tracked!(box_noalias, false);
@ -862,6 +861,7 @@ fn test_unstable_options_tracking_hash() {
tracked!(uninit_const_chunk_threshold, 123);
tracked!(unleash_the_miri_inside_of_you, true);
tracked!(use_ctors_section, Some(true));
tracked!(verbose_asm, true);
tracked!(verify_llvm_ir, true);
tracked!(virtual_function_elimination, true);
tracked!(wasi_exec_model, Some(WasiExecModel::Reactor));
@ -88,7 +88,7 @@ declare_lint_pass!(QueryStability => [POTENTIAL_QUERY_INSTABILITY]);
impl LateLintPass<'_> for QueryStability {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
let Some((span, def_id, args)) = typeck_results_of_method_fn(cx, expr) else { return };
if let Ok(Some(instance)) = ty::Instance::resolve(cx.tcx, cx.param_env, def_id, args) {
if let Ok(Some(instance)) = ty::Instance::try_resolve(cx.tcx, cx.param_env, def_id, args) {
let def_id = instance.def_id();
if cx.tcx.has_attr(def_id, sym::rustc_lint_query_instability) {
cx.emit_span_lint(
@ -393,7 +393,7 @@ impl LateLintPass<'_> for Diagnostics {
};

// Is the callee marked with `#[rustc_lint_diagnostics]`?
let has_attr = ty::Instance::resolve(cx.tcx, cx.param_env, def_id, fn_gen_args)
let has_attr = ty::Instance::try_resolve(cx.tcx, cx.param_env, def_id, fn_gen_args)
.ok()
.flatten()
.is_some_and(|inst| cx.tcx.has_attr(inst.def_id(), sym::rustc_lint_diagnostics));
@ -96,7 +96,9 @@ impl<'tcx> LateLintPass<'tcx> for NoopMethodCall {
.tcx
.normalize_erasing_regions(cx.param_env, cx.typeck_results().node_args(expr.hir_id));
// Resolve the trait method instance.
let Ok(Some(i)) = ty::Instance::resolve(cx.tcx, cx.param_env, did, args) else { return };
let Ok(Some(i)) = ty::Instance::try_resolve(cx.tcx, cx.param_env, did, args) else {
return;
};
// (Re)check that it implements the noop diagnostic.
let Some(name) = cx.tcx.get_diagnostic_name(i.def_id()) else { return };
if !matches!(
@ -407,7 +407,7 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
const char *ABIStr, LLVMRustCodeModel RustCM, LLVMRustRelocModel RustReloc,
LLVMRustCodeGenOptLevel RustOptLevel, bool UseSoftFloat,
bool FunctionSections, bool DataSections, bool UniqueSectionNames,
bool TrapUnreachable, bool Singlethread, bool AsmComments,
bool TrapUnreachable, bool Singlethread, bool VerboseAsm,
bool EmitStackSizeSection, bool RelaxELFRelocations, bool UseInitArray,
const char *SplitDwarfFile, const char *OutputObjFile,
const char *DebugInfoCompression, bool UseEmulatedTls,
@ -435,8 +435,9 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
Options.DataSections = DataSections;
Options.FunctionSections = FunctionSections;
Options.UniqueSectionNames = UniqueSectionNames;
Options.MCOptions.AsmVerbose = AsmComments;
Options.MCOptions.PreserveAsmComments = AsmComments;
Options.MCOptions.AsmVerbose = VerboseAsm;
// Always preserve comments that were written by the user
Options.MCOptions.PreserveAsmComments = true;
Options.MCOptions.ABIName = ABIStr;
if (SplitDwarfFile) {
Options.MCOptions.SplitDwarfFile = SplitDwarfFile;
@ -41,6 +41,9 @@ middle_cannot_be_normalized =
middle_conflict_types =
this expression supplies two conflicting concrete types for the same opaque type

middle_consider_type_length_limit =
consider adding a `#![type_length_limit="{$type_length}"]` attribute to your crate

middle_const_eval_non_int =
constant evaluation of enum discriminant resulted in non-integer

@ -94,8 +97,11 @@ middle_strict_coherence_needs_negative_coherence =
to use `strict_coherence` on this trait, the `with_negative_coherence` feature must be enabled
.label = due to this attribute

middle_type_length_limit = reached the type-length limit while instantiating `{$shrunk}`

middle_unknown_layout =
the type `{$ty}` has an unknown layout

middle_values_too_big =
values of the type `{$ty}` are too big for the current architecture
middle_written_to_path = the full type name has been written to '{$path}'
@ -1,4 +1,5 @@
use std::fmt;
use std::path::PathBuf;

use rustc_errors::{codes::*, DiagArgName, DiagArgValue, DiagMessage};
use rustc_macros::{Diagnostic, Subdiagnostic};
@ -149,3 +150,16 @@ pub struct ErroneousConstant {

/// Used by `rustc_const_eval`
pub use crate::fluent_generated::middle_adjust_for_foreign_abi_error;

#[derive(Diagnostic)]
#[diag(middle_type_length_limit)]
#[help(middle_consider_type_length_limit)]
pub struct TypeLengthLimit {
#[primary_span]
pub span: Span,
pub shrunk: String,
#[note(middle_written_to_path)]
pub was_written: Option<()>,
pub path: PathBuf,
pub type_length: usize,
}
@ -27,6 +27,10 @@ impl<'tcx> TyCtxt<'tcx> {
self.lang_items().get(lang_item) == Some(def_id)
}

pub fn as_lang_item(self, def_id: DefId) -> Option<LangItem> {
self.lang_items().from_def_id(def_id)
}

/// Given a [`DefId`] of one of the [`Fn`], [`FnMut`] or [`FnOnce`] traits,
/// returns a corresponding [`ty::ClosureKind`].
/// For any other [`DefId`] return `None`.
@ -30,7 +30,7 @@ pub fn provide(providers: &mut Providers) {
tcx.hir().krate_attrs(),
tcx.sess,
sym::type_length_limit,
1048576,
2usize.pow(24),
),
}
}
@ -73,7 +73,7 @@ impl<'tcx> TyCtxt<'tcx> {
bug!("did not expect inference variables here");
}

match ty::Instance::resolve(
match ty::Instance::try_resolve(
self, param_env,
// FIXME: maybe have a separate version for resolving mir::UnevaluatedConst?
ct.def, ct.args,
@ -106,7 +106,7 @@ impl<'tcx> TyCtxt<'tcx> {
bug!("did not expect inference variables here");
}

match ty::Instance::resolve(self, param_env, ct.def, ct.args) {
match ty::Instance::try_resolve(self, param_env, ct.def, ct.args) {
Ok(Some(instance)) => {
let cid = GlobalId { instance, promoted: None };
self.const_eval_global_id_for_typeck(param_env, cid, span).inspect(|_| {
@ -274,6 +274,9 @@ pub enum ConstraintCategory<'tcx> {

/// A constraint that doesn't correspond to anything the user sees.
Internal,

/// An internal constraint derived from an illegal universe relation.
IllegalUniverse,
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)]
@ -2197,8 +2197,8 @@ rustc_queries! {
/// * `Err(ErrorGuaranteed)` when the `Instance` resolution process
/// couldn't complete due to errors elsewhere - this is distinct
/// from `Ok(None)` to avoid misleading diagnostics when an error
/// has already been/will be emitted, for the original cause
query resolve_instance(
/// has already been/will be emitted, for the original cause.
query resolve_instance_raw(
key: ty::ParamEnvAnd<'tcx, (DefId, GenericArgsRef<'tcx>)>
) -> Result<Option<ty::Instance<'tcx>>, ErrorGuaranteed> {
desc { "resolving instance `{}`", ty::Instance::new(key.value.0, key.value.1) }
@ -237,7 +237,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// Eg: 1. `foo.x` which is represented using `projections=[Field(x)]` is an ancestor of
/// `foo.x.y` which is represented using `projections=[Field(x), Field(y)]`.
/// Note both `foo.x` and `foo.x.y` start off of the same root variable `foo`.
/// 2. Since we only look at the projections here function will return `bar.x` as an a valid
/// 2. Since we only look at the projections here function will return `bar.x` as a valid
/// ancestor of `foo.x.y`. It's the caller's responsibility to ensure that both projections
/// list are being applied to the same root variable.
pub fn is_ancestor_or_same_capture(
@ -366,6 +366,10 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
self.is_lang_item(def_id, trait_lang_item_to_lang_item(lang_item))
}

fn as_lang_item(self, def_id: DefId) -> Option<TraitSolverLangItem> {
lang_item_to_trait_lang_item(self.lang_items().from_def_id(def_id)?)
}

fn associated_type_def_ids(self, def_id: DefId) -> impl IntoIterator<Item = DefId> {
self.associated_items(def_id)
.in_definition_order()
@ -373,17 +377,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
.map(|assoc_item| assoc_item.def_id)
}

fn args_may_unify_deep(
self,
obligation_args: ty::GenericArgsRef<'tcx>,
impl_args: ty::GenericArgsRef<'tcx>,
) -> bool {
ty::fast_reject::DeepRejectCtxt {
treat_obligation_params: ty::fast_reject::TreatParams::ForLookup,
}
.args_may_unify(obligation_args, impl_args)
}

// This implementation is a bit different from `TyCtxt::for_each_relevant_impl`,
// since we want to skip over blanket impls for non-rigid aliases, and also we
// only want to consider types that *actually* unify with float/int vars.
@ -533,14 +526,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
self.trait_def(trait_def_id).implement_via_object
}

fn fn_trait_kind_from_def_id(self, trait_def_id: DefId) -> Option<ty::ClosureKind> {
self.fn_trait_kind_from_def_id(trait_def_id)
}

fn async_fn_trait_kind_from_def_id(self, trait_def_id: DefId) -> Option<ty::ClosureKind> {
self.async_fn_trait_kind_from_def_id(trait_def_id)
}

fn supertrait_def_ids(self, trait_def_id: DefId) -> impl IntoIterator<Item = DefId> {
self.supertrait_def_ids(trait_def_id)
}
@ -584,46 +569,69 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
}
}

fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
match lang_item {
TraitSolverLangItem::AsyncDestruct => LangItem::AsyncDestruct,
TraitSolverLangItem::AsyncFnKindHelper => LangItem::AsyncFnKindHelper,
TraitSolverLangItem::AsyncFnKindUpvars => LangItem::AsyncFnKindUpvars,
TraitSolverLangItem::AsyncFnOnceOutput => LangItem::AsyncFnOnceOutput,
TraitSolverLangItem::AsyncIterator => LangItem::AsyncIterator,
TraitSolverLangItem::CallOnceFuture => LangItem::CallOnceFuture,
TraitSolverLangItem::CallRefFuture => LangItem::CallRefFuture,
TraitSolverLangItem::Clone => LangItem::Clone,
TraitSolverLangItem::Copy => LangItem::Copy,
TraitSolverLangItem::Coroutine => LangItem::Coroutine,
TraitSolverLangItem::CoroutineReturn => LangItem::CoroutineReturn,
TraitSolverLangItem::CoroutineYield => LangItem::CoroutineYield,
TraitSolverLangItem::Destruct => LangItem::Destruct,
TraitSolverLangItem::DiscriminantKind => LangItem::DiscriminantKind,
TraitSolverLangItem::DynMetadata => LangItem::DynMetadata,
TraitSolverLangItem::EffectsMaybe => LangItem::EffectsMaybe,
TraitSolverLangItem::EffectsIntersection => LangItem::EffectsIntersection,
TraitSolverLangItem::EffectsIntersectionOutput => LangItem::EffectsIntersectionOutput,
TraitSolverLangItem::EffectsNoRuntime => LangItem::EffectsNoRuntime,
TraitSolverLangItem::EffectsRuntime => LangItem::EffectsRuntime,
TraitSolverLangItem::FnPtrTrait => LangItem::FnPtrTrait,
TraitSolverLangItem::FusedIterator => LangItem::FusedIterator,
TraitSolverLangItem::Future => LangItem::Future,
TraitSolverLangItem::FutureOutput => LangItem::FutureOutput,
TraitSolverLangItem::Iterator => LangItem::Iterator,
TraitSolverLangItem::Metadata => LangItem::Metadata,
TraitSolverLangItem::Option => LangItem::Option,
TraitSolverLangItem::PointeeTrait => LangItem::PointeeTrait,
TraitSolverLangItem::PointerLike => LangItem::PointerLike,
TraitSolverLangItem::Poll => LangItem::Poll,
TraitSolverLangItem::Sized => LangItem::Sized,
TraitSolverLangItem::TransmuteTrait => LangItem::TransmuteTrait,
TraitSolverLangItem::Tuple => LangItem::Tuple,
TraitSolverLangItem::Unpin => LangItem::Unpin,
TraitSolverLangItem::Unsize => LangItem::Unsize,
macro_rules! bidirectional_lang_item_map {
($($name:ident),+ $(,)?) => {
fn trait_lang_item_to_lang_item(lang_item: TraitSolverLangItem) -> LangItem {
match lang_item {
$(TraitSolverLangItem::$name => LangItem::$name,)+
}
}

fn lang_item_to_trait_lang_item(lang_item: LangItem) -> Option<TraitSolverLangItem> {
Some(match lang_item {
$(LangItem::$name => TraitSolverLangItem::$name,)+
_ => return None,
})
}
}
}

bidirectional_lang_item_map! {
// tidy-alphabetical-start
AsyncDestruct,
AsyncFn,
AsyncFnKindHelper,
AsyncFnKindUpvars,
AsyncFnMut,
AsyncFnOnce,
AsyncFnOnceOutput,
AsyncIterator,
CallOnceFuture,
CallRefFuture,
Clone,
Copy,
Coroutine,
CoroutineReturn,
CoroutineYield,
Destruct,
DiscriminantKind,
DynMetadata,
EffectsIntersection,
EffectsIntersectionOutput,
EffectsMaybe,
EffectsNoRuntime,
EffectsRuntime,
Fn,
FnMut,
FnOnce,
FnPtrTrait,
FusedIterator,
Future,
FutureOutput,
Iterator,
Metadata,
Option,
PointeeTrait,
PointerLike,
Poll,
Sized,
TransmuteTrait,
Tuple,
Unpin,
Unsize,
// tidy-alphabetical-end
}

impl<'tcx> rustc_type_ir::inherent::DefId<TyCtxt<'tcx>> for DefId {
fn as_local(self) -> Option<LocalDefId> {
self.as_local()
@ -1,369 +1,9 @@
use crate::mir::Mutability;
use crate::ty::GenericArgKind;
use crate::ty::{self, GenericArgsRef, Ty, TyCtxt, TypeVisitableExt};
use rustc_hir::def_id::DefId;
use rustc_macros::{HashStable, TyDecodable, TyEncodable};
use std::fmt::Debug;
use std::hash::Hash;
use std::iter;

/// See `simplify_type`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable, HashStable)]
pub enum SimplifiedType {
Bool,
Char,
Int(ty::IntTy),
Uint(ty::UintTy),
Float(ty::FloatTy),
Adt(DefId),
Foreign(DefId),
Str,
Array,
Slice,
Ref(Mutability),
Ptr(Mutability),
Never,
Tuple(usize),
/// A trait object, all of whose components are markers
/// (e.g., `dyn Send + Sync`).
MarkerTraitObject,
Trait(DefId),
Closure(DefId),
Coroutine(DefId),
CoroutineWitness(DefId),
Function(usize),
Placeholder,
Error,
}
use super::TyCtxt;

/// Generic parameters are pretty much just bound variables, e.g.
/// the type of `fn foo<'a, T>(x: &'a T) -> u32 { ... }` can be thought of as
/// `for<'a, T> fn(&'a T) -> u32`.
///
/// Typecheck of `foo` has to succeed for all possible generic arguments, so
/// during typeck, we have to treat its generic parameters as if they
/// were placeholders.
///
/// But when calling `foo` we only have to provide a specific generic argument.
/// In that case the generic parameters are instantiated with inference variables.
/// As we use `simplify_type` before that instantiation happens, we just treat
/// generic parameters as if they were inference variables in that case.
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub enum TreatParams {
/// Treat parameters as infer vars. This is the correct mode for caching
/// an impl's type for lookup.
AsCandidateKey,
/// Treat parameters as placeholders in the given environment. This is the
/// correct mode for *lookup*, as during candidate selection.
///
/// This also treats projections with inference variables as infer vars
/// since they could be further normalized.
ForLookup,
}
pub use rustc_type_ir::fast_reject::*;

/// Tries to simplify a type by only returning the outermost injective¹ layer, if one exists.
///
/// **This function should only be used if you need to store or retrieve the type from some
/// hashmap. If you want to quickly decide whether two types may unify, use the [DeepRejectCtxt]
/// instead.**
///
/// The idea is to get something simple that we can use to quickly decide if two types could unify,
/// for example during method lookup. If this function returns `Some(x)` it can only unify with
/// types for which this method returns either `Some(x)` as well or `None`.
///
/// A special case here are parameters and projections, which are only injective
/// if they are treated as placeholders.
///
/// For example when storing impls based on their simplified self type, we treat
/// generic parameters as if they were inference variables. We must not simplify them here,
/// as they can unify with any other type.
///
/// With projections we have to be even more careful, as treating them as placeholders
/// is only correct if they are fully normalized.
///
/// ¹ meaning that if the outermost layers are different, then the whole types are also different.
pub fn simplify_type<'tcx>(
tcx: TyCtxt<'tcx>,
ty: Ty<'tcx>,
treat_params: TreatParams,
) -> Option<SimplifiedType> {
match *ty.kind() {
ty::Bool => Some(SimplifiedType::Bool),
ty::Char => Some(SimplifiedType::Char),
ty::Int(int_type) => Some(SimplifiedType::Int(int_type)),
ty::Uint(uint_type) => Some(SimplifiedType::Uint(uint_type)),
ty::Float(float_type) => Some(SimplifiedType::Float(float_type)),
ty::Adt(def, _) => Some(SimplifiedType::Adt(def.did())),
ty::Str => Some(SimplifiedType::Str),
ty::Array(..) => Some(SimplifiedType::Array),
ty::Slice(..) => Some(SimplifiedType::Slice),
ty::Pat(ty, ..) => simplify_type(tcx, ty, treat_params),
ty::RawPtr(_, mutbl) => Some(SimplifiedType::Ptr(mutbl)),
ty::Dynamic(trait_info, ..) => match trait_info.principal_def_id() {
Some(principal_def_id) if !tcx.trait_is_auto(principal_def_id) => {
Some(SimplifiedType::Trait(principal_def_id))
}
_ => Some(SimplifiedType::MarkerTraitObject),
},
ty::Ref(_, _, mutbl) => Some(SimplifiedType::Ref(mutbl)),
ty::FnDef(def_id, _) | ty::Closure(def_id, _) | ty::CoroutineClosure(def_id, _) => {
Some(SimplifiedType::Closure(def_id))
}
ty::Coroutine(def_id, _) => Some(SimplifiedType::Coroutine(def_id)),
ty::CoroutineWitness(def_id, _) => Some(SimplifiedType::CoroutineWitness(def_id)),
ty::Never => Some(SimplifiedType::Never),
ty::Tuple(tys) => Some(SimplifiedType::Tuple(tys.len())),
ty::FnPtr(f) => Some(SimplifiedType::Function(f.skip_binder().inputs().len())),
ty::Placeholder(..) => Some(SimplifiedType::Placeholder),
ty::Param(_) => match treat_params {
TreatParams::ForLookup => Some(SimplifiedType::Placeholder),
TreatParams::AsCandidateKey => None,
},
ty::Alias(..) => match treat_params {
// When treating `ty::Param` as a placeholder, projections also
// don't unify with anything else as long as they are fully normalized.
// FIXME(-Znext-solver): Can remove this `if` and always simplify to `Placeholder`
// when the new solver is enabled by default.
TreatParams::ForLookup if !ty.has_non_region_infer() => {
Some(SimplifiedType::Placeholder)
}
TreatParams::ForLookup | TreatParams::AsCandidateKey => None,
},
ty::Foreign(def_id) => Some(SimplifiedType::Foreign(def_id)),
ty::Error(_) => Some(SimplifiedType::Error),
ty::Bound(..) | ty::Infer(_) => None,
}
}
pub type DeepRejectCtxt<'tcx> = rustc_type_ir::fast_reject::DeepRejectCtxt<TyCtxt<'tcx>>;

impl SimplifiedType {
pub fn def(self) -> Option<DefId> {
match self {
SimplifiedType::Adt(d)
| SimplifiedType::Foreign(d)
| SimplifiedType::Trait(d)
| SimplifiedType::Closure(d)
| SimplifiedType::Coroutine(d)
| SimplifiedType::CoroutineWitness(d) => Some(d),
_ => None,
}
}
}

/// Given generic arguments from an obligation and an impl,
/// could these two be unified after replacing parameters in the
/// the impl with inference variables.
///
/// For obligations, parameters won't be replaced by inference
/// variables and only unify with themselves. We treat them
/// the same way we treat placeholders.
///
/// We also use this function during coherence. For coherence the
/// impls only have to overlap for some value, so we treat parameters
/// on both sides like inference variables. This behavior is toggled
/// using the `treat_obligation_params` field.
#[derive(Debug, Clone, Copy)]
pub struct DeepRejectCtxt {
pub treat_obligation_params: TreatParams,
}

impl DeepRejectCtxt {
pub fn args_may_unify<'tcx>(
self,
obligation_args: GenericArgsRef<'tcx>,
impl_args: GenericArgsRef<'tcx>,
) -> bool {
iter::zip(obligation_args, impl_args).all(|(obl, imp)| {
match (obl.unpack(), imp.unpack()) {
// We don't fast reject based on regions.
(GenericArgKind::Lifetime(_), GenericArgKind::Lifetime(_)) => true,
(GenericArgKind::Type(obl), GenericArgKind::Type(imp)) => {
self.types_may_unify(obl, imp)
}
(GenericArgKind::Const(obl), GenericArgKind::Const(imp)) => {
self.consts_may_unify(obl, imp)
}
_ => bug!("kind mismatch: {obl} {imp}"),
}
})
}

pub fn types_may_unify<'tcx>(self, obligation_ty: Ty<'tcx>, impl_ty: Ty<'tcx>) -> bool {
match impl_ty.kind() {
// Start by checking whether the type in the impl may unify with
// pretty much everything. Just return `true` in that case.
ty::Param(_) | ty::Error(_) | ty::Alias(..) => return true,
// These types only unify with inference variables or their own
// variant.
ty::Bool
| ty::Char
| ty::Int(_)
| ty::Uint(_)
| ty::Float(_)
| ty::Adt(..)
| ty::Str
| ty::Array(..)
| ty::Slice(..)
| ty::RawPtr(..)
| ty::Dynamic(..)
| ty::Pat(..)
| ty::Ref(..)
| ty::Never
| ty::Tuple(..)
| ty::FnPtr(..)
| ty::Foreign(..) => debug_assert!(impl_ty.is_known_rigid()),
ty::FnDef(..)
| ty::Closure(..)
| ty::CoroutineClosure(..)
| ty::Coroutine(..)
| ty::CoroutineWitness(..)
| ty::Placeholder(..)
| ty::Bound(..)
| ty::Infer(_) => bug!("unexpected impl_ty: {impl_ty}"),
}

let k = impl_ty.kind();
match *obligation_ty.kind() {
// Purely rigid types, use structural equivalence.
ty::Bool
| ty::Char
| ty::Int(_)
| ty::Uint(_)
| ty::Float(_)
| ty::Str
| ty::Never
| ty::Foreign(_) => obligation_ty == impl_ty,
ty::Ref(_, obl_ty, obl_mutbl) => match k {
&ty::Ref(_, impl_ty, impl_mutbl) => {
obl_mutbl == impl_mutbl && self.types_may_unify(obl_ty, impl_ty)
}
_ => false,
},
ty::Adt(obl_def, obl_args) => match k {
&ty::Adt(impl_def, impl_args) => {
obl_def == impl_def && self.args_may_unify(obl_args, impl_args)
}
_ => false,
},
ty::Pat(obl_ty, _) => {
// FIXME(pattern_types): take pattern into account
matches!(k, &ty::Pat(impl_ty, _) if self.types_may_unify(obl_ty, impl_ty))
}
ty::Slice(obl_ty) => {
matches!(k, &ty::Slice(impl_ty) if self.types_may_unify(obl_ty, impl_ty))
}
ty::Array(obl_ty, obl_len) => match k {
&ty::Array(impl_ty, impl_len) => {
self.types_may_unify(obl_ty, impl_ty)
&& self.consts_may_unify(obl_len, impl_len)
}
_ => false,
},
ty::Tuple(obl) => match k {
&ty::Tuple(imp) => {
obl.len() == imp.len()
&& iter::zip(obl, imp).all(|(obl, imp)| self.types_may_unify(obl, imp))
}
_ => false,
},
ty::RawPtr(obl_ty, obl_mutbl) => match *k {
ty::RawPtr(imp_ty, imp_mutbl) => {
obl_mutbl == imp_mutbl && self.types_may_unify(obl_ty, imp_ty)
}
_ => false,
},
ty::Dynamic(obl_preds, ..) => {
// Ideally we would walk the existential predicates here or at least
// compare their length. But considering that the relevant `Relate` impl
// actually sorts and deduplicates these, that doesn't work.
matches!(k, ty::Dynamic(impl_preds, ..) if
obl_preds.principal_def_id() == impl_preds.principal_def_id()
)
}
ty::FnPtr(obl_sig) => match k {
ty::FnPtr(impl_sig) => {
let ty::FnSig { inputs_and_output, c_variadic, safety, abi } =
obl_sig.skip_binder();
let impl_sig = impl_sig.skip_binder();

abi == impl_sig.abi
&& c_variadic == impl_sig.c_variadic
&& safety == impl_sig.safety
&& inputs_and_output.len() == impl_sig.inputs_and_output.len()
&& iter::zip(inputs_and_output, impl_sig.inputs_and_output)
.all(|(obl, imp)| self.types_may_unify(obl, imp))
}
_ => false,
},

// Impls cannot contain these types as these cannot be named directly.
ty::FnDef(..) | ty::Closure(..) | ty::CoroutineClosure(..) | ty::Coroutine(..) => false,

// Placeholder types don't unify with anything on their own
ty::Placeholder(..) | ty::Bound(..) => false,

// Depending on the value of `treat_obligation_params`, we either
// treat generic parameters like placeholders or like inference variables.
ty::Param(_) => match self.treat_obligation_params {
TreatParams::ForLookup => false,
TreatParams::AsCandidateKey => true,
},

ty::Infer(ty::IntVar(_)) => impl_ty.is_integral(),

ty::Infer(ty::FloatVar(_)) => impl_ty.is_floating_point(),

ty::Infer(_) => true,

// As we're walking the whole type, it may encounter projections
// inside of binders and what not, so we're just going to assume that
// projections can unify with other stuff.
//
// Looking forward to lazy normalization this is the safer strategy anyways.
ty::Alias(..) => true,

ty::Error(_) => true,

ty::CoroutineWitness(..) => {
bug!("unexpected obligation type: {:?}", obligation_ty)
}
}
}

pub fn consts_may_unify(self, obligation_ct: ty::Const<'_>, impl_ct: ty::Const<'_>) -> bool {
let impl_val = match impl_ct.kind() {
ty::ConstKind::Expr(_)
| ty::ConstKind::Param(_)
| ty::ConstKind::Unevaluated(_)
| ty::ConstKind::Error(_) => {
return true;
}
ty::ConstKind::Value(_, impl_val) => impl_val,
ty::ConstKind::Infer(_) | ty::ConstKind::Bound(..) | ty::ConstKind::Placeholder(_) => {
bug!("unexpected impl arg: {:?}", impl_ct)
}
};

match obligation_ct.kind() {
ty::ConstKind::Param(_) => match self.treat_obligation_params {
TreatParams::ForLookup => false,
TreatParams::AsCandidateKey => true,
},

// Placeholder consts don't unify with anything on their own
ty::ConstKind::Placeholder(_) => false,

// As we don't necessarily eagerly evaluate constants,
// they might unify with any value.
ty::ConstKind::Expr(_) | ty::ConstKind::Unevaluated(_) | ty::ConstKind::Error(_) => {
true
}
ty::ConstKind::Value(_, obl_val) => obl_val == impl_val,

ty::ConstKind::Infer(_) => true,

ty::ConstKind::Bound(..) => {
bug!("unexpected obl const: {:?}", obligation_ct)
}
}
}
}
pub type SimplifiedType = rustc_type_ir::fast_reject::SimplifiedType<DefId>;
@ -4,7 +4,6 @@
use crate::middle::region;
use crate::mir;
use crate::ty;
use crate::ty::fast_reject::SimplifiedType;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::HashingControls;
@ -57,18 +56,6 @@
}
}

impl<'a> ToStableHashKey<StableHashingContext<'a>> for SimplifiedType {
type KeyType = Fingerprint;

#[inline]
fn to_stable_hash_key(&self, hcx: &StableHashingContext<'a>) -> Fingerprint {
let mut hasher = StableHasher::new();
let mut hcx: StableHashingContext<'a> = hcx.clone();
self.hash_stable(&mut hcx, &mut hasher);
hasher.finish()
}
}

impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for ty::GenericArg<'tcx> {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
self.unpack().hash_stable(hcx, hasher);
@ -1,23 +1,25 @@
use crate::error;
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use crate::ty::print::{FmtPrinter, Printer};
use crate::ty::{self, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable};
use crate::ty::{EarlyBinder, GenericArgs, GenericArgsRef, TypeVisitableExt};
use crate::ty::print::{shrunk_instance_name, FmtPrinter, Printer};
use crate::ty::{
self, EarlyBinder, GenericArgs, GenericArgsRef, Ty, TyCtxt, TypeFoldable, TypeSuperFoldable,
TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_hir::def::Namespace;
use rustc_hir::def_id::{CrateNum, DefId};
use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::FiniteBitSet;
use rustc_macros::{
Decodable, Encodable, HashStable, Lift, TyDecodable, TyEncodable, TypeVisitable,
};
use rustc_macros::{Decodable, Encodable, HashStable, Lift, TyDecodable, TyEncodable};
use rustc_middle::ty::normalize_erasing_regions::NormalizationError;
use rustc_span::def_id::LOCAL_CRATE;
use rustc_span::Symbol;
use rustc_span::{Span, Symbol, DUMMY_SP};
use tracing::{debug, instrument};

use std::assert_matches::assert_matches;
use std::fmt;
use std::path::PathBuf;

/// An `InstanceKind` along with the args that are needed to substitute the instance.
///
@ -385,7 +387,28 @@ impl<'tcx> InstanceKind<'tcx> {
}
}

fn fmt_instance(
fn type_length<'tcx>(item: impl TypeVisitable<TyCtxt<'tcx>>) -> usize {
struct Visitor {
type_length: usize,
}
impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for Visitor {
fn visit_ty(&mut self, t: Ty<'tcx>) {
self.type_length += 1;
t.super_visit_with(self);
}

fn visit_const(&mut self, ct: ty::Const<'tcx>) {
self.type_length += 1;
ct.super_visit_with(self);
}
}
let mut visitor = Visitor { type_length: 0 };
item.visit_with(&mut visitor);

visitor.type_length
}

pub fn fmt_instance(
f: &mut fmt::Formatter<'_>,
instance: Instance<'_>,
type_length: Option<rustc_session::Limit>,
@ -485,19 +508,30 @@ impl<'tcx> Instance<'tcx> {
///
/// Presuming that coherence and type-check have succeeded, if this method is invoked
/// in a monomorphic context (i.e., like during codegen), then it is guaranteed to return
/// `Ok(Some(instance))`.
/// `Ok(Some(instance))`, **except** for when the instance's inputs hit the type size limit,
/// in which case it may bail out and return `Ok(None)`.
///
/// Returns `Err(ErrorGuaranteed)` when the `Instance` resolution process
/// couldn't complete due to errors elsewhere - this is distinct
/// from `Ok(None)` to avoid misleading diagnostics when an error
/// has already been/will be emitted, for the original cause
#[instrument(level = "debug", skip(tcx), ret)]
pub fn resolve(
pub fn try_resolve(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
def_id: DefId,
args: GenericArgsRef<'tcx>,
) -> Result<Option<Instance<'tcx>>, ErrorGuaranteed> {
// Rust code can easily create exponentially-long types using only a
// polynomial recursion depth. Even with the default recursion
// depth, you can easily get cases that take >2^60 steps to run,
// which means that rustc basically hangs.
//
// Bail out in these cases to avoid that bad user experience.
if !tcx.type_length_limit().value_within_limit(type_length(args)) {
return Ok(None);
}

// All regions in the result of this query are erased, so it's
// fine to erase all of the input regions.

@ -505,7 +539,7 @@ impl<'tcx> Instance<'tcx> {
// below is more likely to ignore the bounds in scope (e.g. if the only
// generic parameters mentioned by `args` were lifetime ones).
let args = tcx.erase_regions(args);
tcx.resolve_instance(tcx.erase_regions(param_env.and((def_id, args))))
tcx.resolve_instance_raw(tcx.erase_regions(param_env.and((def_id, args))))
}

pub fn expect_resolve(
@ -513,10 +547,48 @@ impl<'tcx> Instance<'tcx> {
|
||||
param_env: ty::ParamEnv<'tcx>,
|
||||
def_id: DefId,
|
||||
args: GenericArgsRef<'tcx>,
|
||||
span: Span,
|
||||
) -> Instance<'tcx> {
|
||||
match ty::Instance::resolve(tcx, param_env, def_id, args) {
|
||||
// We compute the span lazily, to avoid unnecessary query calls.
|
||||
// If `span` is a DUMMY_SP, and the def id is local, then use the
|
||||
// def span of the def id.
|
||||
let span_or_local_def_span =
|
||||
|| if span.is_dummy() && def_id.is_local() { tcx.def_span(def_id) } else { span };
|
||||
|
||||
match ty::Instance::try_resolve(tcx, param_env, def_id, args) {
|
||||
Ok(Some(instance)) => instance,
|
||||
instance => bug!(
|
||||
Ok(None) => {
|
||||
let type_length = type_length(args);
|
||||
if !tcx.type_length_limit().value_within_limit(type_length) {
|
||||
let (shrunk, written_to_path) =
|
||||
shrunk_instance_name(tcx, Instance::new(def_id, args));
|
||||
let mut path = PathBuf::new();
|
||||
let was_written = if let Some(path2) = written_to_path {
|
||||
path = path2;
|
||||
Some(())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
tcx.dcx().emit_fatal(error::TypeLengthLimit {
|
||||
// We don't use `def_span(def_id)` so that diagnostics point
|
||||
// to the crate root during mono instead of to foreign items.
|
||||
// This is arguably better.
|
||||
span: span_or_local_def_span(),
|
||||
shrunk,
|
||||
was_written,
|
||||
path,
|
||||
type_length,
|
||||
});
|
||||
} else {
|
||||
span_bug!(
|
||||
span_or_local_def_span(),
|
||||
"failed to resolve instance for {}",
|
||||
tcx.def_path_str_with_args(def_id, args)
|
||||
)
|
||||
}
|
||||
}
|
||||
instance => span_bug!(
|
||||
span_or_local_def_span(),
|
||||
"failed to resolve instance for {}: {instance:#?}",
|
||||
tcx.def_path_str_with_args(def_id, args)
|
||||
),
|
||||
@ -533,7 +605,7 @@ impl<'tcx> Instance<'tcx> {
        // Use either `resolve_closure` or `resolve_for_vtable`
        assert!(!tcx.is_closure_like(def_id), "Called `resolve_for_fn_ptr` on closure: {def_id:?}");
        let reason = tcx.sess.is_sanitizer_kcfi_enabled().then_some(ReifyReason::FnPtr);
        Instance::resolve(tcx, param_env, def_id, args).ok().flatten().map(|mut resolved| {
        Instance::try_resolve(tcx, param_env, def_id, args).ok().flatten().map(|mut resolved| {
            match resolved.def {
                InstanceKind::Item(def) if resolved.def.requires_caller_location(tcx) => {
                    debug!(" => fn pointer created for function with #[track_caller]");
@ -571,77 +643,82 @@ impl<'tcx> Instance<'tcx> {
        })
    }

    pub fn resolve_for_vtable(
    pub fn expect_resolve_for_vtable(
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        def_id: DefId,
        args: GenericArgsRef<'tcx>,
    ) -> Option<Instance<'tcx>> {
        span: Span,
    ) -> Instance<'tcx> {
        debug!("resolve_for_vtable(def_id={:?}, args={:?})", def_id, args);
        let fn_sig = tcx.fn_sig(def_id).instantiate_identity();
        let is_vtable_shim = !fn_sig.inputs().skip_binder().is_empty()
            && fn_sig.input(0).skip_binder().is_param(0)
            && tcx.generics_of(def_id).has_self;

        if is_vtable_shim {
            debug!(" => associated item with unsizeable self: Self");
            Some(Instance { def: InstanceKind::VTableShim(def_id), args })
        } else {
            let reason = tcx.sess.is_sanitizer_kcfi_enabled().then_some(ReifyReason::Vtable);
            Instance::resolve(tcx, param_env, def_id, args).ok().flatten().map(|mut resolved| {
                match resolved.def {
                    InstanceKind::Item(def) => {
                        // We need to generate a shim when we cannot guarantee that
                        // the caller of a trait object method will be aware of
                        // `#[track_caller]` - this ensures that the caller
                        // and callee ABI will always match.
                        //
                        // The shim is generated when all of these conditions are met:
                        //
                        // 1) The underlying method expects a caller location parameter
                        // in the ABI
                        if resolved.def.requires_caller_location(tcx)
                            // 2) The caller location parameter comes from having `#[track_caller]`
                            // on the implementation, and *not* on the trait method.
                            && !tcx.should_inherit_track_caller(def)
                            // If the method implementation comes from the trait definition itself
                            // (e.g. `trait Foo { #[track_caller] my_fn() { /* impl */ } }`),
                            // then we don't need to generate a shim. This check is needed because
                            // `should_inherit_track_caller` returns `false` if our method
                            // implementation comes from the trait block, and not an impl block
                            && !matches!(
                                tcx.opt_associated_item(def),
                                Some(ty::AssocItem {
                                    container: ty::AssocItemContainer::TraitContainer,
                                    ..
                                })
                            )
                        {
                            if tcx.is_closure_like(def) {
                                debug!(" => vtable fn pointer created for closure with #[track_caller]: {:?} for method {:?} {:?}",
                                    def, def_id, args);

                                // Create a shim for the `FnOnce/FnMut/Fn` method we are calling
                                // - unlike functions, invoking a closure always goes through a
                                // trait.
                                resolved = Instance { def: InstanceKind::ReifyShim(def_id, reason), args };
                            } else {
                                debug!(
                                    " => vtable fn pointer created for function with #[track_caller]: {:?}", def
                                );
                                resolved.def = InstanceKind::ReifyShim(def, reason);
                            }
                        }
                    }
                    InstanceKind::Virtual(def_id, _) => {
                        debug!(" => vtable fn pointer created for virtual call");
                        resolved.def = InstanceKind::ReifyShim(def_id, reason)
                    }
                    _ => {}
                }

                resolved
            })
            return Instance { def: InstanceKind::VTableShim(def_id), args };
        }

        let mut resolved = Instance::expect_resolve(tcx, param_env, def_id, args, span);

        let reason = tcx.sess.is_sanitizer_kcfi_enabled().then_some(ReifyReason::Vtable);
        match resolved.def {
            InstanceKind::Item(def) => {
                // We need to generate a shim when we cannot guarantee that
                // the caller of a trait object method will be aware of
                // `#[track_caller]` - this ensures that the caller
                // and callee ABI will always match.
                //
                // The shim is generated when all of these conditions are met:
                //
                // 1) The underlying method expects a caller location parameter
                // in the ABI
                if resolved.def.requires_caller_location(tcx)
                    // 2) The caller location parameter comes from having `#[track_caller]`
                    // on the implementation, and *not* on the trait method.
                    && !tcx.should_inherit_track_caller(def)
                    // If the method implementation comes from the trait definition itself
                    // (e.g. `trait Foo { #[track_caller] my_fn() { /* impl */ } }`),
                    // then we don't need to generate a shim. This check is needed because
                    // `should_inherit_track_caller` returns `false` if our method
                    // implementation comes from the trait block, and not an impl block
                    && !matches!(
                        tcx.opt_associated_item(def),
                        Some(ty::AssocItem {
                            container: ty::AssocItemContainer::TraitContainer,
                            ..
                        })
                    )
                {
                    if tcx.is_closure_like(def) {
                        debug!(
                            " => vtable fn pointer created for closure with #[track_caller]: {:?} for method {:?} {:?}",
                            def, def_id, args
                        );

                        // Create a shim for the `FnOnce/FnMut/Fn` method we are calling
                        // - unlike functions, invoking a closure always goes through a
                        // trait.
                        resolved = Instance { def: InstanceKind::ReifyShim(def_id, reason), args };
                    } else {
                        debug!(
                            " => vtable fn pointer created for function with #[track_caller]: {:?}",
                            def
                        );
                        resolved.def = InstanceKind::ReifyShim(def, reason);
                    }
                }
            }
            InstanceKind::Virtual(def_id, _) => {
                debug!(" => vtable fn pointer created for virtual call");
                resolved.def = InstanceKind::ReifyShim(def_id, reason)
            }
            _ => {}
        }

        resolved
    }

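
// A standalone sketch of the situation the `ReifyShim` logic above handles:
// `#[track_caller]` on the implementation only, reached through a trait
// object. The caller through `dyn Greet` does not know the callee expects a
// hidden caller-location argument, so the compiler routes the vtable slot
// through a reify shim to keep both ABIs in agreement. The types here are
// illustrative, not from this commit.
use std::panic::Location;

trait Greet {
    fn greet(&self);
}

struct S;

impl Greet for S {
    #[track_caller] // on the implementation, *not* on the trait method
    fn greet(&self) {
        println!("called from {}", Location::caller());
    }
}

fn main() {
    let g: &dyn Greet = &S;
    g.greet(); // prints this call site, courtesy of the shim
}
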
    pub fn resolve_closure(
@ -661,13 +738,25 @@ impl<'tcx> Instance<'tcx> {
    pub fn resolve_drop_in_place(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ty::Instance<'tcx> {
        let def_id = tcx.require_lang_item(LangItem::DropInPlace, None);
        let args = tcx.mk_args(&[ty.into()]);
        Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args)
        Instance::expect_resolve(
            tcx,
            ty::ParamEnv::reveal_all(),
            def_id,
            args,
            ty.ty_adt_def().and_then(|adt| tcx.hir().span_if_local(adt.did())).unwrap_or(DUMMY_SP),
        )
    }

    pub fn resolve_async_drop_in_place(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> ty::Instance<'tcx> {
        let def_id = tcx.require_lang_item(LangItem::AsyncDropInPlace, None);
        let args = tcx.mk_args(&[ty.into()]);
        Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args)
        Instance::expect_resolve(
            tcx,
            ty::ParamEnv::reveal_all(),
            def_id,
            args,
            ty.ty_adt_def().and_then(|adt| tcx.hir().span_if_local(adt.did())).unwrap_or(DUMMY_SP),
        )
    }

    #[instrument(level = "debug", skip(tcx), ret)]
@ -1,5 +1,7 @@
use std::path::PathBuf;

use crate::ty::GenericArg;
use crate::ty::{self, Ty, TyCtxt};
use crate::ty::{self, ShortInstance, Ty, TyCtxt};

use hir::def::Namespace;
use rustc_data_structures::fx::FxHashSet;
@ -356,3 +358,31 @@ where
        with_no_trimmed_paths!(Self::print(t, fmt))
    }
}

/// Format instance name that is already known to be too long for rustc.
/// Show only the first 2 types if it is longer than 32 characters to avoid blasting
/// the user's terminal with thousands of lines of type-name.
///
/// If the type name is longer than before+after, it will be written to a file.
pub fn shrunk_instance_name<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: ty::Instance<'tcx>,
) -> (String, Option<PathBuf>) {
    let s = instance.to_string();

    // Only use the shrunk version if it's really shorter.
    // This also avoids the case where before and after slices overlap.
    if s.chars().nth(33).is_some() {
        let shrunk = format!("{}", ShortInstance(instance, 4));
        if shrunk == s {
            return (s, None);
        }

        let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None);
        let written_to_path = std::fs::write(&path, s).ok().map(|_| path);

        (shrunk, written_to_path)
    } else {
        (s, None)
    }
}
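
// A hedged, self-contained sketch of the same shrinking policy, with a naive
// shortener standing in for `ShortInstance`; all names below are illustrative.
use std::path::PathBuf;

fn shorten(s: &str) -> String {
    // Stand-in: keep a head and a tail, elide the middle.
    if s.len() <= 32 { s.to_string() } else { format!("{}...{}", &s[..16], &s[s.len() - 8..]) }
}

fn shrunk_name(s: String) -> (String, Option<PathBuf>) {
    // Only bother past 33 chars, and only keep the shrunk form if it is
    // really shorter than the original.
    if s.chars().nth(33).is_some() {
        let shrunk = shorten(&s);
        if shrunk == s {
            return (s, None);
        }
        // Spill the full name to a file so nothing is lost for debugging.
        let path = PathBuf::from("long-type.txt");
        let written = std::fs::write(&path, &s).ok().map(|_| path);
        (shrunk, written)
    } else {
        (s, None)
    }
}

fn main() {
    let long = format!("{}u8{}", "Vec<".repeat(12), ">".repeat(12));
    let (short, file) = shrunk_name(long);
    println!("{short} (full name written to disk: {})", file.is_some());
}
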
@ -1710,22 +1710,24 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
            ty::Bool if int == ScalarInt::FALSE => p!("false"),
            ty::Bool if int == ScalarInt::TRUE => p!("true"),
            // Float
            ty::Float(ty::FloatTy::F16) => {
                let val = Half::try_from(int).unwrap();
                p!(write("{}{}f16", val, if val.is_finite() { "" } else { "_" }))
            }
            ty::Float(ty::FloatTy::F32) => {
                let val = Single::try_from(int).unwrap();
                p!(write("{}{}f32", val, if val.is_finite() { "" } else { "_" }))
            }
            ty::Float(ty::FloatTy::F64) => {
                let val = Double::try_from(int).unwrap();
                p!(write("{}{}f64", val, if val.is_finite() { "" } else { "_" }))
            }
            ty::Float(ty::FloatTy::F128) => {
                let val = Quad::try_from(int).unwrap();
                p!(write("{}{}f128", val, if val.is_finite() { "" } else { "_" }))
            }
            ty::Float(fty) => match fty {
                ty::FloatTy::F16 => {
                    let val = Half::try_from(int).unwrap();
                    p!(write("{}{}f16", val, if val.is_finite() { "" } else { "_" }))
                }
                ty::FloatTy::F32 => {
                    let val = Single::try_from(int).unwrap();
                    p!(write("{}{}f32", val, if val.is_finite() { "" } else { "_" }))
                }
                ty::FloatTy::F64 => {
                    let val = Double::try_from(int).unwrap();
                    p!(write("{}{}f64", val, if val.is_finite() { "" } else { "_" }))
                }
                ty::FloatTy::F128 => {
                    let val = Quad::try_from(int).unwrap();
                    p!(write("{}{}f128", val, if val.is_finite() { "" } else { "_" }))
                }
            },
            // Int
            ty::Uint(_) | ty::Int(_) => {
                let int =
@ -68,6 +68,10 @@ impl<'tcx> ty::CoroutineArgs<TyCtxt<'tcx>> {
    const RETURNED: usize = 1;
    /// Coroutine has been poisoned.
    const POISONED: usize = 2;
    /// Number of variants to reserve in coroutine state. Corresponds to
    /// `UNRESUMED` (beginning of a coroutine) and `RETURNED`/`POISONED`
    /// (end of a coroutine) states.
    const RESERVED_VARIANTS: usize = 3;

    const UNRESUMED_NAME: &'static str = "Unresumed";
    const RETURNED_NAME: &'static str = "Returned";
@ -116,7 +120,7 @@ impl<'tcx> ty::CoroutineArgs<TyCtxt<'tcx>> {
            Self::UNRESUMED => Cow::from(Self::UNRESUMED_NAME),
            Self::RETURNED => Cow::from(Self::RETURNED_NAME),
            Self::POISONED => Cow::from(Self::POISONED_NAME),
            _ => Cow::from(format!("Suspend{}", v.as_usize() - 3)),
            _ => Cow::from(format!("Suspend{}", v.as_usize() - Self::RESERVED_VARIANTS)),
        }
    }

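
// For orientation, the variant numbering these constants describe, in a small
// self-contained form; the name used for the poisoned state is an assumption.
const RESERVED_VARIANTS: usize = 3;

fn variant_name(idx: usize) -> String {
    match idx {
        0 => "Unresumed".to_string(),
        1 => "Returned".to_string(),
        2 => "Panicked".to_string(), // the `POISONED` state
        _ => format!("Suspend{}", idx - RESERVED_VARIANTS),
    }
}

fn main() {
    assert_eq!(variant_name(3), "Suspend0"); // first suspension point
    assert_eq!(variant_name(5), "Suspend2");
}
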
@ -98,7 +98,7 @@ pub fn call_kind<'tcx>(
        Some(CallKind::Operator { self_arg, trait_id, self_ty: method_args.type_at(0) })
    } else if is_deref {
        let deref_target = tcx.get_diagnostic_item(sym::deref_target).and_then(|deref_target| {
            Instance::resolve(tcx, param_env, deref_target, method_args).transpose()
            Instance::try_resolve(tcx, param_env, deref_target, method_args).transpose()
        });
        if let Some(Ok(instance)) = deref_target {
            let deref_target_ty = instance.ty(tcx, param_env);

@ -130,7 +130,7 @@ fn convert_to_hir_projections_and_truncate_for_capture(
/// Eg: 1. `foo.x` which is represented using `projections=[Field(x)]` is an ancestor of
/// `foo.x.y` which is represented using `projections=[Field(x), Field(y)]`.
/// Note both `foo.x` and `foo.x.y` start off of the same root variable `foo`.
/// 2. Since we only look at the projections here function will return `bar.x` as an a valid
/// 2. Since we only look at the projections here function will return `bar.x` as a valid
/// ancestor of `foo.x.y`. It's the caller's responsibility to ensure that both projections
/// list are being applied to the same root variable.
fn is_ancestor_or_same_capture(
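
// The ancestry rule documented above compares only projection lists; a
// minimal standalone model of it (these are not the compiler's types):
#[derive(PartialEq)]
enum Proj {
    Field(&'static str),
}

// `a` is an ancestor of (or the same as) `b` iff `a`'s projections are a
// prefix of `b`'s; keeping the root variables consistent is the caller's job.
fn is_ancestor_or_same(a: &[Proj], b: &[Proj]) -> bool {
    a.len() <= b.len() && a.iter().zip(b).all(|(x, y)| x == y)
}

fn main() {
    let foo_x = [Proj::Field("x")];
    let foo_x_y = [Proj::Field("x"), Proj::Field("y")];
    assert!(is_ancestor_or_same(&foo_x, &foo_x_y)); // `foo.x` covers `foo.x.y`
    assert!(!is_ancestor_or_same(&foo_x_y, &foo_x));
}
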
@ -141,7 +141,7 @@ impl<'tcx> TerminatorClassifier<'tcx> for CallRecursion<'tcx> {
            return false;
        };
        let (callee, call_args) = if let Ok(Some(instance)) =
            Instance::resolve(tcx, param_env, callee, normalized_args)
            Instance::try_resolve(tcx, param_env, callee, normalized_args)
        {
            (instance.def_id(), instance.args)
        } else {

@ -138,7 +138,7 @@ impl<'tcx> ConstToPat<'tcx> {
        // lints, but no errors), double-check that all types in the const implement
        // `PartialEq`. Even if we have a valtree, we may have found something
        // in there with non-structural-equality, meaning we match using `PartialEq`
        // and we hence have to check that that impl exists.
        // and we hence have to check if that impl exists.
        // This is all messy but not worth cleaning up: at some point we'll emit
        // a hard error when we don't have a valtree or when we find something in
        // the valtree that is not structural; then this can all be made a lot simpler.

@ -558,7 +558,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
        let args = self
            .tcx
            .normalize_erasing_regions(param_env_reveal_all, self.typeck_results.node_args(id));
        let instance = match ty::Instance::resolve(self.tcx, param_env_reveal_all, def_id, args) {
        let instance = match ty::Instance::try_resolve(self.tcx, param_env_reveal_all, def_id, args)
        {
            Ok(Some(i)) => i,
            Ok(None) => {
                // It should be assoc consts if there's no error but we cannot resolve it.
@ -76,6 +76,8 @@ pub trait MeetSemiLattice: Eq {
/// A set that has a "bottom" element, which is less than or equal to any other element.
pub trait HasBottom {
    const BOTTOM: Self;

    fn is_bottom(&self) -> bool;
}

/// A set that has a "top" element, which is greater than or equal to any other element.
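
// A hedged sketch of what implementing the new trait looks like for a toy
// lattice; the `Sign` type is illustrative, not from rustc.
trait HasBottom {
    const BOTTOM: Self;
    fn is_bottom(&self) -> bool;
}

#[allow(dead_code)]
#[derive(PartialEq, Eq)]
enum Sign {
    Bottom, // nothing seen yet
    Neg,
    Zero,
    Pos,
    Top, // could be anything
}

impl HasBottom for Sign {
    const BOTTOM: Self = Sign::Bottom;
    fn is_bottom(&self) -> bool {
        matches!(self, Sign::Bottom)
    }
}

fn main() {
    // `is_bottom` lets sparse containers ask "is this entry the default?"
    // without materializing a stored ⊥ value for comparison.
    assert!(Sign::BOTTOM.is_bottom());
    assert!(!Sign::Pos.is_bottom());
}
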
@ -114,6 +116,10 @@ impl MeetSemiLattice for bool {

impl HasBottom for bool {
    const BOTTOM: Self = false;

    fn is_bottom(&self) -> bool {
        !self
    }
}

impl HasTop for bool {
@ -267,6 +273,10 @@ impl<T: Clone + Eq> MeetSemiLattice for FlatSet<T> {

impl<T> HasBottom for FlatSet<T> {
    const BOTTOM: Self = Self::Bottom;

    fn is_bottom(&self) -> bool {
        matches!(self, Self::Bottom)
    }
}

impl<T> HasTop for FlatSet<T> {
@ -291,6 +301,10 @@ impl<T> MaybeReachable<T> {

impl<T> HasBottom for MaybeReachable<T> {
    const BOTTOM: Self = MaybeReachable::Unreachable;

    fn is_bottom(&self) -> bool {
        matches!(self, Self::Unreachable)
    }
}

impl<T: HasTop> HasTop for MaybeReachable<T> {

@ -36,10 +36,10 @@ use std::collections::VecDeque;
use std::fmt::{Debug, Formatter};
use std::ops::Range;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::fx::{FxHashMap, StdEntry};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_index::bit_set::BitSet;
use rustc_index::{IndexSlice, IndexVec};
use rustc_index::IndexVec;
use rustc_middle::bug;
use rustc_middle::mir::visit::{MutatingUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
@ -336,14 +336,13 @@ impl<'tcx, T: ValueAnalysis<'tcx>> AnalysisDomain<'tcx> for ValueAnalysisWrapper
    const NAME: &'static str = T::NAME;

    fn bottom_value(&self, _body: &Body<'tcx>) -> Self::Domain {
        State(StateData::Unreachable)
        State::Unreachable
    }

    fn initialize_start_block(&self, body: &Body<'tcx>, state: &mut Self::Domain) {
        // The initial state maps all tracked places of argument projections to ⊤ and the rest to ⊥.
        assert!(matches!(state.0, StateData::Unreachable));
        let values = IndexVec::from_elem_n(T::Value::BOTTOM, self.0.map().value_count);
        *state = State(StateData::Reachable(values));
        assert!(matches!(state, State::Unreachable));
        *state = State::new_reachable();
        for arg in body.args_iter() {
            state.flood(PlaceRef { local: arg, projection: &[] }, self.0.map());
        }
@ -415,27 +414,54 @@ rustc_index::newtype_index!(

/// See [`State`].
#[derive(PartialEq, Eq, Debug)]
enum StateData<V> {
    Reachable(IndexVec<ValueIndex, V>),
    Unreachable,
pub struct StateData<V> {
    bottom: V,
    /// This map only contains values that are not `⊥`.
    map: FxHashMap<ValueIndex, V>,
}

impl<V: HasBottom> StateData<V> {
    fn new() -> StateData<V> {
        StateData { bottom: V::BOTTOM, map: FxHashMap::default() }
    }

    fn get(&self, idx: ValueIndex) -> &V {
        self.map.get(&idx).unwrap_or(&self.bottom)
    }

    fn insert(&mut self, idx: ValueIndex, elem: V) {
        if elem.is_bottom() {
            self.map.remove(&idx);
        } else {
            self.map.insert(idx, elem);
        }
    }
}

impl<V: Clone> Clone for StateData<V> {
    fn clone(&self) -> Self {
        match self {
            Self::Reachable(x) => Self::Reachable(x.clone()),
            Self::Unreachable => Self::Unreachable,
        }
        StateData { bottom: self.bottom.clone(), map: self.map.clone() }
    }

    fn clone_from(&mut self, source: &Self) {
        match (&mut *self, source) {
            (Self::Reachable(x), Self::Reachable(y)) => {
                // We go through `raw` here, because `IndexVec` currently has a naive `clone_from`.
                x.raw.clone_from(&y.raw);
        self.map.clone_from(&source.map)
    }
}

impl<V: JoinSemiLattice + Clone + HasBottom> JoinSemiLattice for StateData<V> {
    fn join(&mut self, other: &Self) -> bool {
        let mut changed = false;
        #[allow(rustc::potential_query_instability)]
        for (i, v) in other.map.iter() {
            match self.map.entry(*i) {
                StdEntry::Vacant(e) => {
                    e.insert(v.clone());
                    changed = true
                }
                StdEntry::Occupied(e) => changed |= e.into_mut().join(v),
            }
            _ => *self = source.clone(),
        }
        changed
    }
}

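
// The invariant of the sparse representation above, in a self-contained toy:
// plain `0` plays the role of ⊥ and a std `HashMap` stands in for `FxHashMap`.
use std::collections::HashMap;

struct Sparse {
    bottom: i32,
    map: HashMap<usize, i32>, // never stores values equal to `bottom`
}

impl Sparse {
    fn get(&self, idx: usize) -> &i32 {
        // Missing entries read back as ⊥, mirroring `StateData::get`.
        self.map.get(&idx).unwrap_or(&self.bottom)
    }

    fn insert(&mut self, idx: usize, v: i32) {
        // Storing ⊥ means removing, so the map stays proportional to the
        // number of interesting values rather than to all tracked places.
        if v == self.bottom { self.map.remove(&idx); } else { self.map.insert(idx, v); }
    }
}

fn main() {
    let mut s = Sparse { bottom: 0, map: HashMap::new() };
    s.insert(7, 42);
    assert_eq!(*s.get(7), 42);
    s.insert(7, 0); // back to ⊥: the entry disappears
    assert!(s.map.is_empty());
}
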
@ -450,33 +476,47 @@ impl<V: Clone> Clone for StateData<V> {
///
/// Flooding means assigning a value (by default `⊤`) to all tracked projections of a given place.
#[derive(PartialEq, Eq, Debug)]
pub struct State<V>(StateData<V>);
pub enum State<V> {
    Unreachable,
    Reachable(StateData<V>),
}

impl<V: Clone> Clone for State<V> {
    fn clone(&self) -> Self {
        Self(self.0.clone())
        match self {
            Self::Reachable(x) => Self::Reachable(x.clone()),
            Self::Unreachable => Self::Unreachable,
        }
    }

    fn clone_from(&mut self, source: &Self) {
        self.0.clone_from(&source.0);
        match (&mut *self, source) {
            (Self::Reachable(x), Self::Reachable(y)) => {
                x.clone_from(&y);
            }
            _ => *self = source.clone(),
        }
    }
}

impl<V: Clone> State<V> {
    pub fn new(init: V, map: &Map) -> State<V> {
        let values = IndexVec::from_elem_n(init, map.value_count);
        State(StateData::Reachable(values))
impl<V: Clone + HasBottom> State<V> {
    pub fn new_reachable() -> State<V> {
        State::Reachable(StateData::new())
    }

    pub fn all(&self, f: impl Fn(&V) -> bool) -> bool {
        match self.0 {
            StateData::Unreachable => true,
            StateData::Reachable(ref values) => values.iter().all(f),
    pub fn all_bottom(&self) -> bool {
        match self {
            State::Unreachable => false,
            State::Reachable(ref values) =>
            {
                #[allow(rustc::potential_query_instability)]
                values.map.values().all(V::is_bottom)
            }
        }
    }

    fn is_reachable(&self) -> bool {
        matches!(&self.0, StateData::Reachable(_))
        matches!(self, State::Reachable(_))
    }

    /// Assign `value` to all places that are contained in `place` or may alias one.
@ -519,10 +559,8 @@ impl<V: Clone> State<V> {
        map: &Map,
        value: V,
    ) {
        let StateData::Reachable(values) = &mut self.0 else { return };
        map.for_each_aliasing_place(place, tail_elem, &mut |vi| {
            values[vi] = value.clone();
        });
        let State::Reachable(values) = self else { return };
        map.for_each_aliasing_place(place, tail_elem, &mut |vi| values.insert(vi, value.clone()));
    }

    /// Low-level method that assigns to a place.
@ -541,9 +579,9 @@ impl<V: Clone> State<V> {
    ///
    /// The target place must have been flooded before calling this method.
    pub fn insert_value_idx(&mut self, target: PlaceIndex, value: V, map: &Map) {
        let StateData::Reachable(values) = &mut self.0 else { return };
        let State::Reachable(values) = self else { return };
        if let Some(value_index) = map.places[target].value_index {
            values[value_index] = value;
            values.insert(value_index, value)
        }
    }

@ -555,14 +593,14 @@ impl<V: Clone> State<V> {
    ///
    /// The target place must have been flooded before calling this method.
    pub fn insert_place_idx(&mut self, target: PlaceIndex, source: PlaceIndex, map: &Map) {
        let StateData::Reachable(values) = &mut self.0 else { return };
        let State::Reachable(values) = self else { return };

        // If both places are tracked, we copy the value to the target.
        // If the target is tracked, but the source is not, we do nothing, as invalidation has
        // already been performed.
        if let Some(target_value) = map.places[target].value_index {
            if let Some(source_value) = map.places[source].value_index {
                values[target_value] = values[source_value].clone();
                values.insert(target_value, values.get(source_value).clone());
            }
        }
        for target_child in map.children(target) {
@ -616,11 +654,11 @@ impl<V: Clone> State<V> {

    /// Retrieve the value stored for a place index, or `None` if it is not tracked.
    pub fn try_get_idx(&self, place: PlaceIndex, map: &Map) -> Option<V> {
        match &self.0 {
            StateData::Reachable(values) => {
                map.places[place].value_index.map(|v| values[v].clone())
        match self {
            State::Reachable(values) => {
                map.places[place].value_index.map(|v| values.get(v).clone())
            }
            StateData::Unreachable => None,
            State::Unreachable => None,
        }
    }

@ -631,10 +669,10 @@ impl<V: Clone> State<V> {
    where
        V: HasBottom + HasTop,
    {
        match &self.0 {
            StateData::Reachable(_) => self.try_get(place, map).unwrap_or(V::TOP),
        match self {
            State::Reachable(_) => self.try_get(place, map).unwrap_or(V::TOP),
            // Because this is unreachable, we can return any value we want.
            StateData::Unreachable => V::BOTTOM,
            State::Unreachable => V::BOTTOM,
        }
    }

@ -645,10 +683,10 @@ impl<V: Clone> State<V> {
    where
        V: HasBottom + HasTop,
    {
        match &self.0 {
            StateData::Reachable(_) => self.try_get_discr(place, map).unwrap_or(V::TOP),
        match self {
            State::Reachable(_) => self.try_get_discr(place, map).unwrap_or(V::TOP),
            // Because this is unreachable, we can return any value we want.
            StateData::Unreachable => V::BOTTOM,
            State::Unreachable => V::BOTTOM,
        }
    }

@ -659,10 +697,10 @@ impl<V: Clone> State<V> {
    where
        V: HasBottom + HasTop,
    {
        match &self.0 {
            StateData::Reachable(_) => self.try_get_len(place, map).unwrap_or(V::TOP),
        match self {
            State::Reachable(_) => self.try_get_len(place, map).unwrap_or(V::TOP),
            // Because this is unreachable, we can return any value we want.
            StateData::Unreachable => V::BOTTOM,
            State::Unreachable => V::BOTTOM,
        }
    }

@ -673,11 +711,11 @@ impl<V: Clone> State<V> {
    where
        V: HasBottom + HasTop,
    {
        match &self.0 {
            StateData::Reachable(values) => {
                map.places[place].value_index.map(|v| values[v].clone()).unwrap_or(V::TOP)
        match self {
            State::Reachable(values) => {
                map.places[place].value_index.map(|v| values.get(v).clone()).unwrap_or(V::TOP)
            }
            StateData::Unreachable => {
            State::Unreachable => {
                // Because this is unreachable, we can return any value we want.
                V::BOTTOM
            }
@ -685,15 +723,15 @@ impl<V: Clone> State<V> {
        }
    }

impl<V: JoinSemiLattice + Clone> JoinSemiLattice for State<V> {
impl<V: JoinSemiLattice + Clone + HasBottom> JoinSemiLattice for State<V> {
    fn join(&mut self, other: &Self) -> bool {
        match (&mut self.0, &other.0) {
            (_, StateData::Unreachable) => false,
            (StateData::Unreachable, _) => {
        match (&mut *self, other) {
            (_, State::Unreachable) => false,
            (State::Unreachable, _) => {
                *self = other.clone();
                true
            }
            (StateData::Reachable(this), StateData::Reachable(other)) => this.join(other),
            (State::Reachable(this), State::Reachable(ref other)) => this.join(other),
        }
    }
}
@ -1194,9 +1232,9 @@ where
    T::Value: Debug,
{
    fn fmt_with(&self, ctxt: &ValueAnalysisWrapper<T>, f: &mut Formatter<'_>) -> std::fmt::Result {
        match &self.0 {
            StateData::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
            StateData::Unreachable => write!(f, "unreachable"),
        match self {
            State::Reachable(values) => debug_with_context(values, None, ctxt.0.map(), f),
            State::Unreachable => write!(f, "unreachable"),
        }
    }

@ -1206,8 +1244,8 @@ where
        ctxt: &ValueAnalysisWrapper<T>,
        f: &mut Formatter<'_>,
    ) -> std::fmt::Result {
        match (&self.0, &old.0) {
            (StateData::Reachable(this), StateData::Reachable(old)) => {
        match (self, old) {
            (State::Reachable(this), State::Reachable(old)) => {
                debug_with_context(this, Some(old), ctxt.0.map(), f)
            }
            _ => Ok(()), // Consider printing something here.
@ -1215,21 +1253,21 @@ where
    }
}

fn debug_with_context_rec<V: Debug + Eq>(
fn debug_with_context_rec<V: Debug + Eq + HasBottom>(
    place: PlaceIndex,
    place_str: &str,
    new: &IndexSlice<ValueIndex, V>,
    old: Option<&IndexSlice<ValueIndex, V>>,
    new: &StateData<V>,
    old: Option<&StateData<V>>,
    map: &Map,
    f: &mut Formatter<'_>,
) -> std::fmt::Result {
    if let Some(value) = map.places[place].value_index {
        match old {
            None => writeln!(f, "{}: {:?}", place_str, new[value])?,
            None => writeln!(f, "{}: {:?}", place_str, new.get(value))?,
            Some(old) => {
                if new[value] != old[value] {
                    writeln!(f, "\u{001f}-{}: {:?}", place_str, old[value])?;
                    writeln!(f, "\u{001f}+{}: {:?}", place_str, new[value])?;
                if new.get(value) != old.get(value) {
                    writeln!(f, "\u{001f}-{}: {:?}", place_str, old.get(value))?;
                    writeln!(f, "\u{001f}+{}: {:?}", place_str, new.get(value))?;
                }
            }
        }
@ -1261,9 +1299,9 @@ fn debug_with_context_rec<V: Debug + Eq>(
    Ok(())
}

fn debug_with_context<V: Debug + Eq>(
    new: &IndexSlice<ValueIndex, V>,
    old: Option<&IndexSlice<ValueIndex, V>>,
fn debug_with_context<V: Debug + Eq + HasBottom>(
    new: &StateData<V>,
    old: Option<&StateData<V>>,
    map: &Map,
    f: &mut Formatter<'_>,
) -> std::fmt::Result {
@ -208,11 +208,8 @@ const UNRESUMED: usize = CoroutineArgs::UNRESUMED;
const RETURNED: usize = CoroutineArgs::RETURNED;
/// Coroutine has panicked and is poisoned.
const POISONED: usize = CoroutineArgs::POISONED;

/// Number of variants to reserve in coroutine state. Corresponds to
/// `UNRESUMED` (beginning of a coroutine) and `RETURNED`/`POISONED`
/// (end of a coroutine) states.
const RESERVED_VARIANTS: usize = 3;
/// Number of reserved variants of coroutine state.
const RESERVED_VARIANTS: usize = CoroutineArgs::RESERVED_VARIANTS;

/// A `yield` point in the coroutine.
struct SuspensionPoint<'tcx> {

@ -8,7 +8,7 @@
//! `Value` is interned as a `VnIndex`, which allows us to cheaply compute identical values.
//!
//! From those assignments, we construct a mapping `VnIndex -> Vec<(Local, Location)>` of available
//! values, the locals in which they are stored, and a the assignment location.
//! values, the locals in which they are stored, and the assignment location.
//!
//! In a second pass, we traverse all (non SSA) assignments `x = rvalue` and operands. For each
//! one, we compute the `VnIndex` of the rvalue. If this `VnIndex` is associated to a constant, we
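
// The interning idea in the module doc above, as a tiny standalone miniature
// (all names here are illustrative):
use std::collections::HashMap;

#[derive(Clone, PartialEq, Eq, Hash)]
enum Value {
    Const(i64),
    Add(usize, usize), // operands are value numbers, not subexpressions
}

#[derive(Default)]
struct Interner {
    values: Vec<Value>,
    index: HashMap<Value, usize>,
}

impl Interner {
    fn intern(&mut self, v: Value) -> usize {
        if let Some(&i) = self.index.get(&v) {
            return i; // structurally identical => same value number
        }
        let i = self.values.len();
        self.values.push(v.clone());
        self.index.insert(v, i);
        i
    }
}

fn main() {
    let mut t = Interner::default();
    let a = t.intern(Value::Const(1));
    let b = t.intern(Value::Const(2));
    // Identical rvalues intern to the same index, so redundancy checks
    // become integer comparisons.
    assert_eq!(t.intern(Value::Add(a, b)), t.intern(Value::Add(a, b)));
}
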
@ -389,7 +389,7 @@ impl<'tcx> Inliner<'tcx> {
        // To resolve an instance its args have to be fully normalized.
        let args = self.tcx.try_normalize_erasing_regions(self.param_env, args).ok()?;
        let callee =
            Instance::resolve(self.tcx, self.param_env, def_id, args).ok().flatten()?;
            Instance::try_resolve(self.tcx, self.param_env, def_id, args).ok().flatten()?;

        if let InstanceKind::Virtual(..) | InstanceKind::Intrinsic(_) = callee.def {
            return None;

@ -53,7 +53,7 @@ pub(crate) fn mir_callgraph_reachable<'tcx>(
            trace!(?caller, ?param_env, ?args, "cannot normalize, skipping");
            continue;
        };
        let Ok(Some(callee)) = ty::Instance::resolve(tcx, param_env, callee, args) else {
        let Ok(Some(callee)) = ty::Instance::try_resolve(tcx, param_env, callee, args) else {
            trace!(?callee, "cannot resolve, skipping");
            continue;
        };
@ -47,6 +47,7 @@ use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, ScalarInt, TyCtxt};
use rustc_mir_dataflow::lattice::HasBottom;
use rustc_mir_dataflow::value_analysis::{Map, PlaceIndex, State, TrackElem};
use rustc_span::DUMMY_SP;
use rustc_target::abi::{TagEncoding, Variants};
@ -158,9 +159,17 @@ impl Condition {
    }
}

#[derive(Copy, Clone, Debug, Default)]
#[derive(Copy, Clone, Debug)]
struct ConditionSet<'a>(&'a [Condition]);

impl HasBottom for ConditionSet<'_> {
    const BOTTOM: Self = ConditionSet(&[]);

    fn is_bottom(&self) -> bool {
        self.0.is_empty()
    }
}

impl<'a> ConditionSet<'a> {
    fn iter(self) -> impl Iterator<Item = Condition> + 'a {
        self.0.iter().copied()
@ -177,7 +186,7 @@ impl<'a> ConditionSet<'a> {

impl<'tcx, 'a> TOFinder<'tcx, 'a> {
    fn is_empty(&self, state: &State<ConditionSet<'a>>) -> bool {
        state.all(|cs| cs.0.is_empty())
        state.all_bottom()
    }

    /// Recursion entry point to find threading opportunities.
@ -198,7 +207,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
        debug!(?discr);

        let cost = CostChecker::new(self.tcx, self.param_env, None, self.body);
        let mut state = State::new(ConditionSet::default(), self.map);
        let mut state = State::new_reachable();

        let conds = if let Some((value, then, else_)) = targets.as_static_if() {
            let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
@ -255,7 +264,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
            // _1 = 5 // Whatever happens here, it won't change the result of a `SwitchInt`.
            // _1 = 6
            if let Some((lhs, tail)) = self.mutated_statement(stmt) {
                state.flood_with_tail_elem(lhs.as_ref(), tail, self.map, ConditionSet::default());
                state.flood_with_tail_elem(lhs.as_ref(), tail, self.map, ConditionSet::BOTTOM);
            }
        }

@ -609,7 +618,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
            // We can recurse through this terminator.
            let mut state = state();
            if let Some(place_to_flood) = place_to_flood {
                state.flood_with(place_to_flood.as_ref(), self.map, ConditionSet::default());
                state.flood_with(place_to_flood.as_ref(), self.map, ConditionSet::BOTTOM);
            }
            self.find_opportunity(bb, state, cost.clone(), depth + 1);
        }
@ -519,7 +519,7 @@ fn run_runtime_lowering_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        &add_subtyping_projections::Subtyper, // calling this after reveal_all ensures that we don't deal with opaque types
        &elaborate_drops::ElaborateDrops,
        // This will remove extraneous landing pads which are no longer
        // necessary as well as well as forcing any call in a non-unwinding
        // necessary as well as forcing any call in a non-unwinding
        // function calling a possibly-unwinding function to abort the process.
        &abort_unwinding_calls::AbortUnwindingCalls,
        // AddMovesForPackedDrops needs to run after drop

@ -816,7 +816,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
            mut func, mut args, call_source: desugar, fn_span, ..
        } => {
            // This promoted involves a function call, so it may fail to evaluate.
            // Let's make sure it is added to `required_consts` so that that failure cannot get lost.
            // Let's make sure it is added to `required_consts` so that failure cannot get lost.
            self.add_to_required = true;

            self.visit_operand(&mut func, loc);
@ -1,18 +1,17 @@
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::lang_items::LangItem;
use rustc_index::{Idx, IndexVec};
use rustc_middle::mir::*;
use rustc_middle::query::Providers;
use rustc_middle::ty::GenericArgs;
use rustc_middle::ty::{self, CoroutineArgs, CoroutineArgsExt, EarlyBinder, Ty, TyCtxt};
use rustc_middle::{bug, span_bug};
use rustc_target::abi::{FieldIdx, VariantIdx, FIRST_VARIANT};

use rustc_index::{Idx, IndexVec};

use rustc_span::{source_map::Spanned, Span, DUMMY_SP};
use rustc_target::abi::{FieldIdx, VariantIdx, FIRST_VARIANT};
use rustc_target::spec::abi::Abi;

use std::assert_matches::assert_matches;
use std::fmt;
use std::iter;

@ -1020,21 +1019,19 @@ fn build_construct_coroutine_by_move_shim<'tcx>(
    receiver_by_ref: bool,
) -> Body<'tcx> {
    let mut self_ty = tcx.type_of(coroutine_closure_def_id).instantiate_identity();
    let mut self_local: Place<'tcx> = Local::from_usize(1).into();
    let ty::CoroutineClosure(_, args) = *self_ty.kind() else {
        bug!();
    };

    // We use `&mut Self` here because we only need to emit an ABI-compatible shim body,
    // rather than match the signature exactly (which might take `&self` instead).
    // We use `&Self` here because we only need to emit an ABI-compatible shim body,
    // rather than match the signature exactly (which might take `&mut self` instead).
    //
    // The self type here is a coroutine-closure, not a coroutine, and we never read from
    // it because it never has any captures, because this is only true in the Fn/FnMut
    // implementation, not the AsyncFn/AsyncFnMut implementation, which is implemented only
    // if the coroutine-closure has no captures.
    // We adjust the `self_local` to be a deref since we want to copy fields out of
    // a reference to the closure.
    if receiver_by_ref {
        // Triple-check that there's no captures here.
        assert_eq!(args.as_coroutine_closure().tupled_upvars_ty(), tcx.types.unit);
        self_ty = Ty::new_mut_ref(tcx, tcx.lifetimes.re_erased, self_ty);
        self_local = tcx.mk_place_deref(self_local);
        self_ty = Ty::new_imm_ref(tcx, tcx.lifetimes.re_erased, self_ty);
    }

    let poly_sig = args.as_coroutine_closure().coroutine_closure_sig().map_bound(|sig| {
@ -1067,11 +1064,27 @@ fn build_construct_coroutine_by_move_shim<'tcx>(
        fields.push(Operand::Move(Local::from_usize(idx + 1).into()));
    }
    for (idx, ty) in args.as_coroutine_closure().upvar_tys().iter().enumerate() {
        fields.push(Operand::Move(tcx.mk_place_field(
            Local::from_usize(1).into(),
            FieldIdx::from_usize(idx),
            ty,
        )));
        if receiver_by_ref {
            // The only situation where it's possible is when we capture immutable references,
            // since those don't need to be reborrowed with the closure's env lifetime. Since
            // references are always `Copy`, just emit a copy.
            assert_matches!(
                ty.kind(),
                ty::Ref(_, _, hir::Mutability::Not),
                "field should be captured by immutable ref if we have an `Fn` instance"
            );
            fields.push(Operand::Copy(tcx.mk_place_field(
                self_local,
                FieldIdx::from_usize(idx),
                ty,
            )));
        } else {
            fields.push(Operand::Move(tcx.mk_place_field(
                self_local,
                FieldIdx::from_usize(idx),
                ty,
            )));
        }
    }

    let source_info = SourceInfo::outermost(span);
@ -1,6 +1,3 @@
monomorphize_consider_type_length_limit =
    consider adding a `#![type_length_limit="{$type_length}"]` attribute to your crate

monomorphize_couldnt_dump_mono_stats =
    unexpected error occurred while dumping monomorphization stats: {$error}

@ -25,8 +22,6 @@ monomorphize_start_not_found = using `fn main` requires the standard library

monomorphize_symbol_already_defined = symbol `{$symbol}` is already defined

monomorphize_type_length_limit = reached the type-length limit while instantiating `{$shrunk}`

monomorphize_unknown_cgu_collection_mode =
    unknown codegen-item collection mode '{$mode}', falling back to 'lazy' mode

@ -222,12 +222,12 @@ use rustc_middle::mir::{self, Location, MentionedItem};
use rustc_middle::query::TyCtxtAt;
use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion};
use rustc_middle::ty::layout::ValidityRequirement;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::print::{shrunk_instance_name, with_no_trimmed_paths};
use rustc_middle::ty::GenericArgs;
use rustc_middle::ty::{
    self, AssocKind, GenericParamDefKind, Instance, InstanceKind, Ty, TyCtxt, TypeFoldable,
    TypeVisitableExt, VtblEntry,
};
use rustc_middle::ty::{GenericArgKind, GenericArgs};
use rustc_middle::{bug, span_bug};
use rustc_session::config::EntryFnType;
use rustc_session::Limit;
@ -238,9 +238,7 @@ use rustc_target::abi::Size;
use std::path::PathBuf;
use tracing::{debug, instrument, trace};

use crate::errors::{
    self, EncounteredErrorWhileInstantiating, NoOptimizedMir, RecursionLimit, TypeLengthLimit,
};
use crate::errors::{self, EncounteredErrorWhileInstantiating, NoOptimizedMir, RecursionLimit};
use move_check::MoveCheckState;

#[derive(PartialEq)]
@ -443,7 +441,6 @@ fn collect_items_rec<'tcx>(
        recursion_depths,
        recursion_limit,
    ));
    check_type_length_limit(tcx, instance);

    rustc_data_structures::stack::ensure_sufficient_stack(|| {
        collect_items_of_instance(
@ -554,34 +551,6 @@ fn collect_items_rec<'tcx>(
    }
}

/// Format instance name that is already known to be too long for rustc.
/// Show only the first 2 types if it is longer than 32 characters to avoid blasting
/// the user's terminal with thousands of lines of type-name.
///
/// If the type name is longer than before+after, it will be written to a file.
fn shrunk_instance_name<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: Instance<'tcx>,
) -> (String, Option<PathBuf>) {
    let s = instance.to_string();

    // Only use the shrunk version if it's really shorter.
    // This also avoids the case where before and after slices overlap.
    if s.chars().nth(33).is_some() {
        let shrunk = format!("{}", ty::ShortInstance(instance, 4));
        if shrunk == s {
            return (s, None);
        }

        let path = tcx.output_filenames(()).temp_path_ext("long-type.txt", None);
        let written_to_path = std::fs::write(&path, s).ok().map(|_| path);

        (shrunk, written_to_path)
    } else {
        (s, None)
    }
}

fn check_recursion_limit<'tcx>(
    tcx: TyCtxt<'tcx>,
    instance: Instance<'tcx>,
@ -630,38 +599,6 @@ fn check_recursion_limit<'tcx>(
    (def_id, recursion_depth)
}

fn check_type_length_limit<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) {
    let type_length = instance
        .args
        .iter()
        .flat_map(|arg| arg.walk())
        .filter(|arg| match arg.unpack() {
            GenericArgKind::Type(_) | GenericArgKind::Const(_) => true,
            GenericArgKind::Lifetime(_) => false,
        })
        .count();
    debug!(" => type length={}", type_length);

    // Rust code can easily create exponentially-long types using only a
    // polynomial recursion depth. Even with the default recursion
    // depth, you can easily get cases that take >2^60 steps to run,
    // which means that rustc basically hangs.
    //
    // Bail out in these cases to avoid that bad user experience.
    if !tcx.type_length_limit().value_within_limit(type_length) {
        let (shrunk, written_to_path) = shrunk_instance_name(tcx, instance);
        let span = tcx.def_span(instance.def_id());
        let mut path = PathBuf::new();
        let was_written = if let Some(path2) = written_to_path {
            path = path2;
            Some(())
        } else {
            None
        };
        tcx.dcx().emit_fatal(TypeLengthLimit { span, shrunk, was_written, path, type_length });
    }
}

struct MirUsedCollector<'a, 'tcx> {
    tcx: TyCtxt<'tcx>,
    body: &'a mir::Body<'tcx>,
@ -916,7 +853,7 @@ fn visit_fn_use<'tcx>(
) {
    if let ty::FnDef(def_id, args) = *ty.kind() {
        let instance = if is_direct_call {
            ty::Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args)
            ty::Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args, source)
        } else {
            match ty::Instance::resolve_for_fn_ptr(tcx, ty::ParamEnv::reveal_all(), def_id, args) {
                Some(instance) => instance,
@ -1319,7 +1256,7 @@ fn visit_mentioned_item<'tcx>(
        MentionedItem::Fn(ty) => {
            if let ty::FnDef(def_id, args) = *ty.kind() {
                let instance =
                    Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args);
                    Instance::expect_resolve(tcx, ty::ParamEnv::reveal_all(), def_id, args, span);
                // `visit_instance_use` was written for "used" item collection but works just as well
                // for "mentioned" item collection.
                // We can set `is_direct_call`; that just means we'll skip a bunch of shims that anyway
@ -1544,6 +1481,7 @@ impl<'v> RootCollector<'_, 'v> {
            ty::ParamEnv::reveal_all(),
            start_def_id,
            self.tcx.mk_args(&[main_ret_ty.into()]),
            DUMMY_SP,
        );

        self.output.push(create_fn_mono_item(self.tcx, start_instance, DUMMY_SP));
@ -1612,9 +1550,10 @@ fn create_mono_items_for_default_impls<'tcx>(
    }

    // As mentioned above, the method is legal to eagerly instantiate if it
    // only has lifetime generic parameters. This is validated by
    // only has lifetime generic parameters. This is validated by calling
    // `own_requires_monomorphization` on both the impl and method.
    let args = trait_ref.args.extend_to(tcx, method.def_id, only_region_params);
    let instance = ty::Instance::expect_resolve(tcx, param_env, method.def_id, args);
    let instance = ty::Instance::expect_resolve(tcx, param_env, method.def_id, args, DUMMY_SP);

    let mono_item = create_fn_mono_item(tcx, instance, DUMMY_SP);
    if mono_item.node.is_instantiable(tcx) && should_codegen_locally(tcx, instance) {
@ -19,19 +19,6 @@ pub struct RecursionLimit {
    pub path: PathBuf,
}

#[derive(Diagnostic)]
#[diag(monomorphize_type_length_limit)]
#[help(monomorphize_consider_type_length_limit)]
pub struct TypeLengthLimit {
    #[primary_span]
    pub span: Span,
    pub shrunk: String,
    #[note(monomorphize_written_to_path)]
    pub was_written: Option<()>,
    pub path: PathBuf,
    pub type_length: usize,
}

#[derive(Diagnostic)]
#[diag(monomorphize_no_optimized_mir)]
pub struct NoOptimizedMir {
@ -387,48 +387,83 @@ where
            G::consider_auto_trait_candidate(self, goal)
        } else if cx.trait_is_alias(trait_def_id) {
            G::consider_trait_alias_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Sized) {
            G::consider_builtin_sized_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Copy)
            || cx.is_lang_item(trait_def_id, TraitSolverLangItem::Clone)
        {
            G::consider_builtin_copy_clone_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::PointerLike) {
            G::consider_builtin_pointer_like_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::FnPtrTrait) {
            G::consider_builtin_fn_ptr_trait_candidate(self, goal)
        } else if let Some(kind) = self.cx().fn_trait_kind_from_def_id(trait_def_id) {
            G::consider_builtin_fn_trait_candidates(self, goal, kind)
        } else if let Some(kind) = self.cx().async_fn_trait_kind_from_def_id(trait_def_id) {
            G::consider_builtin_async_fn_trait_candidates(self, goal, kind)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncFnKindHelper) {
            G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Tuple) {
            G::consider_builtin_tuple_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::PointeeTrait) {
            G::consider_builtin_pointee_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Future) {
            G::consider_builtin_future_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Iterator) {
            G::consider_builtin_iterator_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::FusedIterator) {
            G::consider_builtin_fused_iterator_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncIterator) {
            G::consider_builtin_async_iterator_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Coroutine) {
            G::consider_builtin_coroutine_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::DiscriminantKind) {
            G::consider_builtin_discriminant_kind_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::AsyncDestruct) {
            G::consider_builtin_async_destruct_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::Destruct) {
            G::consider_builtin_destruct_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::TransmuteTrait) {
            G::consider_builtin_transmute_candidate(self, goal)
        } else if cx.is_lang_item(trait_def_id, TraitSolverLangItem::EffectsIntersection) {
            G::consider_builtin_effects_intersection_candidate(self, goal)
        } else {
            Err(NoSolution)
            match cx.as_lang_item(trait_def_id) {
                Some(TraitSolverLangItem::Sized) => G::consider_builtin_sized_candidate(self, goal),
                Some(TraitSolverLangItem::Copy | TraitSolverLangItem::Clone) => {
                    G::consider_builtin_copy_clone_candidate(self, goal)
                }
                Some(TraitSolverLangItem::Fn) => {
                    G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
                }
                Some(TraitSolverLangItem::FnMut) => {
                    G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnMut)
                }
                Some(TraitSolverLangItem::FnOnce) => {
                    G::consider_builtin_fn_trait_candidates(self, goal, ty::ClosureKind::FnOnce)
                }
                Some(TraitSolverLangItem::AsyncFn) => {
                    G::consider_builtin_async_fn_trait_candidates(self, goal, ty::ClosureKind::Fn)
                }
                Some(TraitSolverLangItem::AsyncFnMut) => {
                    G::consider_builtin_async_fn_trait_candidates(
                        self,
                        goal,
                        ty::ClosureKind::FnMut,
                    )
                }
                Some(TraitSolverLangItem::AsyncFnOnce) => {
                    G::consider_builtin_async_fn_trait_candidates(
                        self,
                        goal,
                        ty::ClosureKind::FnOnce,
                    )
                }
                Some(TraitSolverLangItem::PointerLike) => {
                    G::consider_builtin_pointer_like_candidate(self, goal)
                }
                Some(TraitSolverLangItem::FnPtrTrait) => {
                    G::consider_builtin_fn_ptr_trait_candidate(self, goal)
                }
                Some(TraitSolverLangItem::AsyncFnKindHelper) => {
                    G::consider_builtin_async_fn_kind_helper_candidate(self, goal)
                }
                Some(TraitSolverLangItem::Tuple) => G::consider_builtin_tuple_candidate(self, goal),
                Some(TraitSolverLangItem::PointeeTrait) => {
                    G::consider_builtin_pointee_candidate(self, goal)
                }
                Some(TraitSolverLangItem::Future) => {
                    G::consider_builtin_future_candidate(self, goal)
                }
                Some(TraitSolverLangItem::Iterator) => {
                    G::consider_builtin_iterator_candidate(self, goal)
                }
                Some(TraitSolverLangItem::FusedIterator) => {
                    G::consider_builtin_fused_iterator_candidate(self, goal)
                }
                Some(TraitSolverLangItem::AsyncIterator) => {
                    G::consider_builtin_async_iterator_candidate(self, goal)
                }
                Some(TraitSolverLangItem::Coroutine) => {
                    G::consider_builtin_coroutine_candidate(self, goal)
                }
                Some(TraitSolverLangItem::DiscriminantKind) => {
                    G::consider_builtin_discriminant_kind_candidate(self, goal)
                }
                Some(TraitSolverLangItem::AsyncDestruct) => {
                    G::consider_builtin_async_destruct_candidate(self, goal)
                }
                Some(TraitSolverLangItem::Destruct) => {
                    G::consider_builtin_destruct_candidate(self, goal)
                }
                Some(TraitSolverLangItem::TransmuteTrait) => {
                    G::consider_builtin_transmute_candidate(self, goal)
                }
                Some(TraitSolverLangItem::EffectsIntersection) => {
                    G::consider_builtin_effects_intersection_candidate(self, goal)
                }
                _ => Err(NoSolution),
            }
        };

        candidates.extend(result);
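
// The shape of the refactor above, reduced to a standalone miniature: a chain
// of per-item `is_lang_item` tests becomes a single `match` on one lookup.
// This enum is illustrative, not the compiler's.
#[derive(PartialEq)]
enum LangItem {
    Sized,
    Copy,
    Clone,
}

// Before: one lookup per tested trait.
fn classify_before(is: impl Fn(LangItem) -> bool) -> &'static str {
    if is(LangItem::Sized) {
        "sized"
    } else if is(LangItem::Copy) || is(LangItem::Clone) {
        "copy/clone"
    } else {
        "none"
    }
}

// After: classify once, then branch, with or-patterns merging arms.
fn classify_after(item: Option<LangItem>) -> &'static str {
    match item {
        Some(LangItem::Sized) => "sized",
        Some(LangItem::Copy | LangItem::Clone) => "copy/clone",
        _ => "none",
    }
}

fn main() {
    assert_eq!(classify_before(|l| l == LangItem::Copy), "copy/clone");
    assert_eq!(classify_after(Some(LangItem::Clone)), "copy/clone");
}
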
@ -3,6 +3,7 @@ mod inherent;
mod opaque_types;
mod weak_types;

use rustc_type_ir::fast_reject::{DeepRejectCtxt, TreatParams};
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::Upcast as _;
@ -144,7 +145,7 @@ where

        let goal_trait_ref = goal.predicate.alias.trait_ref(cx);
        let impl_trait_ref = cx.impl_trait_ref(impl_def_id);
        if !ecx.cx().args_may_unify_deep(
        if !DeepRejectCtxt::new(ecx.cx(), TreatParams::ForLookup).args_may_unify(
            goal.predicate.alias.trait_ref(cx).args,
            impl_trait_ref.skip_binder().args,
        ) {

@ -2,6 +2,7 @@

use rustc_ast_ir::Movability;
use rustc_type_ir::data_structures::IndexSet;
use rustc_type_ir::fast_reject::{DeepRejectCtxt, TreatParams};
use rustc_type_ir::inherent::*;
use rustc_type_ir::lang_items::TraitSolverLangItem;
use rustc_type_ir::visit::TypeVisitableExt as _;
@ -46,7 +47,8 @@ where
        let cx = ecx.cx();

        let impl_trait_ref = cx.impl_trait_ref(impl_def_id);
        if !cx.args_may_unify_deep(goal.predicate.trait_ref.args, impl_trait_ref.skip_binder().args)
        if !DeepRejectCtxt::new(ecx.cx(), TreatParams::ForLookup)
            .args_may_unify(goal.predicate.trait_ref.args, impl_trait_ref.skip_binder().args)
        {
            return Err(NoSolution);
        }
@ -45,10 +45,6 @@ parse_bad_assoc_type_bounds = bounds on associated types do not belong here
parse_bad_item_kind = {$descr} is not supported in {$ctx}
    .help = consider moving the {$descr} out to a nearby module scope

parse_bad_return_type_notation_dotdot =
    return type notation uses `()` instead of `(..)` for elided arguments
    .suggestion = remove the `..`

parse_bad_return_type_notation_output =
    return type not allowed with return type notation
    .suggestion = remove the return type
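
// For context on the removal above: return type notation (nightly feature
// `return_type_notation`) now spells elided arguments as `(..)`, so the
// diagnostic that used to suggest removing the `..` is gone. A hedged sketch
// of the accepted syntax, with illustrative names:
#![feature(return_type_notation)]

trait Database {
    fn query(&self) -> impl Sized;
}

// `query(..)` bounds the return type of `query` for all argument lists.
fn run<DB: Database<query(..): Send>>(db: &DB) {
    let _rows = db.query();
}

fn main() {}
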
@ -2567,14 +2567,6 @@ pub(crate) struct BadReturnTypeNotationOutput {
    pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_bad_return_type_notation_dotdot)]
pub(crate) struct BadReturnTypeNotationDotDot {
    #[primary_span]
    #[suggestion(code = "", applicability = "maybe-incorrect")]
    pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_bad_assoc_type_bounds)]
pub(crate) struct BadAssocTypeBounds {
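Both the Fluent message and the `BadReturnTypeNotationDotDot` struct are being deleted: `(..)` is now the accepted spelling for elided arguments in return type notation, so the parser no longer needs an error steering users from `(..)` back to `()`. A hedged sketch of the nightly syntax this enables (the trait and method names are invented, and the feature's exact restrictions may reject variations of this):

#![feature(return_type_notation)]

trait Database {
    async fn fetch(&self) -> String;
}

// `fetch(..)` elides the argument types and bounds whatever `fetch`
// returns; before this change the parser demanded `fetch()` here and
// suggested removing the `..`.
fn spawn_fetch<D>(db: &D)
where
    D: Database<fetch(..): Send>,
{
    let _ = db.fetch();
}

fn main() {}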
@ -9,6 +9,7 @@ use rustc_session::parse::ParseSess;
use rustc_span::{sym, Span, DUMMY_SP};

use std::ops::Range;
use std::{iter, mem};

/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
@ -29,15 +30,15 @@ pub struct AttrWrapper {
    // The start of the outer attributes in the token cursor.
    // This allows us to create a `ReplaceRange` for the entire attribute
    // target, including outer attributes.
    start_pos: usize,
    start_pos: u32,
}

impl AttrWrapper {
    pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
    pub(super) fn new(attrs: AttrVec, start_pos: u32) -> AttrWrapper {
        AttrWrapper { attrs, start_pos }
    }
    pub fn empty() -> AttrWrapper {
        AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
        AttrWrapper { attrs: AttrVec::new(), start_pos: u32::MAX }
    }

    pub(crate) fn take_for_recovery(self, psess: &ParseSess) -> AttrVec {
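The usize-to-u32 narrowing here (and in `num_bump_calls` and `approx_token_stream_pos` below) shrinks hot parser types: token positions comfortably fit in 32 bits, and `u32::MAX` keeps serving as the "no position" sentinel. The payoff comes from field packing, which a small standalone example can show (field names invented; sizes assume a typical 64-bit target):

// Why usize -> u32 pays off: a u32 field can share the slot that a
// full usize used to occupy on its own.
struct Wide {
    ptr: Box<u64>,    // 8 bytes
    num_calls: usize, // 8 bytes
    flag: bool,       // 1 byte + 7 bytes padding
}

struct Narrow {
    ptr: Box<u64>,   // 8 bytes
    num_calls: u32,  // 4 bytes
    flag: bool,      // packs into the same word as num_calls
}

fn main() {
    assert_eq!(std::mem::size_of::<Wide>(), 24);
    assert_eq!(std::mem::size_of::<Narrow>(), 16);
}

The same packing is what drops `LazyAttrTokenStreamImpl` from 104 to 96 bytes in the size asserts later in this diff.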
@ -53,7 +54,7 @@ impl AttrWrapper {
    // FIXME: require passing an NT to prevent misuse of this method
    pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) {
        let mut self_attrs = self.attrs;
        std::mem::swap(attrs, &mut self_attrs);
        mem::swap(attrs, &mut self_attrs);
        attrs.extend(self_attrs);
    }
@ -91,7 +92,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
struct LazyAttrTokenStreamImpl {
    start_token: (Token, Spacing),
    cursor_snapshot: TokenCursor,
    num_calls: usize,
    num_calls: u32,
    break_last_token: bool,
    replace_ranges: Box<[ReplaceRange]>,
}
@ -104,15 +105,16 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
        // produce an empty `TokenStream` if no calls were made, and omit the
        // final token otherwise.
        let mut cursor_snapshot = self.cursor_snapshot.clone();
        let tokens =
            std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
                .chain(std::iter::repeat_with(|| {
                    let token = cursor_snapshot.next();
                    (FlatToken::Token(token.0), token.1)
                }))
                .take(self.num_calls);
        let tokens = iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
            .chain(iter::repeat_with(|| {
                let token = cursor_snapshot.next();
                (FlatToken::Token(token.0), token.1)
            }))
            .take(self.num_calls as usize);

        if !self.replace_ranges.is_empty() {
        if self.replace_ranges.is_empty() {
            make_attr_token_stream(tokens, self.break_last_token)
        } else {
            let mut tokens: Vec<_> = tokens.collect();
            let mut replace_ranges = self.replace_ranges.to_vec();
            replace_ranges.sort_by_key(|(range, _)| range.start);
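The `iter::once(start).chain(iter::repeat_with(|| cursor.next())).take(n)` shape replays exactly `n` tokens lazily from a cloned cursor snapshot instead of storing them eagerly. The same pattern in self-contained form (a toy cursor standing in for `TokenCursor`):

// Replaying n items lazily from a cloned cursor snapshot.
#[derive(Clone)]
struct Cursor {
    data: Vec<char>,
    pos: usize,
}

impl Cursor {
    fn next(&mut self) -> char {
        let c = self.data[self.pos];
        self.pos += 1;
        c
    }
}

fn main() {
    let snapshot = Cursor { data: "bcdef".chars().collect(), pos: 0 };
    let start = 'a';
    let num_calls = 4u32;

    let mut cursor = snapshot.clone(); // the original snapshot stays reusable
    let tokens: Vec<char> = std::iter::once(start)
        .chain(std::iter::repeat_with(|| cursor.next()))
        .take(num_calls as usize)
        .collect();

    assert_eq!(tokens, vec!['a', 'b', 'c', 'd']);
}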
@ -156,7 +158,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
            // This keeps the total length of `tokens` constant throughout the
            // replacement process, allowing us to use all of the `ReplaceRanges` entries
            // without adjusting indices.
            let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
            let filler = iter::repeat((FlatToken::Empty, Spacing::Alone))
                .take(range.len() - new_tokens.len());

            tokens.splice(
@ -164,9 +166,7 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
                new_tokens.into_iter().chain(filler),
            );
        }
        make_token_stream(tokens.into_iter(), self.break_last_token)
        } else {
        make_token_stream(tokens, self.break_last_token)
        make_attr_token_stream(tokens.into_iter(), self.break_last_token)
    }
}
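Replacing a range with fewer tokens and padding the difference with `FlatToken::Empty` keeps every recorded range valid with no index fix-ups. The underlying `Vec::splice` trick, standalone (using `None` as a stand-in for the empty filler token):

// Keep a Vec's length constant while replacing a sub-range:
// pad the replacement with filler so later indices stay valid.
fn main() {
    let mut tokens: Vec<Option<char>> = "abcdef".chars().map(Some).collect();

    let range = 1..4; // replace 'b', 'c', 'd'
    let new_tokens = vec![Some('X')];

    let filler = std::iter::repeat(None).take(range.len() - new_tokens.len());
    tokens.splice(range, new_tokens.into_iter().chain(filler));

    // Length unchanged; positions of 'e' and 'f' untouched.
    assert_eq!(tokens.len(), 6);
    assert_eq!(tokens, vec![Some('a'), Some('X'), None, None, Some('e'), Some('f')]);
}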
@ -218,24 +218,23 @@ impl<'a> Parser<'a> {
        let start_token = (self.token.clone(), self.token_spacing);
        let cursor_snapshot = self.token_cursor.clone();
        let start_pos = self.num_bump_calls;

        let has_outer_attrs = !attrs.attrs.is_empty();
        let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
        let replace_ranges_start = self.capture_state.replace_ranges.len();

        let ret = f(self, attrs.attrs);

        self.capture_state.capturing = prev_capturing;

        let (mut ret, trailing) = ret?;
        let (mut ret, trailing) = {
            let prev_capturing = mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
            let ret_and_trailing = f(self, attrs.attrs);
            self.capture_state.capturing = prev_capturing;
            ret_and_trailing?
        };

        // When we're not in `capture-cfg` mode, then bail out early if:
        // 1. Our target doesn't support tokens at all (e.g. we're parsing an `NtIdent`)
        //    so there's nothing for us to do.
        // 2. Our target already has tokens set (e.g. we've parsed something
        //    like `#[my_attr] $item`. The actual parsing code takes care of prepending
        //    any attributes to the nonterminal, so we don't need to modify the
        //    already captured tokens.
        //    like `#[my_attr] $item`). The actual parsing code takes care of
        //    prepending any attributes to the nonterminal, so we don't need to
        //    modify the already captured tokens.
        // Note that this check is independent of `force_collect` - if we already
        // have tokens, or can't even store them, then there's never a need to
        // force collection of new tokens.
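The refactor scopes the `Capturing::Yes` flag to a block: `mem::replace` flips the state, the closure runs, and the old state is restored before `?` can propagate an error out of the function. A standalone sketch of that save/run/restore shape (all names invented):

use std::mem;

#[derive(Debug, PartialEq, Clone, Copy)]
enum Capturing {
    No,
    Yes,
}

struct State {
    capturing: Capturing,
}

fn with_capturing<T>(
    state: &mut State,
    f: impl FnOnce(&mut State) -> Result<T, ()>,
) -> Result<T, ()> {
    let result = {
        // Save the old flag and force capturing on for the duration of `f`.
        let prev = mem::replace(&mut state.capturing, Capturing::Yes);
        let result = f(state);
        // Restore *before* `?` can return early with an error.
        state.capturing = prev;
        result?
    };
    Ok(result)
}

fn main() {
    let mut state = State { capturing: Capturing::No };
    let out = with_capturing(&mut state, |s| {
        assert_eq!(s.capturing, Capturing::Yes);
        Ok::<_, ()>(42)
    });
    assert_eq!(out, Ok(42));
    assert_eq!(state.capturing, Capturing::No); // restored either way
}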
@ -276,37 +275,32 @@ impl<'a> Parser<'a> {

        let replace_ranges_end = self.capture_state.replace_ranges.len();

        let mut end_pos = self.num_bump_calls;

        let mut captured_trailing = false;

        // Capture a trailing token if requested by the callback 'f'
        match trailing {
            TrailingToken::None => {}
        let captured_trailing = match trailing {
            TrailingToken::None => false,
            TrailingToken::Gt => {
                assert_eq!(self.token.kind, token::Gt);
                false
            }
            TrailingToken::Semi => {
                assert_eq!(self.token.kind, token::Semi);
                end_pos += 1;
                captured_trailing = true;
                true
            }
            TrailingToken::MaybeComma => {
                if self.token.kind == token::Comma {
                    end_pos += 1;
                    captured_trailing = true;
                }
            }
        }
            TrailingToken::MaybeComma => self.token.kind == token::Comma,
        };

        // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
        // then extend the range of captured tokens to include it, since the parser
        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
        // into an `AttrTokenStream`, we will create the proper token.
        if self.break_last_token {
            assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
            end_pos += 1;
        }
        assert!(
            !(self.break_last_token && captured_trailing),
            "Cannot set break_last_token and have trailing token"
        );

        let end_pos = self.num_bump_calls
            + captured_trailing as u32
            // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens), then
            // extend the range of captured tokens to include it, since the parser was not actually
            // bumped past it. When the `LazyAttrTokenStream` gets converted into an
            // `AttrTokenStream`, we will create the proper token.
            + self.break_last_token as u32;

        let num_calls = end_pos - start_pos;
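Folding the two adjustments into a single expression relies on `bool as u32` being guaranteed to be 0 or 1, so the end position is computed branch-free instead of via mutation and `if` blocks. A trivial standalone check:

// `bool as u32` is 0 or 1, so flags can be summed directly into a
// position instead of via `+= 1` under a condition.
fn main() {
    let num_bump_calls: u32 = 10;
    let captured_trailing = true;
    let break_last_token = false;

    let end_pos = num_bump_calls
        + captured_trailing as u32
        + break_last_token as u32;

    assert_eq!(end_pos, 11);
}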
@ -318,14 +312,11 @@ impl<'a> Parser<'a> {
            // Grab any replace ranges that occur *inside* the current AST node.
            // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
            // to an `AttrTokenStream`.
            let start_calls: u32 = start_pos.try_into().unwrap();
            self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
                .iter()
                .cloned()
                .chain(inner_attr_replace_ranges.iter().cloned())
                .map(|(range, tokens)| {
                    ((range.start - start_calls)..(range.end - start_calls), tokens)
                })
                .map(|(range, tokens)| ((range.start - start_pos)..(range.end - start_pos), tokens))
                .collect()
        };
@ -340,7 +331,7 @@ impl<'a> Parser<'a> {
        // If we support tokens at all
        if let Some(target_tokens) = ret.tokens_mut() {
            if target_tokens.is_none() {
                // Store se our newly captured tokens into the AST node
                // Store our newly captured tokens into the AST node.
                *target_tokens = Some(tokens.clone());
            }
        }
@ -382,10 +373,10 @@ impl<'a> Parser<'a> {
    }
}

/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
fn make_token_stream(
/// Converts a flattened iterator of tokens (including open and close delimiter tokens) into an
/// `AttrTokenStream`, creating an `AttrTokenTree::Delimited` for each matching pair of open and
/// close delims.
fn make_attr_token_stream(
    mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
    break_last_token: bool,
) -> AttrTokenStream {
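Turning a flat token sequence back into a tree hinges on a stack: push a frame on every open delimiter, pop and wrap on every close. A compact standalone version of that idea over characters (the real function also tracks spacing and the `break_last_token` fix-up):

// Rebuild a tree from a flat stream with explicit open/close delimiters.
#[derive(Debug, PartialEq)]
enum Tree {
    Leaf(char),
    Delimited(Vec<Tree>), // one node per matched ( ... ) pair
}

fn make_tree(iter: impl Iterator<Item = char>) -> Vec<Tree> {
    let mut stack: Vec<Vec<Tree>> = vec![Vec::new()];
    for c in iter {
        match c {
            '(' => stack.push(Vec::new()), // open: start a new frame
            ')' => {
                let frame = stack.pop().expect("unbalanced close delimiter");
                stack.last_mut().unwrap().push(Tree::Delimited(frame));
            }
            other => stack.last_mut().unwrap().push(Tree::Leaf(other)),
        }
    }
    assert_eq!(stack.len(), 1, "unbalanced open delimiter");
    stack.pop().unwrap()
}

fn main() {
    let trees = make_tree("a(bc)d".chars());
    assert_eq!(
        trees,
        vec![
            Tree::Leaf('a'),
            Tree::Delimited(vec![Tree::Leaf('b'), Tree::Leaf('c')]),
            Tree::Leaf('d'),
        ]
    );
}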
@ -464,6 +455,6 @@ mod size_asserts {
    use rustc_data_structures::static_assert_size;
    // tidy-alphabetical-start
    static_assert_size!(AttrWrapper, 16);
    static_assert_size!(LazyAttrTokenStreamImpl, 104);
    static_assert_size!(LazyAttrTokenStreamImpl, 96);
    // tidy-alphabetical-end
}
@ -153,7 +153,7 @@ pub struct Parser<'a> {
    expected_tokens: Vec<TokenType>,
    token_cursor: TokenCursor,
    // The number of calls to `bump`, i.e. the position in the token stream.
    num_bump_calls: usize,
    num_bump_calls: u32,
    // During parsing we may sometimes need to 'unglue' a glued token into two
    // component tokens (e.g. '>>' into '>' and '>'), so the parser can consume
    // them one at a time. This process bypasses the normal capturing mechanism
@ -192,7 +192,7 @@ pub struct Parser<'a> {
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger.
#[cfg(target_pointer_width = "64")]
rustc_data_structures::static_assert_size!(Parser<'_>, 264);
rustc_data_structures::static_assert_size!(Parser<'_>, 256);

/// Stores span information about a closure.
#[derive(Clone, Debug)]
@ -1572,7 +1572,7 @@ impl<'a> Parser<'a> {
        self.expected_tokens.clear();
    }

    pub fn approx_token_stream_pos(&self) -> usize {
    pub fn approx_token_stream_pos(&self) -> u32 {
        self.num_bump_calls
    }
}
Some files were not shown because too many files have changed in this diff.