Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-22 14:55:26 +00:00.
Auto merge of #121890 - matthiaskrgr:rollup-mv26uwt, r=matthiaskrgr
Rollup of 9 pull requests

Successful merges:

 - #109263 (fix typo in documentation for std::fs::Permissions)
 - #120684 (Update E0716.md for clarity)
 - #121715 (match lowering: pre-simplify or-patterns too)
 - #121739 (Display short types for unimplemented trait)
 - #121815 (Move `gather_comments`.)
 - #121835 (Move `HandleStore` into `server.rs`.)
 - #121847 (Remove hidden use of Global)
 - #121861 (Use the guaranteed precision of a couple of float functions in docs)
 - #121875 (Account for unmet T: !Copy in E0277 message)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in: commit 5257aee7dd
@@ -3491,6 +3491,7 @@ version = "0.0.0"
 dependencies = [
  "itertools 0.11.0",
  "rustc_ast",
+ "rustc_lexer",
  "rustc_span",
  "thin-vec",
 ]
@@ -1,6 +1,5 @@
 use crate::token::CommentKind;
-use rustc_span::source_map::SourceMap;
-use rustc_span::{BytePos, CharPos, FileName, Pos, Symbol};
+use rustc_span::{BytePos, Symbol};
 
 #[cfg(test)]
 mod tests;
@@ -131,126 +130,3 @@ pub fn beautify_doc_string(data: Symbol, kind: CommentKind) -> Symbol {
     }
     data
 }
-
-/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
-/// Otherwise returns `Some(k)` where `k` is first char offset after that leading
-/// whitespace. Note that `k` may be outside bounds of `s`.
-fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
-    let mut idx = 0;
-    for (i, ch) in s.char_indices().take(col.to_usize()) {
-        if !ch.is_whitespace() {
-            return None;
-        }
-        idx = i + ch.len_utf8();
-    }
-    Some(idx)
-}
-
-fn trim_whitespace_prefix(s: &str, col: CharPos) -> &str {
-    let len = s.len();
-    match all_whitespace(s, col) {
-        Some(col) => {
-            if col < len {
-                &s[col..]
-            } else {
-                ""
-            }
-        }
-        None => s,
-    }
-}
-
-fn split_block_comment_into_lines(text: &str, col: CharPos) -> Vec<String> {
-    let mut res: Vec<String> = vec![];
-    let mut lines = text.lines();
-    // just push the first line
-    res.extend(lines.next().map(|it| it.to_string()));
-    // for other lines, strip common whitespace prefix
-    for line in lines {
-        res.push(trim_whitespace_prefix(line, col).to_string())
-    }
-    res
-}
-
-// it appears this function is called only from pprust... that's
-// probably not a good thing.
-pub fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment> {
-    let sm = SourceMap::new(sm.path_mapping().clone());
-    let source_file = sm.new_source_file(path, src);
-    let text = (*source_file.src.as_ref().unwrap()).clone();
-
-    let text: &str = text.as_str();
-    let start_bpos = source_file.start_pos;
-    let mut pos = 0;
-    let mut comments: Vec<Comment> = Vec::new();
-    let mut code_to_the_left = false;
-
-    if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
-        comments.push(Comment {
-            style: CommentStyle::Isolated,
-            lines: vec![text[..shebang_len].to_string()],
-            pos: start_bpos,
-        });
-        pos += shebang_len;
-    }
-
-    for token in rustc_lexer::tokenize(&text[pos..]) {
-        let token_text = &text[pos..pos + token.len as usize];
-        match token.kind {
-            rustc_lexer::TokenKind::Whitespace => {
-                if let Some(mut idx) = token_text.find('\n') {
-                    code_to_the_left = false;
-                    while let Some(next_newline) = &token_text[idx + 1..].find('\n') {
-                        idx += 1 + next_newline;
-                        comments.push(Comment {
-                            style: CommentStyle::BlankLine,
-                            lines: vec![],
-                            pos: start_bpos + BytePos((pos + idx) as u32),
-                        });
-                    }
-                }
-            }
-            rustc_lexer::TokenKind::BlockComment { doc_style, .. } => {
-                if doc_style.is_none() {
-                    let code_to_the_right = !matches!(
-                        text[pos + token.len as usize..].chars().next(),
-                        Some('\r' | '\n')
-                    );
-                    let style = match (code_to_the_left, code_to_the_right) {
-                        (_, true) => CommentStyle::Mixed,
-                        (false, false) => CommentStyle::Isolated,
-                        (true, false) => CommentStyle::Trailing,
-                    };
-
-                    // Count the number of chars since the start of the line by rescanning.
-                    let pos_in_file = start_bpos + BytePos(pos as u32);
-                    let line_begin_in_file = source_file.line_begin_pos(pos_in_file);
-                    let line_begin_pos = (line_begin_in_file - start_bpos).to_usize();
-                    let col = CharPos(text[line_begin_pos..pos].chars().count());
-
-                    let lines = split_block_comment_into_lines(token_text, col);
-                    comments.push(Comment { style, lines, pos: pos_in_file })
-                }
-            }
-            rustc_lexer::TokenKind::LineComment { doc_style } => {
-                if doc_style.is_none() {
-                    comments.push(Comment {
-                        style: if code_to_the_left {
-                            CommentStyle::Trailing
-                        } else {
-                            CommentStyle::Isolated
-                        },
-                        lines: vec![token_text.to_string()],
-                        pos: start_bpos + BytePos(pos as u32),
-                    })
-                }
-            }
-            _ => {
-                code_to_the_left = true;
-            }
-        }
-        pos += token.len as usize;
-    }
-
-    comments
-}
@@ -7,6 +7,7 @@ edition = "2021"
 # tidy-alphabetical-start
 itertools = "0.11"
 rustc_ast = { path = "../rustc_ast" }
+rustc_lexer = { path = "../rustc_lexer" }
 rustc_span = { path = "../rustc_span" }
 thin-vec = "0.2.12"
 # tidy-alphabetical-end
@@ -14,7 +14,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{self, BinOpToken, CommentKind, Delimiter, Nonterminal, Token, TokenKind};
 use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
 use rustc_ast::util::classify;
-use rustc_ast::util::comments::{gather_comments, Comment, CommentStyle};
+use rustc_ast::util::comments::{Comment, CommentStyle};
 use rustc_ast::util::parser;
 use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, BlockCheckMode, PatKind};
 use rustc_ast::{attr, BindingAnnotation, ByRef, DelimArgs, RangeEnd, RangeSyntax, Term};
@@ -24,7 +24,7 @@ use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
 use rustc_span::edition::Edition;
 use rustc_span::source_map::{SourceMap, Spanned};
 use rustc_span::symbol::{kw, sym, Ident, IdentPrinter, Symbol};
-use rustc_span::{BytePos, FileName, Span, DUMMY_SP};
+use rustc_span::{BytePos, CharPos, FileName, Pos, Span, DUMMY_SP};
 use std::borrow::Cow;
 use thin_vec::ThinVec;
 
@@ -59,6 +59,127 @@ pub struct Comments<'a> {
     current: usize,
 }
 
+/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
+/// Otherwise returns `Some(k)` where `k` is first char offset after that leading
+/// whitespace. Note that `k` may be outside bounds of `s`.
+fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
+    let mut idx = 0;
+    for (i, ch) in s.char_indices().take(col.to_usize()) {
+        if !ch.is_whitespace() {
+            return None;
+        }
+        idx = i + ch.len_utf8();
+    }
+    Some(idx)
+}
+
+fn trim_whitespace_prefix(s: &str, col: CharPos) -> &str {
+    let len = s.len();
+    match all_whitespace(s, col) {
+        Some(col) => {
+            if col < len {
+                &s[col..]
+            } else {
+                ""
+            }
+        }
+        None => s,
+    }
+}
+
+fn split_block_comment_into_lines(text: &str, col: CharPos) -> Vec<String> {
+    let mut res: Vec<String> = vec![];
+    let mut lines = text.lines();
+    // just push the first line
+    res.extend(lines.next().map(|it| it.to_string()));
+    // for other lines, strip common whitespace prefix
+    for line in lines {
+        res.push(trim_whitespace_prefix(line, col).to_string())
+    }
+    res
+}
+
+fn gather_comments(sm: &SourceMap, path: FileName, src: String) -> Vec<Comment> {
+    let sm = SourceMap::new(sm.path_mapping().clone());
+    let source_file = sm.new_source_file(path, src);
+    let text = (*source_file.src.as_ref().unwrap()).clone();
+
+    let text: &str = text.as_str();
+    let start_bpos = source_file.start_pos;
+    let mut pos = 0;
+    let mut comments: Vec<Comment> = Vec::new();
+    let mut code_to_the_left = false;
+
+    if let Some(shebang_len) = rustc_lexer::strip_shebang(text) {
+        comments.push(Comment {
+            style: CommentStyle::Isolated,
+            lines: vec![text[..shebang_len].to_string()],
+            pos: start_bpos,
+        });
+        pos += shebang_len;
+    }
+
+    for token in rustc_lexer::tokenize(&text[pos..]) {
+        let token_text = &text[pos..pos + token.len as usize];
+        match token.kind {
+            rustc_lexer::TokenKind::Whitespace => {
+                if let Some(mut idx) = token_text.find('\n') {
+                    code_to_the_left = false;
+                    while let Some(next_newline) = &token_text[idx + 1..].find('\n') {
+                        idx += 1 + next_newline;
+                        comments.push(Comment {
+                            style: CommentStyle::BlankLine,
+                            lines: vec![],
+                            pos: start_bpos + BytePos((pos + idx) as u32),
+                        });
+                    }
+                }
+            }
+            rustc_lexer::TokenKind::BlockComment { doc_style, .. } => {
+                if doc_style.is_none() {
+                    let code_to_the_right = !matches!(
+                        text[pos + token.len as usize..].chars().next(),
+                        Some('\r' | '\n')
+                    );
+                    let style = match (code_to_the_left, code_to_the_right) {
+                        (_, true) => CommentStyle::Mixed,
+                        (false, false) => CommentStyle::Isolated,
+                        (true, false) => CommentStyle::Trailing,
+                    };
+
+                    // Count the number of chars since the start of the line by rescanning.
+                    let pos_in_file = start_bpos + BytePos(pos as u32);
+                    let line_begin_in_file = source_file.line_begin_pos(pos_in_file);
+                    let line_begin_pos = (line_begin_in_file - start_bpos).to_usize();
+                    let col = CharPos(text[line_begin_pos..pos].chars().count());
+
+                    let lines = split_block_comment_into_lines(token_text, col);
+                    comments.push(Comment { style, lines, pos: pos_in_file })
+                }
+            }
+            rustc_lexer::TokenKind::LineComment { doc_style } => {
+                if doc_style.is_none() {
+                    comments.push(Comment {
+                        style: if code_to_the_left {
+                            CommentStyle::Trailing
+                        } else {
+                            CommentStyle::Isolated
+                        },
+                        lines: vec![token_text.to_string()],
+                        pos: start_bpos + BytePos(pos as u32),
+                    })
+                }
+            }
+            _ => {
+                code_to_the_left = true;
+            }
+        }
+        pos += token.len as usize;
+    }
+
+    comments
+}
+
 impl<'a> Comments<'a> {
     pub fn new(sm: &'a SourceMap, filename: FileName, input: String) -> Comments<'a> {
         let comments = gather_comments(sm, filename, input);
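As a stand-alone orientation aid (my own example, not part of the diff): the `CommentStyle` values that the moved `gather_comments` assigns correspond to where a comment sits relative to code.

// An `Isolated` comment: no code before or after it on its line.
fn main() {
    let x = 1; // A `Trailing` comment: code to the left, none to the right.
    let y = /* A `Mixed` comment: code on both sides. */ 2;

    // The blank line above is recorded as a `BlankLine` entry with no text.
    println!("{}", x + y);
}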
@@ -30,7 +30,7 @@ let q = p;
 
 Whenever a temporary is created, it is automatically dropped (freed) according
 to fixed rules. Ordinarily, the temporary is dropped at the end of the enclosing
-statement -- in this case, after the `let`. This is illustrated in the example
+statement -- in this case, after the `let p`. This is illustrated in the example
 above by showing that `tmp` would be freed as we exit the block.
 
 To fix this problem, you need to create a local variable to store the value in
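The E0716 prose above describes the fix only in words; a minimal sketch of that pattern (hypothetical `foo`/`bar` functions, not the exact example from E0716.md):

fn foo() -> i32 { 22 }
fn bar(x: &i32) -> &i32 { x }

fn main() {
    // Without a named local, `&foo()` would borrow a temporary that is dropped
    // at the end of the `let` statement (error E0716).
    let value = foo();   // store the value in a local variable...
    let p = bar(&value); // ...so the borrow lives as long as `value`
    let q = *p;
    assert_eq!(q, 22);
}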
@@ -1049,6 +1049,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 bound_list.into_iter().map(|(_, path)| path).collect::<Vec<_>>().join("\n");
             let actual_prefix = rcvr_ty.prefix_string(self.tcx);
             info!("unimplemented_traits.len() == {}", unimplemented_traits.len());
+            let mut long_ty_file = None;
             let (primary_message, label) = if unimplemented_traits.len() == 1
                 && unimplemented_traits_only
             {
@@ -1061,8 +1062,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                         // Avoid crashing.
                         return (None, None);
                     }
-                    let OnUnimplementedNote { message, label, .. } =
-                        self.err_ctxt().on_unimplemented_note(trait_ref, &obligation);
+                    let OnUnimplementedNote { message, label, .. } = self
+                        .err_ctxt()
+                        .on_unimplemented_note(trait_ref, &obligation, &mut long_ty_file);
                     (message, label)
                 })
                 .unwrap()
@@ -1076,6 +1078,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
                 )
             });
             err.primary_message(primary_message);
+            if let Some(file) = long_ty_file {
+                err.note(format!(
+                    "the full name for the type has been written to '{}'",
+                    file.display(),
+                ));
+                err.note(
+                    "consider using `--verbose` to print the full type name to the console",
+                );
+            }
             if let Some(label) = label {
                 custom_span_label = true;
                 err.span_label(span, label);
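The three hunks above thread a shared `long_ty_file` slot through the unimplemented-trait diagnostic so an oversized type name is written to a file rather than printed inline. A hedged illustration of code that produces such a type (it intentionally fails to compile; the exact length threshold and file path are compiler implementation details, and a chain this short may not be long enough to trigger truncation):

trait Frobnicate {}

fn needs_frobnicate<T: Frobnicate>(_: T) {}

fn main() {
    // Every adapter nests another closure type into the iterator's concrete type,
    // so the type name shown in diagnostics grows quickly.
    let it = (0..10).map(|x| x + 1).filter(|x| x % 2 == 0).map(|x| x * 3);
    // error[E0277]: the trait bound `Map<Filter<...>, ...>: Frobnicate` is not satisfied.
    // When the type gets long enough, the note points at a file and suggests `--verbose`.
    needs_frobnicate(it);
}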
@@ -321,20 +321,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // The set of places that we are creating fake borrows of. If there are
         // no match guards then we don't need any fake borrows, so don't track
         // them.
-        let mut fake_borrows = match_has_guard.then(FxIndexSet::default);
+        let fake_borrows = match_has_guard
+            .then(|| util::FakeBorrowCollector::collect_fake_borrows(self, candidates));
 
         let otherwise_block = self.cfg.start_new_block();
 
         // This will generate code to test scrutinee_place and
         // branch to the appropriate arm block
-        self.match_candidates(
-            match_start_span,
-            scrutinee_span,
-            block,
-            otherwise_block,
-            candidates,
-            &mut fake_borrows,
-        );
+        self.match_candidates(match_start_span, scrutinee_span, block, otherwise_block, candidates);
 
         // See the doc comment on `match_candidates` for why we may have an
         // otherwise block. Match checking will ensure this is actually
@@ -944,6 +938,40 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     }
 }
 
+/// A pattern in a form suitable for generating code.
+#[derive(Debug, Clone)]
+struct FlatPat<'pat, 'tcx> {
+    /// [`Span`] of the original pattern.
+    span: Span,
+
+    /// To match the pattern, all of these must be satisfied...
+    // Invariant: all the `MatchPair`s are recursively simplified.
+    // Invariant: or-patterns must be sorted to the end.
+    match_pairs: Vec<MatchPair<'pat, 'tcx>>,
+
+    /// ...these bindings established...
+    bindings: Vec<Binding<'tcx>>,
+
+    /// ...and these types asserted.
+    ascriptions: Vec<Ascription<'tcx>>,
+}
+
+impl<'tcx, 'pat> FlatPat<'pat, 'tcx> {
+    fn new(
+        place: PlaceBuilder<'tcx>,
+        pattern: &'pat Pat<'tcx>,
+        cx: &mut Builder<'_, 'tcx>,
+    ) -> Self {
+        let mut match_pairs = vec![MatchPair::new(place, pattern, cx)];
+        let mut bindings = Vec::new();
+        let mut ascriptions = Vec::new();
+
+        cx.simplify_match_pairs(&mut match_pairs, &mut bindings, &mut ascriptions);
+
+        FlatPat { span: pattern.span, match_pairs, bindings, ascriptions }
+    }
+}
+
 #[derive(Debug)]
 struct Candidate<'pat, 'tcx> {
     /// [`Span`] of the original pattern that gave rise to this candidate.
@@ -958,11 +986,11 @@ struct Candidate<'pat, 'tcx> {
     match_pairs: Vec<MatchPair<'pat, 'tcx>>,
 
     /// ...these bindings established...
-    // Invariant: not mutated outside `Candidate::new()`.
+    // Invariant: not mutated after candidate creation.
     bindings: Vec<Binding<'tcx>>,
 
     /// ...and these types asserted...
-    // Invariant: not mutated outside `Candidate::new()`.
+    // Invariant: not mutated after candidate creation.
     ascriptions: Vec<Ascription<'tcx>>,
 
     /// ...and if this is non-empty, one of these subcandidates also has to match...
@@ -984,25 +1012,21 @@ impl<'tcx, 'pat> Candidate<'pat, 'tcx> {
         has_guard: bool,
         cx: &mut Builder<'_, 'tcx>,
     ) -> Self {
-        let mut candidate = Candidate {
-            span: pattern.span,
+        Self::from_flat_pat(FlatPat::new(place, pattern, cx), has_guard)
+    }
+
+    fn from_flat_pat(flat_pat: FlatPat<'pat, 'tcx>, has_guard: bool) -> Self {
+        Candidate {
+            span: flat_pat.span,
+            match_pairs: flat_pat.match_pairs,
+            bindings: flat_pat.bindings,
+            ascriptions: flat_pat.ascriptions,
             has_guard,
-            match_pairs: vec![MatchPair::new(place, pattern, cx)],
-            bindings: Vec::new(),
-            ascriptions: Vec::new(),
             subcandidates: Vec::new(),
             otherwise_block: None,
             pre_binding_block: None,
             next_candidate_pre_binding_block: None,
-        };
-
-        cx.simplify_match_pairs(
-            &mut candidate.match_pairs,
-            &mut candidate.bindings,
-            &mut candidate.ascriptions,
-        );
-
-        candidate
+        }
     }
 
     /// Visit the leaf candidates (those with no subcandidates) contained in
@@ -1065,7 +1089,7 @@ enum TestCase<'pat, 'tcx> {
     Constant { value: mir::Const<'tcx> },
     Range(&'pat PatRange<'tcx>),
     Slice { len: usize, variable_length: bool },
-    Or,
+    Or { pats: Box<[FlatPat<'pat, 'tcx>]> },
 }
 
 #[derive(Debug, Clone)]
@@ -1208,7 +1232,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     ///
     /// Note how we test `x` twice. This is the tradeoff of backtracking automata: we prefer smaller
     /// code size at the expense of non-optimal code paths.
-    #[instrument(skip(self, fake_borrows), level = "debug")]
+    #[instrument(skip(self), level = "debug")]
     fn match_candidates<'pat>(
         &mut self,
         span: Span,
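The doc comment above mentions testing `x` twice as the cost of the backtracking strategy; a small stand-alone illustration in ordinary Rust (my own example, not the one from the compiler docs):

fn classify(pair: (i32, i32)) -> &'static str {
    match pair {
        // If `(1 | 2, 3)` fails on the second element, the lowered automaton may
        // re-test the first element for the next arm, trading a few repeated
        // tests for smaller generated code.
        (1 | 2, 3) => "first",
        (2, 4) => "second",
        _ => "other",
    }
}

fn main() {
    assert_eq!(classify((2, 4)), "second");
    assert_eq!(classify((1, 3)), "first");
}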
@@ -1216,11 +1240,10 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         start_block: BasicBlock,
         otherwise_block: BasicBlock,
         candidates: &mut [&mut Candidate<'pat, 'tcx>],
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) {
         let mut split_or_candidate = false;
         for candidate in &mut *candidates {
-            if let [MatchPair { pattern: Pat { kind: PatKind::Or { pats }, .. }, place, .. }] =
+            if let [MatchPair { test_case: TestCase::Or { pats, .. }, .. }] =
                 &*candidate.match_pairs
             {
                 // Split a candidate in which the only match-pair is an or-pattern into multiple
@@ -1232,8 +1255,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 // }
                 //
                 // only generates a single switch.
-                candidate.subcandidates =
-                    self.create_or_subcandidates(place, pats, candidate.has_guard);
+                candidate.subcandidates = self.create_or_subcandidates(pats, candidate.has_guard);
                 candidate.match_pairs.pop();
                 split_or_candidate = true;
             }
@@ -1254,7 +1276,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     start_block,
                     otherwise_block,
                     &mut *new_candidates,
-                    fake_borrows,
                 );
             } else {
                 self.match_simplified_candidates(
@@ -1263,7 +1284,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     start_block,
                     otherwise_block,
                     candidates,
-                    fake_borrows,
                 );
             }
         });
@@ -1276,7 +1296,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         mut start_block: BasicBlock,
         otherwise_block: BasicBlock,
         candidates: &mut [&mut Candidate<'_, 'tcx>],
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) {
         match candidates {
             [] => {
@@ -1288,14 +1307,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             [first, remaining @ ..] if first.match_pairs.is_empty() => {
                 // The first candidate has satisfied all its match pairs; we link it up and continue
                 // with the remaining candidates.
-                start_block = self.select_matched_candidate(first, start_block, fake_borrows);
+                start_block = self.select_matched_candidate(first, start_block);
                 self.match_simplified_candidates(
                     span,
                     scrutinee_span,
                     start_block,
                     otherwise_block,
                     remaining,
-                    fake_borrows,
                 )
             }
             candidates => {
@@ -1306,7 +1324,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     candidates,
                     start_block,
                     otherwise_block,
-                    fake_borrows,
                 );
             }
         }
@@ -1341,43 +1358,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         &mut self,
         candidate: &mut Candidate<'_, 'tcx>,
         start_block: BasicBlock,
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) -> BasicBlock {
         assert!(candidate.otherwise_block.is_none());
         assert!(candidate.pre_binding_block.is_none());
         assert!(candidate.subcandidates.is_empty());
 
-        if let Some(fake_borrows) = fake_borrows {
-            // Insert a borrows of prefixes of places that are bound and are
-            // behind a dereference projection.
-            //
-            // These borrows are taken to avoid situations like the following:
-            //
-            // match x[10] {
-            //     _ if { x = &[0]; false } => (),
-            //     y => (), // Out of bounds array access!
-            // }
-            //
-            // match *x {
-            //     // y is bound by reference in the guard and then by copy in the
-            //     // arm, so y is 2 in the arm!
-            //     y if { y == 1 && (x = &2) == () } => y,
-            //     _ => 3,
-            // }
-            for Binding { source, .. } in &candidate.bindings {
-                if let Some(i) =
-                    source.projection.iter().rposition(|elem| elem == ProjectionElem::Deref)
-                {
-                    let proj_base = &source.projection[..i];
-
-                    fake_borrows.insert(Place {
-                        local: source.local,
-                        projection: self.tcx.mk_place_elems(proj_base),
-                    });
-                }
-            }
-        }
-
         candidate.pre_binding_block = Some(start_block);
         let otherwise_block = self.cfg.start_new_block();
         if candidate.has_guard {
@@ -1448,38 +1433,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         candidates: &mut [&mut Candidate<'_, 'tcx>],
         start_block: BasicBlock,
         otherwise_block: BasicBlock,
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) {
         let (first_candidate, remaining_candidates) = candidates.split_first_mut().unwrap();
         assert!(first_candidate.subcandidates.is_empty());
-        if !matches!(first_candidate.match_pairs[0].pattern.kind, PatKind::Or { .. }) {
-            self.test_candidates(
-                span,
-                scrutinee_span,
-                candidates,
-                start_block,
-                otherwise_block,
-                fake_borrows,
-            );
+        if !matches!(first_candidate.match_pairs[0].test_case, TestCase::Or { .. }) {
+            self.test_candidates(span, scrutinee_span, candidates, start_block, otherwise_block);
             return;
         }
 
         let match_pairs = mem::take(&mut first_candidate.match_pairs);
         let (first_match_pair, remaining_match_pairs) = match_pairs.split_first().unwrap();
-        let PatKind::Or { ref pats } = &first_match_pair.pattern.kind else { unreachable!() };
+        let TestCase::Or { ref pats } = &first_match_pair.test_case else { unreachable!() };
 
         let remainder_start = self.cfg.start_new_block();
         let or_span = first_match_pair.pattern.span;
         // Test the alternatives of this or-pattern.
-        self.test_or_pattern(
-            first_candidate,
-            start_block,
-            remainder_start,
-            pats,
-            or_span,
-            &first_match_pair.place,
-            fake_borrows,
-        );
+        self.test_or_pattern(first_candidate, start_block, remainder_start, pats, or_span);
 
         if !remaining_match_pairs.is_empty() {
             // If more match pairs remain, test them after each subcandidate.
@@ -1500,7 +1469,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     &mut [leaf_candidate],
                     or_start,
                     or_otherwise,
-                    fake_borrows,
                 );
             });
         }
@@ -1512,12 +1480,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             remainder_start,
             otherwise_block,
             remaining_candidates,
-            fake_borrows,
         );
     }
 
     #[instrument(
-        skip(self, start_block, otherwise_block, or_span, place, fake_borrows, candidate, pats),
+        skip(self, start_block, otherwise_block, or_span, candidate, pats),
         level = "debug"
     )]
     fn test_or_pattern<'pat>(
@@ -1525,15 +1492,14 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         candidate: &mut Candidate<'pat, 'tcx>,
         start_block: BasicBlock,
         otherwise_block: BasicBlock,
-        pats: &'pat [Box<Pat<'tcx>>],
+        pats: &[FlatPat<'pat, 'tcx>],
         or_span: Span,
-        place: &PlaceBuilder<'tcx>,
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) {
         debug!("candidate={:#?}\npats={:#?}", candidate, pats);
         let mut or_candidates: Vec<_> = pats
             .iter()
-            .map(|pat| Candidate::new(place.clone(), pat, candidate.has_guard, self))
+            .cloned()
+            .map(|flat_pat| Candidate::from_flat_pat(flat_pat, candidate.has_guard))
             .collect();
         let mut or_candidate_refs: Vec<_> = or_candidates.iter_mut().collect();
         self.match_candidates(
@@ -1542,7 +1508,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             start_block,
             otherwise_block,
             &mut or_candidate_refs,
-            fake_borrows,
         );
         candidate.subcandidates = or_candidates;
         self.merge_trivial_subcandidates(candidate, self.source_info(or_span));
@@ -1602,7 +1567,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     fn pick_test(
         &mut self,
         candidates: &mut [&mut Candidate<'_, 'tcx>],
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) -> (PlaceBuilder<'tcx>, Test<'tcx>) {
         // Extract the match-pair from the highest priority candidate
         let match_pair = &candidates.first().unwrap().match_pairs[0];
@@ -1631,13 +1595,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             _ => {}
         }
 
-        // Insert a Shallow borrow of any places that is switched on.
-        if let Some(fb) = fake_borrows
-            && let Some(resolved_place) = match_place.try_to_place(self)
-        {
-            fb.insert(resolved_place);
-        }
-
         (match_place, test)
     }
 
@@ -1811,10 +1768,9 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>],
         start_block: BasicBlock,
         otherwise_block: BasicBlock,
-        fake_borrows: &mut Option<FxIndexSet<Place<'tcx>>>,
     ) {
         // Extract the match-pair from the highest priority candidate and build a test from it.
-        let (match_place, test) = self.pick_test(candidates, fake_borrows);
+        let (match_place, test) = self.pick_test(candidates);
 
         // For each of the N possible test outcomes, build the vector of candidates that applies if
         // the test has that particular outcome.
@@ -1831,7 +1787,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 remainder_start,
                 otherwise_block,
                 remaining_candidates,
-                fake_borrows,
             );
             remainder_start
         } else {
@@ -1853,7 +1808,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 candidate_start,
                 remainder_start,
                 &mut *candidates,
-                fake_borrows,
             );
             candidate_start
         } else {
@@ -12,10 +12,8 @@
 //! sort of test: for example, testing which variant an enum is, or
 //! testing a value against a constant.
 
-use crate::build::expr::as_place::PlaceBuilder;
-use crate::build::matches::{Ascription, Binding, Candidate, MatchPair, TestCase};
+use crate::build::matches::{Ascription, Binding, Candidate, FlatPat, MatchPair, TestCase};
 use crate::build::Builder;
-use rustc_middle::thir::{Pat, PatKind};
 
 use std::mem;
 
@@ -100,7 +98,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         // Move or-patterns to the end, because they can result in us
         // creating additional candidates, so we want to test them as
        // late as possible.
-        match_pairs.sort_by_key(|pair| matches!(pair.pattern.kind, PatKind::Or { .. }));
+        match_pairs.sort_by_key(|pair| matches!(pair.test_case, TestCase::Or { .. }));
         debug!(simplified = ?match_pairs, "simplify_match_pairs");
     }
 
@@ -108,18 +106,17 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
     /// single-or-pattern case.
     pub(super) fn create_or_subcandidates<'pat>(
         &mut self,
-        place: &PlaceBuilder<'tcx>,
-        pats: &'pat [Box<Pat<'tcx>>],
+        pats: &[FlatPat<'pat, 'tcx>],
         has_guard: bool,
     ) -> Vec<Candidate<'pat, 'tcx>> {
         pats.iter()
-            .map(|box pat| {
-                let mut candidate = Candidate::new(place.clone(), pat, has_guard, self);
-                if let [MatchPair { pattern: Pat { kind: PatKind::Or { pats }, .. }, place, .. }] =
+            .cloned()
+            .map(|flat_pat| {
+                let mut candidate = Candidate::from_flat_pat(flat_pat, has_guard);
+                if let [MatchPair { test_case: TestCase::Or { pats, .. }, .. }] =
                     &*candidate.match_pairs
                 {
-                    candidate.subcandidates =
-                        self.create_or_subcandidates(place, pats, candidate.has_guard);
+                    candidate.subcandidates = self.create_or_subcandidates(pats, has_guard);
                     candidate.match_pairs.pop();
                 }
                 candidate
@@ -1,6 +1,7 @@
 use crate::build::expr::as_place::{PlaceBase, PlaceBuilder};
-use crate::build::matches::{MatchPair, TestCase};
+use crate::build::matches::{Binding, Candidate, FlatPat, MatchPair, TestCase};
 use crate::build::Builder;
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
 use rustc_middle::mir::*;
 use rustc_middle::thir::{self, *};
@@ -121,7 +122,9 @@ impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
         let mut subpairs = Vec::new();
         let test_case = match pattern.kind {
             PatKind::Never | PatKind::Wild | PatKind::Error(_) => default_irrefutable(),
-            PatKind::Or { .. } => TestCase::Or,
+            PatKind::Or { ref pats } => TestCase::Or {
+                pats: pats.iter().map(|pat| FlatPat::new(place.clone(), pat, cx)).collect(),
+            },
 
             PatKind::Range(ref range) => {
                 if range.is_full_range(cx.tcx) == Some(true) {
@@ -258,3 +261,82 @@ impl<'pat, 'tcx> MatchPair<'pat, 'tcx> {
         MatchPair { place, test_case, subpairs, pattern }
     }
 }
+
+pub(super) struct FakeBorrowCollector<'a, 'b, 'tcx> {
+    cx: &'a mut Builder<'b, 'tcx>,
+    fake_borrows: FxIndexSet<Place<'tcx>>,
+}
+
+impl<'a, 'b, 'tcx> FakeBorrowCollector<'a, 'b, 'tcx> {
+    pub(super) fn collect_fake_borrows(
+        cx: &'a mut Builder<'b, 'tcx>,
+        candidates: &[&mut Candidate<'_, 'tcx>],
+    ) -> FxIndexSet<Place<'tcx>> {
+        let mut collector = Self { cx, fake_borrows: FxIndexSet::default() };
+        for candidate in candidates.iter() {
+            collector.visit_candidate(candidate);
+        }
+        collector.fake_borrows
+    }
+
+    fn visit_candidate(&mut self, candidate: &Candidate<'_, 'tcx>) {
+        for binding in &candidate.bindings {
+            self.visit_binding(binding);
+        }
+        for match_pair in &candidate.match_pairs {
+            self.visit_match_pair(match_pair);
+        }
+    }
+
+    fn visit_flat_pat(&mut self, flat_pat: &FlatPat<'_, 'tcx>) {
+        for binding in &flat_pat.bindings {
+            self.visit_binding(binding);
+        }
+        for match_pair in &flat_pat.match_pairs {
+            self.visit_match_pair(match_pair);
+        }
+    }
+
+    fn visit_match_pair(&mut self, match_pair: &MatchPair<'_, 'tcx>) {
+        if let TestCase::Or { pats, .. } = &match_pair.test_case {
+            for flat_pat in pats.iter() {
+                self.visit_flat_pat(flat_pat)
+            }
+        } else {
+            // Insert a Shallow borrow of any place that is switched on.
+            if let Some(resolved_place) = match_pair.place.try_to_place(self.cx) {
+                self.fake_borrows.insert(resolved_place);
+            }
+
+            for subpair in &match_pair.subpairs {
+                self.visit_match_pair(subpair);
+            }
+        }
+    }
+
+    fn visit_binding(&mut self, Binding { source, .. }: &Binding<'tcx>) {
+        // Insert a borrows of prefixes of places that are bound and are
+        // behind a dereference projection.
+        //
+        // These borrows are taken to avoid situations like the following:
+        //
+        // match x[10] {
+        //     _ if { x = &[0]; false } => (),
+        //     y => (), // Out of bounds array access!
+        // }
+        //
+        // match *x {
+        //     // y is bound by reference in the guard and then by copy in the
+        //     // arm, so y is 2 in the arm!
+        //     y if { y == 1 && (x = &2) == () } => y,
+        //     _ => 3,
+        // }
+        if let Some(i) = source.projection.iter().rposition(|elem| elem == ProjectionElem::Deref) {
+            let proj_base = &source.projection[..i];
+            self.fake_borrows.insert(Place {
+                local: source.local,
+                projection: self.cx.tcx.mk_place_elems(proj_base),
+            });
+        }
+    }
+}
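The comments inside `visit_binding` above sketch why the fake borrows are collected; written out as a complete program (intentionally rejected by the compiler, my own rendering of the same scenario), the guard's assignment is exactly what the borrow check must catch:

fn main() {
    let mut x: &[i32] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
    match x[10] {
        // Without a fake borrow of the matched place, this guard could replace the
        // array out from under the `x[10]` access above; rustc reports something
        // like "cannot assign `x` in match guard" (E0510) instead.
        _ if { x = &[0]; false } => (),
        _y => (),
    }
}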
@@ -16,6 +16,7 @@ use rustc_session::lint::builtin::UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES;
 use rustc_span::symbol::{kw, sym, Symbol};
 use rustc_span::Span;
 use std::iter;
+use std::path::PathBuf;
 
 use crate::errors::{
     EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented,
@@ -110,6 +111,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
         &self,
         trait_ref: ty::PolyTraitRef<'tcx>,
         obligation: &PredicateObligation<'tcx>,
+        long_ty_file: &mut Option<PathBuf>,
     ) -> OnUnimplementedNote {
         let (def_id, args) = self
             .impl_similar_to(trait_ref, obligation)
@@ -265,7 +267,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
         }));
 
         if let Ok(Some(command)) = OnUnimplementedDirective::of_item(self.tcx, def_id) {
-            command.evaluate(self.tcx, trait_ref, &flags)
+            command.evaluate(self.tcx, trait_ref, &flags, long_ty_file)
         } else {
             OnUnimplementedNote::default()
         }
@@ -657,6 +659,7 @@ impl<'tcx> OnUnimplementedDirective {
         tcx: TyCtxt<'tcx>,
         trait_ref: ty::TraitRef<'tcx>,
         options: &[(Symbol, Option<String>)],
+        long_ty_file: &mut Option<PathBuf>,
     ) -> OnUnimplementedNote {
         let mut message = None;
         let mut label = None;
@@ -669,6 +672,7 @@ impl<'tcx> OnUnimplementedDirective {
             options.iter().filter_map(|(k, v)| v.clone().map(|v| (*k, v))).collect();
 
         for command in self.subcommands.iter().chain(Some(self)).rev() {
+            debug!(?command);
             if let Some(ref condition) = command.condition
                 && !attr::eval_condition(condition, &tcx.sess, Some(tcx.features()), &mut |cfg| {
                     let value = cfg.value.map(|v| {
@@ -680,7 +684,12 @@ impl<'tcx> OnUnimplementedDirective {
                             span: cfg.span,
                             is_diagnostic_namespace_variant: false
                         }
-                        .format(tcx, trait_ref, &options_map)
+                        .format(
+                            tcx,
+                            trait_ref,
+                            &options_map,
+                            long_ty_file
+                        )
                     )
                 });
 
@@ -709,10 +718,14 @@ impl<'tcx> OnUnimplementedDirective {
         }
 
         OnUnimplementedNote {
-            label: label.map(|l| l.format(tcx, trait_ref, &options_map)),
-            message: message.map(|m| m.format(tcx, trait_ref, &options_map)),
-            notes: notes.into_iter().map(|n| n.format(tcx, trait_ref, &options_map)).collect(),
-            parent_label: parent_label.map(|e_s| e_s.format(tcx, trait_ref, &options_map)),
+            label: label.map(|l| l.format(tcx, trait_ref, &options_map, long_ty_file)),
+            message: message.map(|m| m.format(tcx, trait_ref, &options_map, long_ty_file)),
+            notes: notes
+                .into_iter()
+                .map(|n| n.format(tcx, trait_ref, &options_map, long_ty_file))
+                .collect(),
+            parent_label: parent_label
+                .map(|e_s| e_s.format(tcx, trait_ref, &options_map, long_ty_file)),
             append_const_msg,
         }
     }
@@ -814,6 +827,7 @@ impl<'tcx> OnUnimplementedFormatString {
         tcx: TyCtxt<'tcx>,
         trait_ref: ty::TraitRef<'tcx>,
         options: &FxHashMap<Symbol, String>,
+        long_ty_file: &mut Option<PathBuf>,
     ) -> String {
         let name = tcx.item_name(trait_ref.def_id);
         let trait_str = tcx.def_path_str(trait_ref.def_id);
@@ -824,7 +838,11 @@ impl<'tcx> OnUnimplementedFormatString {
             .filter_map(|param| {
                 let value = match param.kind {
                     GenericParamDefKind::Type { .. } | GenericParamDefKind::Const { .. } => {
-                        trait_ref.args[param.index as usize].to_string()
+                        if let Some(ty) = trait_ref.args[param.index as usize].as_type() {
+                            tcx.short_ty_string(ty, long_ty_file)
+                        } else {
+                            trait_ref.args[param.index as usize].to_string()
+                        }
                    }
                     GenericParamDefKind::Lifetime => return None,
                 };
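Because `OnUnimplementedFormatString::format` now receives `long_ty_file`, placeholders such as `{Self}` in custom trait-not-implemented messages get the same shortening treatment. A hedged sketch of the kind of attribute affected (illustrative trait name and wording; `#[diagnostic::on_unimplemented]` is the user-facing form of this machinery):

#[diagnostic::on_unimplemented(
    message = "`{Self}` cannot be frobnicated",
    label = "implement `Frobnicate` for `{Self}` first"
)]
trait Frobnicate {}

fn needs_frobnicate<T: Frobnicate>(_: T) {}

fn main() {
    // error[E0277]: `Vec<Vec<Vec<u8>>>` cannot be frobnicated
    needs_frobnicate(vec![vec![vec![0u8]]]);
}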
@ -2671,6 +2671,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
|||||||
) where
|
) where
|
||||||
T: ToPredicate<'tcx>,
|
T: ToPredicate<'tcx>,
|
||||||
{
|
{
|
||||||
|
let mut long_ty_file = None;
|
||||||
|
|
||||||
let tcx = self.tcx;
|
let tcx = self.tcx;
|
||||||
let predicate = predicate.to_predicate(tcx);
|
let predicate = predicate.to_predicate(tcx);
|
||||||
match *cause_code {
|
match *cause_code {
|
||||||
@ -2853,21 +2855,13 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
ObligationCauseCode::Coercion { source, target } => {
|
ObligationCauseCode::Coercion { source, target } => {
|
||||||
let mut file = None;
|
let source =
|
||||||
let source = tcx.short_ty_string(self.resolve_vars_if_possible(source), &mut file);
|
tcx.short_ty_string(self.resolve_vars_if_possible(source), &mut long_ty_file);
|
||||||
let target = tcx.short_ty_string(self.resolve_vars_if_possible(target), &mut file);
|
let target =
|
||||||
|
tcx.short_ty_string(self.resolve_vars_if_possible(target), &mut long_ty_file);
|
||||||
err.note(with_forced_trimmed_paths!(format!(
|
err.note(with_forced_trimmed_paths!(format!(
|
||||||
"required for the cast from `{source}` to `{target}`",
|
"required for the cast from `{source}` to `{target}`",
|
||||||
)));
|
)));
|
||||||
if let Some(file) = file {
|
|
||||||
err.note(format!(
|
|
||||||
"the full name for the type has been written to '{}'",
|
|
||||||
file.display(),
|
|
||||||
));
|
|
||||||
err.note(
|
|
||||||
"consider using `--verbose` to print the full type name to the console",
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
ObligationCauseCode::RepeatElementCopy {
|
ObligationCauseCode::RepeatElementCopy {
|
||||||
is_constable,
|
is_constable,
|
||||||
@ -3170,8 +3164,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
|||||||
// Don't print the tuple of capture types
|
// Don't print the tuple of capture types
|
||||||
'print: {
|
'print: {
|
||||||
if !is_upvar_tys_infer_tuple {
|
if !is_upvar_tys_infer_tuple {
|
||||||
let mut file = None;
|
let ty_str = tcx.short_ty_string(ty, &mut long_ty_file);
|
||||||
let ty_str = tcx.short_ty_string(ty, &mut file);
|
|
||||||
let msg = format!("required because it appears within the type `{ty_str}`");
|
let msg = format!("required because it appears within the type `{ty_str}`");
|
||||||
match ty.kind() {
|
match ty.kind() {
|
||||||
ty::Adt(def, _) => match tcx.opt_item_ident(def.did()) {
|
ty::Adt(def, _) => match tcx.opt_item_ident(def.did()) {
|
||||||
@ -3269,9 +3262,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
|||||||
let mut parent_trait_pred =
|
let mut parent_trait_pred =
|
||||||
self.resolve_vars_if_possible(data.derived.parent_trait_pred);
|
self.resolve_vars_if_possible(data.derived.parent_trait_pred);
|
||||||
let parent_def_id = parent_trait_pred.def_id();
|
let parent_def_id = parent_trait_pred.def_id();
|
||||||
let mut file = None;
|
let self_ty_str = tcx
|
||||||
let self_ty_str =
|
.short_ty_string(parent_trait_pred.skip_binder().self_ty(), &mut long_ty_file);
|
||||||
tcx.short_ty_string(parent_trait_pred.skip_binder().self_ty(), &mut file);
|
|
||||||
let trait_name = parent_trait_pred.print_modifiers_and_trait_path().to_string();
|
let trait_name = parent_trait_pred.print_modifiers_and_trait_path().to_string();
|
||||||
let msg = format!("required for `{self_ty_str}` to implement `{trait_name}`");
|
let msg = format!("required for `{self_ty_str}` to implement `{trait_name}`");
|
||||||
let mut is_auto_trait = false;
|
 let mut is_auto_trait = false;

@@ -3329,15 +3321,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 }
 };

-if let Some(file) = file {
-err.note(format!(
-"the full type name has been written to '{}'",
-file.display(),
-));
-err.note(
-"consider using `--verbose` to print the full type name to the console",
-);
-}
 let mut parent_predicate = parent_trait_pred;
 let mut data = &data.derived;
 let mut count = 0;

@@ -3378,22 +3361,14 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 count,
 pluralize!(count)
 ));
-let mut file = None;
-let self_ty =
-tcx.short_ty_string(parent_trait_pred.skip_binder().self_ty(), &mut file);
+let self_ty = tcx.short_ty_string(
+parent_trait_pred.skip_binder().self_ty(),
+&mut long_ty_file,
+);
 err.note(format!(
 "required for `{self_ty}` to implement `{}`",
 parent_trait_pred.print_modifiers_and_trait_path()
 ));
-if let Some(file) = file {
-err.note(format!(
-"the full type name has been written to '{}'",
-file.display(),
-));
-err.note(
-"consider using `--verbose` to print the full type name to the console",
-);
-}
 }
 // #74711: avoid a stack overflow
 ensure_sufficient_stack(|| {

@@ -3502,7 +3477,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 }
 ObligationCauseCode::OpaqueReturnType(expr_info) => {
 if let Some((expr_ty, expr_span)) = expr_info {
-let expr_ty = with_forced_trimmed_paths!(self.ty_to_string(expr_ty));
+let expr_ty = self.tcx.short_ty_string(expr_ty, &mut long_ty_file);
 err.span_label(
 expr_span,
 with_forced_trimmed_paths!(format!(

@@ -3512,6 +3487,14 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 }
 }
 }

+if let Some(file) = long_ty_file {
+err.note(format!(
+"the full name for the type has been written to '{}'",
+file.display(),
+));
+err.note("consider using `--verbose` to print the full type name to the console");
+}
 }

 #[instrument(

@@ -4769,20 +4752,21 @@ pub(super) fn get_explanation_based_on_obligation<'tcx>(
 } else {
 String::new()
 };
-match ty_desc {
-Some(desc) => format!(
-"{}the trait `{}` is not implemented for {} `{}`{post}",
-pre_message,
-trait_predicate.print_modifiers_and_trait_path(),
-desc,
-tcx.short_ty_string(trait_ref.skip_binder().self_ty(), &mut None),
-),
-None => format!(
-"{}the trait `{}` is not implemented for `{}`{post}",
-pre_message,
+let desc = match ty_desc {
+Some(desc) => format!(" {desc}"),
+None => String::new(),
+};
+if let ty::ImplPolarity::Positive = trait_predicate.polarity() {
+format!(
+"{pre_message}the trait `{}` is not implemented for{desc} `{}`{post}",
 trait_predicate.print_modifiers_and_trait_path(),
 tcx.short_ty_string(trait_ref.skip_binder().self_ty(), &mut None),
-),
+)
+} else {
+// "the trait bound `T: !Send` is not satisfied" reads better than "`!Send` is
+// not implemented for `T`".
+// FIXME: add note explaining explicit negative trait bounds.
+format!("{pre_message}the trait bound `{trait_predicate}` is not satisfied{post}")
 }
 }
 }

@@ -395,6 +395,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 kind: _,
 } = *obligation.cause.code()
 {
+debug!("ObligationCauseCode::CompareImplItemObligation");
 return self.report_extra_impl_obligation(
 span,
 impl_item_def_id,

@@ -445,18 +446,21 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 )
 })
 .unwrap_or_default();
-let file_note = file.map(|file| format!(
+let file_note = file.as_ref().map(|file| format!(
 "the full trait has been written to '{}'",
 file.display(),
 ));

+let mut long_ty_file = None;
+
 let OnUnimplementedNote {
 message,
 label,
 notes,
 parent_label,
 append_const_msg,
-} = self.on_unimplemented_note(trait_ref, &obligation);
+} = self.on_unimplemented_note(trait_ref, &obligation, &mut long_ty_file);

 let have_alt_message = message.is_some() || label.is_some();
 let is_try_conversion = self.is_try_conversion(span, trait_ref.def_id());
 let is_unsize =

@@ -511,6 +515,13 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {

 let mut err = struct_span_code_err!(self.dcx(), span, E0277, "{}", err_msg);

+if let Some(long_ty_file) = long_ty_file {
+err.note(format!(
+"the full name for the type has been written to '{}'",
+long_ty_file.display(),
+));
+err.note("consider using `--verbose` to print the full type name to the console");
+}
 let mut suggested = false;
 if is_try_conversion {
 suggested = self.try_conversion_context(&obligation, trait_ref.skip_binder(), &mut err);

@@ -758,6 +769,8 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
 return err.emit();
 }



 err
 }

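The hunks above replace the scattered per-call "type name written to disk" notes with a single `long_ty_file` that is threaded through the error path and reported once at the end. As a rough illustration of that pattern only (this is not rustc's real API; `short_name`, the 40-character limit and the temp-file path are invented for the sketch):

use std::io::Write;
use std::path::PathBuf;

// Illustrative only: shorten an over-long name, spill the full text to disk once,
// and let the caller emit the note a single time after all labels are attached.
fn short_name(full: &str, long_ty_file: &mut Option<PathBuf>) -> String {
    const LIMIT: usize = 40; // hypothetical width limit
    if full.len() <= LIMIT {
        return full.to_string();
    }
    if long_ty_file.is_none() {
        let path = std::env::temp_dir().join("long-type.txt");
        if std::fs::File::create(&path)
            .and_then(|mut f| f.write_all(full.as_bytes()))
            .is_ok()
        {
            *long_ty_file = Some(path);
        }
    }
    format!("{}...", &full[..LIMIT])
}

fn main() {
    let mut long_ty_file: Option<PathBuf> = None;
    let shown = short_name(&"Option<".repeat(30), &mut long_ty_file);
    println!("error[E0277]: the trait is not implemented for `{shown}`");
    if let Some(file) = long_ty_file {
        println!("note: the full name for the type has been written to '{}'", file.display());
        println!("note: consider using `--verbose` to print the full type name to the console");
    }
}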
@@ -180,7 +180,7 @@ pub struct BTreeMap<
 /// `ManuallyDrop` to control drop order (needs to be dropped after all the nodes).
 pub(super) alloc: ManuallyDrop<A>,
 // For dropck; the `Box` avoids making the `Unpin` impl more strict than before
-_marker: PhantomData<crate::boxed::Box<(K, V)>>,
+_marker: PhantomData<crate::boxed::Box<(K, V), A>>,
 }

 #[stable(feature = "btree_drop", since = "1.7.0")]
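For context on this one-line change: naming the allocator parameter in the marker as `Box<(K, V), A>` keeps the drop-check marker tied to the map's own allocator instead of silently referring to the default `Global`. A minimal nightly-only sketch of the same field shape, assuming the unstable `allocator_api` feature (`MyMap` is a hypothetical name, not the real `BTreeMap`):

#![feature(allocator_api)] // nightly-only: `Allocator` and `Box<T, A>` are unstable
#![allow(dead_code)]

use std::alloc::{Allocator, Global};
use std::marker::PhantomData;
use std::mem::ManuallyDrop;

// Hypothetical container with the same field shape as above: mentioning `A` in the
// marker keeps dropck reasoning about the container's own allocator rather than an
// implicit `Global`.
struct MyMap<K, V, A: Allocator = Global> {
    alloc: ManuallyDrop<A>,
    _marker: PhantomData<Box<(K, V), A>>,
}

fn main() {
    let map: MyMap<u32, String> = MyMap { alloc: ManuallyDrop::new(Global), _marker: PhantomData };
    let _ = map;
}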
@@ -5,16 +5,16 @@ use super::*;
 use std::marker::PhantomData;
 use std::sync::atomic::AtomicU32;

-macro_rules! define_handles {
+macro_rules! define_client_handles {
 (
 'owned: $($oty:ident,)*
 'interned: $($ity:ident,)*
 ) => {
 #[repr(C)]
 #[allow(non_snake_case)]
-pub struct HandleCounters {
-$($oty: AtomicU32,)*
-$($ity: AtomicU32,)*
+pub(super) struct HandleCounters {
+$(pub(super) $oty: AtomicU32,)*
+$(pub(super) $ity: AtomicU32,)*
 }

 impl HandleCounters {

@@ -29,22 +29,6 @@ macro_rules! define_handles {
 }
 }

-// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
-#[allow(non_snake_case)]
-pub(super) struct HandleStore<S: server::Types> {
-$($oty: handle::OwnedStore<S::$oty>,)*
-$($ity: handle::InternedStore<S::$ity>,)*
-}
-
-impl<S: server::Types> HandleStore<S> {
-pub(super) fn new(handle_counters: &'static HandleCounters) -> Self {
-HandleStore {
-$($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
-$($ity: handle::InternedStore::new(&handle_counters.$ity),)*
-}
-}
-}

 $(
 pub(crate) struct $oty {
 handle: handle::Handle,

@@ -72,53 +56,18 @@ macro_rules! define_handles {
 }
 }

-impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
-for Marked<S::$oty, $oty>
-{
-fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
-s.$oty.take(handle::Handle::decode(r, &mut ()))
-}
-}
-
 impl<S> Encode<S> for &$oty {
 fn encode(self, w: &mut Writer, s: &mut S) {
 self.handle.encode(w, s);
 }
 }

-impl<'s, S: server::Types> Decode<'_, 's, HandleStore<server::MarkedTypes<S>>>
-for &'s Marked<S::$oty, $oty>
-{
-fn decode(r: &mut Reader<'_>, s: &'s HandleStore<server::MarkedTypes<S>>) -> Self {
-&s.$oty[handle::Handle::decode(r, &mut ())]
-}
-}
-
 impl<S> Encode<S> for &mut $oty {
 fn encode(self, w: &mut Writer, s: &mut S) {
 self.handle.encode(w, s);
 }
 }

-impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore<server::MarkedTypes<S>>>
-for &'s mut Marked<S::$oty, $oty>
-{
-fn decode(
-r: &mut Reader<'_>,
-s: &'s mut HandleStore<server::MarkedTypes<S>>
-) -> Self {
-&mut s.$oty[handle::Handle::decode(r, &mut ())]
-}
-}
-
-impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
-for Marked<S::$oty, $oty>
-{
-fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
-s.$oty.alloc(self).encode(w, s);
-}
-}
-
 impl<S> DecodeMut<'_, '_, S> for $oty {
 fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
 $oty {

@@ -145,22 +94,6 @@ macro_rules! define_handles {
 }
 }

-impl<S: server::Types> DecodeMut<'_, '_, HandleStore<server::MarkedTypes<S>>>
-for Marked<S::$ity, $ity>
-{
-fn decode(r: &mut Reader<'_>, s: &mut HandleStore<server::MarkedTypes<S>>) -> Self {
-s.$ity.copy(handle::Handle::decode(r, &mut ()))
-}
-}
-
-impl<S: server::Types> Encode<HandleStore<server::MarkedTypes<S>>>
-for Marked<S::$ity, $ity>
-{
-fn encode(self, w: &mut Writer, s: &mut HandleStore<server::MarkedTypes<S>>) {
-s.$ity.alloc(self).encode(w, s);
-}
-}
-
 impl<S> DecodeMut<'_, '_, S> for $ity {
 fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
 $ity {

@@ -172,15 +105,7 @@ macro_rules! define_handles {
 )*
 }
 }
-define_handles! {
-'owned:
-FreeFunctions,
-TokenStream,
-SourceFile,
-
-'interned:
-Span,
-}
+with_api_handle_types!(define_client_handles);

 // FIXME(eddyb) generate these impls by pattern-matching on the
 // names of methods - also could use the presence of `fn drop`

@@ -113,6 +113,23 @@ macro_rules! with_api {
 };
 }

+// Similar to `with_api`, but only lists the types requiring handles, and they
+// are divided into the two storage categories.
+macro_rules! with_api_handle_types {
+($m:ident) => {
+$m! {
+'owned:
+FreeFunctions,
+TokenStream,
+SourceFile,
+
+'interned:
+Span,
+// Symbol is handled manually
+}
+};
+}
+
 // FIXME(eddyb) this calls `encode` for each argument, but in reverse,
 // to match the ordering in `reverse_decode`.
 macro_rules! reverse_encode {
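The new `with_api_handle_types!` macro is a "callback" macro: it owns the canonical list of handle types and invokes whichever macro name it is handed, so the client and server sides can each expand the same list into different definitions. A self-contained sketch of that technique with invented names (`with_colors!`, `define_enum!`, `define_names!`):

// One macro owns the canonical list; each consumer macro expands that list into a
// different set of definitions, mirroring how `define_client_handles` and
// `define_server_handles` both receive the list from `with_api_handle_types!`.
macro_rules! with_colors {
    ($m:ident) => {
        $m! {
            Red,
            Green,
            Blue,
        }
    };
}

macro_rules! define_enum {
    ($($name:ident,)*) => {
        #[derive(Debug)]
        #[allow(dead_code)]
        enum Color { $($name,)* }
    };
}

macro_rules! define_names {
    ($($name:ident,)*) => {
        const COLOR_NAMES: &[&str] = &[$(stringify!($name)),*];
    };
}

with_colors!(define_enum);
with_colors!(define_names);

fn main() {
    println!("{:?} is one of {:?}", Color::Green, COLOR_NAMES);
}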
@@ -5,8 +5,79 @@ use super::*;
 use std::cell::Cell;
 use std::marker::PhantomData;

-// FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`.
-use super::client::HandleStore;
+macro_rules! define_server_handles {
+(
+'owned: $($oty:ident,)*
+'interned: $($ity:ident,)*
+) => {
+#[allow(non_snake_case)]
+pub(super) struct HandleStore<S: Types> {
+$($oty: handle::OwnedStore<S::$oty>,)*
+$($ity: handle::InternedStore<S::$ity>,)*
+}
+
+impl<S: Types> HandleStore<S> {
+fn new(handle_counters: &'static client::HandleCounters) -> Self {
+HandleStore {
+$($oty: handle::OwnedStore::new(&handle_counters.$oty),)*
+$($ity: handle::InternedStore::new(&handle_counters.$ity),)*
+}
+}
+}
+
+$(
+impl<S: Types> Encode<HandleStore<MarkedTypes<S>>> for Marked<S::$oty, client::$oty> {
+fn encode(self, w: &mut Writer, s: &mut HandleStore<MarkedTypes<S>>) {
+s.$oty.alloc(self).encode(w, s);
+}
+}
+
+impl<S: Types> DecodeMut<'_, '_, HandleStore<MarkedTypes<S>>>
+for Marked<S::$oty, client::$oty>
+{
+fn decode(r: &mut Reader<'_>, s: &mut HandleStore<MarkedTypes<S>>) -> Self {
+s.$oty.take(handle::Handle::decode(r, &mut ()))
+}
+}
+
+impl<'s, S: Types> Decode<'_, 's, HandleStore<MarkedTypes<S>>>
+for &'s Marked<S::$oty, client::$oty>
+{
+fn decode(r: &mut Reader<'_>, s: &'s HandleStore<MarkedTypes<S>>) -> Self {
+&s.$oty[handle::Handle::decode(r, &mut ())]
+}
+}
+
+impl<'s, S: Types> DecodeMut<'_, 's, HandleStore<MarkedTypes<S>>>
+for &'s mut Marked<S::$oty, client::$oty>
+{
+fn decode(
+r: &mut Reader<'_>,
+s: &'s mut HandleStore<MarkedTypes<S>>
+) -> Self {
+&mut s.$oty[handle::Handle::decode(r, &mut ())]
+}
+}
+)*
+
+$(
+impl<S: Types> Encode<HandleStore<MarkedTypes<S>>> for Marked<S::$ity, client::$ity> {
+fn encode(self, w: &mut Writer, s: &mut HandleStore<MarkedTypes<S>>) {
+s.$ity.alloc(self).encode(w, s);
+}
+}
+
+impl<S: Types> DecodeMut<'_, '_, HandleStore<MarkedTypes<S>>>
+for Marked<S::$ity, client::$ity>
+{
+fn decode(r: &mut Reader<'_>, s: &mut HandleStore<MarkedTypes<S>>) -> Self {
+s.$ity.copy(handle::Handle::decode(r, &mut ()))
+}
+}
+)*
+}
+}
+with_api_handle_types!(define_server_handles);

 pub trait Types {
 type FreeFunctions: 'static;

@@ -109,18 +109,18 @@ impl<S> Encode<S> for Symbol {
 }
 }

-impl<S: server::Server> DecodeMut<'_, '_, client::HandleStore<server::MarkedTypes<S>>>
+impl<S: server::Server> DecodeMut<'_, '_, server::HandleStore<server::MarkedTypes<S>>>
 for Marked<S::Symbol, Symbol>
 {
-fn decode(r: &mut Reader<'_>, s: &mut client::HandleStore<server::MarkedTypes<S>>) -> Self {
+fn decode(r: &mut Reader<'_>, s: &mut server::HandleStore<server::MarkedTypes<S>>) -> Self {
 Mark::mark(S::intern_symbol(<&str>::decode(r, s)))
 }
 }

-impl<S: server::Server> Encode<client::HandleStore<server::MarkedTypes<S>>>
+impl<S: server::Server> Encode<server::HandleStore<server::MarkedTypes<S>>>
 for Marked<S::Symbol, Symbol>
 {
-fn encode(self, w: &mut Writer, s: &mut client::HandleStore<server::MarkedTypes<S>>) {
+fn encode(self, w: &mut Writer, s: &mut server::HandleStore<server::MarkedTypes<S>>) {
 S::with_symbol_string(&self.unmark(), |sym| sym.encode(w, s))
 }
 }

@@ -186,11 +186,8 @@ impl f32 {
 /// let x = 3.5_f32;
 /// let y = -3.5_f32;
 ///
-/// let abs_difference_x = (x.abs() - x).abs();
-/// let abs_difference_y = (y.abs() - (-y)).abs();
-///
-/// assert!(abs_difference_x <= f32::EPSILON);
-/// assert!(abs_difference_y <= f32::EPSILON);
+/// assert_eq!(x.abs(), x);
+/// assert_eq!(y.abs(), -y);
 ///
 /// assert!(f32::NAN.abs().is_nan());
 /// ```

@@ -276,10 +273,17 @@ impl f32 {
 /// let x = 4.0_f32;
 /// let b = 60.0_f32;
 ///
-/// // 100.0
-/// let abs_difference = (m.mul_add(x, b) - ((m * x) + b)).abs();
+/// assert_eq!(m.mul_add(x, b), 100.0);
+/// assert_eq!(m * x + b, 100.0);
 ///
-/// assert!(abs_difference <= f32::EPSILON);
+/// let one_plus_eps = 1.0_f32 + f32::EPSILON;
+/// let one_minus_eps = 1.0_f32 - f32::EPSILON;
+/// let minus_one = -1.0_f32;
+///
+/// // The exact result (1 + eps) * (1 - eps) = 1 - eps * eps.
+/// assert_eq!(one_plus_eps.mul_add(one_minus_eps, minus_one), -f32::EPSILON * f32::EPSILON);
+/// // Different rounding with the non-fused multiply and add.
+/// assert_eq!(one_plus_eps * one_minus_eps + minus_one, 0.0);
 /// ```
 #[rustc_allow_incoherent_impl]
 #[must_use = "method returns a new number and does not mutate the original value"]

@@ -426,9 +430,7 @@ impl f32 {
 /// let negative = -4.0_f32;
 /// let negative_zero = -0.0_f32;
 ///
-/// let abs_difference = (positive.sqrt() - 2.0).abs();
-///
-/// assert!(abs_difference <= f32::EPSILON);
+/// assert_eq!(positive.sqrt(), 2.0);
 /// assert!(negative.sqrt().is_nan());
 /// assert!(negative_zero.sqrt() == negative_zero);
 /// ```

@@ -186,11 +186,8 @@ impl f64 {
 /// let x = 3.5_f64;
 /// let y = -3.5_f64;
 ///
-/// let abs_difference_x = (x.abs() - x).abs();
-/// let abs_difference_y = (y.abs() - (-y)).abs();
-///
-/// assert!(abs_difference_x < 1e-10);
-/// assert!(abs_difference_y < 1e-10);
+/// assert_eq!(x.abs(), x);
+/// assert_eq!(y.abs(), -y);
 ///
 /// assert!(f64::NAN.abs().is_nan());
 /// ```

@@ -276,10 +273,17 @@ impl f64 {
 /// let x = 4.0_f64;
 /// let b = 60.0_f64;
 ///
-/// // 100.0
-/// let abs_difference = (m.mul_add(x, b) - ((m * x) + b)).abs();
+/// assert_eq!(m.mul_add(x, b), 100.0);
+/// assert_eq!(m * x + b, 100.0);
 ///
-/// assert!(abs_difference < 1e-10);
+/// let one_plus_eps = 1.0_f64 + f64::EPSILON;
+/// let one_minus_eps = 1.0_f64 - f64::EPSILON;
+/// let minus_one = -1.0_f64;
+///
+/// // The exact result (1 + eps) * (1 - eps) = 1 - eps * eps.
+/// assert_eq!(one_plus_eps.mul_add(one_minus_eps, minus_one), -f64::EPSILON * f64::EPSILON);
+/// // Different rounding with the non-fused multiply and add.
+/// assert_eq!(one_plus_eps * one_minus_eps + minus_one, 0.0);
 /// ```
 #[rustc_allow_incoherent_impl]
 #[must_use = "method returns a new number and does not mutate the original value"]

@@ -426,9 +430,7 @@ impl f64 {
 /// let negative = -4.0_f64;
 /// let negative_zero = -0.0_f64;
 ///
-/// let abs_difference = (positive.sqrt() - 2.0).abs();
-///
-/// assert!(abs_difference < 1e-10);
+/// assert_eq!(positive.sqrt(), 2.0);
 /// assert!(negative.sqrt().is_nan());
 /// assert!(negative_zero.sqrt() == negative_zero);
 /// ```
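The rewritten examples lean on the guaranteed semantics of these operations: `abs` and `sqrt` return correctly rounded results, so exact comparisons are valid when the true answer is representable, and `mul_add` rounds only once, which is exactly why the fused and unfused forms can differ. A small standalone check of the same identities used in the doc examples above:

// Standalone check of the identities the new doc examples rely on.
fn main() {
    // `abs` and `sqrt` are exact when the true result is representable.
    assert_eq!((-3.5_f64).abs(), 3.5);
    assert_eq!(4.0_f64.sqrt(), 2.0);

    let one_plus_eps = 1.0_f64 + f64::EPSILON;
    let one_minus_eps = 1.0_f64 - f64::EPSILON;
    let minus_one = -1.0_f64;

    // Fused multiply-add rounds once, so the tiny eps*eps term survives...
    assert_eq!(one_plus_eps.mul_add(one_minus_eps, minus_one), -f64::EPSILON * f64::EPSILON);
    // ...while the separate multiply rounds it away before the addition.
    assert_eq!(one_plus_eps * one_minus_eps + minus_one, 0.0);

    println!("all identities hold");
}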
@@ -1476,11 +1476,10 @@ impl Permissions {
 /// On Unix-based platforms this checks if *any* of the owner, group or others
 /// write permission bits are set. It does not check if the current
 /// user is in the file's assigned group. It also does not check ACLs.
-/// Therefore even if this returns true you may not be able to write to the
-/// file, and vice versa. The [`PermissionsExt`] trait gives direct access
-/// to the permission bits but also does not read ACLs. If you need to
-/// accurately know whether or not a file is writable use the `access()`
-/// function from libc.
+/// Therefore the return value of this function cannot be relied upon
+/// to predict whether attempts to read or write the file will actually succeed.
+/// The [`PermissionsExt`] trait gives direct access to the permission bits but
+/// also does not read ACLs.
 ///
 /// [`PermissionsExt`]: crate::os::unix::fs::PermissionsExt
 ///
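A brief usage sketch of the clarified caveat: `readonly()` only inspects the permission bits, so treat its result as a hint rather than a guarantee that a read or write will succeed (the `Cargo.toml` path below is just a placeholder):

use std::fs;

fn main() -> std::io::Result<()> {
    // "Cargo.toml" is a placeholder path for the sketch.
    let metadata = fs::metadata("Cargo.toml")?;
    let permissions = metadata.permissions();
    if permissions.readonly() {
        println!("no write permission bits are set; a write would likely fail");
    } else {
        println!("some write bit is set; a write may still fail (ACLs, ownership, read-only mounts)");
    }
    Ok(())
}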
@@ -2,7 +2,7 @@ error[E0277]: the trait bound `T: !Copy` is not satisfied
 --> $DIR/simple.rs:10:16
 |
 LL | not_copy::<T>();
-| ^ the trait `!Copy` is not implemented for `T`
+| ^ the trait bound `T: !Copy` is not satisfied
 |
 note: required by a bound in `not_copy`
 --> $DIR/simple.rs:3:16

@@ -14,7 +14,7 @@ error[E0277]: the trait bound `T: !Copy` is not satisfied
 --> $DIR/simple.rs:15:16
 |
 LL | not_copy::<T>();
-| ^ the trait `!Copy` is not implemented for `T`
+| ^ the trait bound `T: !Copy` is not satisfied
 |
 note: required by a bound in `not_copy`
 --> $DIR/simple.rs:3:16

@@ -26,7 +26,7 @@ error[E0277]: the trait bound `Copyable: !Copy` is not satisfied
 --> $DIR/simple.rs:30:16
 |
 LL | not_copy::<Copyable>();
-| ^^^^^^^^ the trait `!Copy` is not implemented for `Copyable`
+| ^^^^^^^^ the trait bound `Copyable: !Copy` is not satisfied
 |
 = help: the trait `Copy` is implemented for `Copyable`
 note: required by a bound in `not_copy`

@@ -44,7 +44,7 @@ error[E0277]: the trait bound `NotNecessarilyCopyable: !Copy` is not satisfied
 --> $DIR/simple.rs:37:16
 |
 LL | not_copy::<NotNecessarilyCopyable>();
-| ^^^^^^^^^^^^^^^^^^^^^^ the trait `!Copy` is not implemented for `NotNecessarilyCopyable`
+| ^^^^^^^^^^^^^^^^^^^^^^ the trait bound `NotNecessarilyCopyable: !Copy` is not satisfied
 |
 note: required by a bound in `not_copy`
 --> $DIR/simple.rs:3:16
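These expectations come from the negative-bounds UI test; the message now names the unmet `T: !Copy` bound instead of claiming `!Copy` is "not implemented". A rough nightly-only sketch of the code shape involved, assuming the unstable `negative_bounds` and `negative_impls` features (both incomplete, so exact behavior may change):

#![feature(negative_bounds, negative_impls)] // incomplete, nightly-only features
#![allow(dead_code)]

struct NotCopyable;
impl !Copy for NotCopyable {} // explicit promise that `NotCopyable` will never be `Copy`

// Accepts only types whose `!Copy` bound can be proven.
fn not_copy<T: !Copy>(_value: T) {}

fn main() {
    // not_copy(5_u32);
    // ^ would report: error[E0277]: the trait bound `u32: !Copy` is not satisfied
}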
tests/ui/traits/on_unimplemented_long_types.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
+//@ compile-flags: --diagnostic-width=60 -Z write-long-types-to-disk=yes
+//@ normalize-stderr-test: "long-type-\d+" -> "long-type-hash"
+
+pub fn foo() -> impl std::fmt::Display {
+//~^ ERROR doesn't implement `std::fmt::Display`
+Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(
+Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(
+Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(
+Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(Some(
+Some(Some(Some(Some(Some(Some(Some(Some(())))))))),
+))))))))))),
+))))))))))),
+))))))))))),
+)))))))))))
+}
+
+fn main() {}

tests/ui/traits/on_unimplemented_long_types.stderr (new file, 25 lines)
@@ -0,0 +1,25 @@
+error[E0277]: `Option<Option<Option<...>>>` doesn't implement `std::fmt::Display`
+--> $DIR/on_unimplemented_long_types.rs:4:17
+|
+LL | pub fn foo() -> impl std::fmt::Display {
+| ^^^^^^^^^^^^^^^^^^^^^^ `Option<Option<Option<...>>>` cannot be formatted with the default formatter
+LL |
+LL | / Some(Some(Some(Some(Some(Some(Some(Some(Some(S...
+LL | | Some(Some(Some(Some(Some(Some(Some(Some(So...
+LL | | Some(Some(Some(Some(Some(Some(Some(Som...
+LL | | Some(Some(Some(Some(Some(Some(Some...
+... |
+LL | | ))))))))))),
+LL | | )))))))))))
+| |_______________- return type was inferred to be `Option<Option<Option<...>>>` here
+|
+= note: the full name for the type has been written to '$TEST_BUILD_DIR/traits/on_unimplemented_long_types/on_unimplemented_long_types.long-type-hash.txt'
+= note: consider using `--verbose` to print the full type name to the console
+= help: the trait `std::fmt::Display` is not implemented for `Option<Option<Option<...>>>`
+= note: in format strings you may be able to use `{:?}` (or {:#?} for pretty-print) instead
+= note: the full name for the type has been written to '$TEST_BUILD_DIR/traits/on_unimplemented_long_types/on_unimplemented_long_types.long-type-hash.txt'
+= note: consider using `--verbose` to print the full type name to the console
+
+error: aborting due to 1 previous error
+
+For more information about this error, try `rustc --explain E0277`.