Auto merge of #108747 - matthiaskrgr:rollup-wfc7fx4, r=matthiaskrgr
Rollup of 7 pull requests

Successful merges:

 - #108627 (Properly colorize multi-part suggestions in the same line)
 - #108632 (Omit unchanged options from config.toml in `configure.py`)
 - #108715 (Remove unclosed_delims from parser)
 - #108723 (rustdoc: function signature search with traits in `where` clause)
 - #108724 (field is not used outside the crate)
 - #108734 (rustdoc: Note in a type's layout/size if it is uninhabited)
 - #108736 (Remove `allow(potential_query_instability)` from `ast_passes`)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
commit bb1838847d
@@ -12,7 +12,7 @@ use rustc_ast::visit::{self, AssocCtxt, BoundKind, FnCtxt, FnKind, Visitor};
 use rustc_ast::walk_list;
 use rustc_ast::*;
 use rustc_ast_pretty::pprust::{self, State};
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_macros::Subdiagnostic;
 use rustc_parse::validate_attr;
 use rustc_session::lint::builtin::{
@@ -643,7 +643,7 @@ fn validate_generic_param_order(
     span: Span,
 ) {
     let mut max_param: Option<ParamKindOrd> = None;
-    let mut out_of_order = FxHashMap::default();
+    let mut out_of_order = FxIndexMap::default();
     let mut param_idents = Vec::with_capacity(generics.len());

     for (idx, param) in generics.iter().enumerate() {
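
For context on the `FxHashMap` -> `FxIndexMap` swap above: a hash map iterates in an arbitrary, hasher-dependent order, while an index map yields entries in insertion order, which is what keeps the out-of-order diagnostics deterministic without the `potential_query_instability` allow. A minimal stand-alone sketch of the difference, using the `indexmap` crate directly rather than rustc's internal `FxIndexMap` alias:

    use indexmap::IndexMap; // assumes the `indexmap` crate as a dependency
    use std::collections::HashMap;

    fn main() {
        let params = ["'a", "T", "N"]; // lifetime, type, const -- the expected order

        let mut hashed = HashMap::new();
        let mut indexed = IndexMap::new();
        for (i, p) in params.iter().enumerate() {
            hashed.insert(*p, i);
            indexed.insert(*p, i);
        }

        // HashMap iteration order depends on the hasher, not on insertion order.
        println!("hashed:  {:?}", hashed.keys().collect::<Vec<_>>());
        // IndexMap always iterates in insertion order: 'a, T, N.
        println!("indexed: {:?}", indexed.keys().collect::<Vec<_>>());
    }
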
@@ -4,7 +4,6 @@
 //!
 //! The crate also contains other misc AST visitors, e.g. `node_count` and `show_span`.

-#![allow(rustc::potential_query_instability)]
 #![feature(box_patterns)]
 #![feature(if_let_guard)]
 #![feature(iter_is_partitioned)]
@@ -1895,7 +1895,7 @@ impl EmitterWriter {
 self.draw_code_line(
     &mut buffer,
     &mut row_num,
-    &Vec::new(),
+    &[],
     p + line_start,
     l,
     show_code_change,
@@ -1919,7 +1919,7 @@ impl EmitterWriter {
 self.draw_code_line(
     &mut buffer,
     &mut row_num,
-    &Vec::new(),
+    &[],
     p + line_start,
     l,
     show_code_change,
@@ -1936,7 +1936,7 @@ impl EmitterWriter {
 self.draw_code_line(
     &mut buffer,
     &mut row_num,
-    &Vec::new(),
+    &[],
     p + line_start,
     l,
     show_code_change,
@@ -1951,7 +1951,7 @@ impl EmitterWriter {
 self.draw_code_line(
     &mut buffer,
     &mut row_num,
-    highlight_parts,
+    &highlight_parts,
     line_pos + line_start,
     line,
     show_code_change,
@@ -2176,7 +2176,7 @@ impl EmitterWriter {
     &self,
     buffer: &mut StyledBuffer,
     row_num: &mut usize,
-    highlight_parts: &Vec<SubstitutionHighlight>,
+    highlight_parts: &[SubstitutionHighlight],
     line_num: usize,
     line_to_add: &str,
     show_code_change: DisplaySuggestion,
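
The signature change in this hunk follows the usual guideline that `&[T]` is a more general parameter type than `&Vec<T>`: it accepts vectors, arrays, and sub-slices alike, which is what lets the call sites above pass a literal `&[]` instead of allocating with `&Vec::new()`. A small self-contained illustration (the function name is made up for the example):

    // Accepts any contiguous sequence of i32s, not just a Vec.
    fn sum(values: &[i32]) -> i32 {
        values.iter().sum()
    }

    fn main() {
        let v = vec![1, 2, 3];
        assert_eq!(sum(&v), 6);      // &Vec<i32> coerces to &[i32]
        assert_eq!(sum(&[4, 5]), 9); // arrays work too
        assert_eq!(sum(&[]), 0);     // and the empty slice, as in the hunks above
        println!("ok");
    }
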
@@ -331,7 +331,7 @@ impl CodeSuggestion {
     });
     buf.push_str(&part.snippet);
     let cur_hi = sm.lookup_char_pos(part.span.hi());
-    if prev_hi.line == cur_lo.line && cur_hi.line == cur_lo.line {
+    if cur_hi.line == cur_lo.line {
         // Account for the difference between the width of the current code and the
         // snippet being suggested, so that the *later* suggestions are correctly
         // aligned on the screen.
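
The condition being simplified here guards the same-line bookkeeping: when a substitution changes the length of text on a line, every later column on that line shifts by the difference between the new and old widths, and that offset is what keeps subsequent suggestion spans aligned. A toy illustration of that bookkeeping, independent of the emitter's actual data structures:

    fn main() {
        let original = "let x = foo(a, b);";
        // Replace "foo" (byte columns 8..11) with a longer name.
        let (start, end, replacement) = (8usize, 11usize, "frobnicate");

        let mut patched = String::new();
        patched.push_str(&original[..start]);
        patched.push_str(replacement);
        patched.push_str(&original[end..]);

        // Later spans on the same line must shift by the width difference.
        let shift = replacement.len() as isize - (end - start) as isize;
        let old_col_of_b = original.find(", b").unwrap();
        let new_col_of_b = (old_col_of_b as isize + shift) as usize;
        assert_eq!(&patched[new_col_of_b..new_col_of_b + 3], ", b");
        println!("{patched}");
    }
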
@@ -94,10 +94,10 @@ pub(crate) type UnificationTable<'a, 'tcx, T> = ut::UnificationTable<
 /// call to `start_snapshot` and `rollback_to`.
 #[derive(Clone)]
 pub struct InferCtxtInner<'tcx> {
-    /// Cache for projections. This cache is snapshotted along with the infcx.
+    /// Cache for projections.
     ///
-    /// Public so that `traits::project` can use it.
-    pub projection_cache: traits::ProjectionCacheStorage<'tcx>,
+    /// This cache is snapshotted along with the infcx.
+    projection_cache: traits::ProjectionCacheStorage<'tcx>,

     /// We instantiate `UnificationTable` with `bounds<Ty>` because the types
     /// that might instantiate a general type variable have an order,
@@ -19,7 +19,6 @@ use crate::errors::{
 };

 use crate::fluent_generated as fluent;
-use crate::lexer::UnmatchedDelim;
 use crate::parser;
 use rustc_ast as ast;
 use rustc_ast::ptr::P;
@@ -220,7 +219,6 @@ impl MultiSugg {
 /// is dropped.
 pub struct SnapshotParser<'a> {
     parser: Parser<'a>,
-    unclosed_delims: Vec<UnmatchedDelim>,
 }

 impl<'a> Deref for SnapshotParser<'a> {
@@ -255,27 +253,15 @@ impl<'a> Parser<'a> {
         &self.sess.span_diagnostic
     }

-    /// Replace `self` with `snapshot.parser` and extend `unclosed_delims` with `snapshot.unclosed_delims`.
-    /// This is to avoid losing unclosed delims errors `create_snapshot_for_diagnostic` clears.
+    /// Replace `self` with `snapshot.parser`.
     pub(super) fn restore_snapshot(&mut self, snapshot: SnapshotParser<'a>) {
         *self = snapshot.parser;
-        self.unclosed_delims.extend(snapshot.unclosed_delims);
     }

-    pub fn unclosed_delims(&self) -> &[UnmatchedDelim] {
-        &self.unclosed_delims
-    }
-
     /// Create a snapshot of the `Parser`.
     pub fn create_snapshot_for_diagnostic(&self) -> SnapshotParser<'a> {
-        let mut snapshot = self.clone();
-        let unclosed_delims = self.unclosed_delims.clone();
-        // Clear `unclosed_delims` in snapshot to avoid
-        // duplicate errors being emitted when the `Parser`
-        // is dropped (which may or may not happen, depending
-        // if the parsing the snapshot is created for is successful)
-        snapshot.unclosed_delims.clear();
-        SnapshotParser { parser: snapshot, unclosed_delims }
+        let snapshot = self.clone();
+        SnapshotParser { parser: snapshot }
     }

     pub(super) fn span_to_snippet(&self, span: Span) -> Result<String, SpanSnippetError> {
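
For readers unfamiliar with the pattern touched here: `create_snapshot_for_diagnostic` clones the whole parser so that error-recovery code can parse speculatively on the clone, and `restore_snapshot` adopts the clone's state only if the speculation pans out; the deleted lines were the extra `unclosed_delims` bookkeeping that the cloning used to require. A stripped-down sketch of the same idea on a toy cursor type, not the real `Parser`:

    #[derive(Clone)]
    struct Cursor {
        tokens: Vec<char>,
        pos: usize,
    }

    impl Cursor {
        // Clone the current state so we can parse speculatively.
        fn snapshot(&self) -> Cursor {
            self.clone()
        }

        // Adopt the state of a snapshot that made progress.
        fn restore(&mut self, snapshot: Cursor) {
            *self = snapshot;
        }

        fn eat(&mut self, expected: char) -> bool {
            if self.tokens.get(self.pos) == Some(&expected) {
                self.pos += 1;
                true
            } else {
                false
            }
        }
    }

    fn main() {
        let mut parser = Cursor { tokens: "(a)".chars().collect(), pos: 0 };

        // Try a speculative parse on a clone; adopt the clone only if it succeeds.
        let mut snapshot = parser.snapshot();
        if snapshot.eat('(') && snapshot.eat('b') {
            parser.restore(snapshot); // commit the speculative progress
        }
        assert_eq!(parser.pos, 0); // the failed attempt left `parser` untouched
        println!("speculation failed, parser state preserved");
    }
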
@@ -579,21 +565,6 @@ impl<'a> Parser<'a> {
         } else {
             label_sp
         };
-        match self.recover_closing_delimiter(
-            &expected
-                .iter()
-                .filter_map(|tt| match tt {
-                    TokenType::Token(t) => Some(t.clone()),
-                    _ => None,
-                })
-                .collect::<Vec<_>>(),
-            err,
-        ) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }

         if self.check_too_many_raw_str_terminators(&mut err) {
             if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) {
@@ -1573,12 +1544,6 @@ impl<'a> Parser<'a> {
         );
         let mut err = self.struct_span_err(sp, &msg);
         let label_exp = format!("expected `{token_str}`");
-        match self.recover_closing_delimiter(&[t.clone()], err) {
-            Err(e) => err = e,
-            Ok(recovered) => {
-                return Ok(recovered);
-            }
-        }
         let sm = self.sess.source_map();
         if !sm.is_multiline(prev_sp.until(sp)) {
             // When the spans are in the same line, it means that the only content
@@ -1795,81 +1760,6 @@ impl<'a> Parser<'a> {
         }
     }

-    pub(super) fn recover_closing_delimiter(
-        &mut self,
-        tokens: &[TokenKind],
-        mut err: DiagnosticBuilder<'a, ErrorGuaranteed>,
-    ) -> PResult<'a, bool> {
-        let mut pos = None;
-        // We want to use the last closing delim that would apply.
-        for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
-            if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
-                && Some(self.token.span) > unmatched.unclosed_span
-            {
-                pos = Some(i);
-            }
-        }
-        match pos {
-            Some(pos) => {
-                // Recover and assume that the detected unclosed delimiter was meant for
-                // this location. Emit the diagnostic and act as if the delimiter was
-                // present for the parser's sake.
-
-                // Don't attempt to recover from this unclosed delimiter more than once.
-                let unmatched = self.unclosed_delims.remove(pos);
-                let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
-                if unmatched.found_delim.is_none() {
-                    // We encountered `Eof`, set this fact here to avoid complaining about missing
-                    // `fn main()` when we found place to suggest the closing brace.
-                    *self.sess.reached_eof.borrow_mut() = true;
-                }
-
-                // We want to suggest the inclusion of the closing delimiter where it makes
-                // the most sense, which is immediately after the last token:
-                //
-                //  {foo(bar {}}
-                //      ^      ^
-                //      |      |
-                //      |      help: `)` may belong here
-                //      |
-                //      unclosed delimiter
-                if let Some(sp) = unmatched.unclosed_span {
-                    let mut primary_span: Vec<Span> =
-                        err.span.primary_spans().iter().cloned().collect();
-                    primary_span.push(sp);
-                    let mut primary_span: MultiSpan = primary_span.into();
-                    for span_label in err.span.span_labels() {
-                        if let Some(label) = span_label.label {
-                            primary_span.push_span_label(span_label.span, label);
-                        }
-                    }
-                    err.set_span(primary_span);
-                    err.span_label(sp, "unclosed delimiter");
-                }
-                // Backticks should be removed to apply suggestions.
-                let mut delim = delim.to_string();
-                delim.retain(|c| c != '`');
-                err.span_suggestion_short(
-                    self.prev_token.span.shrink_to_hi(),
-                    &format!("`{delim}` may belong here"),
-                    delim,
-                    Applicability::MaybeIncorrect,
-                );
-                if unmatched.found_delim.is_none() {
-                    // Encountered `Eof` when lexing blocks. Do not recover here to avoid knockdown
-                    // errors which would be emitted elsewhere in the parser and let other error
-                    // recovery consume the rest of the file.
-                    Err(err)
-                } else {
-                    err.emit();
-                    self.expected_tokens.clear(); // Reduce the number of errors.
-                    Ok(true)
-                }
-            }
-            _ => Err(err),
-        }
-    }
-
     /// Eats tokens until we can be relatively sure we reached the end of the
     /// statement. This is something of a best-effort heuristic.
     ///
@@ -1394,19 +1394,6 @@ impl<'a> Parser<'a> {
             self.parse_expr_let()
         } else if self.eat_keyword(kw::Underscore) {
             Ok(self.mk_expr(self.prev_token.span, ExprKind::Underscore))
-        } else if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
-            // Don't complain about bare semicolons after unclosed braces
-            // recovery in order to keep the error count down. Fixing the
-            // delimiters will possibly also fix the bare semicolon found in
-            // expression context. For example, silence the following error:
-            //
-            //     error: expected expression, found `;`
-            //      --> file.rs:2:13
-            //       |
-            //     2 | foo(bar(;
-            //       |         ^ expected expression
-            self.bump();
-            Ok(self.mk_expr_err(self.token.span))
         } else if self.token.uninterpolated_span().rust_2018() {
             // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
             if self.check_keyword(kw::Async) {
@@ -125,16 +125,13 @@ impl<'a> Parser<'a> {
             return Ok(Some(item.into_inner()));
         };

-        let mut unclosed_delims = vec![];
         let item =
             self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
                 let item =
                     this.parse_item_common_(attrs, mac_allowed, attrs_allowed, fn_parse_mode);
-                unclosed_delims.append(&mut this.unclosed_delims);
                 Ok((item?, TrailingToken::None))
             })?;

-        self.unclosed_delims.append(&mut unclosed_delims);
         Ok(item)
     }

@@ -1960,21 +1957,12 @@ impl<'a> Parser<'a> {
         // FIXME: This will make us not emit the help even for declarative
         // macros within the same crate (that we can fix), which is sad.
         if !span.from_expansion() {
-            if self.unclosed_delims.is_empty() {
-                let DelimSpan { open, close } = args.dspan;
-                err.multipart_suggestion(
-                    "change the delimiters to curly braces",
-                    vec![(open, "{".to_string()), (close, '}'.to_string())],
-                    Applicability::MaybeIncorrect,
-                );
-            } else {
-                err.span_suggestion(
-                    span,
-                    "change the delimiters to curly braces",
-                    " { /* items */ }",
-                    Applicability::HasPlaceholders,
-                );
-            }
+            let DelimSpan { open, close } = args.dspan;
+            err.multipart_suggestion(
+                "change the delimiters to curly braces",
+                vec![(open, "{".to_string()), (close, '}'.to_string())],
+                Applicability::MaybeIncorrect,
+            );
             err.span_suggestion(
                 span.shrink_to_hi(),
                 "add a semicolon",
@@ -146,10 +146,7 @@ pub struct Parser<'a> {
     /// See the comments in the `parse_path_segment` function for more details.
     unmatched_angle_bracket_count: u32,
     max_angle_bracket_count: u32,
-    /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
-    /// it gets removed from here. Every entry left at the end gets emitted as an independent
-    /// error.
-    pub(super) unclosed_delims: Vec<UnmatchedDelim>,

     last_unexpected_token_span: Option<Span>,
     /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
     /// looked like it could have been a mistyped path or literal `Option:Some(42)`).
@@ -168,7 +165,7 @@ pub struct Parser<'a> {
 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
 // it doesn't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(Parser<'_>, 312);
+rustc_data_structures::static_assert_size!(Parser<'_>, 288);

 /// Stores span information about a closure.
 #[derive(Clone)]
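
`static_assert_size!` is an internal `rustc_data_structures` macro that turns a size regression into a compile error; the hunk records that dropping `unclosed_delims` shrank `Parser` from 312 to 288 bytes on 64-bit targets. Outside rustc the same guard can be written as a plain const assertion; the struct and size below are illustrative, not the real `Parser`:

    use std::mem::size_of;

    // A stand-in for a type whose size we want to keep under control.
    #[allow(dead_code)]
    struct Parser {
        buf: Vec<u8>, // 24 bytes on a 64-bit target
        pos: usize,   // 8 bytes
        depth: u32,   // 4 bytes (+ 4 bytes padding)
    }

    // Evaluated at compile time; the build fails if the size ever drifts.
    #[cfg(target_pointer_width = "64")]
    const _: () = assert!(size_of::<Parser>() == 40);

    fn main() {
        println!("Parser is {} bytes", size_of::<Parser>());
    }
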
@@ -215,12 +212,6 @@ struct CaptureState {
     inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
 }

-impl<'a> Drop for Parser<'a> {
-    fn drop(&mut self) {
-        emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
-    }
-}
-
 /// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
 /// use this type to emit them as a linear sequence. But a linear sequence is
@@ -478,7 +469,6 @@ impl<'a> Parser<'a> {
             desugar_doc_comments,
             unmatched_angle_bracket_count: 0,
             max_angle_bracket_count: 0,
-            unclosed_delims: Vec::new(),
             last_unexpected_token_span: None,
             last_type_ascription: None,
             subparser_name,
@@ -859,7 +849,6 @@ impl<'a> Parser<'a> {
         let mut recovered = false;
         let mut trailing = false;
         let mut v = ThinVec::new();
-        let unclosed_delims = !self.unclosed_delims.is_empty();

         while !self.expect_any_with_type(kets, expect) {
             if let token::CloseDelim(..) | token::Eof = self.token.kind {
@@ -901,7 +890,7 @@ impl<'a> Parser<'a> {
                 _ => {
                     // Attempt to keep parsing if it was a similar separator.
                     if let Some(tokens) = t.similar_tokens() {
-                        if tokens.contains(&self.token.kind) && !unclosed_delims {
+                        if tokens.contains(&self.token.kind) {
                             self.bump();
                         }
                     }
@@ -164,6 +164,7 @@ changelog-seen = 2
 # General build configuration options
 # =============================================================================
 [build]

 # The default stage to use for the `check` subcommand
 #check-stage = 0

@@ -488,6 +488,22 @@ for section_key, section_config in config.items():
     else:
         configure_section(sections[section_key], section_config)

+def write_uncommented(target, f):
+    block = []
+    is_comment = True
+
+    for line in target:
+        block.append(line)
+        if len(line) == 0:
+            if not is_comment:
+                for l in block:
+                    f.write(l + "\n")
+            block = []
+            is_comment = True
+            continue
+        is_comment = is_comment and line.startswith('#')
+    return f
+
 # Now that we've built up our `config.toml`, write it all out in the same
 # order that we read it in.
 p("")
@@ -496,11 +512,9 @@ with bootstrap.output('config.toml') as f:
     for section in section_order:
         if section == 'target':
             for target in targets:
-                for line in targets[target]:
-                    f.write(line + "\n")
+                f = write_uncommented(targets[target], f)
         else:
-            for line in sections[section]:
-                f.write(line + "\n")
+            f = write_uncommented(sections[section], f)

 with bootstrap.output('Makefile') as f:
     contents = os.path.join(rust_dir, 'src', 'bootstrap', 'mk', 'Makefile.in')
@@ -1839,6 +1839,12 @@ fn document_type_layout(w: &mut Buffer, cx: &Context<'_>, ty_def_id: DefId) {
     } else {
         let size = layout.size.bytes() - tag_size;
         write!(w, "{size} byte{pl}", pl = if size == 1 { "" } else { "s" },);
+        if layout.abi.is_uninhabited() {
+            write!(
+                w,
+                " (<a href=\"https://doc.rust-lang.org/stable/reference/glossary.html#uninhabited\">uninhabited</a>)"
+            );
+        }
     }
 }

@@ -7,9 +7,7 @@ use rustc_span::symbol::Symbol;
 use serde::ser::{Serialize, SerializeStruct, Serializer};

 use crate::clean;
-use crate::clean::types::{
-    FnRetTy, Function, GenericBound, Generics, ItemId, Type, WherePredicate,
-};
+use crate::clean::types::{FnRetTy, Function, Generics, ItemId, Type, WherePredicate};
 use crate::formats::cache::{Cache, OrphanImplItem};
 use crate::formats::item_type::ItemType;
 use crate::html::format::join_with_double_colon;
@@ -482,29 +480,23 @@ fn add_generics_and_bounds_as_types<'tcx, 'a>(
     if let Type::Generic(arg_s) = *arg {
         // First we check if the bounds are in a `where` predicate...
         if let Some(where_pred) = generics.where_predicates.iter().find(|g| match g {
-            WherePredicate::BoundPredicate { ty, .. } => ty.def_id(cache) == arg.def_id(cache),
+            WherePredicate::BoundPredicate { ty: Type::Generic(ty_s), .. } => *ty_s == arg_s,
             _ => false,
         }) {
             let mut ty_generics = Vec::new();
             let bounds = where_pred.get_bounds().unwrap_or_else(|| &[]);
             for bound in bounds.iter() {
-                if let GenericBound::TraitBound(poly_trait, _) = bound {
-                    for param_def in poly_trait.generic_params.iter() {
-                        match &param_def.kind {
-                            clean::GenericParamDefKind::Type { default: Some(ty), .. } => {
-                                add_generics_and_bounds_as_types(
-                                    self_,
-                                    generics,
-                                    ty,
-                                    tcx,
-                                    recurse + 1,
-                                    &mut ty_generics,
-                                    cache,
-                                )
-                            }
-                            _ => {}
-                        }
-                    }
+                if let Some(path) = bound.get_trait_path() {
+                    let ty = Type::Path { path };
+                    add_generics_and_bounds_as_types(
+                        self_,
+                        generics,
+                        &ty,
+                        tcx,
+                        recurse + 1,
+                        &mut ty_generics,
+                        cache,
+                    );
                 }
             }
             insert_ty(res, arg.clone(), ty_generics);
tests/rustdoc-js-std/option-type-signatures.js (new file, 7 lines)
@@ -0,0 +1,7 @@
+const QUERY = 'option, fnonce -> option';
+
+const EXPECTED = {
+    'others': [
+        { 'path': 'std::option::Option', 'name': 'map' },
+    ],
+};
tests/rustdoc-js/where-clause.js (new file, 19 lines)
@@ -0,0 +1,19 @@
+const QUERY = ['trait<nested>', '-> trait<nested>', 't1, t2'];
+
+const EXPECTED = [
+    {
+        'in_args': [
+            { 'path': 'where_clause', 'name': 'abracadabra' },
+        ],
+    },
+    {
+        'others': [
+            { 'path': 'where_clause', 'name': 'alacazam' },
+        ],
+    },
+    {
+        'others': [
+            { 'path': 'where_clause', 'name': 'presto' },
+        ],
+    },
+];
tests/rustdoc-js/where-clause.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
+pub struct Nested;
+
+pub trait Trait<T> {
+    fn thank_you(x: T);
+}
+
+pub fn abracadabra<X>(_: X) where X: Trait<Nested> {}
+
+pub fn alacazam<X>() -> X where X: Trait<Nested> {}
+
+pub trait T1 {}
+pub trait T2<'a, T> {
+    fn please(_: &'a T);
+}
+
+pub fn presto<A, B>(_: A, _: B) where A: T1, B: for <'b> T2<'b, Nested> {}
@@ -83,3 +83,11 @@ pub enum WithNiche {
     None,
     Some(std::num::NonZeroU32),
 }
+
+// @hasraw type_layout/enum.Uninhabited.html 'Size: '
+// @hasraw - '0 bytes (<a href="https://doc.rust-lang.org/stable/reference/glossary.html#uninhabited">uninhabited</a>)'
+pub enum Uninhabited {}
+
+// @hasraw type_layout/struct.Uninhabited2.html 'Size: '
+// @hasraw - '8 bytes (<a href="https://doc.rust-lang.org/stable/reference/glossary.html#uninhabited">uninhabited</a>)'
+pub struct Uninhabited2(std::convert::Infallible, u64);
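
The two new test types rely on the fact that an uninhabited type has no values yet still has a computed layout: an empty enum is zero-sized, and a struct pairing `std::convert::Infallible` with a `u64` still occupies 8 bytes, which is exactly what the `@hasraw` checks above expect. A stand-alone check of those sizes, assuming a typical 64-bit target:

    use std::convert::Infallible;
    use std::mem::size_of;

    #[allow(dead_code)]
    enum Uninhabited {}

    #[allow(dead_code)]
    struct Uninhabited2(Infallible, u64);

    fn main() {
        // No value of either type can ever exist, but both still have a size.
        assert_eq!(size_of::<Uninhabited>(), 0);
        assert_eq!(size_of::<Uninhabited2>(), 8);
        println!("layout checks pass");
    }
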
tests/ui/suggestions/multiline-multipart-suggestion.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
+// compile-flags: --error-format=human --color=always
+// ignore-windows
+
+fn short(foo_bar: &Vec<&i32>) -> &i32 { //~ ERROR missing lifetime specifier
+    &12
+}
+
+fn long( //~ ERROR missing lifetime specifier
+    foo_bar: &Vec<&i32>,
+    something_very_long_so_that_the_line_will_wrap_around__________: i32,
+) -> &i32 {
+    &12
+}
+
+fn long2( //~ ERROR missing lifetime specifier
+    foo_bar: &Vec<&i32>) -> &i32 {
+    &12
+}
+fn main() {}
tests/ui/suggestions/multiline-multipart-suggestion.stderr (new file, 46 lines; ANSI color escapes stripped for readability)
@@ -0,0 +1,46 @@
+error[E0106]: missing lifetime specifier
+  --> $DIR/multiline-multipart-suggestion.rs:4:34
+   |
+LL | fn short(foo_bar: &Vec<&i32>) -> &i32 {
+   |                   ----------     ^ expected named lifetime parameter
+   |
+   = help: this function's return type contains a borrowed value, but the signature does not say which one of `foo_bar`'s 2 lifetimes it is borrowed from
+help: consider introducing a named lifetime parameter
+   |
+LL | fn short<'a>(foo_bar: &'a Vec<&'a i32>) -> &'a i32 {
+   |         ++++           ++      ++          ++
+
+error[E0106]: missing lifetime specifier
+  --> $DIR/multiline-multipart-suggestion.rs:11:6
+   |
+LL |     foo_bar: &Vec<&i32>,
+   |              ----------
+LL |     something_very_long_so_that_the_line_will_wrap_around__________: i32,
+LL | ) -> &i32 {
+   |      ^ expected named lifetime parameter
+   |
+   = help: this function's return type contains a borrowed value, but the signature does not say which one of `foo_bar`'s 2 lifetimes it is borrowed from
+help: consider introducing a named lifetime parameter
+   |
+LL ~ fn long<'a>(
+LL ~     foo_bar: &'a Vec<&'a i32>,
+LL |     something_very_long_so_that_the_line_will_wrap_around__________: i32,
+LL ~ ) -> &'a i32 {
+   |
+
+error[E0106]: missing lifetime specifier
+  --> $DIR/multiline-multipart-suggestion.rs:16:29
+   |
+LL |     foo_bar: &Vec<&i32>) -> &i32 {
+   |              ----------     ^ expected named lifetime parameter
+   |
+   = help: this function's return type contains a borrowed value, but the signature does not say which one of `foo_bar`'s 2 lifetimes it is borrowed from
+help: consider introducing a named lifetime parameter
+   |
+LL ~ fn long2<'a>(
+LL ~     foo_bar: &'a Vec<&'a i32>) -> &'a i32 {
+   |
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0106`.