Auto merge of #109376 - matthiaskrgr:rollup-0aut57k, r=matthiaskrgr

Rollup of 13 pull requests

Successful merges:

 - #109249 (Update names/comments for new return-position impl trait in trait lowering strategy)
 - #109259 (rustdoc: Fix missing private inlining)
 - #109269 (rustdoc: cleanup some intermediate allocs)
 - #109301 (fix: fix ICE in `custom-test-frameworks` feature)
 - #109319 (Add test for `c_variadic` in rustdoc-json)
 - #109323 (Ignore files in .gitignore in mir opt check)
 - #109331 (rustdoc: implement bag semantics for function parameter search)
 - #109337 (Improve `Iterator::collect_into` documentation)
 - #109351 (rustdoc: Remove footnote references from doc summary)
 - #109353 (Fix wrong crate name in custom MIR docs)
 - #109362 (Split `items` from `-Zmeta-stats` in two.)
 - #109370 (fix ClashingExternDeclarations lint ICE)
 - #109375 (rustdoc: Fix improper escaping of deprecation reasons)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
Committed by bors on 2023-03-20 06:13:10 +00:00 (commit e91364bcf5).
31 changed files with 400 additions and 162 deletions.


@ -33,7 +33,23 @@ pub fn expand_test_case(
}
let sp = ecx.with_def_site_ctxt(attr_sp);
let mut item = anno_item.expect_item();
let (mut item, is_stmt) = match anno_item {
Annotatable::Item(item) => (item, false),
Annotatable::Stmt(stmt) if let ast::StmtKind::Item(_) = stmt.kind => if let ast::StmtKind::Item(i) = stmt.into_inner().kind {
(i, true)
} else {
unreachable!()
},
_ => {
ecx.struct_span_err(
anno_item.span(),
"`#[test_case]` attribute is only allowed on items",
)
.emit();
return vec![];
}
};
item = item.map(|mut item| {
let test_path_symbol = Symbol::intern(&item_path(
// skip the name of the root module
@ -50,7 +66,13 @@ pub fn expand_test_case(
item
});
return vec![Annotatable::Item(item)];
let ret = if is_stmt {
Annotatable::Stmt(P(ecx.stmt_item(item.span, item)))
} else {
Annotatable::Item(item)
};
vec![ret]
}
pub fn expand_test(


@ -3068,7 +3068,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
// generate the def_id of an associated type for the trait and return as
// type a projection.
let def_id = if in_trait && tcx.lower_impl_trait_in_trait_to_assoc_ty() {
tcx.associated_item_for_impl_trait_in_trait(local_def_id).to_def_id()
tcx.associated_type_for_impl_trait_in_trait(local_def_id).to_def_id()
} else {
local_def_id.to_def_id()
};


@ -2781,8 +2781,7 @@ impl ClashingExternDeclarations {
// Given a transparent newtype, reach through and grab the inner
// type unless the newtype makes the type non-null.
let non_transparent_ty = |ty: Ty<'tcx>| -> Ty<'tcx> {
let mut ty = ty;
let non_transparent_ty = |mut ty: Ty<'tcx>| -> Ty<'tcx> {
loop {
if let ty::Adt(def, substs) = *ty.kind() {
let is_transparent = def.repr().transparent();
@ -2792,14 +2791,14 @@ impl ClashingExternDeclarations {
ty, is_transparent, is_non_null
);
if is_transparent && !is_non_null {
debug_assert!(def.variants().len() == 1);
debug_assert_eq!(def.variants().len(), 1);
let v = &def.variant(VariantIdx::new(0));
ty = transparent_newtype_field(tcx, v)
.expect(
"single-variant transparent structure with zero-sized field",
)
.ty(tcx, substs);
continue;
// continue with `ty`'s non-ZST field,
// otherwise `ty` is a ZST and we can return
if let Some(field) = transparent_newtype_field(tcx, v) {
ty = field.ty(tcx, substs);
continue;
}
}
}
debug!("non_transparent_ty -> {:?}", ty);
@ -2813,10 +2812,8 @@ impl ClashingExternDeclarations {
if !seen_types.insert((a, b)) {
// We've encountered a cycle. There's no point going any further -- the types are
// structurally the same.
return true;
}
let tcx = cx.tcx;
if a == b {
true
} else if a == b {
// All nominally-same types are structurally same, too.
true
} else {
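
The loop above previously assumed every `#[repr(transparent)]` newtype has exactly one non-zero-sized field and `expect`ed on it; a transparent newtype wrapping only a ZST has no such field, which is the ICE this rollup fixes. A hedged minimal reproducer (it mirrors the regression test added near the end of this rollup; nothing here is compiler code):

```rust
#[repr(transparent)]
struct TransparentZst(());

mod a {
    extern "C" {
        // First declaration of the symbol, returning plain unit.
        fn zst() -> ();
    }
}

mod b {
    use super::TransparentZst;
    extern "C" {
        // Same symbol redeclared with a transparent ZST newtype return type.
        // Comparing the two signatures makes the lint reach through the
        // newtype; before this fix it hit the `expect` above and ICE'd.
        fn zst() -> TransparentZst;
    }
}

fn main() {}
```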


@ -254,7 +254,7 @@ provide! { tcx, def_id, other, cdata,
.process_decoded(tcx, || panic!("{def_id:?} does not have trait_impl_trait_tys")))
}
associated_items_for_impl_trait_in_trait => { table_defaulted_array }
associated_types_for_impl_traits_in_associated_fn => { table_defaulted_array }
visibility => { cdata.get_visibility(def_id.index) }
adt_def => { cdata.get_adt_def(def_id.index, tcx) }


@ -609,10 +609,9 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
_ = stat!("mir", || self.encode_mir());
_ = stat!("items", || {
self.encode_def_ids();
self.encode_info_for_items();
});
_ = stat!("def-ids", || self.encode_def_ids());
_ = stat!("items", || self.encode_info_for_items());
let interpret_alloc_index = stat!("interpret-alloc-index", || {
let mut interpret_alloc_index = Vec::new();
@ -1198,8 +1197,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
record!(self.tables.trait_impl_trait_tys[def_id] <- table);
}
if should_encode_fn_impl_trait_in_trait(tcx, def_id) {
let table = tcx.associated_items_for_impl_trait_in_trait(def_id);
record_defaulted_array!(self.tables.associated_items_for_impl_trait_in_trait[def_id] <- table);
let table = tcx.associated_types_for_impl_traits_in_associated_fn(def_id);
record_defaulted_array!(self.tables.associated_types_for_impl_traits_in_associated_fn[def_id] <- table);
}
}


@ -354,7 +354,7 @@ define_tables! {
explicit_item_bounds: Table<DefIndex, LazyArray<(ty::Predicate<'static>, Span)>>,
inferred_outlives_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
inherent_impls: Table<DefIndex, LazyArray<DefIndex>>,
associated_items_for_impl_trait_in_trait: Table<DefIndex, LazyArray<DefId>>,
associated_types_for_impl_traits_in_associated_fn: Table<DefIndex, LazyArray<DefId>>,
opt_rpitit_info: Table<DefIndex, Option<LazyValue<ty::ImplTraitInTraitData>>>,
unused_generic_params: Table<DefIndex, UnusedGenericParams>,


@ -785,7 +785,7 @@ rustc_queries! {
/// if `fn_def_id` is the def id of a function defined inside an impl that implements a trait, then it
/// creates and returns the associated items that correspond to each impl trait in return position
/// of the implemented trait.
query associated_items_for_impl_trait_in_trait(fn_def_id: DefId) -> &'tcx [DefId] {
query associated_types_for_impl_traits_in_associated_fn(fn_def_id: DefId) -> &'tcx [DefId] {
desc { |tcx| "creating associated items for impl trait in trait returned by `{}`", tcx.def_path_str(fn_def_id) }
cache_on_disk_if { fn_def_id.is_local() }
separate_provide_extern
@ -793,7 +793,7 @@ rustc_queries! {
/// Given an impl trait in trait `opaque_ty_def_id`, create and return the corresponding
/// associated item.
query associated_item_for_impl_trait_in_trait(opaque_ty_def_id: LocalDefId) -> LocalDefId {
query associated_type_for_impl_trait_in_trait(opaque_ty_def_id: LocalDefId) -> LocalDefId {
desc { |tcx| "creates the associated item corresponding to the opaque type `{}`", tcx.def_path_str(opaque_ty_def_id.to_def_id()) }
cache_on_disk_if { true }
separate_provide_extern


@ -2579,7 +2579,9 @@ impl<'tcx> TyCtxt<'tcx> {
let Some(trait_item_def_id) = item.trait_item_def_id else { return false; };
if self.lower_impl_trait_in_trait_to_assoc_ty() {
return !self.associated_items_for_impl_trait_in_trait(trait_item_def_id).is_empty();
return !self
.associated_types_for_impl_traits_in_associated_fn(trait_item_def_id)
.is_empty();
}
// FIXME(RPITIT): This does a somewhat manual walk through the signature


@ -11,8 +11,8 @@ pub fn provide(providers: &mut ty::query::Providers) {
associated_item,
associated_item_def_ids,
associated_items,
associated_items_for_impl_trait_in_trait,
associated_item_for_impl_trait_in_trait,
associated_types_for_impl_traits_in_associated_fn,
associated_type_for_impl_trait_in_trait,
impl_item_implementor_ids,
..*providers
};
@ -24,7 +24,7 @@ fn associated_item_def_ids(tcx: TyCtxt<'_>, def_id: DefId) -> &[DefId] {
hir::ItemKind::Trait(.., ref trait_item_refs) => {
if tcx.lower_impl_trait_in_trait_to_assoc_ty() {
// We collect RPITITs for each trait method's return type and create a
// corresponding associated item using associated_items_for_impl_trait_in_trait
// corresponding associated item using associated_types_for_impl_traits_in_associated_fn
// query.
tcx.arena.alloc_from_iter(
trait_item_refs
@ -39,7 +39,9 @@ fn associated_item_def_ids(tcx: TyCtxt<'_>, def_id: DefId) -> &[DefId] {
.flat_map(|trait_item_ref| {
let trait_fn_def_id =
trait_item_ref.id.owner_id.def_id.to_def_id();
tcx.associated_items_for_impl_trait_in_trait(trait_fn_def_id)
tcx.associated_types_for_impl_traits_in_associated_fn(
trait_fn_def_id,
)
})
.map(|def_id| *def_id),
),
@ -56,7 +58,7 @@ fn associated_item_def_ids(tcx: TyCtxt<'_>, def_id: DefId) -> &[DefId] {
if tcx.lower_impl_trait_in_trait_to_assoc_ty() {
// We collect RPITITs for each trait method's return type, on the impl side too and
// create a corresponding associated item using
// associated_items_for_impl_trait_in_trait query.
// associated_types_for_impl_traits_in_associated_fn query.
tcx.arena.alloc_from_iter(
impl_
.items
@ -72,7 +74,9 @@ fn associated_item_def_ids(tcx: TyCtxt<'_>, def_id: DefId) -> &[DefId] {
.flat_map(|impl_item_ref| {
let impl_fn_def_id =
impl_item_ref.id.owner_id.def_id.to_def_id();
tcx.associated_items_for_impl_trait_in_trait(impl_fn_def_id)
tcx.associated_types_for_impl_traits_in_associated_fn(
impl_fn_def_id,
)
})
.map(|def_id| *def_id)
})),
@ -176,13 +180,19 @@ fn associated_item_from_impl_item_ref(impl_item_ref: &hir::ImplItemRef) -> ty::A
}
}
/// Given an `fn_def_id` of a trait or of an impl that implements a given trait:
/// if `fn_def_id` is the def id of a function defined inside a trait, then it creates and returns
/// the associated items that correspond to each impl trait in return position for that trait.
/// if `fn_def_id` is the def id of a function defined inside an impl that implements a trait, then it
/// creates and returns the associated items that correspond to each impl trait in return position
/// of the implemented trait.
fn associated_items_for_impl_trait_in_trait(tcx: TyCtxt<'_>, fn_def_id: DefId) -> &'_ [DefId] {
/// Given an `fn_def_id` of a trait or a trait implementation:
///
/// if `fn_def_id` is a function defined inside a trait, then it synthesizes
/// a new def id corresponding to a new associated type for each return-
/// position `impl Trait` in the signature.
///
/// if `fn_def_id` is a function inside of an impl, then for each synthetic
/// associated type generated for the corresponding trait function described
/// above, synthesize a corresponding associated type in the impl.
fn associated_types_for_impl_traits_in_associated_fn(
tcx: TyCtxt<'_>,
fn_def_id: DefId,
) -> &'_ [DefId] {
let parent_def_id = tcx.parent(fn_def_id);
match tcx.def_kind(parent_def_id) {
@ -206,7 +216,7 @@ fn associated_items_for_impl_trait_in_trait(tcx: TyCtxt<'_>, fn_def_id: DefId) -
visitor.visit_fn_ret_ty(output);
tcx.arena.alloc_from_iter(visitor.rpits.iter().map(|opaque_ty_def_id| {
tcx.associated_item_for_impl_trait_in_trait(opaque_ty_def_id).to_def_id()
tcx.associated_type_for_impl_trait_in_trait(opaque_ty_def_id).to_def_id()
}))
} else {
&[]
@ -217,9 +227,9 @@ fn associated_items_for_impl_trait_in_trait(tcx: TyCtxt<'_>, fn_def_id: DefId) -
let Some(trait_fn_def_id) = tcx.associated_item(fn_def_id).trait_item_def_id else { return &[] };
tcx.arena.alloc_from_iter(
tcx.associated_items_for_impl_trait_in_trait(trait_fn_def_id).iter().map(
tcx.associated_types_for_impl_traits_in_associated_fn(trait_fn_def_id).iter().map(
move |trait_assoc_def_id| {
impl_associated_item_for_impl_trait_in_trait(
associated_type_for_impl_trait_in_impl(
tcx,
trait_assoc_def_id.expect_local(),
fn_def_id.expect_local(),
@ -231,16 +241,17 @@ fn associated_items_for_impl_trait_in_trait(tcx: TyCtxt<'_>, fn_def_id: DefId) -
}
def_kind => bug!(
"associated_items_for_impl_trait_in_trait: {:?} should be Trait or Impl but is {:?}",
"associated_types_for_impl_traits_in_associated_fn: {:?} should be Trait or Impl but is {:?}",
parent_def_id,
def_kind
),
}
}
/// Given an `opaque_ty_def_id` corresponding to an impl trait in trait, create and return the
/// corresponding associated item.
fn associated_item_for_impl_trait_in_trait(
/// Given an `opaque_ty_def_id` corresponding to an `impl Trait` in an associated
/// function from a trait, synthesize an associated type for that `impl Trait`
/// that inherits properties that we infer from the method and the opaque type.
fn associated_type_for_impl_trait_in_trait(
tcx: TyCtxt<'_>,
opaque_ty_def_id: LocalDefId,
) -> LocalDefId {
@ -335,10 +346,12 @@ fn associated_item_for_impl_trait_in_trait(
local_def_id
}
/// Given an `trait_assoc_def_id` that corresponds to a previously synthesized impl trait in trait
/// into an associated type and an `impl_def_id` corresponding to an impl block, create and return
/// the corresponding associated item inside the impl block.
fn impl_associated_item_for_impl_trait_in_trait(
/// Given an `trait_assoc_def_id` corresponding to an associated item synthesized
/// from an `impl Trait` in an associated function from a trait, and an
/// `impl_fn_def_id` that represents an implementation of the associated function
/// that the `impl Trait` comes from, synthesize an associated type for that `impl Trait`
/// that inherits properties that we infer from the method and the associated type.
fn associated_type_for_impl_trait_in_impl(
tcx: TyCtxt<'_>,
trait_assoc_def_id: LocalDefId,
impl_fn_def_id: LocalDefId,
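
The renamed queries and helpers above implement the new lowering strategy from the first PR in this rollup: each return-position `impl Trait` in a trait ("RPITIT") gets a synthesized associated type on the trait and a matching one on each impl. A hedged, purely illustrative sketch of what that corresponds to in surface Rust (not compiler code; the associated-type name is invented):

```rust
// A trait function with a return-position `impl Trait`:
trait Container {
    fn items(&self) -> impl Iterator<Item = u32>;
}

struct Wrapper(Vec<u32>);

impl Container for Wrapper {
    fn items(&self) -> impl Iterator<Item = u32> {
        self.0.iter().copied()
    }
}

// Conceptually, the lowering behaves as if the trait (and the impl) had a
// synthetic associated type per such `impl Trait`, roughly:
//
//     trait Container {
//         type ItemsReturn: Iterator<Item = u32>; // synthesized
//         fn items(&self) -> Self::ItemsReturn;
//     }
//
// `associated_types_for_impl_traits_in_associated_fn` returns the def ids of
// those synthesized types for a given associated function, and
// `associated_type_for_impl_trait_in_trait` creates the one for a single
// opaque type.

fn main() {
    let w = Wrapper(vec![1, 2, 3]);
    assert_eq!(w.items().sum::<u32>(), 6);
}
```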


@ -8,7 +8,7 @@
//!
//! The documentation for this module describes how to use this feature. If you are interested in
//! hacking on the implementation, most of that documentation lives at
//! `rustc_mir_building/src/build/custom/mod.rs`.
//! `rustc_mir_build/src/build/custom/mod.rs`.
//!
//! Typical usage will look like this:
//!


@ -2003,7 +2003,7 @@ pub trait Iterator {
/// a.iter().map(|&x| x * 2).collect_into(&mut vec);
/// a.iter().map(|&x| x * 10).collect_into(&mut vec);
///
/// assert_eq!(vec![0, 1, 2, 4, 6, 10, 20, 30], vec);
/// assert_eq!(vec, vec![0, 1, 2, 4, 6, 10, 20, 30]);
/// ```
///
/// `Vec` can have a manual set capacity to avoid reallocating it:
@ -2018,7 +2018,7 @@ pub trait Iterator {
/// a.iter().map(|&x| x * 10).collect_into(&mut vec);
///
/// assert_eq!(6, vec.capacity());
/// println!("{:?}", vec);
/// assert_eq!(vec, vec![2, 4, 6, 10, 20, 30]);
/// ```
///
/// The returned mutable reference can be used to continue the call chain:
@ -2032,12 +2032,12 @@ pub trait Iterator {
/// let count = a.iter().collect_into(&mut vec).iter().count();
///
/// assert_eq!(count, vec.len());
/// println!("Vec len is {}", count);
/// assert_eq!(vec, vec![1, 2, 3]);
///
/// let count = a.iter().collect_into(&mut vec).iter().count();
///
/// assert_eq!(count, vec.len());
/// println!("Vec len now is {}", count);
/// assert_eq!(vec, vec![1, 2, 3, 1, 2, 3]);
/// ```
#[inline]
#[unstable(feature = "iter_collect_into", reason = "new API", issue = "94780")]
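
The documentation tweaks above replace `println!` calls with `assert_eq!` so the examples actually verify what they describe: a doctest passes as long as it compiles and does not panic, so only an assertion catches a regression in the documented output. A hedged, self-contained version of the appended-collection example (requires the nightly `iter_collect_into` feature):

```rust
#![feature(iter_collect_into)]

fn main() {
    let a = [1, 2, 3];
    let mut vec: Vec<i32> = Vec::new();

    a.iter().collect_into(&mut vec);
    a.iter().collect_into(&mut vec);

    // An assertion fails the test if the behaviour regresses; a println!
    // would pass no matter what was printed.
    assert_eq!(vec, vec![1, 2, 3, 1, 2, 3]);
}
```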


@ -1480,7 +1480,7 @@ pub(crate) fn visibility_print_with_space<'a, 'tcx: 'a>(
debug!("path={:?}", path);
// modified from `resolved_path()` to work with `DefPathData`
let last_name = path.data.last().unwrap().data.get_opt_name().unwrap();
let anchor = anchor(vis_did, last_name, cx).to_string();
let anchor = anchor(vis_did, last_name, cx);
let mut s = "pub(in ".to_owned();
for seg in &path.data[..path.data.len() - 1] {


@ -556,7 +556,15 @@ fn check_if_allowed_tag(t: &Tag<'_>) -> bool {
}
fn is_forbidden_tag(t: &Tag<'_>) -> bool {
matches!(t, Tag::CodeBlock(_) | Tag::Table(_) | Tag::TableHead | Tag::TableRow | Tag::TableCell)
matches!(
t,
Tag::CodeBlock(_)
| Tag::Table(_)
| Tag::TableHead
| Tag::TableRow
| Tag::TableCell
| Tag::FootnoteDefinition(_)
)
}
impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
@ -589,6 +597,10 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for SummaryLine<'a, I> {
is_start = false;
check_if_allowed_tag(c)
}
Event::FootnoteReference(_) => {
self.skipped_tags += 1;
false
}
_ => true,
};
if !is_allowed_tag {


@ -352,7 +352,7 @@ impl<'tcx> Context<'tcx> {
},
);
path = href.into_inner().to_string_lossy().to_string();
path = href.into_inner().to_string_lossy().into_owned();
if let Some(c) = path.as_bytes().last() && *c != b'/' {
path.push('/');


@ -1,5 +1,6 @@
use clean::AttributesExt;
use rustc_data_structures::captures::Captures;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir as hir;
use rustc_hir::def::CtorKind;
@ -28,8 +29,8 @@ use crate::formats::item_type::ItemType;
use crate::formats::{AssocItemRender, Impl, RenderMode};
use crate::html::escape::Escape;
use crate::html::format::{
join_with_double_colon, print_abi_with_space, print_constness_with_space, print_where_clause,
visibility_print_with_space, Buffer, Ending, PrintWithSpace,
display_fn, join_with_double_colon, print_abi_with_space, print_constness_with_space,
print_where_clause, visibility_print_with_space, Buffer, Ending, PrintWithSpace,
};
use crate::html::layout::Page;
use crate::html::markdown::{HeadingOffset, MarkdownSummaryLine};
@ -367,7 +368,7 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
..myitem.clone()
};
let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()));
let stab_tags = Some(extra_info_tags(&import_item, item, cx.tcx()).to_string());
stab_tags
} else {
None
@ -461,42 +462,62 @@ fn item_module(w: &mut Buffer, cx: &mut Context<'_>, item: &clean::Item, items:
/// Render the stability, deprecation and portability tags that are displayed in the item's summary
/// at the module level.
fn extra_info_tags(item: &clean::Item, parent: &clean::Item, tcx: TyCtxt<'_>) -> String {
let mut tags = String::new();
fn extra_info_tags<'a, 'tcx: 'a>(
item: &'a clean::Item,
parent: &'a clean::Item,
tcx: TyCtxt<'tcx>,
) -> impl fmt::Display + 'a + Captures<'tcx> {
display_fn(move |f| {
fn tag_html<'a>(
class: &'a str,
title: &'a str,
contents: &'a str,
) -> impl fmt::Display + 'a {
display_fn(move |f| {
write!(
f,
r#"<span class="stab {}" title="{}">{}</span>"#,
class,
Escape(title),
contents
)
})
}
fn tag_html(class: &str, title: &str, contents: &str) -> String {
format!(r#"<span class="stab {}" title="{}">{}</span>"#, class, Escape(title), contents)
}
// The trailing space after each tag is to space it properly against the rest of the docs.
if let Some(depr) = &item.deprecation(tcx) {
let message = if stability::deprecation_in_effect(depr) {
"Deprecated"
} else {
"Deprecation planned"
};
write!(f, "{}", tag_html("deprecated", "", message))?;
}
// The trailing space after each tag is to space it properly against the rest of the docs.
if let Some(depr) = &item.deprecation(tcx) {
let message = if stability::deprecation_in_effect(depr) {
"Deprecated"
} else {
"Deprecation planned"
// The "rustc_private" crates are permanently unstable so it makes no sense
// to render "unstable" everywhere.
if item.stability(tcx).as_ref().map(|s| s.is_unstable() && s.feature != sym::rustc_private)
== Some(true)
{
write!(f, "{}", tag_html("unstable", "", "Experimental"))?;
}
let cfg = match (&item.cfg, parent.cfg.as_ref()) {
(Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
(cfg, _) => cfg.as_deref().cloned(),
};
tags += &tag_html("deprecated", "", message);
}
// The "rustc_private" crates are permanently unstable so it makes no sense
// to render "unstable" everywhere.
if item.stability(tcx).as_ref().map(|s| s.is_unstable() && s.feature != sym::rustc_private)
== Some(true)
{
tags += &tag_html("unstable", "", "Experimental");
}
let cfg = match (&item.cfg, parent.cfg.as_ref()) {
(Some(cfg), Some(parent_cfg)) => cfg.simplify_with(parent_cfg),
(cfg, _) => cfg.as_deref().cloned(),
};
debug!("Portability name={:?} {:?} - {:?} = {:?}", item.name, item.cfg, parent.cfg, cfg);
if let Some(ref cfg) = cfg {
tags += &tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html());
}
tags
debug!("Portability name={:?} {:?} - {:?} = {:?}", item.name, item.cfg, parent.cfg, cfg);
if let Some(ref cfg) = cfg {
write!(
f,
"{}",
tag_html("portability", &cfg.render_long_plain(), &cfg.render_short_html())
)
} else {
Ok(())
}
})
}
fn item_function(w: &mut Buffer, cx: &mut Context<'_>, it: &clean::Item, f: &clean::Function) {
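
The refactor above switches `extra_info_tags` from building up a `String` to returning a lazily rendered `impl fmt::Display` via rustdoc's `display_fn` helper, so the tags are written straight into the output buffer. A hedged, standalone sketch of the general pattern (this is not the rustdoc-internal helper, just an illustration of the technique):

```rust
use std::fmt;

// Wrap a closure in a type whose `Display` impl calls it, so nothing is
// allocated until the value is actually formatted.
fn display_fn(f: impl Fn(&mut fmt::Formatter<'_>) -> fmt::Result) -> impl fmt::Display {
    struct WithFormatter<F>(F);
    impl<F: Fn(&mut fmt::Formatter<'_>) -> fmt::Result> fmt::Display for WithFormatter<F> {
        fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
            (self.0)(fmt)
        }
    }
    WithFormatter(f)
}

fn tag_html<'a>(class: &'a str, contents: &'a str) -> impl fmt::Display + 'a {
    display_fn(move |f| write!(f, r#"<span class="stab {class}">{contents}</span>"#))
}

fn main() {
    // The intermediate `String` per tag is gone; formatting happens here.
    println!("{}", tag_html("deprecated", "Deprecated"));
}
```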


@ -85,7 +85,7 @@ impl LocalSourcesCollector<'_, '_> {
},
);
let mut href = href.into_inner().to_string_lossy().to_string();
let mut href = href.into_inner().to_string_lossy().into_owned();
if let Some(c) = href.as_bytes().last() && *c != b'/' {
href.push('/');
}


@ -1202,28 +1202,42 @@ function initSearch(rawSearchIndex) {
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
* @param {integer} typeFilter
* @param {Array<integer>} skipPositions - Do not return one of these positions.
*
* @return {integer} - Returns an edit distance to the best match. If there is no
* match, returns `maxEditDistance + 1`.
* @return {dist: integer, position: integer} - Returns an edit distance to the best match.
* If there is no match, returns
* `maxEditDistance + 1` and position: -1.
*/
function findArg(row, elem, typeFilter, maxEditDistance) {
function findArg(row, elem, typeFilter, maxEditDistance, skipPositions) {
let dist = maxEditDistance + 1;
let position = -1;
if (row && row.type && row.type.inputs && row.type.inputs.length > 0) {
let i = 0;
for (const input of row.type.inputs) {
if (!typePassesFilter(typeFilter, input.ty)) {
if (!typePassesFilter(typeFilter, input.ty) ||
skipPositions.indexOf(i) !== -1) {
i += 1;
continue;
}
dist = Math.min(
dist,
checkType(input, elem, parsedQuery.literalSearch, maxEditDistance)
const typeDist = checkType(
input,
elem,
parsedQuery.literalSearch,
maxEditDistance
);
if (dist === 0) {
return 0;
if (typeDist === 0) {
return {dist: 0, position: i};
}
if (typeDist < dist) {
dist = typeDist;
position = i;
}
i += 1;
}
}
return parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
dist = parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
return {dist, position};
}
/**
@ -1232,29 +1246,43 @@ function initSearch(rawSearchIndex) {
* @param {Row} row
* @param {QueryElement} elem - The element from the parsed query.
* @param {integer} typeFilter
* @param {Array<integer>} skipPositions - Do not return one of these positions.
*
* @return {integer} - Returns an edit distance to the best match. If there is no
* match, returns `maxEditDistance + 1`.
* @return {dist: integer, position: integer} - Returns an edit distance to the best match.
* If there is no match, returns
* `maxEditDistance + 1` and position: -1.
*/
function checkReturned(row, elem, typeFilter, maxEditDistance) {
function checkReturned(row, elem, typeFilter, maxEditDistance, skipPositions) {
let dist = maxEditDistance + 1;
let position = -1;
if (row && row.type && row.type.output.length > 0) {
const ret = row.type.output;
let i = 0;
for (const ret_ty of ret) {
if (!typePassesFilter(typeFilter, ret_ty.ty)) {
if (!typePassesFilter(typeFilter, ret_ty.ty) ||
skipPositions.indexOf(i) !== -1) {
i += 1;
continue;
}
dist = Math.min(
dist,
checkType(ret_ty, elem, parsedQuery.literalSearch, maxEditDistance)
const typeDist = checkType(
ret_ty,
elem,
parsedQuery.literalSearch,
maxEditDistance
);
if (dist === 0) {
return 0;
if (typeDist === 0) {
return {dist: 0, position: i};
}
if (typeDist < dist) {
dist = typeDist;
position = i;
}
i += 1;
}
}
return parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
dist = parsedQuery.literalSearch ? maxEditDistance + 1 : dist;
return {dist, position};
}
function checkPath(contains, ty, maxEditDistance) {
@ -1455,13 +1483,13 @@ function initSearch(rawSearchIndex) {
const fullId = row.id;
const searchWord = searchWords[pos];
const in_args = findArg(row, elem, parsedQuery.typeFilter, maxEditDistance);
const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxEditDistance);
const in_args = findArg(row, elem, parsedQuery.typeFilter, maxEditDistance, []);
const returned = checkReturned(row, elem, parsedQuery.typeFilter, maxEditDistance, []);
// path_dist is 0 because no parent path information is currently stored
// in the search index
addIntoResults(results_in_args, fullId, pos, -1, in_args, 0, maxEditDistance);
addIntoResults(results_returned, fullId, pos, -1, returned, 0, maxEditDistance);
addIntoResults(results_in_args, fullId, pos, -1, in_args.dist, 0, maxEditDistance);
addIntoResults(results_returned, fullId, pos, -1, returned.dist, 0, maxEditDistance);
if (!typePassesFilter(parsedQuery.typeFilter, row.ty)) {
return;
@ -1534,12 +1562,20 @@ function initSearch(rawSearchIndex) {
// If the result is too "bad", we return false and it ends this search.
function checkArgs(elems, callback) {
const skipPositions = [];
for (const elem of elems) {
// There is more than one parameter to the query so all checks should be "exact"
const dist = callback(row, elem, NO_TYPE_FILTER, maxEditDistance);
const { dist, position } = callback(
row,
elem,
NO_TYPE_FILTER,
maxEditDistance,
skipPositions
);
if (dist <= 1) {
nbDist += 1;
totalDist += dist;
skipPositions.push(position);
} else {
return false;
}
@ -1597,9 +1633,17 @@ function initSearch(rawSearchIndex) {
row,
elem,
parsedQuery.typeFilter,
maxEditDistance,
[]
);
addIntoResults(
results_others,
row.id,
i,
-1,
in_returned.dist,
maxEditDistance
);
addIntoResults(results_others, row.id, i, -1, in_returned, maxEditDistance);
}
}
} else if (parsedQuery.foundElems > 0) {
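
The rewritten `findArg`/`checkReturned` above return `{dist, position}` and take a `skipPositions` list so that when a query names the same type twice (e.g. `P, P`), each concrete parameter position can satisfy at most one query element. A hedged sketch of that bag-semantics matching, written in Rust for brevity rather than the actual search.js:

```rust
// Each parameter position may be consumed by at most one query element.
fn matches_bag(query: &[&str], params: &[&str]) -> bool {
    let mut used = vec![false; params.len()];
    'query: for q in query {
        for (i, p) in params.iter().enumerate() {
            if !used[i] && p == q {
                used[i] = true; // this position can no longer satisfy another element
                continue 'query;
            }
        }
        return false; // no unused parameter matched this query element
    }
    true
}

fn main() {
    // Searching `P, P` should only match functions taking two `P` arguments...
    assert!(matches_bag(&["P", "P"], &["P", "P"]));
    assert!(!matches_bag(&["P", "P"], &["P"]));
    // ...while `P` alone still matches either.
    assert!(matches_bag(&["P"], &["P", "P"]));
}
```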


@ -2,7 +2,7 @@
{% when Self::Deprecation with { message } %}
<div class="stab deprecated"> {# #}
<span class="emoji">👎</span> {# #}
<span>{{message}}</span> {# #}
<span>{{message|safe}}</span> {# #}
</div> {# #}
{% when Self::Unstable with { feature, tracking } %}
<div class="stab unstable"> {# #}


@ -286,7 +286,6 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
split.next().map(|f| Symbol::intern(f)).ok_or_else(no_res)?;
let path = split
.next()
.map(|f| f.to_owned())
// If there's no third component, we saw `[a::b]` before and it failed to resolve.
// So there's no partial res.
.ok_or_else(no_res)?;
@ -429,7 +428,6 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
let item_name = Symbol::intern(item_str);
let path_root = split
.next()
.map(|f| f.to_owned())
// If there's no `::`, it's not an associated item.
// So we can be sure that `rustc_resolve` was accurate when it said it wasn't resolved.
.ok_or_else(|| {


@ -265,10 +265,6 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
return false;
}
if !self.view_item_stack.insert(res_did) {
return false;
}
if !please_inline &&
let mut visitor = OneLevelVisitor::new(self.cx.tcx.hir(), res_did) &&
let Some(item) = visitor.find_target(self.cx.tcx, def_id.to_def_id(), path) &&
@ -285,6 +281,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
return false;
}
if !self.view_item_stack.insert(res_did) {
return false;
}
let ret = match tcx.hir().get_by_def_id(res_did) {
Node::Item(&hir::Item { kind: hir::ItemKind::Mod(ref m), .. }) if glob => {
let prev = mem::replace(&mut self.inlining, true);


@ -3,19 +3,24 @@
use std::collections::HashSet;
use std::path::{Path, PathBuf};
use crate::walk::walk_no_read;
fn check_unused_files(path: &Path, bless: bool, bad: &mut bool) {
let mut rs_files = Vec::<PathBuf>::new();
let mut output_files = HashSet::<PathBuf>::new();
let files = walkdir::WalkDir::new(&path.join("mir-opt")).into_iter();
for file in files.filter_map(Result::ok).filter(|e| e.file_type().is_file()) {
let filepath = file.path();
if filepath.extension() == Some("rs".as_ref()) {
rs_files.push(filepath.to_owned());
} else {
output_files.insert(filepath.to_owned());
}
}
walk_no_read(
&[&path.join("mir-opt")],
|path| path.file_name() == Some("README.md".as_ref()),
&mut |file| {
let filepath = file.path();
if filepath.extension() == Some("rs".as_ref()) {
rs_files.push(filepath.to_owned());
} else {
output_files.insert(filepath.to_owned());
}
},
);
for file in rs_files {
for bw in [32, 64] {
@ -26,16 +31,14 @@ fn check_unused_files(path: &Path, bless: bool, bad: &mut bool) {
}
for extra in output_files {
if extra.file_name() != Some("README.md".as_ref()) {
if !bless {
tidy_error!(
bad,
"the following output file is not associated with any mir-opt test, you can remove it: {}",
extra.display()
);
} else {
let _ = std::fs::remove_file(extra);
}
if !bless {
tidy_error!(
bad,
"the following output file is not associated with any mir-opt test, you can remove it: {}",
extra.display()
);
} else {
let _ = std::fs::remove_file(extra);
}
}
}
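
The tidy change above routes the mir-opt scan through `walk_no_read`, whose walker respects ignore files, so build products listed in `.gitignore` are no longer reported as stray mir-opt outputs. A hedged sketch of a gitignore-respecting walk; it assumes the `ignore` crate, which is what tidy's walk helpers are built on to the best of my reading:

```rust
use std::path::Path;

// Walk a tree while honouring .gitignore/.ignore files, visiting files only.
// Requires the `ignore` crate as a dependency; the path below is illustrative.
fn walk_respecting_gitignore(root: &Path, mut on_file: impl FnMut(&Path)) {
    for entry in ignore::WalkBuilder::new(root).build().filter_map(Result::ok) {
        if entry.file_type().map_or(false, |t| t.is_file()) {
            on_file(entry.path());
        }
    }
}

fn main() {
    walk_respecting_gitignore(Path::new("tests/mir-opt"), |path| {
        println!("{}", path.display());
    });
}
```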


@ -0,0 +1,20 @@
// exact-check
const QUERY = [
'P',
'P, P',
];
const EXPECTED = [
{
'in_args': [
{ 'path': 'search_bag_semantics', 'name': 'alacazam' },
{ 'path': 'search_bag_semantics', 'name': 'abracadabra' },
],
},
{
'others': [
{ 'path': 'search_bag_semantics', 'name': 'abracadabra' },
],
},
];


@ -0,0 +1,4 @@
pub struct P;
pub fn abracadabra(a: P, b: P) {}
pub fn alacazam(a: P) {}


@ -0,0 +1,9 @@
#![feature(no_core)]
#![no_core]
extern "C" {
// @is "$.index[*][?(@.name == 'not_variadic')].inner.decl.c_variadic" false
pub fn not_variadic(_: i32);
// @is "$.index[*][?(@.name == 'variadic')].inner.decl.c_variadic" true
pub fn variadic(_: i32, ...);
}


@ -28,6 +28,6 @@ pub struct V;
pub struct W;
// @matches deprecated/struct.X.html '//*[@class="stab deprecated"]' \
// 'Deprecated: shorthand reason$'
#[deprecated = "shorthand reason"]
// 'Deprecated: shorthand reason: code$'
#[deprecated = "shorthand reason: `code`"]
pub struct X;


@ -0,0 +1,17 @@
// This test ensures that no footnote reference is generated inside
// summary doc.
#![crate_name = "foo"]
// @has 'foo/index.html'
// @has - '//*[@class="desc docblock-short"]' 'hello bla'
// @!has - '//*[@class="desc docblock-short"]/sup' '1'
// @has 'foo/struct.S.html'
// @has - '//*[@class="docblock"]//sup' '1'
// @has - '//*[@class="docblock"]' 'hello 1 bla'
/// hello [^foot] bla
///
/// [^foot]: blabla
pub struct S;


@ -0,0 +1,27 @@
// Regression test for <https://github.com/rust-lang/rust/issues/109258>.
#![crate_name = "foo"]
// @has 'foo/index.html'
// We should only have a "Re-exports" and a "Modules" headers.
// @count - '//*[@id="main-content"]/h2[@class="small-section-header"]' 2
// @has - '//*[@id="main-content"]/h2[@class="small-section-header"]' 'Re-exports'
// @has - '//*[@id="main-content"]/h2[@class="small-section-header"]' 'Modules'
// @has - '//*[@id="reexport.Foo"]' 'pub use crate::issue_109258::Foo;'
// @has - '//*[@id="reexport.Foo"]//a[@href="issue_109258/struct.Foo.html"]' 'Foo'
// @!has 'foo/struct.Foo.html'
pub use crate::issue_109258::Foo;
// @has 'foo/issue_109258/index.html'
// We should only have a "Structs" header.
// @count - '//*[@id="main-content"]/h2[@class="small-section-header"]' 1
// @has - '//*[@id="main-content"]/h2[@class="small-section-header"]' 'Structs'
// @has - '//*[@id="main-content"]//a[@href="struct.Foo.html"]' 'Foo'
// @has 'foo/issue_109258/struct.Foo.html'
pub mod issue_109258 {
mod priv_mod {
pub struct Foo;
}
pub use self::priv_mod::Foo;
}


@ -122,8 +122,8 @@ mod banana {
weight: u32,
length: u16,
} // note: distinct type
// This should not trigger the lint because two::Banana is structurally equivalent to
// one::Banana.
// This should not trigger the lint because two::Banana is structurally equivalent to
// one::Banana.
extern "C" {
fn weigh_banana(count: *const Banana) -> u64;
}
@ -223,6 +223,27 @@ mod transparent {
}
}
#[allow(improper_ctypes)]
mod zst {
mod transparent {
#[repr(transparent)]
struct TransparentZst(());
extern "C" {
fn zst() -> ();
fn transparent_zst() -> TransparentZst;
}
}
mod not_transparent {
struct NotTransparentZst(());
extern "C" {
// These shouldn't warn since all return types are zero sized
fn zst() -> NotTransparentZst;
fn transparent_zst() -> NotTransparentZst;
}
}
}
mod missing_return_type {
mod a {
extern "C" {
@ -397,10 +418,14 @@ mod hidden_niche {
use std::num::NonZeroUsize;
#[repr(transparent)]
struct Transparent { x: NonZeroUsize }
struct Transparent {
x: NonZeroUsize,
}
#[repr(transparent)]
struct TransparentNoNiche { y: UnsafeCell<NonZeroUsize> }
struct TransparentNoNiche {
y: UnsafeCell<NonZeroUsize>,
}
extern "C" {
fn hidden_niche_transparent() -> Option<Transparent>;


@ -130,7 +130,7 @@ LL | fn transparent_incorrect() -> isize;
found `unsafe extern "C" fn() -> isize`
warning: `missing_return_type` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:238:13
--> $DIR/clashing-extern-fn.rs:259:13
|
LL | fn missing_return_type() -> usize;
| ---------------------------------- `missing_return_type` previously declared here
@ -142,7 +142,7 @@ LL | fn missing_return_type();
found `unsafe extern "C" fn()`
warning: `non_zero_usize` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:256:13
--> $DIR/clashing-extern-fn.rs:277:13
|
LL | fn non_zero_usize() -> core::num::NonZeroUsize;
| ----------------------------------------------- `non_zero_usize` previously declared here
@ -154,7 +154,7 @@ LL | fn non_zero_usize() -> usize;
found `unsafe extern "C" fn() -> usize`
warning: `non_null_ptr` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:258:13
--> $DIR/clashing-extern-fn.rs:279:13
|
LL | fn non_null_ptr() -> core::ptr::NonNull<usize>;
| ----------------------------------------------- `non_null_ptr` previously declared here
@ -166,7 +166,7 @@ LL | fn non_null_ptr() -> *const usize;
found `unsafe extern "C" fn() -> *const usize`
warning: `option_non_zero_usize_incorrect` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:356:13
--> $DIR/clashing-extern-fn.rs:377:13
|
LL | fn option_non_zero_usize_incorrect() -> usize;
| ---------------------------------------------- `option_non_zero_usize_incorrect` previously declared here
@ -178,7 +178,7 @@ LL | fn option_non_zero_usize_incorrect() -> isize;
found `unsafe extern "C" fn() -> isize`
warning: `option_non_null_ptr_incorrect` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:358:13
--> $DIR/clashing-extern-fn.rs:379:13
|
LL | fn option_non_null_ptr_incorrect() -> *const usize;
| --------------------------------------------------- `option_non_null_ptr_incorrect` previously declared here
@ -190,7 +190,7 @@ LL | fn option_non_null_ptr_incorrect() -> *const isize;
found `unsafe extern "C" fn() -> *const isize`
warning: `hidden_niche_transparent_no_niche` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:408:13
--> $DIR/clashing-extern-fn.rs:433:13
|
LL | fn hidden_niche_transparent_no_niche() -> usize;
| ------------------------------------------------ `hidden_niche_transparent_no_niche` previously declared here
@ -202,7 +202,7 @@ LL | fn hidden_niche_transparent_no_niche() -> Option<TransparentNoN
found `unsafe extern "C" fn() -> Option<TransparentNoNiche>`
warning: `hidden_niche_unsafe_cell` redeclared with a different signature
--> $DIR/clashing-extern-fn.rs:412:13
--> $DIR/clashing-extern-fn.rs:437:13
|
LL | fn hidden_niche_unsafe_cell() -> usize;
| --------------------------------------- `hidden_niche_unsafe_cell` previously declared here
@ -214,7 +214,7 @@ LL | fn hidden_niche_unsafe_cell() -> Option<UnsafeCell<NonZeroUsize
found `unsafe extern "C" fn() -> Option<UnsafeCell<NonZeroUsize>>`
warning: `extern` block uses type `Option<TransparentNoNiche>`, which is not FFI-safe
--> $DIR/clashing-extern-fn.rs:408:55
--> $DIR/clashing-extern-fn.rs:433:55
|
LL | fn hidden_niche_transparent_no_niche() -> Option<TransparentNoNiche>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe
@ -224,7 +224,7 @@ LL | fn hidden_niche_transparent_no_niche() -> Option<TransparentNoN
= note: `#[warn(improper_ctypes)]` on by default
warning: `extern` block uses type `Option<UnsafeCell<NonZeroUsize>>`, which is not FFI-safe
--> $DIR/clashing-extern-fn.rs:412:46
--> $DIR/clashing-extern-fn.rs:437:46
|
LL | fn hidden_niche_unsafe_cell() -> Option<UnsafeCell<NonZeroUsize>>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe


@ -0,0 +1,10 @@
// compile-flags: --test
#![feature(custom_test_frameworks)]
#![deny(unnameable_test_items)]
fn foo() {
#[test_case]
//~^ ERROR cannot test inner items [unnameable_test_items]
fn test2() {}
}


@ -0,0 +1,15 @@
error: cannot test inner items
--> $DIR/issue-107454.rs:7:5
|
LL | #[test_case]
| ^^^^^^^^^^^^
|
note: the lint level is defined here
--> $DIR/issue-107454.rs:4:9
|
LL | #![deny(unnameable_test_items)]
| ^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the attribute macro `test_case` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to previous error