Auto merge of #97365 - klensy:rustdoc-vs-clippy, r=notriddle

rustdoc: fix a few clippy lints. Fix a few clippy lints: the second commit covers the perf lints, the first covers the other ones.

This commit is contained in commit 6ac8adad1f.
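The changes below are largely mechanical. As a rough illustration of the most common fix in the diff, clippy's needless-borrow style lint, here is a minimal sketch; the names in it are invented and do not come from this PR:

```rust
// Minimal sketch of a needless borrow, assuming nothing from the diff below.
fn width(s: &str) -> usize {
    s.chars().count()
}

fn main() {
    let name: &str = "rustdoc";
    // Before: width(&name) passes a `&&str` that the compiler immediately
    // auto-dereferences; after: the existing reference is passed as-is.
    assert_eq!(width(name), 7);
}
```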
@@ -643,11 +643,11 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
     /// both for visual consistency between 'rustdoc' runs, and to
     /// make writing tests much easier
     #[inline]
-    fn sort_where_predicates(&self, mut predicates: &mut Vec<WherePredicate>) {
+    fn sort_where_predicates(&self, predicates: &mut Vec<WherePredicate>) {
         // We should never have identical bounds - and if we do,
         // they're visually identical as well. Therefore, using
         // an unstable sort is fine.
-        self.unstable_debug_sort(&mut predicates);
+        self.unstable_debug_sort(predicates);
     }

     /// Ensure that the bounds are in a consistent order. The precise

@@ -656,11 +656,11 @@ impl<'a, 'tcx> AutoTraitFinder<'a, 'tcx> {
     /// both for visual consistency between 'rustdoc' runs, and to
     /// make writing tests much easier
     #[inline]
-    fn sort_where_bounds(&self, mut bounds: &mut Vec<GenericBound>) {
+    fn sort_where_bounds(&self, bounds: &mut Vec<GenericBound>) {
         // We should never have identical bounds - and if we do,
         // they're visually identical as well. Therefore, using
         // an unstable sort is fine.
-        self.unstable_debug_sort(&mut bounds);
+        self.unstable_debug_sort(bounds);
     }

     /// This might look horrendously hacky, but it's actually not that bad.
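The two hunks above drop a `mut` binding and a redundant `&mut` re-borrow: the parameter is already a `&mut Vec<_>`, so re-borrowing it only builds a `&mut &mut Vec<_>`. A stand-alone sketch of the same pattern, with an invented function name:

```rust
// Sketch only; `sort_in_place` stands in for `sort_where_predicates` above.
fn sort_in_place(values: &mut Vec<i32>) {
    // Before: a `mut values` binding plus a call like `sort(&mut values)`,
    // i.e. a `&mut &mut Vec<i32>` that is immediately auto-dereferenced.
    // After: the mutable reference is used directly.
    values.sort_unstable();
}

fn main() {
    let mut v = vec![3, 1, 2];
    sort_in_place(&mut v);
    assert_eq!(v, [1, 2, 3]);
}
```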
@@ -248,7 +248,7 @@ impl<'tcx> Clean<'tcx, Option<WherePredicate>> for hir::WherePredicate<'tcx> {
             hir::WherePredicate::BoundPredicate(ref wbp) => {
                 let bound_params = wbp
                     .bound_generic_params
-                    .into_iter()
+                    .iter()
                     .map(|param| {
                         // Higher-ranked params must be lifetimes.
                         // Higher-ranked lifetimes can't have bounds.
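The `.into_iter()` → `.iter()` change above is clippy pointing out that calling `into_iter()` through a reference cannot take ownership anyway, so it is just `iter()` by another name. A small sketch under that assumption (`params` is an invented stand-in for `wbp.bound_generic_params`):

```rust
fn names(params: &[&str]) -> Vec<String> {
    // Before: params.into_iter()... -- on a borrowed slice this already
    // yields references, so `iter()` says what actually happens.
    params.iter().map(|p| p.to_string()).collect()
}

fn main() {
    assert_eq!(names(&["'a", "T"]), vec!["'a".to_string(), "T".to_string()]);
}
```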
@@ -525,7 +525,7 @@ fn clean_generic_param<'tcx>(
                 },
             )
         }
-        hir::GenericParamKind::Const { ref ty, default } => (
+        hir::GenericParamKind::Const { ty, default } => (
             param.name.ident().name,
             GenericParamDefKind::Const {
                 did: cx.tcx.hir().local_def_id(param.hir_id).to_def_id(),

@@ -947,7 +947,7 @@ fn clean_fn_decl_from_did_and_sig<'tcx>(
     // We assume all empty tuples are default return type. This theoretically can discard `-> ()`,
     // but shouldn't change any code meaning.
     let output = match sig.skip_binder().output().clean(cx) {
-        Type::Tuple(inner) if inner.len() == 0 => DefaultReturn,
+        Type::Tuple(inner) if inner.is_empty() => DefaultReturn,
         ty => Return(ty),
     };
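The `inner.len() == 0` → `inner.is_empty()` change is clippy's `len_zero` lint. A minimal sketch of the same guard shape, with an invented `Ret` enum standing in for rustdoc's return-type representation:

```rust
enum Ret {
    Default,
    Tuple(Vec<&'static str>),
}

fn describe(r: Ret) -> &'static str {
    match r {
        // Before: `Ret::Tuple(inner) if inner.len() == 0 => ...`
        Ret::Tuple(inner) if inner.is_empty() => "default return",
        Ret::Tuple(_) => "tuple return",
        Ret::Default => "default return",
    }
}

fn main() {
    assert_eq!(describe(Ret::Tuple(vec![])), "default return");
    assert_eq!(describe(Ret::Tuple(vec!["i32"])), "tuple return");
    assert_eq!(describe(Ret::Default), "default return");
}
```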
@@ -972,7 +972,7 @@ fn clean_fn_decl_from_did_and_sig<'tcx>(
 impl<'tcx> Clean<'tcx, FnRetTy> for hir::FnRetTy<'tcx> {
     fn clean(&self, cx: &mut DocContext<'tcx>) -> FnRetTy {
         match *self {
-            Self::Return(ref typ) => Return(typ.clean(cx)),
+            Self::Return(typ) => Return(typ.clean(cx)),
             Self::DefaultReturn(..) => DefaultReturn,
         }
     }

@@ -1013,13 +1013,13 @@ impl<'tcx> Clean<'tcx, Item> for hir::TraitItem<'tcx> {
         let local_did = self.def_id.to_def_id();
         cx.with_param_env(local_did, |cx| {
             let inner = match self.kind {
-                hir::TraitItemKind::Const(ref ty, Some(default)) => AssocConstItem(
+                hir::TraitItemKind::Const(ty, Some(default)) => AssocConstItem(
                     ty.clean(cx),
                     ConstantKind::Local { def_id: local_did, body: default },
                 ),
-                hir::TraitItemKind::Const(ref ty, None) => TyAssocConstItem(ty.clean(cx)),
+                hir::TraitItemKind::Const(ty, None) => TyAssocConstItem(ty.clean(cx)),
                 hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Provided(body)) => {
-                    let m = clean_function(cx, sig, &self.generics, body);
+                    let m = clean_function(cx, sig, self.generics, body);
                     MethodItem(m, None)
                 }
                 hir::TraitItemKind::Fn(ref sig, hir::TraitFn::Required(names)) => {

@@ -1060,16 +1060,16 @@ impl<'tcx> Clean<'tcx, Item> for hir::ImplItem<'tcx> {
         let local_did = self.def_id.to_def_id();
         cx.with_param_env(local_did, |cx| {
             let inner = match self.kind {
-                hir::ImplItemKind::Const(ref ty, expr) => {
+                hir::ImplItemKind::Const(ty, expr) => {
                     let default = ConstantKind::Local { def_id: local_did, body: expr };
                     AssocConstItem(ty.clean(cx), default)
                 }
                 hir::ImplItemKind::Fn(ref sig, body) => {
-                    let m = clean_function(cx, sig, &self.generics, body);
+                    let m = clean_function(cx, sig, self.generics, body);
                     let defaultness = cx.tcx.associated_item(self.def_id).defaultness;
                     MethodItem(m, Some(defaultness))
                 }
-                hir::ImplItemKind::TyAlias(ref hir_ty) => {
+                hir::ImplItemKind::TyAlias(hir_ty) => {
                     let type_ = hir_ty.clean(cx);
                     let generics = self.generics.clean(cx);
                     let item_type = hir_ty_to_ty(cx.tcx, hir_ty).clean(cx);

@@ -1292,7 +1292,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
     let hir::TyKind::Path(qpath) = kind else { unreachable!() };

     match qpath {
-        hir::QPath::Resolved(None, ref path) => {
+        hir::QPath::Resolved(None, path) => {
             if let Res::Def(DefKind::TyParam, did) = path.res {
                 if let Some(new_ty) = cx.substs.get(&did).and_then(|p| p.as_ty()).cloned() {
                     return new_ty;

@@ -1309,7 +1309,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
                 resolve_type(cx, path)
             }
         }
-        hir::QPath::Resolved(Some(ref qself), p) => {
+        hir::QPath::Resolved(Some(qself), p) => {
             // Try to normalize `<X as Y>::T` to a type
             let ty = hir_ty_to_ty(cx.tcx, hir_ty);
             if let Some(normalized_value) = normalize(cx, ty) {

@@ -1333,7 +1333,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
                 trait_,
             }
         }
-        hir::QPath::TypeRelative(ref qself, segment) => {
+        hir::QPath::TypeRelative(qself, segment) => {
             let ty = hir_ty_to_ty(cx.tcx, hir_ty);
             let res = match ty.kind() {
                 ty::Projection(proj) => Res::Def(DefKind::Trait, proj.trait_ref(cx.tcx).def_id),

@@ -1463,8 +1463,8 @@ impl<'tcx> Clean<'tcx, Type> for hir::Ty<'tcx> {
                 let lifetime = if elided { None } else { Some(l.clean(cx)) };
                 BorrowedRef { lifetime, mutability: m.mutbl, type_: box m.ty.clean(cx) }
             }
-            TyKind::Slice(ref ty) => Slice(box ty.clean(cx)),
-            TyKind::Array(ref ty, ref length) => {
+            TyKind::Slice(ty) => Slice(box ty.clean(cx)),
+            TyKind::Array(ty, ref length) => {
                 let length = match length {
                     hir::ArrayLen::Infer(_, _) => "_".to_string(),
                     hir::ArrayLen::Body(anon_const) => {

@@ -1499,7 +1499,7 @@ impl<'tcx> Clean<'tcx, Type> for hir::Ty<'tcx> {
                 let lifetime = if !lifetime.is_elided() { Some(lifetime.clean(cx)) } else { None };
                 DynTrait(bounds, lifetime)
             }
-            TyKind::BareFn(ref barefn) => BareFunction(box barefn.clean(cx)),
+            TyKind::BareFn(barefn) => BareFunction(box barefn.clean(cx)),
             // Rustdoc handles `TyKind::Err`s by turning them into `Type::Infer`s.
             TyKind::Infer | TyKind::Err => Infer,
             TyKind::Typeof(..) => panic!("unimplemented type {:?}", self.kind),

@@ -1908,7 +1908,7 @@ fn clean_maybe_renamed_item<'tcx>(
                 bounds: ty.bounds.iter().filter_map(|x| x.clean(cx)).collect(),
                 generics: ty.generics.clean(cx),
             }),
-            ItemKind::TyAlias(hir_ty, ref generics) => {
+            ItemKind::TyAlias(hir_ty, generics) => {
                 let rustdoc_ty = hir_ty.clean(cx);
                 let ty = hir_ty_to_ty(cx.tcx, hir_ty).clean(cx);
                 TypedefItem(Typedef {

@@ -1917,26 +1917,26 @@ fn clean_maybe_renamed_item<'tcx>(
                     item_type: Some(ty),
                 })
             }
-            ItemKind::Enum(ref def, ref generics) => EnumItem(Enum {
+            ItemKind::Enum(ref def, generics) => EnumItem(Enum {
                 variants: def.variants.iter().map(|v| v.clean(cx)).collect(),
                 generics: generics.clean(cx),
             }),
-            ItemKind::TraitAlias(ref generics, bounds) => TraitAliasItem(TraitAlias {
+            ItemKind::TraitAlias(generics, bounds) => TraitAliasItem(TraitAlias {
                 generics: generics.clean(cx),
                 bounds: bounds.iter().filter_map(|x| x.clean(cx)).collect(),
             }),
-            ItemKind::Union(ref variant_data, ref generics) => UnionItem(Union {
+            ItemKind::Union(ref variant_data, generics) => UnionItem(Union {
                 generics: generics.clean(cx),
                 fields: variant_data.fields().iter().map(|x| x.clean(cx)).collect(),
             }),
-            ItemKind::Struct(ref variant_data, ref generics) => StructItem(Struct {
+            ItemKind::Struct(ref variant_data, generics) => StructItem(Struct {
                 struct_type: CtorKind::from_hir(variant_data),
                 generics: generics.clean(cx),
                 fields: variant_data.fields().iter().map(|x| x.clean(cx)).collect(),
             }),
-            ItemKind::Impl(ref impl_) => return clean_impl(impl_, item.hir_id(), cx),
+            ItemKind::Impl(impl_) => return clean_impl(impl_, item.hir_id(), cx),
             // proc macros can have a name set by attributes
-            ItemKind::Fn(ref sig, ref generics, body_id) => {
+            ItemKind::Fn(ref sig, generics, body_id) => {
                 clean_fn_or_proc_macro(item, sig, generics, body_id, &mut name, cx)
             }
             ItemKind::Macro(ref macro_def, _) => {

@@ -1945,7 +1945,7 @@ fn clean_maybe_renamed_item<'tcx>(
                     source: display_macro_source(cx, name, macro_def, def_id, ty_vis),
                 })
             }
-            ItemKind::Trait(is_auto, unsafety, ref generics, bounds, item_ids) => {
+            ItemKind::Trait(is_auto, unsafety, generics, bounds, item_ids) => {
                 let items =
                     item_ids.iter().map(|ti| cx.tcx.hir().trait_item(ti.id).clean(cx)).collect();
                 TraitItem(Trait {

@@ -2192,7 +2192,7 @@ fn clean_maybe_renamed_foreign_item<'tcx>(
     let def_id = item.def_id.to_def_id();
     cx.with_param_env(def_id, |cx| {
         let kind = match item.kind {
-            hir::ForeignItemKind::Fn(decl, names, ref generics) => {
+            hir::ForeignItemKind::Fn(decl, names, generics) => {
                 let (generics, decl) = enter_impl_trait(cx, |cx| {
                     // NOTE: generics must be cleaned before args
                     let generics = generics.clean(cx);

@@ -2202,7 +2202,7 @@ fn clean_maybe_renamed_foreign_item<'tcx>(
                 });
                 ForeignFunctionItem(Function { decl, generics })
             }
-            hir::ForeignItemKind::Static(ref ty, mutability) => {
+            hir::ForeignItemKind::Static(ty, mutability) => {
                 ForeignStaticItem(Static { type_: ty.clean(cx), mutability, expr: None })
             }
             hir::ForeignItemKind::Type => ForeignTypeItem,

@@ -2232,7 +2232,7 @@ impl<'tcx> Clean<'tcx, TypeBindingKind> for hir::TypeBindingKind<'tcx> {
             hir::TypeBindingKind::Equality { ref term } => {
                 TypeBindingKind::Equality { term: term.clean(cx) }
             }
-            hir::TypeBindingKind::Constraint { ref bounds } => TypeBindingKind::Constraint {
+            hir::TypeBindingKind::Constraint { bounds } => TypeBindingKind::Constraint {
                 bounds: bounds.iter().filter_map(|b| b.clean(cx)).collect(),
             },
         }
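Most of the `clean`-side hunks above simply drop `ref` from patterns: the matched HIR fields are already references (or `Copy`), so `ref x` only adds another layer of indirection. A hedged sketch with an invented enum standing in for the HIR item kinds:

```rust
enum Kind<'a> {
    Const(&'a str),
    Type,
}

fn name_len(kind: &Kind<'_>) -> usize {
    match *kind {
        // Before: `Kind::Const(ref ty) => ...` would bind `ty: &&str`;
        // without `ref`, the `&str` is simply copied out.
        Kind::Const(ty) => ty.len(),
        Kind::Type => 0,
    }
}

fn main() {
    assert_eq!(name_len(&Kind::Const("usize")), 5);
    assert_eq!(name_len(&Kind::Type), 0);
}
```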
@@ -171,7 +171,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) {
         if state != Start && needs_space {
             printer.space();
         }
-        print_tt(printer, &tt);
+        print_tt(printer, tt);
         state = next_state;
     }
 }

@@ -880,7 +880,7 @@ impl AttributesExt for [ast::Attribute] {
         let mut doc_cfg = self
             .iter()
             .filter(|attr| attr.has_name(sym::doc))
-            .flat_map(|attr| attr.meta_item_list().unwrap_or_else(Vec::new))
+            .flat_map(|attr| attr.meta_item_list().unwrap_or_default())
             .filter(|attr| attr.has_name(sym::cfg))
             .peekable();
         if doc_cfg.peek().is_some() && doc_cfg_active {
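The `unwrap_or_else(Vec::new)` → `unwrap_or_default()` rewrites (here and in the doctest hunk further down) are among the perf-flavoured fixes: the default of `Vec` is the empty vector, so there is no need to name a constructor and wrap it in a closure. A sketch with an invented `meta_list` helper:

```rust
fn meta_list(present: bool) -> Option<Vec<&'static str>> {
    if present { Some(vec!["doc", "cfg"]) } else { None }
}

fn main() {
    // Before: meta_list(false).unwrap_or_else(Vec::new)
    let empty = meta_list(false).unwrap_or_default();
    assert!(empty.is_empty());
    assert_eq!(meta_list(true).unwrap_or_default().len(), 2);
}
```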
@@ -1011,7 +1011,7 @@ pub(crate) enum DocFragmentKind {
 fn add_doc_fragment(out: &mut String, frag: &DocFragment) {
     let s = frag.doc.as_str();
     let mut iter = s.lines();
-    if s == "" {
+    if s.is_empty() {
         out.push('\n');
         return;
     }

@@ -1594,17 +1594,17 @@ impl Type {
         match (self, other) {
             // Recursive cases.
             (Type::Tuple(a), Type::Tuple(b)) => {
-                a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_same(&b, cache))
+                a.len() == b.len() && a.iter().zip(b).all(|(a, b)| a.is_same(b, cache))
             }
-            (Type::Slice(a), Type::Slice(b)) => a.is_same(&b, cache),
-            (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_same(&b, cache),
+            (Type::Slice(a), Type::Slice(b)) => a.is_same(b, cache),
+            (Type::Array(a, al), Type::Array(b, bl)) => al == bl && a.is_same(b, cache),
             (Type::RawPointer(mutability, type_), Type::RawPointer(b_mutability, b_type_)) => {
-                mutability == b_mutability && type_.is_same(&b_type_, cache)
+                mutability == b_mutability && type_.is_same(b_type_, cache)
             }
             (
                 Type::BorrowedRef { mutability, type_, .. },
                 Type::BorrowedRef { mutability: b_mutability, type_: b_type_, .. },
-            ) => mutability == b_mutability && type_.is_same(&b_type_, cache),
+            ) => mutability == b_mutability && type_.is_same(b_type_, cache),
             // Placeholders and generics are equal to all other types.
             (Type::Infer, _) | (_, Type::Infer) => true,
             (Type::Generic(_), _) | (_, Type::Generic(_)) => true,

@@ -1667,7 +1667,7 @@ impl Type {

     pub(crate) fn projection(&self) -> Option<(&Type, DefId, PathSegment)> {
         if let QPath { self_type, trait_, assoc, .. } = self {
-            Some((&self_type, trait_.def_id(), *assoc.clone()))
+            Some((self_type, trait_.def_id(), *assoc.clone()))
         } else {
             None
         }

@@ -106,7 +106,7 @@ fn external_generic_args<'tcx>(
     bindings: Vec<TypeBinding>,
     substs: SubstsRef<'tcx>,
 ) -> GenericArgs {
-    let args = substs_to_args(cx, &substs, has_self);
+    let args = substs_to_args(cx, substs, has_self);

     if cx.tcx.fn_trait_kind_from_lang_item(did).is_some() {
         let inputs =

@@ -667,7 +667,7 @@ impl Options {
             return Err(1);
         }

-        let scrape_examples_options = ScrapeExamplesOptions::new(&matches, &diag)?;
+        let scrape_examples_options = ScrapeExamplesOptions::new(matches, &diag)?;
         let with_examples = matches.opt_strs("with-examples");
         let call_locations = crate::scrape_examples::load_call_locations(with_examples, &diag)?;

@@ -228,7 +228,7 @@ fn scrape_test_config(attrs: &[ast::Attribute]) -> GlobalTestOptions {
     let test_attrs: Vec<_> = attrs
         .iter()
         .filter(|a| a.has_name(sym::doc))
-        .flat_map(|a| a.meta_item_list().unwrap_or_else(Vec::new))
+        .flat_map(|a| a.meta_item_list().unwrap_or_default())
         .filter(|a| a.has_name(sym::test))
         .collect();
     let attrs = test_attrs.iter().flat_map(|a| a.meta_item_list().unwrap_or(&[]));

@@ -738,7 +738,7 @@ fn check_if_attr_is_complete(source: &str, edition: Edition) -> bool {
                 }
             };
             // If a parsing error happened, it's very likely that the attribute is incomplete.
-            if !parser.parse_attribute(InnerAttrPolicy::Permitted).is_ok() {
+            if parser.parse_attribute(InnerAttrPolicy::Permitted).is_err() {
                 return false;
             }
             // We now check if there is an unclosed delimiter for the attribute. To do so, we look at
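The `!(...).is_ok()` → `(...).is_err()` change in `check_if_attr_is_complete` removes a negation that clippy flags as non-minimal boolean logic. A stand-alone sketch (the `parse` helper is invented):

```rust
fn parse(input: &str) -> Result<i32, std::num::ParseIntError> {
    input.trim().parse()
}

fn main() {
    // Before: if !parse("abc").is_ok() { ... }
    // After:  `is_err()` states the same condition without the negation.
    assert!(parse("abc").is_err());
    assert!(parse(" 42 ").is_ok());
}
```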
@@ -456,7 +456,7 @@ impl<'a, 'tcx> DocFolder for CacheBuilder<'a, 'tcx> {
                     let ty::Adt(adt, _) = self.tcx.type_of(path.def_id()).kind() &&
                     adt.is_fundamental() {
                     for ty in generics {
-                        if let Some(did) = ty.def_id(&self.cache) {
+                        if let Some(did) = ty.def_id(self.cache) {
                             dids.insert(did);
                         }
                     }

@@ -5,6 +5,7 @@
 //! assume that HTML output is desired, although it may be possible to redesign
 //! them in the future to instead emit any format desired.

+use std::borrow::Cow;
 use std::cell::Cell;
 use std::fmt;
 use std::iter;

@@ -545,10 +546,10 @@ pub(crate) enum HrefError {
 // Panics if `syms` is empty.
 pub(crate) fn join_with_double_colon(syms: &[Symbol]) -> String {
     let mut s = String::with_capacity(estimate_item_path_byte_length(syms.len()));
-    s.push_str(&syms[0].as_str());
+    s.push_str(syms[0].as_str());
     for sym in &syms[1..] {
         s.push_str("::");
-        s.push_str(&sym.as_str());
+        s.push_str(sym.as_str());
     }
     s
 }

@@ -1069,7 +1070,7 @@ impl clean::Impl {
             write!(f, " for ")?;
         }

-        if let Some(ref ty) = self.kind.as_blanket_ty() {
+        if let Some(ty) = self.kind.as_blanket_ty() {
             fmt_type(ty, f, use_absolute, cx)?;
         } else {
             fmt_type(&self.for_, f, use_absolute, cx)?;

@@ -1295,9 +1296,11 @@ impl clean::Visibility {
         item_did: ItemId,
         cx: &'a Context<'tcx>,
     ) -> impl fmt::Display + 'a + Captures<'tcx> {
-        let to_print = match self {
-            clean::Public => "pub ".to_owned(),
-            clean::Inherited => String::new(),
+        use std::fmt::Write as _;
+
+        let to_print: Cow<'static, str> = match self {
+            clean::Public => "pub ".into(),
+            clean::Inherited => "".into(),
             clean::Visibility::Restricted(vis_did) => {
                 // FIXME(camelid): This may not work correctly if `item_did` is a module.
                 // However, rustdoc currently never displays a module's

@@ -1305,17 +1308,16 @@ impl clean::Visibility {
                 let parent_module = find_nearest_parent_module(cx.tcx(), item_did.expect_def_id());

                 if vis_did.is_crate_root() {
-                    "pub(crate) ".to_owned()
+                    "pub(crate) ".into()
                 } else if parent_module == Some(vis_did) {
                     // `pub(in foo)` where `foo` is the parent module
                     // is the same as no visibility modifier
-                    String::new()
+                    "".into()
                 } else if parent_module
-                    .map(|parent| find_nearest_parent_module(cx.tcx(), parent))
-                    .flatten()
+                    .and_then(|parent| find_nearest_parent_module(cx.tcx(), parent))
                     == Some(vis_did)
                 {
-                    "pub(super) ".to_owned()
+                    "pub(super) ".into()
                 } else {
                     let path = cx.tcx().def_path(vis_did);
                     debug!("path={:?}", path);

@@ -1325,14 +1327,14 @@ impl clean::Visibility {

                     let mut s = "pub(in ".to_owned();
                     for seg in &path.data[..path.data.len() - 1] {
-                        s.push_str(&format!("{}::", seg.data.get_opt_name().unwrap()));
+                        let _ = write!(s, "{}::", seg.data.get_opt_name().unwrap());
                     }
-                    s.push_str(&format!("{}) ", anchor));
-                    s
+                    let _ = write!(s, "{}) ", anchor);
+                    s.into()
                 }
             }
         };
-        display_fn(move |f| f.write_str(&to_print))
+        display_fn(move |f| write!(f, "{}", to_print))
     }

     /// This function is the same as print_with_space, except that it renders no links.

@@ -1358,9 +1360,7 @@ impl clean::Visibility {
             // `pub(in foo)` where `foo` is the parent module
             // is the same as no visibility modifier
             String::new()
-        } else if parent_module
-            .map(|parent| find_nearest_parent_module(tcx, parent))
-            .flatten()
+        } else if parent_module.and_then(|parent| find_nearest_parent_module(tcx, parent))
             == Some(vis_did)
         {
             "pub(super) ".to_owned()
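The visibility-printer hunks above replace `s.push_str(&format!(...))` with `write!(s, ...)` and switch the intermediate string to `Cow<'static, str>`; `format!` builds a temporary `String` that is copied and thrown away, while `write!` appends in place. The same rewrite shows up later in `sidebar_module` and `Toc::print_inner`. A sketch of just the `write!` part (the `sidebar` function here is invented):

```rust
use std::fmt::Write as _;

fn sidebar(items: &[(&str, &str)]) -> String {
    let mut out = String::new();
    for (id, name) in items {
        // Before: out.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", id, name));
        // Writing into a String cannot fail, so the Result is discarded,
        // mirroring the `let _ = write!(...)` pattern in the diff.
        let _ = write!(out, "<li><a href=\"#{}\">{}</a></li>", id, name);
    }
    out
}

fn main() {
    assert_eq!(
        sidebar(&[("structs", "Structs")]),
        "<li><a href=\"#structs\">Structs</a></li>"
    );
}
```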
@@ -1023,7 +1023,7 @@ impl Markdown<'_> {
         let Markdown {
             content: md,
             links,
-            mut ids,
+            ids,
             error_codes: codes,
             edition,
             playground,

@@ -1046,7 +1046,7 @@ impl Markdown<'_> {

         let mut s = String::with_capacity(md.len() * 3 / 2);

-        let p = HeadingLinks::new(p, None, &mut ids, heading_offset);
+        let p = HeadingLinks::new(p, None, ids, heading_offset);
         let p = Footnotes::new(p);
         let p = LinkReplacer::new(p.map(|(ev, _)| ev), links);
         let p = TableWrapper::new(p);

@@ -1059,7 +1059,7 @@ impl Markdown<'_> {

 impl MarkdownWithToc<'_> {
     pub(crate) fn into_string(self) -> String {
-        let MarkdownWithToc(md, mut ids, codes, edition, playground) = self;
+        let MarkdownWithToc(md, ids, codes, edition, playground) = self;

         let p = Parser::new_ext(md, main_body_opts()).into_offset_iter();

@@ -1068,7 +1068,7 @@ impl MarkdownWithToc<'_> {
         let mut toc = TocBuilder::new();

         {
-            let p = HeadingLinks::new(p, Some(&mut toc), &mut ids, HeadingOffset::H1);
+            let p = HeadingLinks::new(p, Some(&mut toc), ids, HeadingOffset::H1);
             let p = Footnotes::new(p);
             let p = TableWrapper::new(p.map(|(ev, _)| ev));
             let p = CodeBlocks::new(p, codes, edition, playground);

@@ -1081,7 +1081,7 @@ impl MarkdownWithToc<'_> {

 impl MarkdownHtml<'_> {
     pub(crate) fn into_string(self) -> String {
-        let MarkdownHtml(md, mut ids, codes, edition, playground) = self;
+        let MarkdownHtml(md, ids, codes, edition, playground) = self;

         // This is actually common enough to special-case
         if md.is_empty() {

@@ -1097,7 +1097,7 @@ impl MarkdownHtml<'_> {

         let mut s = String::with_capacity(md.len() * 3 / 2);

-        let p = HeadingLinks::new(p, None, &mut ids, HeadingOffset::H1);
+        let p = HeadingLinks::new(p, None, ids, HeadingOffset::H1);
         let p = Footnotes::new(p);
         let p = TableWrapper::new(p.map(|(ev, _)| ev));
         let p = CodeBlocks::new(p, codes, edition, playground);

@@ -232,18 +232,18 @@ impl<'tcx> Context<'tcx> {

             let mut path = String::new();
             for name in &names[..names.len() - 1] {
-                path.push_str(&name.as_str());
+                path.push_str(name.as_str());
                 path.push('/');
             }
-            path.push_str(&item_path(ty, &names.last().unwrap().as_str()));
+            path.push_str(&item_path(ty, names.last().unwrap().as_str()));
             match self.shared.redirections {
                 Some(ref redirections) => {
                     let mut current_path = String::new();
                     for name in &self.current {
-                        current_path.push_str(&name.as_str());
+                        current_path.push_str(name.as_str());
                         current_path.push('/');
                     }
-                    current_path.push_str(&item_path(ty, &names.last().unwrap().as_str()));
+                    current_path.push_str(&item_path(ty, names.last().unwrap().as_str()));
                     redirections.borrow_mut().insert(current_path, path);
                 }
                 None => return layout::redirect(&format!("{}{}", self.root_path(), path)),

@@ -840,7 +840,7 @@ fn render_stability_since_raw(
     let mut stability = String::new();

     if let Some(ver) = stable_version {
-        stability.push_str(&ver.as_str());
+        stability.push_str(ver.as_str());
         title.push_str(&format!("Stable since Rust version {}", ver));
     }

@@ -2299,7 +2299,7 @@ fn sidebar_trait(cx: &Context<'_>, buf: &mut Buffer, it: &clean::Item, t: &clean
             buf,
             "foreign-impls",
             "Implementations on Foreign Types",
-            res.iter().map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(&name))),
+            res.iter().map(|(name, id)| format!("<a href=\"#{}\">{}</a>", id, Escape(name))),
         );
     }
 }

@@ -2537,6 +2537,8 @@ fn item_ty_to_section(ty: ItemType) -> ItemSection {
 }

 fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
+    use std::fmt::Write as _;
+
     let mut sidebar = String::new();

     let item_sections_in_use: FxHashSet<_> = items

@@ -2554,7 +2556,7 @@ fn sidebar_module(buf: &mut Buffer, items: &[clean::Item]) {
         .map(|it| item_ty_to_section(it.type_()))
         .collect();
     for &sec in ItemSection::ALL.iter().filter(|sec| item_sections_in_use.contains(sec)) {
-        sidebar.push_str(&format!("<li><a href=\"#{}\">{}</a></li>", sec.id(), sec.name()));
+        let _ = write!(sidebar, "<li><a href=\"#{}\">{}</a></li>", sec.id(), sec.name());
     }

     if !sidebar.is_empty() {

@@ -2798,7 +2800,7 @@ fn render_call_locations(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item) {
             hi - lo
         };

-        let mut locs = call_locations.into_iter().collect::<Vec<_>>();
+        let mut locs = call_locations.iter().collect::<Vec<_>>();
         locs.sort_by_key(sort_criterion);
         locs
     };

@@ -2842,7 +2844,7 @@ fn render_call_locations(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item) {
     if it.peek().is_some() {
         write!(w, r#"<div class="example-links">Additional examples can be found in:<br><ul>"#);
         it.for_each(|(_, call_data)| {
-            let (url, _) = link_to_loc(&call_data, &call_data.locations[0]);
+            let (url, _) = link_to_loc(call_data, &call_data.locations[0]);
             write!(
                 w,
                 r#"<li><a href="{url}">{name}</a></li>"#,

@@ -37,7 +37,7 @@ pub(crate) fn build_index<'tcx>(
                 desc,
                 parent: Some(did),
                 parent_idx: None,
-                search_type: get_function_type_for_search(item, tcx, &cache),
+                search_type: get_function_type_for_search(item, tcx, cache),
                 aliases: item.attrs.get_doc_aliases(),
             });
         }

@@ -182,8 +182,8 @@ pub(crate) fn build_index<'tcx>(
         })
         .expect("failed serde conversion")
         // All these `replace` calls are because we have to go through JS string for JSON content.
-        .replace(r#"\"#, r"\\")
-        .replace(r#"'"#, r"\'")
+        .replace('\\', r"\\")
+        .replace('\'', r"\'")
         // We need to escape double quotes for the JSON.
         .replace("\\\"", "\\\\\"")
     )
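The search-index hunk above swaps one-character string patterns for `char` patterns in `str::replace`; with a `char` the search can take a faster specialized path, which is why this lands in the perf commit. A sketch (the `escape_for_js` name is invented):

```rust
fn escape_for_js(s: &str) -> String {
    // Before: s.replace(r#"\"#, r"\\").replace(r#"'"#, r"\'")
    s.replace('\\', r"\\").replace('\'', r"\'")
}

fn main() {
    assert_eq!(escape_for_js(r"a\b'c"), r"a\\b\'c");
}
```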
@@ -49,7 +49,7 @@ pub(crate) fn collect_spans_and_sources(
         if generate_link_to_definition {
             tcx.hir().walk_toplevel_module(&mut visitor);
         }
-        let sources = sources::collect_local_sources(tcx, src_root, &krate);
+        let sources = sources::collect_local_sources(tcx, src_root, krate);
         (sources, visitor.matches)
     } else {
         (Default::default(), Default::default())

@@ -163,15 +163,18 @@ impl TocBuilder {

 impl Toc {
     fn print_inner(&self, v: &mut String) {
+        use std::fmt::Write as _;
+
         v.push_str("<ul>");
         for entry in &self.entries {
             // recursively format this table of contents
-            v.push_str(&format!(
+            let _ = write!(
+                v,
                 "\n<li><a href=\"#{id}\">{num} {name}</a>",
                 id = entry.id,
                 num = entry.sec_number,
                 name = entry.name
-            ));
+            );
             entry.children.print_inner(&mut *v);
             v.push_str("</li>");
         }

@@ -265,7 +265,7 @@ impl<'a, 'b> DocVisitor for CoverageCalculator<'a, 'b> {
         self.items.entry(filename).or_default().count_item(
             has_docs,
             has_doc_example,
-            should_have_doc_example(self.ctx, &i),
+            should_have_doc_example(self.ctx, i),
             should_have_docs,
         );
     }

@@ -160,7 +160,7 @@ impl<'a, 'tcx> DocVisitor for SyntaxChecker<'a, 'tcx> {
             sp,
         );
         for code_block in markdown::rust_code_blocks(dox, &extra) {
-            self.check_rust_syntax(&item, dox, code_block);
+            self.check_rust_syntax(item, dox, code_block);
         }
     }

@@ -35,9 +35,9 @@ pub(crate) fn check_doc_test_visibility(krate: Crate, cx: &mut DocContext<'_>) -

 impl<'a, 'tcx> DocVisitor for DocTestVisibilityLinter<'a, 'tcx> {
     fn visit_item(&mut self, item: &Item) {
-        let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new);
+        let dox = item.attrs.collapsed_doc_value().unwrap_or_default();

-        look_for_tests(self.cx, &dox, &item);
+        look_for_tests(self.cx, &dox, item);

         self.visit_item_recur(item)
     }

@@ -494,7 +494,7 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
                     DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Variant,
                     def_id,
                 ) => (Res::from_def_id(self.cx.tcx, self.cx.tcx.parent(def_id)), Some(def_id)),
-                _ => ((res, None)),
+                _ => (res, None),
             });
         } else if ns == MacroNS {
             return Err(UnresolvedPath {

@@ -636,10 +636,9 @@ impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
             Res::Primitive(prim) => {
                 self.resolve_primitive_associated_item(prim, ns, item_name).or_else(|| {
                     self.primitive_type_to_ty(prim)
-                        .map(|ty| {
+                        .and_then(|ty| {
                             resolve_associated_trait_item(ty, module_id, item_name, ns, self.cx)
                         })
-                        .flatten()
                         .map(|item| (root_res, item.def_id))
                 })
             }
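The last hunk above collapses `.map(..).flatten()` on an `Option` into `.and_then(..)`, which is the same computation with one fewer step. A sketch with invented helpers:

```rust
fn parent(id: u32) -> Option<u32> {
    if id > 0 { Some(id - 1) } else { None }
}

fn lookup(id: Option<u32>) -> Option<u32> {
    // Before: id.map(parent).flatten()
    id.and_then(parent)
}

fn main() {
    assert_eq!(lookup(Some(2)), Some(1));
    assert_eq!(lookup(Some(0)), None);
    assert_eq!(lookup(None), None);
}
```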
@@ -903,7 +902,7 @@ impl<'a, 'tcx> DocVisitor for LinkCollector<'a, 'tcx> {
             tmp_links.insert(doc.clone(), preprocessed_markdown_links(&doc));
         }
         for md_link in &tmp_links[&doc] {
-            let link = self.resolve_link(&item, &doc, parent_node, md_link);
+            let link = self.resolve_link(item, &doc, parent_node, md_link);
             if let Some(link) = link {
                 self.cx.cache.intra_doc_links.entry(item.item_id).or_default().push(link);
             }

@@ -1136,7 +1135,7 @@ impl LinkCollector<'_, '_> {
                     let kind = self.cx.tcx.def_kind(id);
                     self.verify_disambiguator(
                         path_str,
-                        &ori_link,
+                        ori_link,
                         kind,
                         id,
                         disambiguator,

@@ -1150,14 +1149,14 @@ impl LinkCollector<'_, '_> {
                     && item.item_id.is_local()
                     && !self.cx.tcx.features().intra_doc_pointers
                 {
-                    self.report_rawptr_assoc_feature_gate(dox, &ori_link, item);
+                    self.report_rawptr_assoc_feature_gate(dox, ori_link, item);
                 }
             } else {
                 match disambiguator {
                     Some(Disambiguator::Primitive | Disambiguator::Namespace(_)) | None => {}
                     Some(other) => {
                         self.report_disambiguator_mismatch(
-                            path_str, &ori_link, other, res, &diag_info,
+                            path_str, ori_link, other, res, &diag_info,
                         );
                         return None;
                     }

@@ -1180,7 +1179,7 @@ impl LinkCollector<'_, '_> {
                     };
                     self.verify_disambiguator(
                         path_str,
-                        &ori_link,
+                        ori_link,
                         kind_for_dis,
                         id_for_dis,
                         disambiguator,

@@ -1274,7 +1273,7 @@ impl LinkCollector<'_, '_> {
             }
             suggest_disambiguator(resolved, diag, path_str, &ori_link.link, sp);
         };
-        report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, &diag_info, callback);
+        report_diagnostic(self.cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, diag_info, callback);
     }

     fn report_rawptr_assoc_feature_gate(&self, dox: &str, ori_link: &MarkdownLink, item: &Item) {

@@ -1930,7 +1929,7 @@ fn anchor_failure(
     msg: &str,
     anchor_idx: usize,
 ) {
-    report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, &msg, &diag_info, |diag, sp| {
+    report_diagnostic(cx.tcx, BROKEN_INTRA_DOC_LINKS, msg, &diag_info, |diag, sp| {
         if let Some(mut sp) = sp {
             if let Some((fragment_offset, _)) =
                 diag_info.ori_link.char_indices().filter(|(_, x)| *x == '#').nth(anchor_idx)

@@ -91,11 +91,7 @@ fn extract_path_backwards(text: &str, end_pos: usize) -> Option<usize> {
         }
         break;
     }
-    if current_pos == end_pos {
-        return None;
-    } else {
-        return Some(current_pos);
-    }
+    if current_pos == end_pos { None } else { Some(current_pos) }
 }

 fn extract_html_tag(
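The `extract_path_backwards` hunk above folds an `if`/`else` whose branches only `return` into a single tail expression. A minimal sketch of the same shape (the `pick` name is invented):

```rust
fn pick(current_pos: usize, end_pos: usize) -> Option<usize> {
    // Before:
    //   if current_pos == end_pos { return None; } else { return Some(current_pos); }
    if current_pos == end_pos { None } else { Some(current_pos) }
}

fn main() {
    assert_eq!(pick(3, 3), None);
    assert_eq!(pick(1, 3), Some(1));
}
```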
@@ -11,7 +11,6 @@ pub(crate) trait DocVisitor: Sized {
             StrippedItem(..) => unreachable!(),
             ModuleItem(i) => {
                 self.visit_mod(i);
-                return;
             }
             StructItem(i) => i.fields.iter().for_each(|x| self.visit_item(x)),
             UnionItem(i) => i.fields.iter().for_each(|x| self.visit_item(x)),

@@ -365,7 +365,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
                 om.items.push((item, renamed));
             }
         }
-        hir::ItemKind::Impl(ref impl_) => {
+        hir::ItemKind::Impl(impl_) => {
             // Don't duplicate impls when inlining or if it's implementing a trait, we'll pick
             // them up regardless of where they're located.
             if !self.inlining && impl_.of_trait.is_none() {