6750: Remove documentation query, move doc handling to attributes r=matklad a=Veykril

Fixes #3182

Removes the documentation query in favor of `Attrs::docs`. `Attrs` already handled doc comments partially, but the allocation-saving check was wrong, so doc comments were only picked up when other attributes existed as well. Unfortunately, the `new` constructor now has to do an intermediate allocation, because we need to preserve the source order of interleaved doc attributes and doc comments.
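
For illustration, a minimal (hypothetical) item where that ordering matters; with this change, `Attrs::docs` should return the three lines joined by newlines, in source order:

```rust
/// First line, from a doc comment.
#[doc = "Second line, from a doc attribute."]
/// Third line, from another doc comment.
struct Mixed;

// Expected `Attrs::docs` output for `Mixed` (lines kept in source order):
// "First line, from a doc comment.\nSecond line, from a doc attribute.\nThird line, from another doc comment."
```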

I've also partially adjusted the `hover` module so that its tests exercise the changes. It still uses the `HasSource` trait in a few places because of the `ShortLabel` trait, which is only implemented for the AST types and not the HIR types. Should `ShortLabel` ideally be implemented for the HIR types as well? (That would be a follow-up PR, of course.)
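
For reference, the docs lookup that hover now performs boils down to the following sketch (condensed from `from_def_source_labeled` in the diff below; the free-standing helper itself is hypothetical):

```rust
use hir::HasAttrs;
use ide_db::RootDatabase;

// Hypothetical helper showing the new lookup path: documentation is read from
// a definition's attributes instead of being re-extracted from its source AST.
fn docs_for<D: HasAttrs>(db: &RootDatabase, def: D) -> Option<String> {
    def.attrs(db).docs().map(Into::into)
}
```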

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
bors[bot] 2020-12-08 13:23:12 +00:00 committed by GitHub
commit 2aa7f2ece5
12 changed files with 137 additions and 213 deletions

View File

@@ -1,6 +1,9 @@
//! Attributes & documentation for hir types.
use hir_def::{
attr::Attrs, docs::Documentation, path::ModPath, resolver::HasResolver, AttrDefId, ModuleDefId,
attr::{Attrs, Documentation},
path::ModPath,
resolver::HasResolver,
AttrDefId, ModuleDefId,
};
use hir_expand::hygiene::Hygiene;
use hir_ty::db::HirDatabase;
@@ -38,7 +41,7 @@ macro_rules! impl_has_attrs {
}
fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
let def = AttrDefId::$def_id(self.into());
db.documentation(def)
db.attrs(def).docs()
}
fn resolve_doc_path(self, db: &dyn HirDatabase, link: &str, ns: Option<Namespace>) -> Option<ModuleDef> {
let def = AttrDefId::$def_id(self.into());

View File

@@ -2,12 +2,12 @@
pub use hir_def::db::{
AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQueryQuery,
CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery,
ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, ImportMapQuery,
InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery,
InternImplQuery, InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery,
InternUnionQuery, ItemTreeQuery, LangItemQuery, ModuleLangItemsQuery, StaticDataQuery,
StructDataQuery, TraitDataQuery, TypeAliasDataQuery, UnionDataQuery,
CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery, ExprScopesQuery,
FunctionDataQuery, GenericParamsQuery, ImplDataQuery, ImportMapQuery, InternConstQuery,
InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery,
InternStaticQuery, InternStructQuery, InternTraitQuery, InternTypeAliasQuery, InternUnionQuery,
ItemTreeQuery, LangItemQuery, ModuleLangItemsQuery, StaticDataQuery, StructDataQuery,
TraitDataQuery, TypeAliasDataQuery, UnionDataQuery,
};
pub use hir_expand::db::{
AstDatabase, AstDatabaseStorage, AstIdMapQuery, InternEagerExpansionQuery, InternMacroQuery,

View File

@@ -44,10 +44,9 @@ pub use crate::{
pub use hir_def::{
adt::StructKind,
attr::Attrs,
attr::{Attrs, Documentation},
body::scope::ExprScopes,
builtin_type::BuiltinType,
docs::Documentation,
find_path::PrefixKind,
import_map,
item_scope::ItemInNs,

View File

@@ -5,10 +5,11 @@ use std::{ops, sync::Arc};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{hygiene::Hygiene, AstId, InFile};
use itertools::Itertools;
use mbe::ast_to_token_tree;
use syntax::{
ast::{self, AstNode, AttrsOwner},
SmolStr,
AstToken, SmolStr,
};
use tt::Subtree;
@@ -21,6 +22,22 @@ use crate::{
AdtId, AttrDefId, Lookup,
};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Documentation(String);
impl Documentation {
pub fn as_str(&self) -> &str {
&self.0
}
}
impl Into<String> for Documentation {
fn into(self) -> String {
self.0
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Attrs {
entries: Option<Arc<[Attr]>>,
@@ -93,18 +110,25 @@ impl Attrs {
}
pub(crate) fn new(owner: &dyn AttrsOwner, hygiene: &Hygiene) -> Attrs {
let docs = ast::CommentIter::from_syntax_node(owner.syntax()).doc_comment_text().map(
|docs_text| Attr {
input: Some(AttrInput::Literal(SmolStr::new(docs_text))),
path: ModPath::from(hir_expand::name!(doc)),
},
);
let mut attrs = owner.attrs().peekable();
let entries = if attrs.peek().is_none() {
let docs = ast::CommentIter::from_syntax_node(owner.syntax()).map(|docs_text| {
(
docs_text.syntax().text_range().start(),
docs_text.doc_comment().map(|doc| Attr {
input: Some(AttrInput::Literal(SmolStr::new(doc))),
path: ModPath::from(hir_expand::name!(doc)),
}),
)
});
let attrs = owner
.attrs()
.map(|attr| (attr.syntax().text_range().start(), Attr::from_src(attr, hygiene)));
// sort here by syntax node offset because the source can have doc attributes and doc strings be interleaved
let attrs: Vec<_> = docs.chain(attrs).sorted_by_key(|&(offset, _)| offset).collect();
let entries = if attrs.is_empty() {
// Avoid heap allocation
None
} else {
Some(attrs.flat_map(|ast| Attr::from_src(ast, hygiene)).chain(docs).collect())
Some(attrs.into_iter().flat_map(|(_, attr)| attr).collect())
};
Attrs { entries }
}
@@ -140,6 +164,24 @@ impl Attrs {
Some(cfg) => cfg_options.check(&cfg) != Some(false),
}
}
pub fn docs(&self) -> Option<Documentation> {
let docs = self
.by_key("doc")
.attrs()
.flat_map(|attr| match attr.input.as_ref()? {
AttrInput::Literal(s) => Some(s),
AttrInput::TokenTree(_) => None,
})
.intersperse(&SmolStr::new_inline("\n"))
.map(|it| it.as_str())
.collect::<String>();
if docs.is_empty() {
None
} else {
Some(Documentation(docs.into()))
}
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
@@ -160,8 +202,10 @@ impl Attr {
fn from_src(ast: ast::Attr, hygiene: &Hygiene) -> Option<Attr> {
let path = ModPath::from_src(ast.path()?, hygiene)?;
let input = if let Some(lit) = ast.literal() {
// FIXME: escape? raw string?
let value = lit.syntax().first_token()?.text().trim_matches('"').into();
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
_ => lit.syntax().first_token()?.text().trim_matches('"').into(),
};
Some(AttrInput::Literal(value))
} else if let Some(tt) = ast.token_tree() {
Some(AttrInput::TokenTree(ast_to_token_tree(&tt)?.0))

View File

@@ -10,7 +10,6 @@ use crate::{
attr::Attrs,
body::{scope::ExprScopes, Body, BodySourceMap},
data::{ConstData, FunctionData, ImplData, StaticData, TraitData, TypeAliasData},
docs::Documentation,
generics::GenericParams,
import_map::ImportMap,
item_tree::ItemTree,
@@ -105,11 +104,6 @@ pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
#[salsa::invoke(LangItems::lang_item_query)]
fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option<LangItemTarget>;
// FIXME(https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102)
// Remove this query completely, in favor of `Attrs::docs` method
#[salsa::invoke(Documentation::documentation_query)]
fn documentation(&self, def: AttrDefId) -> Option<Documentation>;
#[salsa::invoke(ImportMap::import_map_query)]
fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
}

View File

@@ -1,121 +0,0 @@
//! Defines hir documentation.
//!
//! This really shouldn't exist, instead, we should deshugar doc comments into attributes, see
//! https://github.com/rust-analyzer/rust-analyzer/issues/2148#issuecomment-550519102
use std::sync::Arc;
use either::Either;
use itertools::Itertools;
use syntax::{ast, SmolStr};
use crate::{
db::DefDatabase,
src::{HasChildSource, HasSource},
AdtId, AttrDefId, Lookup,
};
/// Holds documentation
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Documentation(Arc<str>);
impl Into<String> for Documentation {
fn into(self) -> String {
self.as_str().to_owned()
}
}
impl Documentation {
fn new(s: &str) -> Documentation {
Documentation(s.into())
}
pub fn from_ast<N>(node: &N) -> Option<Documentation>
where
N: ast::DocCommentsOwner + ast::AttrsOwner,
{
docs_from_ast(node)
}
pub fn as_str(&self) -> &str {
&*self.0
}
pub(crate) fn documentation_query(
db: &dyn DefDatabase,
def: AttrDefId,
) -> Option<Documentation> {
match def {
AttrDefId::ModuleId(module) => {
let def_map = db.crate_def_map(module.krate);
let src = def_map[module.local_id].declaration_source(db)?;
docs_from_ast(&src.value)
}
AttrDefId::FieldId(it) => {
let src = it.parent.child_source(db);
match &src.value[it.local_id] {
Either::Left(_tuple) => None,
Either::Right(record) => docs_from_ast(record),
}
}
AttrDefId::AdtId(it) => match it {
AdtId::StructId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AdtId::EnumId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AdtId::UnionId(it) => docs_from_ast(&it.lookup(db).source(db).value),
},
AttrDefId::EnumVariantId(it) => {
let src = it.parent.child_source(db);
docs_from_ast(&src.value[it.local_id])
}
AttrDefId::TraitId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::MacroDefId(it) => docs_from_ast(&it.ast_id?.to_node(db.upcast())),
AttrDefId::ConstId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::StaticId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::FunctionId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::TypeAliasId(it) => docs_from_ast(&it.lookup(db).source(db).value),
AttrDefId::ImplId(_) => None,
}
}
}
pub(crate) fn docs_from_ast<N>(node: &N) -> Option<Documentation>
where
N: ast::DocCommentsOwner + ast::AttrsOwner,
{
let doc_comment_text = node.doc_comment_text();
let doc_attr_text = expand_doc_attrs(node);
let docs = merge_doc_comments_and_attrs(doc_comment_text, doc_attr_text);
docs.map(|it| Documentation::new(&it))
}
fn merge_doc_comments_and_attrs(
doc_comment_text: Option<String>,
doc_attr_text: Option<String>,
) -> Option<String> {
match (doc_comment_text, doc_attr_text) {
(Some(mut comment_text), Some(attr_text)) => {
comment_text.push_str("\n");
comment_text.push_str(&attr_text);
Some(comment_text)
}
(Some(comment_text), None) => Some(comment_text),
(None, Some(attr_text)) => Some(attr_text),
(None, None) => None,
}
}
fn expand_doc_attrs(owner: &dyn ast::AttrsOwner) -> Option<String> {
let mut docs = String::new();
owner
.attrs()
.filter_map(|attr| attr.as_simple_key_value().filter(|(key, _)| key == "doc"))
.map(|(_, value)| value)
.intersperse(SmolStr::new_inline("\n"))
// No FromIterator<SmolStr> for String
.for_each(|s| docs.push_str(s.as_str()));
if docs.is_empty() {
None
} else {
Some(docs)
}
}

View File

@@ -31,7 +31,6 @@ pub mod adt;
pub mod data;
pub mod generics;
pub mod lang_item;
pub mod docs;
pub mod expr;
pub mod body;

View File

@@ -372,7 +372,7 @@ fn module_resolution_explicit_path_mod_rs_with_win_separator() {
check(
r#"
//- /main.rs
#[path = "module\bar\mod.rs"]
#[path = r"module\bar\mod.rs"]
mod foo;
//- /module/bar/mod.rs

View File

@@ -1,6 +1,6 @@
use hir::{
Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay,
Module, ModuleDef, ModuleSource, Semantics,
Adt, AsAssocItem, AssocItemContainer, FieldSource, HasAttrs, HasSource, HirDisplay, Module,
ModuleDef, ModuleSource, Semantics,
};
use ide_db::base_db::SourceDatabase;
use ide_db::{
@@ -319,31 +319,27 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> {
let mod_path = definition_mod_path(db, &def);
return match def {
Definition::Macro(it) => {
let src = it.source(db);
let docs = Documentation::from_ast(&src.value).map(Into::into);
hover_markup(docs, Some(macro_label(&src.value)), mod_path)
let label = macro_label(&it.source(db).value);
from_def_source_labeled(db, it, Some(label), mod_path)
}
Definition::Field(it) => {
let src = it.source(db);
match src.value {
FieldSource::Named(it) => {
let docs = Documentation::from_ast(&it).map(Into::into);
hover_markup(docs, it.short_label(), mod_path)
}
_ => None,
Definition::Field(def) => {
let src = def.source(db).value;
if let FieldSource::Named(it) = src {
from_def_source_labeled(db, def, it.short_label(), mod_path)
} else {
None
}
}
Definition::ModuleDef(it) => match it {
ModuleDef::Module(it) => match it.definition_source(db).value {
ModuleSource::Module(it) => {
let docs = Documentation::from_ast(&it).map(Into::into);
hover_markup(docs, it.short_label(), mod_path)
}
ModuleSource::SourceFile(it) => {
let docs = Documentation::from_ast(&it).map(Into::into);
hover_markup(docs, it.short_label(), mod_path)
}
},
ModuleDef::Module(it) => from_def_source_labeled(
db,
it,
match it.definition_source(db).value {
ModuleSource::Module(it) => it.short_label(),
ModuleSource::SourceFile(it) => it.short_label(),
},
mod_path,
),
ModuleDef::Function(it) => from_def_source(db, it, mod_path),
ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it, mod_path),
ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it, mod_path),
@@ -371,12 +367,24 @@ fn hover_for_definition(db: &RootDatabase, def: Definition) -> Option<Markup> {
fn from_def_source<A, D>(db: &RootDatabase, def: D, mod_path: Option<String>) -> Option<Markup>
where
D: HasSource<Ast = A>,
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel + ast::AttrsOwner,
D: HasSource<Ast = A> + HasAttrs + Copy,
A: ShortLabel,
{
let src = def.source(db);
let docs = Documentation::from_ast(&src.value).map(Into::into);
hover_markup(docs, src.value.short_label(), mod_path)
let short_label = def.source(db).value.short_label();
from_def_source_labeled(db, def, short_label, mod_path)
}
fn from_def_source_labeled<D>(
db: &RootDatabase,
def: D,
short_label: Option<String>,
mod_path: Option<String>,
) -> Option<Markup>
where
D: HasAttrs,
{
let docs = def.attrs(db).docs().map(Into::into);
hover_markup(docs, short_label, mod_path)
}
}

View File

@@ -166,7 +166,6 @@ impl RootDatabase {
hir::db::ModuleLangItemsQuery
hir::db::CrateLangItemsQuery
hir::db::LangItemQuery
hir::db::DocumentationQuery
hir::db::ImportMapQuery
// HirDatabase

View File

@@ -24,6 +24,28 @@ impl ast::Comment {
.unwrap();
prefix
}
/// Returns the textual content of a doc comment block as a single string.
/// That is, strips leading `///` (+ optional 1 character of whitespace),
/// trailing `*/`, trailing whitespace and then joins the lines.
pub fn doc_comment(&self) -> Option<&str> {
let kind = self.kind();
match kind {
CommentKind { shape, doc: Some(_) } => {
let prefix = kind.prefix();
let text = &self.text().as_str()[prefix.len()..];
let ws = text.chars().next().filter(|c| c.is_whitespace());
let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
match shape {
CommentShape::Block if text.ends_with("*/") => {
Some(&text[..text.len() - "*/".len()])
}
_ => Some(text),
}
}
_ => None,
}
}
}
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -73,6 +95,11 @@ impl CommentKind {
.unwrap();
kind
}
fn prefix(&self) -> &'static str {
let &(prefix, _) = CommentKind::BY_PREFIX.iter().find(|(_, kind)| kind == self).unwrap();
prefix
}
}
impl ast::Whitespace {

View File

@@ -91,40 +91,12 @@ impl CommentIter {
/// That is, strips leading `///` (+ optional 1 character of whitespace),
/// trailing `*/`, trailing whitespace and then joins the lines.
pub fn doc_comment_text(self) -> Option<String> {
let mut has_comments = false;
let docs = self
.filter(|comment| comment.kind().doc.is_some())
.map(|comment| {
has_comments = true;
let prefix_len = comment.prefix().len();
let line: &str = comment.text().as_str();
// Determine if the prefix or prefix + 1 char is stripped
let pos =
if let Some(ws) = line.chars().nth(prefix_len).filter(|c| c.is_whitespace()) {
prefix_len + ws.len_utf8()
} else {
prefix_len
};
let end = if comment.kind().shape.is_block() && line.ends_with("*/") {
line.len() - 2
} else {
line.len()
};
// Note that we do not trim the end of the line here
// since whitespace can have special meaning at the end
// of a line in markdown.
line[pos..end].to_owned()
})
.join("\n");
if has_comments {
Some(docs)
} else {
let docs =
self.filter_map(|comment| comment.doc_comment().map(ToOwned::to_owned)).join("\n");
if docs.is_empty() {
None
} else {
Some(docs)
}
}
}