11157: internal: Remove `SemanticScope::speculative_resolve_as_mac` r=Veykril a=Veykril

Fixes https://github.com/rust-analyzer/rust-analyzer/issues/11132

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
This commit is contained in:
bors[bot] 2022-01-04 16:49:47 +00:00 committed by GitHub
commit 68bc12c3b8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 131 additions and 57 deletions

View File

@ -5,6 +5,7 @@ mod source_to_def;
use std::{cell::RefCell, fmt}; use std::{cell::RefCell, fmt};
use base_db::{FileId, FileRange}; use base_db::{FileId, FileRange};
use either::Either;
use hir_def::{ use hir_def::{
body, body,
resolver::{self, HasResolver, Resolver, TypeNs}, resolver::{self, HasResolver, Resolver, TypeNs},
@ -18,13 +19,14 @@ use smallvec::{smallvec, SmallVec};
use syntax::{ use syntax::{
algo::skip_trivia_token, algo::skip_trivia_token,
ast::{self, HasAttrs, HasGenericParams, HasLoopBody}, ast::{self, HasAttrs, HasGenericParams, HasLoopBody},
match_ast, AstNode, Direction, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize, match_ast, AstNode, AstToken, Direction, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken,
TextSize, T,
}; };
use crate::{ use crate::{
db::HirDatabase, db::HirDatabase,
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
source_analyzer::{resolve_hir_path, resolve_hir_path_as_macro, SourceAnalyzer}, source_analyzer::{resolve_hir_path, SourceAnalyzer},
Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource, Access, AssocItem, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path, HirFileId, Impl, InFile, Label, LifetimeParam, Local, MacroDef, Module, ModuleDef, Name, Path,
ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
@ -354,6 +356,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.resolve_bind_pat_to_const(pat) self.imp.resolve_bind_pat_to_const(pat)
} }
pub fn resolve_derive_ident(
&self,
derive: &ast::Attr,
ident: &ast::Ident,
) -> Option<PathResolution> {
self.imp.resolve_derive_ident(derive, ident)
}
// FIXME: use this instead? // FIXME: use this instead?
// pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>;
@ -471,12 +481,12 @@ impl<'db> SemanticsImpl<'db> {
} }
fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> { fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
let item = attr.syntax().parent().and_then(ast::Item::cast)?; let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
let file_id = self.find_file(item.syntax()).file_id; let file_id = self.find_file(adt.syntax()).file_id;
let item = InFile::new(file_id, &item); let adt = InFile::new(file_id, &adt);
let src = InFile::new(file_id, attr.clone()); let src = InFile::new(file_id, attr.clone());
self.with_ctx(|ctx| { self.with_ctx(|ctx| {
let res = ctx.attr_to_derive_macro_call(item, src)?; let res = ctx.attr_to_derive_macro_call(adt, src)?;
Some(res.to_vec()) Some(res.to_vec())
}) })
} }
@ -894,6 +904,70 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat)
} }
fn resolve_derive_ident(
&self,
derive: &ast::Attr,
ident: &ast::Ident,
) -> Option<PathResolution> {
debug_assert!(ident.syntax().parent().and_then(ast::TokenTree::cast).is_some());
debug_assert!(ident.syntax().ancestors().any(|anc| anc == *derive.syntax()));
// derive macros are always at depth 2, tokentree -> meta -> attribute
let syntax = ident.syntax();
let tt = derive.token_tree()?;
let file = self.find_file(derive.syntax());
let adt = derive.syntax().parent().and_then(ast::Adt::cast)?;
let res = self.with_ctx(|ctx| {
let attr_def = ctx.attr_to_def(file.with_value(derive.clone()))?;
let derives = ctx.attr_to_derive_macro_call(
file.with_value(&adt),
file.with_value(derive.clone()),
)?;
let mut derive_paths = attr_def.parse_path_comma_token_tree()?;
let derive_idx = tt
.syntax()
.children_with_tokens()
.filter_map(SyntaxElement::into_token)
.take_while(|tok| tok != syntax)
.filter(|t| t.kind() == T![,])
.count();
let path_segment_idx = syntax
.siblings_with_tokens(Direction::Prev)
.filter_map(SyntaxElement::into_token)
.take_while(|tok| matches!(tok.kind(), T![:] | T![ident]))
.filter(|tok| tok.kind() == T![ident])
.count();
let mut mod_path = derive_paths.nth(derive_idx)?;
if path_segment_idx < mod_path.len() {
// the path for the given ident is a qualifier, resolve to module if possible
while path_segment_idx < mod_path.len() {
mod_path.pop_segment();
}
Some(Either::Left(mod_path))
} else {
// otherwise fetch the derive
Some(Either::Right(derives[derive_idx]))
}
})?;
match res {
Either::Left(path) => resolve_hir_path(
self.db,
&self.scope(derive.syntax()).resolver,
&Path::from_known_path(path, []),
)
.filter(|res| matches!(res, PathResolution::Def(ModuleDef::Module(_)))),
Either::Right(derive) => derive
.map(|call| MacroDef { id: self.db.lookup_intern_macro_call(call).def })
.map(PathResolution::Macro),
}
}
fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> { fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
self.analyze(literal.syntax()) self.analyze(literal.syntax())
.record_literal_missing_fields(self.db, literal) .record_literal_missing_fields(self.db, literal)
@ -1151,6 +1225,7 @@ to_def_impls![
(crate::Local, ast::SelfParam, self_param_to_def), (crate::Local, ast::SelfParam, self_param_to_def),
(crate::Label, ast::Label, label_to_def), (crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def), (crate::Adt, ast::Adt, adt_to_def),
(crate::Attr, ast::Attr, attr_to_def),
]; ];
fn find_root(node: &SyntaxNode) -> SyntaxNode { fn find_root(node: &SyntaxNode) -> SyntaxNode {
@ -1230,14 +1305,4 @@ impl<'a> SemanticsScope<'a> {
let path = Path::from_src(path.clone(), &ctx)?; let path = Path::from_src(path.clone(), &ctx)?;
resolve_hir_path(self.db, &self.resolver, &path) resolve_hir_path(self.db, &self.resolver, &path)
} }
/// Resolve a path as-if it was written at the given scope. This is
/// necessarily a heuristic, as it doesn't take hygiene into account.
// FIXME: This special casing solely exists for attributes for now
// ideally we should have a path resolution infra that properly knows about overlapping namespaces
pub fn speculative_resolve_as_mac(&self, path: &ast::Path) -> Option<MacroDef> {
let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
let path = Path::from_src(path.clone(), &ctx)?;
resolve_hir_path_as_macro(self.db, &self.resolver, &path)
}
} }

View File

@ -210,6 +210,19 @@ impl SourceToDefCtx<'_, '_> {
ast::Adt::Union(it) => self.union_to_def(InFile::new(file_id, it)).map(AdtId::UnionId), ast::Adt::Union(it) => self.union_to_def(InFile::new(file_id, it)).map(AdtId::UnionId),
} }
} }
pub(super) fn attr_to_def(
&mut self,
InFile { file_id, value }: InFile<ast::Attr>,
) -> Option<crate::Attr> {
// FIXME: Use dynmap?
let adt = value.syntax().parent().and_then(ast::Adt::cast)?;
let attr_pos = ast::HasAttrs::attrs(&adt).position(|it| it == value)?;
let attrs = {
let def = self.adt_to_def(InFile::new(file_id, adt))?;
self.db.attrs(def.into())
};
attrs.get(attr_pos).cloned()
}
pub(super) fn bind_pat_to_def( pub(super) fn bind_pat_to_def(
&mut self, &mut self,
src: InFile<ast::IdentPat>, src: InFile<ast::IdentPat>,
@ -246,7 +259,7 @@ impl SourceToDefCtx<'_, '_> {
pub(super) fn attr_to_derive_macro_call( pub(super) fn attr_to_derive_macro_call(
&mut self, &mut self,
item: InFile<&ast::Item>, item: InFile<&ast::Adt>,
src: InFile<ast::Attr>, src: InFile<ast::Attr>,
) -> Option<&[Option<MacroCallId>]> { ) -> Option<&[Option<MacroCallId>]> {
let map = self.dyn_map(item)?; let map = self.dyn_map(item)?;

View File

@ -720,11 +720,8 @@ impl Attr {
Self::from_src(db, ast, hygiene, id) Self::from_src(db, ast, hygiene, id)
} }
/// Parses this attribute as a `#[derive]`, returns an iterator that yields all contained paths /// Parses this attribute as a token tree consisting of comma separated paths.
/// to derive macros. pub fn parse_path_comma_token_tree(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
///
/// Returns `None` when the attribute does not have a well-formed `#[derive]` attribute input.
pub(crate) fn parse_derive(&self) -> Option<impl Iterator<Item = ModPath> + '_> {
let args = match self.input.as_deref() { let args = match self.input.as_deref() {
Some(AttrInput::TokenTree(args, _)) => args, Some(AttrInput::TokenTree(args, _)) => args,
_ => return None, _ => return None,
@ -749,7 +746,11 @@ impl Attr {
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
return Some(paths.into_iter()); Some(paths.into_iter())
}
pub fn path(&self) -> &ModPath {
&self.path
} }
pub fn string_value(&self) -> Option<&SmolStr> { pub fn string_value(&self) -> Option<&SmolStr> {

View File

@ -1145,7 +1145,7 @@ impl DefCollector<'_> {
} }
} }
match attr.parse_derive() { match attr.parse_path_comma_token_tree() {
Some(derive_macros) => { Some(derive_macros) => {
let mut len = 0; let mut len = 0;
for (idx, path) in derive_macros.enumerate() { for (idx, path) in derive_macros.enumerate() {

View File

@ -1381,6 +1381,18 @@ mod foo {
// ^^^^ // ^^^^
} }
#[derive(foo::Copy$0)] #[derive(foo::Copy$0)]
struct Foo;
"#,
);
check(
r#"
//- minicore:derive
mod foo {
// ^^^
#[rustc_builtin_macro]
pub macro Copy {}
}
#[derive(foo$0::Copy)]
struct Foo; struct Foo;
"#, "#,
); );

View File

@ -3,7 +3,7 @@
use hir::{AsAssocItem, HasVisibility, Semantics}; use hir::{AsAssocItem, HasVisibility, Semantics};
use ide_db::{ use ide_db::{
defs::{Definition, NameClass, NameRefClass}, defs::{Definition, NameClass, NameRefClass},
helpers::{try_resolve_derive_input, FamousDefs}, helpers::FamousDefs,
RootDatabase, SymbolKind, RootDatabase, SymbolKind,
}; };
use rustc_hash::FxHashMap; use rustc_hash::FxHashMap;
@ -39,16 +39,17 @@ pub(super) fn token(
INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(), INT_NUMBER | FLOAT_NUMBER => HlTag::NumericLiteral.into(),
BYTE => HlTag::ByteLiteral.into(), BYTE => HlTag::ByteLiteral.into(),
CHAR => HlTag::CharLiteral.into(), CHAR => HlTag::CharLiteral.into(),
IDENT if parent_matches::<ast::TokenTree>(&token) => { IDENT => {
match token.ancestors().nth(2).and_then(ast::Attr::cast) { let tt = ast::TokenTree::cast(token.parent()?)?;
Some(attr) => { let ident = ast::Ident::cast(token)?;
match try_resolve_derive_input(sema, &attr, &ast::Ident::cast(token).unwrap()) { // from this point on we are inside a token tree, this only happens for identifiers
Some(res) => highlight_def(sema, krate, Definition::from(res)), // that were not mapped down into macro invocations
None => HlTag::None.into(), (|| {
} let attr = tt.parent_meta()?.parent_attr()?;
} let res = sema.resolve_derive_ident(&attr, &ident)?;
None => HlTag::None.into(), Some(highlight_def(sema, krate, Definition::from(res)))
} })()
.unwrap_or_else(|| HlTag::None.into())
} }
p if p.is_punct() => punctuation(sema, token, p), p if p.is_punct() => punctuation(sema, token, p),
k if k.is_keyword() => keyword(sema, token, k)?, k if k.is_keyword() => keyword(sema, token, k)?,

View File

@ -17,7 +17,7 @@ use syntax::{
match_ast, AstToken, SyntaxKind, SyntaxNode, SyntaxToken, match_ast, AstToken, SyntaxKind, SyntaxNode, SyntaxToken,
}; };
use crate::{helpers::try_resolve_derive_input, RootDatabase}; use crate::RootDatabase;
// FIXME: a more precise name would probably be `Symbol`? // FIXME: a more precise name would probably be `Symbol`?
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)] #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@ -56,7 +56,8 @@ impl Definition {
.and_then(|tt| tt.parent_meta()) .and_then(|tt| tt.parent_meta())
.and_then(|meta| meta.parent_attr()); .and_then(|meta| meta.parent_attr());
if let Some(attr) = attr { if let Some(attr) = attr {
return try_resolve_derive_input(&sema, &attr, &ident) return sema
.resolve_derive_ident(&attr, &ident)
.map(Into::into) .map(Into::into)
.into_iter() .into_iter()
.collect(); .collect();

View File

@ -74,26 +74,6 @@ pub fn get_path_at_cursor_in_tt(cursor: &ast::Ident) -> Option<ast::Path> {
}) })
} }
/// Parses and resolves the path at the cursor position in the given attribute, if it is a derive.
/// This special case is required because the derive macro is a compiler builtin that discards the input derives.
pub fn try_resolve_derive_input(
sema: &hir::Semantics<RootDatabase>,
attr: &ast::Attr,
cursor: &ast::Ident,
) -> Option<PathResolution> {
let path = get_path_in_derive_attr(sema, attr, cursor)?;
let scope = sema.scope(attr.syntax());
// FIXME: This double resolve shouldn't be necessary
// It's only here so we prefer macros over other namespaces
match scope.speculative_resolve_as_mac(&path) {
Some(mac) if mac.kind() == hir::MacroKind::Derive => Some(PathResolution::Macro(mac)),
Some(_) => return None,
None => scope
.speculative_resolve(&path)
.filter(|res| matches!(res, PathResolution::Def(ModuleDef::Module(_)))),
}
}
/// Picks the token with the highest rank returned by the passed in function. /// Picks the token with the highest rank returned by the passed in function.
pub fn pick_best_token( pub fn pick_best_token(
tokens: TokenAtOffset<SyntaxToken>, tokens: TokenAtOffset<SyntaxToken>,

View File

@ -146,6 +146,7 @@ impl ImportAssets {
if let Some(_) = path.qualifier() { if let Some(_) = path.qualifier() {
return None; return None;
} }
let name = NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string()); let name = NameToImport::exact_case_sensitive(path.segment()?.name_ref()?.to_string());
let candidate_node = attr.syntax().clone(); let candidate_node = attr.syntax().clone();
Some(Self { Some(Self {