10562: fix: Fix clippy attribute completions always inserting the `clippy::` prefix r=Veykril a=Veykril

Fixes https://github.com/rust-analyzer/rust-analyzer/issues/7144

Co-authored-by: Lukas Wirth <lukastw97@gmail.com>
commit d9080addf9
bors[bot] 2021-10-17 11:30:49 +00:00, committed by GitHub
7 changed files with 429 additions and 354 deletions

crates/ide_completion/src/completions/attribute.rs

@@ -5,9 +5,10 @@
 use hir::HasAttrs;
 use ide_db::helpers::generated_lints::{CLIPPY_LINTS, DEFAULT_LINTS, FEATURES};
+use itertools::Itertools;
 use once_cell::sync::Lazy;
-use rustc_hash::{FxHashMap, FxHashSet};
-use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, NodeOrToken, SyntaxKind, T};
+use rustc_hash::FxHashMap;
+use syntax::{algo::non_trivia_sibling, ast, AstNode, Direction, SyntaxKind, T};
 
 use crate::{
     context::CompletionContext,
@@ -303,31 +304,38 @@ const ATTRIBUTES: &[AttrCompletion] = &[
     .prefer_inner(),
 ];
 
-fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Option<FxHashSet<String>> {
-    let (l_paren, r_paren) = derive_input.l_paren_token().zip(derive_input.r_paren_token())?;
-    let mut input_derives = FxHashSet::default();
-    let mut tokens = derive_input
-        .syntax()
-        .children_with_tokens()
-        .filter_map(NodeOrToken::into_token)
-        .skip_while(|token| token != &l_paren)
-        .skip(1)
-        .take_while(|token| token != &r_paren)
-        .peekable();
-    let mut input = String::new();
-    while tokens.peek().is_some() {
-        for token in tokens.by_ref().take_while(|t| t.kind() != T![,]) {
-            input.push_str(token.text());
-        }
-
-        if !input.is_empty() {
-            input_derives.insert(input.trim().to_owned());
-        }
-
-        input.clear();
-    }
-
-    Some(input_derives)
+fn parse_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
+    let r_paren = input.r_paren_token()?;
+    let tokens = input
+        .syntax()
+        .children_with_tokens()
+        .skip(1)
+        .take_while(|it| it.as_token() != Some(&r_paren));
+    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+    Some(
+        input_expressions
+            .into_iter()
+            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+            .collect::<Vec<ast::Path>>(),
+    )
+}
+
+fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
+    let r_paren = input.r_paren_token()?;
+    let tokens = input
+        .syntax()
+        .children_with_tokens()
+        .skip(1)
+        .take_while(|it| it.as_token() != Some(&r_paren));
+    let input_expressions = tokens.into_iter().group_by(|tok| tok.kind() == T![,]);
+    Some(
+        input_expressions
+            .into_iter()
+            .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+            .filter_map(|mut tokens| ast::Expr::parse(&tokens.join("")).ok())
+            .collect::<Vec<ast::Expr>>(),
+    )
 }
#[test]

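Both new helpers above share one splitting idea: take the tokens between the parentheses, group them into runs separated by commas, and try to parse each run as a path or expression. Here is a minimal, self-contained sketch of just that grouping step, using itertools' `group_by` on plain string tokens rather than rust-analyzer's syntax tokens (all names in it are illustrative, not the crate's API):

use itertools::Itertools;

fn main() {
    // Stand-ins for the tokens between `(` and `)` in e.g. `#[derive(Clone, Debug)]`.
    let tokens = ["Clone", ",", " ", "Debug", ",", " ", "PartialEq"];

    // Group adjacent tokens into comma runs and non-comma runs, keep only the
    // non-comma runs, and glue each run back together into a string.
    let grouped = tokens.iter().group_by(|tok| **tok == ",");
    let items: Vec<String> = grouped
        .into_iter()
        .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
        .map(|group| group.copied().collect::<String>().trim().to_owned())
        .collect();

    assert_eq!(items, ["Clone", "Debug", "PartialEq"]);
}

The real code then hands each run to `ast::Path::parse` or `ast::Expr::parse`, which is what lets the callers inspect qualifiers instead of comparing raw strings.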
crates/ide_completion/src/completions/attribute/derive.rs

@@ -2,7 +2,7 @@
 use hir::HasAttrs;
 use itertools::Itertools;
 use rustc_hash::FxHashMap;
-use syntax::ast;
+use syntax::{ast, SmolStr};
 
 use crate::{
     context::CompletionContext,
@@ -15,26 +15,31 @@ pub(super) fn complete_derive(
     ctx: &CompletionContext,
     derive_input: ast::TokenTree,
 ) {
-    if let Some(existing_derives) = super::parse_comma_sep_input(derive_input) {
+    if let Some(existing_derives) = super::parse_comma_sep_paths(derive_input) {
         for (derive, docs) in get_derive_names_in_scope(ctx) {
+            let label;
             let (label, lookup) = if let Some(derive_completion) = DEFAULT_DERIVE_COMPLETIONS
                 .iter()
                 .find(|derive_completion| derive_completion.label == derive)
             {
                 let mut components = vec![derive_completion.label];
-                components.extend(
-                    derive_completion
-                        .dependencies
-                        .iter()
-                        .filter(|&&dependency| !existing_derives.contains(dependency)),
-                );
+                components.extend(derive_completion.dependencies.iter().filter(|&&dependency| {
+                    !existing_derives
+                        .iter()
+                        .filter_map(|it| it.as_single_name_ref())
+                        .any(|it| it.text() == dependency)
+                }));
                 let lookup = components.join(", ");
-                let label = components.iter().rev().join(", ");
-                (label, Some(lookup))
-            } else if existing_derives.contains(&derive) {
+                label = components.iter().rev().join(", ");
+                (&*label, Some(lookup))
+            } else if existing_derives
+                .iter()
+                .filter_map(|it| it.as_single_name_ref())
+                .any(|it| it.text().as_str() == derive)
+            {
                 continue;
             } else {
-                (derive, None)
+                (&*derive, None)
             };
             let mut item =
                 CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
@@ -52,12 +57,12 @@ pub(super) fn complete_derive(
 fn get_derive_names_in_scope(
     ctx: &CompletionContext,
-) -> FxHashMap<String, Option<hir::Documentation>> {
+) -> FxHashMap<SmolStr, Option<hir::Documentation>> {
     let mut result = FxHashMap::default();
     ctx.process_all_names(&mut |name, scope_def| {
         if let hir::ScopeDef::MacroDef(mac) = scope_def {
             if mac.kind() == hir::MacroKind::Derive {
-                result.insert(name.to_string(), mac.docs(ctx.db));
+                result.insert(name.to_smol_str(), mac.docs(ctx.db));
             }
         }
     });

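For context, the dependency handling in `complete_derive` makes completing a derive also insert the traits it requires, minus any that are already listed. A rough standalone sketch of that selection logic follows; the table and helper are hypothetical simplifications of `DEFAULT_DERIVE_COMPLETIONS`, and unlike the real code it compares plain strings rather than parsed paths:

struct DeriveCompletion {
    label: &'static str,
    dependencies: &'static [&'static str],
}

// Hypothetical subset of the dependency table.
const COMPLETIONS: &[DeriveCompletion] = &[
    DeriveCompletion { label: "Eq", dependencies: &["PartialEq"] },
    DeriveCompletion { label: "Ord", dependencies: &["PartialOrd", "Eq", "PartialEq"] },
];

/// Insert text for completing `derive` inside `#[derive(...)]`,
/// given the names already listed there.
fn insert_text(derive: &str, existing: &[&str]) -> String {
    match COMPLETIONS.iter().find(|c| c.label == derive) {
        Some(c) => {
            let mut components = vec![c.label];
            // Pull in required traits, but skip ones the user already derives.
            components.extend(c.dependencies.iter().copied().filter(|dep| !existing.contains(dep)));
            components.join(", ")
        }
        None => derive.to_owned(),
    }
}

fn main() {
    // `Eq` is already derived, so completing `Ord` only adds what's missing.
    assert_eq!(insert_text("Ord", &["Eq"]), "Ord, PartialOrd, PartialEq");
}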
crates/ide_completion/src/completions/attribute/lint.rs

@@ -1,6 +1,6 @@
 //! Completion for lints
 use ide_db::helpers::generated_lints::Lint;
-use syntax::ast;
+use syntax::{ast, T};
 
 use crate::{
     context::CompletionContext,
@@ -14,17 +14,51 @@ pub(super) fn complete_lint(
     derive_input: ast::TokenTree,
     lints_completions: &[Lint],
 ) {
-    if let Some(existing_lints) = super::parse_comma_sep_input(derive_input) {
-        for lint_completion in
-            lints_completions.iter().filter(|completion| !existing_lints.contains(completion.label))
-        {
-            let mut item = CompletionItem::new(
-                CompletionKind::Attribute,
-                ctx.source_range(),
-                lint_completion.label,
-            );
+    if let Some(existing_lints) = super::parse_comma_sep_paths(derive_input) {
+        for &Lint { label, description } in lints_completions {
+            let (qual, name) = {
+                // FIXME: change `Lint`'s label to not store a path in it but split the prefix off instead?
+                let mut parts = label.split("::");
+                let ns_or_label = match parts.next() {
+                    Some(it) => it,
+                    None => continue,
+                };
+                let label = parts.next();
+                match label {
+                    Some(label) => (Some(ns_or_label), label),
+                    None => (None, ns_or_label),
+                }
+            };
+            let lint_already_annotated = existing_lints
+                .iter()
+                .filter_map(|path| {
+                    let q = path.qualifier();
+                    if q.as_ref().and_then(|it| it.qualifier()).is_some() {
+                        return None;
+                    }
+                    Some((q.and_then(|it| it.as_single_name_ref()), path.segment()?.name_ref()?))
+                })
+                .any(|(q, name_ref)| {
+                    let qualifier_matches = match (q, qual) {
+                        (None, None) => true,
+                        (None, Some(_)) => false,
+                        (Some(_), None) => false,
+                        (Some(q), Some(ns)) => q.text() == ns,
+                    };
+                    qualifier_matches && name_ref.text() == name
+                });
+            if lint_already_annotated {
+                continue;
+            }
+            let insert = match qual {
+                Some(qual) if !ctx.previous_token_is(T![:]) => format!("{}::{}", qual, name),
+                _ => name.to_owned(),
+            };
+            let mut item =
+                CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
             item.kind(CompletionItemKind::Attribute)
-                .documentation(hir::Documentation::new(lint_completion.description.to_owned()));
+                .insert_text(insert)
+                .documentation(hir::Documentation::new(description.to_owned()));
             item.add_to(acc)
         }
     }

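The core of the fix is visible in the `insert` computation above: a qualified lint label such as `clippy::as_conversions` only keeps its prefix when the cursor is not already sitting behind `::`. A compact sketch of that decision, with hypothetical names standing in for `Lint` and the cursor check:

/// What gets inserted for a lint label such as "clippy::as_conversions",
/// depending on whether the token before the cursor is already `:`.
fn insert_text(label: &str, after_colon: bool) -> String {
    let mut parts = label.splitn(2, "::");
    let ns_or_label = parts.next().unwrap_or(label);
    match parts.next() {
        // Qualified label: drop the prefix if the user already typed it.
        Some(name) if after_colon => name.to_owned(),
        Some(name) => format!("{}::{}", ns_or_label, name),
        // Unqualified label such as "dead_code".
        None => ns_or_label.to_owned(),
    }
}

fn main() {
    assert_eq!(insert_text("clippy::as_conversions", false), "clippy::as_conversions");
    // Completing inside `#[allow(clippy::$0)]` previously produced a second `clippy::` here.
    assert_eq!(insert_text("clippy::as_conversions", true), "as_conversions");
    assert_eq!(insert_text("dead_code", false), "dead_code");
}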
crates/ide_completion/src/completions/attribute/repr.rs

@@ -8,29 +8,34 @@ use crate::{
     Completions,
 };
 
-pub(super) fn complete_repr(
-    acc: &mut Completions,
-    ctx: &CompletionContext,
-    derive_input: ast::TokenTree,
-) {
-    if let Some(existing_reprs) = super::parse_comma_sep_input(derive_input) {
-        for repr_completion in REPR_COMPLETIONS {
-            if existing_reprs
+pub(super) fn complete_repr(acc: &mut Completions, ctx: &CompletionContext, input: ast::TokenTree) {
+    if let Some(existing_reprs) = super::parse_comma_sep_expr(input) {
+        for &ReprCompletion { label, snippet, lookup, collides } in REPR_COMPLETIONS {
+            let repr_already_annotated = existing_reprs
                 .iter()
-                .any(|it| repr_completion.label == it || repr_completion.collides.contains(&&**it))
-            {
+                .filter_map(|expr| match expr {
+                    ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+                    ast::Expr::CallExpr(call) => match call.expr()? {
+                        ast::Expr::PathExpr(path) => path.path()?.as_single_name_ref(),
+                        _ => return None,
+                    },
+                    _ => None,
+                })
+                .any(|it| {
+                    let text = it.text();
+                    lookup.unwrap_or(label) == text || collides.contains(&text.as_str())
+                });
+            if repr_already_annotated {
                 continue;
             }
-            let mut item = CompletionItem::new(
-                CompletionKind::Attribute,
-                ctx.source_range(),
-                repr_completion.label,
-            );
+            let mut item =
+                CompletionItem::new(CompletionKind::Attribute, ctx.source_range(), label);
             item.kind(CompletionItemKind::Attribute);
-            if let Some(lookup) = repr_completion.lookup {
+            if let Some(lookup) = lookup {
                 item.lookup_by(lookup);
             }
-            if let Some((snippet, cap)) = repr_completion.snippet.zip(ctx.config.snippet_cap) {
+            if let Some((snippet, cap)) = snippet.zip(ctx.config.snippet_cap) {
                 item.insert_snippet(cap, snippet);
             }
             item.add_to(acc);

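The collision handling above filters out `repr` completions that are either already present or mutually exclusive with an existing one; note how both a bare path (`C`) and a call (`align(1)`) reduce to a single name before the comparison. A freestanding sketch of that check, with an invented two-entry table in place of the real `REPR_COMPLETIONS`:

struct ReprCompletion {
    label: &'static str,
    lookup: Option<&'static str>,
    collides: &'static [&'static str],
}

// Invented two-entry table; the real REPR_COMPLETIONS list is longer.
const COMPLETIONS: &[ReprCompletion] = &[
    ReprCompletion { label: "align($0)", lookup: Some("align"), collides: &["transparent", "packed"] },
    ReprCompletion { label: "transparent", lookup: None, collides: &["C", "align", "packed"] },
];

/// `existing` holds the names already written inside `#[repr(...)]`;
/// a candidate is dropped when it matches one of them or collides with one.
fn already_annotated(candidate: &ReprCompletion, existing: &[&str]) -> bool {
    existing.iter().any(|it| {
        candidate.lookup.unwrap_or(candidate.label) == *it || candidate.collides.contains(it)
    })
}

fn main() {
    // With `align(1)` present, suggesting `align` again (or `transparent`,
    // which collides with it) makes no sense.
    assert!(already_annotated(&COMPLETIONS[0], &["align"]));
    assert!(already_annotated(&COMPLETIONS[1], &["align"]));
    assert!(!already_annotated(&COMPLETIONS[1], &["u8"]));
}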
crates/ide_completion/src/context.rs

@@ -120,153 +120,6 @@ pub(crate) struct CompletionContext<'a> {
}
impl<'a> CompletionContext<'a> {
pub(super) fn new(
db: &'a RootDatabase,
position: FilePosition,
config: &'a CompletionConfig,
) -> Option<CompletionContext<'a>> {
let sema = Semantics::new(db);
let original_file = sema.parse(position.file_id);
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file_with_fake_ident = {
let parse = db.parse(position.file_id);
let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
parse.reparse(&edit).tree()
};
let fake_ident_token =
file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
let original_token =
original_file.syntax().token_at_offset(position.offset).left_biased()?;
let token = sema.descend_into_macros_single(original_token.clone());
let scope = sema.scope_at_offset(&token, position.offset);
let krate = scope.krate();
let mut locals = vec![];
scope.process_all_names(&mut |name, scope| {
if let ScopeDef::Local(local) = scope {
locals.push((name, local));
}
});
let mut ctx = CompletionContext {
sema,
scope,
db,
config,
position,
original_token,
token,
krate,
expected_name: None,
expected_type: None,
function_def: None,
impl_def: None,
name_syntax: None,
lifetime_ctx: None,
pattern_ctx: None,
completion_location: None,
prev_sibling: None,
attribute_under_caret: None,
previous_token: None,
path_context: None,
locals,
incomplete_let: false,
no_completion_required: false,
};
ctx.expand_and_fill(
original_file.syntax().clone(),
file_with_fake_ident.syntax().clone(),
position.offset,
fake_ident_token,
);
Some(ctx)
}
/// Do the attribute expansion at the current cursor position for both original file and fake file
/// as long as possible. As soon as one of the two expansions fail we stop to stay in sync.
fn expand_and_fill(
&mut self,
mut original_file: SyntaxNode,
mut speculative_file: SyntaxNode,
mut offset: TextSize,
mut fake_ident_token: SyntaxToken,
) {
loop {
// Expand attributes
if let (Some(actual_item), Some(item_with_fake_ident)) = (
find_node_at_offset::<ast::Item>(&original_file, offset),
find_node_at_offset::<ast::Item>(&speculative_file, offset),
) {
match (
self.sema.expand_attr_macro(&actual_item),
self.sema.speculative_expand_attr_macro(
&actual_item,
&item_with_fake_ident,
fake_ident_token.clone(),
),
) {
(Some(actual_expansion), Some(speculative_expansion)) => {
let new_offset = speculative_expansion.1.text_range().start();
if new_offset > actual_expansion.text_range().end() {
break;
}
original_file = actual_expansion;
speculative_file = speculative_expansion.0;
fake_ident_token = speculative_expansion.1;
offset = new_offset;
continue;
}
(None, None) => (),
_ => break,
}
}
// Expand fn-like macro calls
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
find_node_at_offset::<ast::MacroCall>(&original_file, offset),
find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
) {
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 =
macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
if mac_call_path0 != mac_call_path1 {
break;
}
let speculative_args = match macro_call_with_fake_ident.token_tree() {
Some(tt) => tt,
None => break,
};
if let (Some(actual_expansion), Some(speculative_expansion)) = (
self.sema.expand(&actual_macro_call),
self.sema.speculative_expand(
&actual_macro_call,
&speculative_args,
fake_ident_token,
),
) {
let new_offset = speculative_expansion.1.text_range().start();
if new_offset > actual_expansion.text_range().end() {
break;
}
original_file = actual_expansion;
speculative_file = speculative_expansion.0;
fake_ident_token = speculative_expansion.1;
offset = new_offset;
} else {
break;
}
} else {
break;
}
}
self.fill(&original_file, speculative_file, offset);
}
/// Checks whether completions in that particular case don't make much sense.
/// Examples:
/// - `fn $0` -- we expect function name, it's unlikely that "hint" will be helpful.
@@ -491,6 +344,156 @@ impl<'a> CompletionContext<'a> {
false
}
}
// CompletionContext construction
impl<'a> CompletionContext<'a> {
pub(super) fn new(
db: &'a RootDatabase,
position: FilePosition,
config: &'a CompletionConfig,
) -> Option<CompletionContext<'a>> {
let sema = Semantics::new(db);
let original_file = sema.parse(position.file_id);
// Insert a fake ident to get a valid parse tree. We will use this file
// to determine context, though the original_file will be used for
// actual completion.
let file_with_fake_ident = {
let parse = db.parse(position.file_id);
let edit = Indel::insert(position.offset, "intellijRulezz".to_string());
parse.reparse(&edit).tree()
};
let fake_ident_token =
file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
let original_token =
original_file.syntax().token_at_offset(position.offset).left_biased()?;
let token = sema.descend_into_macros_single(original_token.clone());
let scope = sema.scope_at_offset(&token, position.offset);
let krate = scope.krate();
let mut locals = vec![];
scope.process_all_names(&mut |name, scope| {
if let ScopeDef::Local(local) = scope {
locals.push((name, local));
}
});
let mut ctx = CompletionContext {
sema,
scope,
db,
config,
position,
original_token,
token,
krate,
expected_name: None,
expected_type: None,
function_def: None,
impl_def: None,
name_syntax: None,
lifetime_ctx: None,
pattern_ctx: None,
completion_location: None,
prev_sibling: None,
attribute_under_caret: None,
previous_token: None,
path_context: None,
locals,
incomplete_let: false,
no_completion_required: false,
};
ctx.expand_and_fill(
original_file.syntax().clone(),
file_with_fake_ident.syntax().clone(),
position.offset,
fake_ident_token,
);
Some(ctx)
}
/// Do the attribute expansion at the current cursor position for both original file and fake file
/// as long as possible. As soon as one of the two expansions fail we stop to stay in sync.
fn expand_and_fill(
&mut self,
mut original_file: SyntaxNode,
mut speculative_file: SyntaxNode,
mut offset: TextSize,
mut fake_ident_token: SyntaxToken,
) {
loop {
// Expand attributes
if let (Some(actual_item), Some(item_with_fake_ident)) = (
find_node_at_offset::<ast::Item>(&original_file, offset),
find_node_at_offset::<ast::Item>(&speculative_file, offset),
) {
match (
self.sema.expand_attr_macro(&actual_item),
self.sema.speculative_expand_attr_macro(
&actual_item,
&item_with_fake_ident,
fake_ident_token.clone(),
),
) {
(Some(actual_expansion), Some(speculative_expansion)) => {
let new_offset = speculative_expansion.1.text_range().start();
if new_offset > actual_expansion.text_range().end() {
break;
}
original_file = actual_expansion;
speculative_file = speculative_expansion.0;
fake_ident_token = speculative_expansion.1;
offset = new_offset;
continue;
}
(None, None) => (),
_ => break,
}
}
// Expand fn-like macro calls
if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
find_node_at_offset::<ast::MacroCall>(&original_file, offset),
find_node_at_offset::<ast::MacroCall>(&speculative_file, offset),
) {
let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
let mac_call_path1 =
macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
if mac_call_path0 != mac_call_path1 {
break;
}
let speculative_args = match macro_call_with_fake_ident.token_tree() {
Some(tt) => tt,
None => break,
};
if let (Some(actual_expansion), Some(speculative_expansion)) = (
self.sema.expand(&actual_macro_call),
self.sema.speculative_expand(
&actual_macro_call,
&speculative_args,
fake_ident_token,
),
) {
let new_offset = speculative_expansion.1.text_range().start();
if new_offset > actual_expansion.text_range().end() {
break;
}
original_file = actual_expansion;
speculative_file = speculative_expansion.0;
fake_ident_token = speculative_expansion.1;
offset = new_offset;
} else {
break;
}
} else {
break;
}
}
self.fill(&original_file, speculative_file, offset);
}
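As the doc comment above says, `expand_and_fill` walks both trees in lockstep: each round either expands the item or macro call in both the real and the speculative file, or stops, so the two never drift apart. Reduced to a toy sketch (the `Tree` alias and the `expand` closure are stand-ins, not rust-analyzer APIs, and the real loop additionally falls through from attribute to fn-like macro expansion):

type Tree = String; // stand-in for a syntax tree

/// Expand `real` and `speculative` together; bail out as soon as one side
/// can no longer be expanded so both trees stay in sync.
fn expand_in_lockstep(
    mut real: Tree,
    mut speculative: Tree,
    expand: impl Fn(&Tree) -> Option<Tree>,
) -> (Tree, Tree) {
    loop {
        match (expand(&real), expand(&speculative)) {
            (Some(r), Some(s)) => {
                real = r;
                speculative = s;
            }
            // One side failed to expand: stop here rather than diverge.
            _ => break,
        }
    }
    (real, speculative)
}

fn main() {
    let expand = |t: &Tree| (t.len() < 8).then(|| format!("{}!", t));
    let (real, fake) = expand_in_lockstep("real".into(), "fake".into(), expand);
    assert_eq!((real.as_str(), fake.as_str()), ("real!!!!", "fake!!!!"));
}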
fn expected_type_and_name(&self) -> (Option<Type>, Option<NameOrNameRef>) {
let mut node = match self.token.parent() {
@@ -658,170 +661,171 @@ impl<'a> CompletionContext<'a> {
             .find_map(ast::Fn::cast);
         match name_like {
             ast::NameLike::Lifetime(lifetime) => {
-                self.classify_lifetime(original_file, lifetime, offset);
+                self.lifetime_ctx =
+                    Self::classify_lifetime(&self.sema, original_file, lifetime, offset);
             }
             ast::NameLike::NameRef(name_ref) => {
-                self.classify_name_ref(original_file, name_ref);
+                self.path_context = Self::classify_name_ref(&self.sema, original_file, name_ref);
             }
             ast::NameLike::Name(name) => {
-                self.classify_name(name);
+                self.pattern_ctx = Self::classify_name(&self.sema, name);
             }
         }
     }
 
     fn classify_lifetime(
-        &mut self,
+        sema: &Semantics<RootDatabase>,
         original_file: &SyntaxNode,
         lifetime: ast::Lifetime,
         offset: TextSize,
-    ) {
-        if let Some(parent) = lifetime.syntax().parent() {
-            if parent.kind() == ERROR {
-                return;
-            }
+    ) -> Option<LifetimeContext> {
+        let parent = lifetime.syntax().parent()?;
+        if parent.kind() == ERROR {
+            return None;
+        }
 
-            self.lifetime_ctx = Some(match_ast! {
-                match parent {
-                    ast::LifetimeParam(_it) => LifetimeContext::LifetimeParam(self.sema.find_node_at_offset_with_macros(original_file, offset)),
-                    ast::BreakExpr(_it) => LifetimeContext::LabelRef,
-                    ast::ContinueExpr(_it) => LifetimeContext::LabelRef,
-                    ast::Label(_it) => LifetimeContext::LabelDef,
-                    _ => LifetimeContext::Lifetime,
-                }
-            });
-        }
+        Some(match_ast! {
+            match parent {
+                ast::LifetimeParam(_it) => LifetimeContext::LifetimeParam(sema.find_node_at_offset_with_macros(original_file, offset)),
+                ast::BreakExpr(_it) => LifetimeContext::LabelRef,
+                ast::ContinueExpr(_it) => LifetimeContext::LabelRef,
+                ast::Label(_it) => LifetimeContext::LabelDef,
+                _ => LifetimeContext::Lifetime,
+            }
+        })
     }
 
-    fn classify_name(&mut self, name: ast::Name) {
-        if let Some(bind_pat) = name.syntax().parent().and_then(ast::IdentPat::cast) {
-            let is_name_in_field_pat = bind_pat
-                .syntax()
-                .parent()
-                .and_then(ast::RecordPatField::cast)
-                .map_or(false, |pat_field| pat_field.name_ref().is_none());
-            if is_name_in_field_pat {
-                return;
-            }
-            if bind_pat.is_simple_ident() {
-                let mut is_param = None;
-                let refutability = bind_pat
-                    .syntax()
-                    .ancestors()
-                    .skip_while(|it| ast::Pat::can_cast(it.kind()))
-                    .next()
-                    .map_or(PatternRefutability::Irrefutable, |node| {
-                        match_ast! {
-                            match node {
-                                ast::LetStmt(__) => PatternRefutability::Irrefutable,
-                                ast::Param(param) => {
-                                    let is_closure_param = param
-                                        .syntax()
-                                        .ancestors()
-                                        .nth(2)
-                                        .and_then(ast::ClosureExpr::cast)
-                                        .is_some();
-                                    is_param = Some(if is_closure_param {
-                                        ParamKind::Closure
-                                    } else {
-                                        ParamKind::Function
-                                    });
-                                    PatternRefutability::Irrefutable
-                                },
-                                ast::MatchArm(__) => PatternRefutability::Refutable,
-                                ast::Condition(__) => PatternRefutability::Refutable,
-                                ast::ForExpr(__) => PatternRefutability::Irrefutable,
-                                _ => PatternRefutability::Irrefutable,
-                            }
-                        }
-                    });
-                self.pattern_ctx = Some(PatternContext { refutability, is_param });
-            }
-        }
-    }
+    fn classify_name(_sema: &Semantics<RootDatabase>, name: ast::Name) -> Option<PatternContext> {
+        let bind_pat = name.syntax().parent().and_then(ast::IdentPat::cast)?;
+        let is_name_in_field_pat = bind_pat
+            .syntax()
+            .parent()
+            .and_then(ast::RecordPatField::cast)
+            .map_or(false, |pat_field| pat_field.name_ref().is_none());
+        if is_name_in_field_pat {
+            return None;
+        }
+        if !bind_pat.is_simple_ident() {
+            return None;
+        }
+        let mut is_param = None;
+        let refutability = bind_pat
+            .syntax()
+            .ancestors()
+            .skip_while(|it| ast::Pat::can_cast(it.kind()))
+            .next()
+            .map_or(PatternRefutability::Irrefutable, |node| {
+                match_ast! {
+                    match node {
+                        ast::LetStmt(__) => PatternRefutability::Irrefutable,
+                        ast::Param(param) => {
+                            let is_closure_param = param
+                                .syntax()
+                                .ancestors()
+                                .nth(2)
+                                .and_then(ast::ClosureExpr::cast)
+                                .is_some();
+                            is_param = Some(if is_closure_param {
+                                ParamKind::Closure
+                            } else {
+                                ParamKind::Function
+                            });
+                            PatternRefutability::Irrefutable
+                        },
+                        ast::MatchArm(__) => PatternRefutability::Refutable,
+                        ast::Condition(__) => PatternRefutability::Refutable,
+                        ast::ForExpr(__) => PatternRefutability::Irrefutable,
+                        _ => PatternRefutability::Irrefutable,
+                    }
+                }
+            });
+        Some(PatternContext { refutability, is_param })
+    }
 
-    fn classify_name_ref(&mut self, original_file: &SyntaxNode, name_ref: ast::NameRef) {
-        let parent = match name_ref.syntax().parent() {
-            Some(it) => it,
-            None => return,
-        };
+    fn classify_name_ref(
+        _sema: &Semantics<RootDatabase>,
+        original_file: &SyntaxNode,
+        name_ref: ast::NameRef,
+    ) -> Option<PathCompletionContext> {
+        let parent = name_ref.syntax().parent()?;
+        let segment = ast::PathSegment::cast(parent)?;
 
-        if let Some(segment) = ast::PathSegment::cast(parent) {
-            let path_ctx = self.path_context.get_or_insert(PathCompletionContext {
-                call_kind: None,
-                is_trivial_path: false,
-                qualifier: None,
-                has_type_args: false,
-                can_be_stmt: false,
-                in_loop_body: false,
-                use_tree_parent: false,
-                kind: None,
-            });
-            path_ctx.in_loop_body = is_in_loop_body(name_ref.syntax());
-            let path = segment.parent_path();
+        let mut path_ctx = PathCompletionContext {
+            call_kind: None,
+            is_trivial_path: false,
+            qualifier: None,
+            has_type_args: false,
+            can_be_stmt: false,
+            in_loop_body: false,
+            use_tree_parent: false,
+            kind: None,
+        };
+        path_ctx.in_loop_body = is_in_loop_body(name_ref.syntax());
+        let path = segment.parent_path();
 
-            if let Some(p) = path.syntax().parent() {
-                path_ctx.call_kind = match_ast! {
-                    match p {
-                        ast::PathExpr(it) => it.syntax().parent().and_then(ast::CallExpr::cast).map(|_| CallKind::Expr),
-                        ast::MacroCall(it) => it.excl_token().and(Some(CallKind::Mac)),
-                        ast::TupleStructPat(_it) => Some(CallKind::Pat),
-                        _ => None
-                    }
-                };
-            }
+        if let Some(p) = path.syntax().parent() {
+            path_ctx.call_kind = match_ast! {
+                match p {
+                    ast::PathExpr(it) => it.syntax().parent().and_then(ast::CallExpr::cast).map(|_| CallKind::Expr),
+                    ast::MacroCall(it) => it.excl_token().and(Some(CallKind::Mac)),
+                    ast::TupleStructPat(_it) => Some(CallKind::Pat),
+                    _ => None
+                }
+            };
+        }
 
-            if let Some(parent) = path.syntax().parent() {
-                path_ctx.kind = match_ast! {
-                    match parent {
-                        ast::PathType(_it) => Some(PathKind::Type),
-                        ast::PathExpr(_it) => Some(PathKind::Expr),
-                        _ => None,
-                    }
-                };
-            }
-            path_ctx.has_type_args = segment.generic_arg_list().is_some();
+        if let Some(parent) = path.syntax().parent() {
+            path_ctx.kind = match_ast! {
+                match parent {
+                    ast::PathType(_it) => Some(PathKind::Type),
+                    ast::PathExpr(_it) => Some(PathKind::Expr),
+                    _ => None,
+                }
+            };
+        }
+        path_ctx.has_type_args = segment.generic_arg_list().is_some();
 
-            if let Some((path, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
-                path_ctx.use_tree_parent = use_tree_parent;
-                path_ctx.qualifier = path
-                    .segment()
-                    .and_then(|it| {
-                        find_node_with_range::<ast::PathSegment>(
-                            original_file,
-                            it.syntax().text_range(),
-                        )
-                    })
-                    .map(|it| it.parent_path());
-                return;
-            }
+        if let Some((path, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
+            path_ctx.use_tree_parent = use_tree_parent;
+            path_ctx.qualifier = path
+                .segment()
+                .and_then(|it| {
+                    find_node_with_range::<ast::PathSegment>(
+                        original_file,
+                        it.syntax().text_range(),
+                    )
+                })
+                .map(|it| it.parent_path());
+            return Some(path_ctx);
+        }
 
-            if let Some(segment) = path.segment() {
-                if segment.coloncolon_token().is_some() {
-                    return;
-                }
-            }
+        if let Some(segment) = path.segment() {
+            if segment.coloncolon_token().is_some() {
+                return Some(path_ctx);
+            }
+        }
 
-            path_ctx.is_trivial_path = true;
+        path_ctx.is_trivial_path = true;
 
-            // Find either enclosing expr statement (thing with `;`) or a
-            // block. If block, check that we are the last expr.
-            path_ctx.can_be_stmt = name_ref
-                .syntax()
-                .ancestors()
-                .find_map(|node| {
-                    if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
-                        return Some(stmt.syntax().text_range() == name_ref.syntax().text_range());
-                    }
-                    if let Some(stmt_list) = ast::StmtList::cast(node) {
-                        return Some(
-                            stmt_list.tail_expr().map(|e| e.syntax().text_range())
-                                == Some(name_ref.syntax().text_range()),
-                        );
-                    }
-                    None
-                })
-                .unwrap_or(false);
-        }
-    }
+        // Find either enclosing expr statement (thing with `;`) or a
+        // block. If block, check that we are the last expr.
+        path_ctx.can_be_stmt = name_ref
+            .syntax()
+            .ancestors()
+            .find_map(|node| {
+                if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
+                    return Some(stmt.syntax().text_range() == name_ref.syntax().text_range());
+                }
+                if let Some(stmt_list) = ast::StmtList::cast(node) {
+                    return Some(
+                        stmt_list.tail_expr().map(|e| e.syntax().text_range())
+                            == Some(name_ref.syntax().text_range()),
+                    );
+                }
+                None
+            })
+            .unwrap_or(false);
+        Some(path_ctx)
+    }
 }

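A pattern worth noting in the `context.rs` changes: the `classify_*` methods stop mutating `self` directly and become associated functions returning `Option<_>`, which lets them use `?` for early exits and keeps every field assignment at the single call site in `fill`. Schematically, with invented placeholder types rather than the real context structs:

struct LifetimeCtx; // placeholder for LifetimeContext

struct Context {
    lifetime_ctx: Option<LifetimeCtx>,
}

impl Context {
    // Before: `fn classify(&mut self, ..)` with nested `if let` and bare `return`s.
    // After: a plain function returning `Option<_>`, so lookups can use `?`.
    fn classify(input: Option<&str>) -> Option<LifetimeCtx> {
        let text = input?; // early exit replaces a level of `if let` nesting
        if text.is_empty() {
            return None;
        }
        Some(LifetimeCtx)
    }

    fn fill(&mut self, input: Option<&str>) {
        // The single assignment now happens at the call site.
        self.lifetime_ctx = Self::classify(input);
    }
}

fn main() {
    let mut ctx = Context { lifetime_ctx: None };
    ctx.fill(Some("'a"));
    assert!(ctx.lifetime_ctx.is_some());
}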
crates/ide_completion/src/tests/attribute.rs

@@ -692,6 +692,24 @@ mod lint {
         r#"#[feature(box_syntax)] struct Test;"#,
     )
 }
+
+#[test]
+fn lint_clippy_unqualified() {
+    check_edit(
+        "clippy::as_conversions",
+        r#"#[allow($0)] struct Test;"#,
+        r#"#[allow(clippy::as_conversions)] struct Test;"#,
+    );
+}
+
+#[test]
+fn lint_clippy_qualified() {
+    check_edit(
+        "clippy::as_conversions",
+        r#"#[allow(clippy::$0)] struct Test;"#,
+        r#"#[allow(clippy::as_conversions)] struct Test;"#,
+    );
+}
}
mod repr {
@@ -742,7 +760,6 @@ mod repr {
     check_repr(
         r#"#[repr(align(1), $0)] struct Test;"#,
         expect![[r#"
-            at align($0)
             at transparent
             at C
             at u8

xtask/src/tidy.rs

@@ -192,6 +192,8 @@ fn deny_clippy(path: &Path, text: &str) {
         "ide_db/src/helpers/generated_lints.rs",
         // The tests test clippy lint hovers
         "ide/src/hover/tests.rs",
+        // The tests test clippy lint completions
+        "ide_completion/src/tests/attribute.rs",
     ];
     if ignore.iter().any(|p| path.ends_with(p)) {
         return;