mirror of https://github.com/rust-lang/rust.git (synced 2024-12-03 20:23:59 +00:00)

Remove no_completions_required from CompletionContext

This commit is contained in:
parent 119ba82e4b
commit 0b54d31359
@@ -163,6 +163,7 @@ fn add_keyword(acc: &mut Completions, ctx: &CompletionContext, kw: &str, snippet
     match ctx.config.snippet_cap {
         Some(cap) => {
             if snippet.ends_with('}') && ctx.incomplete_let {
+                // complete block expression snippets with a trailing semicolon, if inside an incomplete let
                cov_mark::hit!(let_semi);
                item.insert_snippet(cap, format!("{};", snippet));
            } else {
@@ -8,7 +8,7 @@
 //! show up for normal completions, or they won't show completions other than lifetimes depending
 //! on the fixture input.
 use hir::{known, ScopeDef};
-use syntax::ast;
+use syntax::{ast, TokenText};

 use crate::{
     completions::Completions,
@@ -19,24 +19,24 @@ use crate::{
 pub(crate) fn complete_lifetime(acc: &mut Completions, ctx: &CompletionContext) {
     let lp = match &ctx.lifetime_ctx {
         Some(LifetimeContext::Lifetime) => None,
-        Some(LifetimeContext::LifetimeParam(param)) => param.as_ref(),
+        Some(LifetimeContext::LifetimeParam { is_decl: false, param }) => Some(param),
         _ => return,
     };
-    let lp_string;
     let param_lifetime = match (&ctx.name_syntax, lp.and_then(|lp| lp.lifetime())) {
         (Some(ast::NameLike::Lifetime(lt)), Some(lp)) if lp == lt.clone() => return,
-        (Some(_), Some(lp)) => {
-            lp_string = lp.to_string();
-            Some(&*lp_string)
-        }
+        (Some(_), Some(lp)) => Some(lp),
         _ => None,
     };
+    let param_lifetime = param_lifetime.as_ref().map(ast::Lifetime::text);
+    let param_lifetime = param_lifetime.as_ref().map(TokenText::as_str);

     ctx.scope.process_all_names(&mut |name, res| {
-        if let ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_)) = res {
-            if param_lifetime != Some(&*name.to_smol_str()) {
-                acc.add_lifetime(ctx, name);
-            }
+        if matches!(
+            res,
+            ScopeDef::GenericParam(hir::GenericParam::LifetimeParam(_))
+                if param_lifetime != Some(&*name.to_smol_str())
+        ) {
+            acc.add_lifetime(ctx, name);
         }
     });
     if param_lifetime.is_none() {
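The new arm above folds the nested `if let` plus inner `if` into a single `matches!` invocation with a pattern guard. A minimal, self-contained sketch of the same idiom (the `Def` type and `should_offer` helper are stand-ins for illustration, not rust-analyzer APIs):

enum Def {
    Lifetime(String),
    Other,
}

// One pattern plus a guard covers both "is this a lifetime parameter?" and
// "is it different from the lifetime currently being typed?".
fn should_offer(def: &Def, current: Option<&str>) -> bool {
    matches!(def, Def::Lifetime(name) if current != Some(name.as_str()))
}

fn main() {
    assert!(should_offer(&Def::Lifetime("'a".into()), Some("'b")));
    assert!(!should_offer(&Def::Lifetime("'a".into()), Some("'a")));
    assert!(!should_offer(&Def::Other, None));
}

The guard only runs when the pattern matches, so the variant check and the "don't re-suggest the lifetime being typed" condition read as one predicate.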
@@ -195,6 +195,12 @@ fn foo2<'lifetime, T>() where T: Trait<Item = 'a$0> {}
 fn complete_lifetime_in_param_list() {
     check(
         r#"
+fn foo<'$0>() {}
+"#,
+        expect![[r#""#]],
+    );
+    check(
+        r#"
 fn foo<'a$0>() {}
 "#,
         expect![[r#""#]],
@@ -24,8 +24,8 @@ use text_edit::Indel;

 use crate::{
     patterns::{
-        determine_location, determine_prev_sibling, for_is_prev2, inside_impl_trait_block,
-        is_in_loop_body, previous_token, ImmediateLocation, ImmediatePrevSibling,
+        determine_location, determine_prev_sibling, for_is_prev2, is_in_loop_body, previous_token,
+        ImmediateLocation, ImmediatePrevSibling,
     },
     CompletionConfig,
 };
@@ -94,7 +94,7 @@ pub(super) struct PatternContext {

 #[derive(Debug)]
 pub(super) enum LifetimeContext {
-    LifetimeParam(Option<ast::LifetimeParam>),
+    LifetimeParam { is_decl: bool, param: ast::LifetimeParam },
     Lifetime,
     LabelRef,
     LabelDef,
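Replacing the tuple variant `LifetimeParam(Option<ast::LifetimeParam>)` with a struct variant gives each field a name at every match site. A stand-alone sketch of the shape change (simplified types, not the actual rust-analyzer definitions):

#[allow(dead_code)]
enum LifetimeContext {
    // Before: LifetimeParam(Option<Param>) -- the payload's role was implicit.
    // After: named fields document what the data means.
    LifetimeParam { is_decl: bool, param: Param },
    Lifetime,
    LabelRef,
    LabelDef,
}

#[derive(Debug)]
struct Param(&'static str);

fn describe(ctx: &LifetimeContext) -> String {
    match ctx {
        // Complete only at a *use* of the parameter, not at its declaration.
        LifetimeContext::LifetimeParam { is_decl: false, param } => {
            format!("complete against {:?}", param)
        }
        LifetimeContext::LifetimeParam { is_decl: true, .. } => "declaration site".into(),
        _ => "label or plain lifetime".into(),
    }
}

fn main() {
    let ctx = LifetimeContext::LifetimeParam { is_decl: false, param: Param("'a") };
    println!("{}", describe(&ctx));
}

Matching on `is_decl: false` directly, as `complete_lifetime` now does, replaces the earlier `Option` juggling.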
@@ -115,6 +115,7 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) db: &'a RootDatabase,
     pub(super) config: &'a CompletionConfig,
     pub(super) position: FilePosition,
+
     /// The token before the cursor, in the original file.
     pub(super) original_token: SyntaxToken,
     /// The token before the cursor, in the macro-expanded file.
@@ -146,32 +147,22 @@ pub(crate) struct CompletionContext<'a> {
     pub(super) existing_derives: FxHashSet<hir::Macro>,

     pub(super) locals: Vec<(Name, Local)>,
-
-    no_completion_required: bool,
 }

 impl<'a> CompletionContext<'a> {
-    /// Checks whether completions in that particular case don't make much sense.
-    /// Examples:
-    /// - `fn $0` -- we expect function name, it's unlikely that "hint" will be helpful.
-    /// Exception for this case is `impl Trait for Foo`, where we would like to hint trait method names.
-    /// - `for _ i$0` -- obviously, it'll be "in" keyword.
-    pub(crate) fn no_completion_required(&self) -> bool {
-        self.no_completion_required
-    }
-
     /// The range of the identifier that is being completed.
     pub(crate) fn source_range(&self) -> TextRange {
         // check kind of macro-expanded token, but use range of original token
         let kind = self.token.kind();
-        if kind == IDENT || kind == LIFETIME_IDENT || kind == UNDERSCORE || kind.is_keyword() {
-            self.original_token.text_range()
-        } else if kind == CHAR {
-            // assume we are completing a lifetime but the user has only typed the '
-            cov_mark::hit!(completes_if_lifetime_without_idents);
-            TextRange::at(self.original_token.text_range().start(), TextSize::from(1))
-        } else {
-            TextRange::empty(self.position.offset)
+        match kind {
+            CHAR => {
+                // assume we are completing a lifetime but the user has only typed the '
+                cov_mark::hit!(completes_if_lifetime_without_idents);
+                TextRange::at(self.original_token.text_range().start(), TextSize::from(1))
+            }
+            IDENT | LIFETIME_IDENT | UNDERSCORE => self.original_token.text_range(),
+            _ if kind.is_keyword() => self.original_token.text_range(),
+            _ => TextRange::empty(self.position.offset),
         }
     }

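`source_range` now dispatches on the token kind with a single `match`, keeping the keyword case as a `_ if kind.is_keyword()` guard arm because keywords are not a single enum variant. A reduced sketch of that refactor using a hypothetical `Kind` enum (not the real `SyntaxKind`):

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum Kind { Ident, LifetimeIdent, Underscore, Char, KwFn, KwLet, Other }

impl Kind {
    fn is_keyword(self) -> bool {
        matches!(self, Kind::KwFn | Kind::KwLet)
    }
}

// Mirrors the shape of the new source_range(): identifiers and keywords
// reuse the original token, a lone ' counts as a 1-character lifetime
// prefix, and anything else yields an empty range at the cursor.
fn replace_len(kind: Kind, token_len: u32) -> u32 {
    match kind {
        Kind::Char => 1,
        Kind::Ident | Kind::LifetimeIdent | Kind::Underscore => token_len,
        _ if kind.is_keyword() => token_len,
        _ => 0,
    }
}

fn main() {
    assert_eq!(replace_len(Kind::Ident, 3), 3);
    assert_eq!(replace_len(Kind::KwFn, 2), 2);
    assert_eq!(replace_len(Kind::Char, 4), 1);
    assert_eq!(replace_len(Kind::Other, 5), 0);
}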
@@ -453,7 +444,6 @@ impl<'a> CompletionContext<'a> {
             path_context: None,
             locals,
             incomplete_let: false,
-            no_completion_required: false,
             existing_derives: Default::default(),
         };
         ctx.expand_and_fill(
@@ -740,14 +730,16 @@ impl<'a> CompletionContext<'a> {
     ) {
         let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
         let syntax_element = NodeOrToken::Token(fake_ident_token);
-        self.previous_token = previous_token(syntax_element.clone());
-        self.no_completion_required = {
-            let inside_impl_trait_block = inside_impl_trait_block(syntax_element.clone());
-            let fn_is_prev = self.previous_token_is(T![fn]);
-            let for_is_prev2 = for_is_prev2(syntax_element.clone());
-            (fn_is_prev && !inside_impl_trait_block) || for_is_prev2
-        };
+        if for_is_prev2(syntax_element.clone()) {
+            // for pat $0
+            // there is nothing to complete here except `in` keyword
+            // don't bother populating the context
+            // FIXME: the completion calculations should end up good enough
+            // such that this special case becomes unnecessary
+            return;
+        }

+        self.previous_token = previous_token(syntax_element.clone());
         self.fake_attribute_under_caret = syntax_element.ancestors().find_map(ast::Attr::cast);

         self.incomplete_let =
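Instead of computing a `no_completion_required` flag during context construction and consulting it later in `completions()`, the new code bails out of filling the context as soon as the cursor is found to sit after `for <pat>`. A schematic sketch of that early-return shape (the helper, the string handling, and the `Context` type are made up for illustration, not the actual rust-analyzer control flow):

#[allow(dead_code)]
struct Context {
    previous_token: Option<String>,
}

// Stand-in for for_is_prev2(): is the cursor right after `for <pat>`?
fn after_for_pattern(before_cursor: &str) -> bool {
    let mut words = before_cursor.split_whitespace().rev();
    let _pat = words.next();
    words.next() == Some("for")
}

fn build_context(before_cursor: &str) -> Option<Context> {
    if after_for_pattern(before_cursor) {
        // Nothing to complete here except the `in` keyword, so skip the
        // (formerly flag-guarded) context population entirely.
        return None;
    }
    Some(Context {
        previous_token: before_cursor.split_whitespace().last().map(str::to_owned),
    })
}

fn main() {
    assert!(build_context("fn foo() { for i").is_none());
    assert!(build_context("fn foo() { let x =").is_some());
}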
@@ -755,9 +747,7 @@ impl<'a> CompletionContext<'a> {
                 it.syntax().text_range().end() == syntax_element.text_range().end()
             });

-        let (expected_type, expected_name) = self.expected_type_and_name();
-        self.expected_type = expected_type;
-        self.expected_name = expected_name;
+        (self.expected_type, self.expected_name) = self.expected_type_and_name();

         // Overwrite the path kind for derives
         if let Some((original_file, file_with_fake_ident, offset)) = derive_ctx {
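The temporary tuple bindings are replaced by a destructuring assignment, which writes straight into the existing struct fields (stabilized in Rust 1.59). A minimal example with a made-up `Ctx` struct:

#[derive(Default, Debug)]
struct Ctx {
    expected_type: Option<String>,
    expected_name: Option<String>,
}

fn expected_type_and_name() -> (Option<String>, Option<String>) {
    (Some("i32".to_owned()), Some("count".to_owned()))
}

fn main() {
    let mut ctx = Ctx::default();
    // Destructuring assignment: both fields are written in one statement,
    // with no intermediate `let (a, b) = ...` bindings.
    (ctx.expected_type, ctx.expected_name) = expected_type_and_name();
    println!("{:?}", ctx);
}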
@@ -808,8 +798,7 @@ impl<'a> CompletionContext<'a> {

         match name_like {
             ast::NameLike::Lifetime(lifetime) => {
-                self.lifetime_ctx =
-                    Self::classify_lifetime(&self.sema, original_file, lifetime, offset);
+                self.lifetime_ctx = Self::classify_lifetime(&self.sema, original_file, lifetime);
             }
             ast::NameLike::NameRef(name_ref) => {
                 if let Some((path_ctx, pat_ctx)) =
@@ -826,10 +815,9 @@ impl<'a> CompletionContext<'a> {
     }

     fn classify_lifetime(
-        sema: &Semantics<RootDatabase>,
-        original_file: &SyntaxNode,
+        _sema: &Semantics<RootDatabase>,
+        _original_file: &SyntaxNode,
         lifetime: ast::Lifetime,
-        offset: TextSize,
     ) -> Option<LifetimeContext> {
         let parent = lifetime.syntax().parent()?;
         if parent.kind() == ERROR {
@@ -838,7 +826,10 @@ impl<'a> CompletionContext<'a> {

         Some(match_ast! {
             match parent {
-                ast::LifetimeParam(_) => LifetimeContext::LifetimeParam(sema.find_node_at_offset_with_macros(original_file, offset)),
+                ast::LifetimeParam(param) => LifetimeContext::LifetimeParam {
+                    is_decl: param.lifetime().as_ref() == Some(&lifetime),
+                    param
+                },
                 ast::BreakExpr(_) => LifetimeContext::LabelRef,
                 ast::ContinueExpr(_) => LifetimeContext::LabelRef,
                 ast::Label(_) => LifetimeContext::LabelDef,
|
@ -144,12 +144,6 @@ pub fn completions(
|
|||||||
) -> Option<Completions> {
|
) -> Option<Completions> {
|
||||||
let ctx = CompletionContext::new(db, position, config)?;
|
let ctx = CompletionContext::new(db, position, config)?;
|
||||||
|
|
||||||
if ctx.no_completion_required() {
|
|
||||||
cov_mark::hit!(no_completion_required);
|
|
||||||
// No work required here.
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut acc = Completions::default();
|
let mut acc = Completions::default();
|
||||||
completions::attribute::complete_attribute(&mut acc, &ctx);
|
completions::attribute::complete_attribute(&mut acc, &ctx);
|
||||||
completions::attribute::complete_derive(&mut acc, &ctx);
|
completions::attribute::complete_derive(&mut acc, &ctx);
|
||||||
|
@@ -15,7 +15,7 @@ use syntax::{
 };

 #[cfg(test)]
-use crate::tests::{check_pattern_is_applicable, check_pattern_is_not_applicable};
+use crate::tests::check_pattern_is_applicable;

 /// Immediate previous node to what we are completing.
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -291,24 +291,6 @@ fn find_node_with_range<N: AstNode>(syntax: &SyntaxNode, range: TextRange) -> Op
     syntax.covering_element(range).ancestors().find_map(N::cast)
 }

-pub(crate) fn inside_impl_trait_block(element: SyntaxElement) -> bool {
-    // Here we search `impl` keyword up through the all ancestors, unlike in `has_impl_parent`,
-    // where we only check the first parent with different text range.
-    element
-        .ancestors()
-        .find(|it| it.kind() == IMPL)
-        .map(|it| ast::Impl::cast(it).unwrap())
-        .map(|it| it.trait_().is_some())
-        .unwrap_or(false)
-}
-#[test]
-fn test_inside_impl_trait_block() {
-    check_pattern_is_applicable(r"impl Foo for Bar { f$0 }", inside_impl_trait_block);
-    check_pattern_is_applicable(r"impl Foo for Bar { fn f$0 }", inside_impl_trait_block);
-    check_pattern_is_not_applicable(r"impl A { f$0 }", inside_impl_trait_block);
-    check_pattern_is_not_applicable(r"impl A { fn f$0 }", inside_impl_trait_block);
-}
-
 pub(crate) fn previous_token(element: SyntaxElement) -> Option<SyntaxToken> {
     element.into_token().and_then(previous_non_trivia_token)
 }
@@ -154,10 +154,12 @@ fn render_completion_list(completions: Vec<CompletionItem>) -> String {
         .collect()
 }

+#[track_caller]
 pub(crate) fn check_edit(what: &str, ra_fixture_before: &str, ra_fixture_after: &str) {
     check_edit_with_config(TEST_CONFIG, what, ra_fixture_before, ra_fixture_after)
 }

+#[track_caller]
 pub(crate) fn check_edit_with_config(
     config: CompletionConfig,
     what: &str,
|
|||||||
assert!(check(NodeOrToken::Token(token)));
|
assert!(check(NodeOrToken::Token(token)));
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn check_pattern_is_not_applicable(code: &str, check: fn(SyntaxElement) -> bool) {
|
|
||||||
let (db, pos) = position(code);
|
|
||||||
let sema = Semantics::new(&db);
|
|
||||||
let original_file = sema.parse(pos.file_id);
|
|
||||||
let token = original_file.syntax().token_at_offset(pos.offset).left_biased().unwrap();
|
|
||||||
assert!(!check(NodeOrToken::Token(token)));
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn get_all_items(config: CompletionConfig, code: &str) -> Vec<CompletionItem> {
|
pub(crate) fn get_all_items(config: CompletionConfig, code: &str) -> Vec<CompletionItem> {
|
||||||
let (db, position) = position(code);
|
let (db, position) = position(code);
|
||||||
crate::completions(&db, &config, position).map_or_else(Vec::default, Into::into)
|
crate::completions(&db, &config, position).map_or_else(Vec::default, Into::into)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn check_no_completion(ra_fixture: &str) {
|
|
||||||
let (db, position) = position(ra_fixture);
|
|
||||||
|
|
||||||
assert!(
|
|
||||||
crate::completions(&db, &TEST_CONFIG, position).is_none(),
|
|
||||||
"Completions were generated, but weren't expected"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_no_completions_required() {
|
fn test_no_completions_required() {
|
||||||
cov_mark::check!(no_completion_required);
|
assert_eq!(completion_list(r#"fn foo() { for i i$0 }"#), String::new());
|
||||||
check_no_completion(r#"fn foo() { for i i$0 }"#);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
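The deleted test relied on a `cov_mark::check!`/`cov_mark::hit!` pair: the check fails unless the marked code path actually runs while the check guard is alive. The replacement simply asserts that the completion list is empty. A hedged, library-style sketch of how such a mark pair links a test to a branch (assumes the `cov_mark` crate as a dependency; the `classify` function is made up for illustration):

// Library side: mark the interesting branch.
pub fn classify(input: &str) -> &'static str {
    if input.starts_with('\'') {
        cov_mark::hit!(lifetime_prefix);
        return "lifetime";
    }
    "other"
}

#[cfg(test)]
mod tests {
    use super::classify;

    #[test]
    fn exercises_the_lifetime_branch() {
        // Fails if classify() does not pass through the hit!() above
        // while this check guard is alive.
        cov_mark::check!(lifetime_prefix);
        assert_eq!(classify("'a"), "lifetime");
    }
}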