Better support client completion resolve caps

This commit is contained in:
parent 47464e556c
commit 2a7be4afb0
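Taken together, the hunks below replace the eager, index-keyed `additional_imports` bookkeeping (plus a serialized `ResolveCompletionData` payload) with a `completion_resolve_data` map that stores the whole server-side `CompletionItem`, so import edits can be computed lazily during `completionItem/resolve`, and only when the client advertises the matching resolve capability. A minimal sketch of that deferral pattern, using illustrative types and names rather than the rust-analyzer ones:

use std::collections::HashMap;

// Stand-in for what the server keeps per completion (the real code stores a
// FileId plus the full CompletionItem in CompletionResolveData).
#[derive(Clone, Debug)]
struct StashedCompletion {
    label: String,
    import_edit: Option<String>, // placeholder for the real additional text edits
}

#[derive(Default)]
struct ResolveCache {
    items: HashMap<usize, StashedCompletion>,
}

impl ResolveCache {
    // textDocument/completion: stash the item and expose only its index as the LSP `data` field.
    fn offer(&mut self, index: usize, item: StashedCompletion) -> serde_json::Value {
        self.items.insert(index, item);
        serde_json::to_value(index).expect("a usize always serializes")
    }

    // completionItem/resolve: recover the stashed item from the `data` payload.
    fn resolve(&self, data: &serde_json::Value) -> Option<&StashedCompletion> {
        let index = serde_json::from_value::<usize>(data.clone()).ok()?;
        self.items.get(&index)
    }
}

fn main() {
    let mut cache = ResolveCache::default();
    let data = cache.offer(
        0,
        StashedCompletion {
            label: "HashMap".into(),
            import_edit: Some("use std::collections::HashMap;".into()),
        },
    );
    let resolved = cache.resolve(&data).expect("index 0 was stashed");
    println!("{}: {:?}", resolved.label, resolved.import_edit);
}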
@@ -12,6 +12,7 @@ use crate::config::SnippetCap;
 /// `CompletionItem` describes a single completion variant in the editor pop-up.
 /// It is basically a POD with various properties. To construct a
 /// `CompletionItem`, use `new` method and the `Builder` struct.
+#[derive(Clone)]
 pub struct CompletionItem {
     /// Used only internally in tests, to check only specific kind of
     /// completion (postfix, keyword, reference, etc).
@@ -97,6 +97,7 @@ pub(crate) fn check_edit_with_config(
         .unwrap_or_else(|| panic!("can't find {:?} completion in {:#?}", what, completions));
     let mut actual = db.file_text(position.file_id).to_string();
     completion.text_edit().apply(&mut actual);
+    // TODO kb how to apply imports now?
     assert_eq_text!(&ra_fixture_after, &actual)
 }

@@ -7,7 +7,7 @@ use std::{sync::Arc, time::Instant};

 use crossbeam_channel::{unbounded, Receiver, Sender};
 use flycheck::FlycheckHandle;
-use ide::{Analysis, AnalysisHost, Change, FileId, ImportToAdd};
+use ide::{Analysis, AnalysisHost, Change, CompletionItem, FileId};
 use ide_db::base_db::{CrateId, VfsPath};
 use lsp_types::{SemanticTokens, Url};
 use parking_lot::{Mutex, RwLock};
@@ -51,6 +51,11 @@ pub(crate) struct Handle<H, C> {
 pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
 pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;

+pub(crate) struct CompletionResolveData {
+    pub(crate) file_id: FileId,
+    pub(crate) item: CompletionItem,
+}
+
 /// `GlobalState` is the primary mutable state of the language server
 ///
 /// The most interesting components are `vfs`, which stores a consistent
@@ -69,7 +74,7 @@ pub(crate) struct GlobalState {
     pub(crate) config: Config,
     pub(crate) analysis_host: AnalysisHost,
     pub(crate) diagnostics: DiagnosticCollection,
-    pub(crate) additional_imports: FxHashMap<usize, ImportToAdd>,
+    pub(crate) completion_resolve_data: FxHashMap<usize, CompletionResolveData>,
     pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
     pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
@@ -122,7 +127,7 @@ impl GlobalState {
             config,
             analysis_host,
             diagnostics: Default::default(),
-            additional_imports: FxHashMap::default(),
+            completion_resolve_data: FxHashMap::default(),
             mem_docs: FxHashMap::default(),
             semantic_tokens_cache: Arc::new(Default::default()),
             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
@@ -5,14 +5,12 @@
 use std::{
     io::Write as _,
     process::{self, Stdio},
-    sync::Arc,
 };

 use ide::{
-    FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, ImportToAdd, LineIndex,
-    NavigationTarget, Query, RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit,
+    FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
+    RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit,
 };
-use ide_db::helpers::{insert_use, mod_path_to_ast};
 use itertools::Itertools;
 use lsp_server::ErrorCode;
 use lsp_types::{
@@ -36,10 +34,10 @@ use crate::{
     cargo_target_spec::CargoTargetSpec,
     config::RustfmtConfig,
     from_json, from_proto,
-    global_state::{GlobalState, GlobalStateSnapshot},
-    line_endings::LineEndings,
+    global_state::{CompletionResolveData, GlobalState, GlobalStateSnapshot},
     lsp_ext::{self, InlayHint, InlayHintsParams},
-    to_proto, LspError, Result,
+    to_proto::{self, append_import_edits},
+    LspError, Result,
 };

 pub(crate) fn handle_analyzer_status(
@@ -538,12 +536,6 @@ pub(crate) fn handle_runnables(
     Ok(res)
 }

-#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
-pub(crate) struct ResolveCompletionData {
-    completion_id: usize,
-    completion_file_id: u32,
-}
-
 pub(crate) fn handle_completion(
     global_state: &mut GlobalState,
     params: lsp_types::CompletionParams,
@@ -579,38 +571,31 @@ pub(crate) fn handle_completion(
     };
     let line_index = snap.analysis.file_line_index(position.file_id)?;
     let line_endings = snap.file_line_endings(position.file_id);
-    let mut additional_imports = FxHashMap::default();
+    let mut completion_resolve_data = FxHashMap::default();

     let items: Vec<CompletionItem> = items
         .into_iter()
         .enumerate()
         .flat_map(|(item_index, item)| {
-            let resolve_completion_data = ResolveCompletionData {
-                completion_id: item_index,
-                completion_file_id: position.file_id.0,
-            };
-            let import_to_add = item.import_to_add().cloned();
-            let mut new_completion_items =
-                to_proto::completion_item(&line_index, line_endings, item);
+            let mut new_completion_items = to_proto::completion_item(
+                &line_index,
+                line_endings,
+                item.clone(),
+                &snap.config.completion.resolve_capabilities,
+            );

-            if let Some(import_to_add) = import_to_add {
-                for new_item in &mut new_completion_items {
-                    match serde_json::to_value(&resolve_completion_data) {
-                        Ok(resolve_value) => {
-                            new_item.data = Some(resolve_value);
-                            additional_imports.insert(item_index, import_to_add.clone());
-                        }
-                        Err(e) => {
-                            log::error!("Failed to serialize completion resolve metadata: {}", e)
-                        }
-                    }
-                }
-            }
+            let item_id = serde_json::to_value(&item_index)
+                .expect(&format!("Should be able to serialize usize value {}", item_index));
+            completion_resolve_data
+                .insert(item_index, CompletionResolveData { file_id: position.file_id, item });
+            for new_item in &mut new_completion_items {
+                new_item.data = Some(item_id.clone());
+            }
             new_completion_items
         })
         .collect();

-    global_state.additional_imports = additional_imports;
+    global_state.completion_resolve_data = completion_resolve_data;

     let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
     Ok(Some(completion_list.into()))
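One consequence of the rewrite above: the old code serialized a `ResolveCompletionData` struct per item and had to log serialization failures, while the new code serializes only a bare `usize` index, which cannot realistically fail, hence the switch to `.expect(...)`. A tiny standalone check of that assumption (illustrative only):

fn main() {
    let item_index: usize = 42;
    // serde_json::to_value on a plain integer yields a JSON number; there is no
    // nesting, field ordering, or non-string map key that could make it error out.
    let data = serde_json::to_value(item_index).expect("usize serializes to a JSON number");
    assert_eq!(data, serde_json::json!(42));
    println!("completion data payload: {}", data);
}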
@@ -622,73 +607,40 @@ pub(crate) fn handle_resolve_completion(
 ) -> Result<lsp_types::CompletionItem> {
     let _p = profile::span("handle_resolve_completion");

-    match original_completion.data.as_ref() {
-        Some(completion_data) => {
-            match serde_json::from_value::<ResolveCompletionData>(completion_data.clone()) {
-                Ok(resolve_completion_data) => {
-                    if let Some(import_to_add) =
-                        global_state.additional_imports.get(&resolve_completion_data.completion_id)
-                    {
-                        let snap = global_state.snapshot();
-                        let file_id = FileId(resolve_completion_data.completion_file_id);
-                        let line_index = snap.analysis.file_line_index(file_id)?;
-                        let line_endings = snap.file_line_endings(file_id);
-                        let resolved_edits =
-                            resolve_additional_edits(import_to_add, line_index, line_endings);
-
-                        original_completion.additional_text_edits =
-                            match original_completion.additional_text_edits {
-                                Some(mut original_additional_edits) => {
-                                    if let Some(mut new_edits) = resolved_edits {
-                                        original_additional_edits.extend(new_edits.drain(..))
-                                    }
-                                    Some(original_additional_edits)
-                                }
-                                None => resolved_edits,
-                            };
-                    } else {
-                        log::error!(
-                            "Got no import data for completion with label {}, id {}",
-                            original_completion.label,
-                            resolve_completion_data.completion_id
-                        )
-                    }
-                }
-                Err(e) => log::error!("Failed to deserialize completion resolve metadata: {}", e),
-            }
-        }
-        None => (),
-    }
+    let server_completion_data = match original_completion
+        .data
+        .as_ref()
+        .map(|data| serde_json::from_value::<usize>(data.clone()))
+        .transpose()?
+        .and_then(|server_completion_id| {
+            global_state.completion_resolve_data.get(&server_completion_id)
+        }) {
+        Some(data) => data,
+        None => return Ok(original_completion),
+    };
+
+    let snap = &global_state.snapshot();
+    for supported_completion_resolve_cap in &snap.config.completion.resolve_capabilities {
+        match supported_completion_resolve_cap {
+            ide::CompletionResolveCapability::AdditionalTextEdits => {
+                // TODO kb actually add all additional edits here?
+                if let Some(import_to_add) = server_completion_data.item.import_to_add() {
+                    append_import_edits(
+                        &mut original_completion,
+                        import_to_add,
+                        snap.analysis.file_line_index(server_completion_data.file_id)?.as_ref(),
+                        snap.file_line_endings(server_completion_data.file_id),
+                    );
+                }
+            }
+            // TODO kb calculate the rest also?
+            _ => {}
+        }
+    }

     Ok(original_completion)
 }

-// TODO kb what to do when no resolve is available on the client?
-fn resolve_additional_edits(
-    import_to_add: &ImportToAdd,
-    line_index: Arc<LineIndex>,
-    line_endings: LineEndings,
-) -> Option<Vec<lsp_types::TextEdit>> {
-    let _p = profile::span("resolve_additional_edits");
-
-    let rewriter = insert_use::insert_use(
-        &import_to_add.import_scope,
-        mod_path_to_ast(&import_to_add.import_path),
-        import_to_add.merge_behaviour,
-    );
-    let old_ast = rewriter.rewrite_root()?;
-    let mut import_insert = TextEdit::builder();
-    algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut import_insert);
-    let text_edit = import_insert.finish();
-
-    Some(
-        text_edit
-            .into_iter()
-            .map(|indel| to_proto::text_edit(&line_index, line_endings, indel))
-            .collect_vec(),
-    )
-}
-
 pub(crate) fn handle_folding_range(
     snap: GlobalStateSnapshot,
     params: FoldingRangeParams,
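The loop above only fills in additional text edits when the client has declared it can handle lazily resolved properties; in LSP 3.16 clients do that through `completionItem.resolveSupport.properties`. A hedged sketch of how such a property list might be mapped onto an enum set like `CompletionResolveCapability` (illustrative names only, not rust-analyzer's actual config code):

use std::collections::HashSet;

#[derive(PartialEq, Eq, Hash, Debug)]
enum ResolveCapability {
    AdditionalTextEdits,
    Documentation,
    Detail,
}

// Translate the client's advertised property names into the server-side enum set
// that the resolve handler can match on.
fn parse_resolve_capabilities(properties: &[&str]) -> HashSet<ResolveCapability> {
    properties
        .iter()
        .filter_map(|p| match *p {
            "additionalTextEdits" => Some(ResolveCapability::AdditionalTextEdits),
            "documentation" => Some(ResolveCapability::Documentation),
            "detail" => Some(ResolveCapability::Detail),
            _ => None,
        })
        .collect()
}

fn main() {
    let caps = parse_resolve_capabilities(&["additionalTextEdits", "documentation"]);
    assert!(caps.contains(&ResolveCapability::AdditionalTextEdits));
}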
@@ -5,14 +5,19 @@ use std::{
 };

 use ide::{
-    Assist, AssistKind, CallInfo, CompletionItem, CompletionItemKind, Documentation,
-    FileSystemEdit, Fold, FoldKind, Highlight, HighlightModifier, HighlightTag, HighlightedRange,
-    Indel, InlayHint, InlayKind, InsertTextFormat, LineIndex, Markup, NavigationTarget,
-    ReferenceAccess, ResolvedAssist, Runnable, Severity, SourceChange, SourceFileEdit, TextEdit,
+    Assist, AssistKind, CallInfo, CompletionItem, CompletionItemKind, CompletionResolveCapability,
+    Documentation, FileSystemEdit, Fold, FoldKind, Highlight, HighlightModifier, HighlightTag,
+    HighlightedRange, ImportToAdd, Indel, InlayHint, InlayKind, InsertTextFormat, LineIndex,
+    Markup, NavigationTarget, ReferenceAccess, ResolvedAssist, Runnable, Severity, SourceChange,
+    SourceFileEdit, TextEdit,
 };
-use ide_db::base_db::{FileId, FileRange};
+use ide_db::{
+    base_db::{FileId, FileRange},
+    helpers::{insert_use, mod_path_to_ast},
+};
 use itertools::Itertools;
-use syntax::{SyntaxKind, TextRange, TextSize};
+use rustc_hash::FxHashSet;
+use syntax::{algo, SyntaxKind, TextRange, TextSize};

 use crate::{
     cargo_target_spec::CargoTargetSpec, global_state::GlobalStateSnapshot,
@@ -158,6 +163,7 @@ pub(crate) fn completion_item(
     line_index: &LineIndex,
     line_endings: LineEndings,
     completion_item: CompletionItem,
+    resolve_capabilities: &FxHashSet<CompletionResolveCapability>,
 ) -> Vec<lsp_types::CompletionItem> {
     fn set_score(res: &mut lsp_types::CompletionItem, label: &str) {
         res.preselect = Some(true);
@@ -231,9 +237,17 @@ pub(crate) fn completion_item(
         None => vec![res],
     };

+    let unapplied_import_data = completion_item.import_to_add().filter(|_| {
+        !resolve_capabilities.contains(&CompletionResolveCapability::AdditionalTextEdits)
+    });
+
     for mut r in all_results.iter_mut() {
         r.insert_text_format = Some(insert_text_format(completion_item.insert_text_format()));
+        if let Some(unapplied_import_data) = unapplied_import_data {
+            append_import_edits(r, unapplied_import_data, line_index, line_endings);
+        }
     }

     all_results
 }
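The `filter` in the hunk above is the gate between the eager and lazy paths: the import edit is inlined into the completion item only when the client cannot resolve `additionalTextEdits` later. The same gate in isolation, on hypothetical stand-in values:

use std::collections::HashSet;

fn main() {
    // Hypothetical stand-ins for completion_item.import_to_add() and the capability set.
    let import_to_add = Some("use std::collections::HashMap;");
    let client_can_resolve_edits = |caps: &HashSet<&str>| caps.contains("additionalTextEdits");

    let mut caps = HashSet::new();
    caps.insert("additionalTextEdits");

    // Mirrors `import_to_add().filter(|_| !resolve_capabilities.contains(...))`:
    // keep the import for inline application only if the client cannot resolve it lazily.
    let unapplied = import_to_add.filter(|_| !client_can_resolve_edits(&caps));
    assert!(unapplied.is_none(), "capable client: defer the edit to completionItem/resolve");

    caps.clear();
    let unapplied = import_to_add.filter(|_| !client_can_resolve_edits(&caps));
    assert_eq!(unapplied, Some("use std::collections::HashMap;"));
}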
@@ -817,6 +831,47 @@ pub(crate) fn markup_content(markup: Markup) -> lsp_types::MarkupContent {
     lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value }
 }

+pub(crate) fn import_into_edits(
+    import_to_add: &ImportToAdd,
+    line_index: &LineIndex,
+    line_endings: LineEndings,
+) -> Option<Vec<lsp_types::TextEdit>> {
+    let _p = profile::span("add_import_edits");
+
+    let rewriter = insert_use::insert_use(
+        &import_to_add.import_scope,
+        mod_path_to_ast(&import_to_add.import_path),
+        import_to_add.merge_behaviour,
+    );
+    let old_ast = rewriter.rewrite_root()?;
+    let mut import_insert = TextEdit::builder();
+    algo::diff(&old_ast, &rewriter.rewrite(&old_ast)).into_text_edit(&mut import_insert);
+    let import_edit = import_insert.finish();
+
+    Some(
+        import_edit
+            .into_iter()
+            .map(|indel| text_edit(line_index, line_endings, indel))
+            .collect_vec(),
+    )
+}
+
+pub(crate) fn append_import_edits(
+    completion: &mut lsp_types::CompletionItem,
+    import_to_add: &ImportToAdd,
+    line_index: &LineIndex,
+    line_endings: LineEndings,
+) {
+    let new_edits = import_into_edits(import_to_add, line_index, line_endings);
+    if let Some(original_additional_edits) = completion.additional_text_edits.as_mut() {
+        if let Some(mut new_edits) = new_edits {
+            original_additional_edits.extend(new_edits.drain(..))
+        }
+    } else {
+        completion.additional_text_edits = new_edits;
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use ide::Analysis;
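Both new helpers end in the same merge rule: freshly computed import edits are appended after any `additional_text_edits` the LSP item already carries, and otherwise become the whole value. The same rule on plain `Option<Vec<_>>` values, just to make the two branches explicit (hypothetical edit type, not the lsp_types structs):

fn merge_edits(existing: &mut Option<Vec<String>>, new_edits: Option<Vec<String>>) {
    if let Some(current) = existing.as_mut() {
        // The item already carried edits: append the freshly computed ones after them.
        if let Some(mut incoming) = new_edits {
            current.extend(incoming.drain(..));
        }
    } else {
        // No edits yet: the computed ones (possibly None) become the whole value.
        *existing = new_edits;
    }
}

fn main() {
    let mut edits = None;
    merge_edits(&mut edits, Some(vec!["use std::collections::HashMap;".to_string()]));
    assert_eq!(edits.as_ref().map(Vec::len), Some(1));

    merge_edits(&mut edits, Some(vec!["use std::fmt;".to_string()]));
    assert_eq!(edits.as_ref().map(Vec::len), Some(2));
    println!("merged edits: {:?}", edits);
}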
@@ -836,6 +891,7 @@ mod tests {
         let (offset, text) = test_utils::extract_offset(fixture);
         let line_index = LineIndex::new(&text);
         let (analysis, file_id) = Analysis::from_single_file(text);
+        let resolve_caps = FxHashSet::default();
         let completions: Vec<(String, Option<String>)> = analysis
             .completions(
                 &ide::CompletionConfig::default(),
@@ -845,7 +901,7 @@ mod tests {
             .unwrap()
             .into_iter()
             .filter(|c| c.label().ends_with("arg"))
-            .map(|c| completion_item(&line_index, LineEndings::Unix, c))
+            .map(|c| completion_item(&line_index, LineEndings::Unix, c, &resolve_caps))
             .flat_map(|comps| comps.into_iter().map(|c| (c.label, c.sort_text)))
             .collect();
         expect_test::expect![[r#"