Auto merge of #118592 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
Committed by bors on 2023-12-04 17:18:56 +00:00
commit 0e2dac8375
34 changed files with 823 additions and 172 deletions


@ -2000,9 +2000,9 @@ dependencies = [
[[package]]
name = "triomphe"
version = "0.1.8"
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1ee9bd9239c339d714d657fac840c6d2a4f9c45f4f9ec7b0975113458be78db"
checksum = "d0c5a71827ac326072b6405552093e2ad2accd25a32fd78d4edc82d98c7f2409"
[[package]]
name = "tt"


@ -116,7 +116,7 @@ text-size = "1.1.1"
rayon = "1.8.0"
serde = { version = "1.0.192", features = ["derive"] }
serde_json = "1.0.108"
triomphe = { version = "0.1.8", default-features = false, features = ["std"] }
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
# can't upgrade due to dashmap depending on 0.12.3 currently
hashbrown = { version = "0.12.3", features = [
"inline-more",


@ -38,7 +38,6 @@ mod tests;
use std::{
fmt::{self, Debug},
hash::{Hash, Hasher},
marker::PhantomData,
ops::Index,
};
@ -340,34 +339,37 @@ pub trait ItemTreeNode: Clone {
fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
}
pub struct FileItemTreeId<N: ItemTreeNode> {
index: Idx<N>,
_p: PhantomData<N>,
pub struct FileItemTreeId<N: ItemTreeNode>(Idx<N>);
impl<N: ItemTreeNode> FileItemTreeId<N> {
pub fn index(&self) -> Idx<N> {
self.0
}
}
impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
fn clone(&self) -> Self {
Self { index: self.index, _p: PhantomData }
Self(self.0)
}
}
impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}
impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
fn eq(&self, other: &FileItemTreeId<N>) -> bool {
self.index == other.index
self.0 == other.0
}
}
impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}
impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
fn hash<H: Hasher>(&self, state: &mut H) {
self.index.hash(state)
self.0.hash(state)
}
}
impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.index.fmt(f)
self.0.fmt(f)
}
}
@ -548,7 +550,7 @@ impl Index<RawVisibilityId> for ItemTree {
impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
type Output = N;
fn index(&self, id: FileItemTreeId<N>) -> &N {
N::lookup(self, id.index)
N::lookup(self, id.index())
}
}
@ -925,23 +927,23 @@ impl ModItem {
pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
match self {
ModItem::Use(it) => tree[it.index].ast_id().upcast(),
ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
ModItem::Function(it) => tree[it.index].ast_id().upcast(),
ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
ModItem::Union(it) => tree[it.index].ast_id().upcast(),
ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
ModItem::Const(it) => tree[it.index].ast_id().upcast(),
ModItem::Static(it) => tree[it.index].ast_id().upcast(),
ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
ModItem::TraitAlias(it) => tree[it.index].ast_id().upcast(),
ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
ModItem::Use(it) => tree[it.index()].ast_id().upcast(),
ModItem::ExternCrate(it) => tree[it.index()].ast_id().upcast(),
ModItem::ExternBlock(it) => tree[it.index()].ast_id().upcast(),
ModItem::Function(it) => tree[it.index()].ast_id().upcast(),
ModItem::Struct(it) => tree[it.index()].ast_id().upcast(),
ModItem::Union(it) => tree[it.index()].ast_id().upcast(),
ModItem::Enum(it) => tree[it.index()].ast_id().upcast(),
ModItem::Const(it) => tree[it.index()].ast_id().upcast(),
ModItem::Static(it) => tree[it.index()].ast_id().upcast(),
ModItem::Trait(it) => tree[it.index()].ast_id().upcast(),
ModItem::TraitAlias(it) => tree[it.index()].ast_id().upcast(),
ModItem::Impl(it) => tree[it.index()].ast_id().upcast(),
ModItem::TypeAlias(it) => tree[it.index()].ast_id().upcast(),
ModItem::Mod(it) => tree[it.index()].ast_id().upcast(),
ModItem::MacroCall(it) => tree[it.index()].ast_id().upcast(),
ModItem::MacroRules(it) => tree[it.index()].ast_id().upcast(),
ModItem::MacroDef(it) => tree[it.index()].ast_id().upcast(),
}
}
}


@ -13,7 +13,7 @@ use crate::{
use super::*;
fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
FileItemTreeId { index, _p: PhantomData }
FileItemTreeId(index)
}
pub(super) struct Ctx<'a> {


@ -1152,20 +1152,15 @@ impl<'a> InferenceContext<'a> {
(ty, variant)
}
TypeNs::TypeAliasId(it) => {
let container = it.lookup(self.db.upcast()).container;
let parent_subst = match container {
ItemContainerId::TraitId(id) => {
let subst = TyBuilder::subst_for_def(self.db, id, None)
.fill_with_inference_vars(&mut self.table)
.build();
Some(subst)
}
// Type aliases do not exist in impls.
_ => None,
let resolved_seg = match unresolved {
None => path.segments().last().unwrap(),
Some(n) => path.segments().get(path.segments().len() - n - 1).unwrap(),
};
let ty = TyBuilder::def_ty(self.db, it.into(), parent_subst)
.fill_with_inference_vars(&mut self.table)
.build();
let substs =
ctx.substs_from_path_segment(resolved_seg, Some(it.into()), true, None);
let ty = self.db.ty(it.into());
let ty = self.insert_type_vars(ty.substitute(Interner, &substs));
self.resolve_variant_on_alias(ty, unresolved, mod_path)
}
TypeNs::AdtSelfType(_) => {


@ -768,7 +768,7 @@ impl<'a> TyLoweringContext<'a> {
}
}
fn substs_from_path_segment(
pub(super) fn substs_from_path_segment(
&self,
segment: PathSegment<'_>,
def: Option<GenericDefId>,


@ -269,6 +269,10 @@ impl ProjectionStore {
impl ProjectionId {
pub const EMPTY: ProjectionId = ProjectionId(0);
pub fn is_empty(self) -> bool {
self == ProjectionId::EMPTY
}
pub fn lookup(self, store: &ProjectionStore) -> &[PlaceElem] {
store.id_to_proj.get(&self).unwrap()
}
@ -1069,6 +1073,10 @@ pub struct MirBody {
}
impl MirBody {
pub fn local_to_binding_map(&self) -> ArenaMap<LocalId, BindingId> {
self.binding_locals.iter().map(|(it, y)| (*y, it)).collect()
}
fn walk_places(&mut self, mut f: impl FnMut(&mut Place, &mut ProjectionStore)) {
fn for_operand(
op: &mut Operand,
@ -1188,3 +1196,9 @@ pub enum MirSpan {
}
impl_from!(ExprId, PatId for MirSpan);
impl From<&ExprId> for MirSpan {
fn from(value: &ExprId) -> Self {
(*value).into()
}
}


@ -105,9 +105,14 @@ pub enum MirLowerError {
/// A token to ensuring that each drop scope is popped at most once, thanks to the compiler that checks moves.
struct DropScopeToken;
impl DropScopeToken {
fn pop_and_drop(self, ctx: &mut MirLowerCtx<'_>, current: BasicBlockId) -> BasicBlockId {
fn pop_and_drop(
self,
ctx: &mut MirLowerCtx<'_>,
current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId {
std::mem::forget(self);
ctx.pop_drop_scope_internal(current)
ctx.pop_drop_scope_internal(current, span)
}
/// It is useful when we want a drop scope is syntaxically closed, but we don't want to execute any drop
@ -582,7 +587,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.lower_loop(current, place, *label, expr_id.into(), |this, begin| {
let scope = this.push_drop_scope();
if let Some((_, mut current)) = this.lower_expr_as_place(begin, *body, true)? {
current = scope.pop_and_drop(this, current);
current = scope.pop_and_drop(this, current, body.into());
this.set_goto(current, begin, expr_id.into());
} else {
scope.pop_assume_dropped(this);
@ -720,7 +725,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
.ok_or(MirLowerError::ContinueWithoutLoop)?,
};
let begin = loop_data.begin;
current = self.drop_until_scope(loop_data.drop_scope_index, current);
current =
self.drop_until_scope(loop_data.drop_scope_index, current, expr_id.into());
self.set_goto(current, begin, expr_id.into());
Ok(None)
}
@ -759,7 +765,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.current_loop_blocks.as_ref().unwrap().drop_scope_index,
),
};
current = self.drop_until_scope(drop_scope, current);
current = self.drop_until_scope(drop_scope, current, expr_id.into());
self.set_goto(current, end, expr_id.into());
Ok(None)
}
@ -773,7 +779,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
}
}
current = self.drop_until_scope(0, current);
current = self.drop_until_scope(0, current, expr_id.into());
self.set_terminator(current, TerminatorKind::Return, expr_id.into());
Ok(None)
}
@ -1782,7 +1788,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
return Ok(None);
};
self.push_fake_read(c, p, expr.into());
current = scope2.pop_and_drop(self, c);
current = scope2.pop_and_drop(self, c, expr.into());
}
}
}
@ -1793,7 +1799,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
current = c;
}
current = scope.pop_and_drop(self, current);
current = scope.pop_and_drop(self, current, span);
Ok(Some(current))
}
@ -1873,9 +1879,14 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
fn drop_until_scope(&mut self, scope_index: usize, mut current: BasicBlockId) -> BasicBlockId {
fn drop_until_scope(
&mut self,
scope_index: usize,
mut current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId {
for scope in self.drop_scopes[scope_index..].to_vec().iter().rev() {
self.emit_drop_and_storage_dead_for_scope(scope, &mut current);
self.emit_drop_and_storage_dead_for_scope(scope, &mut current, span);
}
current
}
@ -1891,17 +1902,22 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
/// Don't call directly
fn pop_drop_scope_internal(&mut self, mut current: BasicBlockId) -> BasicBlockId {
fn pop_drop_scope_internal(
&mut self,
mut current: BasicBlockId,
span: MirSpan,
) -> BasicBlockId {
let scope = self.drop_scopes.pop().unwrap();
self.emit_drop_and_storage_dead_for_scope(&scope, &mut current);
self.emit_drop_and_storage_dead_for_scope(&scope, &mut current, span);
current
}
fn pop_drop_scope_assert_finished(
&mut self,
mut current: BasicBlockId,
span: MirSpan,
) -> Result<BasicBlockId> {
current = self.pop_drop_scope_internal(current);
current = self.pop_drop_scope_internal(current, span);
if !self.drop_scopes.is_empty() {
implementation_error!("Mismatched count between drop scope push and pops");
}
@ -1912,6 +1928,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
&mut self,
scope: &DropScope,
current: &mut Idx<BasicBlock>,
span: MirSpan,
) {
for &l in scope.locals.iter().rev() {
if !self.result.locals[l].ty.clone().is_copy(self.db, self.owner) {
@ -1919,13 +1936,10 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_terminator(
prev,
TerminatorKind::Drop { place: l.into(), target: *current, unwind: None },
MirSpan::Unknown,
span,
);
}
self.push_statement(
*current,
StatementKind::StorageDead(l).with_span(MirSpan::Unknown),
);
self.push_statement(*current, StatementKind::StorageDead(l).with_span(span));
}
}
}
@ -2002,7 +2016,7 @@ pub fn mir_body_for_closure_query(
|_| true,
)?;
if let Some(current) = ctx.lower_expr_to_place(*root, return_slot().into(), current)? {
let current = ctx.pop_drop_scope_assert_finished(current)?;
let current = ctx.pop_drop_scope_assert_finished(current, root.into())?;
ctx.set_terminator(current, TerminatorKind::Return, (*root).into());
}
let mut upvar_map: FxHashMap<LocalId, Vec<(&CapturedItem, usize)>> = FxHashMap::default();
@ -2146,7 +2160,7 @@ pub fn lower_to_mir(
ctx.lower_params_and_bindings([].into_iter(), binding_picker)?
};
if let Some(current) = ctx.lower_expr_to_place(root_expr, return_slot().into(), current)? {
let current = ctx.pop_drop_scope_assert_finished(current)?;
let current = ctx.pop_drop_scope_assert_finished(current, root_expr.into())?;
ctx.set_terminator(current, TerminatorKind::Return, root_expr.into());
}
Ok(ctx.result)


@ -145,7 +145,7 @@ impl<'a> MirPrettyCtx<'a> {
let indent = mem::take(&mut self.indent);
let mut ctx = MirPrettyCtx {
body: &body,
local_to_binding: body.binding_locals.iter().map(|(it, y)| (*y, it)).collect(),
local_to_binding: body.local_to_binding_map(),
result,
indent,
..*self
@ -167,7 +167,7 @@ impl<'a> MirPrettyCtx<'a> {
}
fn new(body: &'a MirBody, hir_body: &'a Body, db: &'a dyn HirDatabase) -> Self {
let local_to_binding = body.binding_locals.iter().map(|(it, y)| (*y, it)).collect();
let local_to_binding = body.local_to_binding_map();
MirPrettyCtx {
body,
db,


@ -1129,3 +1129,27 @@ fn foo() {
"#,
);
}
#[test]
fn generic_alias() {
check_types(
r#"
type Wrap<T> = T;
enum X {
A { cool: u32, stuff: u32 },
B,
}
fn main() {
let wrapped = Wrap::<X>::A {
cool: 100,
stuff: 100,
};
if let Wrap::<X>::A { cool, ..} = &wrapped {}
//^^^^ &u32
}
"#,
);
}


@ -67,7 +67,7 @@ use hir_ty::{
known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self, TyFingerprint},
mir::{self, interpret_mir},
mir::interpret_mir,
primitive::UintTy,
traits::FnTrait,
AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId, GenericArg,
@ -129,9 +129,10 @@ pub use {
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
mir::MirEvalError,
PointerCast, Safety,
},
// FIXME: Properly encapsulate mir
hir_ty::{mir, Interner as ChalkTyInterner},
};
// These are negative re-exports: pub using these names is forbidden, they
@ -1914,17 +1915,20 @@ impl DefWithBody {
if let ast::Expr::MatchExpr(match_expr) =
&source_ptr.value.to_node(&root)
{
if let Some(scrut_expr) = match_expr.expr() {
acc.push(
MissingMatchArms {
scrutinee_expr: InFile::new(
source_ptr.file_id,
AstPtr::new(&scrut_expr),
),
uncovered_patterns,
}
.into(),
);
match match_expr.expr() {
Some(scrut_expr) if match_expr.match_arm_list().is_some() => {
acc.push(
MissingMatchArms {
scrutinee_expr: InFile::new(
source_ptr.file_id,
AstPtr::new(&scrut_expr),
),
uncovered_patterns,
}
.into(),
);
}
_ => {}
}
}
}


@ -1,4 +1,4 @@
use syntax::{ast, AstNode};
use syntax::{ast, AstNode, SyntaxKind, T};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -39,7 +39,19 @@ pub(crate) fn remove_parentheses(acc: &mut Assists, ctx: &AssistContext<'_>) ->
AssistId("remove_parentheses", AssistKind::Refactor),
"Remove redundant parentheses",
target,
|builder| builder.replace_ast(parens.into(), expr),
|builder| {
let prev_token = parens.syntax().first_token().and_then(|it| it.prev_token());
let need_to_add_ws = match prev_token {
Some(it) => {
let tokens = vec![T![&], T![!], T!['('], T!['['], T!['{']];
it.kind() != SyntaxKind::WHITESPACE && !tokens.contains(&it.kind())
}
None => false,
};
let expr = if need_to_add_ws { format!(" {}", expr) } else { expr.to_string() };
builder.replace(parens.syntax().text_range(), expr)
},
)
}
@ -49,6 +61,15 @@ mod tests {
use super::*;
#[test]
fn remove_parens_space() {
check_assist(
remove_parentheses,
r#"fn f() { match$0(true) {} }"#,
r#"fn f() { match true {} }"#,
);
}
#[test]
fn remove_parens_simple() {
check_assist(remove_parentheses, r#"fn f() { $0(2) + 2; }"#, r#"fn f() { 2 + 2; }"#);
@ -94,8 +115,8 @@ mod tests {
check_assist(remove_parentheses, r#"fn f() { f(($02 + 2)); }"#, r#"fn f() { f(2 + 2); }"#);
check_assist(
remove_parentheses,
r#"fn f() { (1<2)&&$0(3>4); }"#,
r#"fn f() { (1<2)&&3>4; }"#,
r#"fn f() { (1<2) &&$0(3>4); }"#,
r#"fn f() { (1<2) && 3>4; }"#,
);
}
@ -164,8 +185,8 @@ mod tests {
fn remove_parens_weird_places() {
check_assist(
remove_parentheses,
r#"fn f() { match () { _=>$0(()) } }"#,
r#"fn f() { match () { _=>() } }"#,
r#"fn f() { match () { _ =>$0(()) } }"#,
r#"fn f() { match () { _ => () } }"#,
);
check_assist(


@ -26,17 +26,17 @@ pub(crate) fn complete_dot(
item.add_to(acc, ctx.db);
}
if let DotAccessKind::Method { .. } = dot_access.kind {
cov_mark::hit!(test_no_struct_field_completion_for_method_call);
} else {
complete_fields(
acc,
ctx,
receiver_ty,
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
);
}
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
complete_fields(
acc,
ctx,
receiver_ty,
|acc, field, ty| acc.add_field(ctx, dot_access, None, field, &ty),
|acc, field, ty| acc.add_tuple_field(ctx, None, field, &ty),
is_field_access,
);
complete_methods(ctx, receiver_ty, |func| acc.add_method(ctx, dot_access, func, None, None));
}
@ -82,6 +82,7 @@ pub(crate) fn complete_undotted_self(
)
},
|acc, field, ty| acc.add_tuple_field(ctx, Some(hir::known::SELF_PARAM), field, &ty),
true,
);
complete_methods(ctx, &ty, |func| {
acc.add_method(
@ -104,18 +105,23 @@ fn complete_fields(
receiver: &hir::Type,
mut named_field: impl FnMut(&mut Completions, hir::Field, hir::Type),
mut tuple_index: impl FnMut(&mut Completions, usize, hir::Type),
is_field_access: bool,
) {
let mut seen_names = FxHashSet::default();
for receiver in receiver.autoderef(ctx.db) {
for (field, ty) in receiver.fields(ctx.db) {
if seen_names.insert(field.name(ctx.db)) {
if seen_names.insert(field.name(ctx.db))
&& (is_field_access || ty.is_fn() || ty.is_closure())
{
named_field(acc, field, ty);
}
}
for (i, ty) in receiver.tuple_fields(ctx.db).into_iter().enumerate() {
// Tuples are always the last type in a deref chain, so just check if the name is
// already seen without inserting into the hashset.
if !seen_names.contains(&hir::Name::new_tuple_field(i)) {
if !seen_names.contains(&hir::Name::new_tuple_field(i))
&& (is_field_access || ty.is_fn() || ty.is_closure())
{
// Tuple fields are always public (tuple struct fields are handled above).
tuple_index(acc, i, ty);
}
@ -250,7 +256,6 @@ impl A {
#[test]
fn test_no_struct_field_completion_for_method_call() {
cov_mark::check!(test_no_struct_field_completion_for_method_call);
check(
r#"
struct A { the_field: u32 }
@ -1172,4 +1177,63 @@ impl<B: Bar, F: core::ops::Deref<Target = B>> Foo<F> {
"#]],
);
}
#[test]
fn test_struct_function_field_completion() {
check(
r#"
struct S { va_field: u32, fn_field: fn() }
fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
"#,
expect![[r#"
fd fn_field fn()
"#]],
);
check_edit(
"fn_field",
r#"
struct S { va_field: u32, fn_field: fn() }
fn foo() { S { va_field: 0, fn_field: || {} }.fi$0() }
"#,
r#"
struct S { va_field: u32, fn_field: fn() }
fn foo() { (S { va_field: 0, fn_field: || {} }.fn_field)() }
"#,
);
}
#[test]
fn test_tuple_function_field_completion() {
check(
r#"
struct B(u32, fn())
fn foo() {
let b = B(0, || {});
b.$0()
}
"#,
expect![[r#"
fd 1 fn()
"#]],
);
check_edit(
"1",
r#"
struct B(u32, fn())
fn foo() {
let b = B(0, || {});
b.$0()
}
"#,
r#"
struct B(u32, fn())
fn foo() {
let b = B(0, || {});
(b.1)()
}
"#,
)
}
}


@ -18,9 +18,10 @@ use ide_db::{
RootDatabase, SnippetCap, SymbolKind,
};
use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
use text_edit::TextEdit;
use crate::{
context::{DotAccess, PathCompletionCtx, PathKind, PatternContext},
context::{DotAccess, DotAccessKind, PathCompletionCtx, PathKind, PatternContext},
item::{Builder, CompletionRelevanceTypeMatch},
render::{
function::render_fn,
@ -147,7 +148,42 @@ pub(crate) fn render_field(
.set_documentation(field.docs(db))
.set_deprecated(is_deprecated)
.lookup_by(name);
item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name));
let is_field_access = matches!(dot_access.kind, DotAccessKind::Field { .. });
if !is_field_access || ty.is_fn() || ty.is_closure() {
let mut builder = TextEdit::builder();
// Using TextEdit, insert '(' before the struct name and ')' before the
// dot access, then comes the field name and optionally insert function
// call parens.
builder.replace(
ctx.source_range(),
field_with_receiver(db, receiver.as_ref(), &escaped_name).into(),
);
let expected_fn_type =
ctx.completion.expected_type.as_ref().is_some_and(|ty| ty.is_fn() || ty.is_closure());
if !expected_fn_type {
if let Some(receiver) = &dot_access.receiver {
if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) {
builder.insert(receiver.syntax().text_range().start(), "(".to_string());
builder.insert(ctx.source_range().end(), ")".to_string());
}
}
let is_parens_needed =
!matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
if is_parens_needed {
builder.insert(ctx.source_range().end(), "()".to_string());
}
}
item.text_edit(builder.finish());
} else {
item.insert_text(field_with_receiver(db, receiver.as_ref(), &escaped_name));
}
if let Some(receiver) = &dot_access.receiver {
if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
@ -1600,7 +1636,7 @@ fn main() {
fn struct_field_method_ref() {
check_kinds(
r#"
struct Foo { bar: u32 }
struct Foo { bar: u32, qux: fn() }
impl Foo { fn baz(&self) -> u32 { 0 } }
fn foo(f: Foo) { let _: &u32 = f.b$0 }
@ -1610,30 +1646,92 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 }
[
CompletionItem {
label: "baz()",
source_range: 98..99,
delete: 98..99,
source_range: 109..110,
delete: 109..110,
insert: "baz()$0",
kind: Method,
lookup: "baz",
detail: "fn(&self) -> u32",
ref_match: "&@96",
ref_match: "&@107",
},
CompletionItem {
label: "bar",
source_range: 98..99,
delete: 98..99,
source_range: 109..110,
delete: 109..110,
insert: "bar",
kind: SymbolKind(
Field,
),
detail: "u32",
ref_match: "&@96",
ref_match: "&@107",
},
CompletionItem {
label: "qux",
source_range: 109..110,
text_edit: TextEdit {
indels: [
Indel {
insert: "(",
delete: 107..107,
},
Indel {
insert: "qux)()",
delete: 109..110,
},
],
},
kind: SymbolKind(
Field,
),
detail: "fn()",
},
]
"#]],
);
}
#[test]
fn expected_fn_type_ref() {
check_kinds(
r#"
struct S { field: fn() }
fn foo() {
let foo: fn() = S { fields: || {}}.fi$0;
}
"#,
&[CompletionItemKind::SymbolKind(SymbolKind::Field)],
expect![[r#"
[
CompletionItem {
label: "field",
source_range: 76..78,
delete: 76..78,
insert: "field",
kind: SymbolKind(
Field,
),
detail: "fn()",
relevance: CompletionRelevance {
exact_name_match: false,
type_match: Some(
Exact,
),
is_local: false,
is_item_from_trait: false,
is_name_already_imported: false,
requires_import: false,
is_op_method: false,
is_private_editable: false,
postfix_match: None,
is_definite: false,
},
},
]
"#]],
)
}
#[test]
fn qualified_path_ref() {
check_kinds(


@ -354,6 +354,35 @@ fn outer(Foo { bar$0 }: Foo) {}
)
}
#[test]
fn completes_in_record_field_pat_with_generic_type_alias() {
check_empty(
r#"
type Wrap<T> = T;
enum X {
A { cool: u32, stuff: u32 },
B,
}
fn main() {
let wrapped = Wrap::<X>::A {
cool: 100,
stuff: 100,
};
if let Wrap::<X>::A { $0 } = &wrapped {};
}
"#,
expect![[r#"
fd cool u32
fd stuff u32
kw mut
kw ref
"#]],
)
}
#[test]
fn completes_in_fn_param() {
check_empty(


@ -17,7 +17,10 @@ pub(crate) fn missing_match_arms(
#[cfg(test)]
mod tests {
use crate::tests::check_diagnostics;
use crate::{
tests::{check_diagnostics, check_diagnostics_with_config},
DiagnosticsConfig,
};
#[track_caller]
fn check_diagnostics_no_bails(ra_fixture: &str) {
@ -25,6 +28,20 @@ mod tests {
crate::tests::check_diagnostics(ra_fixture)
}
#[test]
fn empty_body() {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("syntax-error".to_string());
check_diagnostics_with_config(
config,
r#"
fn main() {
match 0;
}
"#,
);
}
#[test]
fn empty_tuple() {
check_diagnostics_no_bails(


@ -31,6 +31,7 @@ mod discriminant;
mod fn_lifetime_fn;
mod implicit_static;
mod param_name;
mod implicit_drop;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct InlayHintsConfig {
@ -45,6 +46,7 @@ pub struct InlayHintsConfig {
pub closure_return_type_hints: ClosureReturnTypeHints,
pub closure_capture_hints: bool,
pub binding_mode_hints: bool,
pub implicit_drop_hints: bool,
pub lifetime_elision_hints: LifetimeElisionHints,
pub param_names_for_lifetime_elision_hints: bool,
pub hide_named_constructor_hints: bool,
@ -124,6 +126,7 @@ pub enum InlayKind {
Lifetime,
Parameter,
Type,
Drop,
}
#[derive(Debug)]
@ -503,7 +506,10 @@ fn hints(
ast::Item(it) => match it {
// FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
ast::Item::Impl(_) => None,
ast::Item::Fn(it) => fn_lifetime_fn::hints(hints, config, it),
ast::Item::Fn(it) => {
implicit_drop::hints(hints, sema, config, &it);
fn_lifetime_fn::hints(hints, config, it)
},
// static type elisions
ast::Item::Static(it) => implicit_static::hints(hints, config, Either::Left(it)),
ast::Item::Const(it) => implicit_static::hints(hints, config, Either::Right(it)),
@ -591,6 +597,7 @@ mod tests {
max_length: None,
closing_brace_hints_min_lines: None,
fields_to_resolve: InlayFieldsToResolve::empty(),
implicit_drop_hints: false,
};
pub(super) const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
type_hints: true,


@ -0,0 +1,204 @@
//! Implementation of "implicit drop" inlay hints:
//! ```no_run
//! fn main() {
//! let x = vec![2];
//! if some_condition() {
//! /* drop(x) */return;
//! }
//! }
//! ```
use hir::{
db::{DefDatabase as _, HirDatabase as _},
mir::{MirSpan, TerminatorKind},
ChalkTyInterner, DefWithBody, Semantics,
};
use ide_db::{base_db::FileRange, RootDatabase};
use syntax::{
ast::{self, AstNode},
match_ast,
};
use crate::{InlayHint, InlayHintLabel, InlayHintPosition, InlayHintsConfig, InlayKind};
pub(super) fn hints(
acc: &mut Vec<InlayHint>,
sema: &Semantics<'_, RootDatabase>,
config: &InlayHintsConfig,
def: &ast::Fn,
) -> Option<()> {
if !config.implicit_drop_hints {
return None;
}
let def = sema.to_def(def)?;
let def: DefWithBody = def.into();
let source_map = sema.db.body_with_source_map(def.into()).1;
let hir = sema.db.body(def.into());
let mir = sema.db.mir_body(def.into()).ok()?;
let local_to_binding = mir.local_to_binding_map();
for (_, bb) in mir.basic_blocks.iter() {
let terminator = bb.terminator.as_ref()?;
if let TerminatorKind::Drop { place, .. } = terminator.kind {
if !place.projection.is_empty() {
continue; // Ignore complex cases for now
}
if mir.locals[place.local].ty.adt_id(ChalkTyInterner).is_none() {
continue; // Arguably only ADTs have significant drop impls
}
let Some(binding) = local_to_binding.get(place.local) else {
continue; // Ignore temporary values
};
let range = match terminator.span {
MirSpan::ExprId(e) => match source_map.expr_syntax(e) {
Ok(s) => {
let root = &s.file_syntax(sema.db);
let expr = s.value.to_node(root);
let expr = expr.syntax();
match_ast! {
match expr {
ast::BlockExpr(x) => x.stmt_list().and_then(|x| x.r_curly_token()).map(|x| x.text_range()).unwrap_or_else(|| expr.text_range()),
_ => expr.text_range(),
}
}
}
Err(_) => continue,
},
MirSpan::PatId(p) => match source_map.pat_syntax(p) {
Ok(s) => s.value.text_range(),
Err(_) => continue,
},
MirSpan::Unknown => continue,
};
let binding = &hir.bindings[*binding];
let binding_source = binding
.definitions
.first()
.and_then(|d| source_map.pat_syntax(*d).ok())
.and_then(|d| {
Some(FileRange { file_id: d.file_id.file_id()?, range: d.value.text_range() })
});
let name = binding.name.to_smol_str();
if name.starts_with("<ra@") {
continue; // Ignore desugared variables
}
let mut label = InlayHintLabel::simple(
name,
Some(crate::InlayTooltip::String("moz".into())),
binding_source,
);
label.prepend_str("drop(");
label.append_str(")");
acc.push(InlayHint {
range,
position: InlayHintPosition::Before,
pad_left: true,
pad_right: true,
kind: InlayKind::Drop,
needs_resolve: label.needs_resolve(),
label,
text_edit: None,
})
}
}
Some(())
}
#[cfg(test)]
mod tests {
use crate::{
inlay_hints::tests::{check_with_config, DISABLED_CONFIG},
InlayHintsConfig,
};
const ONLY_DROP_CONFIG: InlayHintsConfig =
InlayHintsConfig { implicit_drop_hints: true, ..DISABLED_CONFIG };
#[test]
fn basic() {
check_with_config(
ONLY_DROP_CONFIG,
r#"
struct X;
fn f() {
let x = X;
if 2 == 5 {
return;
//^^^^^^ drop(x)
}
}
//^ drop(x)
"#,
);
}
#[test]
fn no_hint_for_copy_types_and_mutable_references() {
// `T: Copy` and `T = &mut U` types do nothing on drop, so we should hide drop inlay hint for them.
check_with_config(
ONLY_DROP_CONFIG,
r#"
//- minicore: copy, derive
struct X(i32, i32);
#[derive(Clone, Copy)]
struct Y(i32, i32);
fn f() {
let a = 2;
let b = a + 4;
let mut x = X(a, b);
let mut y = Y(a, b);
let mx = &mut x;
let my = &mut y;
let c = a + b;
}
//^ drop(x)
"#,
);
}
#[test]
fn try_operator() {
// We currently show drop inlay hint for every `?` operator that may potentialy drop something. We probably need to
// make it configurable as it doesn't seem very useful.
check_with_config(
ONLY_DROP_CONFIG,
r#"
//- minicore: copy, try, option
struct X;
fn f() -> Option<()> {
let x = X;
let t_opt = Some(2);
let t = t_opt?;
//^^^^^^ drop(x)
Some(())
}
//^ drop(x)
"#,
);
}
#[test]
fn if_let() {
check_with_config(
ONLY_DROP_CONFIG,
r#"
struct X;
fn f() {
let x = X;
if let X = x {
let y = X;
}
//^ drop(y)
}
//^ drop(x)
"#,
);
}
}


@ -118,6 +118,7 @@ impl StaticIndex<'_> {
adjustment_hints: crate::AdjustmentHints::Never,
adjustment_hints_mode: AdjustmentHintsMode::Prefix,
adjustment_hints_hide_outside_unsafe: false,
implicit_drop_hints: false,
hide_named_constructor_hints: false,
hide_closure_initialization_hints: false,
closure_style: hir::ClosureStyle::ImplFn,


@ -376,6 +376,16 @@ fn error_block(p: &mut Parser<'_>, message: &str) {
m.complete(p, ERROR);
}
// test_err top_level_let
// let ref foo: fn() = 1 + 3;
fn error_let_stmt(p: &mut Parser<'_>, message: &str) {
assert!(p.at(T![let]));
let m = p.start();
p.error(message);
expressions::let_stmt(p, expressions::Semicolon::Optional);
m.complete(p, ERROR);
}
/// The `parser` passed this is required to at least consume one token if it returns `true`.
/// If the `parser` returns false, parsing will stop.
fn delimited(


@ -59,7 +59,8 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
attributes::outer_attrs(p);
if p.at(T![let]) {
let_stmt(p, m, semicolon);
let_stmt(p, semicolon);
m.complete(p, LET_STMT);
return;
}
@ -109,54 +110,53 @@ pub(super) fn stmt(p: &mut Parser<'_>, semicolon: Semicolon) {
m.complete(p, EXPR_STMT);
}
}
}
// test let_stmt
// fn f() { let x: i32 = 92; }
fn let_stmt(p: &mut Parser<'_>, m: Marker, with_semi: Semicolon) {
p.bump(T![let]);
patterns::pattern(p);
if p.at(T![:]) {
// test let_stmt_ascription
// fn f() { let x: i32; }
types::ascription(p);
}
// test let_stmt
// fn f() { let x: i32 = 92; }
pub(super) fn let_stmt(p: &mut Parser<'_>, with_semi: Semicolon) {
p.bump(T![let]);
patterns::pattern(p);
if p.at(T![:]) {
// test let_stmt_ascription
// fn f() { let x: i32; }
types::ascription(p);
}
let mut expr_after_eq: Option<CompletedMarker> = None;
if p.eat(T![=]) {
// test let_stmt_init
// fn f() { let x = 92; }
expr_after_eq = expressions::expr(p);
}
let mut expr_after_eq: Option<CompletedMarker> = None;
if p.eat(T![=]) {
// test let_stmt_init
// fn f() { let x = 92; }
expr_after_eq = expressions::expr(p);
}
if p.at(T![else]) {
// test_err let_else_right_curly_brace
// fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
if let Some(expr) = expr_after_eq {
if BlockLike::is_blocklike(expr.kind()) {
p.error(
"right curly brace `}` before `else` in a `let...else` statement not allowed",
)
}
}
// test let_else
// fn f() { let Some(x) = opt else { return }; }
let m = p.start();
p.bump(T![else]);
block_expr(p);
m.complete(p, LET_ELSE);
}
match with_semi {
Semicolon::Forbidden => (),
Semicolon::Optional => {
p.eat(T![;]);
}
Semicolon::Required => {
p.expect(T![;]);
if p.at(T![else]) {
// test_err let_else_right_curly_brace
// fn func() { let Some(_) = {Some(1)} else { panic!("h") };}
if let Some(expr) = expr_after_eq {
if BlockLike::is_blocklike(expr.kind()) {
p.error(
"right curly brace `}` before `else` in a `let...else` statement not allowed",
)
}
}
m.complete(p, LET_STMT);
// test let_else
// fn f() { let Some(x) = opt else { return }; }
let m = p.start();
p.bump(T![else]);
block_expr(p);
m.complete(p, LET_ELSE);
}
match with_semi {
Semicolon::Forbidden => (),
Semicolon::Optional => {
p.eat(T![;]);
}
Semicolon::Required => {
p.expect(T![;]);
}
}
}
@ -693,6 +693,17 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) {
// We permit `.. }` on the left-hand side of a destructuring assignment.
if !p.at(T!['}']) {
expr(p);
if p.at(T![,]) {
// test_err comma_after_functional_update_syntax
// fn foo() {
// S { ..x, };
// S { ..x, a: 0 }
// }
// Do not bump, so we can support additional fields after this comma.
p.error("cannot use a comma after the base struct");
}
}
}
T!['{'] => {


@ -79,6 +79,7 @@ pub(super) fn item_or_macro(p: &mut Parser<'_>, stop_on_r_curly: bool) {
e.complete(p, ERROR);
}
EOF | T!['}'] => p.error("expected an item"),
T![let] => error_let_stmt(p, "expected an item"),
_ => p.err_and_bump("expected an item"),
}
}


@ -0,0 +1,66 @@
SOURCE_FILE
FN
FN_KW "fn"
WHITESPACE " "
NAME
IDENT "foo"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
BLOCK_EXPR
STMT_LIST
L_CURLY "{"
WHITESPACE "\n "
EXPR_STMT
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
DOT2 ".."
PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "x"
COMMA ","
WHITESPACE " "
R_CURLY "}"
SEMICOLON ";"
WHITESPACE "\n "
RECORD_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "S"
WHITESPACE " "
RECORD_EXPR_FIELD_LIST
L_CURLY "{"
WHITESPACE " "
DOT2 ".."
PATH_EXPR
PATH
PATH_SEGMENT
NAME_REF
IDENT "x"
COMMA ","
WHITESPACE " "
RECORD_EXPR_FIELD
NAME_REF
IDENT "a"
COLON ":"
WHITESPACE " "
LITERAL
INT_NUMBER "0"
WHITESPACE " "
R_CURLY "}"
WHITESPACE "\n"
R_CURLY "}"
WHITESPACE "\n"
error 22: cannot use a comma after the base struct
error 38: cannot use a comma after the base struct


@ -0,0 +1,4 @@
fn foo() {
S { ..x, };
S { ..x, a: 0 }
}


@ -0,0 +1,30 @@
SOURCE_FILE
ERROR
LET_KW "let"
WHITESPACE " "
IDENT_PAT
REF_KW "ref"
WHITESPACE " "
NAME
IDENT "foo"
COLON ":"
WHITESPACE " "
FN_PTR_TYPE
FN_KW "fn"
PARAM_LIST
L_PAREN "("
R_PAREN ")"
WHITESPACE " "
EQ "="
WHITESPACE " "
BIN_EXPR
LITERAL
INT_NUMBER "1"
WHITESPACE " "
PLUS "+"
WHITESPACE " "
LITERAL
INT_NUMBER "3"
SEMICOLON ";"
WHITESPACE "\n"
error 0: expected an item


@ -0,0 +1 @@
let ref foo: fn() = 1 + 3;


@ -783,6 +783,7 @@ impl flags::AnalysisStats {
closure_return_type_hints: ide::ClosureReturnTypeHints::Always,
closure_capture_hints: true,
binding_mode_hints: true,
implicit_drop_hints: true,
lifetime_elision_hints: ide::LifetimeElisionHints::Always,
param_names_for_lifetime_elision_hints: true,
hide_named_constructor_hints: false,


@ -381,6 +381,8 @@ config_data! {
inlayHints_expressionAdjustmentHints_hideOutsideUnsafe: bool = "false",
/// Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
inlayHints_expressionAdjustmentHints_mode: AdjustmentHintsModeDef = "\"prefix\"",
/// Whether to show implicit drop hints.
inlayHints_implicitDrops_enable: bool = "false",
/// Whether to show inlay type hints for elided lifetimes in function signatures.
inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
/// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
@ -1391,6 +1393,7 @@ impl Config {
type_hints: self.data.inlayHints_typeHints_enable,
parameter_hints: self.data.inlayHints_parameterHints_enable,
chaining_hints: self.data.inlayHints_chainingHints_enable,
implicit_drop_hints: self.data.inlayHints_implicitDrops_enable,
discriminant_hints: match self.data.inlayHints_discriminantHints_enable {
DiscriminantHintsDef::Always => ide::DiscriminantHints::Always,
DiscriminantHintsDef::Never => ide::DiscriminantHints::Never,


@ -22,6 +22,7 @@ use ide_db::{
base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, ProcMacros},
FxHashMap,
};
use itertools::Itertools;
use load_cargo::{load_proc_macro, ProjectFolders};
use proc_macro_api::ProcMacroServer;
use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
@ -227,16 +228,12 @@ impl GlobalState {
let mut i = 0;
while i < workspaces.len() {
if let Ok(w) = &workspaces[i] {
let dupes: Vec<_> = workspaces
let dupes: Vec<_> = workspaces[i + 1..]
.iter()
.enumerate()
.skip(i + 1)
.filter_map(|(i, it)| {
it.as_ref().ok().filter(|ws| ws.eq_ignore_build_data(w)).map(|_| i)
})
.positions(|it| it.as_ref().is_ok_and(|ws| ws.eq_ignore_build_data(w)))
.collect();
dupes.into_iter().rev().for_each(|d| {
_ = workspaces.remove(d);
_ = workspaces.remove(d + i + 1);
});
}
i += 1;


@ -1054,6 +1054,10 @@ pub mod option {
Some(T),
}
// region:copy
impl<T: Copy> Copy for Option<T> {}
// endregion:copy
impl<T> Option<T> {
pub const fn unwrap(self) -> T {
match self {


@ -564,6 +564,11 @@ Whether to hide inlay hints for type adjustments outside of `unsafe` blocks.
--
Whether to show inlay hints as postfix ops (`.*` instead of `*`, etc).
--
[[rust-analyzer.inlayHints.implicitDrops.enable]]rust-analyzer.inlayHints.implicitDrops.enable (default: `false`)::
+
--
Whether to show implicit drop hints.
--
[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
+
--


@ -1264,6 +1264,11 @@
"Show prefix or postfix depending on which uses less parenthesis, preferring postfix."
]
},
"rust-analyzer.inlayHints.implicitDrops.enable": {
"markdownDescription": "Whether to show implicit drop hints.",
"default": false,
"type": "boolean"
},
"rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
"markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
"default": "never",


@ -3,7 +3,7 @@ import * as vscode from "vscode";
import * as path from "path";
import type * as ra from "./lsp_ext";
import { Cargo, getRustcId, getSysroot } from "./toolchain";
import { Cargo, type ExecutableInfo, getRustcId, getSysroot } from "./toolchain";
import type { Ctx } from "./ctx";
import { prepareEnv } from "./run";
import { unwrapUndefinable } from "./undefinable";
@ -12,6 +12,7 @@ const debugOutput = vscode.window.createOutputChannel("Debug");
type DebugConfigProvider = (
config: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
) => vscode.DebugConfiguration;
@ -130,7 +131,7 @@ async function getDebugConfiguration(
}
const env = prepareEnv(runnable, ctx.config.runnablesExtraEnv);
const executable = await getDebugExecutable(runnable, env);
const { executable, workspace: cargoWorkspace } = await getDebugExecutableInfo(runnable, env);
let sourceFileMap = debugOptions.sourceFileMap;
if (sourceFileMap === "auto") {
// let's try to use the default toolchain
@ -142,7 +143,13 @@ async function getDebugConfiguration(
}
const provider = unwrapUndefinable(knownEngines[debugEngine.id]);
const debugConfig = provider(runnable, simplifyPath(executable), env, sourceFileMap);
const debugConfig = provider(
runnable,
simplifyPath(executable),
cargoWorkspace,
env,
sourceFileMap,
);
if (debugConfig.type in debugOptions.engineSettings) {
const settingsMap = (debugOptions.engineSettings as any)[debugConfig.type];
for (var key in settingsMap) {
@ -164,20 +171,21 @@ async function getDebugConfiguration(
return debugConfig;
}
async function getDebugExecutable(
async function getDebugExecutableInfo(
runnable: ra.Runnable,
env: Record<string, string>,
): Promise<string> {
): Promise<ExecutableInfo> {
const cargo = new Cargo(runnable.args.workspaceRoot || ".", debugOutput, env);
const executable = await cargo.executableFromArgs(runnable.args.cargoArgs);
const executableInfo = await cargo.executableInfoFromArgs(runnable.args.cargoArgs);
// if we are here, there were no compilation errors.
return executable;
return executableInfo;
}
function getLldbDebugConfig(
runnable: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
@ -187,7 +195,7 @@ function getLldbDebugConfig(
name: runnable.label,
program: executable,
args: runnable.args.executableArgs,
cwd: runnable.args.workspaceRoot,
cwd: cargoWorkspace || runnable.args.workspaceRoot,
sourceMap: sourceFileMap,
sourceLanguages: ["rust"],
env,
@ -197,6 +205,7 @@ function getLldbDebugConfig(
function getCppvsDebugConfig(
runnable: ra.Runnable,
executable: string,
cargoWorkspace: string,
env: Record<string, string>,
sourceFileMap?: Record<string, string>,
): vscode.DebugConfiguration {
@ -206,7 +215,7 @@ function getCppvsDebugConfig(
name: runnable.label,
program: executable,
args: runnable.args.executableArgs,
cwd: runnable.args.workspaceRoot,
cwd: cargoWorkspace || runnable.args.workspaceRoot,
sourceFileMap,
env,
};


@ -9,11 +9,17 @@ import { unwrapUndefinable } from "./undefinable";
interface CompilationArtifact {
fileName: string;
workspace: string;
name: string;
kind: string;
isTest: boolean;
}
export interface ExecutableInfo {
executable: string;
workspace: string;
}
export interface ArtifactSpec {
cargoArgs: string[];
filter?: (artifacts: CompilationArtifact[]) => CompilationArtifact[];
@ -68,6 +74,7 @@ export class Cargo {
artifacts.push({
fileName: message.executable,
name: message.target.name,
workspace: message.manifest_path.replace(/\/Cargo\.toml$/, ""),
kind: message.target.kind[0],
isTest: message.profile.test,
});
@ -86,7 +93,7 @@ export class Cargo {
return spec.filter?.(artifacts) ?? artifacts;
}
async executableFromArgs(args: readonly string[]): Promise<string> {
async executableInfoFromArgs(args: readonly string[]): Promise<ExecutableInfo> {
const artifacts = await this.getArtifacts(Cargo.artifactSpec(args));
if (artifacts.length === 0) {
@ -96,7 +103,10 @@ export class Cargo {
}
const artifact = unwrapUndefinable(artifacts[0]);
return artifact.fileName;
return {
executable: artifact.fileName,
workspace: artifact.workspace,
};
}
private async runCargo(