From d0f2db3bf44cbfea1a6293de2199974421e60a27 Mon Sep 17 00:00:00 2001 From: feniljain Date: Thu, 15 Sep 2022 17:22:44 +0530 Subject: [PATCH 01/66] fix(generate_method): correct method indentation inside generated impl --- .../src/handlers/generate_function.rs | 85 +++++++++---------- 1 file changed, 38 insertions(+), 47 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index e26c76da189..f67cc924409 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -174,10 +174,11 @@ fn add_func_to_accumulator( label: String, ) -> Option<()> { acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| { - let function_template = function_builder.render(); + let indent = IndentLevel::from_node(function_builder.target.syntax()); + let function_template = function_builder.render(adt_name.is_some()); let mut func = function_template.to_string(ctx.config.snippet_cap); if let Some(name) = adt_name { - func = format!("\nimpl {} {{\n{}\n}}", name, func); + func = format!("\n{}impl {} {{\n{}\n{}}}", indent, name, func, indent); } builder.edit_file(file); match ctx.config.snippet_cap { @@ -307,7 +308,7 @@ impl FunctionBuilder { }) } - fn render(self) -> FunctionTemplate { + fn render(self, is_method: bool) -> FunctionTemplate { let placeholder_expr = make::ext::expr_todo(); let fn_body = make::block_expr(vec![], Some(placeholder_expr)); let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None }; @@ -325,8 +326,14 @@ impl FunctionBuilder { match self.target { GeneratedFunctionTarget::BehindItem(it) => { - let indent = IndentLevel::from_node(&it); - leading_ws = format!("\n\n{}", indent); + let mut indent = IndentLevel::from_node(&it); + if is_method { + indent = indent + 1; + leading_ws = format!("{}", indent); + } else { + leading_ws = format!("\n\n{}", indent); + } + fn_def = fn_def.indent(indent); trailing_ws = String::new(); } @@ -1470,11 +1477,9 @@ fn foo() {S.bar$0();} struct S; fn foo() {S.bar();} impl S { - - -fn bar(&self) ${0:-> _} { - todo!() -} + fn bar(&self) ${0:-> _} { + todo!() + } } ", ) @@ -1516,14 +1521,12 @@ fn foo() {s::S.bar$0();} r" mod s { pub struct S; -impl S { - - - pub(crate) fn bar(&self) ${0:-> _} { - todo!() + impl S { + pub(crate) fn bar(&self) ${0:-> _} { + todo!() + } } } -} fn foo() {s::S.bar();} ", ) @@ -1550,11 +1553,9 @@ mod s { } } impl S { - - -fn bar(&self) ${0:-> _} { - todo!() -} + fn bar(&self) ${0:-> _} { + todo!() + } } ", @@ -1573,11 +1574,9 @@ fn foo() {$0S.bar();} struct S; fn foo() {S.bar();} impl S { - - -fn bar(&self) ${0:-> _} { - todo!() -} + fn bar(&self) ${0:-> _} { + todo!() + } } ", ) @@ -1595,11 +1594,9 @@ fn foo() {S::bar$0();} struct S; fn foo() {S::bar();} impl S { - - -fn bar() ${0:-> _} { - todo!() -} + fn bar() ${0:-> _} { + todo!() + } } ", ) @@ -1641,14 +1638,12 @@ fn foo() {s::S::bar$0();} r" mod s { pub struct S; -impl S { - - - pub(crate) fn bar() ${0:-> _} { - todo!() + impl S { + pub(crate) fn bar() ${0:-> _} { + todo!() + } } } -} fn foo() {s::S::bar();} ", ) @@ -1666,11 +1661,9 @@ fn foo() {$0S::bar();} struct S; fn foo() {S::bar();} impl S { - - -fn bar() ${0:-> _} { - todo!() -} + fn bar() ${0:-> _} { + todo!() + } } ", ) @@ -1845,11 +1838,9 @@ fn main() { Foo::new(); } impl Foo { - - -fn new() ${0:-> _} { - todo!() -} + fn new() ${0:-> _} { + todo!() + } } ", ) From 37ff07e1ffa0f3e59ac6c214d40ed4b44f31db3c Mon Sep 17 
00:00:00 2001 From: feniljain Date: Thu, 15 Sep 2022 19:33:19 +0530 Subject: [PATCH 02/66] fix(generate_module): generate new impl near its ADT --- .../src/handlers/generate_function.rs | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_function.rs b/crates/ide-assists/src/handlers/generate_function.rs index f67cc924409..8b67982f915 100644 --- a/crates/ide-assists/src/handlers/generate_function.rs +++ b/crates/ide-assists/src/handlers/generate_function.rs @@ -1,4 +1,4 @@ -use hir::{HasSource, HirDisplay, Module, Semantics, TypeInfo}; +use hir::{Adt, HasSource, HirDisplay, Module, Semantics, TypeInfo}; use ide_db::{ base_db::FileId, defs::{Definition, NameRefClass}, @@ -145,7 +145,8 @@ fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { return None; } let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?; - let (target, insert_offset) = get_method_target(ctx, &target_module, &impl_)?; + let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; + let function_builder = FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?; let text_range = call.syntax().text_range(); @@ -418,14 +419,13 @@ fn get_fn_target( fn get_method_target( ctx: &AssistContext<'_>, - target_module: &Module, impl_: &Option, + adt: &Adt, ) -> Option<(GeneratedFunctionTarget, TextSize)> { let target = match impl_ { Some(impl_) => next_space_for_fn_in_impl(impl_)?, None => { - next_space_for_fn_in_module(ctx.sema.db, &target_module.definition_source(ctx.sema.db))? - .1 + GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone()) } }; Some((target.clone(), get_insert_offset(&target))) @@ -444,7 +444,7 @@ fn assoc_fn_target_info( return None; } let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?; - let (target, insert_offset) = get_method_target(ctx, &module, &impl_)?; + let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?; let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None }; Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset)) } @@ -1475,12 +1475,12 @@ fn foo() {S.bar$0();} ", r" struct S; -fn foo() {S.bar();} impl S { fn bar(&self) ${0:-> _} { todo!() } } +fn foo() {S.bar();} ", ) } @@ -1547,16 +1547,16 @@ mod s { ", r" struct S; -mod s { - fn foo() { - super::S.bar(); - } -} impl S { fn bar(&self) ${0:-> _} { todo!() } } +mod s { + fn foo() { + super::S.bar(); + } +} ", ) @@ -1572,12 +1572,12 @@ fn foo() {$0S.bar();} ", r" struct S; -fn foo() {S.bar();} impl S { fn bar(&self) ${0:-> _} { todo!() } } +fn foo() {S.bar();} ", ) } @@ -1592,12 +1592,12 @@ fn foo() {S::bar$0();} ", r" struct S; -fn foo() {S::bar();} impl S { fn bar() ${0:-> _} { todo!() } } +fn foo() {S::bar();} ", ) } @@ -1659,12 +1659,12 @@ fn foo() {$0S::bar();} ", r" struct S; -fn foo() {S::bar();} impl S { fn bar() ${0:-> _} { todo!() } } +fn foo() {S::bar();} ", ) } @@ -1834,14 +1834,14 @@ fn main() { ", r" enum Foo {} -fn main() { - Foo::new(); -} impl Foo { fn new() ${0:-> _} { todo!() } } +fn main() { + Foo::new(); +} ", ) } From 2c666a08b03a16b4ecf28e87e6deae81b4bac590 Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Tue, 27 Sep 2022 17:35:50 +0200 Subject: [PATCH 03/66] Add convert_named_struct_to_tuple_struct assist --- .../convert_named_struct_to_tuple_struct.rs | 822 ++++++++++++++++++ crates/ide-assists/src/lib.rs | 2 + 2 files changed, 824 insertions(+) create mode 100644 
crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs diff --git a/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs new file mode 100644 index 00000000000..8d11e0bac94 --- /dev/null +++ b/crates/ide-assists/src/handlers/convert_named_struct_to_tuple_struct.rs @@ -0,0 +1,822 @@ +use either::Either; +use ide_db::defs::Definition; +use itertools::Itertools; +use syntax::{ + ast::{self, AstNode, HasGenericParams, HasVisibility}, + match_ast, SyntaxKind, SyntaxNode, +}; + +use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists}; + +// Assist: convert_named_struct_to_tuple_struct +// +// Converts struct with named fields to tuple struct, and analogously for enum variants with named +// fields. +// +// ``` +// struct Point$0 { x: f32, y: f32 } +// +// impl Point { +// pub fn new(x: f32, y: f32) -> Self { +// Point { x, y } +// } +// +// pub fn x(&self) -> f32 { +// self.x +// } +// +// pub fn y(&self) -> f32 { +// self.y +// } +// } +// ``` +// -> +// ``` +// struct Point(f32, f32); +// +// impl Point { +// pub fn new(x: f32, y: f32) -> Self { +// Point(x, y) +// } +// +// pub fn x(&self) -> f32 { +// self.0 +// } +// +// pub fn y(&self) -> f32 { +// self.1 +// } +// } +// ``` +pub(crate) fn convert_named_struct_to_tuple_struct( + acc: &mut Assists, + ctx: &AssistContext<'_>, +) -> Option<()> { + let strukt = ctx + .find_node_at_offset::() + .map(Either::Left) + .or_else(|| ctx.find_node_at_offset::().map(Either::Right))?; + let field_list = strukt.as_ref().either(|s| s.field_list(), |v| v.field_list())?; + let record_fields = match field_list { + ast::FieldList::RecordFieldList(it) => it, + ast::FieldList::TupleFieldList(_) => return None, + }; + let strukt_def = match &strukt { + Either::Left(s) => Either::Left(ctx.sema.to_def(s)?), + Either::Right(v) => Either::Right(ctx.sema.to_def(v)?), + }; + let target = strukt.as_ref().either(|s| s.syntax(), |v| v.syntax()).text_range(); + + acc.add( + AssistId("convert_named_struct_to_tuple_struct", AssistKind::RefactorRewrite), + "Convert to tuple struct", + target, + |edit| { + edit_field_references(ctx, edit, record_fields.fields()); + edit_struct_references(ctx, edit, strukt_def); + edit_struct_def(ctx, edit, &strukt, record_fields); + }, + ) +} + +fn edit_struct_def( + ctx: &AssistContext<'_>, + edit: &mut SourceChangeBuilder, + strukt: &Either, + record_fields: ast::RecordFieldList, +) { + let tuple_fields = record_fields + .fields() + .filter_map(|f| Some(ast::make::tuple_field(f.visibility(), f.ty()?))); + let tuple_fields = ast::make::tuple_field_list(tuple_fields); + let record_fields_text_range = record_fields.syntax().text_range(); + + edit.edit_file(ctx.file_id()); + edit.replace(record_fields_text_range, tuple_fields.syntax().text()); + + if let Either::Left(strukt) = strukt { + if let Some(w) = strukt.where_clause() { + let mut where_clause = w.to_string(); + if where_clause.ends_with(',') { + where_clause.pop(); + } + where_clause.push(';'); + + edit.delete(w.syntax().text_range()); + edit.insert(record_fields_text_range.end(), ast::make::tokens::single_newline().text()); + edit.insert(record_fields_text_range.end(), where_clause); + edit.insert(record_fields_text_range.end(), ast::make::tokens::single_newline().text()); + + if let Some(tok) = strukt + .generic_param_list() + .and_then(|l| l.r_angle_token()) + .and_then(|tok| tok.next_token()) + .filter(|tok| tok.kind() == 
SyntaxKind::WHITESPACE) + { + edit.delete(tok.text_range()); + } + } else { + edit.insert(record_fields_text_range.end(), ";"); + } + } + + if let Some(tok) = record_fields + .l_curly_token() + .and_then(|tok| tok.prev_token()) + .filter(|tok| tok.kind() == SyntaxKind::WHITESPACE) + { + edit.delete(tok.text_range()) + } +} + +fn edit_struct_references( + ctx: &AssistContext<'_>, + edit: &mut SourceChangeBuilder, + strukt: Either, +) { + let strukt_def = match strukt { + Either::Left(s) => Definition::Adt(hir::Adt::Struct(s)), + Either::Right(v) => Definition::Variant(v), + }; + let usages = strukt_def.usages(&ctx.sema).include_self_refs().all(); + + let edit_node = |edit: &mut SourceChangeBuilder, node: SyntaxNode| -> Option<()> { + match_ast! { + match node { + ast::RecordPat(record_struct_pat) => { + edit.replace( + record_struct_pat.syntax().text_range(), + ast::make::tuple_struct_pat( + record_struct_pat.path()?, + record_struct_pat + .record_pat_field_list()? + .fields() + .filter_map(|pat| pat.pat()) + ) + .to_string() + ); + }, + ast::RecordExpr(record_expr) => { + let path = record_expr.path()?; + let args = record_expr + .record_expr_field_list()? + .fields() + .filter_map(|f| f.expr()) + .join(", "); + + edit.replace(record_expr.syntax().text_range(), format!("{path}({args})")); + }, + _ => return None, + } + } + Some(()) + }; + + for (file_id, refs) in usages { + edit.edit_file(file_id); + for r in refs { + for node in r.name.syntax().ancestors() { + if edit_node(edit, node).is_some() { + break; + } + } + } + } +} + +fn edit_field_references( + ctx: &AssistContext<'_>, + edit: &mut SourceChangeBuilder, + fields: impl Iterator, +) { + for (index, field) in fields.enumerate() { + let field = match ctx.sema.to_def(&field) { + Some(it) => it, + None => continue, + }; + let def = Definition::Field(field); + let usages = def.usages(&ctx.sema).all(); + for (file_id, refs) in usages { + edit.edit_file(file_id); + for r in refs { + if let Some(name_ref) = r.name.as_name_ref() { + // Only edit the field reference if it's part of a `.field` access + if name_ref.syntax().parent().and_then(ast::FieldExpr::cast).is_some() { + edit.replace(name_ref.syntax().text_range(), index.to_string()); + } + } + } + } + } +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn not_applicable_other_than_record_struct() { + check_assist_not_applicable(convert_named_struct_to_tuple_struct, r#"struct Foo$0(u32)"#); + check_assist_not_applicable(convert_named_struct_to_tuple_struct, r#"struct Foo$0;"#); + } + + #[test] + fn convert_simple_struct() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner; +struct A$0 { inner: Inner } + +impl A { + fn new(inner: Inner) -> A { + A { inner } + } + + fn new_with_default() -> A { + A::new(Inner) + } + + fn into_inner(self) -> Inner { + self.inner + } +}"#, + r#" +struct Inner; +struct A(Inner); + +impl A { + fn new(inner: Inner) -> A { + A(inner) + } + + fn new_with_default() -> A { + A::new(Inner) + } + + fn into_inner(self) -> Inner { + self.0 + } +}"#, + ); + } + + #[test] + fn convert_struct_referenced_via_self_kw() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner; +struct A$0 { inner: Inner } + +impl A { + fn new(inner: Inner) -> Self { + Self { inner } + } + + fn new_with_default() -> Self { + Self::new(Inner) + } + + fn into_inner(self) -> Inner { + self.inner + } +}"#, + r#" +struct Inner; +struct A(Inner); + +impl A { + fn new(inner: 
Inner) -> Self { + Self(inner) + } + + fn new_with_default() -> Self { + Self::new(Inner) + } + + fn into_inner(self) -> Inner { + self.0 + } +}"#, + ); + } + + #[test] + fn convert_destructured_struct() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner; +struct A$0 { inner: Inner } + +impl A { + fn into_inner(self) -> Inner { + let A { inner: a } = self; + a + } + + fn into_inner_via_self(self) -> Inner { + let Self { inner } = self; + inner + } +}"#, + r#" +struct Inner; +struct A(Inner); + +impl A { + fn into_inner(self) -> Inner { + let A(a) = self; + a + } + + fn into_inner_via_self(self) -> Inner { + let Self(inner) = self; + inner + } +}"#, + ); + } + + #[test] + fn convert_struct_with_visibility() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct A$0 { + pub first: u32, + pub(crate) second: u64 +} + +impl A { + fn new() -> A { + A { first: 42, second: 42 } + } + + fn into_first(self) -> u32 { + self.first + } + + fn into_second(self) -> u64 { + self.second + } +}"#, + r#" +struct A(pub u32, pub(crate) u64); + +impl A { + fn new() -> A { + A(42, 42) + } + + fn into_first(self) -> u32 { + self.0 + } + + fn into_second(self) -> u64 { + self.1 + } +}"#, + ); + } + + #[test] + fn convert_struct_with_wrapped_references() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner$0 { uint: u32 } +struct Outer { inner: Inner } + +impl Outer { + fn new() -> Self { + Self { inner: Inner { uint: 42 } } + } + + fn into_inner(self) -> u32 { + self.inner.uint + } + + fn into_inner_destructed(self) -> u32 { + let Outer { inner: Inner { uint: x } } = self; + x + } +}"#, + r#" +struct Inner(u32); +struct Outer { inner: Inner } + +impl Outer { + fn new() -> Self { + Self { inner: Inner(42) } + } + + fn into_inner(self) -> u32 { + self.inner.0 + } + + fn into_inner_destructed(self) -> u32 { + let Outer { inner: Inner(x) } = self; + x + } +}"#, + ); + + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Inner { uint: u32 } +struct Outer$0 { inner: Inner } + +impl Outer { + fn new() -> Self { + Self { inner: Inner { uint: 42 } } + } + + fn into_inner(self) -> u32 { + self.inner.uint + } + + fn into_inner_destructed(self) -> u32 { + let Outer { inner: Inner { uint: x } } = self; + x + } +}"#, + r#" +struct Inner { uint: u32 } +struct Outer(Inner); + +impl Outer { + fn new() -> Self { + Self(Inner { uint: 42 }) + } + + fn into_inner(self) -> u32 { + self.0.uint + } + + fn into_inner_destructed(self) -> u32 { + let Outer(Inner { uint: x }) = self; + x + } +}"#, + ); + } + + #[test] + fn convert_struct_with_multi_file_references() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +//- /main.rs +struct Inner; +struct A$0 { inner: Inner } + +mod foo; + +//- /foo.rs +use crate::{A, Inner}; +fn f() { + let a = A { inner: Inner }; +} +"#, + r#" +//- /main.rs +struct Inner; +struct A(Inner); + +mod foo; + +//- /foo.rs +use crate::{A, Inner}; +fn f() { + let a = A(Inner); +} +"#, + ); + } + + #[test] + fn convert_struct_with_where_clause() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +struct Wrap$0 +where + T: Display, +{ field1: T } +"#, + r#" +struct Wrap(T) +where + T: Display; + +"#, + ); + } + + #[test] + fn not_applicable_other_than_record_variant() { + check_assist_not_applicable( + convert_named_struct_to_tuple_struct, + r#"enum Enum { Variant$0(usize) };"#, + ); + check_assist_not_applicable( + convert_named_struct_to_tuple_struct, + r#"enum Enum { Variant$0 }"#, + ); + } + + #[test] + 
fn convert_simple_variant() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +enum A { + $0Variant { field1: usize }, +} + +impl A { + fn new(value: usize) -> A { + A::Variant { field1: value } + } + + fn new_with_default() -> A { + A::new(Default::default()) + } + + fn value(self) -> usize { + match self { + A::Variant { field1: value } => value, + } + } +}"#, + r#" +enum A { + Variant(usize), +} + +impl A { + fn new(value: usize) -> A { + A::Variant(value) + } + + fn new_with_default() -> A { + A::new(Default::default()) + } + + fn value(self) -> usize { + match self { + A::Variant(value) => value, + } + } +}"#, + ); + } + + #[test] + fn convert_variant_referenced_via_self_kw() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +enum A { + $0Variant { field1: usize }, +} + +impl A { + fn new(value: usize) -> A { + Self::Variant { field1: value } + } + + fn new_with_default() -> A { + Self::new(Default::default()) + } + + fn value(self) -> usize { + match self { + Self::Variant { field1: value } => value, + } + } +}"#, + r#" +enum A { + Variant(usize), +} + +impl A { + fn new(value: usize) -> A { + Self::Variant(value) + } + + fn new_with_default() -> A { + Self::new(Default::default()) + } + + fn value(self) -> usize { + match self { + Self::Variant(value) => value, + } + } +}"#, + ); + } + + #[test] + fn convert_destructured_variant() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +enum A { + $0Variant { field1: usize }, +} + +impl A { + fn into_inner(self) -> usize { + let A::Variant { field1: first } = self; + first + } + + fn into_inner_via_self(self) -> usize { + let Self::Variant { field1: first } = self; + first + } +}"#, + r#" +enum A { + Variant(usize), +} + +impl A { + fn into_inner(self) -> usize { + let A::Variant(first) = self; + first + } + + fn into_inner_via_self(self) -> usize { + let Self::Variant(first) = self; + first + } +}"#, + ); + } + + #[test] + fn convert_variant_with_wrapped_references() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +enum Inner { + $0Variant { field1: usize }, +} +enum Outer { + Variant(Inner), +} + +impl Outer { + fn new() -> Self { + Self::Variant(Inner::Variant { field1: 42 }) + } + + fn into_inner_destructed(self) -> u32 { + let Outer::Variant(Inner::Variant { field1: x }) = self; + x + } +}"#, + r#" +enum Inner { + Variant(usize), +} +enum Outer { + Variant(Inner), +} + +impl Outer { + fn new() -> Self { + Self::Variant(Inner::Variant(42)) + } + + fn into_inner_destructed(self) -> u32 { + let Outer::Variant(Inner::Variant(x)) = self; + x + } +}"#, + ); + + check_assist( + convert_named_struct_to_tuple_struct, + r#" +enum Inner { + Variant(usize), +} +enum Outer { + $0Variant { field1: Inner }, +} + +impl Outer { + fn new() -> Self { + Self::Variant { field1: Inner::Variant(42) } + } + + fn into_inner_destructed(self) -> u32 { + let Outer::Variant { field1: Inner::Variant(x) } = self; + x + } +}"#, + r#" +enum Inner { + Variant(usize), +} +enum Outer { + Variant(Inner), +} + +impl Outer { + fn new() -> Self { + Self::Variant(Inner::Variant(42)) + } + + fn into_inner_destructed(self) -> u32 { + let Outer::Variant(Inner::Variant(x)) = self; + x + } +}"#, + ); + } + + #[test] + fn convert_variant_with_multi_file_references() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +//- /main.rs +struct Inner; +enum A { + $0Variant { field1: Inner }, +} + +mod foo; + +//- /foo.rs +use crate::{A, Inner}; +fn f() { + let a = A::Variant { field1: Inner }; +} +"#, + r#" +//- 
/main.rs +struct Inner; +enum A { + Variant(Inner), +} + +mod foo; + +//- /foo.rs +use crate::{A, Inner}; +fn f() { + let a = A::Variant(Inner); +} +"#, + ); + } + + #[test] + fn convert_directly_used_variant() { + check_assist( + convert_named_struct_to_tuple_struct, + r#" +//- /main.rs +struct Inner; +enum A { + $0Variant { field1: Inner }, +} + +mod foo; + +//- /foo.rs +use crate::{A::Variant, Inner}; +fn f() { + let a = Variant { field1: Inner }; +} +"#, + r#" +//- /main.rs +struct Inner; +enum A { + Variant(Inner), +} + +mod foo; + +//- /foo.rs +use crate::{A::Variant, Inner}; +fn f() { + let a = Variant(Inner); +} +"#, + ); + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 82bcc3dfa5d..a07318cefad 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -121,6 +121,7 @@ mod handlers { mod convert_iter_for_each_to_for; mod convert_let_else_to_match; mod convert_tuple_struct_to_named_struct; + mod convert_named_struct_to_tuple_struct; mod convert_to_guarded_return; mod convert_two_arm_bool_match_to_matches_macro; mod convert_while_to_loop; @@ -218,6 +219,7 @@ mod handlers { convert_iter_for_each_to_for::convert_iter_for_each_to_for, convert_iter_for_each_to_for::convert_for_loop_with_for_each, convert_let_else_to_match::convert_let_else_to_match, + convert_named_struct_to_tuple_struct::convert_named_struct_to_tuple_struct, convert_to_guarded_return::convert_to_guarded_return, convert_tuple_struct_to_named_struct::convert_tuple_struct_to_named_struct, convert_two_arm_bool_match_to_matches_macro::convert_two_arm_bool_match_to_matches_macro, From a3d79b517273e333127cb6edb7ea001153df800c Mon Sep 17 00:00:00 2001 From: Jonas Platte Date: Tue, 27 Sep 2022 20:40:51 +0200 Subject: [PATCH 04/66] Update outside test and generated code --- crates/ide-assists/src/tests.rs | 1 + crates/ide-assists/src/tests/generated.rs | 41 +++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/crates/ide-assists/src/tests.rs b/crates/ide-assists/src/tests.rs index a3bb66e379e..f7f2417d074 100644 --- a/crates/ide-assists/src/tests.rs +++ b/crates/ide-assists/src/tests.rs @@ -232,6 +232,7 @@ fn assist_order_field_struct() { assert_eq!(assists.next().expect("expected assist").label, "Generate a getter method"); assert_eq!(assists.next().expect("expected assist").label, "Generate a mut getter method"); assert_eq!(assists.next().expect("expected assist").label, "Generate a setter method"); + assert_eq!(assists.next().expect("expected assist").label, "Convert to tuple struct"); assert_eq!(assists.next().expect("expected assist").label, "Add `#[derive]`"); } diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index d403f86c6d8..2c4000efe0f 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -407,6 +407,47 @@ fn main() { ) } +#[test] +fn doctest_convert_named_struct_to_tuple_struct() { + check_doc_test( + "convert_named_struct_to_tuple_struct", + r#####" +struct Point$0 { x: f32, y: f32 } + +impl Point { + pub fn new(x: f32, y: f32) -> Self { + Point { x, y } + } + + pub fn x(&self) -> f32 { + self.x + } + + pub fn y(&self) -> f32 { + self.y + } +} +"#####, + r#####" +struct Point(f32, f32); + +impl Point { + pub fn new(x: f32, y: f32) -> Self { + Point(x, y) + } + + pub fn x(&self) -> f32 { + self.0 + } + + pub fn y(&self) -> f32 { + self.1 + } +} +"#####, + ) +} + #[test] fn doctest_convert_to_guarded_return() { check_doc_test( From 
28c21bc8a8044ae232558a4d359045008df63597 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 6 Oct 2022 14:31:06 +0200 Subject: [PATCH 05/66] internal: Allow minicore flags specification to be order independent --- crates/test-utils/src/fixture.rs | 53 +++++++++++++++--------------- crates/test-utils/src/minicore.rs | 54 +++++++++++++++---------------- 2 files changed, 54 insertions(+), 53 deletions(-) diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index 8c806e7925b..c824f5af725 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -61,6 +61,8 @@ //! " //! ``` +use std::iter; + use rustc_hash::FxHashMap; use stdx::trim_indent; @@ -259,7 +261,7 @@ impl MiniCore { if res.has_flag(entry) { panic!("duplicate minicore flag: {:?}", entry); } - res.activated_flags.push(entry.to_string()); + res.activated_flags.push(entry.to_owned()); } res @@ -273,35 +275,34 @@ impl MiniCore { let raw_mini_core = include_str!("./minicore.rs"); let mut lines = raw_mini_core.split_inclusive('\n'); - let mut parsing_flags = false; let mut implications = Vec::new(); // Parse `//!` preamble and extract flags and dependencies. - for line in lines.by_ref() { - let line = match line.strip_prefix("//!") { - Some(it) => it, - None => { - assert!(line.trim().is_empty()); - break; - } - }; - - if parsing_flags { - let (flag, deps) = line.split_once(':').unwrap(); - let flag = flag.trim(); - self.valid_flags.push(flag.to_string()); - for dep in deps.split(", ") { - let dep = dep.trim(); - if !dep.is_empty() { - self.assert_valid_flag(dep); - implications.push((flag, dep)); - } - } + let trim_doc: fn(&str) -> Option<&str> = |line| match line.strip_prefix("//!") { + Some(it) => Some(it), + None => { + assert!(line.trim().is_empty(), "expected empty line after minicore header"); + None } + }; + for line in lines + .by_ref() + .map_while(trim_doc) + .skip_while(|line| !line.contains("Available flags:")) + .skip(1) + { + let (flag, deps) = line.split_once(':').unwrap(); + let flag = flag.trim(); - if line.contains("Available flags:") { - parsing_flags = true; - } + self.valid_flags.push(flag.to_string()); + implications.extend( + iter::repeat(flag) + .zip(deps.split(", ").map(str::trim).filter(|dep| !dep.is_empty())), + ); + } + + for (_, dep) in &implications { + self.assert_valid_flag(dep); } for flag in &self.activated_flags { @@ -332,7 +333,7 @@ impl MiniCore { } if let Some(region) = trimmed.strip_prefix("// endregion:") { let prev = active_regions.pop().unwrap(); - assert_eq!(prev, region); + assert_eq!(prev, region, "unbalanced region pairs"); continue; } diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 10386b5b7bc..69d2e62b256 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -8,36 +8,36 @@ //! We then strip all the code marked with other flags. //! //! Available flags: -//! sized: -//! unsize: sized -//! coerce_unsized: unsize -//! slice: -//! range: -//! deref: sized -//! deref_mut: deref -//! index: sized -//! fn: -//! try: -//! pin: -//! future: pin -//! option: -//! result: -//! iterator: option -//! iterators: iterator, fn -//! default: sized -//! hash: -//! clone: sized -//! copy: clone -//! from: sized -//! eq: sized -//! ord: eq, option -//! derive: -//! fmt: result -//! bool_impl: option, fn //! add: //! as_ref: sized +//! bool_impl: option, fn +//! clone: sized +//! coerce_unsized: unsize +//! copy: clone +//! default: sized +//! deref_mut: deref +//! 
deref: sized +//! derive: //! drop: +//! eq: sized +//! fmt: result +//! fn: +//! from: sized +//! future: pin //! generator: pin +//! hash: +//! index: sized +//! iterator: option +//! iterators: iterator, fn +//! option: +//! ord: eq, option +//! pin: +//! range: +//! result: +//! sized: +//! slice: +//! try: +//! unsize: sized pub mod marker { // region:sized @@ -584,7 +584,7 @@ pub mod iter { } } } - pub use self::adapters::{Take, FilterMap}; + pub use self::adapters::{FilterMap, Take}; mod sources { mod repeat { From 75f4c54d8c1304966dff2e1d45cff88ac96a9d0f Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Wed, 5 Oct 2022 00:02:03 +0200 Subject: [PATCH 06/66] Add stub for cargo environment variables auto completion --- crates/ide-completion/src/completions.rs | 1 + .../src/completions/env_vars.rs | 60 +++++++++++++++++++ crates/ide-completion/src/lib.rs | 1 + 3 files changed, 62 insertions(+) create mode 100644 crates/ide-completion/src/completions/env_vars.rs diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index 97b90c62dd7..296dfc14250 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -19,6 +19,7 @@ pub(crate) mod snippet; pub(crate) mod r#type; pub(crate) mod use_; pub(crate) mod vis; +pub(crate) mod env_vars; use std::iter; diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs new file mode 100644 index 00000000000..6cfbd0f922f --- /dev/null +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -0,0 +1,60 @@ +//! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html) + +use syntax::{ast, AstToken, AstNode, TextRange, TextSize}; + +use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; + +use super::Completions; + +pub(crate) fn complete_cargo_env_vars( + acc: &mut Completions, + ctx: &CompletionContext<'_>, + original: &ast::String +) { + if !is_env_macro(original) { + return; + } + + let start = ctx.original_token.text_range().start() + TextSize::from(1); + let cursor = ctx.position.offset; + + CompletionItem::new(CompletionItemKind::Binding, TextRange::new(start, cursor), "CARGO").add_to(acc); +} + +fn is_env_macro(string: &ast::String) -> bool { + //todo: replace copypaste from format_string with separate function + (|| { + let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?; + let name = macro_call.path()?.segment()?.name_ref()?; + + if !matches!(name.text().as_str(), + "env" | "option_env") { + return None; + } + + + Some(()) + })() + .is_some() +} + +#[cfg(test)] +mod tests { + use expect_test::{expect, Expect}; + use crate::tests::{check_edit}; + + #[test] + fn completes_env_variables() { + check_edit("CARGO", + r#" + fn main() { + let foo = env!("CA$0); + } + "# + ,r#" + fn main() { + let foo = env!("CARGO); + } + "#) + } +} \ No newline at end of file diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index 8d21f4fce0a..cd9da0d3dcf 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -183,6 +183,7 @@ pub fn completions( CompletionAnalysis::String { original, expanded: Some(expanded) } => { completions::extern_abi::complete_extern_abi(acc, ctx, expanded); completions::format_string::format_string(acc, ctx, original, expanded); + completions::env_vars::complete_cargo_env_vars(acc, ctx, 
original) } CompletionAnalysis::UnexpandedAttrTT { colon_prefix, From 07621ce09676b5a2267c8305ba85b3d37e1403f4 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:25:12 +0200 Subject: [PATCH 07/66] Use expanded version of text for env var completion --- crates/ide-completion/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index cd9da0d3dcf..c5718ec40f5 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -183,7 +183,7 @@ pub fn completions( CompletionAnalysis::String { original, expanded: Some(expanded) } => { completions::extern_abi::complete_extern_abi(acc, ctx, expanded); completions::format_string::format_string(acc, ctx, original, expanded); - completions::env_vars::complete_cargo_env_vars(acc, ctx, original) + completions::env_vars::complete_cargo_env_vars(acc, expanded); } CompletionAnalysis::UnexpandedAttrTT { colon_prefix, From 08b0c92105fc9a0139a30f8215c7ca44e55ac7b0 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:26:49 +0200 Subject: [PATCH 08/66] Add helper method to get a macro name from passed string --- crates/ide-db/src/syntax_helpers/node_ext.rs | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index b890e2b58df..78d6bd008af 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -2,8 +2,8 @@ use itertools::Itertools; use parser::T; use syntax::{ - ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind}, - AstNode, Preorder, RustLanguage, WalkEvent, + ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind, NameRef}, + AstNode, Preorder, RustLanguage, WalkEvent, AstToken, }; pub fn expr_as_name_ref(expr: &ast::Expr) -> Option { @@ -457,3 +457,10 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option Option { + let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?; + let name = macro_call.path()?.segment()?.name_ref()?; + + Some(name) +} \ No newline at end of file From ddf68ea7c330ec4fef8564062af2a2e15dd5f45e Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:27:27 +0200 Subject: [PATCH 09/66] Add const list of cargo-defined env variables with descriptions --- .../src/completions/env_vars.rs | 21 +++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 6cfbd0f922f..eba39bba3d5 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -5,6 +5,27 @@ use syntax::{ast, AstToken, AstNode, TextRange, TextSize}; use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; use super::Completions; +const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ +("CARGO","Path to the cargo binary performing the build"), +("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"), +("CARGO_PKG_VERSION","The full version of your package"), +("CARGO_PKG_VERSION_MAJOR","The major version of your package"), +("CARGO_PKG_VERSION_MINOR","The minor version of your package"), +("CARGO_PKG_VERSION_PATCH","The patch version of your package"), +("CARGO_PKG_VERSION_PRE","The 
pre-release version of your package"), +("CARGO_PKG_AUTHORS","Colon separated list of authors from the manifest of your package"), +("CARGO_PKG_NAME","The name of your package"), +("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"), +("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"), +("CARGO_PKG_REPOSITORY","The repository from the manifest of your package"), +("CARGO_PKG_LICENSE","The license from the manifest of your package"), +("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"), +("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"), +("CARGO_CRATE_NAME","The name of the crate that is currently being compiled"), +("CARGO_BIN_NAME","The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe"), +("CARGO_PRIMARY_PACKAGE","This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)"), +("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code") +]; pub(crate) fn complete_cargo_env_vars( acc: &mut Completions, From bc7080884c92148fdb47701067d1f1dbdef4ed1d Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:27:53 +0200 Subject: [PATCH 10/66] Add tests for env var completion --- .../src/completions/env_vars.rs | 54 +++++++++++++++---- 1 file changed, 44 insertions(+), 10 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index eba39bba3d5..43d67f093e9 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -61,21 +61,55 @@ fn is_env_macro(string: &ast::String) -> bool { #[cfg(test)] mod tests { - use expect_test::{expect, Expect}; - use crate::tests::{check_edit}; + use crate::tests::{check_edit, completion_list}; + + fn check(macro_name: &str) { + check_edit("CARGO_BIN_NAME",&format!(r#" + fn main() {{ + let foo = {}!("CAR$0"); + }} + "#, macro_name), &format!(r#" + fn main() {{ + let foo = {}!("CARGO_BIN_NAME"); + }} + "#, macro_name)); + } + #[test] + fn completes_env_variable_in_env() { + check("env") + } #[test] - fn completes_env_variables() { - check_edit("CARGO", - r#" + fn completes_env_variable_in_option_env() { + check("option_env"); + } + + #[test] + fn doesnt_complete_in_random_strings() { + let fixture = r#" fn main() { - let foo = env!("CA$0); + let foo = "CA$0"; } - "# - ,r#" + "#; + + let completions = completion_list(fixture); + assert!(completions.is_empty(), "Completions weren't empty: {}", completions); + } + + #[test] + fn doesnt_complete_in_random_macro() { + let fixture = r#" + macro_rules! 
bar { + ($($arg:tt)*) => { 0 } + } + fn main() { - let foo = env!("CARGO); + let foo = bar!("CA$0"); + } - "#) + "#; + + let completions = completion_list(fixture); + assert!(completions.is_empty(), "Completions weren't empty: {}", completions); } } \ No newline at end of file From 55c5014b760a5d4bf3c6dc6a19818278ec83816e Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:28:33 +0200 Subject: [PATCH 11/66] Replace if with option, add detail for each env variable completion --- .../src/completions/env_vars.rs | 47 ++++++++----------- 1 file changed, 20 insertions(+), 27 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 43d67f093e9..e2e0bc142c0 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -1,8 +1,8 @@ //! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html) +use ide_db::syntax_helpers::node_ext::get_outer_macro_name; +use syntax::ast::{self, IsString}; -use syntax::{ast, AstToken, AstNode, TextRange, TextSize}; - -use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; +use crate::{CompletionItem, CompletionItemKind}; use super::Completions; const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ @@ -29,34 +29,27 @@ const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ pub(crate) fn complete_cargo_env_vars( acc: &mut Completions, - ctx: &CompletionContext<'_>, - original: &ast::String -) { - if !is_env_macro(original) { - return; - } + expanded: &ast::String, +) -> Option<()> { + guard_env_macro(expanded)?; + let range = expanded.text_range_between_quotes()?; - let start = ctx.original_token.text_range().start() + TextSize::from(1); - let cursor = ctx.position.offset; + CARGO_DEFINED_VARS.iter().for_each(|(var, detail)| { + let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, *var); + item.detail(*detail); + item.add_to(acc); + }); - CompletionItem::new(CompletionItemKind::Binding, TextRange::new(start, cursor), "CARGO").add_to(acc); + Some(()) } -fn is_env_macro(string: &ast::String) -> bool { - //todo: replace copypaste from format_string with separate function - (|| { - let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?; - let name = macro_call.path()?.segment()?.name_ref()?; +fn guard_env_macro(string: &ast::String) -> Option<()> { + let name = get_outer_macro_name(string)?; + if !matches!(name.text().as_str(), "env" | "option_env") { + return None; + } - if !matches!(name.text().as_str(), - "env" | "option_env") { - return None; - } - - - Some(()) - })() - .is_some() + Some(()) } #[cfg(test)] @@ -112,4 +105,4 @@ mod tests { let completions = completion_list(fixture); assert!(completions.is_empty(), "Completions weren't empty: {}", completions); } -} \ No newline at end of file +} From 386f46a5a1bff654fc8b29ea84dcd18c29c75c18 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:29:23 +0200 Subject: [PATCH 12/66] Use helper method in is_format_string --- crates/ide-db/src/syntax_helpers/format_string.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/ide-db/src/syntax_helpers/format_string.rs b/crates/ide-db/src/syntax_helpers/format_string.rs index f48a5700866..894a4b0fbb3 100644 --- a/crates/ide-db/src/syntax_helpers/format_string.rs +++ 
b/crates/ide-db/src/syntax_helpers/format_string.rs @@ -1,9 +1,10 @@ //! Tools to work with format string literals for the `format_args!` family of macros. use syntax::{ - ast::{self, IsString}, - AstNode, AstToken, TextRange, TextSize, + ast::{self, IsString}, TextRange, TextSize, }; +use super::node_ext::get_outer_macro_name; + pub fn is_format_string(string: &ast::String) -> bool { // Check if `string` is a format string argument of a macro invocation. // `string` is a string literal, mapped down into the innermost macro expansion. @@ -14,8 +15,7 @@ pub fn is_format_string(string: &ast::String) -> bool { // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format // strings. It still fails for `concat!("{", "}")`, but that is rare. (|| { - let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?; - let name = macro_call.path()?.segment()?.name_ref()?; + let name = get_outer_macro_name(string)?; if !matches!( name.text().as_str(), From c660a5ce3444a8c84eeb3afe8ea7cdf12b025e25 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:42:31 +0200 Subject: [PATCH 13/66] Remove unnecessary dereference --- crates/ide-completion/src/completions/env_vars.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index e2e0bc142c0..3c573a5f984 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -35,7 +35,7 @@ pub(crate) fn complete_cargo_env_vars( let range = expanded.text_range_between_quotes()?; CARGO_DEFINED_VARS.iter().for_each(|(var, detail)| { - let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, *var); + let mut item = CompletionItem::new(CompletionItemKind::Keyword, range, var); item.detail(*detail); item.add_to(acc); }); From 8a92910f9765689a6422de42a208724f8d3935e8 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 16:53:17 +0200 Subject: [PATCH 14/66] Formatting --- .../src/completions/env_vars.rs | 21 ++++++++++++------- .../src/syntax_helpers/format_string.rs | 3 ++- crates/ide-db/src/syntax_helpers/node_ext.rs | 6 +++--- 3 files changed, 19 insertions(+), 11 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 3c573a5f984..809f6d4b7c1 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -27,10 +27,7 @@ const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ ("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. 
Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code") ]; -pub(crate) fn complete_cargo_env_vars( - acc: &mut Completions, - expanded: &ast::String, -) -> Option<()> { +pub(crate) fn complete_cargo_env_vars(acc: &mut Completions, expanded: &ast::String) -> Option<()> { guard_env_macro(expanded)?; let range = expanded.text_range_between_quotes()?; @@ -57,15 +54,25 @@ mod tests { use crate::tests::{check_edit, completion_list}; fn check(macro_name: &str) { - check_edit("CARGO_BIN_NAME",&format!(r#" + check_edit( + "CARGO_BIN_NAME", + &format!( + r#" fn main() {{ let foo = {}!("CAR$0"); }} - "#, macro_name), &format!(r#" + "#, + macro_name + ), + &format!( + r#" fn main() {{ let foo = {}!("CARGO_BIN_NAME"); }} - "#, macro_name)); + "#, + macro_name + ), + ); } #[test] fn completes_env_variable_in_env() { diff --git a/crates/ide-db/src/syntax_helpers/format_string.rs b/crates/ide-db/src/syntax_helpers/format_string.rs index 894a4b0fbb3..8836167630a 100644 --- a/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/crates/ide-db/src/syntax_helpers/format_string.rs @@ -1,6 +1,7 @@ //! Tools to work with format string literals for the `format_args!` family of macros. use syntax::{ - ast::{self, IsString}, TextRange, TextSize, + ast::{self, IsString}, + TextRange, TextSize, }; use super::node_ext::get_outer_macro_name; diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index 78d6bd008af..5265b3d1615 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -2,8 +2,8 @@ use itertools::Itertools; use parser::T; use syntax::{ - ast::{self, HasLoopBody, PathSegmentKind, VisibilityKind, NameRef}, - AstNode, Preorder, RustLanguage, WalkEvent, AstToken, + ast::{self, HasLoopBody, NameRef, PathSegmentKind, VisibilityKind}, + AstNode, AstToken, Preorder, RustLanguage, WalkEvent, }; pub fn expr_as_name_ref(expr: &ast::Expr) -> Option { @@ -463,4 +463,4 @@ pub fn get_outer_macro_name(string: &ast::String) -> Option { let name = macro_call.path()?.segment()?.name_ref()?; Some(name) -} \ No newline at end of file +} From f458ea15d637db94d1e95c595aab6c1d7f3937e0 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 21:34:19 +0200 Subject: [PATCH 15/66] Make helper method less specific --- crates/ide-db/src/syntax_helpers/format_string.rs | 4 ++-- crates/ide-db/src/syntax_helpers/node_ext.rs | 8 +++----- 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/crates/ide-db/src/syntax_helpers/format_string.rs b/crates/ide-db/src/syntax_helpers/format_string.rs index 8836167630a..b3e227ddd27 100644 --- a/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/crates/ide-db/src/syntax_helpers/format_string.rs @@ -4,7 +4,7 @@ use syntax::{ TextRange, TextSize, }; -use super::node_ext::get_outer_macro_name; +use super::node_ext::get_outer_macro; pub fn is_format_string(string: &ast::String) -> bool { // Check if `string` is a format string argument of a macro invocation. @@ -16,7 +16,7 @@ pub fn is_format_string(string: &ast::String) -> bool { // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format // strings. It still fails for `concat!("{", "}")`, but that is rare. 
(|| { - let name = get_outer_macro_name(string)?; + let name = get_outer_macro(string)?.path()?.segment()?.name_ref()?; if !matches!( name.text().as_str(), diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index 5265b3d1615..9cfcdfb77b9 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -2,7 +2,7 @@ use itertools::Itertools; use parser::T; use syntax::{ - ast::{self, HasLoopBody, NameRef, PathSegmentKind, VisibilityKind}, + ast::{self, HasLoopBody, MacroCall, PathSegmentKind, VisibilityKind}, AstNode, AstToken, Preorder, RustLanguage, WalkEvent, }; @@ -458,9 +458,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option Option { +pub fn get_outer_macro(string: &ast::String) -> Option { let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?; - let name = macro_call.path()?.segment()?.name_ref()?; - - Some(name) + Some(macro_call) } From e2d3c1506f10861f9c864e3611fa41387c11546b Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 21:35:01 +0200 Subject: [PATCH 16/66] Restrict auto-completion for only built-in macros --- .../src/completions/env_vars.rs | 61 +++++++++++++++---- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 809f6d4b7c1..5483fdf31a2 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -1,8 +1,9 @@ //! Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html) -use ide_db::syntax_helpers::node_ext::get_outer_macro_name; +use hir::Semantics; +use ide_db::{syntax_helpers::node_ext::get_outer_macro, RootDatabase}; use syntax::ast::{self, IsString}; -use crate::{CompletionItem, CompletionItemKind}; +use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; use super::Completions; const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ @@ -27,8 +28,12 @@ const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ ("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. 
Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code") ]; -pub(crate) fn complete_cargo_env_vars(acc: &mut Completions, expanded: &ast::String) -> Option<()> { - guard_env_macro(expanded)?; +pub(crate) fn complete_cargo_env_vars( + acc: &mut Completions, + ctx: &CompletionContext<'_>, + expanded: &ast::String, +) -> Option<()> { + guard_env_macro(expanded, &ctx.sema, &ctx.db)?; let range = expanded.text_range_between_quotes()?; CARGO_DEFINED_VARS.iter().for_each(|(var, detail)| { @@ -40,13 +45,19 @@ pub(crate) fn complete_cargo_env_vars(acc: &mut Completions, expanded: &ast::Str Some(()) } -fn guard_env_macro(string: &ast::String) -> Option<()> { - let name = get_outer_macro_name(string)?; - if !matches!(name.text().as_str(), "env" | "option_env") { - return None; - } +fn guard_env_macro( + string: &ast::String, + semantics: &Semantics<'_, RootDatabase>, + db: &RootDatabase, +) -> Option<()> { + let call = get_outer_macro(string)?; + let name = call.path()?.segment()?.name_ref()?; + let makro = semantics.resolve_macro_call(&call)?; - Some(()) + match name.text().as_str() { + "env" | "option_env" if makro.kind(db) == hir::MacroKind::BuiltIn => Some(()), + _ => None, + } } #[cfg(test)] @@ -58,19 +69,29 @@ mod tests { "CARGO_BIN_NAME", &format!( r#" + #[rustc_builtin_macro] + macro_rules! {} {{ + ($var:literal) => {{ 0 }} + }} + fn main() {{ let foo = {}!("CAR$0"); }} "#, - macro_name + macro_name, macro_name ), &format!( r#" + #[rustc_builtin_macro] + macro_rules! {} {{ + ($var:literal) => {{ 0 }} + }} + fn main() {{ let foo = {}!("CARGO_BIN_NAME"); }} "#, - macro_name + macro_name, macro_name ), ); } @@ -112,4 +133,20 @@ mod tests { let completions = completion_list(fixture); assert!(completions.is_empty(), "Completions weren't empty: {}", completions); } + + #[test] + fn doesnt_complete_for_shadowed_macro() { + let fixture = r#" + macro_rules! 
env { + ($var:literal) => { 0 } + } + + fn main() { + let foo = env!("CA$0"); + } + "#; + + let completions = completion_list(fixture); + assert!(completions.is_empty(), "Completions weren't empty: {}", completions) + } } From 3732d159b2e7dec436f8bb91c53832e56b1f2720 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Thu, 6 Oct 2022 21:35:12 +0200 Subject: [PATCH 17/66] Pass context to env vars completion --- crates/ide-completion/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/ide-completion/src/lib.rs b/crates/ide-completion/src/lib.rs index c5718ec40f5..9d0044e55f5 100644 --- a/crates/ide-completion/src/lib.rs +++ b/crates/ide-completion/src/lib.rs @@ -183,7 +183,7 @@ pub fn completions( CompletionAnalysis::String { original, expanded: Some(expanded) } => { completions::extern_abi::complete_extern_abi(acc, ctx, expanded); completions::format_string::format_string(acc, ctx, original, expanded); - completions::env_vars::complete_cargo_env_vars(acc, expanded); + completions::env_vars::complete_cargo_env_vars(acc, ctx, expanded); } CompletionAnalysis::UnexpandedAttrTT { colon_prefix, From df7d39b2ed601fe4d3284d15c21e871750b3cbc4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 2 Oct 2022 20:04:47 +0300 Subject: [PATCH 18/66] Bump once_cell --- Cargo.lock | 4 ++-- crates/hir-def/Cargo.toml | 2 +- crates/hir-ty/Cargo.toml | 2 +- crates/hir/Cargo.toml | 2 +- crates/ide-completion/Cargo.toml | 2 +- crates/ide-db/Cargo.toml | 2 +- crates/profile/Cargo.toml | 2 +- crates/syntax/Cargo.toml | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e45cde68388..cf1f166703f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1017,9 +1017,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.13.1" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "oorandom" diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index e8cff2f3e6c..54b0832ca6a 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -23,7 +23,7 @@ hashbrown = { version = "0.12.1", default-features = false } indexmap = "1.9.1" itertools = "0.10.3" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } -once_cell = "1.12.0" +once_cell = "1.15.0" rustc-hash = "1.1.0" smallvec = "1.9.0" tracing = "0.1.35" diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 7f143f396c7..d8ede9956fc 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -22,7 +22,7 @@ chalk-solve = { version = "0.84.0", default-features = false } chalk-ir = "0.84.0" chalk-recursive = { version = "0.84.0", default-features = false } la-arena = { version = "0.3.0", path = "../../lib/la-arena" } -once_cell = "1.12.0" +once_cell = "1.15.0" typed-arena = "2.0.1" stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 8e6a2441b33..0c3eed57af0 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -15,7 +15,7 @@ either = "1.7.0" arrayvec = "0.7.2" itertools = "0.10.3" smallvec = "1.9.0" -once_cell = "1.12.0" +once_cell = "1.15.0" stdx = { path = "../stdx", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/ide-completion/Cargo.toml 
b/crates/ide-completion/Cargo.toml index 8c9d6b22864..550f65375a8 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -13,7 +13,7 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.3" -once_cell = "1.12.0" +once_cell = "1.15.0" smallvec = "1.9.0" stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index 30272bc16f6..3dca3e20222 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -15,7 +15,7 @@ tracing = "0.1.35" rayon = "1.5.3" fst = { version = "0.4.7", default-features = false } rustc-hash = "1.1.0" -once_cell = "1.12.0" +once_cell = "1.15.0" either = "1.7.0" itertools = "0.10.3" arrayvec = "0.7.2" diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml index 0b78a45a24b..ed3d816da52 100644 --- a/crates/profile/Cargo.toml +++ b/crates/profile/Cargo.toml @@ -10,7 +10,7 @@ rust-version = "1.57" doctest = false [dependencies] -once_cell = "1.12.0" +once_cell = "1.15.0" cfg-if = "1.0.0" libc = "0.2.126" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 0e2dec386ff..8005b20206d 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -16,7 +16,7 @@ itertools = "0.10.3" rowan = "0.15.8" rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } rustc-hash = "1.1.0" -once_cell = "1.12.0" +once_cell = "1.15.0" indexmap = "1.9.1" smol_str = "0.1.23" From 5bbfea03ccada14bbaca6df6c0ef3760ac44a9a5 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Sat, 8 Oct 2022 23:18:11 +0100 Subject: [PATCH 19/66] fix: in VSCode, correctly resolve relative paths to errors VS Code problem matcher are restricted to be static "regexes". You can't create a problem matcher dynamically, and you can't use custom code in lieu of problem matcher. This creates a problem for rust/cargo compiler errors. They use paths relative to the root of the Cargo workspace, but VS Code doesn't necessary know where that root is. Luckily, there's a way out: our current problem matcher is defined like this: "fileLocation": [ "autoDetect", "${workspaceRoot}" ], That means that relative pahts would be resoleved relative to workspace root. VS Code allows to specify a command inside `${}`. So we can plug custom logic there to fetch Cargo's workspace root! And that's exactly what this PR is doing! 
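To make the command substitution concrete, here is a condensed TypeScript sketch of the flow, using only identifiers that appear in the diff below (abridged and illustrative, not the complete implementation):

    // ctx.ts: remember the Cargo workspace root of the most recently launched runnable
    export class Ctx {
        // read by the `rust-analyzer.cargoWorkspaceRootForCurrentRun` command
        cargoWorkspaceRootForCurrentRun?: string = undefined;
    }

    // commands.ts: record the root when a runnable starts, and expose it as a command
    export function run(ctx: Ctx): Cmd {
        return async () => {
            // `item` is the picked runnable; its root comes from runnable.args.workspaceRoot
            ctx.cargoWorkspaceRootForCurrentRun = item.cargoWorkspaceRoot;
            return await vscode.tasks.executeTask(task);
        };
    }
    export function getCargoWorkspaceDir(ctx: Ctx): Cmd {
        return async () => ctx.cargoWorkspaceRootForCurrentRun;
    }

    // main.ts: VS Code resolves ${command:rust-analyzer.cargoWorkspaceRootForCurrentRun}
    // in the new $rustc-run problem matcher through this registration
    ctx.registerCommand("cargoWorkspaceRootForCurrentRun", commands.getCargoWorkspaceDir);

With that in place, tasks built for runnables reference the `$rustc-run` problem matcher instead of `$rustc`, so relative paths in compiler errors resolve against the correct Cargo workspace root.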
--- editors/code/package.json | 9 +++++++++ editors/code/src/commands.ts | 4 ++++ editors/code/src/ctx.ts | 4 ++++ editors/code/src/main.ts | 1 + editors/code/src/run.ts | 2 ++ editors/code/src/tasks.ts | 2 +- 6 files changed, 21 insertions(+), 1 deletion(-) diff --git a/editors/code/package.json b/editors/code/package.json index f1dd3aa79ff..ccf62d002bb 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1301,6 +1301,15 @@ "endsPattern": "^\\[Finished running\\b" }, "pattern": "$rustc" + }, + { + "name": "rustc-run", + "base": "$rustc", + "fileLocation": [ + "autoDetect", + "${command:rust-analyzer.cargoWorkspaceRootForCurrentRun}" + ], + "pattern": "$rustc-run" } ], "colors": [ diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index b9ad525e361..6b10073aa86 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -842,6 +842,7 @@ export function run(ctx: Ctx): Cmd { item.detail = "rerun"; prevRunnable = item; const task = await createTask(item.runnable, ctx.config); + ctx.cargoWorkspaceRootForCurrentRun = item.cargoWorkspaceRoot; return await vscode.tasks.executeTask(task); }; } @@ -946,3 +947,6 @@ export function linkToCommand(ctx: Ctx): Cmd { } }; } +export function getCargoWorkspaceDir(ctx: Ctx): Cmd { + return async () => ctx.cargoWorkspaceRootForCurrentRun; +} diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index 26510011d43..b6c0eedfb16 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -17,6 +17,10 @@ export type Workspace = }; export class Ctx { + // Helps VS Code to correctly link problems from runnables. This is used by + // `rust-analyzer.cargoWorkspaceRootForCurrentRun` command of $rustc-run problem matcher. + cargoWorkspaceRootForCurrentRun?: string = undefined; + private constructor( readonly config: Config, private readonly extCtx: vscode.ExtensionContext, diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 41bde4195e0..6e6c2513dd9 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -189,6 +189,7 @@ async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) { ctx.registerCommand("resolveCodeAction", commands.resolveCodeAction); ctx.registerCommand("applyActionGroup", commands.applyActionGroup); ctx.registerCommand("gotoLocation", commands.gotoLocation); + ctx.registerCommand("cargoWorkspaceRootForCurrentRun", commands.getCargoWorkspaceDir); ctx.registerCommand("linkToCommand", commands.linkToCommand); } diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index 22e5eda6827..100c0fe2d8c 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts @@ -89,12 +89,14 @@ export async function selectRunnable( export class RunnableQuickPick implements vscode.QuickPickItem { public label: string; + public cargoWorkspaceRoot?: string; public description?: string | undefined; public detail?: string | undefined; public picked?: boolean | undefined; constructor(public runnable: ra.Runnable) { this.label = runnable.label; + this.cargoWorkspaceRoot = runnable.args.workspaceRoot; } } diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts index e6239deeb21..44697f95bab 100644 --- a/editors/code/src/tasks.ts +++ b/editors/code/src/tasks.ts @@ -128,7 +128,7 @@ export async function buildCargoTask( name, TASK_SOURCE, exec, - ["$rustc"] + ["$rustc-run"] ); } From 75f641799effae2c0138215eca968b33429d23fe Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Sun, 9 Oct 2022 17:58:55 -0400 Subject: [PATCH 20/66] Add 
`GenericParamList::to_generic_args` --- .../extract_struct_from_enum_variant.rs | 34 ++----------------- crates/syntax/src/ast/edit_in_place.rs | 17 ++++++++++ 2 files changed, 19 insertions(+), 32 deletions(-) diff --git a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs index 8d5cab283d0..970e948dfd9 100644 --- a/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs +++ b/crates/ide-assists/src/handlers/extract_struct_from_enum_variant.rs @@ -9,7 +9,7 @@ use ide_db::{ search::FileReference, FxHashSet, RootDatabase, }; -use itertools::{Itertools, Position}; +use itertools::Itertools; use syntax::{ ast::{ self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams, @@ -298,37 +298,7 @@ fn update_variant(variant: &ast::Variant, generics: Option 0) - .map(|generics| { - let mut generic_str = String::with_capacity(8); - - for (p, more) in generics.generic_params().with_position().map(|p| match p { - Position::First(p) | Position::Middle(p) => (p, true), - Position::Last(p) | Position::Only(p) => (p, false), - }) { - match p { - ast::GenericParam::ConstParam(konst) => { - if let Some(name) = konst.name() { - generic_str.push_str(name.text().as_str()); - } - } - ast::GenericParam::LifetimeParam(lt) => { - if let Some(lt) = lt.lifetime() { - generic_str.push_str(lt.text().as_str()); - } - } - ast::GenericParam::TypeParam(ty) => { - if let Some(name) = ty.name() { - generic_str.push_str(name.text().as_str()); - } - } - } - if more { - generic_str.push_str(", "); - } - } - - make::ty(&format!("{}<{}>", &name.text(), &generic_str)) - }) + .map(|generics| make::ty(&format!("{}{}", &name.text(), generics.to_generic_args()))) .unwrap_or_else(|| make::ty(&name.text())); // change from a record to a tuple field list diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index eadebbe8a21..7d632352828 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -235,6 +235,23 @@ impl ast::GenericParamList { } } } + + /// Extracts the const, type, and lifetime names into a new [`ast::GenericParamList`] + pub fn to_generic_args(&self) -> ast::GenericParamList { + let params = self.generic_params().filter_map(|param| match param { + ast::GenericParam::ConstParam(it) => { + Some(ast::GenericParam::TypeParam(make::type_param(it.name()?, None))) + } + ast::GenericParam::LifetimeParam(it) => { + Some(ast::GenericParam::LifetimeParam(make::lifetime_param(it.lifetime()?))) + } + ast::GenericParam::TypeParam(it) => { + Some(ast::GenericParam::TypeParam(make::type_param(it.name()?, None))) + } + }); + + make::generic_param_list(params) + } } impl ast::WhereClause { From bfe6ec9b778d12287fce1fdf2df7e7b0d58ee775 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Sun, 9 Oct 2022 18:12:08 -0400 Subject: [PATCH 21/66] Add `{TypeParam, ConstParam}::remove_default` Also includes a drive-by refactor of `utils::generate_impl_text_inner`, since that's what drove this change --- .../ide-assists/src/handlers/generate_impl.rs | 13 ++++ crates/ide-assists/src/utils.rs | 76 +++++++++---------- crates/syntax/src/ast/edit_in_place.rs | 36 +++++++++ 3 files changed, 87 insertions(+), 38 deletions(-) diff --git a/crates/ide-assists/src/handlers/generate_impl.rs b/crates/ide-assists/src/handlers/generate_impl.rs index 68287a20bf8..307cea3d0a4 100644 --- a/crates/ide-assists/src/handlers/generate_impl.rs +++ 
b/crates/ide-assists/src/handlers/generate_impl.rs @@ -52,6 +52,7 @@ mod tests { use super::*; + // FIXME: break up into separate test fns #[test] fn test_add_impl() { check_assist( @@ -134,6 +135,18 @@ mod tests { }"#, ); + check_assist( + generate_impl, + r#" + struct Defaulted {}$0"#, + r#" + struct Defaulted {} + + impl Defaulted { + $0 + }"#, + ); + check_assist( generate_impl, r#"pub trait Trait {} diff --git a/crates/ide-assists/src/utils.rs b/crates/ide-assists/src/utils.rs index 4ab6e2627fa..38396cd7d7b 100644 --- a/crates/ide-assists/src/utils.rs +++ b/crates/ide-assists/src/utils.rs @@ -2,8 +2,6 @@ use std::ops; -use itertools::Itertools; - pub(crate) use gen_trait_fn_body::gen_trait_fn_body; use hir::{db::HirDatabase, HirDisplay, Semantics}; use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap}; @@ -15,7 +13,7 @@ use syntax::{ edit_in_place::{AttrsOwnerEdit, Removable}, make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace, }, - ted, AstNode, AstToken, Direction, SmolStr, SourceFile, + ted, AstNode, AstToken, Direction, SourceFile, SyntaxKind::*, SyntaxNode, TextRange, TextSize, T, }; @@ -424,34 +422,44 @@ pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: & } fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String { - let generic_params = adt.generic_param_list(); + // Ensure lifetime params are before type & const params + let generic_params = adt.generic_param_list().map(|generic_params| { + let lifetime_params = + generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam); + let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| { + // remove defaults since they can't be specified in impls + match param { + ast::TypeOrConstParam::Type(param) => { + let param = param.clone_for_update(); + param.remove_default(); + Some(ast::GenericParam::TypeParam(param)) + } + ast::TypeOrConstParam::Const(param) => { + let param = param.clone_for_update(); + param.remove_default(); + Some(ast::GenericParam::ConstParam(param)) + } + } + }); + + make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params)) + }); + + // FIXME: use syntax::make & mutable AST apis instead + // `trait_text` and `code` can't be opaque blobs of text let mut buf = String::with_capacity(code.len()); + + // Copy any cfg attrs from the original adt buf.push_str("\n\n"); - adt.attrs() - .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false)) - .for_each(|attr| buf.push_str(format!("{}\n", attr).as_str())); + let cfg_attrs = adt + .attrs() + .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false)); + cfg_attrs.for_each(|attr| buf.push_str(&format!("{attr}\n"))); + + // `impl{generic_params} {trait_text} for {name}{generic_params.to_generic_args()}` buf.push_str("impl"); if let Some(generic_params) = &generic_params { - let lifetimes = generic_params.lifetime_params().map(|lt| format!("{}", lt.syntax())); - let toc_params = generic_params.type_or_const_params().map(|toc_param| { - let type_param = match toc_param { - ast::TypeOrConstParam::Type(x) => x, - ast::TypeOrConstParam::Const(x) => return x.syntax().to_string(), - }; - let mut buf = String::new(); - if let Some(it) = type_param.name() { - format_to!(buf, "{}", it.syntax()); - } - if let Some(it) = type_param.colon_token() { - format_to!(buf, "{} ", it); - } - if let Some(it) = type_param.type_bound_list() { - 
format_to!(buf, "{}", it.syntax()); - } - buf - }); - let generics = lifetimes.chain(toc_params).format(", "); - format_to!(buf, "<{}>", generics); + format_to!(buf, "{generic_params}"); } buf.push(' '); if let Some(trait_text) = trait_text { @@ -460,23 +468,15 @@ fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str } buf.push_str(&adt.name().unwrap().text()); if let Some(generic_params) = generic_params { - let lifetime_params = generic_params - .lifetime_params() - .filter_map(|it| it.lifetime()) - .map(|it| SmolStr::from(it.text())); - let toc_params = generic_params - .type_or_const_params() - .filter_map(|it| it.name()) - .map(|it| SmolStr::from(it.text())); - format_to!(buf, "<{}>", lifetime_params.chain(toc_params).format(", ")) + format_to!(buf, "{}", generic_params.to_generic_args()); } match adt.where_clause() { Some(where_clause) => { - format_to!(buf, "\n{}\n{{\n{}\n}}", where_clause, code); + format_to!(buf, "\n{where_clause}\n{{\n{code}\n}}"); } None => { - format_to!(buf, " {{\n{}\n}}", code); + format_to!(buf, " {{\n{code}\n}}"); } } diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 7d632352828..173c064ccc9 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -265,6 +265,42 @@ impl ast::WhereClause { } } +impl ast::TypeParam { + pub fn remove_default(&self) { + if let Some((eq, last)) = self + .syntax() + .children_with_tokens() + .find(|it| it.kind() == T![=]) + .zip(self.syntax().last_child_or_token()) + { + ted::remove_all(eq..=last); + + // remove any trailing ws + if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) { + last.detach(); + } + } + } +} + +impl ast::ConstParam { + pub fn remove_default(&self) { + if let Some((eq, last)) = self + .syntax() + .children_with_tokens() + .find(|it| it.kind() == T![=]) + .zip(self.syntax().last_child_or_token()) + { + ted::remove_all(eq..=last); + + // remove any trailing ws + if let Some(last) = self.syntax().last_token().filter(|it| it.kind() == WHITESPACE) { + last.detach(); + } + } + } +} + pub trait Removable: AstNode { fn remove(&self); } From 1015a177d4a9f1c8ed853dee68dc2bf13e0cb02d Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Sun, 9 Oct 2022 20:45:20 -0400 Subject: [PATCH 22/66] Have `to_generic_args` return `ast::GenericArgList` --- crates/syntax/src/ast/edit_in_place.rs | 21 +++++++++++---------- crates/syntax/src/ast/make.rs | 23 +++++++++++++++++++---- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index 173c064ccc9..229e7419b73 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -236,21 +236,22 @@ impl ast::GenericParamList { } } - /// Extracts the const, type, and lifetime names into a new [`ast::GenericParamList`] - pub fn to_generic_args(&self) -> ast::GenericParamList { - let params = self.generic_params().filter_map(|param| match param { - ast::GenericParam::ConstParam(it) => { - Some(ast::GenericParam::TypeParam(make::type_param(it.name()?, None))) - } + /// Constructs a matching [`ast::GenericArgList`] + pub fn to_generic_args(&self) -> ast::GenericArgList { + let args = self.generic_params().filter_map(|param| match param { ast::GenericParam::LifetimeParam(it) => { - Some(ast::GenericParam::LifetimeParam(make::lifetime_param(it.lifetime()?))) + 
Some(ast::GenericArg::LifetimeArg(make::lifetime_arg(it.lifetime()?))) } ast::GenericParam::TypeParam(it) => { - Some(ast::GenericParam::TypeParam(make::type_param(it.name()?, None))) + Some(ast::GenericArg::TypeArg(make::type_arg(make::ext::ty_name(it.name()?)))) + } + ast::GenericParam::ConstParam(it) => { + // Name-only const params get parsed as `TypeArg`s + Some(ast::GenericArg::TypeArg(make::type_arg(make::ext::ty_name(it.name()?)))) } }); - make::generic_param_list(params) + make::generic_arg_list(args) } } @@ -317,7 +318,7 @@ impl Removable for ast::TypeBoundList { impl ast::PathSegment { pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList { if self.generic_arg_list().is_none() { - let arg_list = make::generic_arg_list().clone_for_update(); + let arg_list = make::generic_arg_list(empty()).clone_for_update(); ted::append_child(self.syntax(), arg_list.syntax()); } self.generic_arg_list().unwrap() diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 83f8bbac588..c9a21e12c08 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -88,6 +88,9 @@ pub mod ext { block_expr(None, None) } + pub fn ty_name(name: ast::Name) -> ast::Type { + ty_path(ident_path(&format!("{name}"))) + } pub fn ty_bool() -> ast::Type { ty_path(ident_path("bool")) } @@ -160,6 +163,7 @@ pub fn assoc_item_list() -> ast::AssocItemList { ast_from_text("impl C for D {}") } +// FIXME: `ty_params` should be `ast::GenericArgList` pub fn impl_( ty: ast::Path, params: Option, @@ -185,10 +189,6 @@ pub fn impl_trait( ast_from_text(&format!("impl{ty_params} {trait_} for {ty}{ty_params} {{}}")) } -pub(crate) fn generic_arg_list() -> ast::GenericArgList { - ast_from_text("const S: T<> = ();") -} - pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment { ast_from_text(&format!("type __ = {name_ref};")) } @@ -718,6 +718,21 @@ pub fn generic_param_list( ast_from_text(&format!("fn f<{args}>() {{ }}")) } +pub fn type_arg(ty: ast::Type) -> ast::TypeArg { + ast_from_text(&format!("const S: T<{ty}> = ();")) +} + +pub fn lifetime_arg(lifetime: ast::Lifetime) -> ast::LifetimeArg { + ast_from_text(&format!("const S: T<{lifetime}> = ();")) +} + +pub(crate) fn generic_arg_list( + args: impl IntoIterator, +) -> ast::GenericArgList { + let args = args.into_iter().join(", "); + ast_from_text(&format!("const S: T<{args}> = ();")) +} + pub fn visibility_pub_crate() -> ast::Visibility { ast_from_text("pub(crate) struct S") } From a69cccfc0e40095f3f9f9b73eeeb4ac913ee3796 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Sun, 9 Oct 2022 21:28:21 -0400 Subject: [PATCH 23/66] Underline only the intra-doc link instead of the whole doc comment --- crates/ide/src/goto_definition.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/ide/src/goto_definition.rs b/crates/ide/src/goto_definition.rs index f86ea61d158..d0be1b3f404 100644 --- a/crates/ide/src/goto_definition.rs +++ b/crates/ide/src/goto_definition.rs @@ -48,10 +48,14 @@ pub(crate) fn goto_definition( _ => 1, })?; if let Some(doc_comment) = token_as_doc_comment(&original_token) { - return doc_comment.get_definition_with_descend_at(sema, position.offset, |def, _, _| { - let nav = def.try_to_nav(db)?; - Some(RangeInfo::new(original_token.text_range(), vec![nav])) - }); + return doc_comment.get_definition_with_descend_at( + sema, + position.offset, + |def, _, link_range| { + let nav = def.try_to_nav(db)?; + Some(RangeInfo::new(link_range, vec![nav])) + }, + ); } let navs = sema 
.descend_into_macros(original_token.clone()) From 63ed71bd30fab43630547b97e25ebf73a51ace6e Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 10 Oct 2022 09:47:09 +0200 Subject: [PATCH 24/66] Honor cfg attributes on params when lowering their patterns --- crates/hir-def/src/body.rs | 18 ++++++++++++++++-- crates/hir-def/src/body/lower.rs | 16 +++++++++++----- crates/hir-ty/src/tests/patterns.rs | 10 ++++++++++ crates/ide-completion/src/context.rs | 2 +- 4 files changed, 38 insertions(+), 8 deletions(-) diff --git a/crates/hir-def/src/body.rs b/crates/hir-def/src/body.rs index 2dc7714bbb5..759f3b8c04b 100644 --- a/crates/hir-def/src/body.rs +++ b/crates/hir-def/src/body.rs @@ -311,7 +311,20 @@ impl Body { DefWithBodyId::FunctionId(f) => { let f = f.lookup(db); let src = f.source(db); - params = src.value.param_list(); + params = src.value.param_list().map(|param_list| { + let item_tree = f.id.item_tree(db); + let func = &item_tree[f.id.value]; + let krate = f.container.module(db).krate; + let crate_graph = db.crate_graph(); + ( + param_list, + func.params.clone().map(move |param| { + item_tree + .attrs(db, krate, param.into()) + .is_cfg_enabled(&crate_graph[krate].cfg_options) + }), + ) + }); (src.file_id, f.module(db), src.value.body().map(ast::Expr::from)) } DefWithBodyId::ConstId(c) => { @@ -334,6 +347,7 @@ impl Body { let expander = Expander::new(db, file_id, module); let (mut body, source_map) = Body::new(db, expander, params, body); body.shrink_to_fit(); + (Arc::new(body), Arc::new(source_map)) } @@ -370,7 +384,7 @@ impl Body { fn new( db: &dyn DefDatabase, expander: Expander, - params: Option, + params: Option<(ast::ParamList, impl Iterator)>, body: Option, ) -> (Body, BodySourceMap) { lower::lower(db, expander, params, body) diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index c4f91e49a6e..ccc01c3efca 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -77,7 +77,7 @@ impl<'a> LowerCtx<'a> { pub(super) fn lower( db: &dyn DefDatabase, expander: Expander, - params: Option, + params: Option<(ast::ParamList, impl Iterator)>, body: Option, ) -> (Body, BodySourceMap) { ExprCollector { @@ -119,11 +119,13 @@ struct ExprCollector<'a> { impl ExprCollector<'_> { fn collect( mut self, - param_list: Option, + param_list: Option<(ast::ParamList, impl Iterator)>, body: Option, ) -> (Body, BodySourceMap) { - if let Some(param_list) = param_list { - if let Some(self_param) = param_list.self_param() { + if let Some((param_list, mut attr_enabled)) = param_list { + if let Some(self_param) = + param_list.self_param().filter(|_| attr_enabled.next().unwrap_or(false)) + { let ptr = AstPtr::new(&self_param); let param_pat = self.alloc_pat( Pat::Bind { @@ -139,7 +141,11 @@ impl ExprCollector<'_> { self.body.params.push(param_pat); } - for pat in param_list.params().filter_map(|param| param.pat()) { + for pat in param_list + .params() + .zip(attr_enabled) + .filter_map(|(param, enabled)| param.pat().filter(|_| enabled)) + { let param_pat = self.collect_pat(pat); self.body.params.push(param_pat); } diff --git a/crates/hir-ty/src/tests/patterns.rs b/crates/hir-ty/src/tests/patterns.rs index eb04bf87783..74de33117ee 100644 --- a/crates/hir-ty/src/tests/patterns.rs +++ b/crates/hir-ty/src/tests/patterns.rs @@ -1070,3 +1070,13 @@ fn main() { "#, ); } + +#[test] +fn cfg_params() { + check_types( + r#" +fn my_fn(#[cfg(feature = "feature")] u8: u8, u32: u32) {} + //^^^ u32 +"#, + ); +} diff --git a/crates/ide-completion/src/context.rs 
b/crates/ide-completion/src/context.rs index a5e854b74df..f0563e290db 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -1,4 +1,4 @@ -//! See `CompletionContext` structure. +//! See [`CompletionContext`] structure. mod analysis; #[cfg(test)] From 1574fe0d5438f63ddb8c59d7837d5acf3fd77c94 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sat, 1 Oct 2022 22:51:09 +0200 Subject: [PATCH 25/66] Use $crate instead of std for panic builtin_fn_macro This should be closer to the expected output and gets rid of a few type mismatches in rustc/library --- .../macro_expansion_tests/builtin_fn_macro.rs | 24 +++++++++---------- crates/hir-expand/src/builtin_fn_macro.rs | 14 +++++------ 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs index 4f626105a53..c04cd165192 100644 --- a/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs +++ b/crates/hir-def/src/macro_expansion_tests/builtin_fn_macro.rs @@ -93,12 +93,12 @@ macro_rules! option_env {() => {}} fn main() { option_env!("TEST_ENV_VAR"); } "#, - expect![[r##" + expect![[r#" #[rustc_builtin_macro] macro_rules! option_env {() => {}} -fn main() { std::option::Option::None:: < &str>; } -"##]], +fn main() { $crate::option::Option::None:: < &str>; } +"#]], ); } @@ -191,7 +191,7 @@ fn main() { format_args!("{} {:?}", arg1(a, b, c), arg2); } "#, - expect![[r##" + expect![[r#" #[rustc_builtin_macro] macro_rules! format_args { ($fmt:expr) => ({ /* compiler built-in */ }); @@ -199,9 +199,9 @@ macro_rules! format_args { } fn main() { - std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(arg1(a, b, c)), std::fmt::Display::fmt), std::fmt::ArgumentV1::new(&(arg2), std::fmt::Display::fmt), ]); + $crate::fmt::Arguments::new_v1(&[], &[$crate::fmt::ArgumentV1::new(&(arg1(a, b, c)), $crate::fmt::Display::fmt), $crate::fmt::ArgumentV1::new(&(arg2), $crate::fmt::Display::fmt), ]); } -"##]], +"#]], ); } @@ -219,7 +219,7 @@ fn main() { format_args!("{} {:?}", a::(), b); } "#, - expect![[r##" + expect![[r#" #[rustc_builtin_macro] macro_rules! format_args { ($fmt:expr) => ({ /* compiler built-in */ }); @@ -227,9 +227,9 @@ macro_rules! format_args { } fn main() { - std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a::()), std::fmt::Display::fmt), std::fmt::ArgumentV1::new(&(b), std::fmt::Display::fmt), ]); + $crate::fmt::Arguments::new_v1(&[], &[$crate::fmt::ArgumentV1::new(&(a::()), $crate::fmt::Display::fmt), $crate::fmt::ArgumentV1::new(&(b), $crate::fmt::Display::fmt), ]); } -"##]], +"#]], ); } @@ -248,7 +248,7 @@ fn main() { format_args!/*+errors*/("{} {:?}", a.); } "#, - expect![[r##" + expect![[r#" #[rustc_builtin_macro] macro_rules! format_args { ($fmt:expr) => ({ /* compiler built-in */ }); @@ -258,9 +258,9 @@ macro_rules! 
format_args { fn main() { let _ = /* parse error: expected field name or number */ -std::fmt::Arguments::new_v1(&[], &[std::fmt::ArgumentV1::new(&(a.), std::fmt::Display::fmt), ]); +$crate::fmt::Arguments::new_v1(&[], &[$crate::fmt::ArgumentV1::new(&(a.), $crate::fmt::Display::fmt), ]); } -"##]], +"#]], ); } diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 8befa7f7da7..7b19518e25a 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -238,9 +238,9 @@ fn format_args_expand( ) -> ExpandResult { // We expand `format_args!("", a1, a2)` to // ``` - // std::fmt::Arguments::new_v1(&[], &[ - // std::fmt::ArgumentV1::new(&arg1,std::fmt::Display::fmt), - // std::fmt::ArgumentV1::new(&arg2,std::fmt::Display::fmt), + // $crate::fmt::Arguments::new_v1(&[], &[ + // $crate::fmt::ArgumentV1::new(&arg1,$crate::fmt::Display::fmt), + // $crate::fmt::ArgumentV1::new(&arg2,$crate::fmt::Display::fmt), // ]) // ```, // which is still not really correct, but close enough for now @@ -262,10 +262,10 @@ fn format_args_expand( } let _format_string = args.remove(0); let arg_tts = args.into_iter().flat_map(|arg| { - quote! { std::fmt::ArgumentV1::new(&(#arg), std::fmt::Display::fmt), } + quote! { #DOLLAR_CRATE::fmt::ArgumentV1::new(&(#arg), #DOLLAR_CRATE::fmt::Display::fmt), } }.token_trees); let expanded = quote! { - std::fmt::Arguments::new_v1(&[], &[##arg_tts]) + #DOLLAR_CRATE::fmt::Arguments::new_v1(&[], &[##arg_tts]) }; ExpandResult::ok(expanded) } @@ -675,8 +675,8 @@ fn option_env_expand( }; let expanded = match get_env_inner(db, arg_id, &key) { - None => quote! { std::option::Option::None::<&str> }, - Some(s) => quote! { std::option::Some(#s) }, + None => quote! { #DOLLAR_CRATE::option::Option::None::<&str> }, + Some(s) => quote! { #DOLLAR_CRATE::option::Some(#s) }, }; ExpandResult::ok(ExpandedEager::new(expanded)) From 78f33c0e960a260d02d0f005350b8378d3003c0d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 10 Oct 2022 14:25:14 +0200 Subject: [PATCH 26/66] Expand unmatched mbe fragments to reasonable default token trees Currently we expand unmatched fragments by not replacing them at all, leaving us with `$ident`. This trips up the parser or subsequent macro calls. Instead it makes more sense to replace these with some reasonable default depending on the fragment kind which should make more recursive macro calls work better for completions. --- crates/mbe/src/benchmark.rs | 30 +++++++------- crates/mbe/src/expander.rs | 3 +- crates/mbe/src/expander/matcher.rs | 54 +++++++++++++++--------- crates/mbe/src/expander/transcriber.rs | 57 ++++++++++++++++++++++++-- crates/mbe/src/lib.rs | 6 +-- crates/mbe/src/parser.rs | 41 ++++++++++++++++-- 6 files changed, 146 insertions(+), 45 deletions(-) diff --git a/crates/mbe/src/benchmark.rs b/crates/mbe/src/benchmark.rs index ac691578d88..9c92bae6a19 100644 --- a/crates/mbe/src/benchmark.rs +++ b/crates/mbe/src/benchmark.rs @@ -8,7 +8,7 @@ use syntax::{ use test_utils::{bench, bench_fixture, skip_slow_tests}; use crate::{ - parser::{Op, RepeatKind, Separator}, + parser::{MetaVarKind, Op, RepeatKind, Separator}, syntax_node_to_token_tree, DeclarativeMacro, }; @@ -111,35 +111,35 @@ fn invocation_fixtures(rules: &FxHashMap) -> Vec<(Stri fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { return match op { - Op::Var { kind, .. 
} => match kind.as_ref().map(|it| it.as_str()) { - Some("ident") => parent.token_trees.push(make_ident("foo")), - Some("ty") => parent.token_trees.push(make_ident("Foo")), - Some("tt") => parent.token_trees.push(make_ident("foo")), - Some("vis") => parent.token_trees.push(make_ident("pub")), - Some("pat") => parent.token_trees.push(make_ident("foo")), - Some("path") => parent.token_trees.push(make_ident("foo")), - Some("literal") => parent.token_trees.push(make_literal("1")), - Some("expr") => parent.token_trees.push(make_ident("foo")), - Some("lifetime") => { + Op::Var { kind, .. } => match kind.as_ref() { + Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), + Some(MetaVarKind::Ty) => parent.token_trees.push(make_ident("Foo")), + Some(MetaVarKind::Tt) => parent.token_trees.push(make_ident("foo")), + Some(MetaVarKind::Vis) => parent.token_trees.push(make_ident("pub")), + Some(MetaVarKind::Pat) => parent.token_trees.push(make_ident("foo")), + Some(MetaVarKind::Path) => parent.token_trees.push(make_ident("foo")), + Some(MetaVarKind::Literal) => parent.token_trees.push(make_literal("1")), + Some(MetaVarKind::Expr) => parent.token_trees.push(make_ident("foo")), + Some(MetaVarKind::Lifetime) => { parent.token_trees.push(make_punct('\'')); parent.token_trees.push(make_ident("a")); } - Some("block") => { + Some(MetaVarKind::Block) => { parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)) } - Some("item") => { + Some(MetaVarKind::Item) => { parent.token_trees.push(make_ident("fn")); parent.token_trees.push(make_ident("foo")); parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None)); } - Some("meta") => { + Some(MetaVarKind::Meta) => { parent.token_trees.push(make_ident("foo")); parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None)); } None => (), - Some(kind) => panic!("Unhandled kind {}", kind), + Some(kind) => panic!("Unhandled kind {:?}", kind), }, Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()), Op::Repeat { tokens, kind, separator } => { diff --git a/crates/mbe/src/expander.rs b/crates/mbe/src/expander.rs index 1e1bfa55055..100ec6bfb93 100644 --- a/crates/mbe/src/expander.rs +++ b/crates/mbe/src/expander.rs @@ -8,7 +8,7 @@ mod transcriber; use rustc_hash::FxHashMap; use syntax::SmolStr; -use crate::{ExpandError, ExpandResult}; +use crate::{parser::MetaVarKind, ExpandError, ExpandResult}; pub(crate) fn expand_rules( rules: &[crate::Rule], @@ -104,6 +104,7 @@ enum Binding { Fragment(Fragment), Nested(Vec), Empty, + Missing(MetaVarKind), } #[derive(Debug, Clone, PartialEq, Eq)] diff --git a/crates/mbe/src/expander/matcher.rs b/crates/mbe/src/expander/matcher.rs index 139a8cb8cbe..3f656df25f7 100644 --- a/crates/mbe/src/expander/matcher.rs +++ b/crates/mbe/src/expander/matcher.rs @@ -66,7 +66,7 @@ use syntax::SmolStr; use crate::{ expander::{Binding, Bindings, ExpandResult, Fragment}, - parser::{Op, RepeatKind, Separator}, + parser::{MetaVarKind, Op, RepeatKind, Separator}, tt_iter::TtIter, ExpandError, MetaTemplate, }; @@ -119,6 +119,7 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree) -> Match { .map(|it| match it { Binding::Fragment(_) => 1, Binding::Empty => 1, + Binding::Missing(_) => 1, Binding::Nested(it) => count(it.iter()), }) .sum() @@ -130,6 +131,7 @@ enum BindingKind { Empty(SmolStr), Optional(SmolStr), Fragment(SmolStr, Fragment), + Missing(SmolStr, MetaVarKind), Nested(usize, usize), } @@ -190,6 +192,10 
@@ impl BindingsBuilder { .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment)))); } + fn push_missing(&mut self, idx: &mut BindingsIdx, var: &SmolStr, kind: MetaVarKind) { + self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Missing(var.clone(), kind)))); + } + fn push_nested(&mut self, parent: &mut BindingsIdx, child: &BindingsIdx) { let BindingsIdx(idx, nidx) = self.copy(child); self.nodes[parent.0].push(LinkNode::Node(Rc::new(BindingKind::Nested(idx, nidx)))); @@ -222,6 +228,9 @@ impl BindingsBuilder { BindingKind::Fragment(name, fragment) => { bindings.inner.insert(name.clone(), Binding::Fragment(fragment.clone())); } + BindingKind::Missing(name, kind) => { + bindings.inner.insert(name.clone(), Binding::Missing(*kind)); + } BindingKind::Nested(idx, nested_idx) => { let mut nested_nodes = Vec::new(); self.collect_nested(*idx, *nested_idx, &mut nested_nodes); @@ -458,9 +467,9 @@ fn match_loop_inner<'t>( } } OpDelimited::Op(Op::Var { kind, name, .. }) => { - if let Some(kind) = kind { + if let &Some(kind) = kind { let mut fork = src.clone(); - let match_res = match_meta_var(kind.as_str(), &mut fork); + let match_res = match_meta_var(kind, &mut fork); match match_res.err { None => { // Some meta variables are optional (e.g. vis) @@ -475,8 +484,15 @@ fn match_loop_inner<'t>( } Some(err) => { res.add_err(err); - if let Some(fragment) = match_res.value { - bindings_builder.push_fragment(&mut item.bindings, name, fragment); + match match_res.value { + Some(fragment) => bindings_builder.push_fragment( + &mut item.bindings, + name, + fragment, + ), + None => { + bindings_builder.push_missing(&mut item.bindings, name, kind) + } } item.is_error = true; error_items.push(item); @@ -668,20 +684,20 @@ fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter<'_>) -> Result<(), ExpandError> { } } -fn match_meta_var(kind: &str, input: &mut TtIter<'_>) -> ExpandResult> { +fn match_meta_var(kind: MetaVarKind, input: &mut TtIter<'_>) -> ExpandResult> { let fragment = match kind { - "path" => parser::PrefixEntryPoint::Path, - "ty" => parser::PrefixEntryPoint::Ty, + MetaVarKind::Path => parser::PrefixEntryPoint::Path, + MetaVarKind::Ty => parser::PrefixEntryPoint::Ty, // FIXME: These two should actually behave differently depending on the edition. // // https://doc.rust-lang.org/edition-guide/rust-2021/or-patterns-macro-rules.html - "pat" | "pat_param" => parser::PrefixEntryPoint::Pat, - "stmt" => parser::PrefixEntryPoint::Stmt, - "block" => parser::PrefixEntryPoint::Block, - "meta" => parser::PrefixEntryPoint::MetaItem, - "item" => parser::PrefixEntryPoint::Item, - "vis" => parser::PrefixEntryPoint::Vis, - "expr" => { + MetaVarKind::Pat | MetaVarKind::PatParam => parser::PrefixEntryPoint::Pat, + MetaVarKind::Stmt => parser::PrefixEntryPoint::Stmt, + MetaVarKind::Block => parser::PrefixEntryPoint::Block, + MetaVarKind::Meta => parser::PrefixEntryPoint::MetaItem, + MetaVarKind::Item => parser::PrefixEntryPoint::Item, + MetaVarKind::Vis => parser::PrefixEntryPoint::Vis, + MetaVarKind::Expr => { // `expr` should not match underscores. // HACK: Macro expansion should not be done using "rollback and try another alternative". // rustc [explicitly checks the next token][0]. 
@@ -698,17 +714,17 @@ fn match_meta_var(kind: &str, input: &mut TtIter<'_>) -> ExpandResult { let tt_result = match kind { - "ident" => input + MetaVarKind::Ident => input .expect_ident() .map(|ident| tt::Leaf::from(ident.clone()).into()) .map_err(|()| ExpandError::binding_error("expected ident")), - "tt" => input + MetaVarKind::Tt => input .expect_tt() .map_err(|()| ExpandError::binding_error("expected token tree")), - "lifetime" => input + MetaVarKind::Lifetime => input .expect_lifetime() .map_err(|()| ExpandError::binding_error("expected lifetime")), - "literal" => { + MetaVarKind::Literal => { let neg = input.eat_char('-'); input .expect_literal() diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 7bcc84740f1..cbb59ab8e67 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -6,7 +6,7 @@ use tt::{Delimiter, Subtree}; use crate::{ expander::{Binding, Bindings, Fragment}, - parser::{Op, RepeatKind, Separator}, + parser::{MetaVarKind, Op, RepeatKind, Separator}, ExpandError, ExpandResult, MetaTemplate, }; @@ -15,7 +15,7 @@ impl Bindings { self.inner.contains_key(name) } - fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result<&Fragment, ExpandError> { + fn get(&self, name: &str, nesting: &mut [NestingState]) -> Result { macro_rules! binding_err { ($($arg:tt)*) => { ExpandError::binding_error(format!($($arg)*)) }; } @@ -26,6 +26,7 @@ impl Bindings { nesting_state.hit = true; b = match b { Binding::Fragment(_) => break, + Binding::Missing(_) => break, Binding::Nested(bs) => bs.get(nesting_state.idx).ok_or_else(|| { nesting_state.at_end = true; binding_err!("could not find nested binding `{name}`") @@ -37,7 +38,55 @@ impl Bindings { }; } match b { - Binding::Fragment(it) => Ok(it), + Binding::Fragment(it) => Ok(it.clone()), + // emit some reasonable default expansion for missing bindings, + // this gives better recovery than emitting the `$fragment-name` verbatim + Binding::Missing(it) => Ok(match it { + MetaVarKind::Stmt => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + id: tt::TokenId::unspecified(), + char: ';', + spacing: tt::Spacing::Alone, + }))) + } + MetaVarKind::Block => Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: Some(tt::Delimiter { + id: tt::TokenId::unspecified(), + kind: tt::DelimiterKind::Brace, + }), + token_trees: vec![], + })), + // FIXME: Meta and Item should get proper defaults + MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => { + Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: None, + token_trees: vec![], + })) + } + MetaVarKind::Path + | MetaVarKind::Ty + | MetaVarKind::Pat + | MetaVarKind::PatParam + | MetaVarKind::Expr + | MetaVarKind::Ident => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("missing"), + id: tt::TokenId::unspecified(), + }))) + } + MetaVarKind::Lifetime => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("'missing"), + id: tt::TokenId::unspecified(), + }))) + } + MetaVarKind::Literal => { + Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_inline("\"missing\""), + id: tt::TokenId::unspecified(), + }))) + } + }), Binding::Nested(_) => { Err(binding_err!("expected simple binding, found nested binding `{name}`")) } @@ -157,7 +206,7 @@ fn expand_var(ctx: &mut ExpandCtx<'_>, v: &SmolStr, id: tt::TokenId) -> ExpandRe } else { 
ctx.bindings.get(v, &mut ctx.nesting).map_or_else( |e| ExpandResult { value: Fragment::Tokens(tt::TokenTree::empty()), err: Some(e) }, - |b| ExpandResult::ok(b.clone()), + |it| ExpandResult::ok(it), ) } } diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 79da84f4a02..c4f0fa20d6d 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -21,7 +21,7 @@ mod token_map; use std::fmt; use crate::{ - parser::{MetaTemplate, Op}, + parser::{MetaTemplate, MetaVarKind, Op}, tt_iter::TtIter, }; @@ -291,9 +291,9 @@ fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> { // Checks that no repetition which could match an empty token // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 let lsh_is_empty_seq = separator.is_none() && subtree.iter().all(|child_op| { - match child_op { + match *child_op { // vis is optional - Op::Var { kind: Some(kind), .. } => kind == "vis", + Op::Var { kind: Some(kind), .. } => kind == MetaVarKind::Vis, Op::Repeat { kind: parser::RepeatKind::ZeroOrMore | parser::RepeatKind::ZeroOrOne, .. diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs index acb4be5846d..351c359b73c 100644 --- a/crates/mbe/src/parser.rs +++ b/crates/mbe/src/parser.rs @@ -50,7 +50,7 @@ impl MetaTemplate { #[derive(Clone, Debug, PartialEq, Eq)] pub(crate) enum Op { - Var { name: SmolStr, kind: Option, id: tt::TokenId }, + Var { name: SmolStr, kind: Option, id: tt::TokenId }, Ignore { name: SmolStr, id: tt::TokenId }, Index { depth: u32 }, Repeat { tokens: MetaTemplate, kind: RepeatKind, separator: Option }, @@ -65,6 +65,24 @@ pub(crate) enum RepeatKind { ZeroOrOne, } +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub(crate) enum MetaVarKind { + Path, + Ty, + Pat, + PatParam, + Stmt, + Block, + Meta, + Item, + Vis, + Expr, + Ident, + Tt, + Lifetime, + Literal, +} + #[derive(Clone, Debug, Eq)] pub(crate) enum Separator { Literal(tt::Literal), @@ -179,13 +197,30 @@ fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Resul Ok(res) } -fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result, ParseError> { +fn eat_fragment_kind(src: &mut TtIter<'_>, mode: Mode) -> Result, ParseError> { if let Mode::Pattern = mode { src.expect_char(':').map_err(|()| ParseError::unexpected("missing fragment specifier"))?; let ident = src .expect_ident() .map_err(|()| ParseError::unexpected("missing fragment specifier"))?; - return Ok(Some(ident.text.clone())); + let kind = match ident.text.as_str() { + "path" => MetaVarKind::Path, + "ty" => MetaVarKind::Ty, + "pat" => MetaVarKind::Pat, + "pat_param" => MetaVarKind::PatParam, + "stmt" => MetaVarKind::Stmt, + "block" => MetaVarKind::Block, + "meta" => MetaVarKind::Meta, + "item" => MetaVarKind::Item, + "vis" => MetaVarKind::Vis, + "expr" => MetaVarKind::Expr, + "ident" => MetaVarKind::Ident, + "tt" => MetaVarKind::Tt, + "lifetime" => MetaVarKind::Lifetime, + "literal" => MetaVarKind::Literal, + _ => return Ok(None), + }; + return Ok(Some(kind)); }; Ok(None) } From fd9cd8476b2c59518d27e90ac955dce536785379 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 10 Oct 2022 15:41:32 +0200 Subject: [PATCH 27/66] Don't report build-scripts and proc-macros are metadata progress --- crates/project-model/src/build_scripts.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index 32db42f1db7..d9f09c03495 100644 --- 
a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -154,6 +154,8 @@ impl WorkspaceBuildScripts { Some(&it) => it, None => return, }; + progress(format!("running build-script: {}", workspace[package].name)); + let cfgs = { let mut acc = Vec::new(); for cfg in message.cfgs { @@ -189,7 +191,7 @@ impl WorkspaceBuildScripts { None => return, }; - progress(format!("metadata {}", message.target.name)); + progress(format!("building proc-macros: {}", message.target.name)); if message.target.kind.iter().any(|k| k == "proc-macro") { // Skip rmeta file From f3ae5e56fbf35e7b47a071b28432f72b669f444a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 10 Oct 2022 15:45:24 +0200 Subject: [PATCH 28/66] Refactor completions expansion --- crates/ide-completion/src/context.rs | 43 +- crates/ide-completion/src/context/analysis.rs | 1977 +++++++++-------- 2 files changed, 1030 insertions(+), 990 deletions(-) diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index a5e854b74df..5ac460fc6eb 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -23,7 +23,10 @@ use syntax::{ }; use text_edit::Indel; -use crate::CompletionConfig; +use crate::{ + context::analysis::{expand_and_analyze, AnalysisResult}, + CompletionConfig, +}; const COMPLETION_MARKER: &str = "intellijRulezz"; @@ -561,15 +564,27 @@ impl<'a> CompletionContext<'a> { let edit = Indel::insert(offset, COMPLETION_MARKER.to_string()); parse.reparse(&edit).tree() }; - let fake_ident_token = - file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?; + // always pick the token to the immediate left of the cursor, as that is what we are actually + // completing on let original_token = original_file.syntax().token_at_offset(offset).left_biased()?; - let token = sema.descend_into_macros_single(original_token.clone()); + + let AnalysisResult { + analysis, + expected: (expected_type, expected_name), + qualifier_ctx, + token, + offset, + } = expand_and_analyze( + &sema, + original_file.syntax().clone(), + file_with_fake_ident.syntax().clone(), + offset, + &original_token, + )?; // adjust for macro input, this still fails if there is no token written yet - let scope_offset = if original_token == token { offset } else { token.text_range().end() }; - let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?; + let scope = sema.scope_at_offset(&token.parent()?, offset)?; let krate = scope.krate(); let module = scope.module(); @@ -583,7 +598,7 @@ impl<'a> CompletionContext<'a> { let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count(); - let mut ctx = CompletionContext { + let ctx = CompletionContext { sema, scope, db, @@ -593,19 +608,13 @@ impl<'a> CompletionContext<'a> { token, krate, module, - expected_name: None, - expected_type: None, - qualifier_ctx: Default::default(), + expected_name, + expected_type, + qualifier_ctx, locals, depth_from_crate_root, }; - let ident_ctx = ctx.expand_and_analyze( - original_file.syntax().clone(), - file_with_fake_ident.syntax().clone(), - offset, - fake_ident_token, - )?; - Some((ctx, ident_ctx)) + Some((ctx, analysis)) } } diff --git a/crates/ide-completion/src/context/analysis.rs b/crates/ide-completion/src/context/analysis.rs index 01dd9a234f5..04111ec7efa 100644 --- a/crates/ide-completion/src/context/analysis.rs +++ b/crates/ide-completion/src/context/analysis.rs @@ -11,1063 +11,1094 @@ use syntax::{ }; use crate::context::{ - AttrCtx, 
CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx, - ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext, - NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext, - PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation, - COMPLETION_MARKER, + AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext, + LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind, + PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx, + TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER, }; -impl<'a> CompletionContext<'a> { - /// Expand attributes and macro calls at the current cursor position for both the original file - /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original - /// and speculative states stay in sync. - pub(super) fn expand_and_analyze( - &mut self, - mut original_file: SyntaxNode, - mut speculative_file: SyntaxNode, - mut offset: TextSize, - mut fake_ident_token: SyntaxToken, - ) -> Option { - let _p = profile::span("CompletionContext::expand_and_fill"); - let mut derive_ctx = None; +struct ExpansionResult { + original_file: SyntaxNode, + speculative_file: SyntaxNode, + offset: TextSize, + fake_ident_token: SyntaxToken, + derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>, +} - 'expansion: loop { - let parent_item = - |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); - let ancestor_items = iter::successors( - Option::zip( - find_node_at_offset::(&original_file, offset), - find_node_at_offset::(&speculative_file, offset), +pub(super) struct AnalysisResult { + pub(super) analysis: CompletionAnalysis, + pub(super) expected: (Option, Option), + pub(super) qualifier_ctx: QualifierCtx, + pub(super) token: SyntaxToken, + pub(super) offset: TextSize, +} + +pub(super) fn expand_and_analyze( + sema: &Semantics<'_, RootDatabase>, + original_file: SyntaxNode, + speculative_file: SyntaxNode, + offset: TextSize, + original_token: &SyntaxToken, +) -> Option { + // as we insert after the offset, right biased will *always* pick the identifier no matter + // if there is an ident already typed or not + let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?; + // the relative offset between the cursor and the *identifier* token we are completing on + let relative_offset = offset - fake_ident_token.text_range().start(); + // make the offset point to the start of the original token, as that is what the + // intermediate offsets calculated in expansion always points to + let offset = offset - relative_offset; + let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token); + // add the relative offset back, so that left_biased finds the proper token + let offset = expansion.offset + relative_offset; + let token = expansion.original_file.token_at_offset(offset).left_biased()?; + + analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| { + AnalysisResult { analysis, expected, qualifier_ctx, token, offset } + }) +} + +/// Expand attributes and macro calls at the current cursor position for both the original file +/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original +/// and speculative states stay in sync. 
+fn expand( + sema: &Semantics<'_, RootDatabase>, + mut original_file: SyntaxNode, + mut speculative_file: SyntaxNode, + mut offset: TextSize, + mut fake_ident_token: SyntaxToken, +) -> ExpansionResult { + let _p = profile::span("CompletionContext::expand"); + let mut derive_ctx = None; + + 'expansion: loop { + let parent_item = + |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast); + let ancestor_items = iter::successors( + Option::zip( + find_node_at_offset::(&original_file, offset), + find_node_at_offset::(&speculative_file, offset), + ), + |(a, b)| parent_item(a).zip(parent_item(b)), + ); + + // first try to expand attributes as these are always the outermost macro calls + 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items { + match ( + sema.expand_attr_macro(&actual_item), + sema.speculative_expand_attr_macro( + &actual_item, + &item_with_fake_ident, + fake_ident_token.clone(), ), - |(a, b)| parent_item(a).zip(parent_item(b)), - ); - - // first try to expand attributes as these are always the outermost macro calls - 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items { - match ( - self.sema.expand_attr_macro(&actual_item), - self.sema.speculative_expand_attr_macro( - &actual_item, - &item_with_fake_ident, - fake_ident_token.clone(), - ), - ) { - // maybe parent items have attributes, so continue walking the ancestors - (None, None) => continue 'ancestors, - // successful expansions - (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { - let new_offset = fake_mapped_token.text_range().start(); - if new_offset > actual_expansion.text_range().end() { - // offset outside of bounds from the original expansion, - // stop here to prevent problems from happening - break 'expansion; - } - original_file = actual_expansion; - speculative_file = fake_expansion; - fake_ident_token = fake_mapped_token; - offset = new_offset; - continue 'expansion; - } - // exactly one expansion failed, inconsistent state so stop expanding completely - _ => break 'expansion, - } - } - - // No attributes have been expanded, so look for macro_call! 
token trees or derive token trees - let orig_tt = match find_node_at_offset::(&original_file, offset) { - Some(it) => it, - None => break 'expansion, - }; - let spec_tt = match find_node_at_offset::(&speculative_file, offset) { - Some(it) => it, - None => break 'expansion, - }; - - // Expand pseudo-derive expansion - if let (Some(orig_attr), Some(spec_attr)) = ( - orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()), - spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()), ) { - if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = ( - self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr), - self.sema.speculative_expand_derive_as_pseudo_attr_macro( - &orig_attr, - &spec_attr, - fake_ident_token.clone(), - ), - ) { - derive_ctx = Some(( - actual_expansion, - fake_expansion, - fake_mapped_token.text_range().start(), - orig_attr, - )); - } - // at this point we won't have any more successful expansions, so stop - break 'expansion; - } - - // Expand fn-like macro calls - if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( - orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast), - spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast), - ) { - let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text()); - let mac_call_path1 = - macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()); - - // inconsistent state, stop expanding - if mac_call_path0 != mac_call_path1 { - break 'expansion; - } - let speculative_args = match macro_call_with_fake_ident.token_tree() { - Some(tt) => tt, - None => break 'expansion, - }; - - match ( - self.sema.expand(&actual_macro_call), - self.sema.speculative_expand( - &actual_macro_call, - &speculative_args, - fake_ident_token.clone(), - ), - ) { - // successful expansions - (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { - let new_offset = fake_mapped_token.text_range().start(); - if new_offset > actual_expansion.text_range().end() { - // offset outside of bounds from the original expansion, - // stop here to prevent problems from happening - break 'expansion; - } - original_file = actual_expansion; - speculative_file = fake_expansion; - fake_ident_token = fake_mapped_token; - offset = new_offset; - continue 'expansion; + // maybe parent items have attributes, so continue walking the ancestors + (None, None) => continue 'ancestors, + // successful expansions + (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { + let new_offset = fake_mapped_token.text_range().start(); + if new_offset > actual_expansion.text_range().end() { + // offset outside of bounds from the original expansion, + // stop here to prevent problems from happening + break 'expansion; } - // at least on expansion failed, we won't have anything to expand from this point - // onwards so break out - _ => break 'expansion, + original_file = actual_expansion; + speculative_file = fake_expansion; + fake_ident_token = fake_mapped_token; + offset = new_offset; + continue 'expansion; } + // exactly one expansion failed, inconsistent state so stop expanding completely + _ => break 'expansion, } + } - // none of our states have changed so stop the loop + // No attributes have been expanded, so look for macro_call! 
token trees or derive token trees + let orig_tt = match find_node_at_offset::(&original_file, offset) { + Some(it) => it, + None => break 'expansion, + }; + let spec_tt = match find_node_at_offset::(&speculative_file, offset) { + Some(it) => it, + None => break 'expansion, + }; + + // Expand pseudo-derive expansion + if let (Some(orig_attr), Some(spec_attr)) = ( + orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()), + spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()), + ) { + if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = ( + sema.expand_derive_as_pseudo_attr_macro(&orig_attr), + sema.speculative_expand_derive_as_pseudo_attr_macro( + &orig_attr, + &spec_attr, + fake_ident_token.clone(), + ), + ) { + derive_ctx = Some(( + actual_expansion, + fake_expansion, + fake_mapped_token.text_range().start(), + orig_attr, + )); + } + // at this point we won't have any more successful expansions, so stop break 'expansion; } - self.analyze(&original_file, speculative_file, offset, derive_ctx) + // Expand fn-like macro calls + if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = ( + orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast), + spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast), + ) { + let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text()); + let mac_call_path1 = + macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text()); + + // inconsistent state, stop expanding + if mac_call_path0 != mac_call_path1 { + break 'expansion; + } + let speculative_args = match macro_call_with_fake_ident.token_tree() { + Some(tt) => tt, + None => break 'expansion, + }; + + match ( + sema.expand(&actual_macro_call), + sema.speculative_expand( + &actual_macro_call, + &speculative_args, + fake_ident_token.clone(), + ), + ) { + // successful expansions + (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => { + let new_offset = fake_mapped_token.text_range().start(); + if new_offset > actual_expansion.text_range().end() { + // offset outside of bounds from the original expansion, + // stop here to prevent problems from happening + break 'expansion; + } + original_file = actual_expansion; + speculative_file = fake_expansion; + fake_ident_token = fake_mapped_token; + offset = new_offset; + continue 'expansion; + } + // at least on expansion failed, we won't have anything to expand from this point + // onwards so break out + _ => break 'expansion, + } + } + + // none of our states have changed so stop the loop + break 'expansion; + } + ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } +} + +/// Fill the completion context, this is what does semantic reasoning about the surrounding context +/// of the completion location. 
+fn analyze( + sema: &Semantics<'_, RootDatabase>, + expansion_result: ExpansionResult, + original_token: &SyntaxToken, + self_token: &SyntaxToken, +) -> Option<(CompletionAnalysis, (Option, Option), QualifierCtx)> { + let _p = profile::span("CompletionContext::analyze"); + let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } = + expansion_result; + let syntax_element = NodeOrToken::Token(fake_ident_token); + if is_in_token_of_for_loop(syntax_element.clone()) { + // for pat $0 + // there is nothing to complete here except `in` keyword + // don't bother populating the context + // FIXME: the completion calculations should end up good enough + // such that this special case becomes unnecessary + return None; } - /// Calculate the expected type and name of the cursor position. - fn expected_type_and_name( - &self, - name_like: &ast::NameLike, - ) -> (Option, Option) { - let mut node = match self.token.parent() { - Some(it) => it, - None => return (None, None), - }; - - let strip_refs = |mut ty: Type| match name_like { - ast::NameLike::NameRef(n) => { - let p = match n.syntax().parent() { - Some(it) => it, - None => return ty, + // Overwrite the path kind for derives + if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx { + if let Some(ast::NameLike::NameRef(name_ref)) = + find_node_at_offset(&file_with_fake_ident, offset) + { + let parent = name_ref.syntax().parent()?; + let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?; + if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind { + path_ctx.kind = PathKind::Derive { + existing_derives: sema + .resolve_derive_macro(&origin_attr) + .into_iter() + .flatten() + .flatten() + .collect(), }; - let top_syn = match_ast! { - match p { - ast::FieldExpr(e) => e - .syntax() - .ancestors() - .map_while(ast::FieldExpr::cast) - .last() - .map(|it| it.syntax().clone()), - ast::PathSegment(e) => e - .syntax() - .ancestors() - .skip(1) - .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind())) - .find_map(ast::PathExpr::cast) - .map(|it| it.syntax().clone()), - _ => None - } - }; - let top_syn = match top_syn { - Some(it) => it, - None => return ty, - }; - for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) { - cov_mark::hit!(expected_type_fn_param_ref); - ty = ty.strip_reference(); - } - ty } - _ => ty, - }; + return Some(( + CompletionAnalysis::NameRef(nameref_ctx), + (None, None), + QualifierCtx::default(), + )); + } + return None; + } - loop { - break match_ast! 
{ - match node { - ast::LetStmt(it) => { - cov_mark::hit!(expected_type_let_with_leading_char); - cov_mark::hit!(expected_type_let_without_leading_char); - let ty = it.pat() - .and_then(|pat| self.sema.type_of_pat(&pat)) - .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it))) - .map(TypeInfo::original); - let name = match it.pat() { - Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name), - Some(_) | None => None, - }; - - (ty, name) - }, - ast::LetExpr(it) => { - cov_mark::hit!(expected_type_if_let_without_leading_char); - let ty = it.pat() - .and_then(|pat| self.sema.type_of_pat(&pat)) - .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it))) - .map(TypeInfo::original); - (ty, None) - }, - ast::ArgList(_) => { - cov_mark::hit!(expected_type_fn_param); - ActiveParameter::at_token( - &self.sema, - self.token.clone(), - ).map(|ap| { - let name = ap.ident().map(NameOrNameRef::Name); - - let ty = strip_refs(ap.ty); - (Some(ty), name) - }) - .unwrap_or((None, None)) - }, - ast::RecordExprFieldList(it) => { - // wouldn't try {} be nice... - (|| { - if self.token.kind() == T![..] - || self.token.prev_token().map(|t| t.kind()) == Some(T![..]) - { - cov_mark::hit!(expected_type_struct_func_update); - let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?; - let ty = self.sema.type_of_expr(&record_expr.into())?; - Some(( - Some(ty.original), - None - )) - } else { - cov_mark::hit!(expected_type_struct_field_without_leading_char); - let expr_field = self.token.prev_sibling_or_token()? - .into_node() - .and_then(ast::RecordExprField::cast)?; - let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?; - Some(( - Some(ty), - expr_field.field_name().map(NameOrNameRef::NameRef), - )) - } - })().unwrap_or((None, None)) - }, - ast::RecordExprField(it) => { - if let Some(expr) = it.expr() { - cov_mark::hit!(expected_type_struct_field_with_leading_char); - ( - self.sema.type_of_expr(&expr).map(TypeInfo::original), - it.field_name().map(NameOrNameRef::NameRef), - ) - } else { - cov_mark::hit!(expected_type_struct_field_followed_by_comma); - let ty = self.sema.resolve_record_field(&it) - .map(|(_, _, ty)| ty); - ( - ty, - it.field_name().map(NameOrNameRef::NameRef), - ) - } - }, - // match foo { $0 } - // match foo { ..., pat => $0 } - ast::MatchExpr(it) => { - let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind()); - - let ty = if on_arrow { - // match foo { ..., pat => $0 } - cov_mark::hit!(expected_type_match_arm_body_without_leading_char); - cov_mark::hit!(expected_type_match_arm_body_with_leading_char); - self.sema.type_of_expr(&it.into()) - } else { - // match foo { $0 } - cov_mark::hit!(expected_type_match_arm_without_leading_char); - it.expr().and_then(|e| self.sema.type_of_expr(&e)) - }.map(TypeInfo::original); - (ty, None) - }, - ast::IfExpr(it) => { - let ty = it.condition() - .and_then(|e| self.sema.type_of_expr(&e)) - .map(TypeInfo::original); - (ty, None) - }, - ast::IdentPat(it) => { - cov_mark::hit!(expected_type_if_let_with_leading_char); - cov_mark::hit!(expected_type_match_arm_with_leading_char); - let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original); - (ty, None) - }, - ast::Fn(it) => { - cov_mark::hit!(expected_type_fn_ret_with_leading_char); - cov_mark::hit!(expected_type_fn_ret_without_leading_char); - let def = self.sema.to_def(&it); - (def.map(|def| def.ret_type(self.db)), None) - }, - ast::ClosureExpr(it) => { - let ty = 
self.sema.type_of_expr(&it.into()); - ty.and_then(|ty| ty.original.as_callable(self.db)) - .map(|c| (Some(c.return_type()), None)) - .unwrap_or((None, None)) - }, - ast::ParamList(_) => (None, None), - ast::Stmt(_) => (None, None), - ast::Item(_) => (None, None), - _ => { - match node.parent() { - Some(n) => { - node = n; - continue; - }, - None => (None, None), - } - }, + let name_like = match find_node_at_offset(&speculative_file, offset) { + Some(it) => it, + None => { + let analysis = if let Some(original) = ast::String::cast(original_token.clone()) { + CompletionAnalysis::String { + original, + expanded: ast::String::cast(self_token.clone()), + } + } else { + // Fix up trailing whitespace problem + // #[attr(foo = $0 + let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?; + let p = token.parent()?; + if p.kind() == SyntaxKind::TOKEN_TREE + && p.ancestors().any(|it| it.kind() == SyntaxKind::META) + { + let colon_prefix = previous_non_trivia_token(self_token.clone()) + .map_or(false, |it| T![:] == it.kind()); + CompletionAnalysis::UnexpandedAttrTT { + fake_attribute_under_caret: syntax_element + .ancestors() + .find_map(ast::Attr::cast), + colon_prefix, + } + } else { + return None; } }; + return Some((analysis, (None, None), QualifierCtx::default())); } - } - - /// Fill the completion context, this is what does semantic reasoning about the surrounding context - /// of the completion location. - fn analyze( - &mut self, - original_file: &SyntaxNode, - file_with_fake_ident: SyntaxNode, - offset: TextSize, - derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>, - ) -> Option { - let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?; - let syntax_element = NodeOrToken::Token(fake_ident_token); - if is_in_token_of_for_loop(syntax_element.clone()) { - // for pat $0 - // there is nothing to complete here except `in` keyword - // don't bother populating the context - // FIXME: the completion calculations should end up good enough - // such that this special case becomes unnecessary - return None; + }; + let expected = expected_type_and_name(sema, &self_token, &name_like); + let mut qual_ctx = QualifierCtx::default(); + let analysis = match name_like { + ast::NameLike::Lifetime(lifetime) => { + CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?) } + ast::NameLike::NameRef(name_ref) => { + let parent = name_ref.syntax().parent()?; + let (nameref_ctx, qualifier_ctx) = + classify_name_ref(sema, &original_file, name_ref, parent.clone())?; + qual_ctx = qualifier_ctx; + CompletionAnalysis::NameRef(nameref_ctx) + } + ast::NameLike::Name(name) => { + let name_ctx = classify_name(sema, &original_file, name)?; + CompletionAnalysis::Name(name_ctx) + } + }; + Some((analysis, expected, qual_ctx)) +} - // Overwrite the path kind for derives - if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx { - if let Some(ast::NameLike::NameRef(name_ref)) = - find_node_at_offset(&file_with_fake_ident, offset) - { - let parent = name_ref.syntax().parent()?; - let (mut nameref_ctx, _) = - Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?; - if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind { - path_ctx.kind = PathKind::Derive { - existing_derives: self - .sema - .resolve_derive_macro(&origin_attr) - .into_iter() - .flatten() - .flatten() - .collect(), - }; +/// Calculate the expected type and name of the cursor position. 
+fn expected_type_and_name( + sema: &Semantics<'_, RootDatabase>, + token: &SyntaxToken, + name_like: &ast::NameLike, +) -> (Option, Option) { + let mut node = match token.parent() { + Some(it) => it, + None => return (None, None), + }; + + let strip_refs = |mut ty: Type| match name_like { + ast::NameLike::NameRef(n) => { + let p = match n.syntax().parent() { + Some(it) => it, + None => return ty, + }; + let top_syn = match_ast! { + match p { + ast::FieldExpr(e) => e + .syntax() + .ancestors() + .map_while(ast::FieldExpr::cast) + .last() + .map(|it| it.syntax().clone()), + ast::PathSegment(e) => e + .syntax() + .ancestors() + .skip(1) + .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind())) + .find_map(ast::PathExpr::cast) + .map(|it| it.syntax().clone()), + _ => None } - return Some(CompletionAnalysis::NameRef(nameref_ctx)); + }; + let top_syn = match top_syn { + Some(it) => it, + None => return ty, + }; + for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) { + cov_mark::hit!(expected_type_fn_param_ref); + ty = ty.strip_reference(); } - return None; + ty } + _ => ty, + }; - let name_like = match find_node_at_offset(&file_with_fake_ident, offset) { - Some(it) => it, - None => { - let analysis = - if let Some(original) = ast::String::cast(self.original_token.clone()) { - CompletionAnalysis::String { - original, - expanded: ast::String::cast(self.token.clone()), - } - } else { - // Fix up trailing whitespace problem - // #[attr(foo = $0 - let token = - syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?; - let p = token.parent()?; - if p.kind() == SyntaxKind::TOKEN_TREE - && p.ancestors().any(|it| it.kind() == SyntaxKind::META) - { - let colon_prefix = previous_non_trivia_token(self.token.clone()) - .map_or(false, |it| T![:] == it.kind()); - CompletionAnalysis::UnexpandedAttrTT { - fake_attribute_under_caret: syntax_element - .ancestors() - .find_map(ast::Attr::cast), - colon_prefix, - } - } else { - return None; - } + loop { + break match_ast! { + match node { + ast::LetStmt(it) => { + cov_mark::hit!(expected_type_let_with_leading_char); + cov_mark::hit!(expected_type_let_without_leading_char); + let ty = it.pat() + .and_then(|pat| sema.type_of_pat(&pat)) + .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it))) + .map(TypeInfo::original); + let name = match it.pat() { + Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name), + Some(_) | None => None, }; - return Some(analysis); - } - }; - (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like); - let analysis = match name_like { - ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime( - Self::classify_lifetime(&self.sema, original_file, lifetime)?, - ), - ast::NameLike::NameRef(name_ref) => { - let parent = name_ref.syntax().parent()?; - let (nameref_ctx, qualifier_ctx) = - Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?; - self.qualifier_ctx = qualifier_ctx; - CompletionAnalysis::NameRef(nameref_ctx) - } - ast::NameLike::Name(name) => { - let name_ctx = Self::classify_name(&self.sema, original_file, name)?; - CompletionAnalysis::Name(name_ctx) - } - }; - Some(analysis) - } - - fn classify_lifetime( - _sema: &Semantics<'_, RootDatabase>, - original_file: &SyntaxNode, - lifetime: ast::Lifetime, - ) -> Option { - let parent = lifetime.syntax().parent()?; - if parent.kind() == SyntaxKind::ERROR { - return None; - } - - let kind = match_ast! 
{ - match parent { - ast::LifetimeParam(param) => LifetimeKind::LifetimeParam { - is_decl: param.lifetime().as_ref() == Some(&lifetime), - param + (ty, name) }, - ast::BreakExpr(_) => LifetimeKind::LabelRef, - ast::ContinueExpr(_) => LifetimeKind::LabelRef, - ast::Label(_) => LifetimeKind::LabelDef, - _ => LifetimeKind::Lifetime, - } - }; - let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start()); + ast::LetExpr(it) => { + cov_mark::hit!(expected_type_if_let_without_leading_char); + let ty = it.pat() + .and_then(|pat| sema.type_of_pat(&pat)) + .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it))) + .map(TypeInfo::original); + (ty, None) + }, + ast::ArgList(_) => { + cov_mark::hit!(expected_type_fn_param); + ActiveParameter::at_token( + &sema, + token.clone(), + ).map(|ap| { + let name = ap.ident().map(NameOrNameRef::Name); - Some(LifetimeContext { lifetime, kind }) - } - - fn classify_name( - sema: &Semantics<'_, RootDatabase>, - original_file: &SyntaxNode, - name: ast::Name, - ) -> Option { - let parent = name.syntax().parent()?; - let kind = match_ast! { - match parent { - ast::Const(_) => NameKind::Const, - ast::ConstParam(_) => NameKind::ConstParam, - ast::Enum(_) => NameKind::Enum, - ast::Fn(_) => NameKind::Function, - ast::IdentPat(bind_pat) => { - let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into()); - if let Some(record_field) = ast::RecordPatField::for_field_name(&name) { - pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat()); + let ty = strip_refs(ap.ty); + (Some(ty), name) + }) + .unwrap_or((None, None)) + }, + ast::RecordExprFieldList(it) => { + // wouldn't try {} be nice... + (|| { + if token.kind() == T![..] + ||token.prev_token().map(|t| t.kind()) == Some(T![..]) + { + cov_mark::hit!(expected_type_struct_func_update); + let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?; + let ty = sema.type_of_expr(&record_expr.into())?; + Some(( + Some(ty.original), + None + )) + } else { + cov_mark::hit!(expected_type_struct_field_without_leading_char); + let expr_field = token.prev_sibling_or_token()? 
+ .into_node() + .and_then(ast::RecordExprField::cast)?; + let (_, _, ty) = sema.resolve_record_field(&expr_field)?; + Some(( + Some(ty), + expr_field.field_name().map(NameOrNameRef::NameRef), + )) + } + })().unwrap_or((None, None)) + }, + ast::RecordExprField(it) => { + if let Some(expr) = it.expr() { + cov_mark::hit!(expected_type_struct_field_with_leading_char); + ( + sema.type_of_expr(&expr).map(TypeInfo::original), + it.field_name().map(NameOrNameRef::NameRef), + ) + } else { + cov_mark::hit!(expected_type_struct_field_followed_by_comma); + let ty = sema.resolve_record_field(&it) + .map(|(_, _, ty)| ty); + ( + ty, + it.field_name().map(NameOrNameRef::NameRef), + ) + } + }, + // match foo { $0 } + // match foo { ..., pat => $0 } + ast::MatchExpr(it) => { + let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind()); + + let ty = if on_arrow { + // match foo { ..., pat => $0 } + cov_mark::hit!(expected_type_match_arm_body_without_leading_char); + cov_mark::hit!(expected_type_match_arm_body_with_leading_char); + sema.type_of_expr(&it.into()) + } else { + // match foo { $0 } + cov_mark::hit!(expected_type_match_arm_without_leading_char); + it.expr().and_then(|e| sema.type_of_expr(&e)) + }.map(TypeInfo::original); + (ty, None) + }, + ast::IfExpr(it) => { + let ty = it.condition() + .and_then(|e| sema.type_of_expr(&e)) + .map(TypeInfo::original); + (ty, None) + }, + ast::IdentPat(it) => { + cov_mark::hit!(expected_type_if_let_with_leading_char); + cov_mark::hit!(expected_type_match_arm_with_leading_char); + let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original); + (ty, None) + }, + ast::Fn(it) => { + cov_mark::hit!(expected_type_fn_ret_with_leading_char); + cov_mark::hit!(expected_type_fn_ret_without_leading_char); + let def = sema.to_def(&it); + (def.map(|def| def.ret_type(sema.db)), None) + }, + ast::ClosureExpr(it) => { + let ty = sema.type_of_expr(&it.into()); + ty.and_then(|ty| ty.original.as_callable(sema.db)) + .map(|c| (Some(c.return_type()), None)) + .unwrap_or((None, None)) + }, + ast::ParamList(_) => (None, None), + ast::Stmt(_) => (None, None), + ast::Item(_) => (None, None), + _ => { + match node.parent() { + Some(n) => { + node = n; + continue; + }, + None => (None, None), } - - NameKind::IdentPat(pat_ctx) }, - ast::MacroDef(_) => NameKind::MacroDef, - ast::MacroRules(_) => NameKind::MacroRules, - ast::Module(module) => NameKind::Module(module), - ast::RecordField(_) => NameKind::RecordField, - ast::Rename(_) => NameKind::Rename, - ast::SelfParam(_) => NameKind::SelfParam, - ast::Static(_) => NameKind::Static, - ast::Struct(_) => NameKind::Struct, - ast::Trait(_) => NameKind::Trait, - ast::TypeAlias(_) => NameKind::TypeAlias, - ast::TypeParam(_) => NameKind::TypeParam, - ast::Union(_) => NameKind::Union, - ast::Variant(_) => NameKind::Variant, - _ => return None, } }; - let name = find_node_at_offset(&original_file, name.syntax().text_range().start()); - Some(NameContext { name, kind }) + } +} + +fn classify_lifetime( + _sema: &Semantics<'_, RootDatabase>, + original_file: &SyntaxNode, + lifetime: ast::Lifetime, +) -> Option { + let parent = lifetime.syntax().parent()?; + if parent.kind() == SyntaxKind::ERROR { + return None; } - fn classify_name_ref( - sema: &Semantics<'_, RootDatabase>, - original_file: &SyntaxNode, - name_ref: ast::NameRef, - parent: SyntaxNode, - ) -> Option<(NameRefContext, QualifierCtx)> { - let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); + let kind = 
match_ast! { + match parent { + ast::LifetimeParam(param) => LifetimeKind::LifetimeParam { + is_decl: param.lifetime().as_ref() == Some(&lifetime), + param + }, + ast::BreakExpr(_) => LifetimeKind::LabelRef, + ast::ContinueExpr(_) => LifetimeKind::LabelRef, + ast::Label(_) => LifetimeKind::LabelDef, + _ => LifetimeKind::Lifetime, + } + }; + let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start()); - let make_res = - |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default()); + Some(LifetimeContext { lifetime, kind }) +} - if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) { - let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone()) - .map_or(false, |it| T![.] == it.kind()); +fn classify_name( + sema: &Semantics<'_, RootDatabase>, + original_file: &SyntaxNode, + name: ast::Name, +) -> Option { + let parent = name.syntax().parent()?; + let kind = match_ast! { + match parent { + ast::Const(_) => NameKind::Const, + ast::ConstParam(_) => NameKind::ConstParam, + ast::Enum(_) => NameKind::Enum, + ast::Fn(_) => NameKind::Function, + ast::IdentPat(bind_pat) => { + let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into()); + if let Some(record_field) = ast::RecordPatField::for_field_name(&name) { + pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat()); + } - return find_node_in_file_compensated( + NameKind::IdentPat(pat_ctx) + }, + ast::MacroDef(_) => NameKind::MacroDef, + ast::MacroRules(_) => NameKind::MacroRules, + ast::Module(module) => NameKind::Module(module), + ast::RecordField(_) => NameKind::RecordField, + ast::Rename(_) => NameKind::Rename, + ast::SelfParam(_) => NameKind::SelfParam, + ast::Static(_) => NameKind::Static, + ast::Struct(_) => NameKind::Struct, + ast::Trait(_) => NameKind::Trait, + ast::TypeAlias(_) => NameKind::TypeAlias, + ast::TypeParam(_) => NameKind::TypeParam, + ast::Union(_) => NameKind::Union, + ast::Variant(_) => NameKind::Variant, + _ => return None, + } + }; + let name = find_node_at_offset(&original_file, name.syntax().text_range().start()); + Some(NameContext { name, kind }) +} + +fn classify_name_ref( + sema: &Semantics<'_, RootDatabase>, + original_file: &SyntaxNode, + name_ref: ast::NameRef, + parent: SyntaxNode, +) -> Option<(NameRefContext, QualifierCtx)> { + let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start()); + + let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default()); + + if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) { + let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone()) + .map_or(false, |it| T![.] 
== it.kind()); + + return find_node_in_file_compensated( + sema, + original_file, + &record_field.parent_record_lit(), + ) + .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix }) + .map(make_res); + } + if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) { + let kind = NameRefKind::Pattern(PatternContext { + param_ctx: None, + has_type_ascription: false, + ref_token: None, + mut_token: None, + record_pat: find_node_in_file_compensated( sema, original_file, - &record_field.parent_record_lit(), + &record_field.parent_record_pat(), + ), + ..pattern_context_for( + sema, + original_file, + record_field.parent_record_pat().clone().into(), ) - .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix }) - .map(make_res); - } - if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) { - let kind = NameRefKind::Pattern(PatternContext { - param_ctx: None, - has_type_ascription: false, - ref_token: None, - mut_token: None, - record_pat: find_node_in_file_compensated( - sema, - original_file, - &record_field.parent_record_pat(), - ), - ..pattern_context_for( - sema, - original_file, - record_field.parent_record_pat().clone().into(), - ) - }); - return Some(make_res(kind)); - } + }); + return Some(make_res(kind)); + } - let segment = match_ast! { + let segment = match_ast! { + match parent { + ast::PathSegment(segment) => segment, + ast::FieldExpr(field) => { + let receiver = find_opt_node_in_file(original_file, field.expr()); + let receiver_is_ambiguous_float_literal = match &receiver { + Some(ast::Expr::Literal(l)) => matches! { + l.kind(), + ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.')) + }, + _ => false, + }; + let kind = NameRefKind::DotAccess(DotAccess { + receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), + kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal }, + receiver + }); + return Some(make_res(kind)); + }, + ast::MethodCallExpr(method) => { + let receiver = find_opt_node_in_file(original_file, method.receiver()); + let kind = NameRefKind::DotAccess(DotAccess { + receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), + kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) }, + receiver + }); + return Some(make_res(kind)); + }, + _ => return None, + } + }; + + let path = segment.parent_path(); + let original_path = find_node_in_file_compensated(sema, original_file, &path); + + let mut path_ctx = PathCompletionCtx { + has_call_parens: false, + has_macro_bang: false, + qualified: Qualified::No, + parent: None, + path: path.clone(), + original_path, + kind: PathKind::Item { kind: ItemListKind::SourceFile }, + has_type_args: false, + use_tree_parent: false, + }; + + let is_in_block = |it: &SyntaxNode| { + it.parent() + .map(|node| { + ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind()) + }) + .unwrap_or(false) + }; + let func_update_record = |syn: &SyntaxNode| { + if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) { + find_node_in_file_compensated(sema, original_file, &record_expr) + } else { + None + } + }; + let after_if_expr = |node: SyntaxNode| { + let prev_expr = (|| { + let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?; + ast::ExprStmt::cast(prev_sibling)?.expr() + })(); + matches!(prev_expr, Some(ast::Expr::IfExpr(_))) + }; + + // We do not want to generate path completions when we are 
sandwiched between an item decl signature and its body. + // ex. trait Foo $0 {} + // in these cases parser recovery usually kicks in for our inserted identifier, causing it + // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block + // expression or an item list. + // The following code checks if the body is missing, if it is we either cut off the body + // from the item or it was missing in the first place + let inbetween_body_and_decl_check = |node: SyntaxNode| { + if let Some(NodeOrToken::Node(n)) = + syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev) + { + if let Some(item) = ast::Item::cast(n) { + let is_inbetween = match &item { + ast::Item::Const(it) => it.body().is_none(), + ast::Item::Enum(it) => it.variant_list().is_none(), + ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), + ast::Item::Fn(it) => it.body().is_none(), + ast::Item::Impl(it) => it.assoc_item_list().is_none(), + ast::Item::Module(it) => it.item_list().is_none(), + ast::Item::Static(it) => it.body().is_none(), + ast::Item::Struct(it) => it.field_list().is_none(), + ast::Item::Trait(it) => it.assoc_item_list().is_none(), + ast::Item::TypeAlias(it) => it.ty().is_none(), + ast::Item::Union(it) => it.record_field_list().is_none(), + _ => false, + }; + if is_inbetween { + return Some(item); + } + } + } + None + }; + + let type_location = |node: &SyntaxNode| { + let parent = node.parent()?; + let res = match_ast! { match parent { - ast::PathSegment(segment) => segment, - ast::FieldExpr(field) => { - let receiver = find_opt_node_in_file(original_file, field.expr()); - let receiver_is_ambiguous_float_literal = match &receiver { - Some(ast::Expr::Literal(l)) => matches! { - l.kind(), - ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.')) - }, - _ => false, + ast::Const(it) => { + let name = find_opt_node_in_file(original_file, it.name())?; + let original = ast::Const::cast(name.syntax().parent()?)?; + TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body())) + }, + ast::RetType(it) => { + if it.thin_arrow_token().is_none() { + return None; + } + let parent = match ast::Fn::cast(parent.parent()?) { + Some(x) => x.param_list(), + None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(), }; - let kind = NameRefKind::DotAccess(DotAccess { - receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), - kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal }, - receiver - }); - return Some(make_res(kind)); + + let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?; + TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! 
{ + match parent { + ast::ClosureExpr(it) => { + it.body() + }, + ast::Fn(it) => { + it.body().map(ast::Expr::BlockExpr) + }, + _ => return None, + } + })) }, - ast::MethodCallExpr(method) => { - let receiver = find_opt_node_in_file(original_file, method.receiver()); - let kind = NameRefKind::DotAccess(DotAccess { - receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)), - kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) }, - receiver - }); - return Some(make_res(kind)); + ast::Param(it) => { + if it.colon_token().is_none() { + return None; + } + TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat()))) }, + ast::LetStmt(it) => { + if it.colon_token().is_none() { + return None; + } + TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat()))) + }, + ast::Impl(it) => { + match it.trait_() { + Some(t) if t.syntax() == node => TypeLocation::ImplTrait, + _ => match it.self_ty() { + Some(t) if t.syntax() == node => TypeLocation::ImplTarget, + _ => return None, + }, + } + }, + ast::TypeBound(_) => TypeLocation::TypeBound, + // is this case needed? + ast::TypeBoundList(_) => TypeLocation::TypeBound, + ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))), + // is this case needed? + ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))), + ast::TupleField(_) => TypeLocation::TupleField, _ => return None, } }; + Some(res) + }; - let path = segment.parent_path(); - let original_path = find_node_in_file_compensated(sema, original_file, &path); - - let mut path_ctx = PathCompletionCtx { - has_call_parens: false, - has_macro_bang: false, - qualified: Qualified::No, - parent: None, - path: path.clone(), - original_path, - kind: PathKind::Item { kind: ItemListKind::SourceFile }, - has_type_args: false, - use_tree_parent: false, - }; - - let is_in_block = |it: &SyntaxNode| { - it.parent() - .map(|node| { - ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind()) - }) - .unwrap_or(false) - }; - let func_update_record = |syn: &SyntaxNode| { - if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) { - find_node_in_file_compensated(sema, original_file, &record_expr) + let is_in_condition = |it: &ast::Expr| { + (|| { + let parent = it.syntax().parent()?; + if let Some(expr) = ast::WhileExpr::cast(parent.clone()) { + Some(expr.condition()? == *it) + } else if let Some(expr) = ast::IfExpr::cast(parent) { + Some(expr.condition()? == *it) } else { None } - }; - let after_if_expr = |node: SyntaxNode| { - let prev_expr = (|| { - let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?; - ast::ExprStmt::cast(prev_sibling)?.expr() - })(); - matches!(prev_expr, Some(ast::Expr::IfExpr(_))) - }; + })() + .unwrap_or(false) + }; - // We do not want to generate path completions when we are sandwiched between an item decl signature and its body. - // ex. trait Foo $0 {} - // in these cases parser recovery usually kicks in for our inserted identifier, causing it - // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block - // expression or an item list. 
- // The following code checks if the body is missing, if it is we either cut off the body - // from the item or it was missing in the first place - let inbetween_body_and_decl_check = |node: SyntaxNode| { - if let Some(NodeOrToken::Node(n)) = - syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev) - { - if let Some(item) = ast::Item::cast(n) { - let is_inbetween = match &item { - ast::Item::Const(it) => it.body().is_none(), - ast::Item::Enum(it) => it.variant_list().is_none(), - ast::Item::ExternBlock(it) => it.extern_item_list().is_none(), - ast::Item::Fn(it) => it.body().is_none(), - ast::Item::Impl(it) => it.assoc_item_list().is_none(), - ast::Item::Module(it) => it.item_list().is_none(), - ast::Item::Static(it) => it.body().is_none(), - ast::Item::Struct(it) => it.field_list().is_none(), - ast::Item::Trait(it) => it.assoc_item_list().is_none(), - ast::Item::TypeAlias(it) => it.ty().is_none(), - ast::Item::Union(it) => it.record_field_list().is_none(), - _ => false, - }; - if is_inbetween { - return Some(item); + let make_path_kind_expr = |expr: ast::Expr| { + let it = expr.syntax(); + let in_block_expr = is_in_block(it); + let in_loop_body = is_in_loop_body(it); + let after_if_expr = after_if_expr(it.clone()); + let ref_expr_parent = + path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast); + let (innermost_ret_ty, self_param) = { + let find_ret_ty = |it: SyntaxNode| { + if let Some(item) = ast::Item::cast(it.clone()) { + match item { + ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))), + ast::Item::MacroCall(_) => None, + _ => Some(None), } - } - } - None - }; - - let type_location = |node: &SyntaxNode| { - let parent = node.parent()?; - let res = match_ast! { - match parent { - ast::Const(it) => { - let name = find_opt_node_in_file(original_file, it.name())?; - let original = ast::Const::cast(name.syntax().parent()?)?; - TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body())) - }, - ast::RetType(it) => { - if it.thin_arrow_token().is_none() { - return None; - } - let parent = match ast::Fn::cast(parent.parent()?) { - Some(x) => x.param_list(), - None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(), - }; - - let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?; - TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! { - match parent { - ast::ClosureExpr(it) => { - it.body() - }, - ast::Fn(it) => { - it.body().map(ast::Expr::BlockExpr) - }, - _ => return None, - } - })) - }, - ast::Param(it) => { - if it.colon_token().is_none() { - return None; - } - TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat()))) - }, - ast::LetStmt(it) => { - if it.colon_token().is_none() { - return None; - } - TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat()))) - }, - ast::Impl(it) => { - match it.trait_() { - Some(t) if t.syntax() == node => TypeLocation::ImplTrait, - _ => match it.self_ty() { - Some(t) if t.syntax() == node => TypeLocation::ImplTarget, - _ => return None, - }, - } - }, - ast::TypeBound(_) => TypeLocation::TypeBound, - // is this case needed? - ast::TypeBoundList(_) => TypeLocation::TypeBound, - ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))), - // is this case needed? 
- ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))), - ast::TupleField(_) => TypeLocation::TupleField, - _ => return None, - } - }; - Some(res) - }; - - let is_in_condition = |it: &ast::Expr| { - (|| { - let parent = it.syntax().parent()?; - if let Some(expr) = ast::WhileExpr::cast(parent.clone()) { - Some(expr.condition()? == *it) - } else if let Some(expr) = ast::IfExpr::cast(parent) { - Some(expr.condition()? == *it) } else { - None - } - })() - .unwrap_or(false) - }; - - let make_path_kind_expr = |expr: ast::Expr| { - let it = expr.syntax(); - let in_block_expr = is_in_block(it); - let in_loop_body = is_in_loop_body(it); - let after_if_expr = after_if_expr(it.clone()); - let ref_expr_parent = - path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast); - let (innermost_ret_ty, self_param) = { - let find_ret_ty = |it: SyntaxNode| { - if let Some(item) = ast::Item::cast(it.clone()) { - match item { - ast::Item::Fn(f) => { - Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))) - } - ast::Item::MacroCall(_) => None, - _ => Some(None), - } - } else { - let expr = ast::Expr::cast(it)?; - let callable = match expr { - // FIXME - // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b), - ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr), - _ => return None, - }; - Some( - callable - .and_then(|c| c.adjusted().as_callable(sema.db)) - .map(|it| it.return_type()), - ) - } - }; - let find_fn_self_param = |it| match it { - ast::Item::Fn(fn_) => { - Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))) - } - ast::Item::MacroCall(_) => None, - _ => Some(None), - }; - - match find_node_in_file_compensated(sema, original_file, &expr) { - Some(it) => { - let innermost_ret_ty = sema - .ancestors_with_macros(it.syntax().clone()) - .find_map(find_ret_ty) - .flatten(); - - let self_param = sema - .ancestors_with_macros(it.syntax().clone()) - .filter_map(ast::Item::cast) - .find_map(find_fn_self_param) - .flatten(); - (innermost_ret_ty, self_param) - } - None => (None, None), + let expr = ast::Expr::cast(it)?; + let callable = match expr { + // FIXME + // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b), + ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr), + _ => return None, + }; + Some( + callable + .and_then(|c| c.adjusted().as_callable(sema.db)) + .map(|it| it.return_type()), + ) } }; - let is_func_update = func_update_record(it); - let in_condition = is_in_condition(&expr); - let incomplete_let = it - .parent() - .and_then(ast::LetStmt::cast) - .map_or(false, |it| it.semicolon_token().is_none()); - let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax()); - - let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) { - Some(arm) => arm - .fat_arrow_token() - .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()), - None => false, + let find_fn_self_param = |it| match it { + ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))), + ast::Item::MacroCall(_) => None, + _ => Some(None), }; - PathKind::Expr { - expr_ctx: ExprCtx { - in_block_expr, - in_loop_body, - after_if_expr, - in_condition, - ref_expr_parent, - is_func_update, - innermost_ret_ty, - self_param, - incomplete_let, - impl_, - in_match_guard, - }, + match find_node_in_file_compensated(sema, original_file, &expr) { + Some(it) => { + let innermost_ret_ty = 
sema + .ancestors_with_macros(it.syntax().clone()) + .find_map(find_ret_ty) + .flatten(); + + let self_param = sema + .ancestors_with_macros(it.syntax().clone()) + .filter_map(ast::Item::cast) + .find_map(find_fn_self_param) + .flatten(); + (innermost_ret_ty, self_param) + } + None => (None, None), } }; - let make_path_kind_type = |ty: ast::Type| { - let location = type_location(ty.syntax()); - PathKind::Type { location: location.unwrap_or(TypeLocation::Other) } + let is_func_update = func_update_record(it); + let in_condition = is_in_condition(&expr); + let incomplete_let = it + .parent() + .and_then(ast::LetStmt::cast) + .map_or(false, |it| it.semicolon_token().is_none()); + let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax()); + + let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) { + Some(arm) => arm + .fat_arrow_token() + .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()), + None => false, }; - let mut kind_macro_call = |it: ast::MacroCall| { - path_ctx.has_macro_bang = it.excl_token().is_some(); - let parent = it.syntax().parent()?; - // Any path in an item list will be treated as a macro call by the parser - let kind = match_ast! { - match parent { - ast::MacroExpr(expr) => make_path_kind_expr(expr.into()), - ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}, - ast::MacroType(ty) => make_path_kind_type(ty.into()), - ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module }, - ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() { - Some(it) => match_ast! { - match it { - ast::Trait(_) => ItemListKind::Trait, - ast::Impl(it) => if it.trait_().is_some() { - ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it)) - } else { - ItemListKind::Impl - }, - _ => return None - } - }, - None => return None, - } }, - ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock }, - ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile }, - _ => return None, - } - }; - Some(kind) - }; - let make_path_kind_attr = |meta: ast::Meta| { - let attr = meta.parent_attr()?; - let kind = attr.kind(); - let attached = attr.syntax().parent()?; - let is_trailing_outer_attr = kind != AttrKind::Inner - && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next) - .is_none(); - let annotated_item_kind = - if is_trailing_outer_attr { None } else { Some(attached.kind()) }; - Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } }) - }; + PathKind::Expr { + expr_ctx: ExprCtx { + in_block_expr, + in_loop_body, + after_if_expr, + in_condition, + ref_expr_parent, + is_func_update, + innermost_ret_ty, + self_param, + incomplete_let, + impl_, + in_match_guard, + }, + } + }; + let make_path_kind_type = |ty: ast::Type| { + let location = type_location(ty.syntax()); + PathKind::Type { location: location.unwrap_or(TypeLocation::Other) } + }; - // Infer the path kind - let parent = path.syntax().parent()?; + let mut kind_macro_call = |it: ast::MacroCall| { + path_ctx.has_macro_bang = it.excl_token().is_some(); + let parent = it.syntax().parent()?; + // Any path in an item list will be treated as a macro call by the parser let kind = match_ast! 
{ match parent { - ast::PathType(it) => make_path_kind_type(it.into()), - ast::PathExpr(it) => { - if let Some(p) = it.syntax().parent() { - if ast::ExprStmt::can_cast(p.kind()) { - if let Some(kind) = inbetween_body_and_decl_check(p) { - return Some(make_res(NameRefKind::Keyword(kind))); - } + ast::MacroExpr(expr) => make_path_kind_expr(expr.into()), + ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}, + ast::MacroType(ty) => make_path_kind_type(ty.into()), + ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module }, + ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() { + Some(it) => match_ast! { + match it { + ast::Trait(_) => ItemListKind::Trait, + ast::Impl(it) => if it.trait_().is_some() { + ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it)) + } else { + ItemListKind::Impl + }, + _ => return None } - } - - path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); - - make_path_kind_expr(it.into()) - }, - ast::TupleStructPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::RecordPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::PathPat(it) => { - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::MacroCall(it) => { - // A macro call in this position is usually a result of parsing recovery, so check that - if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { - return Some(make_res(NameRefKind::Keyword(kind))); - } - - kind_macro_call(it)? - }, - ast::Meta(meta) => make_path_kind_attr(meta)?, - ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, - ast::UseTree(_) => PathKind::Use, - // completing inside a qualifier - ast::Path(parent) => { - path_ctx.parent = Some(parent.clone()); - let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?; - match_ast! { - match parent { - ast::PathType(it) => make_path_kind_type(it.into()), - ast::PathExpr(it) => { - path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); - - make_path_kind_expr(it.into()) - }, - ast::TupleStructPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::RecordPat(it) => { - path_ctx.has_call_parens = true; - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } - }, - ast::PathPat(it) => { - PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} - }, - ast::MacroCall(it) => { - kind_macro_call(it)? 
- }, - ast::Meta(meta) => make_path_kind_attr(meta)?, - ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, - ast::UseTree(_) => PathKind::Use, - ast::RecordExpr(it) => make_path_kind_expr(it.into()), - _ => return None, - } - } - }, - ast::RecordExpr(it) => make_path_kind_expr(it.into()), + }, + None => return None, + } }, + ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock }, + ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile }, _ => return None, } }; + Some(kind) + }; + let make_path_kind_attr = |meta: ast::Meta| { + let attr = meta.parent_attr()?; + let kind = attr.kind(); + let attached = attr.syntax().parent()?; + let is_trailing_outer_attr = kind != AttrKind::Inner + && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none(); + let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) }; + Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } }) + }; - path_ctx.kind = kind; - path_ctx.has_type_args = segment.generic_arg_list().is_some(); - - // calculate the qualifier context - if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) { - path_ctx.use_tree_parent = use_tree_parent; - if !use_tree_parent && segment.coloncolon_token().is_some() { - path_ctx.qualified = Qualified::Absolute; - } else { - let qualifier = qualifier - .segment() - .and_then(|it| find_node_in_file(original_file, &it)) - .map(|it| it.parent_path()); - if let Some(qualifier) = qualifier { - let type_anchor = match qualifier.segment().and_then(|it| it.kind()) { - Some(ast::PathSegmentKind::Type { - type_ref: Some(type_ref), - trait_ref, - }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)), - _ => None, - }; - - path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor { - let ty = match ty { - ast::Type::InferType(_) => None, - ty => sema.resolve_type(&ty), - }; - let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?)); - Qualified::TypeAnchor { ty, trait_ } - } else { - let res = sema.resolve_path(&qualifier); - - // For understanding how and why super_chain_len is calculated the way it - // is check the documentation at it's definition - let mut segment_count = 0; - let super_count = - iter::successors(Some(qualifier.clone()), |p| p.qualifier()) - .take_while(|p| { - p.segment() - .and_then(|s| { - segment_count += 1; - s.super_token() - }) - .is_some() - }) - .count(); - - let super_chain_len = - if segment_count > super_count { None } else { Some(super_count) }; - - Qualified::With { path: qualifier, resolution: res, super_chain_len } - } - }; - } - } else if let Some(segment) = path.segment() { - if segment.coloncolon_token().is_some() { - path_ctx.qualified = Qualified::Absolute; - } - } - - let mut qualifier_ctx = QualifierCtx::default(); - if path_ctx.is_trivial_path() { - // fetch the full expression that may have qualifiers attached to it - let top_node = match path_ctx.kind { - PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => { - parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| { - let parent = p.parent()?; - if ast::StmtList::can_cast(parent.kind()) { - Some(p) - } else if ast::ExprStmt::can_cast(parent.kind()) { - Some(parent) - } else { - None + // Infer the path kind + let parent = path.syntax().parent()?; + let kind = match_ast! 
{ + match parent { + ast::PathType(it) => make_path_kind_type(it.into()), + ast::PathExpr(it) => { + if let Some(p) = it.syntax().parent() { + if ast::ExprStmt::can_cast(p.kind()) { + if let Some(kind) = inbetween_body_and_decl_check(p) { + return Some(make_res(NameRefKind::Keyword(kind))); } - }) - } - PathKind::Item { .. } => { - parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind())) - } - _ => None, - }; - if let Some(top) = top_node { - if let Some(NodeOrToken::Node(error_node)) = - syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) - { - if error_node.kind() == SyntaxKind::ERROR { - qualifier_ctx.unsafe_tok = error_node - .children_with_tokens() - .filter_map(NodeOrToken::into_token) - .find(|it| it.kind() == T![unsafe]); - qualifier_ctx.vis_node = - error_node.children().find_map(ast::Visibility::cast); } } - if let PathKind::Item { .. } = path_ctx.kind { - if qualifier_ctx.none() { - if let Some(t) = top.first_token() { - if let Some(prev) = t - .prev_token() - .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) - { - if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) { - // This was inferred to be an item position path, but it seems - // to be part of some other broken node which leaked into an item - // list - return None; - } + path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); + + make_path_kind_expr(it.into()) + }, + ast::TupleStructPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::RecordPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::PathPat(it) => { + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} + }, + ast::MacroCall(it) => { + // A macro call in this position is usually a result of parsing recovery, so check that + if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) { + return Some(make_res(NameRefKind::Keyword(kind))); + } + + kind_macro_call(it)? + }, + ast::Meta(meta) => make_path_kind_attr(meta)?, + ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, + ast::UseTree(_) => PathKind::Use, + // completing inside a qualifier + ast::Path(parent) => { + path_ctx.parent = Some(parent.clone()); + let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?; + match_ast! { + match parent { + ast::PathType(it) => make_path_kind_type(it.into()), + ast::PathExpr(it) => { + path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind())); + + make_path_kind_expr(it.into()) + }, + ast::TupleStructPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::RecordPat(it) => { + path_ctx.has_call_parens = true; + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) } + }, + ast::PathPat(it) => { + PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())} + }, + ast::MacroCall(it) => { + kind_macro_call(it)? 
+ }, + ast::Meta(meta) => make_path_kind_attr(meta)?, + ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() }, + ast::UseTree(_) => PathKind::Use, + ast::RecordExpr(it) => make_path_kind_expr(it.into()), + _ => return None, + } + } + }, + ast::RecordExpr(it) => make_path_kind_expr(it.into()), + _ => return None, + } + }; + + path_ctx.kind = kind; + path_ctx.has_type_args = segment.generic_arg_list().is_some(); + + // calculate the qualifier context + if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) { + path_ctx.use_tree_parent = use_tree_parent; + if !use_tree_parent && segment.coloncolon_token().is_some() { + path_ctx.qualified = Qualified::Absolute; + } else { + let qualifier = qualifier + .segment() + .and_then(|it| find_node_in_file(original_file, &it)) + .map(|it| it.parent_path()); + if let Some(qualifier) = qualifier { + let type_anchor = match qualifier.segment().and_then(|it| it.kind()) { + Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref }) + if qualifier.qualifier().is_none() => + { + Some((type_ref, trait_ref)) + } + _ => None, + }; + + path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor { + let ty = match ty { + ast::Type::InferType(_) => None, + ty => sema.resolve_type(&ty), + }; + let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?)); + Qualified::TypeAnchor { ty, trait_ } + } else { + let res = sema.resolve_path(&qualifier); + + // For understanding how and why super_chain_len is calculated the way it + // is check the documentation at it's definition + let mut segment_count = 0; + let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier()) + .take_while(|p| { + p.segment() + .and_then(|s| { + segment_count += 1; + s.super_token() + }) + .is_some() + }) + .count(); + + let super_chain_len = + if segment_count > super_count { None } else { Some(super_count) }; + + Qualified::With { path: qualifier, resolution: res, super_chain_len } + } + }; + } + } else if let Some(segment) = path.segment() { + if segment.coloncolon_token().is_some() { + path_ctx.qualified = Qualified::Absolute; + } + } + + let mut qualifier_ctx = QualifierCtx::default(); + if path_ctx.is_trivial_path() { + // fetch the full expression that may have qualifiers attached to it + let top_node = match path_ctx.kind { + PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => { + parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| { + let parent = p.parent()?; + if ast::StmtList::can_cast(parent.kind()) { + Some(p) + } else if ast::ExprStmt::can_cast(parent.kind()) { + Some(parent) + } else { + None + } + }) + } + PathKind::Item { .. } => { + parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind())) + } + _ => None, + }; + if let Some(top) = top_node { + if let Some(NodeOrToken::Node(error_node)) = + syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev) + { + if error_node.kind() == SyntaxKind::ERROR { + qualifier_ctx.unsafe_tok = error_node + .children_with_tokens() + .filter_map(NodeOrToken::into_token) + .find(|it| it.kind() == T![unsafe]); + qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast); + } + } + + if let PathKind::Item { .. 
} = path_ctx.kind { + if qualifier_ctx.none() { + if let Some(t) = top.first_token() { + if let Some(prev) = t + .prev_token() + .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev)) + { + if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) { + // This was inferred to be an item position path, but it seems + // to be part of some other broken node which leaked into an item + // list + return None; } } } } } } - Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx)) } + Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx)) } fn pattern_context_for( From 76be44eed5e0fe1a80d642a31a4f0537e151f7fe Mon Sep 17 00:00:00 2001 From: dvdsk Date: Mon, 10 Oct 2022 19:00:47 +0200 Subject: [PATCH 29/66] Update manual now stable can be installed with rustup `rustup` can now install `rust-analyzer` for the stable tool-chain. This commit removes the note that `rustup` can only install for the nightly branch and adjusts the command. I also added a note on how to find the path to the `rust-analyzer` binary when installed using `rustup`, and suggestions on how to work around it not being placed in `~/.cargo/bin`. I thought it would be ideal to point everyone to use `rustup run stable rust-analyzer` to start `rust-analyzer`. That would make it trivial to switch to nightly however I could not get this to work in `nvim` therefore I left it as a suggestion at the end. --- docs/user/manual.adoc | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/docs/user/manual.adoc b/docs/user/manual.adoc index 9bd3b6a692b..c30838e5f5e 100644 --- a/docs/user/manual.adoc +++ b/docs/user/manual.adoc @@ -174,14 +174,25 @@ On Unix, running the editor from a shell or changing the `.desktop` file to set ==== `rustup` -`rust-analyzer` is available in `rustup`, but only in the nightly toolchain: +`rust-analyzer` is available in `rustup`: [source,bash] ---- -$ rustup +nightly component add rust-analyzer-preview +$ rustup component add rust-analyzer ---- -However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue]. +However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue]. You can find the path to the binary using: +[source,bash] +---- +$ rustup which --toolchain stable rust-analyzer +---- +You can link to there from `~/.cargo/bin` or configure your editor to use the full path. 
+ +Alternatively you might be able to configure your editor to start `rust-analyzer` using the command: +[source,bash] +---- +$ rustup run stable rust-analyzer +---- ==== Arch Linux From c069f1b7d22b6e1ce9135052b4ba50be29bcea6c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 10 Oct 2022 22:47:52 +0200 Subject: [PATCH 30/66] Prefer similar tokens when expanding macros speculatively --- crates/hir-expand/src/db.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index bc97ee15c7d..87e4db03984 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -221,8 +221,16 @@ pub fn expand_speculative( fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info); let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); - let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?; - let token = node.syntax_node().covering_element(range).into_token()?; + let syntax_node = node.syntax_node(); + let token = rev_tmap + .ranges_by_token(token_id, token_to_map.kind()) + .filter_map(|range| syntax_node.covering_element(range).into_token()) + .min_by_key(|t| { + // prefer tokens of the same kind and text + // Note the inversion of the score here, as we want to prefer the first token in case + // of all tokens having the same score + (t.kind() != token_to_map.kind()) as u8 + (t.text() != token_to_map.text()) as u8 + })?; Some((node.syntax_node(), token)) } From 11a335d5e84844e0425a43c076baa6db2bbc790b Mon Sep 17 00:00:00 2001 From: zyctree Date: Tue, 11 Oct 2022 14:05:04 +0800 Subject: [PATCH 31/66] fix link in syntax.md --- docs/dev/syntax.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index 30e13701383..64c872b2517 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md @@ -11,7 +11,7 @@ The things described are implemented in three places * [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees. * [ra_syntax](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API. Nothing in rust-analyzer except this crate knows about `rowan`. -* [parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/parser) crate parses input tokens into an `ra_syntax` tree +* [parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_parser) crate parses input tokens into an `ra_syntax` tree ## Design Goals From 5e43ea96aad65fbaae5806ea6732ab59977ede1c Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Mon, 10 Oct 2022 19:32:24 +0900 Subject: [PATCH 32/66] fix: reorder dyn bounds on render --- crates/hir-ty/src/display.rs | 12 +++++++++- .../hir-ty/src/tests/display_source_code.rs | 22 +++++++++++++++++++ 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index 7f0baf49dad..a6602747ef7 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -751,9 +751,19 @@ impl HirDisplay for Ty { } TyKind::BoundVar(idx) => idx.hir_fmt(f)?, TyKind::Dyn(dyn_ty) => { + // Reorder bounds to satisfy `write_bounds_like_dyn_trait()`'s expectation. 
+ // FIXME: `Iterator::partition_in_place()` or `Vec::drain_filter()` may make it + // more efficient when either of them hits stable. + let mut bounds: SmallVec<[_; 4]> = + dyn_ty.bounds.skip_binders().iter(Interner).cloned().collect(); + let (auto_traits, others): (SmallVec<[_; 4]>, _) = + bounds.drain(1..).partition(|b| b.skip_binders().trait_id().is_some()); + bounds.extend(others); + bounds.extend(auto_traits); + write_bounds_like_dyn_trait_with_prefix( "dyn", - dyn_ty.bounds.skip_binders().interned(), + &bounds, SizedByDefault::NotSized, f, )?; diff --git a/crates/hir-ty/src/tests/display_source_code.rs b/crates/hir-ty/src/tests/display_source_code.rs index 240942e488d..8a8ff08cfe8 100644 --- a/crates/hir-ty/src/tests/display_source_code.rs +++ b/crates/hir-ty/src/tests/display_source_code.rs @@ -55,6 +55,28 @@ fn main() { ); } +#[test] +fn render_dyn_ty_independent_of_order() { + check_types_source_code( + r#" +auto trait Send {} +trait A { + type Assoc; +} +trait B: A {} + +fn test( + _: &(dyn A + Send), + //^ &(dyn A + Send) + _: &(dyn Send + A), + //^ &(dyn A + Send) + _: &dyn B, + //^ &(dyn B) +) {} + "#, + ); +} + #[test] fn render_dyn_for_ty() { // FIXME From 34977996c3b2d045e25e17a6ce6c8a7565a16b01 Mon Sep 17 00:00:00 2001 From: zyctree Date: Tue, 11 Oct 2022 19:37:49 +0800 Subject: [PATCH 33/66] Update docs/dev/syntax.md MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Laurențiu Nicola --- docs/dev/syntax.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index 64c872b2517..767233351b4 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md @@ -11,7 +11,7 @@ The things described are implemented in three places * [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees. * [ra_syntax](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API. Nothing in rust-analyzer except this crate knows about `rowan`. 
-* [parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_parser) crate parses input tokens into an `ra_syntax` tree +* [ra_parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_parser) crate parses input tokens into an `ra_syntax` tree ## Design Goals From 3c39668ca464919ed4180049945b3e2b9bf6191a Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Tue, 11 Oct 2022 19:23:41 +0200 Subject: [PATCH 34/66] Remove extra parameter, access Db through semantics --- crates/ide-completion/src/completions/env_vars.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 5483fdf31a2..a3a97130394 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -18,7 +18,8 @@ const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ ("CARGO_PKG_NAME","The name of your package"), ("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"), ("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"), -("CARGO_PKG_REPOSITORY","The repository from the manifest of your package"), + ("CARGO_PKG_REPOSITORY", +"The repository from the manifest of your package"), ("CARGO_PKG_LICENSE","The license from the manifest of your package"), ("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"), ("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"), @@ -33,7 +34,7 @@ pub(crate) fn complete_cargo_env_vars( ctx: &CompletionContext<'_>, expanded: &ast::String, ) -> Option<()> { - guard_env_macro(expanded, &ctx.sema, &ctx.db)?; + guard_env_macro(expanded, &ctx.sema)?; let range = expanded.text_range_between_quotes()?; CARGO_DEFINED_VARS.iter().for_each(|(var, detail)| { @@ -48,11 +49,11 @@ pub(crate) fn complete_cargo_env_vars( fn guard_env_macro( string: &ast::String, semantics: &Semantics<'_, RootDatabase>, - db: &RootDatabase, ) -> Option<()> { let call = get_outer_macro(string)?; let name = call.path()?.segment()?.name_ref()?; let makro = semantics.resolve_macro_call(&call)?; + let db = semantics.db; match name.text().as_str() { "env" | "option_env" if makro.kind(db) == hir::MacroKind::BuiltIn => Some(()), From a807cc3afbd5bd081f4f5ff0bad1b5c403d7a690 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Tue, 11 Oct 2022 19:28:42 +0200 Subject: [PATCH 35/66] Rename `get_outer_macro` to `macro_call_for_string_token` --- crates/ide-completion/src/completions/env_vars.rs | 7 +++---- crates/ide-db/src/syntax_helpers/format_string.rs | 5 ++--- crates/ide-db/src/syntax_helpers/node_ext.rs | 2 +- 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index a3a97130394..14dc17321f9 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -1,11 +1,10 @@ //! 
Completes environment variables defined by Cargo (https://doc.rust-lang.org/cargo/reference/environment-variables.html) use hir::Semantics; -use ide_db::{syntax_helpers::node_ext::get_outer_macro, RootDatabase}; +use ide_db::{syntax_helpers::node_ext::macro_call_for_string_token, RootDatabase}; use syntax::ast::{self, IsString}; -use crate::{context::CompletionContext, CompletionItem, CompletionItemKind}; +use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, completions::Completions}; -use super::Completions; const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ ("CARGO","Path to the cargo binary performing the build"), ("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"), @@ -50,7 +49,7 @@ fn guard_env_macro( string: &ast::String, semantics: &Semantics<'_, RootDatabase>, ) -> Option<()> { - let call = get_outer_macro(string)?; + let call = macro_call_for_string_token(string)?; let name = call.path()?.segment()?.name_ref()?; let makro = semantics.resolve_macro_call(&call)?; let db = semantics.db; diff --git a/crates/ide-db/src/syntax_helpers/format_string.rs b/crates/ide-db/src/syntax_helpers/format_string.rs index b3e227ddd27..caa579e322b 100644 --- a/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/crates/ide-db/src/syntax_helpers/format_string.rs @@ -3,8 +3,7 @@ use syntax::{ ast::{self, IsString}, TextRange, TextSize, }; - -use super::node_ext::get_outer_macro; +use crate::syntax_helpers::node_ext::macro_call_for_string_token; pub fn is_format_string(string: &ast::String) -> bool { // Check if `string` is a format string argument of a macro invocation. @@ -16,7 +15,7 @@ pub fn is_format_string(string: &ast::String) -> bool { // This setup lets us correctly highlight the components of `concat!("{}", "bla")` format // strings. It still fails for `concat!("{", "}")`, but that is rare. 
(|| { - let name = get_outer_macro(string)?.path()?.segment()?.name_ref()?; + let name = macro_call_for_string_token(string)?.path()?.segment()?.name_ref()?; if !matches!( name.text().as_str(), diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index 9cfcdfb77b9..39710b8f13e 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -458,7 +458,7 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option Option { +pub fn macro_call_for_string_token(string: &ast::String) -> Option { let macro_call = string.syntax().parent_ancestors().find_map(ast::MacroCall::cast)?; Some(macro_call) } From b5e87ac11177165da100b21fbc2f83736add6ac8 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Tue, 11 Oct 2022 19:29:09 +0200 Subject: [PATCH 36/66] Fix formatting for cargo vars list --- .../src/completions/env_vars.rs | 39 +++++++++---------- 1 file changed, 19 insertions(+), 20 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index 14dc17321f9..d7cccf0bedc 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -6,26 +6,25 @@ use syntax::ast::{self, IsString}; use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, completions::Completions}; const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ -("CARGO","Path to the cargo binary performing the build"), -("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"), -("CARGO_PKG_VERSION","The full version of your package"), -("CARGO_PKG_VERSION_MAJOR","The major version of your package"), -("CARGO_PKG_VERSION_MINOR","The minor version of your package"), -("CARGO_PKG_VERSION_PATCH","The patch version of your package"), -("CARGO_PKG_VERSION_PRE","The pre-release version of your package"), -("CARGO_PKG_AUTHORS","Colon separated list of authors from the manifest of your package"), -("CARGO_PKG_NAME","The name of your package"), -("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"), -("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"), - ("CARGO_PKG_REPOSITORY", -"The repository from the manifest of your package"), -("CARGO_PKG_LICENSE","The license from the manifest of your package"), -("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"), -("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"), -("CARGO_CRATE_NAME","The name of the crate that is currently being compiled"), -("CARGO_BIN_NAME","The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe"), -("CARGO_PRIMARY_PACKAGE","This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)"), -("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. 
This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code") + ("CARGO","Path to the cargo binary performing the build"), + ("CARGO_MANIFEST_DIR","The directory containing the manifest of your package"), + ("CARGO_PKG_VERSION","The full version of your package"), + ("CARGO_PKG_VERSION_MAJOR","The major version of your package"), + ("CARGO_PKG_VERSION_MINOR","The minor version of your package"), + ("CARGO_PKG_VERSION_PATCH","The patch version of your package"), + ("CARGO_PKG_VERSION_PRE","The pre-release version of your package"), + ("CARGO_PKG_AUTHORS","Colon separated list of authors from the manifest of your package"), + ("CARGO_PKG_NAME","The name of your package"), + ("CARGO_PKG_DESCRIPTION","The description from the manifest of your package"), + ("CARGO_PKG_HOMEPAGE","The home page from the manifest of your package"), + ("CARGO_PKG_REPOSITORY","The repository from the manifest of your package"), + ("CARGO_PKG_LICENSE","The license from the manifest of your package"), + ("CARGO_PKG_LICENSE_FILE","The license file from the manifest of your package"), + ("CARGO_PKG_RUST_VERSION","The Rust version from the manifest of your package. Note that this is the minimum Rust version supported by the package, not the current Rust version"), + ("CARGO_CRATE_NAME","The name of the crate that is currently being compiled"), + ("CARGO_BIN_NAME","The name of the binary that is currently being compiled (if it is a binary). This name does not include any file extension, such as .exe"), + ("CARGO_PRIMARY_PACKAGE","This environment variable will be set if the package being built is primary. Primary packages are the ones the user selected on the command-line, either with -p flags or the defaults based on the current directory and the default workspace members. This environment variable will not be set when building dependencies. This is only set when compiling the package (not when running binaries or tests)"), + ("CARGO_TARGET_TMPDIR","Only set when building integration test or benchmark code. This is a path to a directory inside the target directory where integration tests or benchmarks are free to put any data needed by the tests/benches. 
Cargo initially creates this directory but doesn't manage its content in any way, this is the responsibility of the test code") ]; pub(crate) fn complete_cargo_env_vars( From 2b2b9f8c7332314b151fd8287a8b02c34a20a5a7 Mon Sep 17 00:00:00 2001 From: btwotwo <10519967+btwotwo@users.noreply.github.com> Date: Tue, 11 Oct 2022 19:53:22 +0200 Subject: [PATCH 37/66] Formatting --- crates/ide-completion/src/completions/env_vars.rs | 9 ++++----- crates/ide-db/src/syntax_helpers/format_string.rs | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/crates/ide-completion/src/completions/env_vars.rs b/crates/ide-completion/src/completions/env_vars.rs index d7cccf0bedc..09e95e53de6 100644 --- a/crates/ide-completion/src/completions/env_vars.rs +++ b/crates/ide-completion/src/completions/env_vars.rs @@ -3,7 +3,9 @@ use hir::Semantics; use ide_db::{syntax_helpers::node_ext::macro_call_for_string_token, RootDatabase}; use syntax::ast::{self, IsString}; -use crate::{context::CompletionContext, CompletionItem, CompletionItemKind, completions::Completions}; +use crate::{ + completions::Completions, context::CompletionContext, CompletionItem, CompletionItemKind, +}; const CARGO_DEFINED_VARS: &[(&str, &str)] = &[ ("CARGO","Path to the cargo binary performing the build"), @@ -44,10 +46,7 @@ pub(crate) fn complete_cargo_env_vars( Some(()) } -fn guard_env_macro( - string: &ast::String, - semantics: &Semantics<'_, RootDatabase>, -) -> Option<()> { +fn guard_env_macro(string: &ast::String, semantics: &Semantics<'_, RootDatabase>) -> Option<()> { let call = macro_call_for_string_token(string)?; let name = call.path()?.segment()?.name_ref()?; let makro = semantics.resolve_macro_call(&call)?; diff --git a/crates/ide-db/src/syntax_helpers/format_string.rs b/crates/ide-db/src/syntax_helpers/format_string.rs index caa579e322b..2d6927cee99 100644 --- a/crates/ide-db/src/syntax_helpers/format_string.rs +++ b/crates/ide-db/src/syntax_helpers/format_string.rs @@ -1,9 +1,9 @@ //! Tools to work with format string literals for the `format_args!` family of macros. +use crate::syntax_helpers::node_ext::macro_call_for_string_token; use syntax::{ ast::{self, IsString}, TextRange, TextSize, }; -use crate::syntax_helpers::node_ext::macro_call_for_string_token; pub fn is_format_string(string: &ast::String) -> bool { // Check if `string` is a format string argument of a macro invocation. From 73e91dda5032932a8241dcdc938788ed391fe549 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 12 Oct 2022 17:01:27 +0300 Subject: [PATCH 38/66] Set opt-level = 1 on dev profile --- .github/workflows/ci.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1563ee0b143..1c061bb7486 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -45,6 +45,10 @@ jobs: - name: Cache Dependencies uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 + - name: Bump opt-level + if: matrix.os == 'ubuntu-latest' + run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml + - name: Compile run: cargo test --no-run --locked From 82a8774a08d946cf9d821f4963fbcef9ac194644 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Wed, 12 Oct 2022 17:01:54 +0300 Subject: [PATCH 39/66] Avoid format! 
in favor of to_string --- crates/syntax/src/ast/make.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index c9a21e12c08..4057a75e7c1 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -89,7 +89,7 @@ pub mod ext { } pub fn ty_name(name: ast::Name) -> ast::Type { - ty_path(ident_path(&format!("{name}"))) + ty_path(ident_path(&name.to_string())) } pub fn ty_bool() -> ast::Type { ty_path(ident_path("bool")) From 983ae1b1c9b54bb82c5c7e90e98725d283d0889b Mon Sep 17 00:00:00 2001 From: HKalbasi <45197576+HKalbasi@users.noreply.github.com> Date: Wed, 12 Oct 2022 20:40:49 +0330 Subject: [PATCH 40/66] Cast runnableEnv items to string --- editors/code/src/config.ts | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index a9c0f079b3d..d66aa52fadc 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -133,7 +133,21 @@ export class Config { } get runnableEnv() { - return this.get("runnableEnv"); + const item = this.get("runnableEnv"); + if (!item) return item; + const fixRecord = (r: Record) => { + for (const key in r) { + if (typeof r[key] !== 'string') { + r[key] = String(r[key]); + } + } + }; + if (item instanceof Array) { + item.forEach((x) => fixRecord(x.env)); + } else { + fixRecord(item); + } + return item; } get restartServerOnConfigChange() { From 58e5452a9bd6adef56e7099b21862985c4c0151c Mon Sep 17 00:00:00 2001 From: HKalbasi <45197576+HKalbasi@users.noreply.github.com> Date: Wed, 12 Oct 2022 14:14:59 -0700 Subject: [PATCH 41/66] fix formatting --- editors/code/src/config.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index d66aa52fadc..15846a5e864 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts @@ -137,7 +137,7 @@ export class Config { if (!item) return item; const fixRecord = (r: Record) => { for (const key in r) { - if (typeof r[key] !== 'string') { + if (typeof r[key] !== "string") { r[key] = String(r[key]); } } From 4adf09bc8a9592af0d9778f2b8618760966e3ba4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 13 Oct 2022 12:49:16 +0300 Subject: [PATCH 42/66] Run analysis-stats on CI --- .github/workflows/ci.yaml | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1c061bb7486..7eda61848b7 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -49,12 +49,25 @@ jobs: if: matrix.os == 'ubuntu-latest' run: sed -i '/\[profile.dev]/a opt-level=1' Cargo.toml - - name: Compile + - name: Compile (tests) run: cargo test --no-run --locked + # It's faster to `test` before `build` ¯\_(ツ)_/¯ + - name: Compile (rust-analyzer) + if: matrix.os == 'ubuntu-latest' + run: cargo build --quiet + - name: Test run: cargo test -- --nocapture --quiet + - name: Run analysis-stats on rust-analyzer + if: matrix.os == 'ubuntu-latest' + run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats . 
+ + - name: Run analysis-stats on rust std library + if: matrix.os == 'ubuntu-latest' + run: target/${{ matrix.target }}/debug/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std + # Weird targets to catch non-portable code rust-cross: if: github.repository == 'rust-lang/rust-analyzer' From 55fdcbe339a002099c17ba545fb593163fef2011 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Fri, 14 Oct 2022 09:28:44 +0200 Subject: [PATCH 43/66] Revert "fix: in VSCode, correctly resolve relative paths to errors" --- editors/code/package.json | 9 --------- editors/code/src/commands.ts | 4 ---- editors/code/src/ctx.ts | 4 ---- editors/code/src/main.ts | 1 - editors/code/src/run.ts | 2 -- editors/code/src/tasks.ts | 2 +- 6 files changed, 1 insertion(+), 21 deletions(-) diff --git a/editors/code/package.json b/editors/code/package.json index ccf62d002bb..f1dd3aa79ff 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -1301,15 +1301,6 @@ "endsPattern": "^\\[Finished running\\b" }, "pattern": "$rustc" - }, - { - "name": "rustc-run", - "base": "$rustc", - "fileLocation": [ - "autoDetect", - "${command:rust-analyzer.cargoWorkspaceRootForCurrentRun}" - ], - "pattern": "$rustc-run" } ], "colors": [ diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 6b10073aa86..b9ad525e361 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -842,7 +842,6 @@ export function run(ctx: Ctx): Cmd { item.detail = "rerun"; prevRunnable = item; const task = await createTask(item.runnable, ctx.config); - ctx.cargoWorkspaceRootForCurrentRun = item.cargoWorkspaceRoot; return await vscode.tasks.executeTask(task); }; } @@ -947,6 +946,3 @@ export function linkToCommand(ctx: Ctx): Cmd { } }; } -export function getCargoWorkspaceDir(ctx: Ctx): Cmd { - return async () => ctx.cargoWorkspaceRootForCurrentRun; -} diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts index b6c0eedfb16..26510011d43 100644 --- a/editors/code/src/ctx.ts +++ b/editors/code/src/ctx.ts @@ -17,10 +17,6 @@ export type Workspace = }; export class Ctx { - // Helps VS Code to correctly link problems from runnables. This is used by - // `rust-analyzer.cargoWorkspaceRootForCurrentRun` command of $rustc-run problem matcher. 
- cargoWorkspaceRootForCurrentRun?: string = undefined; - private constructor( readonly config: Config, private readonly extCtx: vscode.ExtensionContext, diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts index 6e6c2513dd9..41bde4195e0 100644 --- a/editors/code/src/main.ts +++ b/editors/code/src/main.ts @@ -189,7 +189,6 @@ async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) { ctx.registerCommand("resolveCodeAction", commands.resolveCodeAction); ctx.registerCommand("applyActionGroup", commands.applyActionGroup); ctx.registerCommand("gotoLocation", commands.gotoLocation); - ctx.registerCommand("cargoWorkspaceRootForCurrentRun", commands.getCargoWorkspaceDir); ctx.registerCommand("linkToCommand", commands.linkToCommand); } diff --git a/editors/code/src/run.ts b/editors/code/src/run.ts index 100c0fe2d8c..22e5eda6827 100644 --- a/editors/code/src/run.ts +++ b/editors/code/src/run.ts @@ -89,14 +89,12 @@ export async function selectRunnable( export class RunnableQuickPick implements vscode.QuickPickItem { public label: string; - public cargoWorkspaceRoot?: string; public description?: string | undefined; public detail?: string | undefined; public picked?: boolean | undefined; constructor(public runnable: ra.Runnable) { this.label = runnable.label; - this.cargoWorkspaceRoot = runnable.args.workspaceRoot; } } diff --git a/editors/code/src/tasks.ts b/editors/code/src/tasks.ts index 44697f95bab..e6239deeb21 100644 --- a/editors/code/src/tasks.ts +++ b/editors/code/src/tasks.ts @@ -128,7 +128,7 @@ export async function buildCargoTask( name, TASK_SOURCE, exec, - ["$rustc-run"] + ["$rustc"] ); } From d2d1c1e3ca8a3e65b1d03ca72ed8a9d93fab4564 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 14 Oct 2022 17:31:22 +0300 Subject: [PATCH 44/66] Bump Swatinem/rust-cache to move off Node 12 --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1563ee0b143..b2c8fdcf95b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -43,7 +43,7 @@ jobs: rustup component add rustfmt rust-src - name: Cache Dependencies - uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 + uses: Swatinem/rust-cache@76686c56f2b581d1bb5bda44b51f7e24bd9b8b8e - name: Compile run: cargo test --no-run --locked @@ -73,7 +73,7 @@ jobs: rustup target add ${{ env.targets }} ${{ env.targets_ide }} - name: Cache Dependencies - uses: Swatinem/rust-cache@ce325b60658c1b38465c06cc965b79baf32c1e72 + uses: Swatinem/rust-cache@76686c56f2b581d1bb5bda44b51f7e24bd9b8b8e - name: Check run: | From f94d8d89407267c5080b8d4281a5278365194c26 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Fri, 14 Oct 2022 19:23:32 +0300 Subject: [PATCH 45/66] Set a timeout when publishing to OVSX --- .github/workflows/release.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index f4d472e3d5c..f2bbf1f6ff1 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -259,6 +259,7 @@ jobs: working-directory: ./editors/code # token from https://dev.azure.com/rust-analyzer/ run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix || true + timeout-minutes: 2 - name: Publish Extension (Code Marketplace, nightly) if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || 
github.repository == 'rust-lang/rust-analyzer') @@ -269,3 +270,4 @@ jobs: if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer') working-directory: ./editors/code run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix || true + timeout-minutes: 2 From cbce0cda080b85626301fac89e6b5b3c4df91d4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 3 Oct 2022 09:02:42 +0300 Subject: [PATCH 46/66] Bump anyhow, arbitrary, itertools, semver, serde --- Cargo.lock | 32 +++++++++++++++---------------- crates/cfg/Cargo.toml | 4 ++-- crates/flycheck/Cargo.toml | 2 +- crates/hir-def/Cargo.toml | 2 +- crates/hir-expand/Cargo.toml | 2 +- crates/hir-ty/Cargo.toml | 2 +- crates/hir/Cargo.toml | 2 +- crates/ide-assists/Cargo.toml | 2 +- crates/ide-completion/Cargo.toml | 2 +- crates/ide-db/Cargo.toml | 2 +- crates/ide-diagnostics/Cargo.toml | 6 ++---- crates/ide-ssr/Cargo.toml | 3 +-- crates/ide/Cargo.toml | 2 +- crates/project-model/Cargo.toml | 6 +++--- crates/rust-analyzer/Cargo.toml | 4 ++-- crates/syntax/Cargo.toml | 2 +- crates/text-edit/Cargo.toml | 2 +- lib/lsp-server/Cargo.toml | 2 +- xtask/Cargo.toml | 2 +- 19 files changed, 39 insertions(+), 42 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cf1f166703f..e58c5b0078c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -37,9 +37,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.62" +version = "1.0.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1485d4d2cc45e7b201ee3767015c96faa5904387c9d87c6efdd0fb511f12d305" +checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602" [[package]] name = "anymap" @@ -49,9 +49,9 @@ checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72" [[package]] name = "arbitrary" -version = "1.1.3" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a7924531f38b1970ff630f03eb20a2fde69db5c590c93b0f3482e95dcc5fd60" +checksum = "d86fd10d912cab78764cc44307d9cd5f164e09abbeb87fb19fb6d95937e8da5f" [[package]] name = "arrayvec" @@ -306,9 +306,9 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.1.3" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9a577516173adb681466d517d39bd468293bc2c2a16439375ef0f35bba45f3d" +checksum = "226ad66541d865d7a7173ad6a9e691c33fdb910ac723f4bc734b3e5294a1f931" dependencies = [ "proc-macro2", "quote", @@ -764,9 +764,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] @@ -1493,27 +1493,27 @@ checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" [[package]] name = "semver" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f6841e709003d68bb2deee8c343572bf446003ec20a583e76f7b15cebf3711" +checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -1522,9 +1522,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.85" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e55a28e3aaef9d5ce0506d0a14dbba8054ddc7e499ef522dd8b26859ec9d4a44" +checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" dependencies = [ "indexmap", "itoa", diff --git a/crates/cfg/Cargo.toml b/crates/cfg/Cargo.toml index c9664a83ab8..ee1ad677a95 100644 --- a/crates/cfg/Cargo.toml +++ b/crates/cfg/Cargo.toml @@ -22,5 +22,5 @@ oorandom = "11.1.3" # We depend on both individually instead of using `features = ["derive"]` to microoptimize the # build graph: if the feature was enabled, syn would be built early on in the graph if `smolstr` # supports `arbitrary`. This way, we avoid feature unification. -arbitrary = "1.1.0" -derive_arbitrary = "1.1.0" +arbitrary = "1.1.7" +derive_arbitrary = "1.1.6" diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 688e790c536..07b27a8322f 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -15,7 +15,7 @@ tracing = "0.1.35" cargo_metadata = "0.15.0" rustc-hash = "1.1.0" serde = { version = "1.0.137", features = ["derive"] } -serde_json = "1.0.81" +serde_json = "1.0.86" jod-thread = "0.1.2" toolchain = { path = "../toolchain", version = "0.0.0" } diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 54b0832ca6a..cb41683b8c1 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -21,7 +21,7 @@ either = "1.7.0" fst = { version = "0.4.7", default-features = false } hashbrown = { version = "0.12.1", default-features = false } indexmap = "1.9.1" -itertools = "0.10.3" +itertools = "0.10.5" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.15.0" rustc-hash = "1.1.0" diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index dfd470ffca6..97473aa39b9 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -15,7 +15,7 @@ tracing = "0.1.35" either = "1.7.0" rustc-hash = "1.1.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } -itertools = "0.10.3" +itertools = "0.10.5" hashbrown = { version = "0.12.1", features = [ "inline-more", ], default-features = false } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index d8ede9956fc..b4860fa55bd 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -11,7 +11,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -itertools = "0.10.3" +itertools = "0.10.5" arrayvec = "0.7.2" smallvec = "1.9.0" ena = "0.14.0" diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 0c3eed57af0..03910b4c194 100644 --- a/crates/hir/Cargo.toml +++ b/crates/hir/Cargo.toml @@ -13,7 +13,7 @@ doctest = false rustc-hash = "1.1.0" either = "1.7.0" arrayvec = "0.7.2" -itertools = "0.10.3" +itertools = "0.10.5" smallvec = "1.9.0" once_cell = "1.15.0" diff --git a/crates/ide-assists/Cargo.toml b/crates/ide-assists/Cargo.toml 
index fca09d384c6..57a41f3d9a9 100644 --- a/crates/ide-assists/Cargo.toml +++ b/crates/ide-assists/Cargo.toml @@ -12,7 +12,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -itertools = "0.10.3" +itertools = "0.10.5" either = "1.7.0" stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml index 550f65375a8..e0334f4c124 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -11,7 +11,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -itertools = "0.10.3" +itertools = "0.10.5" once_cell = "1.15.0" smallvec = "1.9.0" diff --git a/crates/ide-db/Cargo.toml b/crates/ide-db/Cargo.toml index 3dca3e20222..cf0bcd5c96b 100644 --- a/crates/ide-db/Cargo.toml +++ b/crates/ide-db/Cargo.toml @@ -17,7 +17,7 @@ fst = { version = "0.4.7", default-features = false } rustc-hash = "1.1.0" once_cell = "1.15.0" either = "1.7.0" -itertools = "0.10.3" +itertools = "0.10.5" arrayvec = "0.7.2" indexmap = "1.9.1" memchr = "2.5.0" diff --git a/crates/ide-diagnostics/Cargo.toml b/crates/ide-diagnostics/Cargo.toml index 9b9e21a4ddb..e1d146f4ee5 100644 --- a/crates/ide-diagnostics/Cargo.toml +++ b/crates/ide-diagnostics/Cargo.toml @@ -11,11 +11,9 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" -itertools = "0.10.3" - - either = "1.7.0" -serde_json = "1.0.82" +itertools = "0.10.5" +serde_json = "1.0.86" profile = { path = "../profile", version = "0.0.0" } stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/ide-ssr/Cargo.toml b/crates/ide-ssr/Cargo.toml index 73314e0f330..4baf786c455 100644 --- a/crates/ide-ssr/Cargo.toml +++ b/crates/ide-ssr/Cargo.toml @@ -12,8 +12,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" - -itertools = "0.10.3" +itertools = "0.10.5" text-edit = { path = "../text-edit", version = "0.0.0" } parser = { path = "../parser", version = "0.0.0" } diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 0e9771cd2eb..15ad6bd1fe1 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -13,7 +13,7 @@ doctest = false cov-mark = "2.0.0-pre.1" crossbeam-channel = "0.5.5" either = "1.7.0" -itertools = "0.10.3" +itertools = "0.10.5" tracing = "0.1.35" oorandom = "11.1.3" pulldown-cmark-to-cmark = "10.0.1" diff --git a/crates/project-model/Cargo.toml b/crates/project-model/Cargo.toml index bc75d6faa38..6fd7c3166f8 100644 --- a/crates/project-model/Cargo.toml +++ b/crates/project-model/Cargo.toml @@ -13,10 +13,10 @@ doctest = false tracing = "0.1.35" rustc-hash = "1.1.0" cargo_metadata = "0.15.0" -semver = "1.0.10" +semver = "1.0.14" serde = { version = "1.0.137", features = ["derive"] } -serde_json = "1.0.81" -anyhow = "1.0.57" +serde_json = "1.0.86" +anyhow = "1.0.62" expect-test = "1.4.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index a4e6550984e..c769fbe84b2 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -18,10 +18,10 @@ name = "rust-analyzer" path = "src/bin/main.rs" [dependencies] -anyhow = "1.0.57" +anyhow = "1.0.62" crossbeam-channel = "0.5.5" dissimilar = "1.0.4" -itertools = "0.10.3" +itertools = "0.10.5" scip = "0.1.1" lsp-types = { version = "0.93.1", features = ["proposed"] } parking_lot = "0.12.1" diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 8005b20206d..42f727fd2d1 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -12,7 +12,7 @@ doctest = 
false [dependencies] cov-mark = "2.0.0-pre.1" -itertools = "0.10.3" +itertools = "0.10.5" rowan = "0.15.8" rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } rustc-hash = "1.1.0" diff --git a/crates/text-edit/Cargo.toml b/crates/text-edit/Cargo.toml index cf14bbd3c3e..7a90d64a98b 100644 --- a/crates/text-edit/Cargo.toml +++ b/crates/text-edit/Cargo.toml @@ -10,5 +10,5 @@ rust-version = "1.57" doctest = false [dependencies] -itertools = "0.10.3" +itertools = "0.10.5" text-size = "1.1.0" diff --git a/lib/lsp-server/Cargo.toml b/lib/lsp-server/Cargo.toml index b236b156cf9..5922bbfdb48 100644 --- a/lib/lsp-server/Cargo.toml +++ b/lib/lsp-server/Cargo.toml @@ -8,7 +8,7 @@ edition = "2021" [dependencies] log = "0.4.17" -serde_json = "1.0.85" +serde_json = "1.0.86" serde = { version = "1.0.144", features = ["derive"] } crossbeam-channel = "0.5.6" diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 14816912b72..0be0bf920de 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" rust-version = "1.57" [dependencies] -anyhow = "1.0.57" +anyhow = "1.0.62" flate2 = "1.0.24" write-json = "0.1.2" xshell = "0.2.2" From 50f990c46f50e0fc8a0dfb3877f8663429ef74a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 6 Oct 2022 15:28:50 +0300 Subject: [PATCH 47/66] Bump smallvec --- Cargo.lock | 4 ++-- crates/hir-def/Cargo.toml | 2 +- crates/hir-expand/Cargo.toml | 2 +- crates/hir-ty/Cargo.toml | 2 +- crates/hir/Cargo.toml | 2 +- crates/ide-completion/Cargo.toml | 2 +- crates/mbe/Cargo.toml | 2 +- 7 files changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e58c5b0078c..d7472ba0e89 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1554,9 +1554,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" +checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0" [[package]] name = "smol_str" diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index cb41683b8c1..73a40565795 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -25,7 +25,7 @@ itertools = "0.10.5" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.15.0" rustc-hash = "1.1.0" -smallvec = "1.9.0" +smallvec = "1.10.0" tracing = "0.1.35" stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/hir-expand/Cargo.toml b/crates/hir-expand/Cargo.toml index 97473aa39b9..3359c99b396 100644 --- a/crates/hir-expand/Cargo.toml +++ b/crates/hir-expand/Cargo.toml @@ -19,7 +19,7 @@ itertools = "0.10.5" hashbrown = { version = "0.12.1", features = [ "inline-more", ], default-features = false } -smallvec = { version = "1.9.0", features = ["const_new"] } +smallvec = { version = "1.10.0", features = ["const_new"] } stdx = { path = "../stdx", version = "0.0.0" } base-db = { path = "../base-db", version = "0.0.0" } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index b4860fa55bd..bbc9e0623b0 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -13,7 +13,7 @@ doctest = false cov-mark = "2.0.0-pre.1" itertools = "0.10.5" arrayvec = "0.7.2" -smallvec = "1.9.0" +smallvec = "1.10.0" ena = "0.14.0" tracing = "0.1.35" rustc-hash = "1.1.0" diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml index 03910b4c194..e1418de3cdc 100644 --- a/crates/hir/Cargo.toml +++ 
b/crates/hir/Cargo.toml @@ -14,7 +14,7 @@ rustc-hash = "1.1.0" either = "1.7.0" arrayvec = "0.7.2" itertools = "0.10.5" -smallvec = "1.9.0" +smallvec = "1.10.0" once_cell = "1.15.0" stdx = { path = "../stdx", version = "0.0.0" } diff --git a/crates/ide-completion/Cargo.toml b/crates/ide-completion/Cargo.toml index e0334f4c124..75835bce95d 100644 --- a/crates/ide-completion/Cargo.toml +++ b/crates/ide-completion/Cargo.toml @@ -14,7 +14,7 @@ cov-mark = "2.0.0-pre.1" itertools = "0.10.5" once_cell = "1.15.0" -smallvec = "1.9.0" +smallvec = "1.10.0" stdx = { path = "../stdx", version = "0.0.0" } syntax = { path = "../syntax", version = "0.0.0" } diff --git a/crates/mbe/Cargo.toml b/crates/mbe/Cargo.toml index 5ff3448a19c..13cd8901031 100644 --- a/crates/mbe/Cargo.toml +++ b/crates/mbe/Cargo.toml @@ -12,7 +12,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" rustc-hash = "1.1.0" -smallvec = "1.9.0" +smallvec = "1.10.0" tracing = "0.1.35" syntax = { path = "../syntax", version = "0.0.0" } From bb6990c4c99ce594c5538f1f2e6d98b505cd6cd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 8 Oct 2022 21:30:04 +0300 Subject: [PATCH 48/66] Bump tracing --- Cargo.lock | 16 ++++++++-------- crates/flycheck/Cargo.toml | 2 +- crates/hir-ty/Cargo.toml | 2 +- crates/proc-macro-api/Cargo.toml | 2 +- crates/rust-analyzer/Cargo.toml | 2 +- 5 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d7472ba0e89..41237806787 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1758,9 +1758,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.36" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ "cfg-if", "pin-project-lite", @@ -1770,9 +1770,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" +checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", @@ -1781,9 +1781,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.29" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7" +checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a" dependencies = [ "once_cell", "valuable", @@ -1802,9 +1802,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.15" +version = "0.3.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60db860322da191b40952ad9affe65ea23e7dd6a5c442c2c42865810c6ab8e6b" +checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70" dependencies = [ "matchers", "once_cell", diff --git a/crates/flycheck/Cargo.toml b/crates/flycheck/Cargo.toml index 07b27a8322f..2ad32d24837 100644 --- a/crates/flycheck/Cargo.toml +++ b/crates/flycheck/Cargo.toml @@ -11,7 +11,7 @@ doctest = false [dependencies] crossbeam-channel = "0.5.5" -tracing = "0.1.35" +tracing = "0.1.37" cargo_metadata = "0.15.0" rustc-hash = "1.1.0" serde = { version = "1.0.137", features = ["derive"] } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index bbc9e0623b0..fc790a12f5b 100644 
--- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -37,7 +37,7 @@ limit = { path = "../limit", version = "0.0.0" } test-utils = { path = "../test-utils" } expect-test = "1.4.0" tracing = "0.1.35" -tracing-subscriber = { version = "0.3.14", default-features = false, features = [ +tracing-subscriber = { version = "0.3.16", default-features = false, features = [ "env-filter", "registry", ] } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 85a1c13fe7d..54879c1870c 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -19,7 +19,7 @@ object = { version = "0.29.0", default-features = false, features = [ ] } serde = { version = "1.0.137", features = ["derive"] } serde_json = { version = "1.0.81", features = ["unbounded_depth"] } -tracing = "0.1.35" +tracing = "0.1.37" memmap2 = "0.5.4" snap = "1.0.5" diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index c769fbe84b2..4e064fbba33 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -36,7 +36,7 @@ num_cpus = "1.13.1" mimalloc = { version = "0.1.29", default-features = false, optional = true } lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" } tracing = "0.1.35" -tracing-subscriber = { version = "0.3.14", default-features = false, features = [ +tracing-subscriber = { version = "0.3.16", default-features = false, features = [ "env-filter", "registry", "fmt", From 6e74a229181307b69af2272e356692c3b9b051b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 12:53:26 +0300 Subject: [PATCH 49/66] Bump home --- Cargo.lock | 4 ++-- crates/toolchain/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41237806787..6b03adc4e58 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -573,9 +573,9 @@ dependencies = [ [[package]] name = "home" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654" +checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408" dependencies = [ "winapi", ] diff --git a/crates/toolchain/Cargo.toml b/crates/toolchain/Cargo.toml index 7d3b9e09ece..3e0f31f19c5 100644 --- a/crates/toolchain/Cargo.toml +++ b/crates/toolchain/Cargo.toml @@ -10,4 +10,4 @@ rust-version = "1.57" doctest = false [dependencies] -home = "0.5.3" +home = "0.5.4" From 3a5f6a705ef47456361c237d8ba1140a8e4ea27a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 12:54:25 +0300 Subject: [PATCH 50/66] Bump dashmap --- Cargo.lock | 9 +++++---- crates/hir-def/Cargo.toml | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6b03adc4e58..7deb70e6c4f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -294,13 +294,14 @@ dependencies = [ [[package]] name = "dashmap" -version = "5.3.4" +version = "5.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3495912c9c1ccf2e18976439f4443f3fee0fd61f424ff99fde6a66b15ecb448f" +checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" dependencies = [ "cfg-if", "hashbrown", "lock_api", + "once_cell", "parking_lot_core 0.9.3", ] @@ -844,9 +845,9 @@ version = "0.0.0" [[package]] name = "lock_api" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" dependencies = [ "autocfg", "scopeguard", diff --git a/crates/hir-def/Cargo.toml b/crates/hir-def/Cargo.toml index 73a40565795..4ad8e75970b 100644 --- a/crates/hir-def/Cargo.toml +++ b/crates/hir-def/Cargo.toml @@ -15,7 +15,7 @@ arrayvec = "0.7.2" bitflags = "1.3.2" cov-mark = "2.0.0-pre.1" # We need to freeze the version of the crate, as the raw-api feature is considered unstable -dashmap = { version = "=5.3.4", features = ["raw-api"] } +dashmap = { version = "=5.4.0", features = ["raw-api"] } drop_bomb = "0.1.5" either = "1.7.0" fst = { version = "0.4.7", default-features = false } From e294640484731dca722f2cc3509a18e8b4a4a20d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 12:55:56 +0300 Subject: [PATCH 51/66] Bump libc --- Cargo.lock | 4 ++-- crates/profile/Cargo.toml | 2 +- crates/stdx/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7deb70e6c4f..66dbb1b68dc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -816,9 +816,9 @@ checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" [[package]] name = "libc" -version = "0.2.132" +version = "0.2.135" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5" +checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c" [[package]] name = "libloading" diff --git a/crates/profile/Cargo.toml b/crates/profile/Cargo.toml index ed3d816da52..5697aea964f 100644 --- a/crates/profile/Cargo.toml +++ b/crates/profile/Cargo.toml @@ -12,7 +12,7 @@ doctest = false [dependencies] once_cell = "1.15.0" cfg-if = "1.0.0" -libc = "0.2.126" +libc = "0.2.135" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } countme = { version = "3.0.1", features = ["enable"] } jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true } diff --git a/crates/stdx/Cargo.toml b/crates/stdx/Cargo.toml index 092b99ae515..e0657ab0f6d 100644 --- a/crates/stdx/Cargo.toml +++ b/crates/stdx/Cargo.toml @@ -10,7 +10,7 @@ rust-version = "1.57" doctest = false [dependencies] -libc = "0.2.126" +libc = "0.2.135" backtrace = { version = "0.3.65", optional = true } always-assert = { version = "0.1.2", features = ["log"] } # Think twice before adding anything here From 97eebbfab603c72cd1a3fbec8b016a436835a5fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 12:57:30 +0300 Subject: [PATCH 52/66] Bump url --- Cargo.lock | 25 ++++++++----------------- crates/ide/Cargo.toml | 2 +- 2 files changed, 9 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 66dbb1b68dc..a2bc43219a3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -405,11 +405,10 @@ dependencies = [ [[package]] name = "form_urlencoded" -version = "1.0.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191" +checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8" dependencies = [ - "matches", "percent-encoding", ] @@ -715,11 +714,10 @@ dependencies = [ [[package]] name = "idna" -version = "0.2.3" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" 
+checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6" dependencies = [ - "matches", "unicode-bidi", "unicode-normalization", ] @@ -895,12 +893,6 @@ dependencies = [ "regex-automata", ] -[[package]] -name = "matches" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" - [[package]] name = "mbe" version = "0.0.0" @@ -1099,9 +1091,9 @@ version = "0.0.0" [[package]] name = "percent-encoding" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" +checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e" [[package]] name = "perf-event" @@ -1894,13 +1886,12 @@ checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" [[package]] name = "url" -version = "2.2.2" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643" dependencies = [ "form_urlencoded", "idna", - "matches", "percent-encoding", "serde", ] diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 15ad6bd1fe1..389559d5d2f 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -18,7 +18,7 @@ tracing = "0.1.35" oorandom = "11.1.3" pulldown-cmark-to-cmark = "10.0.1" pulldown-cmark = { version = "0.9.1", default-features = false } -url = "2.2.2" +url = "2.3.1" dot = "0.1.4" stdx = { path = "../stdx", version = "0.0.0" } From 39777bf941722e61dd33e95a6296de48e782b9a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 12:58:57 +0300 Subject: [PATCH 53/66] Bump rowan --- Cargo.lock | 4 ++-- crates/syntax/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a2bc43219a3..baacf04726a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1333,9 +1333,9 @@ checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "rowan" -version = "0.15.8" +version = "0.15.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88acf7b001007e9e8c989fe7449f6601d909e5dd2c56399fc158977ad6c56e8" +checksum = "5811547e7ba31e903fe48c8ceab10d40d70a101f3d15523c847cce91aa71f332" dependencies = [ "countme", "hashbrown", diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 42f727fd2d1..94f275d8e39 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -13,7 +13,7 @@ doctest = false [dependencies] cov-mark = "2.0.0-pre.1" itertools = "0.10.5" -rowan = "0.15.8" +rowan = "0.15.10" rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" } rustc-hash = "1.1.0" once_cell = "1.15.0" From 275848803bee0b80f5bf67379aaac4d5cbb966a2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 12:59:56 +0300 Subject: [PATCH 54/66] Bump mimalloc --- Cargo.lock | 8 ++++---- crates/rust-analyzer/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index baacf04726a..21a2b3c6746 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -830,9 +830,9 @@ dependencies = [ [[package]] name = "libmimalloc-sys" -version = "0.1.25" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c" +checksum = "8fc093ab289b0bfda3aa1bdfab9c9542be29c7ef385cfcbe77f8c9813588eb48" dependencies = [ "cc", ] @@ -934,9 +934,9 @@ dependencies = [ [[package]] name = "mimalloc" -version = "0.1.29" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698" +checksum = "76ce6a4b40d3bff9eb3ce9881ca0737a85072f9f975886082640cd46a75cdb35" dependencies = [ "libmimalloc-sys", ] diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 4e064fbba33..5445028536c 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml @@ -33,7 +33,7 @@ serde_json = { version = "1.0.81", features = ["preserve_order"] } threadpool = "1.8.1" rayon = "1.5.3" num_cpus = "1.13.1" -mimalloc = { version = "0.1.29", default-features = false, optional = true } +mimalloc = { version = "0.1.30", default-features = false, optional = true } lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" } tracing = "0.1.35" tracing-subscriber = { version = "0.3.16", default-features = false, features = [ From 792920f4419aa16563fdc3d0a171c31841cea3a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 13:00:41 +0300 Subject: [PATCH 55/66] Bump pulldown-cmark-to-cmark --- Cargo.lock | 4 ++-- crates/ide/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 21a2b3c6746..e05ae0c9009 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1258,9 +1258,9 @@ dependencies = [ [[package]] name = "pulldown-cmark-to-cmark" -version = "10.0.2" +version = "10.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1353ac408192fa925228d3e60ff746167d03f4f7e54835d78ef79e08225d913" +checksum = "0194e6e1966c23cc5fd988714f85b18d548d773e81965413555d96569931833d" dependencies = [ "pulldown-cmark", ] diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml index 389559d5d2f..712459a7ee9 100644 --- a/crates/ide/Cargo.toml +++ b/crates/ide/Cargo.toml @@ -16,7 +16,7 @@ either = "1.7.0" itertools = "0.10.5" tracing = "0.1.35" oorandom = "11.1.3" -pulldown-cmark-to-cmark = "10.0.1" +pulldown-cmark-to-cmark = "10.0.4" pulldown-cmark = { version = "0.9.1", default-features = false } url = "2.3.1" dot = "0.1.4" From f5bbf9a06a29dd282096a2cf277b09820abaa58b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 13:02:10 +0300 Subject: [PATCH 56/66] Bump proc-macro2 --- Cargo.lock | 4 ++-- crates/syntax/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e05ae0c9009..62d4a5fb897 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1183,9 +1183,9 @@ version = "0.0.0" [[package]] name = "proc-macro2" -version = "1.0.43" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" +checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" dependencies = [ "unicode-ident", ] diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml index 94f275d8e39..1ef903371cf 100644 --- a/crates/syntax/Cargo.toml +++ b/crates/syntax/Cargo.toml @@ -28,7 +28,7 @@ profile = { path = "../profile", version = "0.0.0" } [dev-dependencies] rayon = "1.5.3" expect-test = "1.4.0" -proc-macro2 = "1.0.39" +proc-macro2 = "1.0.47" quote = "1.0.20" ungrammar = 
"1.16.1" From 119e1bafcc2f734807cb53b5be2ce9e932a051ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 13:03:39 +0300 Subject: [PATCH 57/66] Bump notify --- Cargo.lock | 4 ++-- crates/vfs-notify/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 62d4a5fb897..0c4799de45d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -973,9 +973,9 @@ dependencies = [ [[package]] name = "notify" -version = "5.0.0-pre.16" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "530f6314d6904508082f4ea424a0275cf62d341e118b313663f266429cb19693" +checksum = "ed2c66da08abae1c024c01d635253e402341b4060a12e99b31c7594063bf490a" dependencies = [ "bitflags", "crossbeam-channel", diff --git a/crates/vfs-notify/Cargo.toml b/crates/vfs-notify/Cargo.toml index fcc693a7dda..df5dc24e2cd 100644 --- a/crates/vfs-notify/Cargo.toml +++ b/crates/vfs-notify/Cargo.toml @@ -14,7 +14,7 @@ tracing = "0.1.35" jod-thread = "0.1.2" walkdir = "2.3.2" crossbeam-channel = "0.5.5" -notify = "=5.0.0-pre.16" +notify = "5.0" vfs = { path = "../vfs", version = "0.0.0" } paths = { path = "../paths", version = "0.0.0" } From b513111f3c2507ed6f7e5464c4da67d07c75129c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 13:06:10 +0300 Subject: [PATCH 58/66] Bump the rest of the deps --- Cargo.lock | 54 ++++++++++++++++++++++++++---------------------------- 1 file changed, 26 insertions(+), 28 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0c4799de45d..bc196989e5c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -270,26 +270,24 @@ dependencies = [ [[package]] name = "crossbeam-epoch" -version = "0.9.10" +version = "0.9.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" +checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348" dependencies = [ "autocfg", "cfg-if", "crossbeam-utils", "memoffset", - "once_cell", "scopeguard", ] [[package]] name = "crossbeam-utils" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" +checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac" dependencies = [ "cfg-if", - "once_cell", ] [[package]] @@ -772,9 +770,9 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" +checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc" [[package]] name = "jod-thread" @@ -943,9 +941,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.5.3" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", ] @@ -1081,9 +1079,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.8" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9423e2b32f7a043629287a536f21951e8c6a82482d0acb1eeebfc90bc2225b22" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "paths" @@ -1586,9 +1584,9 @@ dependencies = [ 
[[package]] name = "syn" -version = "1.0.99" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" +checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1" dependencies = [ "proc-macro2", "quote", @@ -1659,18 +1657,18 @@ checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a" [[package]] name = "thiserror" -version = "1.0.31" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" +checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.31" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" +checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", @@ -1708,9 +1706,9 @@ dependencies = [ [[package]] name = "tikv-jemalloc-sys" -version = "0.5.1+5.3.0-patched" +version = "0.5.2+5.3.0-patched" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "931e876f91fed0827f863a2d153897790da0b24d882c721a79cb3beb0b903261" +checksum = "ec45c14da997d0925c7835883e4d5c181f196fa142f8c19d7643d1e9af2592c3" dependencies = [ "cc", "fs_extra", @@ -1859,30 +1857,30 @@ checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992" [[package]] name = "unicode-ident" -version = "1.0.1" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" +checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" [[package]] name = "unicode-normalization" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" dependencies = [ "tinyvec", ] [[package]] name = "unicode-segmentation" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] name = "unicode-xid" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "url" From 342764d31ac938547b40ca76a300db91e342813b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 14:07:21 +0300 Subject: [PATCH 59/66] Add new license --- crates/rust-analyzer/tests/slow-tests/tidy.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 58099a58de0..24e68eca676 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ -177,6 +177,7 @@ fn check_licenses() { let sh = &Shell::new().unwrap(); let expected = " +(MIT OR Apache-2.0) AND Unicode-DFS-2016 0BSD OR MIT OR Apache-2.0 
Apache-2.0 Apache-2.0 OR BSL-1.0 From 7bf6f188de87ac682375e5228300ab707a912600 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sat, 15 Oct 2022 14:44:51 +0300 Subject: [PATCH 60/66] Bump actions/setup-node --- .github/workflows/ci.yaml | 4 ++-- .github/workflows/release.yaml | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9f5d02cd6b5..bb77324378a 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -116,9 +116,9 @@ jobs: uses: actions/checkout@v3 - name: Install Nodejs - uses: actions/setup-node@v1 + uses: actions/setup-node@v3 with: - node-version: 16.x + node-version: 16 - name: Install xvfb if: matrix.os == 'ubuntu-latest' diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index f2bbf1f6ff1..422fe29f9d5 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -76,9 +76,9 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v1 + uses: actions/setup-node@v3 with: - node-version: 16.x + node-version: 16 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -184,9 +184,9 @@ jobs: needs: ["dist", "dist-x86_64-unknown-linux-musl"] steps: - name: Install Nodejs - uses: actions/setup-node@v1 + uses: actions/setup-node@v3 with: - node-version: 16.x + node-version: 16 - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' From dd4d3f0a098359bd6f632caa6d7817fffec1179f Mon Sep 17 00:00:00 2001 From: Volkan Sagcan Date: Sat, 15 Oct 2022 18:00:32 +0200 Subject: [PATCH 61/66] fix #13105: Ignore auto-import assist on parameter names --- crates/ide-assists/src/handlers/auto_import.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/crates/ide-assists/src/handlers/auto_import.rs b/crates/ide-assists/src/handlers/auto_import.rs index e257218ba93..678dc877d13 100644 --- a/crates/ide-assists/src/handlers/auto_import.rs +++ b/crates/ide-assists/src/handlers/auto_import.rs @@ -156,6 +156,8 @@ pub(super) fn find_importable_node( { ImportAssets::for_method_call(&method_under_caret, &ctx.sema) .zip(Some(method_under_caret.syntax().clone().into())) + } else if let Some(_) = ctx.find_node_at_offset_with_descend::() { + None } else if let Some(pat) = ctx .find_node_at_offset_with_descend::() .filter(ast::IdentPat::is_simple_ident) @@ -268,6 +270,20 @@ mod tests { assert_eq!(labels, order); } + #[test] + fn ignore_parameter_name() { + check_assist_not_applicable( + auto_import, + r" + mod foo { + pub mod bar {} + } + + fn foo(bar$0: &str) {} + ", + ); + } + #[test] fn prefer_shorter_paths() { let before = r" From 88d0efc243f29aa018ef9dc8cd2ebf8e4451402d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Sun, 16 Oct 2022 11:52:01 +0200 Subject: [PATCH 62/66] Update guide.md to reflect support for proc-macros --- docs/dev/guide.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docs/dev/guide.md b/docs/dev/guide.md index c9ff0b6c29e..52a13da31c5 100644 --- a/docs/dev/guide.md +++ b/docs/dev/guide.md @@ -88,9 +88,8 @@ is lower than Cargo's model of packages: each Cargo package consists of several targets, each of which is a separate crate (or several crates, if you try different feature combinations). -Procedural macros should become inputs as well, but currently they are not -supported. 
Procedural macro will be a black box `Box TokenStream>` -function, and will be inserted into the crate graph just like dependencies. +Procedural macros are inputs as well, roughly modeled as a crate with a bunch of +additional black box `dyn Fn(TokenStream) -> TokenStream` functions. Soon we'll talk how we build an LSP server on top of `Analysis`, but first, let's deal with that paths issue. From 310a72bf472901ff238aa499dc7175353de67528 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Fri, 14 Oct 2022 00:39:14 +0900 Subject: [PATCH 63/66] Bump chalk to 0.86 Two breaking changes: - `TypeFolder` has been split into `TypeFolder` and `FallibleTypeFolder` - `ProjectionTy::self_type_parameter()` has been removed --- Cargo.lock | 17 +++--- Cargo.toml | 1 + crates/hir-ty/Cargo.toml | 7 +-- crates/hir-ty/src/chalk_db.rs | 4 +- crates/hir-ty/src/chalk_ext.rs | 7 ++- crates/hir-ty/src/display.rs | 4 +- crates/hir-ty/src/infer/unify.rs | 71 +++++++++++++------------- crates/hir-ty/src/lib.rs | 88 +++++++++++++++----------------- crates/hir-ty/src/traits.rs | 6 +-- 9 files changed, 103 insertions(+), 102 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e45cde68388..e612495df57 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -171,9 +171,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chalk-derive" -version = "0.84.0" +version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf29c109d57f8d57b0e7675391be37a9285d86dd93278bd5f14a0ad3c447a6c2" +checksum = "5499d415d855b5094366a824815341893ad3de0ecb6048c430118bdae6d27402" dependencies = [ "proc-macro2", "quote", @@ -183,9 +183,9 @@ dependencies = [ [[package]] name = "chalk-ir" -version = "0.84.0" +version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d391763027b5e50a5e15caf6d2857ec585fd68160367bbeac9e1804209620918" +checksum = "3800118c76a48507b0eece3a01f3a429b5c478d203c493096e6040c67ab960e1" dependencies = [ "bitflags", "chalk-derive", @@ -194,9 +194,9 @@ dependencies = [ [[package]] name = "chalk-recursive" -version = "0.84.0" +version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afafd92dcdc7fe0ea940ee94bdd8cc5bd18f4a4a84c593d6d7025fe16c150478" +checksum = "1baf60628fd73104d1f8562586a52d48f37f1e84435aab2e62674b1fd935b8c8" dependencies = [ "chalk-derive", "chalk-ir", @@ -207,9 +207,9 @@ dependencies = [ [[package]] name = "chalk-solve" -version = "0.84.0" +version = "0.86.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3af1d111f11c91c48ace02e93e470c5bae6d2631bd112e4545317da53660d7fc" +checksum = "0e9c3c068f9358786348e58a1b94ef0a5cf90a9810fc1f10fda896f0b5d80185" dependencies = [ "chalk-derive", "chalk-ir", @@ -546,6 +546,7 @@ version = "0.0.0" dependencies = [ "arrayvec", "base-db", + "chalk-derive", "chalk-ir", "chalk-recursive", "chalk-solve", diff --git a/Cargo.toml b/Cargo.toml index 6b68ca82389..286ef1e7dcb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,6 +27,7 @@ debug = 0 # chalk-solve = { path = "../chalk/chalk-solve" } # chalk-ir = { path = "../chalk/chalk-ir" } # chalk-recursive = { path = "../chalk/chalk-recursive" } +# chalk-derive = { path = "../chalk/chalk-derive" } # ungrammar = { path = "../ungrammar" } diff --git a/crates/hir-ty/Cargo.toml b/crates/hir-ty/Cargo.toml index 7f143f396c7..df68a37bf45 100644 --- a/crates/hir-ty/Cargo.toml +++ b/crates/hir-ty/Cargo.toml @@ -18,9 +18,10 @@ ena = "0.14.0" tracing = "0.1.35" rustc-hash 
= "1.1.0" scoped-tls = "1.0.0" -chalk-solve = { version = "0.84.0", default-features = false } -chalk-ir = "0.84.0" -chalk-recursive = { version = "0.84.0", default-features = false } +chalk-solve = { version = "0.86.0", default-features = false } +chalk-ir = "0.86.0" +chalk-recursive = { version = "0.86.0", default-features = false } +chalk-derive = "0.86.0" la-arena = { version = "0.3.0", path = "../../lib/la-arena" } once_cell = "1.12.0" typed-arena = "2.0.1" diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 3f3f8f7d0f2..43c3451cab3 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -823,10 +823,10 @@ pub(super) fn generic_predicate_to_inline_bound( Some(chalk_ir::Binders::new(binders, rust_ir::InlineBound::TraitBound(trait_bound))) } WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(projection_ty), ty }) => { - if projection_ty.self_type_parameter(Interner) != self_ty_shifted_in { + let trait_ = projection_ty.trait_(db); + if projection_ty.self_type_parameter(db) != self_ty_shifted_in { return None; } - let trait_ = projection_ty.trait_(db); let args_no_self = projection_ty.substitution.as_slice(Interner)[1..] .iter() .map(|ty| ty.clone().cast(Interner)) diff --git a/crates/hir-ty/src/chalk_ext.rs b/crates/hir-ty/src/chalk_ext.rs index 4f0e9dbf1e4..e2099d7e509 100644 --- a/crates/hir-ty/src/chalk_ext.rs +++ b/crates/hir-ty/src/chalk_ext.rs @@ -262,7 +262,7 @@ impl TyExt for Ty { WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), ty: _, - }) => &proj.self_type_parameter(Interner) == self, + }) => &proj.self_type_parameter(db) == self, _ => false, }) .collect::>(); @@ -333,6 +333,7 @@ impl TyExt for Ty { pub trait ProjectionTyExt { fn trait_ref(&self, db: &dyn HirDatabase) -> TraitRef; fn trait_(&self, db: &dyn HirDatabase) -> TraitId; + fn self_type_parameter(&self, db: &dyn HirDatabase) -> Ty; } impl ProjectionTyExt for ProjectionTy { @@ -349,6 +350,10 @@ impl ProjectionTyExt for ProjectionTy { _ => panic!("projection ty without parent trait"), } } + + fn self_type_parameter(&self, db: &dyn HirDatabase) -> Ty { + self.trait_ref(db).self_type_parameter(Interner) + } } pub trait TraitRefExt { diff --git a/crates/hir-ty/src/display.rs b/crates/hir-ty/src/display.rs index a6602747ef7..0221f922feb 100644 --- a/crates/hir-ty/src/display.rs +++ b/crates/hir-ty/src/display.rs @@ -291,7 +291,7 @@ impl HirDisplay for ProjectionTy { let trait_ = f.db.trait_data(self.trait_(f.db)); write!(f, "<")?; - self.self_type_parameter(Interner).hir_fmt(f)?; + self.self_type_parameter(f.db).hir_fmt(f)?; write!(f, " as {}", trait_.name)?; if self.substitution.len(Interner) > 1 { write!(f, "<")?; @@ -731,7 +731,7 @@ impl HirDisplay for Ty { WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), ty: _, - }) => &proj.self_type_parameter(Interner) == self, + }) => &proj.self_type_parameter(f.db) == self, _ => false, }) .collect::>(); diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index 6ccd0b215c6..b00e3216b2d 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -4,7 +4,7 @@ use std::{fmt, mem, sync::Arc}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, zip::Zip, CanonicalVarKind, FloatTy, - IntTy, NoSolution, TyVariableKind, UniverseIndex, + IntTy, TyVariableKind, UniverseIndex, }; use chalk_solve::infer::ParameterEnaVariableExt; use ena::unify::UnifyKey; @@ -331,7 +331,6 @@ impl<'a> InferenceTable<'a> { &mut resolve::Resolver { 
table: self, var_stack, fallback }, DebruijnIndex::INNERMOST, ) - .expect("fold failed unexpectedly") } pub(crate) fn resolve_completely(&mut self, t: T) -> T @@ -452,13 +451,14 @@ impl<'a> InferenceTable<'a> { f: impl FnOnce(&mut Self) -> T, ) -> T { use chalk_ir::fold::TypeFolder; + + #[derive(chalk_derive::FallibleTypeFolder)] + #[has_interner(Interner)] struct VarFudger<'a, 'b> { table: &'a mut InferenceTable<'b>, highest_known_var: InferenceVar, } impl<'a, 'b> TypeFolder for VarFudger<'a, 'b> { - type Error = NoSolution; - fn as_dyn(&mut self) -> &mut dyn TypeFolder { self } @@ -472,24 +472,24 @@ impl<'a> InferenceTable<'a> { var: chalk_ir::InferenceVar, kind: TyVariableKind, _outer_binder: chalk_ir::DebruijnIndex, - ) -> chalk_ir::Fallible> { - Ok(if var < self.highest_known_var { + ) -> chalk_ir::Ty { + if var < self.highest_known_var { var.to_ty(Interner, kind) } else { self.table.new_type_var() - }) + } } fn fold_inference_lifetime( &mut self, var: chalk_ir::InferenceVar, _outer_binder: chalk_ir::DebruijnIndex, - ) -> chalk_ir::Fallible> { - Ok(if var < self.highest_known_var { + ) -> chalk_ir::Lifetime { + if var < self.highest_known_var { var.to_lifetime(Interner) } else { self.table.new_lifetime_var() - }) + } } fn fold_inference_const( @@ -497,12 +497,12 @@ impl<'a> InferenceTable<'a> { ty: chalk_ir::Ty, var: chalk_ir::InferenceVar, _outer_binder: chalk_ir::DebruijnIndex, - ) -> chalk_ir::Fallible> { - Ok(if var < self.highest_known_var { + ) -> chalk_ir::Const { + if var < self.highest_known_var { var.to_const(Interner, ty) } else { self.table.new_const_var(ty) - }) + } } } @@ -512,7 +512,6 @@ impl<'a> InferenceTable<'a> { self.rollback_to(snapshot); result .fold_with(&mut VarFudger { table: self, highest_known_var }, DebruijnIndex::INNERMOST) - .expect("fold_with with VarFudger") } /// This checks whether any of the free variables in the `canonicalized` @@ -639,21 +638,24 @@ mod resolve { use chalk_ir::{ cast::Cast, fold::{TypeFoldable, TypeFolder}, - Fallible, NoSolution, }; use hir_def::type_ref::ConstScalar; - pub(super) struct Resolver<'a, 'b, F> { + #[derive(chalk_derive::FallibleTypeFolder)] + #[has_interner(Interner)] + pub(super) struct Resolver< + 'a, + 'b, + F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg, + > { pub(super) table: &'a mut InferenceTable<'b>, pub(super) var_stack: &'a mut Vec, pub(super) fallback: F, } - impl<'a, 'b, 'i, F> TypeFolder for Resolver<'a, 'b, F> + impl<'a, 'b, F> TypeFolder for Resolver<'a, 'b, F> where - F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg + 'i, + F: Fn(InferenceVar, VariableKind, GenericArg, DebruijnIndex) -> GenericArg, { - type Error = NoSolution; - fn as_dyn(&mut self) -> &mut dyn TypeFolder { self } @@ -667,20 +669,19 @@ mod resolve { var: InferenceVar, kind: TyVariableKind, outer_binder: DebruijnIndex, - ) -> Fallible { + ) -> Ty { let var = self.table.var_unification_table.inference_var_root(var); if self.var_stack.contains(&var) { // recursive type let default = self.table.fallback_value(var, kind).cast(Interner); - return Ok((self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) + return (self.fallback)(var, VariableKind::Ty(kind), default, outer_binder) .assert_ty_ref(Interner) - .clone()); + .clone(); } let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { // known_ty may contain other variables that are known by now self.var_stack.push(var); - let result = - known_ty.fold_with(self, outer_binder).expect("fold 
failed unexpectedly"); + let result = known_ty.fold_with(self, outer_binder); self.var_stack.pop(); result.assert_ty_ref(Interner).clone() } else { @@ -689,7 +690,7 @@ mod resolve { .assert_ty_ref(Interner) .clone() }; - Ok(result) + result } fn fold_inference_const( @@ -697,7 +698,7 @@ mod resolve { ty: Ty, var: InferenceVar, outer_binder: DebruijnIndex, - ) -> Fallible { + ) -> Const { let var = self.table.var_unification_table.inference_var_root(var); let default = ConstData { ty: ty.clone(), @@ -707,35 +708,33 @@ mod resolve { .cast(Interner); if self.var_stack.contains(&var) { // recursive - return Ok((self.fallback)(var, VariableKind::Const(ty), default, outer_binder) + return (self.fallback)(var, VariableKind::Const(ty), default, outer_binder) .assert_const_ref(Interner) - .clone()); + .clone(); } - let result = if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { + if let Some(known_ty) = self.table.var_unification_table.probe_var(var) { // known_ty may contain other variables that are known by now self.var_stack.push(var); - let result = - known_ty.fold_with(self, outer_binder).expect("fold failed unexpectedly"); + let result = known_ty.fold_with(self, outer_binder); self.var_stack.pop(); result.assert_const_ref(Interner).clone() } else { (self.fallback)(var, VariableKind::Const(ty), default, outer_binder) .assert_const_ref(Interner) .clone() - }; - Ok(result) + } } fn fold_inference_lifetime( &mut self, _var: InferenceVar, _outer_binder: DebruijnIndex, - ) -> Fallible { + ) -> Lifetime { // fall back all lifetimes to 'static -- currently we don't deal // with any lifetimes, but we can sometimes get some lifetime // variables through Chalk's unification, and this at least makes // sure we don't leak them outside of inference - Ok(crate::static_lifetime()) + crate::static_lifetime() } } } diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index de4a5446e57..c4b700cbce6 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -254,13 +254,13 @@ impl CallableSig { } impl TypeFoldable for CallableSig { - fn fold_with( + fn try_fold_with( self, - folder: &mut dyn chalk_ir::fold::TypeFolder, + folder: &mut dyn chalk_ir::fold::FallibleTypeFolder, outer_binder: DebruijnIndex, ) -> Result { let vec = self.params_and_return.to_vec(); - let folded = vec.fold_with(folder, outer_binder)?; + let folded = vec.try_fold_with(folder, outer_binder)?; Ok(CallableSig { params_and_return: folded.into(), is_varargs: self.is_varargs }) } } @@ -292,16 +292,19 @@ pub(crate) fn fold_free_vars + TypeFoldable< for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty, for_const: impl FnMut(Ty, BoundVar, DebruijnIndex) -> Const, ) -> T { - use chalk_ir::{fold::TypeFolder, Fallible}; - struct FreeVarFolder(F1, F2); + use chalk_ir::fold::TypeFolder; + + #[derive(chalk_derive::FallibleTypeFolder)] + #[has_interner(Interner)] + struct FreeVarFolder< + F1: FnMut(BoundVar, DebruijnIndex) -> Ty, + F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const, + >(F1, F2); impl< - 'i, - F1: FnMut(BoundVar, DebruijnIndex) -> Ty + 'i, - F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const + 'i, + F1: FnMut(BoundVar, DebruijnIndex) -> Ty, + F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const, > TypeFolder for FreeVarFolder { - type Error = NoSolution; - fn as_dyn(&mut self) -> &mut dyn TypeFolder { self } @@ -310,12 +313,8 @@ pub(crate) fn fold_free_vars + TypeFoldable< Interner } - fn fold_free_var_ty( - &mut self, - bound_var: BoundVar, - outer_binder: DebruijnIndex, - ) -> Fallible { - 
Ok(self.0(bound_var, outer_binder)) + fn fold_free_var_ty(&mut self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Ty { + self.0(bound_var, outer_binder) } fn fold_free_var_const( @@ -323,12 +322,11 @@ pub(crate) fn fold_free_vars + TypeFoldable< ty: Ty, bound_var: BoundVar, outer_binder: DebruijnIndex, - ) -> Fallible { - Ok(self.1(ty, bound_var, outer_binder)) + ) -> Const { + self.1(ty, bound_var, outer_binder) } } t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST) - .expect("fold failed unexpectedly") } pub(crate) fn fold_tys + TypeFoldable>( @@ -351,16 +349,13 @@ pub(crate) fn fold_tys_and_consts + TypeFold f: impl FnMut(Either, DebruijnIndex) -> Either, binders: DebruijnIndex, ) -> T { - use chalk_ir::{ - fold::{TypeFolder, TypeSuperFoldable}, - Fallible, - }; - struct TyFolder(F); - impl<'i, F: FnMut(Either, DebruijnIndex) -> Either + 'i> - TypeFolder for TyFolder + use chalk_ir::fold::{TypeFolder, TypeSuperFoldable}; + #[derive(chalk_derive::FallibleTypeFolder)] + #[has_interner(Interner)] + struct TyFolder, DebruijnIndex) -> Either>(F); + impl, DebruijnIndex) -> Either> TypeFolder + for TyFolder { - type Error = NoSolution; - fn as_dyn(&mut self) -> &mut dyn TypeFolder { self } @@ -369,16 +364,16 @@ pub(crate) fn fold_tys_and_consts + TypeFold Interner } - fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible { - let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?; - Ok(self.0(Either::Left(ty), outer_binder).left().unwrap()) + fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Ty { + let ty = ty.super_fold_with(self.as_dyn(), outer_binder); + self.0(Either::Left(ty), outer_binder).left().unwrap() } - fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Fallible { - Ok(self.0(Either::Right(c), outer_binder).right().unwrap()) + fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Const { + self.0(Either::Right(c), outer_binder).right().unwrap() } } - t.fold_with(&mut TyFolder(f), binders).expect("fold failed unexpectedly") + t.fold_with(&mut TyFolder(f), binders) } /// 'Canonicalizes' the `t` by replacing any errors with new variables. 
Also @@ -390,16 +385,16 @@ where T: HasInterner, { use chalk_ir::{ - fold::{TypeFolder, TypeSuperFoldable}, + fold::{FallibleTypeFolder, TypeSuperFoldable}, Fallible, }; struct ErrorReplacer { vars: usize, } - impl TypeFolder for ErrorReplacer { + impl FallibleTypeFolder for ErrorReplacer { type Error = NoSolution; - fn as_dyn(&mut self) -> &mut dyn TypeFolder { + fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder { self } @@ -407,18 +402,17 @@ where Interner } - fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible { + fn try_fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible { if let TyKind::Error = ty.kind(Interner) { let index = self.vars; self.vars += 1; Ok(TyKind::BoundVar(BoundVar::new(outer_binder, index)).intern(Interner)) } else { - let ty = ty.super_fold_with(self.as_dyn(), outer_binder)?; - Ok(ty) + ty.try_super_fold_with(self.as_dyn(), outer_binder) } } - fn fold_inference_ty( + fn try_fold_inference_ty( &mut self, _var: InferenceVar, _kind: TyVariableKind, @@ -433,7 +427,7 @@ where } } - fn fold_free_var_ty( + fn try_fold_free_var_ty( &mut self, _bound_var: BoundVar, _outer_binder: DebruijnIndex, @@ -447,7 +441,7 @@ where } } - fn fold_inference_const( + fn try_fold_inference_const( &mut self, ty: Ty, _var: InferenceVar, @@ -460,7 +454,7 @@ where } } - fn fold_free_var_const( + fn try_fold_free_var_const( &mut self, ty: Ty, _bound_var: BoundVar, @@ -473,7 +467,7 @@ where } } - fn fold_inference_lifetime( + fn try_fold_inference_lifetime( &mut self, _var: InferenceVar, _outer_binder: DebruijnIndex, @@ -485,7 +479,7 @@ where } } - fn fold_free_var_lifetime( + fn try_fold_free_var_lifetime( &mut self, _bound_var: BoundVar, _outer_binder: DebruijnIndex, @@ -498,7 +492,7 @@ where } } let mut error_replacer = ErrorReplacer { vars: 0 }; - let value = match t.clone().fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) { + let value = match t.clone().try_fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) { Ok(t) => t, Err(_) => panic!("Encountered unbound or inference vars in {:?}", t), }; diff --git a/crates/hir-ty/src/traits.rs b/crates/hir-ty/src/traits.rs index 372c3a3cca6..c425f35acfe 100644 --- a/crates/hir-ty/src/traits.rs +++ b/crates/hir-ty/src/traits.rs @@ -13,8 +13,8 @@ use syntax::SmolStr; use crate::{ db::HirDatabase, infer::unify::InferenceTable, AliasEq, AliasTy, Canonical, DomainGoal, Goal, - Guidance, InEnvironment, Interner, ProjectionTy, Solution, TraitRefExt, Ty, TyKind, - WhereClause, + Guidance, InEnvironment, Interner, ProjectionTy, ProjectionTyExt, Solution, TraitRefExt, Ty, + TyKind, WhereClause, }; /// This controls how much 'time' we give the Chalk solver before giving up. @@ -95,7 +95,7 @@ pub(crate) fn trait_solve_query( .. 
}))) = &goal.value.goal.data(Interner) { - if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(Interner).kind(Interner) { + if let TyKind::BoundVar(_) = projection_ty.self_type_parameter(db).kind(Interner) { // Hack: don't ask Chalk to normalize with an unknown self type, it'll say that's impossible return Some(Solution::Ambig(Guidance::Unknown)); } From 381366f1dd820a2d3c827cc123cb28e6d5f678f1 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 5 Oct 2022 19:15:07 +0200 Subject: [PATCH 64/66] Diagnose incorrect usages of the question mark operator --- crates/hir-expand/src/mod_path.rs | 1 + crates/hir-expand/src/name.rs | 2 + crates/hir-ty/src/infer.rs | 13 +-- crates/hir-ty/src/infer/expr.rs | 99 +++++++++++++++++-- crates/hir-ty/src/method_resolution.rs | 18 ++++ crates/hir-ty/src/tests/traits.rs | 92 +---------------- crates/hir/src/diagnostics.rs | 14 ++- crates/hir/src/lib.rs | 38 ++++--- .../src/handlers/incorrect_try_expr.rs | 37 +++++++ .../src/handlers/not_implemented.rs | 35 +++++++ crates/ide-diagnostics/src/lib.rs | 4 + crates/ide/src/hover/tests.rs | 22 ++++- crates/test-utils/src/minicore.rs | 39 +++++++- 13 files changed, 289 insertions(+), 125 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs create mode 100644 crates/ide-diagnostics/src/handlers/not_implemented.rs diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index d7586d129b7..68413df420c 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -259,6 +259,7 @@ macro_rules! __known_path { (core::future::Future) => {}; (core::future::IntoFuture) => {}; (core::ops::Try) => {}; + (core::ops::FromResidual) => {}; ($path:path) => { compile_error!("Please register your known path in the path module") }; diff --git a/crates/hir-expand/src/name.rs b/crates/hir-expand/src/name.rs index 2679a1c3602..8a735b965ab 100644 --- a/crates/hir-expand/src/name.rs +++ b/crates/hir-expand/src/name.rs @@ -279,6 +279,8 @@ pub mod known { RangeToInclusive, RangeTo, Range, + Residual, + FromResidual, Neg, Not, None, diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 31e56dec625..4315ccab6c1 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -190,7 +190,9 @@ pub(crate) type InferResult = Result, TypeError>; pub enum InferenceDiagnostic { NoSuchField { expr: ExprId }, BreakOutsideOfLoop { expr: ExprId, is_break: bool }, + IncorrectTryTarget { expr: ExprId }, MismatchedArgCount { call_expr: ExprId, expected: usize, found: usize }, + DoesNotImplement { expr: ExprId, trait_: TraitId, ty: Ty }, } /// A mismatch between an expected and an inferred type. 
@@ -905,17 +907,6 @@ impl<'a> InferenceContext<'a> { self.db.trait_data(trait_).associated_type_by_name(&name![Item]) } - fn resolve_ops_try_ok(&self) -> Option { - // FIXME resolve via lang_item once try v2 is stable - let path = path![core::ops::Try]; - let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; - let trait_data = self.db.trait_data(trait_); - trait_data - // FIXME remove once try v2 is stable - .associated_type_by_name(&name![Ok]) - .or_else(|| trait_data.associated_type_by_name(&name![Output])) - } - fn resolve_ops_neg_output(&self) -> Option { let trait_ = self.resolve_lang_item(name![neg])?.as_trait()?; self.db.trait_data(trait_).associated_type_by_name(&name![Output]) diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index f56108b26c4..59ab50d0717 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -19,24 +19,24 @@ use hir_def::{ resolver::resolver_for_expr, ConstParamId, FieldId, ItemContainerId, Lookup, }; -use hir_expand::name::Name; +use hir_expand::{name, name::Name}; use stdx::always; use syntax::ast::RangeOp; use crate::{ autoderef::{self, Autoderef}, consteval, - infer::{coerce::CoerceMany, find_continuable, BreakableKind}, + infer::{coerce::CoerceMany, find_continuable, path, BreakableKind}, lower::{ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode, }, mapping::{from_chalk, ToChalk}, method_resolution::{self, lang_names_for_bin_op, VisibleFromModule}, primitive::{self, UintTy}, - static_lifetime, to_chalk_trait_id, + static_lifetime, to_assoc_type_id, to_chalk_trait_id, utils::{generics, Generics}, - AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar, - Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, + AdtId, AliasEq, AliasTy, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, + ProjectionTy, Rawness, Scalar, Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind, }; use super::{ @@ -564,9 +564,29 @@ impl<'a> InferenceContext<'a> { let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) } - Expr::Try { expr } => { - let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); - self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) + &Expr::Try { expr } => { + let inner_ty = self.infer_expr_inner(expr, &Expectation::none()); + match self.resolve_try_impl_for(inner_ty.clone()) { + Some((_, Some((output, residual)))) => { + if let Some((_trait, false)) = + self.implements_from_residual(self.return_ty.clone(), residual) + { + self.push_diagnostic(InferenceDiagnostic::IncorrectTryTarget { + expr: tgt_expr, + }); + } + output + } + Some((trait_, None)) => { + self.push_diagnostic(InferenceDiagnostic::DoesNotImplement { + expr, + trait_, + ty: inner_ty, + }); + self.err_ty() + } + None => self.err_ty(), + } } Expr::Cast { expr, type_ref } => { // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary) @@ -1530,4 +1550,67 @@ impl<'a> InferenceContext<'a> { let ctx = self.breakables.pop().expect("breakable stack broken"); (ctx.may_break.then(|| ctx.coerce.complete()), res) } + + /// Check whether `ty` implements `FromResidual` + fn implements_from_residual(&mut self, ty: Ty, r: Ty) -> Option<(hir_def::TraitId, bool)> { + let from_residual_trait = self + .resolver + .resolve_known_trait(self.db.upcast(), &(super::path![core::ops::FromResidual]))?; + let r = 
GenericArgData::Ty(r).intern(Interner); + let b = TyBuilder::trait_ref(self.db, from_residual_trait); + if b.remaining() != 2 { + return Some((from_residual_trait, false)); + } + let trait_ref = b.push(ty).push(r).build(); + Some((from_residual_trait, self.table.try_obligation(trait_ref.cast(Interner)).is_some())) + } + + fn resolve_try_impl_for(&mut self, ty: Ty) -> Option<(hir_def::TraitId, Option<(Ty, Ty)>)> { + let path = path![core::ops::Try]; + let trait_ = self.resolver.resolve_known_trait(self.db.upcast(), &path)?; + + let trait_ref = TyBuilder::trait_ref(self.db, trait_).push(ty).build(); + let substitution = trait_ref.substitution.clone(); + self.push_obligation(trait_ref.clone().cast(Interner)); + + let trait_data = self.db.trait_data(trait_); + let output = trait_data.associated_type_by_name(&name![Output]); + let residual = trait_data.associated_type_by_name(&name![Residual]); + + let output_ty = match output { + Some(output) => { + let output_ty = self.table.new_type_var(); + let alias_eq = AliasEq { + alias: AliasTy::Projection(ProjectionTy { + associated_ty_id: to_assoc_type_id(output), + substitution: substitution.clone(), + }), + ty: output_ty.clone(), + }; + self.push_obligation(alias_eq.cast(Interner)); + output_ty + } + None => self.err_ty(), + }; + let residual_ty = match residual { + Some(residual) => { + let residual_ty = self.table.new_type_var(); + let alias_eq = AliasEq { + alias: AliasTy::Projection(ProjectionTy { + associated_ty_id: to_assoc_type_id(residual), + substitution, + }), + ty: residual_ty.clone(), + }; + self.push_obligation(alias_eq.cast(Interner)); + residual_ty + } + None => self.err_ty(), + }; + // FIXME: We are doing the work twice here I think? + Some(( + trait_, + self.table.try_obligation(trait_ref.cast(Interner)).map(|_| (output_ty, residual_ty)), + )) + } } diff --git a/crates/hir-ty/src/method_resolution.rs b/crates/hir-ty/src/method_resolution.rs index 5998680dcd3..224dc21e94e 100644 --- a/crates/hir-ty/src/method_resolution.rs +++ b/crates/hir-ty/src/method_resolution.rs @@ -1111,6 +1111,24 @@ pub fn resolve_indexing_op( } None } +/// Returns the receiver type for the try branch trait call. +pub fn resolve_branch_op( + db: &dyn HirDatabase, + env: Arc, + ty: Canonical, + try_trait: TraitId, +) -> Option { + let mut table = InferenceTable::new(db, env.clone()); + let ty = table.instantiate_canonical(ty); + let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty); + for (ty, adj) in deref_chain.into_iter().zip(adj) { + let goal = generic_implements_goal(db, env.clone(), try_trait, &ty); + if db.trait_solve(env.krate, goal.cast(Interner)).is_some() { + return Some(adj); + } + } + None +} macro_rules! 
check_that { ($cond:expr) => { diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 555b6972fb7..b91172e3342 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -162,98 +162,16 @@ fn test() { ); } -#[test] -fn infer_try() { - check_types( - r#" -//- /main.rs crate:main deps:core -fn test() { - let r: Result = Result::Ok(1); - let v = r?; - v; -} //^ i32 - -//- /core.rs crate:core -pub mod ops { - pub trait Try { - type Ok; - type Error; - } -} - -pub mod result { - pub enum Result { - Ok(O), - Err(E) - } - - impl crate::ops::Try for Result { - type Ok = O; - type Error = E; - } -} - -pub mod prelude { - pub mod rust_2018 { - pub use crate::{result::*, ops::*}; - } -} -"#, - ); -} - #[test] fn infer_try_trait_v2() { check_types( r#" -//- /main.rs crate:main deps:core -fn test() { - let r: Result = Result::Ok(1); +//- minicore: try +fn test() -> core::ops::ControlFlow { + let r: core::ops::ControlFlow = core::ops::ControlFlow::Continue(1.0); let v = r?; - v; -} //^ i32 - -//- /core.rs crate:core -mod ops { - mod try_trait { - pub trait Try: FromResidual { - type Output; - type Residual; - } - pub trait FromResidual::Residual> {} - } - - pub use self::try_trait::FromResidual; - pub use self::try_trait::Try; -} - -mod convert { - pub trait From {} - impl From for T {} -} - -pub mod result { - use crate::convert::From; - use crate::ops::{Try, FromResidual}; - - pub enum Infallible {} - pub enum Result { - Ok(O), - Err(E) - } - - impl Try for Result { - type Output = O; - type Error = Result; - } - - impl> FromResidual> for Result {} -} - -pub mod prelude { - pub mod rust_2018 { - pub use crate::result::*; - } + //^ f32 + r } "#, ); diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index c5dc60f1ec5..6c8b3088adc 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -6,7 +6,7 @@ use base_db::CrateId; use cfg::{CfgExpr, CfgOptions}; use either::Either; -use hir_def::path::ModPath; +use hir_def::{path::ModPath, TraitId}; use hir_expand::{name::Name, HirFileId, InFile}; use syntax::{ast, AstPtr, SyntaxNodePtr, TextRange}; @@ -33,6 +33,7 @@ diagnostics![ BreakOutsideOfLoop, InactiveCode, IncorrectCase, + IncorrectTryExpr, InvalidDeriveTarget, MacroError, MalformedDerive, @@ -40,6 +41,7 @@ diagnostics![ MissingFields, MissingMatchArms, MissingUnsafe, + NotImplemented, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch, @@ -153,6 +155,16 @@ pub struct MismatchedArgCount { pub expected: usize, pub found: usize, } +#[derive(Debug)] +pub struct IncorrectTryExpr { + pub expr: InFile>, +} +#[derive(Debug)] +pub struct NotImplemented { + pub expr: InFile>, + pub trait_: TraitId, + pub ty: Type, +} #[derive(Debug)] pub struct MissingMatchArms { diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index f5324208c9a..e6c5c6b5833 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -81,11 +81,12 @@ use crate::db::{DefDatabase, HirDatabase}; pub use crate::{ attrs::{HasAttrs, Namespace}, diagnostics::{ - AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget, - MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms, - MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch, - UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, - UnresolvedModule, UnresolvedProcMacro, + AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, IncorrectTryExpr, + 
InvalidDeriveTarget, MacroError, MalformedDerive, MismatchedArgCount, MissingFields, + MissingMatchArms, MissingUnsafe, NoSuchField, NotImplemented, + ReplaceFilterMapNextWithFindMap, TypeMismatch, UnimplementedBuiltinMacro, + UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall, UnresolvedModule, + UnresolvedProcMacro, }, has_source::HasSource, semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits}, @@ -1282,30 +1283,45 @@ impl DefWithBody { let infer = db.infer(self.into()); let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1); for d in &infer.diagnostics { - match d { + match *d { hir_ty::InferenceDiagnostic::NoSuchField { expr } => { - let field = source_map.field_syntax(*expr); + let field = source_map.field_syntax(expr); acc.push(NoSuchField { field }.into()) } - &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => { + hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => { let expr = source_map .expr_syntax(expr) .expect("break outside of loop in synthetic syntax"); acc.push(BreakOutsideOfLoop { expr, is_break }.into()) } hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => { - match source_map.expr_syntax(*call_expr) { + match source_map.expr_syntax(call_expr) { Ok(source_ptr) => acc.push( MismatchedArgCount { call_expr: source_ptr, - expected: *expected, - found: *found, + expected: expected, + found: found, } .into(), ), Err(SyntheticSyntax) => (), } } + hir_ty::InferenceDiagnostic::IncorrectTryTarget { expr } => { + let expr = source_map.expr_syntax(expr).expect("try in synthetic syntax"); + acc.push(IncorrectTryExpr { expr }.into()) + } + hir_ty::InferenceDiagnostic::DoesNotImplement { expr, trait_, ref ty } => { + let expr = source_map.expr_syntax(expr).expect("try in synthetic syntax"); + acc.push( + NotImplemented { + expr, + trait_, + ty: Type::new(db, DefWithBodyId::from(self), ty.clone()), + } + .into(), + ) + } } } for (expr, mismatch) in infer.expr_type_mismatches() { diff --git a/crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs b/crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs new file mode 100644 index 00000000000..085d8d32598 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/incorrect_try_expr.rs @@ -0,0 +1,37 @@ +use hir::InFile; + +use crate::{Diagnostic, DiagnosticsContext}; + +// Diagnostic: incorrect-try-target +// +// This diagnostic is triggered if a question mark operator was used in a context where it is not applicable. 
+pub(crate) fn incorrect_try_expr( + ctx: &DiagnosticsContext<'_>, + d: &hir::IncorrectTryExpr, +) -> Diagnostic { + Diagnostic::new( + "incorrect-try-target", + format!("the return type of the containing function does not implement `FromResidual`"), + ctx.sema + .diagnostics_display_range(InFile::new(d.expr.file_id, d.expr.value.clone().into())) + .range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn try_ops_diag() { + check_diagnostics( + r#" +//- minicore: try +fn test() { + core::ops::ControlFlow::::Continue(1.0)?; + // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: the return type of the containing function does not implement `FromResidual` +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/not_implemented.rs b/crates/ide-diagnostics/src/handlers/not_implemented.rs new file mode 100644 index 00000000000..3bf6a423229 --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/not_implemented.rs @@ -0,0 +1,35 @@ +use hir::{db::DefDatabase, HirDisplay}; + +use crate::{Diagnostic, DiagnosticsContext}; + +// Diagnostic: not-implemented +// +// This diagnostic is triggered if a type doesn't implement a necessary trait. +pub(crate) fn not_implemented(ctx: &DiagnosticsContext<'_>, d: &hir::NotImplemented) -> Diagnostic { + Diagnostic::new( + "not-implemented", + format!( + "the trait `{}` is not implemented for `{}`", + ctx.sema.db.trait_data(d.trait_).name, + d.ty.display(ctx.sema.db) + ), + ctx.sema.diagnostics_display_range(d.expr.clone().map(|it| it.into())).range, + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn missing_try_impl() { + check_diagnostics( + r#" +//- minicore: try +fn main() { + ()?; +} //^^ error: the trait `Try` is not implemented for `()` +"#, + ) + } +} diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index ae299f05841..4577072149a 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -29,6 +29,7 @@ mod handlers { pub(crate) mod break_outside_of_loop; pub(crate) mod inactive_code; pub(crate) mod incorrect_case; + pub(crate) mod incorrect_try_expr; pub(crate) mod invalid_derive_target; pub(crate) mod macro_error; pub(crate) mod malformed_derive; @@ -36,6 +37,7 @@ mod handlers { pub(crate) mod missing_fields; pub(crate) mod missing_match_arms; pub(crate) mod missing_unsafe; + pub(crate) mod not_implemented; pub(crate) mod no_such_field; pub(crate) mod replace_filter_map_next_with_find_map; pub(crate) mod type_mismatch; @@ -225,12 +227,14 @@ pub fn diagnostics( let d = match diag { AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d), AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d), + AnyDiagnostic::IncorrectTryExpr(d) => handlers::incorrect_try_expr::incorrect_try_expr(&ctx, &d), AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d), AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d), AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d), AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d), AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d), AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), + AnyDiagnostic::NotImplemented(d) => handlers::not_implemented::not_implemented(&ctx, 
&d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d), AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d), diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index eb997e6fef8..5cab017a58d 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -4913,6 +4913,22 @@ fn foo() -> NotResult<(), Short> { ``` "#]], ); + check_hover_range( + r#" +//- minicore: try +use core::ops::ControlFlow; +fn foo() -> ControlFlow<()> { + $0ControlFlow::Break(())?$0; + ControlFlow::Continue(()) +} +"#, + expect![[r#" + ```text + Try Target Type: ControlFlow<(), {unknown}> + Propagated as: ControlFlow<(), ()> + ``` + "#]], + ); } #[test] @@ -4928,9 +4944,9 @@ fn foo() -> Option<()> { } "#, expect![[r#" - ```rust - as Try>::Output - ```"#]], + ```rust + i32 + ```"#]], ); } diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 69d2e62b256..59b1c147d7f 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -27,6 +27,7 @@ //! generator: pin //! hash: //! index: sized +//! infallible: //! iterator: option //! iterators: iterator, fn //! option: @@ -36,7 +37,7 @@ //! result: //! sized: //! slice: -//! try: +//! try: infallible //! unsize: sized pub mod marker { @@ -150,6 +151,9 @@ pub mod convert { fn as_ref(&self) -> &T; } // endregion:as_ref + // region:infallible + pub enum Infallible {} + // endregion:infallible } pub mod ops { @@ -326,7 +330,7 @@ pub mod ops { Continue(C), Break(B), } - pub trait FromResidual { + pub trait FromResidual::Residual> { #[lang = "from_residual"] fn from_residual(residual: R) -> Self; } @@ -342,13 +346,13 @@ pub mod ops { impl Try for ControlFlow { type Output = C; - type Residual = ControlFlow; + type Residual = ControlFlow; fn from_output(output: Self::Output) -> Self {} fn branch(self) -> ControlFlow {} } impl FromResidual for ControlFlow { - fn from_residual(residual: ControlFlow) -> Self {} + fn from_residual(residual: ControlFlow) -> Self {} } } pub use self::try_::{ControlFlow, FromResidual, Try}; @@ -469,6 +473,33 @@ pub mod option { } } } + // region:try + impl crate::ops::Try for Option { + type Output = T; + type Residual = Option; + + #[inline] + fn from_output(output: Self::Output) -> Self { + Some(output) + } + + #[inline] + fn branch(self) -> crate::ops::ControlFlow { + match self { + Some(v) => crate::ops::ControlFlow::Continue(v), + None => crate::ops::ControlFlow::Break(None), + } + } + } + impl crate::ops::FromResidual for Option { + #[inline] + fn from_residual(residual: Option) -> Self { + match residual { + None => None, + } + } + } + // endregion:try } // endregion:option From 5df03c2c18f82fee8ba4324aad7e009a41065ba1 Mon Sep 17 00:00:00 2001 From: zyctree Date: Sun, 16 Oct 2022 23:41:32 +0800 Subject: [PATCH 65/66] update link in syntax.md --- docs/dev/syntax.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index 767233351b4..82cfe3bbcdc 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md @@ -9,9 +9,9 @@ This guide describes the current state of syntax trees and parsing in rust-analy The things described are implemented in three places * [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees. 
-* [ra_syntax](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API. +* [syntax](https://github.com/rust-lang/rust-analyzer/tree/36a70b7435c48837018c71576d7bb4e8f763f501/crates/syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API. Nothing in rust-analyzer except this crate knows about `rowan`. -* [ra_parser](https://github.com/rust-lang/rust-analyzer/tree/cf5bdf464cad7ceb9a67e07985a3f4d3799ec0b6/crates/ra_parser) crate parses input tokens into an `ra_syntax` tree +* [parser](https://github.com/rust-lang/rust-analyzer/tree/36a70b7435c48837018c71576d7bb4e8f763f501/crates/parser) crate parses input tokens into an `syntax` tree ## Design Goals From ea8b62f9b0f569e21bf005073347c1802fa9169a Mon Sep 17 00:00:00 2001 From: zyctree Date: Mon, 17 Oct 2022 01:39:19 +0800 Subject: [PATCH 66/66] update link in syntax.md --- docs/dev/syntax.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index 82cfe3bbcdc..97e376787c8 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md @@ -8,10 +8,10 @@ This guide describes the current state of syntax trees and parsing in rust-analy The things described are implemented in three places -* [rowan](https://github.com/rust-analyzer/rowan/tree/v0.9.0) -- a generic library for rowan syntax trees. +* [rowan](https://github.com/rust-analyzer/rowan/tree/v0.15.10) -- a generic library for rowan syntax trees. * [syntax](https://github.com/rust-lang/rust-analyzer/tree/36a70b7435c48837018c71576d7bb4e8f763f501/crates/syntax) crate inside rust-analyzer which wraps `rowan` into rust-analyzer specific API. Nothing in rust-analyzer except this crate knows about `rowan`. -* [parser](https://github.com/rust-lang/rust-analyzer/tree/36a70b7435c48837018c71576d7bb4e8f763f501/crates/parser) crate parses input tokens into an `syntax` tree +* [parser](https://github.com/rust-lang/rust-analyzer/tree/36a70b7435c48837018c71576d7bb4e8f763f501/crates/parser) crate parses input tokens into a `syntax` tree ## Design Goals
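
To make the three-layer split above concrete, here is a minimal, hypothetical sketch of driving the `syntax` crate by hand. It is not part of the patches above: the dependency setup, the exact API names (`SourceFile::parse`, `Parse::tree`, `SyntaxNode::descendants`, `ast::Fn::cast`), and the printed output are assumptions based on the 2022-era crate, so treat it as orientation rather than a guaranteed-compiling recipe.

```rust
// Hypothetical sketch: parse a snippet with the `syntax` crate and walk the
// resulting rowan tree. Assumes a local path dependency on `crates/syntax`;
// API names reflect the 2022-era crate and may differ in other revisions.
use syntax::{ast, AstNode, SourceFile};

fn main() {
    let text = "fn foo() { 1 + 1; }";

    // Parsing never fails: syntax errors are collected alongside a full,
    // lossless tree rather than aborting the parse.
    let parse = SourceFile::parse(text);
    assert!(parse.errors().is_empty());

    let file = parse.tree();

    // The untyped rowan layer: every node carries a kind and a text range.
    for node in file.syntax().descendants() {
        println!("{:?} @ {:?}", node.kind(), node.text_range());
    }

    // The typed AST layer is a thin view over the same nodes.
    if let Some(func) = file.syntax().descendants().find_map(ast::Fn::cast) {
        println!("first fn: {}", func.syntax().text());
    }
}
```

The split the guide describes is visible even in this toy: the untyped rowan layer is what the `parser` crate produces, while the `ast` types are thin typed views over it that rust-analyzer's higher layers consume.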