Merge commit 'e36a20c24f35a4cee82bbdc600289104c9237c22' into ra-sync-and-pms-component
parent dfe84494c1
commit a1f1b95d00
Cargo.lock (generated): 7 lines changed
@@ -1198,6 +1198,13 @@ dependencies = [
  "tt",
 ]
 
+[[package]]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+dependencies = [
+ "proc-macro-srv",
+]
+
 [[package]]
 name = "proc-macro-test"
 version = "0.0.0"
@@ -96,6 +96,7 @@ pub(super) fn lower(
         expander,
         name_to_pat_grouping: Default::default(),
         is_lowering_inside_or_pat: false,
+        is_lowering_assignee_expr: false,
     }
     .collect(params, body)
 }
@@ -109,6 +110,7 @@ struct ExprCollector<'a> {
     // a poor-mans union-find?
     name_to_pat_grouping: FxHashMap<Name, Vec<PatId>>,
     is_lowering_inside_or_pat: bool,
+    is_lowering_assignee_expr: bool,
 }
 
 impl ExprCollector<'_> {
@@ -283,7 +285,10 @@ impl ExprCollector<'_> {
                 } else {
                     Box::default()
                 };
-                self.alloc_expr(Expr::Call { callee, args }, syntax_ptr)
+                self.alloc_expr(
+                    Expr::Call { callee, args, is_assignee_expr: self.is_lowering_assignee_expr },
+                    syntax_ptr,
+                )
             }
             ast::Expr::MethodCallExpr(e) => {
                 let receiver = self.collect_expr_opt(e.receiver());
@@ -359,6 +364,7 @@ impl ExprCollector<'_> {
             ast::Expr::RecordExpr(e) => {
                 let path =
                     e.path().and_then(|path| self.expander.parse_path(self.db, path)).map(Box::new);
+                let is_assignee_expr = self.is_lowering_assignee_expr;
                 let record_lit = if let Some(nfl) = e.record_expr_field_list() {
                     let fields = nfl
                         .fields()
@@ -378,9 +384,16 @@ impl ExprCollector<'_> {
                         })
                         .collect();
                     let spread = nfl.spread().map(|s| self.collect_expr(s));
-                    Expr::RecordLit { path, fields, spread }
+                    let ellipsis = nfl.dotdot_token().is_some();
+                    Expr::RecordLit { path, fields, spread, ellipsis, is_assignee_expr }
                 } else {
-                    Expr::RecordLit { path, fields: Box::default(), spread: None }
+                    Expr::RecordLit {
+                        path,
+                        fields: Box::default(),
+                        spread: None,
+                        ellipsis: false,
+                        is_assignee_expr,
+                    }
                 };
 
                 self.alloc_expr(record_lit, syntax_ptr)
@@ -458,14 +471,21 @@ impl ExprCollector<'_> {
                 )
             }
             ast::Expr::BinExpr(e) => {
-                let lhs = self.collect_expr_opt(e.lhs());
-                let rhs = self.collect_expr_opt(e.rhs());
                 let op = e.op_kind();
+                if let Some(ast::BinaryOp::Assignment { op: None }) = op {
+                    self.is_lowering_assignee_expr = true;
+                }
+                let lhs = self.collect_expr_opt(e.lhs());
+                self.is_lowering_assignee_expr = false;
+                let rhs = self.collect_expr_opt(e.rhs());
                 self.alloc_expr(Expr::BinaryOp { lhs, rhs, op }, syntax_ptr)
             }
             ast::Expr::TupleExpr(e) => {
                 let exprs = e.fields().map(|expr| self.collect_expr(expr)).collect();
-                self.alloc_expr(Expr::Tuple { exprs }, syntax_ptr)
+                self.alloc_expr(
+                    Expr::Tuple { exprs, is_assignee_expr: self.is_lowering_assignee_expr },
+                    syntax_ptr,
+                )
             }
             ast::Expr::BoxExpr(e) => {
                 let expr = self.collect_expr_opt(e.expr());
@@ -477,8 +497,14 @@ impl ExprCollector<'_> {
 
                 match kind {
                     ArrayExprKind::ElementList(e) => {
-                        let exprs = e.map(|expr| self.collect_expr(expr)).collect();
-                        self.alloc_expr(Expr::Array(Array::ElementList(exprs)), syntax_ptr)
+                        let elements = e.map(|expr| self.collect_expr(expr)).collect();
+                        self.alloc_expr(
+                            Expr::Array(Array::ElementList {
+                                elements,
+                                is_assignee_expr: self.is_lowering_assignee_expr,
+                            }),
+                            syntax_ptr,
+                        )
                     }
                     ArrayExprKind::Repeat { initializer, repeat } => {
                         let initializer = self.collect_expr_opt(initializer);
@@ -12,7 +12,7 @@ use crate::{
     db::DefDatabase,
     intern::Interned,
     item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
-    nameres::{attr_resolution::ResolvedAttr, DefMap},
+    nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap},
     type_ref::{TraitRef, TypeBound, TypeRef},
     visibility::RawVisibility,
     AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
@@ -348,7 +348,8 @@ impl MacroRulesData {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ProcMacroData {
     pub name: Name,
-    // FIXME: Record deriver helper here?
+    /// Derive helpers, if this is a derive
+    pub helpers: Option<Box<[Name]>>,
 }
 
 impl ProcMacroData {
@@ -360,17 +361,23 @@ impl ProcMacroData {
         let item_tree = loc.id.item_tree(db);
         let makro = &item_tree[loc.id.value];
 
-        let name = if let Some(def) = item_tree
+        let (name, helpers) = if let Some(def) = item_tree
             .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
             .parse_proc_macro_decl(&makro.name)
         {
-            def.name
+            (
+                def.name,
+                match def.kind {
+                    ProcMacroKind::CustomDerive { helpers } => Some(helpers),
+                    ProcMacroKind::FnLike | ProcMacroKind::Attr => None,
+                },
+            )
         } else {
             // eeeh...
             stdx::never!("proc macro declaration is not a proc macro");
-            makro.name.clone()
+            (makro.name.clone(), None)
         };
-        Arc::new(ProcMacroData { name })
+        Arc::new(ProcMacroData { name, helpers })
     }
 }
 
@@ -110,6 +110,7 @@ pub enum Expr {
     Call {
         callee: ExprId,
         args: Box<[ExprId]>,
+        is_assignee_expr: bool,
     },
     MethodCall {
         receiver: ExprId,
@@ -138,6 +139,8 @@ pub enum Expr {
         path: Option<Box<Path>>,
         fields: Box<[RecordLitField]>,
         spread: Option<ExprId>,
+        ellipsis: bool,
+        is_assignee_expr: bool,
     },
     Field {
         expr: ExprId,
@@ -196,6 +199,7 @@ pub enum Expr {
     },
     Tuple {
         exprs: Box<[ExprId]>,
+        is_assignee_expr: bool,
     },
     Unsafe {
         body: ExprId,
@@ -211,7 +215,7 @@ pub enum Expr {
 
 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum Array {
-    ElementList(Box<[ExprId]>),
+    ElementList { elements: Box<[ExprId]>, is_assignee_expr: bool },
     Repeat { initializer: ExprId, repeat: ExprId },
 }
 
@@ -285,7 +289,7 @@ impl Expr {
                 f(*iterable);
                 f(*body);
             }
-            Expr::Call { callee, args } => {
+            Expr::Call { callee, args, .. } => {
                 f(*callee);
                 args.iter().copied().for_each(f);
             }
@@ -339,9 +343,9 @@ impl Expr {
             | Expr::Box { expr } => {
                 f(*expr);
             }
-            Expr::Tuple { exprs } => exprs.iter().copied().for_each(f),
+            Expr::Tuple { exprs, .. } => exprs.iter().copied().for_each(f),
             Expr::Array(a) => match a {
-                Array::ElementList(exprs) => exprs.iter().copied().for_each(f),
+                Array::ElementList { elements, .. } => elements.iter().copied().for_each(f),
                 Array::Repeat { initializer, repeat } => {
                     f(*initializer);
                     f(*repeat)
@ -66,10 +66,14 @@ pub struct ItemScope {
|
||||
attr_macros: FxHashMap<AstId<ast::Item>, MacroCallId>,
|
||||
/// The derive macro invocations in this scope, keyed by the owner item over the actual derive attributes
|
||||
/// paired with the derive macro invocations for the specific attribute.
|
||||
derive_macros: FxHashMap<
|
||||
AstId<ast::Adt>,
|
||||
SmallVec<[(AttrId, MacroCallId, SmallVec<[Option<MacroCallId>; 1]>); 1]>,
|
||||
>,
|
||||
derive_macros: FxHashMap<AstId<ast::Adt>, SmallVec<[DeriveMacroInvocation; 1]>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct DeriveMacroInvocation {
|
||||
attr_id: AttrId,
|
||||
attr_call_id: MacroCallId,
|
||||
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
|
||||
}
|
||||
|
||||
pub(crate) static BUILTIN_SCOPE: Lazy<FxHashMap<Name, PerNs>> = Lazy::new(|| {
|
||||
@ -210,12 +214,14 @@ impl ItemScope {
|
||||
&mut self,
|
||||
adt: AstId<ast::Adt>,
|
||||
call: MacroCallId,
|
||||
attr_id: AttrId,
|
||||
id: AttrId,
|
||||
idx: usize,
|
||||
) {
|
||||
if let Some(derives) = self.derive_macros.get_mut(&adt) {
|
||||
if let Some((.., invocs)) = derives.iter_mut().find(|&&mut (id, ..)| id == attr_id) {
|
||||
invocs[idx] = Some(call);
|
||||
if let Some(DeriveMacroInvocation { derive_call_ids, .. }) =
|
||||
derives.iter_mut().find(|&&mut DeriveMacroInvocation { attr_id, .. }| id == attr_id)
|
||||
{
|
||||
derive_call_ids[idx] = Some(call);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -227,10 +233,14 @@ impl ItemScope {
|
||||
&mut self,
|
||||
adt: AstId<ast::Adt>,
|
||||
attr_id: AttrId,
|
||||
call_id: MacroCallId,
|
||||
attr_call_id: MacroCallId,
|
||||
len: usize,
|
||||
) {
|
||||
self.derive_macros.entry(adt).or_default().push((attr_id, call_id, smallvec![None; len]));
|
||||
self.derive_macros.entry(adt).or_default().push(DeriveMacroInvocation {
|
||||
attr_id,
|
||||
attr_call_id,
|
||||
derive_call_ids: smallvec![None; len],
|
||||
});
|
||||
}
|
||||
|
||||
pub(crate) fn derive_macro_invocs(
|
||||
@ -242,7 +252,12 @@ impl ItemScope {
|
||||
),
|
||||
> + '_ {
|
||||
self.derive_macros.iter().map(|(k, v)| {
|
||||
(*k, v.iter().map(|&(attr_id, call_id, ref invocs)| (attr_id, call_id, &**invocs)))
|
||||
(
|
||||
*k,
|
||||
v.iter().map(|DeriveMacroInvocation { attr_id, attr_call_id, derive_call_ids }| {
|
||||
(*attr_id, *attr_call_id, &**derive_call_ids)
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
|
@@ -934,11 +934,11 @@ fn derive_macro_as_call_id(
     derive_attr: AttrId,
     derive_pos: u32,
     krate: CrateId,
-    resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
-) -> Result<MacroCallId, UnresolvedMacro> {
-    let def: MacroDefId = resolver(item_attr.path.clone())
+    resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
+) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
+    let (macro_id, def_id) = resolver(item_attr.path.clone())
         .ok_or_else(|| UnresolvedMacro { path: item_attr.path.clone() })?;
-    let res = def.as_lazy_macro(
+    let call_id = def_id.as_lazy_macro(
         db.upcast(),
         krate,
         MacroCallKind::Derive {
@@ -947,7 +947,7 @@ fn derive_macro_as_call_id(
             derive_attr_index: derive_attr.ast_index,
         },
     );
-    Ok(res)
+    Ok((macro_id, def_id, call_id))
 }
 
 fn attr_macro_as_call_id(
@ -48,19 +48,19 @@
|
||||
//! the result
|
||||
|
||||
pub mod attr_resolution;
|
||||
mod collector;
|
||||
pub mod proc_macro;
|
||||
pub mod diagnostics;
|
||||
mod collector;
|
||||
mod mod_resolution;
|
||||
mod path_resolution;
|
||||
mod proc_macro;
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
use std::{cmp::Ord, sync::Arc};
|
||||
use std::{cmp::Ord, ops::Deref, sync::Arc};
|
||||
|
||||
use base_db::{CrateId, Edition, FileId};
|
||||
use hir_expand::{name::Name, InFile, MacroDefId};
|
||||
use hir_expand::{name::Name, InFile, MacroCallId, MacroDefId};
|
||||
use itertools::Itertools;
|
||||
use la_arena::Arena;
|
||||
use profile::Count;
|
||||
@ -76,7 +76,7 @@ use crate::{
|
||||
path::ModPath,
|
||||
per_ns::PerNs,
|
||||
visibility::Visibility,
|
||||
AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, ModuleId, ProcMacroId,
|
||||
AstId, BlockId, BlockLoc, FunctionId, LocalModuleId, MacroId, ModuleId, ProcMacroId,
|
||||
};
|
||||
|
||||
/// Contains the results of (early) name resolution.
|
||||
@ -106,6 +106,9 @@ pub struct DefMap {
|
||||
fn_proc_macro_mapping: FxHashMap<FunctionId, ProcMacroId>,
|
||||
/// The error that occurred when failing to load the proc-macro dll.
|
||||
proc_macro_loading_error: Option<Box<str>>,
|
||||
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
|
||||
/// attributes.
|
||||
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<(Name, MacroId, MacroCallId)>>,
|
||||
|
||||
/// Custom attributes registered with `#![register_attr]`.
|
||||
registered_attrs: Vec<SmolStr>,
|
||||
@ -275,6 +278,7 @@ impl DefMap {
|
||||
exported_derives: FxHashMap::default(),
|
||||
fn_proc_macro_mapping: FxHashMap::default(),
|
||||
proc_macro_loading_error: None,
|
||||
derive_helpers_in_scope: FxHashMap::default(),
|
||||
prelude: None,
|
||||
root,
|
||||
modules,
|
||||
@ -294,12 +298,22 @@ impl DefMap {
|
||||
pub fn modules(&self) -> impl Iterator<Item = (LocalModuleId, &ModuleData)> + '_ {
|
||||
self.modules.iter()
|
||||
}
|
||||
|
||||
pub fn derive_helpers_in_scope(
|
||||
&self,
|
||||
id: AstId<ast::Adt>,
|
||||
) -> Option<&[(Name, MacroId, MacroCallId)]> {
|
||||
self.derive_helpers_in_scope.get(&id.map(|it| it.upcast())).map(Deref::deref)
|
||||
}
|
||||
|
||||
pub fn registered_tools(&self) -> &[SmolStr] {
|
||||
&self.registered_tools
|
||||
}
|
||||
|
||||
pub fn registered_attrs(&self) -> &[SmolStr] {
|
||||
&self.registered_attrs
|
||||
}
|
||||
|
||||
pub fn root(&self) -> LocalModuleId {
|
||||
self.root
|
||||
}
|
||||
@ -307,6 +321,7 @@ impl DefMap {
|
||||
pub fn fn_as_proc_macro(&self, id: FunctionId) -> Option<ProcMacroId> {
|
||||
self.fn_proc_macro_mapping.get(&id).copied()
|
||||
}
|
||||
|
||||
pub fn proc_macro_loading_error(&self) -> Option<&str> {
|
||||
self.proc_macro_loading_error.as_deref()
|
||||
}
|
||||
@ -463,6 +478,7 @@ impl DefMap {
|
||||
registered_attrs,
|
||||
registered_tools,
|
||||
fn_proc_macro_mapping,
|
||||
derive_helpers_in_scope,
|
||||
proc_macro_loading_error: _,
|
||||
block: _,
|
||||
edition: _,
|
||||
@ -479,6 +495,7 @@ impl DefMap {
|
||||
registered_attrs.shrink_to_fit();
|
||||
registered_tools.shrink_to_fit();
|
||||
fn_proc_macro_mapping.shrink_to_fit();
|
||||
derive_helpers_in_scope.shrink_to_fit();
|
||||
for (_, module) in modules.iter_mut() {
|
||||
module.children.shrink_to_fit();
|
||||
module.scope.shrink_to_fit();
|
||||
|
@ -18,7 +18,7 @@ use hir_expand::{
|
||||
ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
|
||||
MacroDefKind,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use itertools::{izip, Itertools};
|
||||
use la_arena::Idx;
|
||||
use limit::Limit;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
@ -110,7 +110,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: T
|
||||
proc_macros,
|
||||
from_glob_import: Default::default(),
|
||||
skip_attrs: Default::default(),
|
||||
derive_helpers_in_scope: Default::default(),
|
||||
is_proc_macro,
|
||||
};
|
||||
if tree_id.is_block() {
|
||||
@ -258,9 +257,6 @@ struct DefCollector<'a> {
|
||||
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
|
||||
/// non-builtin attributes in general.
|
||||
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
|
||||
/// Tracks which custom derives are in scope for an item, to allow resolution of derive helper
|
||||
/// attributes.
|
||||
derive_helpers_in_scope: FxHashMap<AstId<ast::Item>, Vec<Name>>,
|
||||
}
|
||||
|
||||
impl DefCollector<'_> {
|
||||
@ -1059,7 +1055,7 @@ impl DefCollector<'_> {
|
||||
};
|
||||
let mut res = ReachedFixedPoint::Yes;
|
||||
macros.retain(|directive| {
|
||||
let resolver = |path| {
|
||||
let resolver2 = |path| {
|
||||
let resolved_res = self.def_map.resolve_path_fp_with_macro(
|
||||
self.db,
|
||||
ResolveMode::Other,
|
||||
@ -1067,8 +1063,12 @@ impl DefCollector<'_> {
|
||||
&path,
|
||||
BuiltinShadowMode::Module,
|
||||
);
|
||||
resolved_res.resolved_def.take_macros().map(|it| macro_id_to_def_id(self.db, it))
|
||||
resolved_res
|
||||
.resolved_def
|
||||
.take_macros()
|
||||
.map(|it| (it, macro_id_to_def_id(self.db, it)))
|
||||
};
|
||||
let resolver = |path| resolver2(path).map(|(_, it)| it);
|
||||
|
||||
match &directive.kind {
|
||||
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
|
||||
@ -1087,21 +1087,37 @@ impl DefCollector<'_> {
|
||||
}
|
||||
}
|
||||
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
|
||||
let call_id = derive_macro_as_call_id(
|
||||
let id = derive_macro_as_call_id(
|
||||
self.db,
|
||||
ast_id,
|
||||
*derive_attr,
|
||||
*derive_pos as u32,
|
||||
self.def_map.krate,
|
||||
&resolver,
|
||||
&resolver2,
|
||||
);
|
||||
if let Ok(call_id) = call_id {
|
||||
|
||||
if let Ok((macro_id, def_id, call_id)) = id {
|
||||
self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc(
|
||||
ast_id.ast_id,
|
||||
call_id,
|
||||
*derive_attr,
|
||||
*derive_pos,
|
||||
);
|
||||
// Record its helper attributes.
|
||||
if def_id.krate != self.def_map.krate {
|
||||
let def_map = self.db.crate_def_map(def_id.krate);
|
||||
if let Some(helpers) = def_map.exported_derives.get(&def_id) {
|
||||
self.def_map
|
||||
.derive_helpers_in_scope
|
||||
.entry(ast_id.ast_id.map(|it| it.upcast()))
|
||||
.or_default()
|
||||
.extend(izip!(
|
||||
helpers.iter().cloned(),
|
||||
iter::repeat(macro_id),
|
||||
iter::repeat(call_id),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
push_resolved(directive, call_id);
|
||||
res = ReachedFixedPoint::No;
|
||||
@ -1132,8 +1148,8 @@ impl DefCollector<'_> {
|
||||
};
|
||||
|
||||
if let Some(ident) = path.as_ident() {
|
||||
if let Some(helpers) = self.derive_helpers_in_scope.get(&ast_id) {
|
||||
if helpers.contains(ident) {
|
||||
if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) {
|
||||
if helpers.iter().any(|(it, ..)| it == ident) {
|
||||
cov_mark::hit!(resolved_derive_helper);
|
||||
// Resolved to derive helper. Collect the item's attributes again,
|
||||
// starting after the derive helper.
|
||||
@ -1148,7 +1164,7 @@ impl DefCollector<'_> {
|
||||
};
|
||||
if matches!(
|
||||
def,
|
||||
MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
|
||||
MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
|
||||
if expander.is_derive()
|
||||
) {
|
||||
// Resolved to `#[derive]`
|
||||
@ -1317,19 +1333,6 @@ impl DefCollector<'_> {
|
||||
self.def_map.diagnostics.push(diag);
|
||||
}
|
||||
|
||||
// If we've just resolved a derive, record its helper attributes.
|
||||
if let MacroCallKind::Derive { ast_id, .. } = &loc.kind {
|
||||
if loc.def.krate != self.def_map.krate {
|
||||
let def_map = self.db.crate_def_map(loc.def.krate);
|
||||
if let Some(helpers) = def_map.exported_derives.get(&loc.def) {
|
||||
self.derive_helpers_in_scope
|
||||
.entry(ast_id.map(|it| it.upcast()))
|
||||
.or_default()
|
||||
.extend(helpers.iter().cloned());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Then, fetch and process the item tree. This will reuse the expansion result from above.
|
||||
let item_tree = self.db.file_item_tree(file_id);
|
||||
let mod_dir = self.mod_dirs[&module_id].clone();
|
||||
@ -2140,7 +2143,6 @@ mod tests {
|
||||
proc_macros: Default::default(),
|
||||
from_glob_import: Default::default(),
|
||||
skip_attrs: Default::default(),
|
||||
derive_helpers_in_scope: Default::default(),
|
||||
is_proc_macro: false,
|
||||
};
|
||||
collector.seed_with_top_level();
|
||||
|
@@ -149,6 +149,7 @@ impl Resolver {
         self.resolve_module_path(db, path, BuiltinShadowMode::Module)
     }
 
+    // FIXME: This shouldn't exist
     pub fn resolve_module_path_in_trait_assoc_items(
         &self,
         db: &dyn DefDatabase,
@@ -448,10 +449,14 @@ impl Resolver {
     }
 
     pub fn krate(&self) -> CrateId {
+        self.def_map().krate()
+    }
+
+    pub fn def_map(&self) -> &DefMap {
         self.scopes
             .get(0)
             .and_then(|scope| match scope {
-                Scope::ModuleScope(m) => Some(m.def_map.krate()),
+                Scope::ModuleScope(m) => Some(&m.def_map),
                 _ => None,
             })
             .expect("module scope invariant violated")
@@ -305,7 +305,10 @@ pub fn record_literal_missing_fields(
     expr: &Expr,
 ) -> Option<(VariantId, Vec<LocalFieldId>, /*exhaustive*/ bool)> {
     let (fields, exhaustive) = match expr {
-        Expr::RecordLit { path: _, fields, spread } => (fields, spread.is_none()),
+        Expr::RecordLit { fields, spread, ellipsis, is_assignee_expr, .. } => {
+            let exhaustive = if *is_assignee_expr { !*ellipsis } else { spread.is_none() };
+            (fields, exhaustive)
+        }
         _ => return None,
     };
 
@ -276,7 +276,7 @@ impl<'a> InferenceContext<'a> {
|
||||
|
||||
closure_ty
|
||||
}
|
||||
Expr::Call { callee, args } => {
|
||||
Expr::Call { callee, args, .. } => {
|
||||
let callee_ty = self.infer_expr(*callee, &Expectation::none());
|
||||
let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
|
||||
let mut res = None;
|
||||
@ -421,7 +421,7 @@ impl<'a> InferenceContext<'a> {
|
||||
}
|
||||
TyKind::Never.intern(Interner)
|
||||
}
|
||||
Expr::RecordLit { path, fields, spread } => {
|
||||
Expr::RecordLit { path, fields, spread, .. } => {
|
||||
let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
|
||||
if let Some(variant) = def_id {
|
||||
self.write_variant_resolution(tgt_expr.into(), variant);
|
||||
@ -693,7 +693,7 @@ impl<'a> InferenceContext<'a> {
|
||||
self.err_ty()
|
||||
}
|
||||
}
|
||||
Expr::Tuple { exprs } => {
|
||||
Expr::Tuple { exprs, .. } => {
|
||||
let mut tys = match expected
|
||||
.only_has_type(&mut self.table)
|
||||
.as_ref()
|
||||
@ -724,12 +724,12 @@ impl<'a> InferenceContext<'a> {
|
||||
|
||||
let expected = Expectation::has_type(elem_ty.clone());
|
||||
let len = match array {
|
||||
Array::ElementList(items) => {
|
||||
for &expr in items.iter() {
|
||||
Array::ElementList { elements, .. } => {
|
||||
for &expr in elements.iter() {
|
||||
let cur_elem_ty = self.infer_expr_inner(expr, &expected);
|
||||
coerce.coerce(self, Some(expr), &cur_elem_ty);
|
||||
}
|
||||
consteval::usize_const(Some(items.len() as u128))
|
||||
consteval::usize_const(Some(elements.len() as u128))
|
||||
}
|
||||
&Array::Repeat { initializer, repeat } => {
|
||||
self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
|
||||
@ -850,7 +850,7 @@ impl<'a> InferenceContext<'a> {
|
||||
let rhs_ty = self.resolve_ty_shallow(rhs_ty);
|
||||
|
||||
let ty = match &self.body[lhs] {
|
||||
Expr::Tuple { exprs } => {
|
||||
Expr::Tuple { exprs, .. } => {
|
||||
// We don't consider multiple ellipses. This is analogous to
|
||||
// `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
|
||||
let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
|
||||
@ -858,7 +858,7 @@ impl<'a> InferenceContext<'a> {
|
||||
|
||||
self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
|
||||
}
|
||||
Expr::Call { callee, args } => {
|
||||
Expr::Call { callee, args, .. } => {
|
||||
// Tuple structs
|
||||
let path = match &self.body[*callee] {
|
||||
Expr::Path(path) => Some(path),
|
||||
@ -872,7 +872,7 @@ impl<'a> InferenceContext<'a> {
|
||||
|
||||
self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
|
||||
}
|
||||
Expr::Array(Array::ElementList(elements)) => {
|
||||
Expr::Array(Array::ElementList { elements, .. }) => {
|
||||
let elem_ty = match rhs_ty.kind(Interner) {
|
||||
TyKind::Array(st, _) => st.clone(),
|
||||
_ => self.err_ty(),
|
||||
|
@@ -2252,6 +2252,32 @@ impl Local {
     }
 }
 
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct DeriveHelper {
+    pub(crate) derive: MacroId,
+    pub(crate) idx: usize,
+}
+
+impl DeriveHelper {
+    pub fn derive(&self) -> Macro {
+        Macro { id: self.derive.into() }
+    }
+
+    pub fn name(&self, db: &dyn HirDatabase) -> Name {
+        match self.derive {
+            MacroId::Macro2Id(_) => None,
+            MacroId::MacroRulesId(_) => None,
+            MacroId::ProcMacroId(proc_macro) => db
+                .proc_macro_data(proc_macro)
+                .helpers
+                .as_ref()
+                .and_then(|it| it.get(self.idx))
+                .cloned(),
+        }
+        .unwrap_or_else(|| Name::missing())
+    }
+}
+
 // FIXME: Wrong name? This is could also be a registered attribute
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct BuiltinAttr {
@ -29,9 +29,9 @@ use crate::{
|
||||
db::HirDatabase,
|
||||
semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
|
||||
source_analyzer::{resolve_hir_path, SourceAnalyzer},
|
||||
Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, Field, Function, HasSource,
|
||||
HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, Path,
|
||||
ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
|
||||
Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
|
||||
HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
|
||||
Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@ -47,6 +47,7 @@ pub enum PathResolution {
|
||||
SelfType(Impl),
|
||||
BuiltinAttr(BuiltinAttr),
|
||||
ToolModule(ToolModule),
|
||||
DeriveHelper(DeriveHelper),
|
||||
}
|
||||
|
||||
impl PathResolution {
|
||||
@ -71,6 +72,7 @@ impl PathResolution {
|
||||
PathResolution::BuiltinAttr(_)
|
||||
| PathResolution::ToolModule(_)
|
||||
| PathResolution::Local(_)
|
||||
| PathResolution::DeriveHelper(_)
|
||||
| PathResolution::ConstParam(_) => None,
|
||||
PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
|
||||
PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
|
||||
@ -733,6 +735,8 @@ impl<'db> SemanticsImpl<'db> {
|
||||
Some(it) => it,
|
||||
None => return,
|
||||
};
|
||||
let def_map = sa.resolver.def_map();
|
||||
|
||||
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
|
||||
let mut cache = self.expansion_info_cache.borrow_mut();
|
||||
let mut mcache = self.macro_call_cache.borrow_mut();
|
||||
@ -764,7 +768,7 @@ impl<'db> SemanticsImpl<'db> {
|
||||
while let Some(token) = stack.pop() {
|
||||
self.db.unwind_if_cancelled();
|
||||
let was_not_remapped = (|| {
|
||||
// are we inside an attribute macro call
|
||||
// First expand into attribute invocations
|
||||
let containing_attribute_macro_call = self.with_ctx(|ctx| {
|
||||
token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
|
||||
if item.attrs().next().is_none() {
|
||||
@ -784,53 +788,19 @@ impl<'db> SemanticsImpl<'db> {
|
||||
);
|
||||
}
|
||||
|
||||
// or are we inside a function-like macro call
|
||||
if let Some(tt) =
|
||||
// FIXME replace map.while_some with take_while once stable
|
||||
token
|
||||
.value
|
||||
.parent_ancestors()
|
||||
.map(ast::TokenTree::cast)
|
||||
.while_some()
|
||||
.last()
|
||||
{
|
||||
let parent = tt.syntax().parent()?;
|
||||
// check for derive attribute here
|
||||
let macro_call = match_ast! {
|
||||
match parent {
|
||||
ast::MacroCall(mcall) => mcall,
|
||||
// attribute we failed expansion for earlier, this might be a derive invocation
|
||||
// so try downmapping the token into the pseudo derive expansion
|
||||
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
|
||||
ast::Meta(meta) => {
|
||||
let attr = meta.parent_attr()?;
|
||||
let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
|
||||
let call_id = self.with_ctx(|ctx| {
|
||||
let (_, call_id, _) = ctx.attr_to_derive_macro_call(
|
||||
token.with_value(&adt),
|
||||
token.with_value(attr),
|
||||
)?;
|
||||
Some(call_id)
|
||||
})?;
|
||||
let file_id = call_id.as_file();
|
||||
return process_expansion_for_token(
|
||||
&mut stack,
|
||||
file_id,
|
||||
Some(adt.into()),
|
||||
token.as_ref(),
|
||||
);
|
||||
},
|
||||
_ => return None,
|
||||
}
|
||||
};
|
||||
// Then check for token trees, that means we are either in a function-like macro or
|
||||
// secondary attribute inputs
|
||||
let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
|
||||
let parent = tt.syntax().parent()?;
|
||||
|
||||
if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
|
||||
return None;
|
||||
}
|
||||
if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
|
||||
return None;
|
||||
}
|
||||
if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
|
||||
return None;
|
||||
}
|
||||
if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
|
||||
return None;
|
||||
}
|
||||
|
||||
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
|
||||
let mcall = token.with_value(macro_call);
|
||||
let file_id = match mcache.get(&mcall) {
|
||||
Some(&it) => it,
|
||||
@ -840,11 +810,77 @@ impl<'db> SemanticsImpl<'db> {
|
||||
it
|
||||
}
|
||||
};
|
||||
return process_expansion_for_token(&mut stack, file_id, None, token.as_ref());
|
||||
}
|
||||
process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
|
||||
} else if let Some(meta) = ast::Meta::cast(parent.clone()) {
|
||||
// attribute we failed expansion for earlier, this might be a derive invocation
|
||||
// or derive helper attribute
|
||||
let attr = meta.parent_attr()?;
|
||||
|
||||
// outside of a macro invocation so this is a "final" token
|
||||
None
|
||||
let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
|
||||
// this might be a derive, or a derive helper on an ADT
|
||||
let derive_call = self.with_ctx(|ctx| {
|
||||
// so try downmapping the token into the pseudo derive expansion
|
||||
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
|
||||
ctx.attr_to_derive_macro_call(
|
||||
token.with_value(&adt),
|
||||
token.with_value(attr.clone()),
|
||||
)
|
||||
.map(|(_, call_id, _)| call_id)
|
||||
});
|
||||
|
||||
match derive_call {
|
||||
Some(call_id) => {
|
||||
// resolved to a derive
|
||||
let file_id = call_id.as_file();
|
||||
return process_expansion_for_token(
|
||||
&mut stack,
|
||||
file_id,
|
||||
Some(adt.into()),
|
||||
token.as_ref(),
|
||||
);
|
||||
}
|
||||
None => Some(adt),
|
||||
}
|
||||
} else {
|
||||
// Otherwise this could be a derive helper on a variant or field
|
||||
if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
|
||||
{
|
||||
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
|
||||
} else if let Some(field) =
|
||||
attr.syntax().parent().and_then(ast::TupleField::cast)
|
||||
{
|
||||
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
|
||||
} else if let Some(variant) =
|
||||
attr.syntax().parent().and_then(ast::Variant::cast)
|
||||
{
|
||||
variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}?;
|
||||
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
|
||||
return None;
|
||||
}
|
||||
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
|
||||
// Try to resolve to a derive helper and downmap
|
||||
let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
|
||||
let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
|
||||
let helpers =
|
||||
def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
|
||||
let item = Some(adt.into());
|
||||
let mut res = None;
|
||||
for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
|
||||
res = res.or(process_expansion_for_token(
|
||||
&mut stack,
|
||||
derive.as_file(),
|
||||
item.clone(),
|
||||
token.as_ref(),
|
||||
));
|
||||
}
|
||||
res
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})()
|
||||
.is_none();
|
||||
|
||||
|
@ -247,6 +247,7 @@ impl SourceToDefCtx<'_, '_> {
|
||||
map[keys::ATTR_MACRO_CALL].get(&src.value).copied()
|
||||
}
|
||||
|
||||
/// (AttrId, derive attribute call id, derive call ids)
|
||||
pub(super) fn attr_to_derive_macro_call(
|
||||
&mut self,
|
||||
item: InFile<&ast::Adt>,
|
||||
@ -257,6 +258,7 @@ impl SourceToDefCtx<'_, '_> {
|
||||
.get(&src.value)
|
||||
.map(|&(attr_id, call_id, ref ids)| (attr_id, call_id, &**ids))
|
||||
}
|
||||
|
||||
pub(super) fn has_derives(&mut self, adt: InFile<&ast::Adt>) -> bool {
|
||||
self.dyn_map(adt).as_ref().map_or(false, |map| !map[keys::DERIVE_MACRO_CALL].is_empty())
|
||||
}
|
||||
|
@ -35,6 +35,7 @@ use hir_ty::{
|
||||
method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution,
|
||||
TyExt, TyKind, TyLoweringContext,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use smallvec::SmallVec;
|
||||
use syntax::{
|
||||
ast::{self, AstNode},
|
||||
@ -43,8 +44,8 @@ use syntax::{
|
||||
|
||||
use crate::{
|
||||
db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
|
||||
BuiltinType, Callable, Const, Field, Function, Local, Macro, ModuleDef, Static, Struct,
|
||||
ToolModule, Trait, Type, TypeAlias, Variant,
|
||||
BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
|
||||
Struct, ToolModule, Trait, Type, TypeAlias, Variant,
|
||||
};
|
||||
|
||||
/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
|
||||
@ -429,19 +430,21 @@ impl SourceAnalyzer {
|
||||
}
|
||||
}
|
||||
|
||||
let is_path_of_attr = path
|
||||
let meta_path = path
|
||||
.syntax()
|
||||
.ancestors()
|
||||
.map(|it| it.kind())
|
||||
.take_while(|&kind| ast::Path::can_cast(kind) || ast::Meta::can_cast(kind))
|
||||
.take_while(|it| {
|
||||
let kind = it.kind();
|
||||
ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)
|
||||
})
|
||||
.last()
|
||||
.map_or(false, ast::Meta::can_cast);
|
||||
.and_then(ast::Meta::cast);
|
||||
|
||||
// Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
|
||||
// trying to resolve foo::bar.
|
||||
if path.parent_path().is_some() {
|
||||
return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
|
||||
None if is_path_of_attr => {
|
||||
None if meta_path.is_some() => {
|
||||
path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
|
||||
ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
|
||||
.map(PathResolution::ToolModule)
|
||||
@ -449,16 +452,56 @@ impl SourceAnalyzer {
|
||||
}
|
||||
res => res,
|
||||
};
|
||||
} else if is_path_of_attr {
|
||||
} else if let Some(meta_path) = meta_path {
|
||||
// Case where we are resolving the final path segment of a path in an attribute
|
||||
// in this case we have to check for inert/builtin attributes and tools and prioritize
|
||||
// resolution of attributes over other namespaces
|
||||
let name_ref = path.as_single_name_ref();
|
||||
let builtin = name_ref.as_ref().and_then(|name_ref| {
|
||||
BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text())
|
||||
});
|
||||
if let Some(_) = builtin {
|
||||
return builtin.map(PathResolution::BuiltinAttr);
|
||||
if let Some(name_ref) = path.as_single_name_ref() {
|
||||
let builtin =
|
||||
BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
|
||||
if let Some(_) = builtin {
|
||||
return builtin.map(PathResolution::BuiltinAttr);
|
||||
}
|
||||
|
||||
if let Some(attr) = meta_path.parent_attr() {
|
||||
let adt = if let Some(field) =
|
||||
attr.syntax().parent().and_then(ast::RecordField::cast)
|
||||
{
|
||||
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
|
||||
} else if let Some(field) =
|
||||
attr.syntax().parent().and_then(ast::TupleField::cast)
|
||||
{
|
||||
field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
|
||||
} else if let Some(variant) =
|
||||
attr.syntax().parent().and_then(ast::Variant::cast)
|
||||
{
|
||||
variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
if let Some(adt) = adt {
|
||||
let ast_id = db.ast_id_map(self.file_id).ast_id(&adt);
|
||||
if let Some(helpers) = self
|
||||
.resolver
|
||||
.def_map()
|
||||
.derive_helpers_in_scope(InFile::new(self.file_id, ast_id))
|
||||
{
|
||||
// FIXME: Multiple derives can have the same helper
|
||||
let name_ref = name_ref.as_name();
|
||||
for (macro_id, mut helpers) in
|
||||
helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
|
||||
{
|
||||
if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
|
||||
{
|
||||
return Some(PathResolution::DeriveHelper(DeriveHelper {
|
||||
derive: *macro_id,
|
||||
idx,
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
|
||||
Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))),
|
||||
|
@ -145,13 +145,16 @@ fn add_missing_impl_members_inner(
|
||||
Some(cap) => {
|
||||
let mut cursor = Cursor::Before(first_new_item.syntax());
|
||||
let placeholder;
|
||||
if let ast::AssocItem::Fn(func) = &first_new_item {
|
||||
if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() {
|
||||
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
|
||||
{
|
||||
if m.syntax().text() == "todo!()" {
|
||||
placeholder = m;
|
||||
cursor = Cursor::Replace(placeholder.syntax());
|
||||
if let DefaultMethods::No = mode {
|
||||
if let ast::AssocItem::Fn(func) = &first_new_item {
|
||||
if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() {
|
||||
if let Some(m) =
|
||||
func.syntax().descendants().find_map(ast::MacroCall::cast)
|
||||
{
|
||||
if m.syntax().text() == "todo!()" {
|
||||
placeholder = m;
|
||||
cursor = Cursor::Replace(placeholder.syntax());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ use syntax::{
|
||||
ted,
|
||||
};
|
||||
|
||||
/// Generate custom trait bodies where possible.
|
||||
/// Generate custom trait bodies without default implementation where possible.
|
||||
///
|
||||
/// Returns `Option` so that we can use `?` rather than `if let Some`. Returning
|
||||
/// `None` means that generating a custom trait body failed, and the body will remain
|
||||
@ -28,6 +28,7 @@ pub(crate) fn gen_trait_fn_body(
|
||||
|
||||
/// Generate a `Clone` impl based on the fields and members of the target type.
|
||||
fn gen_clone_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
stdx::always!(func.name().map_or(false, |name| name.text() == "clone"));
|
||||
fn gen_clone_call(target: ast::Expr) -> ast::Expr {
|
||||
let method = make::name_ref("clone");
|
||||
make::expr_method_call(target, method, make::arg_list(None))
|
||||
@ -339,6 +340,7 @@ fn gen_default_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
|
||||
/// Generate a `Hash` impl based on the fields and members of the target type.
|
||||
fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
stdx::always!(func.name().map_or(false, |name| name.text() == "hash"));
|
||||
fn gen_hash_call(target: ast::Expr) -> ast::Stmt {
|
||||
let method = make::name_ref("hash");
|
||||
let arg = make::expr_path(make::ext::ident_path("state"));
|
||||
@ -394,9 +396,7 @@ fn gen_hash_impl(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
|
||||
/// Generate a `PartialEq` impl based on the fields and members of the target type.
|
||||
fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
if func.name().map_or(false, |name| name.text() == "ne") {
|
||||
return None;
|
||||
}
|
||||
stdx::always!(func.name().map_or(false, |name| name.text() == "eq"));
|
||||
fn gen_eq_chain(expr: Option<ast::Expr>, cmp: ast::Expr) -> Option<ast::Expr> {
|
||||
match expr {
|
||||
Some(expr) => Some(make::expr_bin_op(expr, BinaryOp::LogicOp(LogicOp::And), cmp)),
|
||||
@ -573,6 +573,7 @@ fn gen_partial_eq(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
}
|
||||
|
||||
fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
stdx::always!(func.name().map_or(false, |name| name.text() == "partial_cmp"));
|
||||
fn gen_partial_eq_match(match_target: ast::Expr) -> Option<ast::Stmt> {
|
||||
let mut arms = vec![];
|
||||
|
||||
@ -643,7 +644,7 @@ fn gen_partial_ord(adt: &ast::Adt, func: &ast::Fn) -> Option<()> {
|
||||
make::block_expr(stmts.into_iter(), tail).indent(ast::edit::IndentLevel(1))
|
||||
}
|
||||
|
||||
// No fields in the body means there's nothing to hash.
|
||||
// No fields in the body means there's nothing to compare.
|
||||
None => {
|
||||
let expr = make::expr_literal("true").into();
|
||||
make::block_expr(None, Some(expr)).indent(ast::edit::IndentLevel(1))
|
||||
|
@ -292,6 +292,7 @@ impl CompletionItemKind {
|
||||
SymbolKind::Const => "ct",
|
||||
SymbolKind::ConstParam => "cp",
|
||||
SymbolKind::Derive => "de",
|
||||
SymbolKind::DeriveHelper => "dh",
|
||||
SymbolKind::Enum => "en",
|
||||
SymbolKind::Field => "fd",
|
||||
SymbolKind::Function => "fn",
|
||||
|
@ -121,7 +121,7 @@ pub(crate) fn render_field(
|
||||
let mut item = CompletionItem::new(
|
||||
SymbolKind::Field,
|
||||
ctx.source_range(),
|
||||
receiver.map_or_else(|| name.clone(), |receiver| format!("{}.{}", receiver, name).into()),
|
||||
field_with_receiver(receiver.as_ref(), &name),
|
||||
);
|
||||
item.set_relevance(CompletionRelevance {
|
||||
type_match: compute_type_match(ctx.completion, ty),
|
||||
@ -132,7 +132,7 @@ pub(crate) fn render_field(
|
||||
.set_documentation(field.docs(ctx.db()))
|
||||
.set_deprecated(is_deprecated)
|
||||
.lookup_by(name.clone());
|
||||
item.insert_text(escaped_name);
|
||||
item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name));
|
||||
if let Some(receiver) = &dot_access.receiver {
|
||||
if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
|
||||
if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
|
||||
@ -143,6 +143,11 @@ pub(crate) fn render_field(
|
||||
item.build()
|
||||
}
|
||||
|
||||
fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr {
|
||||
receiver
|
||||
.map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into())
|
||||
}
|
||||
|
||||
pub(crate) fn render_tuple_field(
|
||||
ctx: RenderContext<'_>,
|
||||
receiver: Option<hir::Name>,
|
||||
@ -152,7 +157,7 @@ pub(crate) fn render_tuple_field(
|
||||
let mut item = CompletionItem::new(
|
||||
SymbolKind::Field,
|
||||
ctx.source_range(),
|
||||
receiver.map_or_else(|| field.to_string(), |receiver| format!("{}.{}", receiver, field)),
|
||||
field_with_receiver(receiver.as_ref(), &field.to_string()),
|
||||
);
|
||||
item.detail(ty.display(ctx.db()).to_string()).lookup_by(field.to_string());
|
||||
item.build()
|
||||
@ -1873,6 +1878,35 @@ impl r#trait for r#struct { type t$0 }
|
||||
struct r#struct {}
|
||||
trait r#trait { type r#type; }
|
||||
impl r#trait for r#struct { type r#type = $0; }
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn field_access_includes_self() {
|
||||
check_edit(
|
||||
"length",
|
||||
r#"
|
||||
struct S {
|
||||
length: i32
|
||||
}
|
||||
|
||||
impl S {
|
||||
fn some_fn(&self) {
|
||||
let l = len$0
|
||||
}
|
||||
}
|
||||
"#,
|
||||
r#"
|
||||
struct S {
|
||||
length: i32
|
||||
}
|
||||
|
||||
impl S {
|
||||
fn some_fn(&self) {
|
||||
let l = self.length
|
||||
}
|
||||
}
|
||||
"#,
|
||||
)
|
||||
}
|
||||
|
@ -7,9 +7,9 @@
|
||||
|
||||
use arrayvec::ArrayVec;
|
||||
use hir::{
|
||||
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, Field, Function,
|
||||
GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef, Name,
|
||||
PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility,
|
||||
Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
|
||||
Function, GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef,
|
||||
Name, PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility,
|
||||
};
|
||||
use stdx::impl_from;
|
||||
use syntax::{
|
||||
@ -37,6 +37,7 @@ pub enum Definition {
|
||||
Local(Local),
|
||||
GenericParam(GenericParam),
|
||||
Label(Label),
|
||||
DeriveHelper(DeriveHelper),
|
||||
BuiltinAttr(BuiltinAttr),
|
||||
ToolModule(ToolModule),
|
||||
}
|
||||
@ -69,6 +70,7 @@ impl Definition {
|
||||
Definition::Local(it) => it.module(db),
|
||||
Definition::GenericParam(it) => it.module(db),
|
||||
Definition::Label(it) => it.module(db),
|
||||
Definition::DeriveHelper(it) => it.derive().module(db),
|
||||
Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
|
||||
return None
|
||||
}
|
||||
@ -94,7 +96,8 @@ impl Definition {
|
||||
| Definition::SelfType(_)
|
||||
| Definition::Local(_)
|
||||
| Definition::GenericParam(_)
|
||||
| Definition::Label(_) => return None,
|
||||
| Definition::Label(_)
|
||||
| Definition::DeriveHelper(_) => return None,
|
||||
};
|
||||
Some(vis)
|
||||
}
|
||||
@ -118,6 +121,7 @@ impl Definition {
|
||||
Definition::Label(it) => it.name(db),
|
||||
Definition::BuiltinAttr(_) => return None, // FIXME
|
||||
Definition::ToolModule(_) => return None, // FIXME
|
||||
Definition::DeriveHelper(it) => it.name(db),
|
||||
};
|
||||
Some(name)
|
||||
}
|
||||
@ -500,6 +504,7 @@ impl From<PathResolution> for Definition {
|
||||
PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
|
||||
PathResolution::BuiltinAttr(attr) => Definition::BuiltinAttr(attr),
|
||||
PathResolution::ToolModule(tool) => Definition::ToolModule(tool),
|
||||
PathResolution::DeriveHelper(helper) => Definition::DeriveHelper(helper),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -173,6 +173,7 @@ pub enum SymbolKind {
|
||||
Const,
|
||||
ConstParam,
|
||||
Derive,
|
||||
DeriveHelper,
|
||||
Enum,
|
||||
Field,
|
||||
Function,
|
||||
|
@ -224,7 +224,8 @@ impl<'a> Ctx<'a> {
|
||||
| hir::PathResolution::SelfType(_)
|
||||
| hir::PathResolution::Def(_)
|
||||
| hir::PathResolution::BuiltinAttr(_)
|
||||
| hir::PathResolution::ToolModule(_) => (),
|
||||
| hir::PathResolution::ToolModule(_)
|
||||
| hir::PathResolution::DeriveHelper(_) => (),
|
||||
}
|
||||
Some(())
|
||||
}
|
||||
|
@ -156,6 +156,8 @@ impl Definition {
|
||||
Definition::SelfType(_) => return None,
|
||||
Definition::BuiltinAttr(_) => return None,
|
||||
Definition::ToolModule(_) => return None,
|
||||
// FIXME: This should be doable in theory
|
||||
Definition::DeriveHelper(_) => return None,
|
||||
};
|
||||
return res;
|
||||
|
||||
@ -316,14 +318,20 @@ pub fn source_edit_from_references(
|
||||
// macros can cause multiple refs to occur for the same text range, so keep track of what we have edited so far
|
||||
let mut edited_ranges = Vec::new();
|
||||
for &FileReference { range, ref name, .. } in references {
|
||||
let name_range = name.syntax().text_range();
|
||||
if name_range.len() != range.len() {
|
||||
// This usage comes from a different token kind that was downmapped to a NameLike in a macro
|
||||
// Renaming this will most likely break things syntax-wise
|
||||
continue;
|
||||
}
|
||||
let has_emitted_edit = match name {
|
||||
// if the ranges differ then the node is inside a macro call, we can't really attempt
|
||||
// to make special rewrites like shorthand syntax and such, so just rename the node in
|
||||
// the macro input
|
||||
ast::NameLike::NameRef(name_ref) if name_ref.syntax().text_range() == range => {
|
||||
ast::NameLike::NameRef(name_ref) if name_range == range => {
|
||||
source_edit_from_name_ref(&mut edit, name_ref, new_name, def)
|
||||
}
|
||||
ast::NameLike::Name(name) if name.syntax().text_range() == range => {
|
||||
ast::NameLike::Name(name) if name_range == range => {
|
||||
source_edit_from_name(&mut edit, name, new_name)
|
||||
}
|
||||
_ => false,
|
||||
|
@ -54,7 +54,9 @@ impl IntoIterator for UsageSearchResult {
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct FileReference {
|
||||
/// The range of the reference in the original file
|
||||
pub range: TextRange,
|
||||
/// The node of the reference in the (macro-)file
|
||||
pub name: ast::NameLike,
|
||||
pub category: Option<ReferenceCategory>,
|
||||
}
|
||||
@ -276,16 +278,16 @@ impl Definition {
|
||||
}
|
||||
}
|
||||
hir::MacroKind::BuiltIn => SearchScope::crate_graph(db),
|
||||
// FIXME: We don't actually see derives in derive attributes as these do not
|
||||
// expand to something that references the derive macro in the output.
|
||||
// We could get around this by doing pseudo expansions for proc_macro_derive like we
|
||||
// do for the derive attribute
|
||||
hir::MacroKind::Derive | hir::MacroKind::Attr | hir::MacroKind::ProcMacro => {
|
||||
SearchScope::reverse_dependencies(db, module.krate())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
if let Definition::DeriveHelper(_) = self {
|
||||
return SearchScope::reverse_dependencies(db, module.krate());
|
||||
}
|
||||
|
||||
let vis = self.visibility(db);
|
||||
if let Some(Visibility::Public) = vis {
|
||||
return SearchScope::reverse_dependencies(db, module.krate());
|
||||
|
@ -292,6 +292,37 @@ fn x(a: S) {
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn missing_record_expr_in_assignee_expr() {
|
||||
check_diagnostics(
|
||||
r"
|
||||
struct S { s: usize, t: usize }
|
||||
struct S2 { s: S, t: () }
|
||||
struct T(S);
|
||||
fn regular(a: S) {
|
||||
let s;
|
||||
S { s, .. } = a;
|
||||
}
|
||||
fn nested(a: S2) {
|
||||
let s;
|
||||
S2 { s: S { s, .. }, .. } = a;
|
||||
}
|
||||
fn in_tuple(a: (S,)) {
|
||||
let s;
|
||||
(S { s, .. },) = a;
|
||||
}
|
||||
fn in_array(a: [S;1]) {
|
||||
let s;
|
||||
[S { s, .. },] = a;
|
||||
}
|
||||
fn in_tuple_struct(a: T) {
|
||||
let s;
|
||||
T(S { s, .. }) = a;
|
||||
}
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn range_mapping_out_of_macros() {
|
||||
check_fix(
|
||||
|
@ -190,7 +190,8 @@ pub(crate) fn resolve_doc_path_for_def(
|
||||
| Definition::SelfType(_)
|
||||
| Definition::Local(_)
|
||||
| Definition::GenericParam(_)
|
||||
| Definition::Label(_) => None,
|
||||
| Definition::Label(_)
|
||||
| Definition::DeriveHelper(_) => None,
|
||||
}
|
||||
.map(Definition::from)
|
||||
}
|
||||
@ -515,7 +516,8 @@ fn filename_and_frag_for_def(
|
||||
| Definition::GenericParam(_)
|
||||
| Definition::Label(_)
|
||||
| Definition::BuiltinAttr(_)
|
||||
| Definition::ToolModule(_) => return None,
|
||||
| Definition::ToolModule(_)
|
||||
| Definition::DeriveHelper(_) => return None,
|
||||
};
|
||||
|
||||
Some((def, res, None))
|
||||
|
@ -115,7 +115,12 @@ pub(crate) fn hover(
|
||||
});
|
||||
}
|
||||
|
||||
let descended = sema.descend_into_macros_with_same_text(original_token.clone());
|
||||
let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
|
||||
let descended = if in_attr {
|
||||
[sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
|
||||
} else {
|
||||
sema.descend_into_macros_with_same_text(original_token.clone())
|
||||
};
|
||||
|
||||
// FIXME: Definition should include known lints and the like instead of having this special case here
|
||||
let hovered_lint = descended.iter().find_map(|token| {
|
||||
|
@ -370,6 +370,7 @@ pub(super) fn definition(
|
||||
// FIXME: We should be able to show more info about these
|
||||
Definition::BuiltinAttr(it) => return render_builtin_attr(db, it),
|
||||
Definition::ToolModule(it) => return Some(Markup::fenced_block(&it.name(db))),
|
||||
Definition::DeriveHelper(it) => (format!("derive_helper {}", it.name(db)), None),
|
||||
};
|
||||
|
||||
let docs = match config.documentation {
|
||||
|
@ -196,6 +196,8 @@ impl TryToNav for Definition {
|
||||
Definition::BuiltinType(_) => None,
|
||||
Definition::ToolModule(_) => None,
|
||||
Definition::BuiltinAttr(_) => None,
|
||||
// FIXME: The focus range should be set to the helper declaration
|
||||
Definition::DeriveHelper(it) => it.derive().try_to_nav(db),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -237,7 +237,8 @@ fn signature_help_for_generics(
|
||||
| hir::PathResolution::Local(_)
|
||||
| hir::PathResolution::TypeParam(_)
|
||||
| hir::PathResolution::ConstParam(_)
|
||||
| hir::PathResolution::SelfType(_) => return None,
|
||||
| hir::PathResolution::SelfType(_)
|
||||
| hir::PathResolution::DeriveHelper(_) => return None,
|
||||
};
|
||||
|
||||
generic_def
|
||||
|
@ -107,6 +107,7 @@ pub struct HlRange {
|
||||
// builtinType:: Emitted for builtin types like `u32`, `str` and `f32`.
|
||||
// comment:: Emitted for comments.
|
||||
// constParameter:: Emitted for const parameters.
|
||||
// deriveHelper:: Emitted for derive helper attributes.
|
||||
// enumMember:: Emitted for enum variants.
|
||||
// generic:: Emitted for generic tokens that have no mapping.
|
||||
// keyword:: Emitted for keywords.
|
||||
@ -431,6 +432,13 @@ fn traverse(
|
||||
// let the editor do its highlighting for these tokens instead
|
||||
continue;
|
||||
}
|
||||
if highlight.tag == HlTag::UnresolvedReference
|
||||
&& matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute)
|
||||
{
|
||||
// do not emit unresolved references in derive helpers if the token mapping maps to
|
||||
// something unresolvable. FIXME: There should be a way to prevent that
|
||||
continue;
|
||||
}
|
||||
if inside_attribute {
|
||||
highlight |= HlMod::Attribute
|
||||
}
|
||||
|
@ -472,6 +472,7 @@ fn highlight_def(
|
||||
Definition::Label(_) => Highlight::new(HlTag::Symbol(SymbolKind::Label)),
|
||||
Definition::BuiltinAttr(_) => Highlight::new(HlTag::Symbol(SymbolKind::BuiltinAttr)),
|
||||
Definition::ToolModule(_) => Highlight::new(HlTag::Symbol(SymbolKind::ToolModule)),
|
||||
Definition::DeriveHelper(_) => Highlight::new(HlTag::Symbol(SymbolKind::DeriveHelper)),
|
||||
};
|
||||
|
||||
let def_crate = def.krate(db);
|
||||
|
@ -270,6 +270,7 @@ fn module_def_to_hl_tag(def: Definition) -> HlTag {
|
||||
Definition::Label(_) => SymbolKind::Label,
|
||||
Definition::BuiltinAttr(_) => SymbolKind::BuiltinAttr,
|
||||
Definition::ToolModule(_) => SymbolKind::ToolModule,
|
||||
Definition::DeriveHelper(_) => SymbolKind::DeriveHelper,
|
||||
};
|
||||
HlTag::Symbol(symbol)
|
||||
}
|
||||
|
@ -134,6 +134,7 @@ impl HlTag {
|
||||
SymbolKind::Const => "constant",
|
||||
SymbolKind::ConstParam => "const_param",
|
||||
SymbolKind::Derive => "derive",
|
||||
SymbolKind::DeriveHelper => "derive_helper",
|
||||
SymbolKind::Enum => "enum",
|
||||
SymbolKind::Field => "field",
|
||||
SymbolKind::Function => "function",
|
||||
|
@ -86,6 +86,7 @@ fn mk_child(
|
||||
) -> io::Result<Child> {
|
||||
Command::new(path.as_os_str())
|
||||
.args(args)
|
||||
.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable")
|
||||
.stdin(Stdio::piped())
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::inherit())
|
||||
|
crates/proc-macro-srv-cli/Cargo.toml (new file): 17 lines
@@ -0,0 +1,17 @@
+[package]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.57"
+
+[dependencies]
+proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" }
+
+[features]
+sysroot-abi = ["proc-macro-srv/sysroot-abi"]
+
+[[bin]]
+name = "rust-analyzer-proc-macro-srv"
+path = "src/main.rs"
crates/proc-macro-srv-cli/src/main.rs (new file): 19 lines
@@ -0,0 +1,19 @@
+//! A standalone binary for `proc-macro-srv`.
+
+use proc_macro_srv::cli;
+
+fn main() -> std::io::Result<()> {
+    let v = std::env::var("RUST_ANALYZER_INTERNALS_DO_NOT_USE");
+    match v.as_deref() {
+        Ok("this is unstable") => {
+            // very well, if you must
+        }
+        _ => {
+            eprintln!("If you're rust-analyzer, you can use this tool by exporting RUST_ANALYZER_INTERNALS_DO_NOT_USE='this is unstable'.");
+            eprintln!("If not, you probably shouldn't use this tool. But do what you want: I'm an error message, not a cop.");
+            std::process::exit(122);
+        }
+    }
+
+    cli::run()
+}
@@ -62,8 +62,7 @@ fn main() {
         Command::new(toolchain::cargo())
     };
 
-    cmd
-        .current_dir(&staging_dir)
+    cmd.current_dir(&staging_dir)
         .args(&["build", "-p", "proc-macro-test-impl", "--message-format", "json"])
         // Explicit override the target directory to avoid using the same one which the parent
        // cargo is using, or we'll deadlock.
@ -17,6 +17,9 @@ use crate::cfg_flag::CfgFlag;
|
||||
/// Roots and crates that compose this Rust project.
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ProjectJson {
|
||||
/// e.g. `path/to/sysroot`
|
||||
pub(crate) sysroot: Option<AbsPathBuf>,
|
||||
/// e.g. `path/to/sysroot/lib/rustlib/src/rust`
|
||||
pub(crate) sysroot_src: Option<AbsPathBuf>,
|
||||
project_root: AbsPathBuf,
|
||||
crates: Vec<Crate>,
|
||||
@ -52,6 +55,7 @@ impl ProjectJson {
|
||||
/// configuration.
|
||||
pub fn new(base: &AbsPath, data: ProjectJsonData) -> ProjectJson {
|
||||
ProjectJson {
|
||||
sysroot: data.sysroot.map(|it| base.join(it)),
|
||||
sysroot_src: data.sysroot_src.map(|it| base.join(it)),
|
||||
project_root: base.to_path_buf(),
|
||||
crates: data
|
||||
@ -122,6 +126,7 @@ impl ProjectJson {
|
||||
|
||||
#[derive(Deserialize, Debug, Clone)]
|
||||
pub struct ProjectJsonData {
|
||||
sysroot: Option<PathBuf>,
|
||||
sysroot_src: Option<PathBuf>,
|
||||
crates: Vec<CrateData>,
|
||||
}
|
||||
|
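The new optional `sysroot` field sits next to the existing `sysroot_src` in the rust-project.json payload; both are plain paths resolved against the project base, and either may be omitted. A rough sketch of how such a payload deserializes, assuming serde (with the derive feature) and serde_json as dependencies; the struct below is a simplified stand-in, not the real ProjectJsonData (it drops `crates` and the other fields).

use std::path::PathBuf;

use serde::Deserialize; // assumes serde = { version = "1", features = ["derive"] }

#[derive(Deserialize, Debug)]
struct ProjectJsonDataSketch {
    sysroot: Option<PathBuf>,
    sysroot_src: Option<PathBuf>,
}

fn main() -> Result<(), serde_json::Error> {
    // Either field (or both) may be omitted by the build system generating the file.
    let raw = r#"{ "sysroot": "/opt/rust", "sysroot_src": "/opt/rust/lib/rustlib/src/rust/library" }"#;
    let data: ProjectJsonDataSketch = serde_json::from_str(raw)?;
    println!("sysroot = {:?}, sysroot_src = {:?}", data.sysroot, data.sysroot_src);
    Ok(())
}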
@ -15,6 +15,7 @@ use crate::{utf8_stdout, ManifestPath};
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct Sysroot {
root: AbsPathBuf,
src_root: AbsPathBuf,
crates: Arena<SysrootCrateData>,
}

@ -35,10 +36,19 @@ impl ops::Index<SysrootCrate> for Sysroot {
}

impl Sysroot {
/// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
/// subfolder live, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu`
pub fn root(&self) -> &AbsPath {
&self.root
}

/// Returns the sysroot "source" directory, where stdlib sources are located, like:
/// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
pub fn src_root(&self) -> &AbsPath {
&self.src_root
}

pub fn public_deps(&self) -> impl Iterator<Item = (&'static str, SysrootCrate, bool)> + '_ {
// core is added as a dependency before std in order to
// mimic rustcs dependency order

@ -61,7 +71,7 @@ impl Sysroot {
tracing::debug!("Discovering sysroot for {}", dir.display());
let sysroot_dir = discover_sysroot_dir(dir)?;
let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir, dir)?;
let res = Sysroot::load(sysroot_src_dir)?;
let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
Ok(res)
}

@ -71,14 +81,15 @@ impl Sysroot {
discover_sysroot_dir(current_dir).ok().and_then(|sysroot_dir| get_rustc_src(&sysroot_dir))
}

pub fn load(sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
let mut sysroot = Sysroot { root: sysroot_src_dir, crates: Arena::default() };
pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
let mut sysroot =
Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };

for path in SYSROOT_CRATES.trim().lines() {
let name = path.split('/').last().unwrap();
let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)]
.into_iter()
.map(|it| sysroot.root.join(it))
.map(|it| sysroot.src_root.join(it))
.filter_map(|it| ManifestPath::try_from(it).ok())
.find(|it| fs::metadata(it).is_ok());
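The split into `root` and `src_root` mirrors the rustup toolchain layout: the root holds `bin/`, `lib/`, and (on newer toolchains) `libexec/`, while the standard-library sources live several directories further down, and crate roots are now resolved against the source directory. A small path-only sketch of how the two relate under that layout; the toolchain directory is a made-up example, and a real setup would get it from `rustc --print sysroot`.

use std::path::PathBuf;

fn main() {
    // Example rustup-style sysroot root (illustrative path).
    let root = PathBuf::from("/home/user/.rustup/toolchains/nightly-x86_64-unknown-linux-gnu");

    // Where the stdlib sources live relative to that root.
    let src_root = root.join("lib/rustlib/src/rust/library");

    // Crate roots are resolved against src_root, e.g. for `core`:
    let core_lib = src_root.join("core/src/lib.rs");

    println!("root     = {}", root.display());
    println!("src_root = {}", src_root.display());
    println!("core     = {}", core_lib.display());
}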
@ -119,7 +130,7 @@ impl Sysroot {
};
anyhow::bail!(
"could not find libcore in sysroot path `{}`{}",
sysroot.root.as_path().display(),
sysroot.src_root.as_path().display(),
var_note,
);
}

@ -75,8 +75,11 @@ fn get_test_path(file: &str) -> PathBuf {

fn get_fake_sysroot() -> Sysroot {
let sysroot_path = get_test_path("fake-sysroot");
let sysroot_src_dir = AbsPathBuf::assert(sysroot_path);
Sysroot::load(sysroot_src_dir).unwrap()
// there's no `libexec/` directory with a `proc-macro-srv` binary in that
// fake sysroot, so we give them both the same path:
let sysroot_dir = AbsPathBuf::assert(sysroot_path);
let sysroot_src_dir = sysroot_dir.clone();
Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap()
}

fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {

@ -230,10 +230,26 @@ impl ProjectWorkspace {
project_json: ProjectJson,
target: Option<&str>,
) -> Result<ProjectWorkspace> {
let sysroot = match &project_json.sysroot_src {
Some(path) => Some(Sysroot::load(path.clone())?),
None => None,
let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
(Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
(Some(sysroot), None) => {
// assume sysroot is structured like rustup's and guess `sysroot_src`
let sysroot_src =
sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");

Some(Sysroot::load(sysroot, sysroot_src)?)
}
(None, Some(sysroot_src)) => {
// assume sysroot is structured like rustup's and guess `sysroot`
let mut sysroot = sysroot_src.clone();
for _ in 0..5 {
sysroot.pop();
}
Some(Sysroot::load(sysroot, sysroot_src)?)
}
(None, None) => None,
};

let rustc_cfg = rustc_cfg::get(None, target);
Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
}
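When rust-project.json supplies only one of the two paths, the other is guessed by assuming the rustup layout: `sysroot_src` is `<sysroot>/lib/rustlib/src/rust/library`, and conversely `sysroot` is obtained by popping those five components off `sysroot_src`. A standalone sketch of both directions, with an illustrative path:

use std::path::{Path, PathBuf};

// sysroot -> sysroot_src, assuming a rustup-style layout.
fn guess_src(sysroot: &Path) -> PathBuf {
    sysroot.join("lib").join("rustlib").join("src").join("rust").join("library")
}

// sysroot_src -> sysroot: drop the same five trailing components.
fn guess_root(sysroot_src: &Path) -> PathBuf {
    let mut root = sysroot_src.to_path_buf();
    for _ in 0..5 {
        root.pop();
    }
    root
}

fn main() {
    let sysroot = PathBuf::from("/opt/toolchain");
    let src = guess_src(&sysroot);
    assert_eq!(src, PathBuf::from("/opt/toolchain/lib/rustlib/src/rust/library"));
    assert_eq!(guess_root(&src), sysroot);
    println!("round-trip ok: {}", src.display());
}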
@ -345,7 +361,7 @@ impl ProjectWorkspace {
})
.chain(sysroot.iter().map(|sysroot| PackageRoot {
is_local: false,
include: vec![sysroot.root().to_path_buf()],
include: vec![sysroot.src_root().to_path_buf()],
exclude: Vec::new(),
}))
.chain(rustc.iter().flat_map(|rustc| {

@ -60,9 +60,9 @@ pub fn load_workspace(

let proc_macro_client = if load_config.with_proc_macro {
let path = AbsPathBuf::assert(std::env::current_exe()?);
Some(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap())
Ok(ProcMacroServer::spawn(path, &["proc-macro"]).unwrap())
} else {
None
Err("proc macro server not started".to_owned())
};

let crate_graph = ws.to_crate_graph(

@ -89,7 +89,7 @@ pub fn load_workspace(
if load_config.prefill_caches {
host.analysis().parallel_prime_caches(1, |_| {})?;
}
Ok((host, vfs, proc_macro_client))
Ok((host, vfs, proc_macro_client.ok()))
}

fn load_crate_graph(
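Switching the proc-macro client from `Option<ProcMacroServer>` to `Result<ProcMacroServer, String>` keeps the reason for a missing server around, and callers that only care about presence convert back with `.ok()` as in the hunk above. A toy illustration of that boundary with a stand-in server type (not the real ProcMacroServer API):

// Stand-in for ProcMacroServer; only here to make the sketch compile.
#[derive(Debug)]
struct Server;

fn start(enabled: bool) -> Result<Server, String> {
    if enabled {
        Ok(Server)
    } else {
        // The error now explains *why* there is no server instead of being a bare None.
        Err("proc macro server not started".to_owned())
    }
}

fn main() {
    let client = start(false);

    // Callers that need the reason can inspect the Err...
    if let Err(why) = &client {
        eprintln!("no proc-macro expansion: {why}");
    }

    // ...while APIs that still expect an Option get one at the boundary.
    let as_option: Option<Server> = client.ok();
    assert!(as_option.is_none());
}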
@ -61,7 +61,7 @@ pub(crate) struct GlobalState {
pub(crate) proc_macro_changed: bool,
pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
pub(crate) source_root_config: SourceRootConfig,
pub(crate) proc_macro_client: Option<ProcMacroServer>,
pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,

pub(crate) flycheck: Vec<FlycheckHandle>,
pub(crate) flycheck_sender: Sender<flycheck::Message>,

@ -151,7 +151,7 @@ impl GlobalState {
proc_macro_changed: false,
last_reported_status: None,
source_root_config: SourceRootConfig::default(),
proc_macro_client: None,
proc_macro_clients: vec![],

flycheck: Vec::new(),
flycheck_sender,

@ -44,7 +44,7 @@ use crate::{
};

pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
state.proc_macro_client = None;
state.proc_macro_clients.clear();
state.proc_macro_changed = false;
state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
state.fetch_build_data_queue.request_op("reload workspace request".to_string());

@ -303,18 +303,55 @@ impl GlobalState {
let files_config = self.config.files();
let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);

if self.proc_macro_client.is_none() {
if self.proc_macro_clients.is_empty() {
if let Some((path, args)) = self.config.proc_macro_srv() {
match ProcMacroServer::spawn(path.clone(), args) {
Ok(it) => self.proc_macro_client = Some(it),
Err(err) => {
tracing::error!(
"Failed to run proc_macro_srv from path {}, error: {:?}",
self.proc_macro_clients = self
.workspaces
.iter()
.map(|ws| {
let mut args = args.clone();
let mut path = path.clone();

if let ProjectWorkspace::Cargo { sysroot, .. } = ws {
tracing::info!("Found a cargo workspace...");
if let Some(sysroot) = sysroot.as_ref() {
tracing::info!("Found a cargo workspace with a sysroot...");
let server_path = sysroot
.root()
.join("libexec")
.join("rust-analyzer-proc-macro-srv");
if std::fs::metadata(&server_path).is_ok() {
tracing::info!(
"And the server exists at {}",
server_path.display()
);
path = server_path;
args = vec![];
} else {
tracing::info!(
"And the server does not exist at {}",
server_path.display()
);
}
}
}

tracing::info!(
"Using proc-macro server at {} with args {:?}",
path.display(),
err
args
);
}
}
ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
let error = format!(
"Failed to run proc_macro_srv from path {}, error: {:?}",
path.display(),
err
);
tracing::error!(error);
error
})
})
.collect();
}
}
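The interesting part of this hunk is the lookup order: for each cargo workspace with a sysroot, rust-analyzer now prefers a `rust-analyzer-proc-macro-srv` binary shipped under the sysroot's `libexec/` directory and only falls back to the configured path (typically its own executable, invoked with a `proc-macro` argument) if that file is absent. A minimal standalone sketch of that fallback; both paths below are examples, not real defaults.

use std::path::{Path, PathBuf};

// Pick the proc-macro server binary: prefer the one bundled in the sysroot's
// libexec/, otherwise keep the configured default.
fn choose_server(sysroot_root: Option<&Path>, default: PathBuf) -> (PathBuf, Vec<String>) {
    if let Some(root) = sysroot_root {
        let bundled = root.join("libexec").join("rust-analyzer-proc-macro-srv");
        if std::fs::metadata(&bundled).is_ok() {
            // The bundled binary needs no extra arguments.
            return (bundled, Vec::new());
        }
    }
    // Fall back to the configured server, e.g. `rust-analyzer proc-macro`.
    (default, vec!["proc-macro".to_owned()])
}

fn main() {
    let (path, args) = choose_server(
        Some(Path::new("/opt/toolchain")),       // pretend sysroot root
        PathBuf::from("/usr/bin/rust-analyzer"), // pretend configured default
    );
    println!("using proc-macro server at {} with args {:?}", path.display(), args);
}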
@ -331,15 +368,7 @@ impl GlobalState {

// Create crate graph from all the workspaces
let crate_graph = {
let proc_macro_client = self.proc_macro_client.as_ref();
let dummy_replacements = self.config.dummy_replacements();
let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
load_proc_macro(
proc_macro_client,
path,
dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
)
};

let vfs = &mut self.vfs.write().0;
let loader = &mut self.loader;

@ -359,7 +388,15 @@ impl GlobalState {
};

let mut crate_graph = CrateGraph::default();
for ws in self.workspaces.iter() {
for (idx, ws) in self.workspaces.iter().enumerate() {
let proc_macro_client = self.proc_macro_clients[idx].as_ref();
let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
load_proc_macro(
proc_macro_client,
path,
dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
)
};
crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
}
crate_graph
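With one spawned server per workspace, the crate-graph loop now walks workspaces with `enumerate` so it can pair each workspace with its own client by index. A toy version of that pairing with stand-in types (nothing here is the real rust-analyzer API):

// Stand-ins so the sketch compiles on its own.
struct Client(&'static str);
struct Workspace(&'static str);

fn main() {
    let workspaces = vec![Workspace("app"), Workspace("scripts")];
    // One Result per workspace, in the same order as `workspaces`.
    let clients: Vec<Result<Client, String>> = vec![
        Ok(Client("bundled server")),
        Err("failed to spawn server".to_owned()),
    ];

    for (idx, ws) in workspaces.iter().enumerate() {
        // Index-based pairing relies on both vectors being built from the
        // same workspace list, which is what the reload code above arranges.
        match clients[idx].as_ref() {
            Ok(client) => println!("{}: expanding with {}", ws.0, client.0),
            Err(err) => println!("{}: proc macros disabled ({err})", ws.0),
        }
    }
}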
@ -536,14 +573,14 @@ impl SourceRootConfig {
/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
/// with an identity dummy expander.
pub(crate) fn load_proc_macro(
server: Option<&ProcMacroServer>,
server: Result<&ProcMacroServer, &String>,
path: &AbsPath,
dummy_replace: &[Box<str>],
) -> ProcMacroLoadResult {
let res: Result<Vec<_>, String> = (|| {
let dylib = MacroDylib::new(path.to_path_buf())
.map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
let server = server.ok_or_else(|| format!("Proc-macro server not started"))?;
let server = server.map_err(ToOwned::to_owned)?;
let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
if vec.is_empty() {
return Err("proc macro library returned no proc macros".to_string());
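`load_proc_macro` now receives `Result<&ProcMacroServer, &String>` and clones the original spawn error into its own `Result<_, String>` pipeline with `map_err(ToOwned::to_owned)`, instead of manufacturing a generic "not started" message from a `None`. A small standalone sketch of that error plumbing with stand-in types:

struct Server; // stand-in for ProcMacroServer

impl Server {
    fn load(&self, path: &str) -> Result<Vec<String>, String> {
        Ok(vec![format!("macros from {path}")])
    }
}

// Mirrors the new shape: the caller passes along either a server reference
// or a reference to the spawn error recorded earlier.
fn load_macros(server: Result<&Server, &String>, path: &str) -> Result<Vec<String>, String> {
    let server = server.map_err(ToOwned::to_owned)?; // propagate the original reason
    server.load(path)
}

fn main() {
    let err = "Failed to run proc_macro_srv from path /bad/path".to_owned();
    let spawned: Result<Server, String> = Err(err);

    let res = load_macros(spawned.as_ref(), "libserde_derive.so");
    assert_eq!(res.unwrap_err(), "Failed to run proc_macro_srv from path /bad/path");
}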
|
@ -54,6 +54,7 @@ define_semantic_token_types![
|
||||
(COMPARISON, "comparison"),
|
||||
(CONST_PARAMETER, "constParameter"),
|
||||
(DERIVE, "derive"),
|
||||
(DERIVE_HELPER, "deriveHelper"),
|
||||
(DOT, "dot"),
|
||||
(ESCAPE_SEQUENCE, "escapeSequence"),
|
||||
(FORMAT_SPECIFIER, "formatSpecifier"),
|
||||
|
@ -53,7 +53,8 @@ pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
|
||||
SymbolKind::Macro
|
||||
| SymbolKind::BuiltinAttr
|
||||
| SymbolKind::Attribute
|
||||
| SymbolKind::Derive => lsp_types::SymbolKind::FUNCTION,
|
||||
| SymbolKind::Derive
|
||||
| SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
|
||||
SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
|
||||
SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
|
||||
lsp_types::SymbolKind::TYPE_PARAMETER
|
||||
@ -117,6 +118,7 @@ pub(crate) fn completion_item_kind(
|
||||
SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
|
||||
SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
|
||||
SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
|
||||
SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
|
||||
SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
|
||||
SymbolKind::Field => lsp_types::CompletionItemKind::FIELD,
|
||||
SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION,
|
||||
@ -561,6 +563,7 @@ fn semantic_token_type_and_modifiers(
|
||||
HlTag::Symbol(symbol) => match symbol {
|
||||
SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
|
||||
SymbolKind::Derive => semantic_tokens::DERIVE,
|
||||
SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
|
||||
SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
|
||||
SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
|
||||
SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
|
||||