Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-25 08:13:41 +00:00)

Commit 36e3fc9d54 (parent e975f6364c): Rename Source::ast -> Source::value
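In short, this commit renames the `ast` field of `hir::Source<T>` to `value` and updates every construction site and field access to match (`src.ast` becomes `src.value`, `Source { file_id, ast }` becomes `Source { file_id, value }`). Below is a condensed sketch of the central change, pieced together from the `Source<T>` definition further down in this diff; it is an illustration, not a verbatim excerpt.

    // Before: the payload field was called `ast`, even though it also held
    // non-AST values such as SyntaxNodePtr, TextUnit, or ModuleSource.
    pub struct Source<T> {
        pub file_id: HirFileId,
        pub ast: T,
    }

    // After: the field is renamed to `value`; constructors and helpers keep
    // the same shape, only the field/parameter name changes.
    pub struct Source<T> {
        pub file_id: HirFileId,
        pub value: T,
    }

    impl<T> Source<T> {
        pub fn new(file_id: HirFileId, value: T) -> Source<T> {
            Source { file_id, value }
        }
    }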
@@ -174,7 +174,7 @@ fn resolve_target_trait_def(
      .path()?;

  match analyzer.resolve_path(db, &ast_path) {
- Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).ast),
+ Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def.source(db).value),
  _ => None,
  }
  }

@@ -141,7 +141,7 @@ fn find_struct_impl(
  })?;

  let struct_ty = {
- let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: strukt.clone() };
+ let src = hir::Source { file_id: ctx.frange.file_id.into(), value: strukt.clone() };
  hir::Struct::from_source(db, src).unwrap().ty(db)
  };

@@ -152,7 +152,7 @@ fn find_struct_impl(
  return false;
  }

- let src = hir::Source { file_id: ctx.frange.file_id.into(), ast: impl_blk.clone() };
+ let src = hir::Source { file_id: ctx.frange.file_id.into(), value: impl_blk.clone() };
  let blk = hir::ImplBlock::from_source(db, src).unwrap();

  let same_ty = blk.target_ty(db) == struct_ty;

@@ -84,7 +84,7 @@ fn resolve_enum_def(
  let expr_ty = analyzer.type_of(db, &expr)?;

  analyzer.autoderef(db, expr_ty).find_map(|ty| match ty.as_adt() {
- Some((Adt::Enum(e), _)) => Some(e.source(db).ast),
+ Some((Adt::Enum(e), _)) => Some(e.source(db).value),
  _ => None,
  })
  }

@@ -98,7 +98,7 @@ pub fn run(
  let src = f.source(db);
  let original_file = src.file_id.original_file(db);
  let path = db.file_relative_path(original_file);
- let syntax_range = src.ast.syntax().text_range();
+ let syntax_range = src.value.syntax().text_range();
  write!(msg, " ({:?} {})", path, syntax_range).unwrap();
  }
  bar.set_message(&msg);
@@ -135,7 +135,7 @@ pub fn run(
  let path = db.file_relative_path(original_file);
  let line_index = host.analysis().file_line_index(original_file).unwrap();
  let text_range = src
-     .ast
+     .value
      .either(|it| it.syntax().text_range(), |it| it.syntax().text_range());
  let (start, end) = (
  line_index.line_col(text_range.start()),

@@ -139,7 +139,7 @@ impl Module {
  ) -> Either<ast::UseTree, ast::ExternCrateItem> {
  let src = self.definition_source(db);
  let (_, source_map) = db.raw_items_with_source_map(src.file_id);
- source_map.get(&src.ast, import)
+ source_map.get(&src.value, import)
  }

  /// Returns the crate this module is part of.

@@ -206,7 +206,7 @@ impl Module {
  crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
  crate::ModuleDef::Module(m) => {
  // Only add diagnostics from inline modules
- if let ModuleSource::Module(_) = m.definition_source(db).ast {
+ if let ModuleSource::Module(_) = m.definition_source(db).value {
  m.diagnostics(db, sink)
  }
  }

@@ -598,10 +598,10 @@ impl FnData {
  func: Function,
  ) -> Arc<FnData> {
  let src = func.source(db);
- let name = src.ast.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
+ let name = src.value.name().map(|n| n.as_name()).unwrap_or_else(Name::missing);
  let mut params = Vec::new();
  let mut has_self_param = false;
- if let Some(param_list) = src.ast.param_list() {
+ if let Some(param_list) = src.value.param_list() {
  if let Some(self_param) = param_list.self_param() {
  let self_type = if let Some(type_ref) = self_param.ascribed_type() {
  TypeRef::from_ast(type_ref)

@@ -625,7 +625,7 @@ impl FnData {
  params.push(type_ref);
  }
  }
- let ret_type = if let Some(type_ref) = src.ast.ret_type().and_then(|rt| rt.type_ref()) {
+ let ret_type = if let Some(type_ref) = src.value.ret_type().and_then(|rt| rt.type_ref()) {
  TypeRef::from_ast(type_ref)
  } else {
  TypeRef::unit()

@@ -801,7 +801,7 @@ impl ConstData {
  db: &(impl DefDatabase + AstDatabase),
  konst: Const,
  ) -> Arc<ConstData> {
- let node = konst.source(db).ast;
+ let node = konst.source(db).value;
  const_data_for(&node)
  }

@@ -809,7 +809,7 @@ impl ConstData {
  db: &(impl DefDatabase + AstDatabase),
  konst: Static,
  ) -> Arc<ConstData> {
- let node = konst.source(db).ast;
+ let node = konst.source(db).value;
  const_data_for(&node)
  }
  }
@ -49,9 +49,9 @@ pub(crate) fn attributes_query(
|
||||
AttrDef::Module(it) => {
|
||||
let src = it.declaration_source(db)?;
|
||||
let hygiene = Hygiene::new(db, src.file_id);
|
||||
Attr::from_attrs_owner(&src.ast, &hygiene)
|
||||
Attr::from_attrs_owner(&src.value, &hygiene)
|
||||
}
|
||||
AttrDef::StructField(it) => match it.source(db).ast {
|
||||
AttrDef::StructField(it) => match it.source(db).value {
|
||||
FieldSource::Named(named) => {
|
||||
let src = it.source(db);
|
||||
let hygiene = Hygiene::new(db, src.file_id);
|
||||
@ -82,7 +82,7 @@ where
|
||||
{
|
||||
let src = node.source(db);
|
||||
let hygiene = Hygiene::new(db, src.file_id);
|
||||
Attr::from_attrs_owner(&src.ast, &hygiene)
|
||||
Attr::from_attrs_owner(&src.value, &hygiene)
|
||||
}
|
||||
|
||||
impl<T: Into<AttrDef> + Copy> Attrs for T {
|
||||
|
@ -70,23 +70,23 @@ pub(crate) fn documentation_query(
|
||||
def: DocDef,
|
||||
) -> Option<Documentation> {
|
||||
match def {
|
||||
DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.ast),
|
||||
DocDef::StructField(it) => match it.source(db).ast {
|
||||
DocDef::Module(it) => docs_from_ast(&it.declaration_source(db)?.value),
|
||||
DocDef::StructField(it) => match it.source(db).value {
|
||||
FieldSource::Named(named) => docs_from_ast(&named),
|
||||
FieldSource::Pos(..) => None,
|
||||
},
|
||||
DocDef::Adt(it) => match it {
|
||||
Adt::Struct(it) => docs_from_ast(&it.source(db).ast),
|
||||
Adt::Enum(it) => docs_from_ast(&it.source(db).ast),
|
||||
Adt::Union(it) => docs_from_ast(&it.source(db).ast),
|
||||
Adt::Struct(it) => docs_from_ast(&it.source(db).value),
|
||||
Adt::Enum(it) => docs_from_ast(&it.source(db).value),
|
||||
Adt::Union(it) => docs_from_ast(&it.source(db).value),
|
||||
},
|
||||
DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Static(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Const(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Function(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::Trait(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::MacroDef(it) => docs_from_ast(&it.source(db).ast),
|
||||
DocDef::EnumVariant(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Static(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Const(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Function(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::Trait(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::TypeAlias(it) => docs_from_ast(&it.source(db).value),
|
||||
DocDef::MacroDef(it) => docs_from_ast(&it.source(db).value),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -25,9 +25,9 @@ impl Module {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let decl_id = def_map[self.id.module_id].declaration;
|
||||
let file_id = def_map[self.id.module_id].definition;
|
||||
let ast = ModuleSource::new(db, file_id, decl_id);
|
||||
let value = ModuleSource::new(db, file_id, decl_id);
|
||||
let file_id = file_id.map(HirFileId::from).unwrap_or_else(|| decl_id.unwrap().file_id());
|
||||
Source { file_id, ast }
|
||||
Source { file_id, value }
|
||||
}
|
||||
|
||||
/// Returns a node which declares this module, either a `mod foo;` or a `mod foo {}`.
|
||||
@ -38,8 +38,8 @@ impl Module {
|
||||
) -> Option<Source<ast::Module>> {
|
||||
let def_map = db.crate_def_map(self.id.krate);
|
||||
let decl = def_map[self.id.module_id].declaration?;
|
||||
let ast = decl.to_node(db);
|
||||
Some(Source { file_id: decl.file_id(), ast })
|
||||
let value = decl.to_node(db);
|
||||
Some(Source { file_id: decl.file_id(), value })
|
||||
}
|
||||
}
|
||||
|
||||
@ -53,11 +53,11 @@ impl HasSource for StructField {
|
||||
let (file_id, struct_kind) = match self.parent {
|
||||
VariantDef::Struct(s) => {
|
||||
ss = s.source(db);
|
||||
(ss.file_id, ss.ast.kind())
|
||||
(ss.file_id, ss.value.kind())
|
||||
}
|
||||
VariantDef::EnumVariant(e) => {
|
||||
es = e.source(db);
|
||||
(es.file_id, es.ast.kind())
|
||||
(es.file_id, es.value.kind())
|
||||
}
|
||||
};
|
||||
|
||||
@ -66,13 +66,13 @@ impl HasSource for StructField {
|
||||
ast::StructKind::Named(fl) => fl.fields().map(|it| FieldSource::Named(it)).collect(),
|
||||
ast::StructKind::Unit => Vec::new(),
|
||||
};
|
||||
let ast = field_sources
|
||||
let value = field_sources
|
||||
.into_iter()
|
||||
.zip(fields.iter())
|
||||
.find(|(_syntax, (id, _))| *id == self.id)
|
||||
.unwrap()
|
||||
.0;
|
||||
Source { file_id, ast }
|
||||
Source { file_id, value }
|
||||
}
|
||||
}
|
||||
impl HasSource for Struct {
|
||||
@ -98,8 +98,8 @@ impl HasSource for EnumVariant {
|
||||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::EnumVariant> {
|
||||
let enum_data = db.enum_data(self.parent.id);
|
||||
let src = self.parent.id.source(db);
|
||||
let ast = src
|
||||
.ast
|
||||
let value = src
|
||||
.value
|
||||
.variant_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.variants())
|
||||
@ -107,7 +107,7 @@ impl HasSource for EnumVariant {
|
||||
.find(|(_syntax, (id, _))| *id == self.id)
|
||||
.unwrap()
|
||||
.0;
|
||||
Source { file_id: src.file_id, ast }
|
||||
Source { file_id: src.file_id, value }
|
||||
}
|
||||
}
|
||||
impl HasSource for Function {
|
||||
@ -143,7 +143,7 @@ impl HasSource for TypeAlias {
|
||||
impl HasSource for MacroDef {
|
||||
type Ast = ast::MacroCall;
|
||||
fn source(self, db: &(impl DefDatabase + AstDatabase)) -> Source<ast::MacroCall> {
|
||||
Source { file_id: self.id.ast_id.file_id(), ast: self.id.ast_id.to_node(db) }
|
||||
Source { file_id: self.id.ast_id.file_id(), value: self.id.ast_id.to_node(db) }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -21,7 +21,7 @@ impl Diagnostic for NoSuchField {
|
||||
}
|
||||
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.field.into() }
|
||||
Source { file_id: self.file, value: self.field.into() }
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
@ -41,7 +41,7 @@ impl Diagnostic for MissingFields {
|
||||
"fill structure fields".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.field_list.into() }
|
||||
Source { file_id: self.file, value: self.field_list.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
@ -53,7 +53,7 @@ impl AstDiagnostic for MissingFields {
|
||||
|
||||
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
|
||||
let root = db.parse_or_expand(self.source().file_id).unwrap();
|
||||
let node = self.source().ast.to_node(&root);
|
||||
let node = self.source().value.to_node(&root);
|
||||
ast::RecordFieldList::cast(node).unwrap()
|
||||
}
|
||||
}
|
||||
@ -69,7 +69,7 @@ impl Diagnostic for MissingOkInTailExpr {
|
||||
"wrap return expression in Ok".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.expr.into() }
|
||||
Source { file_id: self.file, value: self.expr.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
@ -81,7 +81,7 @@ impl AstDiagnostic for MissingOkInTailExpr {
|
||||
|
||||
fn ast(&self, db: &impl AstDatabase) -> Self::AST {
|
||||
let root = db.parse_or_expand(self.file).unwrap();
|
||||
let node = self.source().ast.to_node(&root);
|
||||
let node = self.source().value.to_node(&root);
|
||||
ast::Expr::cast(node).unwrap()
|
||||
}
|
||||
}
|
||||
|
@ -116,7 +116,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
|
||||
let source_map = self.func.body_source_map(db);
|
||||
|
||||
if let Some(source_ptr) = source_map.expr_syntax(id) {
|
||||
if let Some(expr) = source_ptr.ast.a() {
|
||||
if let Some(expr) = source_ptr.value.a() {
|
||||
let root = source_ptr.file_syntax(db);
|
||||
if let ast::Expr::RecordLit(record_lit) = expr.to_node(&root) {
|
||||
if let Some(field_list) = record_lit.record_field_list() {
|
||||
@ -161,7 +161,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
|
||||
let source_map = self.func.body_source_map(db);
|
||||
|
||||
if let Some(source_ptr) = source_map.expr_syntax(id) {
|
||||
if let Some(expr) = source_ptr.ast.a() {
|
||||
if let Some(expr) = source_ptr.value.a() {
|
||||
self.sink.push(MissingOkInTailExpr { file: source_ptr.file_id, expr });
|
||||
}
|
||||
}
|
||||
|
@ -87,7 +87,7 @@ impl FromSource for MacroDef {
|
||||
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
|
||||
let krate = module.krate().crate_id();
|
||||
|
||||
let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.ast));
|
||||
let ast_id = AstId::new(src.file_id, db.ast_id_map(src.file_id).ast_id(&src.value));
|
||||
|
||||
let id: MacroDefId = MacroDefId { krate, ast_id, kind };
|
||||
Some(MacroDef { id })
|
||||
@ -105,8 +105,8 @@ impl FromSource for ImplBlock {
|
||||
impl FromSource for EnumVariant {
|
||||
type Ast = ast::EnumVariant;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let parent_enum = src.ast.parent_enum();
|
||||
let src_enum = Source { file_id: src.file_id, ast: parent_enum };
|
||||
let parent_enum = src.value.parent_enum();
|
||||
let src_enum = Source { file_id: src.file_id, value: parent_enum };
|
||||
let variants = Enum::from_source(db, src_enum)?.variants(db);
|
||||
variants.into_iter().find(|v| v.source(db) == src)
|
||||
}
|
||||
@ -115,16 +115,16 @@ impl FromSource for EnumVariant {
|
||||
impl FromSource for StructField {
|
||||
type Ast = FieldSource;
|
||||
fn from_source(db: &(impl DefDatabase + AstDatabase), src: Source<Self::Ast>) -> Option<Self> {
|
||||
let variant_def: VariantDef = match src.ast {
|
||||
let variant_def: VariantDef = match src.value {
|
||||
FieldSource::Named(ref field) => {
|
||||
let ast = field.syntax().ancestors().find_map(ast::StructDef::cast)?;
|
||||
let src = Source { file_id: src.file_id, ast };
|
||||
let value = field.syntax().ancestors().find_map(ast::StructDef::cast)?;
|
||||
let src = Source { file_id: src.file_id, value };
|
||||
let def = Struct::from_source(db, src)?;
|
||||
VariantDef::from(def)
|
||||
}
|
||||
FieldSource::Pos(ref field) => {
|
||||
let ast = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?;
|
||||
let src = Source { file_id: src.file_id, ast };
|
||||
let value = field.syntax().ancestors().find_map(ast::EnumVariant::cast)?;
|
||||
let src = Source { file_id: src.file_id, value };
|
||||
let def = EnumVariant::from_source(db, src)?;
|
||||
VariantDef::from(def)
|
||||
}
|
||||
@ -142,12 +142,12 @@ impl FromSource for StructField {
|
||||
impl Local {
|
||||
pub fn from_source(db: &impl HirDatabase, src: Source<ast::BindPat>) -> Option<Self> {
|
||||
let file_id = src.file_id;
|
||||
let parent: DefWithBody = src.ast.syntax().ancestors().find_map(|it| {
|
||||
let parent: DefWithBody = src.value.syntax().ancestors().find_map(|it| {
|
||||
let res = match_ast! {
|
||||
match it {
|
||||
ast::ConstDef(ast) => { Const::from_source(db, Source { ast, file_id})?.into() },
|
||||
ast::StaticDef(ast) => { Static::from_source(db, Source { ast, file_id})?.into() },
|
||||
ast::FnDef(ast) => { Function::from_source(db, Source { ast, file_id})?.into() },
|
||||
ast::ConstDef(value) => { Const::from_source(db, Source { value, file_id})?.into() },
|
||||
ast::StaticDef(value) => { Static::from_source(db, Source { value, file_id})?.into() },
|
||||
ast::FnDef(value) => { Function::from_source(db, Source { value, file_id})?.into() },
|
||||
_ => return None,
|
||||
}
|
||||
};
|
||||
@ -162,33 +162,33 @@ impl Local {
|
||||
|
||||
impl Module {
|
||||
pub fn from_declaration(db: &impl DefDatabase, src: Source<ast::Module>) -> Option<Self> {
|
||||
let parent_declaration = src.ast.syntax().ancestors().skip(1).find_map(ast::Module::cast);
|
||||
let parent_declaration = src.value.syntax().ancestors().skip(1).find_map(ast::Module::cast);
|
||||
|
||||
let parent_module = match parent_declaration {
|
||||
Some(parent_declaration) => {
|
||||
let src_parent = Source { file_id: src.file_id, ast: parent_declaration };
|
||||
let src_parent = Source { file_id: src.file_id, value: parent_declaration };
|
||||
Module::from_declaration(db, src_parent)
|
||||
}
|
||||
_ => {
|
||||
let src_parent = Source {
|
||||
file_id: src.file_id,
|
||||
ast: ModuleSource::new(db, Some(src.file_id.original_file(db)), None),
|
||||
value: ModuleSource::new(db, Some(src.file_id.original_file(db)), None),
|
||||
};
|
||||
Module::from_definition(db, src_parent)
|
||||
}
|
||||
}?;
|
||||
|
||||
let child_name = src.ast.name()?;
|
||||
let child_name = src.value.name()?;
|
||||
parent_module.child(db, &child_name.as_name())
|
||||
}
|
||||
|
||||
pub fn from_definition(db: &impl DefDatabase, src: Source<ModuleSource>) -> Option<Self> {
|
||||
match src.ast {
|
||||
match src.value {
|
||||
ModuleSource::Module(ref module) => {
|
||||
assert!(!module.has_semi());
|
||||
return Module::from_declaration(
|
||||
db,
|
||||
Source { file_id: src.file_id, ast: module.clone() },
|
||||
Source { file_id: src.file_id, value: module.clone() },
|
||||
);
|
||||
}
|
||||
ModuleSource::SourceFile(_) => (),
|
||||
@ -214,5 +214,5 @@ where
|
||||
let module_src = ModuleSource::from_child_node(db, src.as_ref().map(|it| it.syntax()));
|
||||
let module = Module::from_definition(db, Source::new(src.file_id, module_src))?;
|
||||
let ctx = LocationCtx::new(db, module.id, src.file_id);
|
||||
Some(DEF::from_ast(ctx, &src.ast))
|
||||
Some(DEF::from_ast(ctx, &src.value))
|
||||
}
|
||||
|
@ -91,10 +91,10 @@ impl GenericParams {
|
||||
let start = generics.parent_params.as_ref().map(|p| p.params.len()).unwrap_or(0) as u32;
|
||||
// FIXME: add `: Sized` bound for everything except for `Self` in traits
|
||||
match def {
|
||||
GenericDef::Function(it) => generics.fill(&it.source(db).ast, start),
|
||||
GenericDef::Adt(Adt::Struct(it)) => generics.fill(&it.source(db).ast, start),
|
||||
GenericDef::Adt(Adt::Union(it)) => generics.fill(&it.source(db).ast, start),
|
||||
GenericDef::Adt(Adt::Enum(it)) => generics.fill(&it.source(db).ast, start),
|
||||
GenericDef::Function(it) => generics.fill(&it.source(db).value, start),
|
||||
GenericDef::Adt(Adt::Struct(it)) => generics.fill(&it.source(db).value, start),
|
||||
GenericDef::Adt(Adt::Union(it)) => generics.fill(&it.source(db).value, start),
|
||||
GenericDef::Adt(Adt::Enum(it)) => generics.fill(&it.source(db).value, start),
|
||||
GenericDef::Trait(it) => {
|
||||
// traits get the Self type as an implicit first type parameter
|
||||
generics.params.push(GenericParam {
|
||||
@ -102,17 +102,17 @@ impl GenericParams {
|
||||
name: name::SELF_TYPE,
|
||||
default: None,
|
||||
});
|
||||
generics.fill(&it.source(db).ast, start + 1);
|
||||
generics.fill(&it.source(db).value, start + 1);
|
||||
// add super traits as bounds on Self
|
||||
// i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
|
||||
let self_param = TypeRef::Path(name::SELF_TYPE.into());
|
||||
generics.fill_bounds(&it.source(db).ast, self_param);
|
||||
generics.fill_bounds(&it.source(db).value, self_param);
|
||||
}
|
||||
GenericDef::TypeAlias(it) => generics.fill(&it.source(db).ast, start),
|
||||
GenericDef::TypeAlias(it) => generics.fill(&it.source(db).value, start),
|
||||
// Note that we don't add `Self` here: in `impl`s, `Self` is not a
|
||||
// type-parameter, but rather is a type-alias for impl's target
|
||||
// type, so this is handled by the resolver.
|
||||
GenericDef::ImplBlock(it) => generics.fill(&it.source(db).ast, start),
|
||||
GenericDef::ImplBlock(it) => generics.fill(&it.source(db).value, start),
|
||||
GenericDef::EnumVariant(_) | GenericDef::Const(_) => {}
|
||||
}
|
||||
|
||||
|
@ -97,7 +97,7 @@ impl LangItems {
|
||||
// Look for impl targets
|
||||
for impl_block in module.impl_blocks(db) {
|
||||
let src = impl_block.source(db);
|
||||
if let Some(lang_item_name) = lang_item_name(&src.ast) {
|
||||
if let Some(lang_item_name) = lang_item_name(&src.value) {
|
||||
self.items
|
||||
.entry(lang_item_name)
|
||||
.or_insert_with(|| LangItemTarget::ImplBlock(impl_block));
|
||||
@ -144,7 +144,7 @@ impl LangItems {
|
||||
T: Copy + HasSource<Ast = N>,
|
||||
N: AttrsOwner,
|
||||
{
|
||||
let node = item.source(db).ast;
|
||||
let node = item.source(db).value;
|
||||
if let Some(lang_item_name) = lang_item_name(&node) {
|
||||
self.items.entry(lang_item_name).or_insert_with(|| constructor(item));
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ use crate::{
|
||||
|
||||
fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -> Option<Resolver> {
|
||||
match_ast! {
|
||||
match (node.ast) {
|
||||
match (node.value) {
|
||||
ast::Module(it) => {
|
||||
let src = node.with_ast(it);
|
||||
Some(crate::Module::from_declaration(db, src)?.resolver(db))
|
||||
@ -48,7 +48,7 @@ fn try_get_resolver_for_node(db: &impl HirDatabase, node: Source<&SyntaxNode>) -
|
||||
let src = node.with_ast(it);
|
||||
Some(Enum::from_source(db, src)?.resolver(db))
|
||||
},
|
||||
_ => match node.ast.kind() {
|
||||
_ => match node.value.kind() {
|
||||
FN_DEF | CONST_DEF | STATIC_DEF => {
|
||||
Some(def_with_body_from_child_node(db, node)?.resolver(db))
|
||||
}
|
||||
@ -67,7 +67,7 @@ fn def_with_body_from_child_node(
|
||||
let module = Module::from_definition(db, Source::new(child.file_id, module_source))?;
|
||||
let ctx = LocationCtx::new(db, module.id, child.file_id);
|
||||
|
||||
child.ast.ancestors().find_map(|node| {
|
||||
child.value.ancestors().find_map(|node| {
|
||||
match_ast! {
|
||||
match node {
|
||||
ast::FnDef(def) => { Some(Function {id: ctx.to_def(&def) }.into()) },
|
||||
@ -171,7 +171,7 @@ impl SourceAnalyzer {
|
||||
} else {
|
||||
SourceAnalyzer {
|
||||
resolver: node
|
||||
.ast
|
||||
.value
|
||||
.ancestors()
|
||||
.find_map(|it| try_get_resolver_for_node(db, node.with_ast(&it)))
|
||||
.unwrap_or_default(),
|
||||
@ -185,12 +185,12 @@ impl SourceAnalyzer {
|
||||
}
|
||||
|
||||
fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
|
||||
let src = Source { file_id: self.file_id, ast: expr };
|
||||
let src = Source { file_id: self.file_id, value: expr };
|
||||
self.body_source_map.as_ref()?.node_expr(src)
|
||||
}
|
||||
|
||||
fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
|
||||
let src = Source { file_id: self.file_id, ast: pat };
|
||||
let src = Source { file_id: self.file_id, value: pat };
|
||||
self.body_source_map.as_ref()?.node_pat(src)
|
||||
}
|
||||
|
||||
@ -302,7 +302,7 @@ impl SourceAnalyzer {
|
||||
let entry = scopes.resolve_name_in_scope(scope, &name)?;
|
||||
Some(ScopeEntryWithSyntax {
|
||||
name: entry.name().clone(),
|
||||
ptr: source_map.pat_syntax(entry.pat())?.ast,
|
||||
ptr: source_map.pat_syntax(entry.pat())?.value,
|
||||
})
|
||||
}
|
||||
|
||||
@ -428,7 +428,7 @@ fn scope_for(
|
||||
source_map: &BodySourceMap,
|
||||
node: Source<&SyntaxNode>,
|
||||
) -> Option<ScopeId> {
|
||||
node.ast
|
||||
node.value
|
||||
.ancestors()
|
||||
.filter_map(ast::Expr::cast)
|
||||
.filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it)))
|
||||
@ -450,18 +450,18 @@ fn scope_for_offset(
|
||||
return None;
|
||||
}
|
||||
let syntax_node_ptr =
|
||||
source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
|
||||
source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
|
||||
Some((syntax_node_ptr, scope))
|
||||
})
|
||||
// find containing scope
|
||||
.min_by_key(|(ptr, _scope)| {
|
||||
(
|
||||
!(ptr.range().start() <= offset.ast && offset.ast <= ptr.range().end()),
|
||||
!(ptr.range().start() <= offset.value && offset.value <= ptr.range().end()),
|
||||
ptr.range().len(),
|
||||
)
|
||||
})
|
||||
.map(|(ptr, scope)| {
|
||||
adjust(scopes, source_map, ptr, offset.file_id, offset.ast).unwrap_or(*scope)
|
||||
adjust(scopes, source_map, ptr, offset.file_id, offset.value).unwrap_or(*scope)
|
||||
})
|
||||
}
|
||||
|
||||
@ -485,7 +485,7 @@ fn adjust(
|
||||
return None;
|
||||
}
|
||||
let syntax_node_ptr =
|
||||
source.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
|
||||
source.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
|
||||
Some((syntax_node_ptr, scope))
|
||||
})
|
||||
.map(|(ptr, scope)| (ptr.range(), scope))
|
||||
|
@ -26,11 +26,11 @@ impl TraitData {
|
||||
tr: Trait,
|
||||
) -> Arc<TraitData> {
|
||||
let src = tr.source(db);
|
||||
let name = src.ast.name().map(|n| n.as_name());
|
||||
let name = src.value.name().map(|n| n.as_name());
|
||||
let module = tr.module(db);
|
||||
let ctx = LocationCtx::new(db, module.id, src.file_id);
|
||||
let auto = src.ast.is_auto();
|
||||
let items = if let Some(item_list) = src.ast.item_list() {
|
||||
let auto = src.value.is_auto();
|
||||
let items = if let Some(item_list) = src.value.item_list() {
|
||||
item_list
|
||||
.impl_items()
|
||||
.map(|item_node| match item_node {
|
||||
|
@ -4694,14 +4694,16 @@ fn infer(content: &str) -> String {
|
||||
}
|
||||
|
||||
// sort ranges for consistency
|
||||
types.sort_by_key(|(src_ptr, _)| (src_ptr.ast.range().start(), src_ptr.ast.range().end()));
|
||||
types.sort_by_key(|(src_ptr, _)| {
|
||||
(src_ptr.value.range().start(), src_ptr.value.range().end())
|
||||
});
|
||||
for (src_ptr, ty) in &types {
|
||||
let node = src_ptr.ast.to_node(&src_ptr.file_syntax(&db));
|
||||
let node = src_ptr.value.to_node(&src_ptr.file_syntax(&db));
|
||||
|
||||
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.clone()) {
|
||||
(self_param.self_kw_token().text_range(), "self".to_string())
|
||||
} else {
|
||||
(src_ptr.ast.range(), node.text().to_string().replace("\n", " "))
|
||||
(src_ptr.value.range(), node.text().to_string().replace("\n", " "))
|
||||
};
|
||||
let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" };
|
||||
write!(
|
||||
|
@ -23,7 +23,7 @@ impl TypeAliasData {
|
||||
db: &(impl DefDatabase + AstDatabase),
|
||||
typ: TypeAlias,
|
||||
) -> Arc<TypeAliasData> {
|
||||
let node = typ.source(db).ast;
|
||||
let node = typ.source(db).value;
|
||||
let name = node.name().map_or_else(Name::missing, |n| n.as_name());
|
||||
let type_ref = node.type_ref().map(TypeRef::from_ast);
|
||||
Arc::new(TypeAliasData { name, type_ref })
|
||||
|
@ -54,8 +54,8 @@ impl StructData {
|
||||
id: StructOrUnionId,
|
||||
) -> Arc<StructData> {
|
||||
let src = id.source(db);
|
||||
let name = src.ast.name().map(|n| n.as_name());
|
||||
let variant_data = VariantData::new(src.ast.kind());
|
||||
let name = src.value.name().map(|n| n.as_name());
|
||||
let variant_data = VariantData::new(src.value.kind());
|
||||
let variant_data = Arc::new(variant_data);
|
||||
Arc::new(StructData { name, variant_data })
|
||||
}
|
||||
@ -64,9 +64,9 @@ impl StructData {
|
||||
impl EnumData {
|
||||
pub(crate) fn enum_data_query(db: &impl DefDatabase2, e: EnumId) -> Arc<EnumData> {
|
||||
let src = e.source(db);
|
||||
let name = src.ast.name().map(|n| n.as_name());
|
||||
let name = src.value.name().map(|n| n.as_name());
|
||||
let variants = src
|
||||
.ast
|
||||
.value
|
||||
.variant_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.variants())
|
||||
|
@ -73,8 +73,8 @@ impl Expander {
|
||||
std::mem::forget(mark);
|
||||
}
|
||||
|
||||
fn to_source<T>(&self, ast: T) -> Source<T> {
|
||||
Source { file_id: self.current_file_id, ast }
|
||||
fn to_source<T>(&self, value: T) -> Source<T> {
|
||||
Source { file_id: self.current_file_id, value }
|
||||
}
|
||||
|
||||
fn parse_path(&mut self, path: ast::Path) -> Option<Path> {
|
||||
@ -150,16 +150,16 @@ impl Body {
|
||||
let (file_id, module, body) = match def {
|
||||
DefWithBodyId::FunctionId(f) => {
|
||||
let src = f.source(db);
|
||||
params = src.ast.param_list();
|
||||
(src.file_id, f.module(db), src.ast.body().map(ast::Expr::from))
|
||||
params = src.value.param_list();
|
||||
(src.file_id, f.module(db), src.value.body().map(ast::Expr::from))
|
||||
}
|
||||
DefWithBodyId::ConstId(c) => {
|
||||
let src = c.source(db);
|
||||
(src.file_id, c.module(db), src.ast.body())
|
||||
(src.file_id, c.module(db), src.value.body())
|
||||
}
|
||||
DefWithBodyId::StaticId(s) => {
|
||||
let src = s.source(db);
|
||||
(src.file_id, s.module(db), src.ast.body())
|
||||
(src.file_id, s.module(db), src.value.body())
|
||||
}
|
||||
};
|
||||
let expander = Expander::new(db, file_id, module);
|
||||
|
@ -210,8 +210,9 @@ mod tests {
|
||||
let scopes = db.expr_scopes(function.into());
|
||||
let (_body, source_map) = db.body_with_source_map(function.into());
|
||||
|
||||
let expr_id =
|
||||
source_map.node_expr(Source { file_id: file_id.into(), ast: &marker.into() }).unwrap();
|
||||
let expr_id = source_map
|
||||
.node_expr(Source { file_id: file_id.into(), value: &marker.into() })
|
||||
.unwrap();
|
||||
let scope = scopes.scope_for(expr_id);
|
||||
|
||||
let actual = scopes
|
||||
@ -317,14 +318,14 @@ mod tests {
|
||||
let expr_scope = {
|
||||
let expr_ast = name_ref.syntax().ancestors().find_map(ast::Expr::cast).unwrap();
|
||||
let expr_id =
|
||||
source_map.node_expr(Source { file_id: file_id.into(), ast: &expr_ast }).unwrap();
|
||||
source_map.node_expr(Source { file_id: file_id.into(), value: &expr_ast }).unwrap();
|
||||
scopes.scope_for(expr_id).unwrap()
|
||||
};
|
||||
|
||||
let resolved = scopes.resolve_name_in_scope(expr_scope, &name_ref.as_name()).unwrap();
|
||||
let pat_src = source_map.pat_syntax(resolved.pat()).unwrap();
|
||||
|
||||
let local_name = pat_src.ast.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
|
||||
let local_name = pat_src.value.either(|it| it.syntax_node_ptr(), |it| it.syntax_node_ptr());
|
||||
assert_eq!(local_name.range(), expected_name.syntax().text_range());
|
||||
}
|
||||
|
||||
|
@ -20,7 +20,7 @@ impl Diagnostic for UnresolvedModule {
|
||||
"unresolved module".to_string()
|
||||
}
|
||||
fn source(&self) -> Source<SyntaxNodePtr> {
|
||||
Source { file_id: self.file, ast: self.decl.into() }
|
||||
Source { file_id: self.file, value: self.decl.into() }
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static) {
|
||||
self
|
||||
|
@ -25,11 +25,11 @@ impl ImplData {
|
||||
let src = id.source(db);
|
||||
let items = db.ast_id_map(src.file_id);
|
||||
|
||||
let target_trait = src.ast.target_trait().map(TypeRef::from_ast);
|
||||
let target_type = TypeRef::from_ast_opt(src.ast.target_type());
|
||||
let negative = src.ast.is_negative();
|
||||
let target_trait = src.value.target_trait().map(TypeRef::from_ast);
|
||||
let target_type = TypeRef::from_ast_opt(src.value.target_type());
|
||||
let negative = src.value.is_negative();
|
||||
|
||||
let items = if let Some(item_list) = src.ast.item_list() {
|
||||
let items = if let Some(item_list) = src.value.item_list() {
|
||||
let ctx = LocationCtx::new(db, id.module(db), src.file_id);
|
||||
item_list
|
||||
.impl_items()
|
||||
|
@ -80,7 +80,7 @@ impl ModuleSource {
|
||||
|
||||
pub fn from_child_node(db: &impl db::DefDatabase2, child: Source<&SyntaxNode>) -> ModuleSource {
|
||||
if let Some(m) =
|
||||
child.ast.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
|
||||
child.value.ancestors().filter_map(ast::Module::cast).find(|it| !it.has_semi())
|
||||
{
|
||||
ModuleSource::Module(m)
|
||||
} else {
|
||||
@ -184,8 +184,8 @@ pub trait AstItemDef<N: AstNode>: salsa::InternKey + Clone {
|
||||
}
|
||||
fn source(self, db: &(impl AstDatabase + InternDatabase)) -> Source<N> {
|
||||
let loc = self.lookup_intern(db);
|
||||
let ast = loc.ast_id.to_node(db);
|
||||
Source { file_id: loc.ast_id.file_id(), ast }
|
||||
let value = loc.ast_id.to_node(db);
|
||||
Source { file_id: loc.ast_id.file_id(), value }
|
||||
}
|
||||
fn module(self, db: &impl InternDatabase) -> ModuleId {
|
||||
let loc = self.lookup_intern(db);
|
||||
|
@ -327,7 +327,7 @@ impl RawItemsCollector {
|
||||
|
||||
let mut buf = Vec::new();
|
||||
Path::expand_use_item(
|
||||
Source { ast: use_item, file_id: self.file_id },
|
||||
Source { value: use_item, file_id: self.file_id },
|
||||
&self.hygiene,
|
||||
|path, use_tree, is_glob, alias| {
|
||||
let import_data = ImportData {
|
||||
|
@ -71,7 +71,7 @@ impl Path {
|
||||
hygiene: &Hygiene,
|
||||
mut cb: impl FnMut(Path, &ast::UseTree, bool, Option<Name>),
|
||||
) {
|
||||
if let Some(tree) = item_src.ast.use_tree() {
|
||||
if let Some(tree) = item_src.value.use_tree() {
|
||||
expand_use_tree(None, tree, hygiene, &mut cb);
|
||||
}
|
||||
}
|
||||
|
@ -24,7 +24,7 @@ pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static {
|
||||
fn message(&self) -> String;
|
||||
fn source(&self) -> Source<SyntaxNodePtr>;
|
||||
fn highlight_range(&self) -> TextRange {
|
||||
self.source().ast.range()
|
||||
self.source().value.range()
|
||||
}
|
||||
fn as_any(&self) -> &(dyn Any + Send + 'static);
|
||||
}
|
||||
@ -37,7 +37,7 @@ pub trait AstDiagnostic {
|
||||
impl dyn Diagnostic {
|
||||
pub fn syntax_node(&self, db: &impl AstDatabase) -> SyntaxNode {
|
||||
let node = db.parse_or_expand(self.source().file_id).unwrap();
|
||||
self.source().ast.to_node(&node)
|
||||
self.source().value.to_node(&node)
|
||||
}
|
||||
|
||||
pub fn downcast_ref<D: Diagnostic>(&self) -> Option<&D> {
|
||||
|
@ -166,19 +166,19 @@ impl ExpansionInfo {
|
||||
pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
|
||||
assert_eq!(token.file_id, self.arg.file_id);
|
||||
let range =
|
||||
token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
|
||||
token.value.text_range().checked_sub(self.arg.value.syntax().text_range().start())?;
|
||||
let token_id = self.macro_arg.1.token_by_range(range)?;
|
||||
let token_id = self.macro_def.0.map_id_down(token_id);
|
||||
|
||||
let range = self.exp_map.range_by_token(token_id)?;
|
||||
|
||||
let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?;
|
||||
let token = algo::find_covering_element(&self.expanded.value, range).into_token()?;
|
||||
|
||||
Some(self.expanded.with_ast(token))
|
||||
}
|
||||
|
||||
pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
|
||||
let token_id = self.exp_map.token_by_range(token.ast.text_range())?;
|
||||
let token_id = self.exp_map.token_by_range(token.value.text_range())?;
|
||||
|
||||
let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
|
||||
let (token_map, tt) = match origin {
|
||||
@ -188,8 +188,8 @@ impl ExpansionInfo {
|
||||
|
||||
let range = token_map.range_by_token(token_id)?;
|
||||
let token = algo::find_covering_element(
|
||||
tt.ast.syntax(),
|
||||
range + tt.ast.syntax().text_range().start(),
|
||||
tt.value.syntax(),
|
||||
range + tt.value.syntax().text_range().start(),
|
||||
)
|
||||
.into_token()?;
|
||||
Some(tt.with_ast(token))
|
||||
@@ -240,30 +240,34 @@ impl<N: AstNode> AstId<N> {
  }
  }

  /// FIXME: https://github.com/matklad/with ?
+ /// `Source<T>` stores a value of `T` inside a particular file/syntax tree.
+ ///
+ /// Typical usages are:
+ ///
+ /// * `Source<SyntaxNode>` -- syntax node in a file
+ /// * `Source<ast::FnDef>` -- ast node in a file
+ /// * `Source<TextUnit>` -- offset in a file
  #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
  pub struct Source<T> {
  pub file_id: HirFileId,
- // FIXME: this stores all kind of things, not only `ast`.
- // There should be a better name...
- pub ast: T,
+ pub value: T,
  }

  impl<T> Source<T> {
- pub fn new(file_id: HirFileId, ast: T) -> Source<T> {
-     Source { file_id, ast }
+ pub fn new(file_id: HirFileId, value: T) -> Source<T> {
+     Source { file_id, value }
  }

- // Similarly, naming here is stupid...
- pub fn with_ast<U>(&self, ast: U) -> Source<U> {
-     Source::new(self.file_id, ast)
+ pub fn with_ast<U>(&self, value: U) -> Source<U> {
+     Source::new(self.file_id, value)
  }

  pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
-     Source::new(self.file_id, f(self.ast))
+     Source::new(self.file_id, f(self.value))
  }
  pub fn as_ref(&self) -> Source<&T> {
-     self.with_ast(&self.ast)
+     self.with_ast(&self.value)
  }
  pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
  db.parse_or_expand(self.file_id).expect("source created from invalid file")
@ -54,7 +54,7 @@ impl<'a> CompletionContext<'a> {
|
||||
let src = hir::ModuleSource::from_position(db, position);
|
||||
let module = hir::Module::from_definition(
|
||||
db,
|
||||
hir::Source { file_id: position.file_id.into(), ast: src },
|
||||
hir::Source { file_id: position.file_id.into(), value: src },
|
||||
);
|
||||
let token =
|
||||
original_parse.tree().syntax().token_at_offset(position.offset).left_biased()?;
|
||||
|
@ -169,7 +169,7 @@ impl Completions {
|
||||
None => return,
|
||||
};
|
||||
|
||||
let ast_node = macro_.source(ctx.db).ast;
|
||||
let ast_node = macro_.source(ctx.db).value;
|
||||
let detail = macro_label(&ast_node);
|
||||
|
||||
let docs = macro_.docs(ctx.db);
|
||||
@ -201,7 +201,7 @@ impl Completions {
|
||||
) {
|
||||
let data = func.data(ctx.db);
|
||||
let name = name.unwrap_or_else(|| data.name().to_string());
|
||||
let ast_node = func.source(ctx.db).ast;
|
||||
let ast_node = func.source(ctx.db).value;
|
||||
let detail = function_label(&ast_node);
|
||||
|
||||
let mut builder =
|
||||
@ -234,7 +234,7 @@ impl Completions {
|
||||
}
|
||||
|
||||
pub(crate) fn add_const(&mut self, ctx: &CompletionContext, constant: hir::Const) {
|
||||
let ast_node = constant.source(ctx.db).ast;
|
||||
let ast_node = constant.source(ctx.db).value;
|
||||
let name = match ast_node.name() {
|
||||
Some(name) => name,
|
||||
_ => return,
|
||||
@ -250,7 +250,7 @@ impl Completions {
|
||||
}
|
||||
|
||||
pub(crate) fn add_type_alias(&mut self, ctx: &CompletionContext, type_alias: hir::TypeAlias) {
|
||||
let type_def = type_alias.source(ctx.db).ast;
|
||||
let type_def = type_alias.source(ctx.db).value;
|
||||
let name = match type_def.name() {
|
||||
Some(name) => name,
|
||||
_ => return,
|
||||
|
@ -96,7 +96,7 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
|
||||
});
|
||||
let source_file = db.parse(file_id).tree();
|
||||
let src =
|
||||
hir::Source { file_id: file_id.into(), ast: hir::ModuleSource::SourceFile(source_file) };
|
||||
hir::Source { file_id: file_id.into(), value: hir::ModuleSource::SourceFile(source_file) };
|
||||
if let Some(m) = hir::Module::from_definition(db, src) {
|
||||
m.diagnostics(db, &mut sink);
|
||||
};
|
||||
|
@ -48,12 +48,12 @@ impl FunctionSignature {
|
||||
|
||||
pub(crate) fn from_hir(db: &db::RootDatabase, function: hir::Function) -> Self {
|
||||
let doc = function.docs(db);
|
||||
let ast_node = function.source(db).ast;
|
||||
let ast_node = function.source(db).value;
|
||||
FunctionSignature::from(&ast_node).with_doc_opt(doc)
|
||||
}
|
||||
|
||||
pub(crate) fn from_struct(db: &db::RootDatabase, st: hir::Struct) -> Option<Self> {
|
||||
let node: ast::StructDef = st.source(db).ast;
|
||||
let node: ast::StructDef = st.source(db).value;
|
||||
match node.kind() {
|
||||
ast::StructKind::Named(_) => return None,
|
||||
_ => (),
|
||||
@ -87,7 +87,7 @@ impl FunctionSignature {
|
||||
db: &db::RootDatabase,
|
||||
variant: hir::EnumVariant,
|
||||
) -> Option<Self> {
|
||||
let node: ast::EnumVariant = variant.source(db).ast;
|
||||
let node: ast::EnumVariant = variant.source(db).value;
|
||||
match node.kind() {
|
||||
ast::StructKind::Named(_) | ast::StructKind::Unit => return None,
|
||||
_ => (),
|
||||
@ -126,7 +126,7 @@ impl FunctionSignature {
|
||||
}
|
||||
|
||||
pub(crate) fn from_macro(db: &db::RootDatabase, macro_def: hir::MacroDef) -> Option<Self> {
|
||||
let node: ast::MacroCall = macro_def.source(db).ast;
|
||||
let node: ast::MacroCall = macro_def.source(db).value;
|
||||
|
||||
let params = vec![];
|
||||
|
||||
|
@ -86,9 +86,9 @@ impl NavigationTarget {
|
||||
name,
|
||||
None,
|
||||
frange.range,
|
||||
src.ast.syntax().kind(),
|
||||
src.ast.doc_comment_text(),
|
||||
src.ast.short_label(),
|
||||
src.value.syntax().kind(),
|
||||
src.value.doc_comment_text(),
|
||||
src.value.short_label(),
|
||||
);
|
||||
}
|
||||
module.to_nav(db)
|
||||
@ -146,9 +146,9 @@ impl NavigationTarget {
|
||||
description: Option<String>,
|
||||
) -> NavigationTarget {
|
||||
//FIXME: use `_` instead of empty string
|
||||
let name = node.ast.name().map(|it| it.text().clone()).unwrap_or_default();
|
||||
let name = node.value.name().map(|it| it.text().clone()).unwrap_or_default();
|
||||
let focus_range =
|
||||
node.ast.name().map(|it| original_range(db, node.with_ast(it.syntax())).range);
|
||||
node.value.name().map(|it| original_range(db, node.with_ast(it.syntax())).range);
|
||||
let frange = original_range(db, node.map(|it| it.syntax()));
|
||||
|
||||
NavigationTarget::from_syntax(
|
||||
@ -156,7 +156,7 @@ impl NavigationTarget {
|
||||
name,
|
||||
focus_range,
|
||||
frange.range,
|
||||
node.ast.syntax().kind(),
|
||||
node.value.syntax().kind(),
|
||||
docs,
|
||||
description,
|
||||
)
|
||||
@ -220,8 +220,8 @@ where
|
||||
NavigationTarget::from_named(
|
||||
db,
|
||||
src.as_ref().map(|it| it as &dyn ast::NameOwner),
|
||||
src.ast.doc_comment_text(),
|
||||
src.ast.short_label(),
|
||||
src.value.doc_comment_text(),
|
||||
src.value.short_label(),
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -230,7 +230,7 @@ impl ToNav for hir::Module {
|
||||
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
|
||||
let src = self.definition_source(db);
|
||||
let name = self.name(db).map(|it| it.to_string().into()).unwrap_or_default();
|
||||
match &src.ast {
|
||||
match &src.value {
|
||||
ModuleSource::SourceFile(node) => {
|
||||
let frange = original_range(db, src.with_ast(node.syntax()));
|
||||
|
||||
@ -271,7 +271,7 @@ impl ToNav for hir::ImplBlock {
|
||||
"impl".into(),
|
||||
None,
|
||||
frange.range,
|
||||
src.ast.syntax().kind(),
|
||||
src.value.syntax().kind(),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
@ -282,7 +282,7 @@ impl ToNav for hir::StructField {
|
||||
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
|
||||
let src = self.source(db);
|
||||
|
||||
match &src.ast {
|
||||
match &src.value {
|
||||
FieldSource::Named(it) => NavigationTarget::from_named(
|
||||
db,
|
||||
src.with_ast(it),
|
||||
@ -308,11 +308,11 @@ impl ToNav for hir::StructField {
|
||||
impl ToNav for hir::MacroDef {
|
||||
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
|
||||
let src = self.source(db);
|
||||
log::debug!("nav target {:#?}", src.ast.syntax());
|
||||
log::debug!("nav target {:#?}", src.value.syntax());
|
||||
NavigationTarget::from_named(
|
||||
db,
|
||||
src.as_ref().map(|it| it as &dyn ast::NameOwner),
|
||||
src.ast.doc_comment_text(),
|
||||
src.value.doc_comment_text(),
|
||||
None,
|
||||
)
|
||||
}
|
||||
@ -341,7 +341,7 @@ impl ToNav for hir::AssocItem {
|
||||
impl ToNav for hir::Local {
|
||||
fn to_nav(&self, db: &RootDatabase) -> NavigationTarget {
|
||||
let src = self.source(db);
|
||||
let (full_range, focus_range) = match src.ast {
|
||||
let (full_range, focus_range) = match src.value {
|
||||
Either::A(it) => {
|
||||
(it.syntax().text_range(), it.name().map(|it| it.syntax().text_range()))
|
||||
}
|
||||
|
@ -12,7 +12,7 @@ pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> Fi
|
||||
None => {
|
||||
return FileRange {
|
||||
file_id: node.file_id.original_file(db),
|
||||
range: node.ast.text_range(),
|
||||
range: node.value.text_range(),
|
||||
}
|
||||
}
|
||||
Some(it) => it,
|
||||
@ -25,14 +25,18 @@ pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> Fi
|
||||
// *Second*, we should handle recurside macro expansions
|
||||
|
||||
let token = node
|
||||
.ast
|
||||
.value
|
||||
.descendants_with_tokens()
|
||||
.filter_map(|it| it.into_token())
|
||||
.find_map(|it| expansion.map_token_up(node.with_ast(&it)));
|
||||
|
||||
match token {
|
||||
Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
|
||||
None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
|
||||
Some(it) => {
|
||||
FileRange { file_id: it.file_id.original_file(db), range: it.value.text_range() }
|
||||
}
|
||||
None => {
|
||||
FileRange { file_id: node.file_id.original_file(db), range: node.value.text_range() }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -44,13 +48,13 @@ pub(crate) fn descend_into_macros(
|
||||
let src = Source::new(file_id.into(), token);
|
||||
|
||||
successors(Some(src), |token| {
|
||||
let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
|
||||
let macro_call = token.value.ancestors().find_map(ast::MacroCall::cast)?;
|
||||
let tt = macro_call.token_tree()?;
|
||||
if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
|
||||
if !token.value.text_range().is_subrange(&tt.syntax().text_range()) {
|
||||
return None;
|
||||
}
|
||||
let source_analyzer =
|
||||
hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
|
||||
hir::SourceAnalyzer::new(db, token.with_ast(token.value.parent()).as_ref(), None);
|
||||
let exp = source_analyzer.expand(db, ¯o_call)?;
|
||||
exp.map_token_down(db, token.as_ref())
|
||||
})
|
||||
|
@ -46,7 +46,7 @@ fn expand_macro_recur(
|
||||
let mut replaces = FxHashMap::default();
|
||||
|
||||
for child in children.into_iter() {
|
||||
let source = hir::Source::new(macro_file_id, source.ast);
|
||||
let source = hir::Source::new(macro_file_id, source.value);
|
||||
let new_node = expand_macro_recur(db, source, &child)?;
|
||||
|
||||
replaces.insert(child.syntax().clone().into(), new_node.into());
|
||||
@ -139,7 +139,7 @@ mod tests {
|
||||
}
|
||||
macro_rules! baz {
|
||||
() => { foo!(); }
|
||||
}
|
||||
}
|
||||
f<|>oo!();
|
||||
"#,
|
||||
);
|
||||
@ -156,7 +156,7 @@ fn b(){}
|
||||
r#"
|
||||
//- /lib.rs
|
||||
macro_rules! foo {
|
||||
() => {
|
||||
() => {
|
||||
fn some_thing() -> u32 {
|
||||
let a = 0;
|
||||
a + 10
|
||||
@ -172,7 +172,7 @@ fn b(){}
|
||||
fn some_thing() -> u32 {
|
||||
let a = 0;
|
||||
a+10
|
||||
}
|
||||
}
|
||||
"###);
|
||||
}
|
||||
}
|
||||
|
@ -23,7 +23,7 @@ pub(crate) fn goto_definition(
|
||||
let token = descend_into_macros(db, position.file_id, token);
|
||||
|
||||
let res = match_ast! {
|
||||
match (token.ast.parent()) {
|
||||
match (token.value.parent()) {
|
||||
ast::NameRef(name_ref) => {
|
||||
let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec();
|
||||
RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
|
||||
@ -84,7 +84,7 @@ pub(crate) fn reference_definition(
|
||||
};
|
||||
|
||||
// Fallback index based approach:
|
||||
let navs = crate::symbol_index::index_resolve(db, name_ref.ast)
|
||||
let navs = crate::symbol_index::index_resolve(db, name_ref.value)
|
||||
.into_iter()
|
||||
.map(|s| s.to_nav(db))
|
||||
.collect();
|
||||
@ -95,7 +95,7 @@ pub(crate) fn name_definition(
|
||||
db: &RootDatabase,
|
||||
name: Source<&ast::Name>,
|
||||
) -> Option<Vec<NavigationTarget>> {
|
||||
let parent = name.ast.syntax().parent()?;
|
||||
let parent = name.value.syntax().parent()?;
|
||||
|
||||
if let Some(module) = ast::Module::cast(parent.clone()) {
|
||||
if module.has_semi() {
|
||||
@ -116,7 +116,7 @@ pub(crate) fn name_definition(
|
||||
|
||||
fn named_target(db: &RootDatabase, node: Source<&SyntaxNode>) -> Option<NavigationTarget> {
|
||||
match_ast! {
|
||||
match (node.ast) {
|
||||
match (node.value) {
|
||||
ast::StructDef(it) => {
|
||||
Some(NavigationTarget::from_named(
|
||||
db,
|
||||
|
@ -16,7 +16,7 @@ pub(crate) fn goto_type_definition(
|
||||
let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
|
||||
let token = descend_into_macros(db, position.file_id, token);
|
||||
|
||||
let node = token.ast.ancestors().find_map(|token| {
|
||||
let node = token.value.ancestors().find_map(|token| {
|
||||
token
|
||||
.ancestors()
|
||||
.find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())
|
||||
|
@ -101,11 +101,11 @@ fn hover_text_from_name_kind(
|
||||
return match name_kind {
|
||||
Macro(it) => {
|
||||
let src = it.source(db);
|
||||
hover_text(src.ast.doc_comment_text(), Some(macro_label(&src.ast)))
|
||||
hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value)))
|
||||
}
|
||||
Field(it) => {
|
||||
let src = it.source(db);
|
||||
match src.ast {
|
||||
match src.value {
|
||||
hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()),
|
||||
_ => None,
|
||||
}
|
||||
@ -116,7 +116,7 @@ fn hover_text_from_name_kind(
|
||||
hir::AssocItem::TypeAlias(it) => from_def_source(db, it),
|
||||
},
|
||||
Def(it) => match it {
|
||||
hir::ModuleDef::Module(it) => match it.definition_source(db).ast {
|
||||
hir::ModuleDef::Module(it) => match it.definition_source(db).value {
|
||||
hir::ModuleSource::Module(it) => {
|
||||
hover_text(it.doc_comment_text(), it.short_label())
|
||||
}
|
||||
@ -158,7 +158,7 @@ fn hover_text_from_name_kind(
|
||||
A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
|
||||
{
|
||||
let src = def.source(db);
|
||||
hover_text(src.ast.doc_comment_text(), src.ast.short_label())
|
||||
hover_text(src.value.doc_comment_text(), src.value.short_label())
|
||||
}
|
||||
}
|
||||
|
||||
@ -170,7 +170,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
|
||||
let mut res = HoverResult::new();
|
||||
|
||||
let mut range = match_ast! {
|
||||
match (token.ast.parent()) {
|
||||
match (token.value.parent()) {
|
||||
ast::NameRef(name_ref) => {
|
||||
let mut no_fallback = false;
|
||||
if let Some(name_kind) =
|
||||
@ -211,7 +211,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
|
||||
};
|
||||
|
||||
if range.is_none() {
|
||||
let node = token.ast.ancestors().find(|n| {
|
||||
let node = token.value.ancestors().find(|n| {
|
||||
ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()
|
||||
})?;
|
||||
let frange = FileRange { file_id: position.file_id, range: node.text_range() };
|
||||
|
@ -16,7 +16,7 @@ pub(crate) fn goto_implementation(
|
||||
let src = hir::ModuleSource::from_position(db, position);
|
||||
let module = hir::Module::from_definition(
|
||||
db,
|
||||
hir::Source { file_id: position.file_id.into(), ast: src },
|
||||
hir::Source { file_id: position.file_id.into(), value: src },
|
||||
)?;
|
||||
|
||||
if let Some(nominal_def) = find_node_at_offset::<ast::NominalDef>(&syntax, position.offset) {
|
||||
@ -42,11 +42,11 @@ fn impls_for_def(
|
||||
) -> Option<Vec<NavigationTarget>> {
|
||||
let ty = match node {
|
||||
ast::NominalDef::StructDef(def) => {
|
||||
let src = hir::Source { file_id: position.file_id.into(), ast: def.clone() };
|
||||
let src = hir::Source { file_id: position.file_id.into(), value: def.clone() };
|
||||
hir::Struct::from_source(db, src)?.ty(db)
|
||||
}
|
||||
ast::NominalDef::EnumDef(def) => {
|
||||
let src = hir::Source { file_id: position.file_id.into(), ast: def.clone() };
|
||||
let src = hir::Source { file_id: position.file_id.into(), value: def.clone() };
|
||||
hir::Enum::from_source(db, src)?.ty(db)
|
||||
}
|
||||
};
|
||||
@ -69,7 +69,7 @@ fn impls_for_trait(
|
||||
node: &ast::TraitDef,
|
||||
module: hir::Module,
|
||||
) -> Option<Vec<NavigationTarget>> {
|
||||
let src = hir::Source { file_id: position.file_id.into(), ast: node.clone() };
|
||||
let src = hir::Source { file_id: position.file_id.into(), value: node.clone() };
|
||||
let tr = hir::Trait::from_source(db, src)?;
|
||||
|
||||
let krate = module.krate();
|
||||
|
@ -10,7 +10,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
|
||||
let src = hir::ModuleSource::from_position(db, position);
|
||||
let module = match hir::Module::from_definition(
|
||||
db,
|
||||
hir::Source { file_id: position.file_id.into(), ast: src },
|
||||
hir::Source { file_id: position.file_id.into(), value: src },
|
||||
) {
|
||||
None => return Vec::new(),
|
||||
Some(it) => it,
|
||||
@ -23,7 +23,8 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<Na
|
||||
pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
|
||||
let src = hir::ModuleSource::from_file_id(db, file_id);
|
||||
let module =
|
||||
match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), ast: src }) {
|
||||
match hir::Module::from_definition(db, hir::Source { file_id: file_id.into(), value: src })
|
||||
{
|
||||
Some(it) => it,
|
||||
None => return Vec::new(),
|
||||
};
|
||||
|
@ -13,7 +13,7 @@ use crate::db::RootDatabase;
|
||||
|
||||
pub(crate) fn classify_name(db: &RootDatabase, name: Source<&ast::Name>) -> Option<NameDefinition> {
|
||||
let _p = profile("classify_name");
|
||||
let parent = name.ast.syntax().parent()?;
|
||||
let parent = name.value.syntax().parent()?;
|
||||
|
||||
match_ast! {
|
||||
match parent {
|
||||
@ -121,7 +121,7 @@ pub(crate) fn classify_name_ref(
|
||||
) -> Option<NameDefinition> {
|
||||
let _p = profile("classify_name_ref");
|
||||
|
||||
let parent = name_ref.ast.syntax().parent()?;
|
||||
let parent = name_ref.value.syntax().parent()?;
|
||||
let analyzer = SourceAnalyzer::new(db, name_ref.map(|it| it.syntax()), None);
|
||||
|
||||
if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
|
||||
@ -142,7 +142,7 @@ pub(crate) fn classify_name_ref(
|
||||
tested_by!(goto_definition_works_for_record_fields);
|
||||
if let Some(record_lit) = record_field.syntax().ancestors().find_map(ast::RecordLit::cast) {
|
||||
let variant_def = analyzer.resolve_record_literal(&record_lit)?;
|
||||
let hir_path = Path::from_name_ref(name_ref.ast);
|
||||
let hir_path = Path::from_name_ref(name_ref.value);
|
||||
let hir_name = hir_path.as_ident()?;
|
||||
let field = variant_def.field(db, hir_name)?;
|
||||
return Some(from_struct_field(db, field));
|
||||
@ -162,7 +162,7 @@ pub(crate) fn classify_name_ref(
|
||||
}
|
||||
}
|
||||
|
||||
let path = name_ref.ast.syntax().ancestors().find_map(ast::Path::cast)?;
|
||||
let path = name_ref.value.syntax().ancestors().find_map(ast::Path::cast)?;
|
||||
let resolved = analyzer.resolve_path(db, &path)?;
|
||||
match resolved {
|
||||
PathResolution::Def(def) => Some(from_module_def(db, def, Some(container))),
|
||||
|
@ -32,9 +32,9 @@ pub(crate) struct NameDefinition {
|
||||
pub(super) fn from_assoc_item(db: &RootDatabase, item: AssocItem) -> NameDefinition {
|
||||
let container = item.module(db);
|
||||
let visibility = match item {
|
||||
AssocItem::Function(f) => f.source(db).ast.visibility(),
|
||||
AssocItem::Const(c) => c.source(db).ast.visibility(),
|
||||
AssocItem::TypeAlias(a) => a.source(db).ast.visibility(),
|
||||
AssocItem::Function(f) => f.source(db).value.visibility(),
|
||||
AssocItem::Const(c) => c.source(db).value.visibility(),
|
||||
AssocItem::TypeAlias(a) => a.source(db).value.visibility(),
|
||||
};
|
||||
let kind = NameKind::AssocItem(item);
|
||||
NameDefinition { kind, container, visibility }
|
||||
@ -45,8 +45,8 @@ pub(super) fn from_struct_field(db: &RootDatabase, field: StructField) -> NameDe
|
||||
let parent = field.parent_def(db);
|
||||
let container = parent.module(db);
|
||||
let visibility = match parent {
|
||||
VariantDef::Struct(s) => s.source(db).ast.visibility(),
|
||||
VariantDef::EnumVariant(e) => e.source(db).ast.parent_enum().visibility(),
|
||||
VariantDef::Struct(s) => s.source(db).value.visibility(),
|
||||
VariantDef::EnumVariant(e) => e.source(db).value.parent_enum().visibility(),
|
||||
};
|
||||
NameDefinition { kind, container, visibility }
|
||||
}
|
||||
@ -60,22 +60,22 @@ pub(super) fn from_module_def(
|
||||
let (container, visibility) = match def {
|
||||
ModuleDef::Module(it) => {
|
||||
let container = it.parent(db).or_else(|| Some(it)).unwrap();
|
||||
let visibility = it.declaration_source(db).and_then(|s| s.ast.visibility());
|
||||
let visibility = it.declaration_source(db).and_then(|s| s.value.visibility());
|
||||
(container, visibility)
|
||||
}
|
||||
ModuleDef::EnumVariant(it) => {
|
||||
let container = it.module(db);
|
||||
let visibility = it.source(db).ast.parent_enum().visibility();
|
||||
let visibility = it.source(db).value.parent_enum().visibility();
|
||||
(container, visibility)
|
||||
}
|
||||
ModuleDef::Function(it) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Const(it) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Static(it) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Trait(it) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).ast.visibility()),
|
||||
ModuleDef::Function(it) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::Const(it) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::Static(it) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::Trait(it) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::TypeAlias(it) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::Adt(Adt::Struct(it)) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::Adt(Adt::Union(it)) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::Adt(Adt::Enum(it)) => (it.module(db), it.source(db).value.visibility()),
|
||||
ModuleDef::BuiltinType(..) => (module.unwrap(), None),
|
||||
};
|
||||
NameDefinition { kind, container, visibility }
|
||||
|
@ -55,11 +55,11 @@ fn rename_mod(
|
||||
) -> Option<SourceChange> {
|
||||
let mut source_file_edits = Vec::new();
|
||||
let mut file_system_edits = Vec::new();
|
||||
let module_src = hir::Source { file_id: position.file_id.into(), ast: ast_module.clone() };
|
||||
let module_src = hir::Source { file_id: position.file_id.into(), value: ast_module.clone() };
|
||||
if let Some(module) = hir::Module::from_declaration(db, module_src) {
|
||||
let src = module.definition_source(db);
|
||||
let file_id = src.file_id.original_file(db);
|
||||
match src.ast {
|
||||
match src.value {
|
||||
ModuleSource::SourceFile(..) => {
|
||||
let mod_path: RelativePathBuf = db.file_relative_path(file_id);
|
||||
// mod is defined in path/to/dir/mod.rs
|
||||
|
@ -73,9 +73,9 @@ impl NameDefinition {
|
||||
|
||||
if let NameKind::Local(var) = self.kind {
|
||||
let range = match var.parent(db) {
|
||||
DefWithBody::Function(f) => f.source(db).ast.syntax().text_range(),
|
||||
DefWithBody::Const(c) => c.source(db).ast.syntax().text_range(),
|
||||
DefWithBody::Static(s) => s.source(db).ast.syntax().text_range(),
|
||||
DefWithBody::Function(f) => f.source(db).value.syntax().text_range(),
|
||||
DefWithBody::Const(c) => c.source(db).value.syntax().text_range(),
|
||||
DefWithBody::Static(s) => s.source(db).value.syntax().text_range(),
|
||||
};
|
||||
let mut res = FxHashMap::default();
|
||||
res.insert(file_id, Some(range));
|
||||
@ -91,7 +91,7 @@ impl NameDefinition {
|
||||
let parent_src = parent_module.definition_source(db);
|
||||
let file_id = parent_src.file_id.original_file(db);
|
||||
|
||||
match parent_src.ast {
|
||||
match parent_src.value {
|
||||
ModuleSource::Module(m) => {
|
||||
let range = Some(m.syntax().text_range());
|
||||
res.insert(file_id, range);
|
||||
@ -135,7 +135,7 @@ impl NameDefinition {
|
||||
}
|
||||
|
||||
let mut res = FxHashMap::default();
|
||||
let range = match module_src.ast {
|
||||
let range = match module_src.value {
|
||||
ModuleSource::Module(m) => Some(m.syntax().text_range()),
|
||||
ModuleSource::SourceFile(_) => None,
|
||||
};
|
||||
|