1408: Associated type basics & Deref support r=matklad a=flodiebold

This adds the necessary Chalk integration to handle associated types and uses it to support `Deref` in the `*` operator and in autoderef; so e.g. dot completions through an `Arc` now work.

It doesn't yet implement resolution of associated types in paths, though. There's also a big FIXME about correctly handling variables in the solution we get from Chalk.
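To illustrate, here is roughly the shape of code this makes work, adapted from the `deref_trait` test added in this change (it's an inference-test fixture rather than standalone compilable code, since `#[lang = "deref"]` is normally reserved for the standard library):

```rust
#[lang = "deref"]
trait Deref {
    type Target;
    fn deref(&self) -> &Self::Target;
}

struct Arc<T>;
impl<T> Deref for Arc<T> {
    // The associated type is what gets normalized through Chalk.
    type Target = T;
}

struct S;
impl S {
    fn foo(&self) -> u128 { 1 }
}

fn test(s: Arc<S>) {
    // `*s` is now inferred as `S`, and `s.foo()` autoderefs through the
    // `Deref` impl, so the whole tuple is inferred as `(S, u128)`.
    (*s, s.foo());
}
```

Dot completion uses the same `deref_by_trait` path, which is why completing on an `Arc<S>` receiver now offers `S`'s methods and fields.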

Co-authored-by: Florian Diebold <flodiebold@gmail.com>
bors[bot] 2019-06-16 12:00:41 +00:00
commit b81caed43f
19 changed files with 455 additions and 71 deletions

Cargo.lock (generated)

@@ -1080,6 +1080,7 @@ dependencies = [
"flexi_logger 0.11.5 (registry+https://github.com/rust-lang/crates.io-index)",
"insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)",
"join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"once_cell 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",


@@ -22,7 +22,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
let expr = match_expr.expr()?;
let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None);
let match_expr_ty = analyzer.type_of(ctx.db, expr)?;
let enum_def = match_expr_ty.autoderef(ctx.db).find_map(|ty| match ty.as_adt() {
let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() {
Some((AdtDef::Enum(e), _)) => Some(e),
_ => None,
})?;


@@ -25,6 +25,7 @@ ra_prof = { path = "../ra_prof" }
chalk-solve = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" }
chalk-rust-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" }
chalk-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" }
lalrpop-intern = "0.15.1"
[dev-dependencies]
flexi_logger = "0.11.0"


@@ -779,6 +779,18 @@ impl Trait {
self.trait_data(db).items().to_vec()
}
pub fn associated_type_by_name(self, db: &impl DefDatabase, name: Name) -> Option<TypeAlias> {
let trait_data = self.trait_data(db);
trait_data
.items()
.iter()
.filter_map(|item| match item {
TraitItem::TypeAlias(t) => Some(*t),
_ => None,
})
.find(|t| t.name(db) == name)
}
pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> {
db.trait_data(self)
}
@@ -831,8 +843,12 @@ impl TypeAlias {
}
}
pub fn type_ref(self, db: &impl DefDatabase) -> Arc<TypeRef> {
db.type_alias_ref(self)
pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> {
db.type_alias_data(self).type_ref.clone()
}
pub fn name(self, db: &impl DefDatabase) -> Name {
db.type_alias_data(self).name.clone()
}
/// Builds a resolver for the type references in this type alias.


@@ -16,9 +16,8 @@ use crate::{
adt::{StructData, EnumData},
impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock},
generics::{GenericParams, GenericDef},
type_ref::TypeRef,
traits::TraitData,
lang_item::{LangItems, LangItemTarget},
lang_item::{LangItems, LangItemTarget}, type_alias::TypeAliasData,
};
// This database has access to source code, so queries here are not really
@@ -113,8 +112,8 @@ pub trait DefDatabase: SourceDatabase {
#[salsa::invoke(crate::FnSignature::fn_signature_query)]
fn fn_signature(&self, func: Function) -> Arc<FnSignature>;
#[salsa::invoke(crate::type_alias::type_alias_ref_query)]
fn type_alias_ref(&self, typ: TypeAlias) -> Arc<TypeRef>;
#[salsa::invoke(crate::type_alias::type_alias_data_query)]
fn type_alias_data(&self, typ: TypeAlias) -> Arc<TypeAliasData>;
#[salsa::invoke(crate::ConstSignature::const_signature_query)]
fn const_signature(&self, konst: Const) -> Arc<ConstSignature>;
@@ -185,6 +184,13 @@ pub trait HirDatabase: DefDatabase + AstDatabase {
krate: Crate,
goal: crate::ty::Canonical<crate::ty::TraitRef>,
) -> Option<crate::ty::traits::Solution>;
#[salsa::invoke(crate::ty::traits::normalize_query)]
fn normalize(
&self,
krate: Crate,
goal: crate::ty::Canonical<crate::ty::ProjectionPredicate>,
) -> Option<crate::ty::traits::Solution>;
}
#[test]


@@ -1,10 +1,11 @@
use std::sync::Arc;
use rustc_hash::FxHashMap;
use ra_syntax::{SmolStr, ast::AttrsOwner};
use ra_syntax::{SmolStr, TreeArc, ast::AttrsOwner};
use crate::{
Crate, DefDatabase, Enum, Function, HirDatabase, ImplBlock, Module, Static, Struct, Trait, AstDatabase,
Crate, DefDatabase, Enum, Function, HirDatabase, ImplBlock, Module,
Static, Struct, Trait, ModuleDef, AstDatabase, HasSource
};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -87,23 +88,51 @@ impl LangItems {
let source = module.definition_source(db).ast;
for (impl_id, _) in impl_blocks.impls.iter() {
let impl_block = source_map.get(&source, impl_id);
let lang_item_name = impl_block
.attrs()
.filter_map(|a| a.as_key_value())
.filter(|(key, _)| key == "lang")
.map(|(_, val)| val)
.nth(0);
if let Some(lang_item_name) = lang_item_name {
if let Some(lang_item_name) = lang_item_name(&*impl_block) {
let imp = ImplBlock::from_id(*module, impl_id);
self.items.entry(lang_item_name).or_insert_with(|| LangItemTarget::ImplBlock(imp));
}
}
// FIXME we should look for the other lang item targets (traits, structs, ...)
for def in module.declarations(db) {
match def {
ModuleDef::Trait(trait_) => {
self.collect_lang_item(db, trait_, LangItemTarget::Trait)
}
ModuleDef::Enum(e) => self.collect_lang_item(db, e, LangItemTarget::Enum),
ModuleDef::Struct(s) => self.collect_lang_item(db, s, LangItemTarget::Struct),
ModuleDef::Function(f) => self.collect_lang_item(db, f, LangItemTarget::Function),
ModuleDef::Static(s) => self.collect_lang_item(db, s, LangItemTarget::Static),
_ => {}
}
}
// Look for lang items in the children
for child in module.children(db) {
self.collect_lang_items_recursive(db, &child);
}
}
fn collect_lang_item<T, N>(
&mut self,
db: &(impl DefDatabase + AstDatabase),
item: T,
constructor: fn(T) -> LangItemTarget,
) where
T: Copy + HasSource<Ast = TreeArc<N>>,
N: AttrsOwner,
{
let node = item.source(db).ast;
if let Some(lang_item_name) = lang_item_name(&*node) {
self.items.entry(lang_item_name).or_insert(constructor(item));
}
}
}
fn lang_item_name<T: AttrsOwner>(node: &T) -> Option<SmolStr> {
node.attrs()
.filter_map(|a| a.as_key_value())
.filter(|(key, _)| key == "lang")
.map(|(_, val)| val)
.nth(0)
}


@@ -46,6 +46,11 @@ impl Name {
Name::new(idx.to_string().into())
}
// Needed for Deref
pub(crate) fn target() -> Name {
Name::new("Target".into())
}
// There should be no way to extract a string out of `Name`: in the
// future, `Name` will include hygiene information, and you can't encode
// hygiene into a String.


@@ -369,6 +369,17 @@ impl SourceAnalyzer {
)
}
pub fn autoderef<'a>(
&'a self,
db: &'a impl HirDatabase,
ty: Ty,
) -> impl Iterator<Item = Ty> + 'a {
// There should be no inference vars in types passed here
// FIXME check that?
let canonical = crate::ty::Canonical { value: ty, num_vars: 0 };
crate::ty::autoderef(db, &self.resolver, canonical).map(|canonical| canonical.value)
}
#[cfg(test)]
pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> {
self.body_source_map.clone().unwrap()


@@ -16,12 +16,14 @@ use std::sync::Arc;
use std::ops::Deref;
use std::{fmt, mem};
use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams};
use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams, TypeAlias};
use display::{HirDisplay, HirFormatter};
pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates, generic_defaults};
pub(crate) use infer::{infer_query, InferenceResult, InferTy};
pub use lower::CallableDef;
pub(crate) use autoderef::autoderef;
pub(crate) use traits::ProjectionPredicate;
/// A type constructor or type name: this might be something like the primitive
/// type `bool`, a struct like `Vec`, or things like function pointers or
@@ -100,6 +102,15 @@ pub struct ApplicationTy {
pub parameters: Substs,
}
/// A "projection" type corresponds to an (unnormalized)
/// projection like `<P0 as Trait<P1..Pn>>::Foo`. Note that the
/// trait and all its parameters are fully known.
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct ProjectionTy {
pub associated_ty: TypeAlias,
pub parameters: Substs,
}
/// A type.
///
/// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents
@@ -216,8 +227,8 @@ impl Deref for Substs {
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct TraitRef {
/// FIXME name?
trait_: Trait,
substs: Substs,
pub trait_: Trait,
pub substs: Substs,
}
impl TraitRef {
@@ -464,6 +475,17 @@ impl Ty {
_ => None,
}
}
/// Shifts up `Ty::Bound` vars by `n`.
pub fn shift_bound_vars(self, n: i32) -> Ty {
self.fold(&mut |ty| match ty {
Ty::Bound(idx) => {
assert!(idx as i32 >= -n);
Ty::Bound((idx as i32 + n) as u32)
}
ty => ty,
})
}
}
impl HirDisplay for &Ty {


@@ -5,17 +5,88 @@
use std::iter::successors;
use crate::HirDatabase;
use super::Ty;
use log::{info, warn};
impl Ty {
/// Iterates over the possible derefs of `ty`.
pub fn autoderef<'a>(self, db: &'a impl HirDatabase) -> impl Iterator<Item = Ty> + 'a {
successors(Some(self), move |ty| ty.autoderef_step(db))
}
use crate::{HirDatabase, Name, Resolver, HasGenericParams};
use super::{traits::Solution, Ty, Canonical};
fn autoderef_step(&self, _db: &impl HirDatabase) -> Option<Ty> {
// FIXME Deref::deref
self.builtin_deref()
const AUTODEREF_RECURSION_LIMIT: usize = 10;
pub(crate) fn autoderef<'a>(
db: &'a impl HirDatabase,
resolver: &'a Resolver,
ty: Canonical<Ty>,
) -> impl Iterator<Item = Canonical<Ty>> + 'a {
successors(Some(ty), move |ty| deref(db, resolver, ty)).take(AUTODEREF_RECURSION_LIMIT)
}
pub(crate) fn deref(
db: &impl HirDatabase,
resolver: &Resolver,
ty: &Canonical<Ty>,
) -> Option<Canonical<Ty>> {
if let Some(derefed) = ty.value.builtin_deref() {
Some(Canonical { value: derefed, num_vars: ty.num_vars })
} else {
deref_by_trait(db, resolver, ty)
}
}
fn deref_by_trait(
db: &impl HirDatabase,
resolver: &Resolver,
ty: &Canonical<Ty>,
) -> Option<Canonical<Ty>> {
let krate = resolver.krate()?;
let deref_trait = match db.lang_item(krate, "deref".into())? {
crate::lang_item::LangItemTarget::Trait(t) => t,
_ => return None,
};
let target = deref_trait.associated_type_by_name(db, Name::target())?;
if target.generic_params(db).count_params_including_parent() != 1 {
// the Target type + Deref trait should only have one generic parameter,
// namely Deref's Self type
return None;
}
// FIXME make the Canonical handling nicer
let projection = super::traits::ProjectionPredicate {
ty: Ty::Bound(0),
projection_ty: super::ProjectionTy {
associated_ty: target,
parameters: vec![ty.value.clone().shift_bound_vars(1)].into(),
},
};
let canonical = super::Canonical { num_vars: 1 + ty.num_vars, value: projection };
let solution = db.normalize(krate, canonical)?;
match &solution {
Solution::Unique(vars) => {
// FIXME: vars may contain solutions for any inference variables
// that happened to be inside ty. To correctly handle these, we
// would have to pass the solution up to the inference context, but
// that requires a larger refactoring (especially if the deref
// happens during method resolution). So for the moment, we just
// check that we're not in the situation where we would actually
// need to handle the values of the additional variables, i.e.
// they're just being 'passed through'. In the 'standard' case where
// we have `impl<T> Deref for Foo<T> { Target = T }`, that should be
// the case.
for i in 1..vars.0.num_vars {
if vars.0.value[i] != Ty::Bound((i - 1) as u32) {
warn!("complex solution for derefing {:?}: {:?}, ignoring", ty, solution);
return None;
}
}
Some(Canonical { value: vars.0.value[0].clone(), num_vars: vars.0.num_vars })
}
Solution::Ambig(_) => {
info!("Ambiguous solution for derefing {:?}: {:?}", ty, solution);
None
}
}
}


@@ -46,7 +46,7 @@ use crate::{
use super::{
Ty, TypableDef, Substs, primitive, op, ApplicationTy, TypeCtor, CallableDef, TraitRef,
traits::{Solution, Obligation, Guidance},
method_resolution,
method_resolution, autoderef,
};
mod unify;
@@ -1074,25 +1074,27 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
}
Expr::Field { expr, name } => {
let receiver_ty = self.infer_expr(*expr, &Expectation::none());
let ty = receiver_ty
.autoderef(self.db)
.find_map(|derefed_ty| match derefed_ty {
Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Tuple { .. } => {
let i = name.to_string().parse::<usize>().ok();
i.and_then(|i| a_ty.parameters.0.get(i).cloned())
}
TypeCtor::Adt(AdtDef::Struct(s)) => {
s.field(self.db, name).map(|field| {
self.write_field_resolution(tgt_expr, field);
field.ty(self.db).subst(&a_ty.parameters)
})
}
_ => None,
},
let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty);
let ty = autoderef::autoderef(
self.db,
&self.resolver.clone(),
canonicalized.value.clone(),
)
.find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) {
Ty::Apply(a_ty) => match a_ty.ctor {
TypeCtor::Tuple { .. } => {
let i = name.to_string().parse::<usize>().ok();
i.and_then(|i| a_ty.parameters.0.get(i).cloned())
}
TypeCtor::Adt(AdtDef::Struct(s)) => s.field(self.db, name).map(|field| {
self.write_field_resolution(tgt_expr, field);
field.ty(self.db).subst(&a_ty.parameters)
}),
_ => None,
})
.unwrap_or(Ty::Unknown);
},
_ => None,
})
.unwrap_or(Ty::Unknown);
self.insert_type_vars(ty)
}
Expr::Try { expr } => {
@@ -1124,10 +1126,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
let inner_ty = self.infer_expr(*expr, &Expectation::none());
match op {
UnaryOp::Deref => {
if let Some(derefed_ty) = inner_ty.builtin_deref() {
derefed_ty
let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty);
if let Some(derefed_ty) =
autoderef::deref(self.db, &self.resolver, &canonicalized.value)
{
canonicalized.decanonicalize_ty(derefed_ty.value)
} else {
// FIXME Deref::deref
Ty::Unknown
}
}


@@ -460,7 +460,7 @@ fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty {
let resolver = t.resolver(db);
let type_ref = t.type_ref(db);
let substs = Substs::identity(&generics);
let inner = Ty::from_hir(db, &resolver, &type_ref);
let inner = Ty::from_hir(db, &resolver, &type_ref.unwrap_or(TypeRef::Error));
inner.subst(&substs)
}


@@ -16,7 +16,7 @@ use crate::{
generics::HasGenericParams,
ty::primitive::{UncertainIntTy, UncertainFloatTy}
};
use super::{TraitRef, Canonical};
use super::{TraitRef, Canonical, autoderef};
/// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
@@ -162,8 +162,7 @@ pub(crate) fn iterate_method_candidates<T>(
// rustc does an autoderef and then autoref again).
let krate = resolver.krate()?;
for derefed_ty in ty.value.clone().autoderef(db) {
let derefed_ty = Canonical { value: derefed_ty, num_vars: ty.num_vars };
for derefed_ty in autoderef::autoderef(db, resolver, ty.clone()) {
if let Some(result) = iterate_inherent_methods(&derefed_ty, db, name, krate, &mut callback)
{
return Some(result);


@@ -2737,6 +2737,90 @@ fn main() {
assert_eq!(t, "Foo");
}
#[test]
fn deref_trait() {
let t = type_at(
r#"
//- /main.rs
#[lang = "deref"]
trait Deref {
type Target;
fn deref(&self) -> &Self::Target;
}
struct Arc<T>;
impl<T> Deref for Arc<T> {
type Target = T;
}
struct S;
impl S {
fn foo(&self) -> u128 {}
}
fn test(s: Arc<S>) {
(*s, s.foo())<|>
}
"#,
);
assert_eq!(t, "(S, u128)");
}
#[test]
fn deref_trait_with_inference_var() {
let t = type_at(
r#"
//- /main.rs
#[lang = "deref"]
trait Deref {
type Target;
fn deref(&self) -> &Self::Target;
}
struct Arc<T>;
fn new_arc<T>() -> Arc<T> {}
impl<T> Deref for Arc<T> {
type Target = T;
}
struct S;
fn foo(a: Arc<S>) {}
fn test() {
let a = new_arc();
let b = (*a)<|>;
foo(a);
}
"#,
);
assert_eq!(t, "S");
}
#[test]
fn deref_trait_infinite_recursion() {
let t = type_at(
r#"
//- /main.rs
#[lang = "deref"]
trait Deref {
type Target;
fn deref(&self) -> &Self::Target;
}
struct S;
impl Deref for S {
type Target = S;
}
fn test(s: S) {
s.foo()<|>;
}
"#,
);
assert_eq!(t, "{unknown}");
}
fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String {
let file = db.parse(pos.file_id).ok().unwrap();
let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap();


@@ -8,7 +8,7 @@ use chalk_ir::cast::Cast;
use ra_prof::profile;
use crate::{Crate, Trait, db::HirDatabase, ImplBlock};
use super::{TraitRef, Ty, Canonical};
use super::{TraitRef, Ty, Canonical, ProjectionTy};
use self::chalk::{ToChalk, from_chalk};
@@ -75,6 +75,13 @@ pub enum Obligation {
/// Prove that a certain type implements a trait (the type is the `Self` type
/// parameter to the `TraitRef`).
Trait(TraitRef),
// Projection(ProjectionPredicate),
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ProjectionPredicate {
pub projection_ty: ProjectionTy,
pub ty: Ty,
}
/// Check using Chalk whether trait is implemented for given parameters including `Self` type.
@@ -98,6 +105,30 @@ pub(crate) fn implements_query(
solution.map(|solution| solution_from_chalk(db, solution))
}
pub(crate) fn normalize_query(
db: &impl HirDatabase,
krate: Crate,
projection: Canonical<ProjectionPredicate>,
) -> Option<Solution> {
let goal: chalk_ir::Goal = chalk_ir::Normalize {
projection: projection.value.projection_ty.to_chalk(db),
ty: projection.value.ty.to_chalk(db),
}
.cast();
debug!("goal: {:?}", goal);
// FIXME unify with `implements`
let env = chalk_ir::Environment::new();
let in_env = chalk_ir::InEnvironment::new(&env, goal);
let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT);
let canonical =
chalk_ir::Canonical { value: in_env, binders: vec![parameter; projection.num_vars] };
// We currently don't deal with universes (I think / hope they're not yet
// relevant for our use cases?)
let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 };
let solution = solve(db, krate, &u_canonical);
solution.map(|solution| solution_from_chalk(db, solution))
}
fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) -> Solution {
let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution>| {
let value = subst


@@ -3,7 +3,7 @@ use std::sync::Arc;
use log::debug;
use chalk_ir::{TypeId, ImplId, TypeKindId, ProjectionTy, Parameter, Identifier, cast::Cast, PlaceholderIndex, UniverseIndex, TypeName};
use chalk_ir::{TypeId, ImplId, TypeKindId, Parameter, Identifier, cast::Cast, PlaceholderIndex, UniverseIndex, TypeName};
use chalk_rust_ir::{AssociatedTyDatum, TraitDatum, StructDatum, ImplDatum};
use test_utils::tested_by;
@@ -12,9 +12,9 @@ use ra_db::salsa::{InternId, InternKey};
use crate::{
Trait, HasGenericParams, ImplBlock,
db::HirDatabase,
ty::{TraitRef, Ty, ApplicationTy, TypeCtor, Substs, GenericPredicate, CallableDef},
ty::{TraitRef, Ty, ApplicationTy, TypeCtor, Substs, GenericPredicate, CallableDef, ProjectionTy},
ty::display::HirDisplay,
generics::GenericDef,
generics::GenericDef, TypeAlias, ImplItem,
};
use super::ChalkContext;
@@ -156,6 +156,18 @@ impl ToChalk for ImplBlock {
}
}
impl ToChalk for TypeAlias {
type Chalk = chalk_ir::TypeId;
fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TypeId {
self.id.into()
}
fn from_chalk(_db: &impl HirDatabase, impl_id: chalk_ir::TypeId) -> TypeAlias {
TypeAlias { id: impl_id.into() }
}
}
impl ToChalk for GenericPredicate {
type Chalk = chalk_ir::QuantifiedWhereClause;
@@ -183,6 +195,24 @@ impl ToChalk for GenericPredicate {
}
}
impl ToChalk for ProjectionTy {
type Chalk = chalk_ir::ProjectionTy;
fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy {
chalk_ir::ProjectionTy {
associated_ty_id: self.associated_ty.to_chalk(db),
parameters: self.parameters.to_chalk(db),
}
}
fn from_chalk(db: &impl HirDatabase, projection_ty: chalk_ir::ProjectionTy) -> ProjectionTy {
ProjectionTy {
associated_ty: from_chalk(db, projection_ty.associated_ty_id),
parameters: from_chalk(db, projection_ty.parameters),
}
}
}
fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> {
chalk_ir::Binders {
value,
@@ -225,8 +255,29 @@ impl<'a, DB> chalk_solve::RustIrDatabase for ChalkContext<'a, DB>
where
DB: HirDatabase,
{
fn associated_ty_data(&self, _ty: TypeId) -> Arc<AssociatedTyDatum> {
unimplemented!()
fn associated_ty_data(&self, id: TypeId) -> Arc<AssociatedTyDatum> {
debug!("associated_ty_data {:?}", id);
let type_alias: TypeAlias = from_chalk(self.db, id);
let trait_ = match type_alias.container(self.db) {
Some(crate::Container::Trait(t)) => t,
_ => panic!("associated type not in trait"),
};
let generic_params = type_alias.generic_params(self.db);
let parameter_kinds = generic_params
.params_including_parent()
.into_iter()
.map(|p| chalk_ir::ParameterKind::Ty(lalrpop_intern::intern(&p.name.to_string())))
.collect();
let datum = AssociatedTyDatum {
trait_id: trait_.to_chalk(self.db),
id,
name: lalrpop_intern::intern(&type_alias.name(self.db).to_string()),
parameter_kinds,
// FIXME add bounds and where clauses
bounds: vec![],
where_clauses: vec![],
};
Arc::new(datum)
}
fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum> {
debug!("trait_datum {:?}", trait_id);
@@ -260,7 +311,15 @@ where
fundamental: false,
};
let where_clauses = convert_where_clauses(self.db, trait_.into(), &bound_vars);
let associated_ty_ids = Vec::new(); // FIXME add associated tys
let associated_ty_ids = trait_
.items(self.db)
.into_iter()
.filter_map(|trait_item| match trait_item {
crate::traits::TraitItem::TypeAlias(type_alias) => Some(type_alias),
_ => None,
})
.map(|type_alias| type_alias.to_chalk(self.db))
.collect();
let trait_datum_bound =
chalk_rust_ir::TraitDatumBound { trait_ref, where_clauses, flags, associated_ty_ids };
let trait_datum = TraitDatum { binders: make_binders(trait_datum_bound, bound_vars.len()) };
@@ -359,7 +418,29 @@ where
trait_ref.display(self.db),
where_clauses
);
let trait_ = trait_ref.trait_;
let trait_ref = trait_ref.to_chalk(self.db);
let associated_ty_values = impl_block
.items(self.db)
.into_iter()
.filter_map(|item| match item {
ImplItem::TypeAlias(t) => Some(t),
_ => None,
})
.filter_map(|t| {
let assoc_ty = trait_.associated_type_by_name(self.db, t.name(self.db))?;
let ty = self.db.type_for_def(t.into(), crate::Namespace::Types).subst(&bound_vars);
Some(chalk_rust_ir::AssociatedTyValue {
impl_id,
associated_ty_id: assoc_ty.to_chalk(self.db),
value: chalk_ir::Binders {
value: chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(self.db) },
binders: vec![], // we don't support GATs yet
},
})
})
.collect();
let impl_datum_bound = chalk_rust_ir::ImplDatumBound {
trait_ref: if negative {
chalk_rust_ir::PolarizedTraitRef::Negative(trait_ref)
@ -367,9 +448,10 @@ where
chalk_rust_ir::PolarizedTraitRef::Positive(trait_ref)
},
where_clauses,
associated_ty_values: Vec::new(), // FIXME add associated type values
associated_ty_values,
impl_type,
};
debug!("impl_datum: {:?}", impl_datum_bound);
let impl_datum = ImplDatum { binders: make_binders(impl_datum_bound, bound_vars.len()) };
Arc::new(impl_datum)
}
@@ -405,7 +487,7 @@ where
}
fn split_projection<'p>(
&self,
projection: &'p ProjectionTy,
projection: &'p chalk_ir::ProjectionTy,
) -> (Arc<AssociatedTyDatum>, &'p [Parameter], &'p [Parameter]) {
debug!("split_projection {:?}", projection);
unimplemented!()
@@ -440,6 +522,18 @@ impl From<crate::ids::TraitId> for chalk_ir::TraitId {
}
}
impl From<chalk_ir::TypeId> for crate::ids::TypeAliasId {
fn from(type_id: chalk_ir::TypeId) -> Self {
id_from_chalk(type_id.0)
}
}
impl From<crate::ids::TypeAliasId> for chalk_ir::TypeId {
fn from(type_id: crate::ids::TypeAliasId) -> Self {
chalk_ir::TypeId(id_to_chalk(type_id))
}
}
impl From<chalk_ir::StructId> for crate::ids::TypeCtorId {
fn from(struct_id: chalk_ir::StructId) -> Self {
id_from_chalk(struct_id.0)


@@ -2,12 +2,22 @@
use std::sync::Arc;
use crate::{TypeAlias, DefDatabase, AstDatabase, HasSource, type_ref::TypeRef};
use ra_syntax::ast::NameOwner;
pub(crate) fn type_alias_ref_query(
use crate::{TypeAlias, db::{DefDatabase, AstDatabase}, type_ref::TypeRef, name::{Name, AsName}, HasSource};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct TypeAliasData {
pub(crate) name: Name,
pub(crate) type_ref: Option<TypeRef>,
}
pub(crate) fn type_alias_data_query(
db: &(impl DefDatabase + AstDatabase),
typ: TypeAlias,
) -> Arc<TypeRef> {
) -> Arc<TypeAliasData> {
let node = typ.source(db).ast;
Arc::new(TypeRef::from_ast_opt(node.type_ref()))
let name = node.name().map_or_else(Name::missing, |n| n.as_name());
let type_ref = node.type_ref().map(TypeRef::from_ast);
Arc::new(TypeAliasData { name, type_ref })
}


@@ -15,7 +15,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
}
fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) {
for receiver in receiver.autoderef(ctx.db) {
for receiver in ctx.analyzer.autoderef(ctx.db, receiver) {
if let Ty::Apply(a_ty) = receiver {
match a_ty.ctor {
TypeCtor::Adt(AdtDef::Struct(s)) => {


@@ -30,7 +30,7 @@ pub(crate) fn goto_type_definition(
return None;
};
let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt().map(|adt| adt.0))?;
let adt_def = analyzer.autoderef(db, ty).find_map(|ty| ty.as_adt().map(|adt| adt.0))?;
let nav = NavigationTarget::from_adt_def(db, adt_def);
Some(RangeInfo::new(node.range(), vec![nav]))