Merge #1103
1103: Array inference r=flodiebold a=Lapz

Fixes the final item in #394. The only problem is that inferring the repeat causes some types to be inferred twice, i.e.

```rust
fn test() {
    let y = unknown;
    [y, &y];
}
```

results in the following diff:

```diff
[11; 48) '{ ...&y]; }': ()
[21; 22) 'y': &{unknown}
[25; 32) 'unknown': &{unknown}
-[38; 45) '[y, &y]': [&&{unknown}]
+[38; 45) '[y, &y]': [&&{unknown};usize]
[39; 40) 'y': &{unknown}
+[39; 40) 'y': &{unknown}
[42; 44) '&y': &&{unknown}
[43; 44) 'y': &{unknown}
```

Should the code produce two inference results for 'y', and if not, could anyone tell me what needs to change?

Co-authored-by: Lenard Pratt <l3np27@gmail.com>
commit 1e2178eb8e
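For context, the change covers both forms of array expression. A minimal plain-Rust illustration (not taken from the PR) of the two forms whose types are now inferred:

```rust
// Plain Rust, for illustration only: the two array expression forms.
fn main() {
    // Element list: one element type is unified across all elements.
    let elements = [1u8, 2, 3];
    // Repeat form: the initializer fixes the element type; the length
    // expression is expected to be a usize.
    let repeated = [0i32; 4];
    let _ = (elements, repeated);
}
```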
@@ -6,7 +6,7 @@ use rustc_hash::FxHashMap;
 use ra_arena::{Arena, RawId, impl_arena_id, map::ArenaMap};
 use ra_syntax::{
     SyntaxNodePtr, AstPtr, AstNode,
-    ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralKind, TypeAscriptionOwner}
+    ast::{self, LoopBodyOwner, ArgListOwner, NameOwner, LiteralKind,ArrayExprKind, TypeAscriptionOwner}
 };

 use crate::{
@@ -238,14 +238,17 @@ pub enum Expr {
     Tuple {
         exprs: Vec<ExprId>,
     },
-    Array {
-        exprs: Vec<ExprId>,
-    },
+    Array(Array),
     Literal(Literal),
 }

 pub use ra_syntax::ast::PrefixOp as UnaryOp;
 pub use ra_syntax::ast::BinOp as BinaryOp;
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Array {
+    ElementList(Vec<ExprId>),
+    Repeat { initializer: ExprId, repeat: ExprId },
+}

 #[derive(Debug, Clone, Eq, PartialEq)]
 pub struct MatchArm {
@@ -348,11 +351,22 @@ impl Expr {
             | Expr::UnaryOp { expr, .. } => {
                 f(*expr);
             }
-            Expr::Tuple { exprs } | Expr::Array { exprs } => {
+            Expr::Tuple { exprs } => {
                 for expr in exprs {
                     f(*expr);
                 }
             }
+            Expr::Array(a) => match a {
+                Array::ElementList(exprs) => {
+                    for expr in exprs {
+                        f(*expr);
+                    }
+                }
+                Array::Repeat { initializer, repeat } => {
+                    f(*initializer);
+                    f(*repeat)
+                }
+            },
             Expr::Literal(_) => {}
         }
     }
@@ -723,10 +737,26 @@ impl ExprCollector {
                 let exprs = e.exprs().map(|expr| self.collect_expr(expr)).collect();
                 self.alloc_expr(Expr::Tuple { exprs }, syntax_ptr)
             }
+
             ast::ExprKind::ArrayExpr(e) => {
-                let exprs = e.exprs().map(|expr| self.collect_expr(expr)).collect();
-                self.alloc_expr(Expr::Array { exprs }, syntax_ptr)
+                let kind = e.kind();
+
+                match kind {
+                    ArrayExprKind::ElementList(e) => {
+                        let exprs = e.map(|expr| self.collect_expr(expr)).collect();
+                        self.alloc_expr(Expr::Array(Array::ElementList(exprs)), syntax_ptr)
+                    }
+                    ArrayExprKind::Repeat { initializer, repeat } => {
+                        let initializer = self.collect_expr_opt(initializer);
+                        let repeat = self.collect_expr_opt(repeat);
+                        self.alloc_expr(
+                            Expr::Array(Array::Repeat { initializer, repeat }),
+                            syntax_ptr,
+                        )
+                    }
+                }
             }
+
             ast::ExprKind::Literal(e) => {
                 let lit = match e.kind() {
                     LiteralKind::IntNumber { suffix } => {
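The hunks above replace the old `Array { exprs }` variant with `Expr::Array(Array)`, distinguishing element lists from repeat expressions, and teach both `walk_child_exprs` and the AST-to-HIR collector about the two cases. A self-contained sketch of that shape (simplified stand-in types, not the crate's real arena IDs):

```rust
// Sketch only: a stand-in ExprId instead of ra_hir's arena index.
type ExprId = u32;

#[derive(Debug, Clone, Eq, PartialEq)]
enum Array {
    ElementList(Vec<ExprId>),
    Repeat { initializer: ExprId, repeat: ExprId },
}

fn walk_array_children(array: &Array, mut f: impl FnMut(ExprId)) {
    match array {
        // An element list contributes every element expression.
        Array::ElementList(exprs) => {
            for expr in exprs {
                f(*expr);
            }
        }
        // A repeat expression contributes the initializer and the length expression.
        Array::Repeat { initializer, repeat } => {
            f(*initializer);
            f(*repeat);
        }
    }
}

fn main() {
    let list = Array::ElementList(vec![0, 1, 2]);
    let repeat = Array::Repeat { initializer: 3, repeat: 4 };
    for array in [&list, &repeat] {
        walk_array_children(array, |id| println!("child expr: {}", id));
    }
}
```

The `Repeat` variant keeps the length as an ordinary expression, which is what later allows inference to give it a `usize` expectation.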
@@ -353,10 +353,14 @@ impl HirDisplay for ApplicationTy {
             TypeCtor::Int(t) => write!(f, "{}", t)?,
             TypeCtor::Float(t) => write!(f, "{}", t)?,
             TypeCtor::Str => write!(f, "str")?,
-            TypeCtor::Slice | TypeCtor::Array => {
+            TypeCtor::Slice => {
                 let t = self.parameters.as_single();
                 write!(f, "[{}]", t.display(f.db))?;
             }
+            TypeCtor::Array => {
+                let t = self.parameters.as_single();
+                write!(f, "[{};_]", t.display(f.db))?;
+            }
             TypeCtor::RawPtr(m) => {
                 let t = self.parameters.as_single();
                 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
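This hunk splits the shared `TypeCtor::Slice | TypeCtor::Array` display arm: slices keep printing as `[T]`, while arrays now print as `[T;_]`, since the HIR type does not record the length. A rough standalone illustration (toy `Ty`, not the crate's `HirDisplay` machinery):

```rust
use std::fmt;

// Toy type: only enough structure to show the printing rule.
enum Ty {
    Str,
    Slice(Box<Ty>),
    Array(Box<Ty>),
}

impl fmt::Display for Ty {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Ty::Str => write!(f, "str"),
            // Slices have no length to show.
            Ty::Slice(t) => write!(f, "[{}]", t),
            // Arrays have a length, but it is not tracked yet, so print "_".
            Ty::Array(t) => write!(f, "[{};_]", t),
        }
    }
}

fn main() {
    println!("{}", Ty::Slice(Box::new(Ty::Str))); // [str]
    println!("{}", Ty::Array(Box::new(Ty::Str))); // [str;_]
}
```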
@@ -32,7 +32,7 @@ use crate::{
     DefWithBody,
     ImplItem,
     type_ref::{TypeRef, Mutability},
-    expr::{Body, Expr, BindingAnnotation, Literal, ExprId, Pat, PatId, UnaryOp, BinaryOp, Statement, FieldPat, self},
+    expr::{Body, Expr, BindingAnnotation, Literal, ExprId, Pat, PatId, UnaryOp, BinaryOp, Statement, FieldPat,Array, self},
     generics::GenericParams,
     path::{GenericArgs, GenericArg},
     adt::VariantDef,
@@ -1074,7 +1074,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {

                 Ty::apply(TypeCtor::Tuple, Substs(ty_vec.into()))
             }
-            Expr::Array { exprs } => {
+            Expr::Array(array) => {
                 let elem_ty = match &expected.ty {
                     Ty::Apply(a_ty) => match a_ty.ctor {
                         TypeCtor::Slice | TypeCtor::Array => {
@@ -1085,8 +1085,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                     _ => self.new_type_var(),
                 };

-                for expr in exprs.iter() {
-                    self.infer_expr(*expr, &Expectation::has_type(elem_ty.clone()));
+                match array {
+                    Array::ElementList(items) => {
+                        for expr in items.iter() {
+                            self.infer_expr(*expr, &Expectation::has_type(elem_ty.clone()));
+                        }
+                    }
+                    Array::Repeat { initializer, repeat } => {
+                        self.infer_expr(*initializer, &Expectation::has_type(elem_ty.clone()));
+                        self.infer_expr(
+                            *repeat,
+                            &Expectation::has_type(Ty::simple(TypeCtor::Int(
+                                primitive::UncertainIntTy::Known(primitive::IntTy::usize()),
+                            ))),
+                        );
+                    }
                 }

                 Ty::apply_one(TypeCtor::Array, elem_ty)
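The inference hunks derive one element-type expectation (from an expected slice or array type, or a fresh type variable) and apply it to every element, or to the repeat initializer; the repeat length additionally gets a `usize` expectation. A toy sketch of that flow (invented `Ty` and `infer_array`, not the real `InferenceContext` API):

```rust
// Toy model: types are compared directly instead of unified.
#[derive(Debug, PartialEq)]
enum Ty {
    Unknown,
    Usize,
    I32,
    Array(Box<Ty>),
}

fn expect(actual: &Ty, expected: &Ty) -> Result<(), String> {
    if actual == expected || *expected == Ty::Unknown {
        Ok(())
    } else {
        Err(format!("expected {:?}, found {:?}", expected, actual))
    }
}

// `element_tys` holds the elements of `[a, b, c]`, or the single initializer of
// `[init; len]`; `repeat_len_ty` is Some for the repeat form.
fn infer_array(element_tys: &[Ty], repeat_len_ty: Option<&Ty>, expected_elem: Ty) -> Result<Ty, String> {
    for ty in element_tys {
        expect(ty, &expected_elem)?;
    }
    if let Some(len_ty) = repeat_len_ty {
        // The length of a repeat expression is expected to be a usize.
        expect(len_ty, &Ty::Usize)?;
    }
    Ok(Ty::Array(Box::new(expected_elem)))
}

fn main() {
    let list = infer_array(&[Ty::I32, Ty::I32], None, Ty::I32).unwrap();
    let repeat = infer_array(&[Ty::I32], Some(&Ty::Usize), Ty::I32).unwrap();
    println!("{:?} {:?}", list, repeat); // Array(I32) Array(I32)
}
```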
@@ -697,58 +697,58 @@ fn test(x: &str, y: isize) {
 [9; 10) 'x': &str
 [18; 19) 'y': isize
 [28; 324) '{ ... 3]; }': ()
-[38; 39) 'a': [&str]
-[42; 45) '[x]': [&str]
+[38; 39) 'a': [&str;_]
+[42; 45) '[x]': [&str;_]
 [43; 44) 'x': &str
-[55; 56) 'b': [[&str]]
-[59; 65) '[a, a]': [[&str]]
-[60; 61) 'a': [&str]
-[63; 64) 'a': [&str]
-[75; 76) 'c': [[[&str]]]
-[79; 85) '[b, b]': [[[&str]]]
-[80; 81) 'b': [[&str]]
-[83; 84) 'b': [[&str]]
-[96; 97) 'd': [isize]
-[100; 112) '[y, 1, 2, 3]': [isize]
+[55; 56) 'b': [[&str;_];_]
+[59; 65) '[a, a]': [[&str;_];_]
+[60; 61) 'a': [&str;_]
+[63; 64) 'a': [&str;_]
+[75; 76) 'c': [[[&str;_];_];_]
+[79; 85) '[b, b]': [[[&str;_];_];_]
+[80; 81) 'b': [[&str;_];_]
+[83; 84) 'b': [[&str;_];_]
+[96; 97) 'd': [isize;_]
+[100; 112) '[y, 1, 2, 3]': [isize;_]
 [101; 102) 'y': isize
 [104; 105) '1': isize
 [107; 108) '2': isize
 [110; 111) '3': isize
-[122; 123) 'd': [isize]
-[126; 138) '[1, y, 2, 3]': [isize]
+[122; 123) 'd': [isize;_]
+[126; 138) '[1, y, 2, 3]': [isize;_]
 [127; 128) '1': isize
 [130; 131) 'y': isize
 [133; 134) '2': isize
 [136; 137) '3': isize
-[148; 149) 'e': [isize]
-[152; 155) '[y]': [isize]
+[148; 149) 'e': [isize;_]
+[152; 155) '[y]': [isize;_]
 [153; 154) 'y': isize
-[165; 166) 'f': [[isize]]
-[169; 175) '[d, d]': [[isize]]
-[170; 171) 'd': [isize]
-[173; 174) 'd': [isize]
-[185; 186) 'g': [[isize]]
-[189; 195) '[e, e]': [[isize]]
-[190; 191) 'e': [isize]
-[193; 194) 'e': [isize]
-[206; 207) 'h': [i32]
-[210; 216) '[1, 2]': [i32]
+[165; 166) 'f': [[isize;_];_]
+[169; 175) '[d, d]': [[isize;_];_]
+[170; 171) 'd': [isize;_]
+[173; 174) 'd': [isize;_]
+[185; 186) 'g': [[isize;_];_]
+[189; 195) '[e, e]': [[isize;_];_]
+[190; 191) 'e': [isize;_]
+[193; 194) 'e': [isize;_]
+[206; 207) 'h': [i32;_]
+[210; 216) '[1, 2]': [i32;_]
 [211; 212) '1': i32
 [214; 215) '2': i32
-[226; 227) 'i': [&str]
-[230; 240) '["a", "b"]': [&str]
+[226; 227) 'i': [&str;_]
+[230; 240) '["a", "b"]': [&str;_]
 [231; 234) '"a"': &str
 [236; 239) '"b"': &str
-[251; 252) 'b': [[&str]]
-[255; 265) '[a, ["b"]]': [[&str]]
-[256; 257) 'a': [&str]
-[259; 264) '["b"]': [&str]
+[251; 252) 'b': [[&str;_];_]
+[255; 265) '[a, ["b"]]': [[&str;_];_]
+[256; 257) 'a': [&str;_]
+[259; 264) '["b"]': [&str;_]
 [260; 263) '"b"': &str
-[275; 276) 'x': [u8]
-[288; 290) '[]': [u8]
-[300; 301) 'z': &[u8]
-[311; 321) '&[1, 2, 3]': &[u8]
-[312; 321) '[1, 2, 3]': [u8]
+[275; 276) 'x': [u8;_]
+[288; 290) '[]': [u8;_]
+[300; 301) 'z': &[u8;_]
+[311; 321) '&[1, 2, 3]': &[u8;_]
+[312; 321) '[1, 2, 3]': [u8;_]
 [313; 314) '1': u8
 [316; 317) '2': u8
 [319; 320) '3': u8"###
@@ -1553,7 +1553,7 @@ fn test() {
 [11; 48) '{ ...&y]; }': ()
 [21; 22) 'y': &{unknown}
 [25; 32) 'unknown': &{unknown}
-[38; 45) '[y, &y]': [&&{unknown}]
+[38; 45) '[y, &y]': [&&{unknown};_]
 [39; 40) 'y': &{unknown}
 [42; 44) '&y': &&{unknown}
 [43; 44) 'y': &{unknown}"###
@@ -1578,7 +1578,7 @@ fn test() {
 [25; 32) 'unknown': &&{unknown}
 [42; 43) 'y': &&{unknown}
 [46; 53) 'unknown': &&{unknown}
-[59; 77) '[(x, y..., &x)]': [(&&{unknown}, &&{unknown})]
+[59; 77) '[(x, y..., &x)]': [(&&{unknown}, &&{unknown});_]
 [60; 66) '(x, y)': (&&{unknown}, &&{unknown})
 [61; 62) 'x': &&{unknown}
 [64; 65) 'y': &&{unknown}
@@ -1670,8 +1670,8 @@ fn test_line_buffer() {
 "#),
 @r###"
 [23; 53) '{ ...n']; }': ()
-[29; 50) '&[0, b...b'\n']': &[u8]
-[30; 50) '[0, b'...b'\n']': [u8]
+[29; 50) '&[0, b...b'\n']': &[u8;_]
+[30; 50) '[0, b'...b'\n']': [u8;_]
 [31; 32) '0': u8
 [34; 39) 'b'\n'': u8
 [41; 42) '1': u8
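The snapshot updates above only reflect the new rendering of inferred array types, from `[T]` to `[T;_]`. For reference, a plain-Rust analogue of a few fixture bindings with explicit lengths (these annotations are illustrative, not part of the test):

```rust
// Ordinary Rust, not part of the change: the bindings do have concrete
// lengths; the "_" in the snapshots only means the printer does not show one.
fn main() {
    let x = "x";
    let y: isize = 0;
    let a: [&str; 1] = [x];
    let b: [[&str; 1]; 2] = [a, a];
    let d: [isize; 4] = [y, 1, 2, 3];
    let z: &[u8; 3] = &[1, 2, 3];
    let _ = (b, d, z);
}
```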
@@ -17,8 +17,8 @@ pub use self::{
     generated::*,
     traits::*,
     tokens::*,
-    extensions::{PathSegmentKind, StructKind, FieldKind, SelfParamKind},
-    expr_extensions::{ElseBranch, PrefixOp, BinOp, LiteralKind},
+    extensions::{PathSegmentKind, StructKind,FieldKind, SelfParamKind},
+    expr_extensions::{ElseBranch, PrefixOp, BinOp, LiteralKind,ArrayExprKind},
 };

 /// The main trait to go from untyped `SyntaxNode` to a typed ast. The
@@ -193,6 +193,28 @@ impl ast::BinExpr {
     }
 }

+pub enum ArrayExprKind<'a> {
+    Repeat { initializer: Option<&'a ast::Expr>, repeat: Option<&'a ast::Expr> },
+    ElementList(AstChildren<'a, ast::Expr>),
+}
+
+impl ast::ArrayExpr {
+    pub fn kind(&self) -> ArrayExprKind {
+        if self.is_repeat() {
+            ArrayExprKind::Repeat {
+                initializer: children(self).nth(0),
+                repeat: children(self).nth(1),
+            }
+        } else {
+            ArrayExprKind::ElementList(children(self))
+        }
+    }
+
+    fn is_repeat(&self) -> bool {
+        self.syntax().children_with_tokens().any(|it| it.kind() == SEMI)
+    }
+}
+
 #[derive(Clone, Debug, PartialEq, Eq, Hash)]
 pub enum LiteralKind {
     String,
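The final hunk classifies an `ArrayExpr` as a repeat expression when a `;` token occurs among its children. A toy, string-based stand-in for that rule (the real code inspects syntax tokens via `children_with_tokens`, not text):

```rust
// Toy stand-in for ast::ArrayExpr::kind(): a `;` separating the initializer
// from the length marks the repeat form.
#[derive(Debug)]
enum ArrayExprKind {
    Repeat,      // [initializer; repeat]
    ElementList, // [a, b, c]
}

fn classify(array_expr_text: &str) -> ArrayExprKind {
    if array_expr_text.contains(';') {
        ArrayExprKind::Repeat
    } else {
        ArrayExprKind::ElementList
    }
}

fn main() {
    println!("{:?}", classify("[0u8; 4]")); // Repeat
    println!("{:?}", classify("[1, 2, 3]")); // ElementList
}
```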