Rollup merge of #129733 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
Matthias Krüger 2024-08-31 14:46:08 +02:00 committed by GitHub
commit a00bd75b6c
303 changed files with 9220 additions and 2905 deletions


@ -7,9 +7,10 @@
# prettier format
f247090558c9ba3c551566eae5882b7ca865225f
# subtree syncs
932d85b52946d917deab2c23ead552f7f713b828
# pre-josh subtree syncs
3e358a6827d83e8d6473913a5e304734aadfed04
932d85b52946d917deab2c23ead552f7f713b828
9d2cb42a413e51deb50b36794a2e1605381878fc
f532576ac53ddcc666bc8d59e0b6437065e2f599
b2f6fd4f961fc7e4fbfdb80cae2e6065f8436f15
c48062fe2ab9a2d913d1985a6b0aec4bf936bfc1
f532576ac53ddcc666bc8d59e0b6437065e2f599


@ -495,7 +495,6 @@ dependencies = [
"hir-ty",
"intern",
"itertools",
"once_cell",
"rustc-hash",
"smallvec",
"span",
@ -528,7 +527,6 @@ dependencies = [
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"mbe",
"once_cell",
"ra-ap-rustc_abi",
"ra-ap-rustc_parse_format",
"rustc-hash",
@ -595,7 +593,6 @@ dependencies = [
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"limit",
"nohash-hasher",
"once_cell",
"oorandom",
"project-model",
"ra-ap-rustc_abi",
@ -691,7 +688,6 @@ dependencies = [
"hir",
"ide-db",
"itertools",
"once_cell",
"smallvec",
"stdx",
"syntax",
@ -720,7 +716,6 @@ dependencies = [
"line-index 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memchr",
"nohash-hasher",
"once_cell",
"parser",
"profile",
"rayon",
@ -746,7 +741,6 @@ dependencies = [
"hir",
"ide-db",
"itertools",
"once_cell",
"paths",
"serde_json",
"stdx",
@ -1933,7 +1927,6 @@ dependencies = [
"expect-test",
"indexmap",
"itertools",
"once_cell",
"parser",
"ra-ap-rustc_lexer",
"rayon",


@ -19,9 +19,10 @@ rowan.opt-level = 3
rustc-hash.opt-level = 3
smol_str.opt-level = 3
text-size.opt-level = 3
serde.opt-level = 3
salsa.opt-level = 3
# This speeds up `cargo xtask dist`.
miniz_oxide.opt-level = 3
salsa.opt-level = 3
[profile.release]
incremental = true
@ -184,6 +185,8 @@ style = { level = "warn", priority = -1 }
suspicious = { level = "warn", priority = -1 }
## allow following lints
# subjective
single_match = "allow"
# () makes a fine error in most cases
result_unit_err = "allow"
# We don't expose public APIs that matter like this


@ -174,28 +174,3 @@ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.


@ -23,7 +23,6 @@ fst = { version = "0.4.7", default-features = false }
indexmap.workspace = true
itertools.workspace = true
la-arena.workspace = true
once_cell = "1.17.0"
rustc-hash.workspace = true
tracing.workspace = true
smallvec.workspace = true


@ -14,7 +14,7 @@ use hir_expand::{name::Name, ExpandError, InFile};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::MacroFileId;
use span::{Edition, MacroFileId};
use syntax::{ast, AstPtr, SyntaxNodePtr};
use triomphe::Arc;
@ -201,8 +201,13 @@ impl Body {
self.block_scopes.iter().map(move |&block| (block, db.block_def_map(block)))
}
pub fn pretty_print(&self, db: &dyn DefDatabase, owner: DefWithBodyId) -> String {
pretty::print_body_hir(db, self, owner)
pub fn pretty_print(
&self,
db: &dyn DefDatabase,
owner: DefWithBodyId,
edition: Edition,
) -> String {
pretty::print_body_hir(db, self, owner, edition)
}
pub fn pretty_print_expr(
@ -210,8 +215,9 @@ impl Body {
db: &dyn DefDatabase,
owner: DefWithBodyId,
expr: ExprId,
edition: Edition,
) -> String {
pretty::print_expr_hir(db, self, owner, expr)
pretty::print_expr_hir(db, self, owner, expr, edition)
}
fn new(


@ -3,6 +3,7 @@
use std::fmt::{self, Write};
use itertools::Itertools;
use span::Edition;
use crate::{
hir::{
@ -15,20 +16,26 @@ use crate::{
use super::*;
pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBodyId) -> String {
pub(super) fn print_body_hir(
db: &dyn DefDatabase,
body: &Body,
owner: DefWithBodyId,
edition: Edition,
) -> String {
let header = match owner {
DefWithBodyId::FunctionId(it) => {
it.lookup(db).id.resolved(db, |it| format!("fn {}", it.name.display(db.upcast())))
}
DefWithBodyId::FunctionId(it) => it
.lookup(db)
.id
.resolved(db, |it| format!("fn {}", it.name.display(db.upcast(), edition))),
DefWithBodyId::StaticId(it) => it
.lookup(db)
.id
.resolved(db, |it| format!("static {} = ", it.name.display(db.upcast()))),
.resolved(db, |it| format!("static {} = ", it.name.display(db.upcast(), edition))),
DefWithBodyId::ConstId(it) => it.lookup(db).id.resolved(db, |it| {
format!(
"const {} = ",
match &it.name {
Some(name) => name.display(db.upcast()).to_string(),
Some(name) => name.display(db.upcast(), edition).to_string(),
None => "_".to_owned(),
}
)
@ -39,13 +46,13 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
let enum_loc = loc.parent.lookup(db);
format!(
"enum {}::{}",
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()),
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
)
}
};
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false };
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false, edition };
if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('(');
let function_data = &db.function_data(it);
@ -86,8 +93,10 @@ pub(super) fn print_expr_hir(
body: &Body,
_owner: DefWithBodyId,
expr: ExprId,
edition: Edition,
) -> String {
let mut p = Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false };
let mut p =
Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false, edition };
p.print_expr(expr);
p.buf
}
@ -113,6 +122,7 @@ struct Printer<'a> {
buf: String,
indent_level: usize,
needs_indent: bool,
edition: Edition,
}
impl Write for Printer<'_> {
@ -173,13 +183,14 @@ impl Printer<'_> {
Expr::OffsetOf(offset_of) => {
w!(self, "builtin#offset_of(");
self.print_type_ref(&offset_of.container);
let edition = self.edition;
w!(
self,
", {})",
offset_of
.fields
.iter()
.format_with(".", |field, f| f(&field.display(self.db.upcast())))
.format_with(".", |field, f| f(&field.display(self.db.upcast(), edition)))
);
}
Expr::Path(path) => self.print_path(path),
@ -201,7 +212,7 @@ impl Printer<'_> {
}
Expr::Loop { body, label } => {
if let Some(lbl) = label {
w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast()));
w!(self, "{}: ", self.body[*lbl].name.display(self.db.upcast(), self.edition));
}
w!(self, "loop ");
self.print_expr(*body);
@ -221,10 +232,11 @@ impl Printer<'_> {
}
Expr::MethodCall { receiver, method_name, args, generic_args } => {
self.print_expr(*receiver);
w!(self, ".{}", method_name.display(self.db.upcast()));
w!(self, ".{}", method_name.display(self.db.upcast(), self.edition));
if let Some(args) = generic_args {
w!(self, "::<");
print_generic_args(self.db, args, self).unwrap();
let edition = self.edition;
print_generic_args(self.db, args, self, edition).unwrap();
w!(self, ">");
}
w!(self, "(");
@ -259,13 +271,13 @@ impl Printer<'_> {
Expr::Continue { label } => {
w!(self, "continue");
if let Some(lbl) = label {
w!(self, " {}", self.body[*lbl].name.display(self.db.upcast()));
w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition));
}
}
Expr::Break { expr, label } => {
w!(self, "break");
if let Some(lbl) = label {
w!(self, " {}", self.body[*lbl].name.display(self.db.upcast()));
w!(self, " {}", self.body[*lbl].name.display(self.db.upcast(), self.edition));
}
if let Some(expr) = expr {
self.whitespace();
@ -307,9 +319,10 @@ impl Printer<'_> {
}
w!(self, "{{");
let edition = self.edition;
self.indented(|p| {
for field in &**fields {
w!(p, "{}: ", field.name.display(self.db.upcast()));
w!(p, "{}: ", field.name.display(self.db.upcast(), edition));
p.print_expr(field.expr);
wln!(p, ",");
}
@ -326,7 +339,7 @@ impl Printer<'_> {
}
Expr::Field { expr, name } => {
self.print_expr(*expr);
w!(self, ".{}", name.display(self.db.upcast()));
w!(self, ".{}", name.display(self.db.upcast(), self.edition));
}
Expr::Await { expr } => {
self.print_expr(*expr);
@ -464,8 +477,9 @@ impl Printer<'_> {
}
Expr::Literal(lit) => self.print_literal(lit),
Expr::Block { id: _, statements, tail, label } => {
let label =
label.map(|lbl| format!("{}: ", self.body[lbl].name.display(self.db.upcast())));
let label = label.map(|lbl| {
format!("{}: ", self.body[lbl].name.display(self.db.upcast(), self.edition))
});
self.print_block(label.as_deref(), statements, tail);
}
Expr::Unsafe { id: _, statements, tail } => {
@ -539,9 +553,10 @@ impl Printer<'_> {
}
w!(self, " {{");
let edition = self.edition;
self.indented(|p| {
for arg in args.iter() {
w!(p, "{}: ", arg.name.display(self.db.upcast()));
w!(p, "{}: ", arg.name.display(self.db.upcast(), edition));
p.print_pat(arg.pat);
wln!(p, ",");
}
@ -686,11 +701,13 @@ impl Printer<'_> {
}
fn print_type_ref(&mut self, ty: &TypeRef) {
print_type_ref(self.db, ty, self).unwrap();
let edition = self.edition;
print_type_ref(self.db, ty, self, edition).unwrap();
}
fn print_path(&mut self, path: &Path) {
print_path(self.db, path, self).unwrap();
let edition = self.edition;
print_path(self.db, path, self, edition).unwrap();
}
fn print_binding(&mut self, id: BindingId) {
@ -701,6 +718,6 @@ impl Printer<'_> {
BindingAnnotation::Ref => "ref ",
BindingAnnotation::RefMut => "ref mut ",
};
w!(self, "{}{}", mode, name.display(self.db.upcast()));
w!(self, "{}{}", mode, name.display(self.db.upcast(), self.edition));
}
}


@ -219,7 +219,7 @@ fn main() {
},
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}
#[test]
@ -285,7 +285,7 @@ impl SsrError {
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}
#[test]
@ -333,5 +333,5 @@ fn f(a: i32, b: u32) -> String {
);
};
}"#]]
.assert_eq(&body.pretty_print(&db, def))
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}


@ -651,6 +651,7 @@ mod tests {
use expect_test::{expect, Expect};
use hir_expand::db::ExpandDatabase;
use itertools::Itertools;
use span::Edition;
use stdx::format_to;
use syntax::ast::AstNode;
use test_fixture::WithFixture;
@ -717,8 +718,10 @@ mod tests {
"{:7}(imports {}): {}\n",
format!("{:?}", prefix),
if ignore_local_imports { '✖' } else { '✔' },
found_path
.map_or_else(|| "<unresolvable>".to_owned(), |it| it.display(&db).to_string()),
found_path.map_or_else(
|| "<unresolvable>".to_owned(),
|it| it.display(&db, Edition::CURRENT).to_string()
),
);
}
expect.assert_eq(&res);


@ -12,7 +12,6 @@ use hir_expand::{
};
use intern::Interned;
use la_arena::{Arena, RawIdx};
use once_cell::unsync::Lazy;
use stdx::impl_from;
use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
use triomphe::Arc;
@ -394,11 +393,16 @@ impl GenericParams {
// Don't create an `Expander` if not needed since this
// could cause a reparse after the `ItemTree` has been created due to the spanmap.
let mut expander = Lazy::new(|| {
(module.def_map(db), Expander::new(db, loc.id.file_id(), module))
});
let mut expander = None;
for param in func_data.params.iter() {
generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
generic_params.fill_implicit_impl_trait_args(
db,
&mut expander,
&mut || {
(module.def_map(db), Expander::new(db, loc.id.file_id(), module))
},
param,
);
}
Interned::new(generic_params.finish())
}
@ -597,7 +601,9 @@ impl GenericParamsCollector {
fn fill_implicit_impl_trait_args(
&mut self,
db: &dyn DefDatabase,
exp: &mut Lazy<(Arc<DefMap>, Expander), impl FnOnce() -> (Arc<DefMap>, Expander)>,
// FIXME: Change this back to `LazyCell` if https://github.com/rust-lang/libs-team/issues/429 is accepted.
exp: &mut Option<(Arc<DefMap>, Expander)>,
exp_fill: &mut dyn FnMut() -> (Arc<DefMap>, Expander),
type_ref: &TypeRef,
) {
type_ref.walk(&mut |type_ref| {
@ -617,7 +623,7 @@ impl GenericParamsCollector {
}
if let TypeRef::Macro(mc) = type_ref {
let macro_call = mc.to_node(db.upcast());
let (def_map, expander) = &mut **exp;
let (def_map, expander) = exp.get_or_insert_with(&mut *exp_fill);
let module = expander.module.local_id;
let resolver = |path: &_| {
@ -637,8 +643,8 @@ impl GenericParamsCollector {
{
let ctx = expander.ctx(db);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
exp.1.exit(mark);
self.fill_implicit_impl_trait_args(db, &mut *exp, exp_fill, &type_ref);
exp.get_or_insert_with(&mut *exp_fill).1.exit(mark);
}
}
});
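The hunk above drops `once_cell::unsync::Lazy` from the `GenericParams` lowering in favour of an `Option` that is filled on first use through a caller-supplied closure (the FIXME notes this could go back to `LazyCell` later). A minimal standalone sketch of that pattern, using illustrative names rather than rust-analyzer's real types:

```rust
// Sketch only: the cache starts empty and is built at most once, on the first
// call that needs it; `fill` plays the role of the `(DefMap, Expander)`
// constructor closure in the hunk above.
fn with_state<T, R>(
    cache: &mut Option<T>,
    fill: &mut dyn FnMut() -> T,
    use_it: impl FnOnce(&mut T) -> R,
) -> R {
    let state = cache.get_or_insert_with(&mut *fill);
    use_it(state)
}

fn main() {
    let mut calls = 0;
    let mut cache: Option<Vec<u32>> = None;
    let mut fill = || -> Vec<u32> {
        calls += 1;
        vec![1, 2, 3]
    };
    let first = with_state(&mut cache, &mut fill, |v| v.len());
    let second = with_state(&mut cache, &mut fill, |v| v.len());
    assert_eq!((first, second), (3, 3));
    assert_eq!(calls, 1); // the expensive state was built once and then reused
}
```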


@ -250,7 +250,7 @@ pub(crate) fn parse(
}
}
ArgRef::Name(name, span) => {
let name = Name::new(name, tt::IdentIsRaw::No, call_ctx);
let name = Name::new(name, call_ctx);
if let Some((index, _)) = args.by_name(&name) {
record_usage(name, span);
// Name found in `args`, so we resolve it to its index.


@ -10,6 +10,7 @@ use hir_expand::{
AstId,
};
use intern::{sym, Interned, Symbol};
use span::Edition;
use syntax::ast::{self, HasGenericArgs, HasName, IsString};
use crate::{
@ -419,18 +420,22 @@ impl ConstRef {
param.default_val().map(|default| Self::from_const_arg(lower_ctx, Some(default)))
}
pub fn display<'a>(&'a self, db: &'a dyn ExpandDatabase) -> impl fmt::Display + 'a {
struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef);
pub fn display<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
edition: Edition,
) -> impl fmt::Display + 'a {
struct Display<'a>(&'a dyn ExpandDatabase, &'a ConstRef, Edition);
impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.1 {
ConstRef::Scalar(s) => s.fmt(f),
ConstRef::Path(n) => n.display(self.0).fmt(f),
ConstRef::Path(n) => n.display(self.0, self.2).fmt(f),
ConstRef::Complex(_) => f.write_str("{const}"),
}
}
}
Display(db, self)
Display(db, self, edition)
}
// We special case literals and single identifiers, to speed up things.


@ -8,6 +8,7 @@ use hir_expand::name::Name;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::Edition;
use stdx::{format_to, TupleExt};
use syntax::ToSmolStr;
use triomphe::Arc;
@ -66,7 +67,12 @@ impl ImportMap {
for (k, v) in self.item_to_info_map.iter() {
format_to!(out, "{:?} ({:?}) -> ", k, v.1);
for v in &v.0 {
format_to!(out, "{}:{:?}, ", v.name.display(db.upcast()), v.container);
format_to!(
out,
"{}:{:?}, ",
v.name.display(db.upcast(), Edition::CURRENT),
v.container
);
}
format_to!(out, "\n");
}
@ -83,7 +89,7 @@ impl ImportMap {
// We've only collected items, whose name cannot be tuple field so unwrapping is fine.
.flat_map(|(&item, (info, _))| {
info.iter().enumerate().map(move |(idx, info)| {
(item, info.name.display(db.upcast()).to_smolstr(), idx as u32)
(item, info.name.unescaped().display(db.upcast()).to_smolstr(), idx as u32)
})
})
.collect();
@ -461,7 +467,7 @@ fn search_maps(
query.search_mode.check(
&query.query,
query.case_sensitive,
&info.name.display(db.upcast()).to_smolstr(),
&info.name.unescaped().display(db.upcast()).to_smolstr(),
)
});
res.extend(iter.map(TupleExt::head));
@ -577,7 +583,7 @@ mod tests {
Some(format!(
"{}::{}",
render_path(db, &trait_info[0]),
assoc_item_name.display(db.upcast())
assoc_item_name.display(db.upcast(), Edition::CURRENT)
))
}
@ -616,7 +622,7 @@ mod tests {
module = parent;
}
segments.iter().rev().map(|it| it.display(db.upcast())).join("::")
segments.iter().rev().map(|it| it.display(db.upcast(), Edition::CURRENT)).join("::")
}
#[test]


@ -1,14 +1,16 @@
//! Describes items defined or visible (ie, imported) in a certain scope.
//! This is shared between modules and blocks.
use std::sync::LazyLock;
use base_db::CrateId;
use hir_expand::{attrs::AttrId, db::ExpandDatabase, name::Name, AstId, MacroCallId};
use indexmap::map::Entry;
use itertools::Itertools;
use la_arena::Idx;
use once_cell::sync::Lazy;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use span::Edition;
use stdx::format_to;
use syntax::ast;
@ -129,7 +131,7 @@ struct DeriveMacroInvocation {
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
pub(crate) static BUILTIN_SCOPE: Lazy<FxIndexMap<Name, PerNs>> = Lazy::new(|| {
pub(crate) static BUILTIN_SCOPE: LazyLock<FxIndexMap<Name, PerNs>> = LazyLock::new(|| {
BuiltinType::all_builtin_types()
.iter()
.map(|(name, ty)| (name.clone(), PerNs::types((*ty).into(), Visibility::Public, None)))
@ -706,7 +708,7 @@ impl ItemScope {
format_to!(
buf,
"{}:",
name.map_or("_".to_owned(), |name| name.display(db).to_string())
name.map_or("_".to_owned(), |name| name.display(db, Edition::LATEST).to_string())
);
if let Some((.., i)) = def.types {


@ -40,6 +40,7 @@ use std::{
fmt::{self, Debug},
hash::{Hash, Hasher},
ops::{Index, Range},
sync::OnceLock,
};
use ast::{AstNode, StructKind};
@ -48,10 +49,9 @@ use either::Either;
use hir_expand::{attrs::RawAttrs, name::Name, ExpandTo, HirFileId, InFile};
use intern::{Interned, Symbol};
use la_arena::{Arena, Idx, RawIdx};
use once_cell::sync::OnceCell;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{AstIdNode, FileAstId, SyntaxContextId};
use span::{AstIdNode, Edition, FileAstId, SyntaxContextId};
use stdx::never;
use syntax::{ast, match_ast, SyntaxKind};
use triomphe::Arc;
@ -101,7 +101,7 @@ pub struct ItemTree {
impl ItemTree {
pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
let _p = tracing::info_span!("file_item_tree_query", ?file_id).entered();
static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new();
static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
let syntax = db.parse_or_expand(file_id);
@ -152,7 +152,7 @@ impl ItemTree {
pub(crate) fn block_item_tree_query(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
let _p = tracing::info_span!("block_item_tree_query", ?block).entered();
static EMPTY: OnceCell<Arc<ItemTree>> = OnceCell::new();
static EMPTY: OnceLock<Arc<ItemTree>> = OnceLock::new();
let loc = block.lookup(db);
let block = loc.ast_id.to_node(db.upcast());
@ -199,8 +199,8 @@ impl ItemTree {
Attrs::filter(db, krate, self.raw_attrs(of).clone())
}
pub fn pretty_print(&self, db: &dyn DefDatabase) -> String {
pretty::print_item_tree(db, self)
pub fn pretty_print(&self, db: &dyn DefDatabase, edition: Edition) -> String {
pretty::print_item_tree(db, self, edition)
}
fn data(&self) -> &ItemTreeData {
@ -626,9 +626,9 @@ impl Index<RawVisibilityId> for ItemTree {
type Output = RawVisibility;
fn index(&self, index: RawVisibilityId) -> &Self::Output {
static VIS_PUB: RawVisibility = RawVisibility::Public;
static VIS_PRIV_IMPLICIT: OnceCell<RawVisibility> = OnceCell::new();
static VIS_PRIV_EXPLICIT: OnceCell<RawVisibility> = OnceCell::new();
static VIS_PUB_CRATE: OnceCell<RawVisibility> = OnceCell::new();
static VIS_PRIV_IMPLICIT: OnceLock<RawVisibility> = OnceLock::new();
static VIS_PRIV_EXPLICIT: OnceLock<RawVisibility> = OnceLock::new();
static VIS_PUB_CRATE: OnceLock<RawVisibility> = OnceLock::new();
match index {
RawVisibilityId::PRIV_IMPLICIT => VIS_PRIV_IMPLICIT.get_or_init(|| {
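The two hunks above replace `once_cell`'s `Lazy` and `OnceCell` statics (`BUILTIN_SCOPE`, and the `EMPTY`/`VIS_*` statics) with the standard library's `LazyLock` and `OnceLock`. A small sketch of those std types on a recent stable toolchain, with illustrative statics rather than the real ones:

```rust
// Sketch of the std types standing in for `once_cell` above: `LazyLock`
// replaces `Lazy` statics such as `BUILTIN_SCOPE`, and `OnceLock` replaces
// the `OnceCell` statics such as `EMPTY` and the `VIS_*` visibilities.
use std::sync::{LazyLock, OnceLock};

static BUILTINS: LazyLock<Vec<&'static str>> =
    LazyLock::new(|| vec!["bool", "char", "str", "u32"]);

static EMPTY: OnceLock<Vec<u32>> = OnceLock::new();

fn main() {
    // The `LazyLock` closure runs on first access; the result is then cached.
    assert!(BUILTINS.contains(&"u32"));
    // `get_or_init` runs its closure at most once, even across threads.
    let empty = EMPTY.get_or_init(Vec::new);
    assert!(empty.is_empty());
}
```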


@ -3,7 +3,7 @@
use std::fmt::{self, Write};
use la_arena::{Idx, RawIdx};
use span::ErasedFileAstId;
use span::{Edition, ErasedFileAstId};
use crate::{
generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
@ -18,8 +18,9 @@ use crate::{
visibility::RawVisibility,
};
pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String {
let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true };
pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree, edition: Edition) -> String {
let mut p =
Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true, edition };
if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
p.print_attrs(attrs, true, "\n");
@ -56,6 +57,7 @@ struct Printer<'a> {
buf: String,
indent_level: usize,
needs_indent: bool,
edition: Edition,
}
impl Printer<'_> {
@ -97,7 +99,7 @@ impl Printer<'_> {
self,
"#{}[{}{}]{}",
inner,
attr.path.display(self.db.upcast()),
attr.path.display(self.db.upcast(), self.edition),
attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
separated_by,
);
@ -113,13 +115,14 @@ impl Printer<'_> {
fn print_visibility(&mut self, vis: RawVisibilityId) {
match &self.tree[vis] {
RawVisibility::Module(path, _expl) => {
w!(self, "pub({}) ", path.display(self.db.upcast()))
w!(self, "pub({}) ", path.display(self.db.upcast(), self.edition))
}
RawVisibility::Public => w!(self, "pub "),
};
}
fn print_fields(&mut self, parent: FieldParent, kind: FieldsShape, fields: &[Field]) {
let edition = self.edition;
match kind {
FieldsShape::Record => {
self.whitespace();
@ -131,7 +134,7 @@ impl Printer<'_> {
"\n",
);
this.print_visibility(*visibility);
w!(this, "{}: ", name.display(self.db.upcast()));
w!(this, "{}: ", name.display(self.db.upcast(), edition));
this.print_type_ref(type_ref);
wln!(this, ",");
}
@ -147,7 +150,7 @@ impl Printer<'_> {
"\n",
);
this.print_visibility(*visibility);
w!(this, "{}: ", name.display(self.db.upcast()));
w!(this, "{}: ", name.display(self.db.upcast(), edition));
this.print_type_ref(type_ref);
wln!(this, ",");
}
@ -186,20 +189,20 @@ impl Printer<'_> {
fn print_use_tree(&mut self, use_tree: &UseTree) {
match &use_tree.kind {
UseTreeKind::Single { path, alias } => {
w!(self, "{}", path.display(self.db.upcast()));
w!(self, "{}", path.display(self.db.upcast(), self.edition));
if let Some(alias) = alias {
w!(self, " as {}", alias);
w!(self, " as {}", alias.display(self.edition));
}
}
UseTreeKind::Glob { path } => {
if let Some(path) = path {
w!(self, "{}::", path.display(self.db.upcast()));
w!(self, "{}::", path.display(self.db.upcast(), self.edition));
}
w!(self, "*");
}
UseTreeKind::Prefixed { prefix, list } => {
if let Some(prefix) = prefix {
w!(self, "{}::", prefix.display(self.db.upcast()));
w!(self, "{}::", prefix.display(self.db.upcast(), self.edition));
}
w!(self, "{{");
for (i, tree) in list.iter().enumerate() {
@ -229,9 +232,9 @@ impl Printer<'_> {
let ExternCrate { name, alias, visibility, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "extern crate {}", name.display(self.db.upcast()));
w!(self, "extern crate {}", name.display(self.db.upcast(), self.edition));
if let Some(alias) = alias {
w!(self, " as {}", alias);
w!(self, " as {}", alias.display(self.edition));
}
wln!(self, ";");
}
@ -278,7 +281,7 @@ impl Printer<'_> {
if let Some(abi) = abi {
w!(self, "extern \"{}\" ", abi);
}
w!(self, "fn {}", name.display(self.db.upcast()));
w!(self, "fn {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(explicit_generic_params, it.into());
w!(self, "(");
if !params.is_empty() {
@ -314,7 +317,7 @@ impl Printer<'_> {
&self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "struct {}", name.display(self.db.upcast()));
w!(self, "struct {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(generic_params, it.into());
self.print_fields_and_where_clause(
FieldParent::Struct(it),
@ -332,7 +335,7 @@ impl Printer<'_> {
let Union { name, visibility, fields, generic_params, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "union {}", name.display(self.db.upcast()));
w!(self, "union {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(generic_params, it.into());
self.print_fields_and_where_clause(
FieldParent::Union(it),
@ -346,15 +349,16 @@ impl Printer<'_> {
let Enum { name, visibility, variants, generic_params, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "enum {}", name.display(self.db.upcast()));
w!(self, "enum {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(generic_params, it.into());
self.print_where_clause_and_opening_brace(generic_params);
let edition = self.edition;
self.indented(|this| {
for variant in FileItemTreeId::range_iter(variants.clone()) {
let Variant { name, fields, shape: kind, ast_id } = &this.tree[variant];
this.print_ast_id(ast_id.erase());
this.print_attrs_of(variant, "\n");
w!(this, "{}", name.display(self.db.upcast()));
w!(this, "{}", name.display(self.db.upcast(), edition));
this.print_fields(FieldParent::Variant(variant), *kind, fields);
wln!(this, ",");
}
@ -367,7 +371,7 @@ impl Printer<'_> {
self.print_visibility(*visibility);
w!(self, "const ");
match name {
Some(name) => w!(self, "{}", name.display(self.db.upcast())),
Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
None => w!(self, "_"),
}
w!(self, ": ");
@ -382,7 +386,7 @@ impl Printer<'_> {
if *mutable {
w!(self, "mut ");
}
w!(self, "{}: ", name.display(self.db.upcast()));
w!(self, "{}: ", name.display(self.db.upcast(), self.edition));
self.print_type_ref(type_ref);
w!(self, " = _;");
wln!(self);
@ -398,7 +402,7 @@ impl Printer<'_> {
if *is_auto {
w!(self, "auto ");
}
w!(self, "trait {}", name.display(self.db.upcast()));
w!(self, "trait {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(generic_params, it.into());
self.print_where_clause_and_opening_brace(generic_params);
self.indented(|this| {
@ -412,7 +416,7 @@ impl Printer<'_> {
let TraitAlias { name, visibility, generic_params, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "trait {}", name.display(self.db.upcast()));
w!(self, "trait {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(generic_params, it.into());
w!(self, " = ");
self.print_where_clause(generic_params);
@ -457,7 +461,7 @@ impl Printer<'_> {
&self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "type {}", name.display(self.db.upcast()));
w!(self, "type {}", name.display(self.db.upcast(), self.edition));
self.print_generic_params(generic_params, it.into());
if !bounds.is_empty() {
w!(self, ": ");
@ -475,7 +479,7 @@ impl Printer<'_> {
let Mod { name, visibility, kind, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
w!(self, "mod {}", name.display(self.db.upcast()));
w!(self, "mod {}", name.display(self.db.upcast(), self.edition));
match kind {
ModKind::Inline { items } => {
w!(self, " {{");
@ -500,18 +504,22 @@ impl Printer<'_> {
ctxt,
expand_to
);
wln!(self, "{}!(...);", path.display(self.db.upcast()));
wln!(self, "{}!(...);", path.display(self.db.upcast(), self.edition));
}
ModItem::MacroRules(it) => {
let MacroRules { name, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
wln!(self, "macro_rules! {} {{ ... }}", name.display(self.db.upcast()));
wln!(
self,
"macro_rules! {} {{ ... }}",
name.display(self.db.upcast(), self.edition)
);
}
ModItem::Macro2(it) => {
let Macro2 { name, visibility, ast_id } = &self.tree[it];
self.print_ast_id(ast_id.erase());
self.print_visibility(*visibility);
wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast()));
wln!(self, "macro {} {{ ... }}", name.display(self.db.upcast(), self.edition));
}
}
@ -519,15 +527,18 @@ impl Printer<'_> {
}
fn print_type_ref(&mut self, type_ref: &TypeRef) {
print_type_ref(self.db, type_ref, self).unwrap();
let edition = self.edition;
print_type_ref(self.db, type_ref, self, edition).unwrap();
}
fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) {
print_type_bounds(self.db, bounds, self).unwrap();
let edition = self.edition;
print_type_bounds(self.db, bounds, self, edition).unwrap();
}
fn print_path(&mut self, path: &Path) {
print_path(self.db, path, self).unwrap();
let edition = self.edition;
print_path(self.db, path, self, edition).unwrap();
}
fn print_generic_params(&mut self, params: &GenericParams, parent: GenericModItem) {
@ -543,7 +554,7 @@ impl Printer<'_> {
}
first = false;
self.print_attrs_of(AttrOwner::LifetimeParamData(parent, idx), " ");
w!(self, "{}", lt.name.display(self.db.upcast()));
w!(self, "{}", lt.name.display(self.db.upcast(), self.edition));
}
for (idx, x) in params.iter_type_or_consts() {
if !first {
@ -553,11 +564,11 @@ impl Printer<'_> {
self.print_attrs_of(AttrOwner::TypeOrConstParamData(parent, idx), " ");
match x {
TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
Some(name) => w!(self, "{}", name.display(self.db.upcast())),
Some(name) => w!(self, "{}", name.display(self.db.upcast(), self.edition)),
None => w!(self, "_anon_{}", idx.into_raw()),
},
TypeOrConstParamData::ConstParamData(konst) => {
w!(self, "const {}: ", konst.name.display(self.db.upcast()));
w!(self, "const {}: ", konst.name.display(self.db.upcast(), self.edition));
self.print_type_ref(&konst.ty);
}
}
@ -580,6 +591,7 @@ impl Printer<'_> {
}
w!(self, "\nwhere");
let edition = self.edition;
self.indented(|this| {
for (i, pred) in params.where_predicates().enumerate() {
if i != 0 {
@ -592,8 +604,8 @@ impl Printer<'_> {
wln!(
this,
"{}: {},",
target.name.display(self.db.upcast()),
bound.name.display(self.db.upcast())
target.name.display(self.db.upcast(), edition),
bound.name.display(self.db.upcast(), edition)
);
continue;
}
@ -603,7 +615,7 @@ impl Printer<'_> {
if i != 0 {
w!(this, ", ");
}
w!(this, "{}", lt.display(self.db.upcast()));
w!(this, "{}", lt.display(self.db.upcast(), edition));
}
w!(this, "> ");
(target, bound)
@ -613,7 +625,7 @@ impl Printer<'_> {
match target {
WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
Some(name) => w!(this, "{}", name.display(self.db.upcast())),
Some(name) => w!(this, "{}", name.display(self.db.upcast(), edition)),
None => w!(this, "_anon_{}", id.into_raw()),
},
}


@ -1,4 +1,5 @@
use expect_test::{expect, Expect};
use span::Edition;
use test_fixture::WithFixture;
use crate::{db::DefDatabase, test_db::TestDB};
@ -6,7 +7,7 @@ use crate::{db::DefDatabase, test_db::TestDB};
fn check(ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let item_tree = db.file_item_tree(file_id.into());
let pretty = item_tree.pretty_print(&db);
let pretty = item_tree.pretty_print(&db, Edition::CURRENT);
expect.assert_eq(&pretty);
}


@ -74,6 +74,13 @@ impl LangItemTarget {
_ => None,
}
}
pub fn as_type_alias(self) -> Option<TypeAliasId> {
match self {
LangItemTarget::TypeAlias(id) => Some(id),
_ => None,
}
}
}
#[derive(Default, Debug, Clone, PartialEq, Eq)]
@ -117,11 +124,19 @@ impl LangItems {
match def {
ModuleDefId::TraitId(trait_) => {
lang_items.collect_lang_item(db, trait_, LangItemTarget::Trait);
db.trait_data(trait_).items.iter().for_each(|&(_, assoc_id)| {
if let AssocItemId::FunctionId(f) = assoc_id {
lang_items.collect_lang_item(db, f, LangItemTarget::Function);
}
});
db.trait_data(trait_).items.iter().for_each(
|&(_, assoc_id)| match assoc_id {
AssocItemId::FunctionId(f) => {
lang_items.collect_lang_item(db, f, LangItemTarget::Function);
}
AssocItemId::TypeAliasId(alias) => lang_items.collect_lang_item(
db,
alias,
LangItemTarget::TypeAlias,
),
AssocItemId::ConstId(_) => {}
},
);
}
ModuleDefId::AdtId(AdtId::EnumId(e)) => {
lang_items.collect_lang_item(db, e, LangItemTarget::EnumId);
@ -453,6 +468,7 @@ language_item_table! {
Context, sym::Context, context, Target::Struct, GenericRequirement::None;
FuturePoll, sym::poll, future_poll_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
FutureOutput, sym::future_output, future_output, Target::TypeAlias, GenericRequirement::None;
Option, sym::Option, option_type, Target::Enum, GenericRequirement::None;
OptionSome, sym::Some, option_some_variant, Target::Variant, GenericRequirement::None;
@ -467,6 +483,7 @@ language_item_table! {
IntoFutureIntoFuture, sym::into_future, into_future_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
IntoIterIntoIter, sym::into_iter, into_iter_fn, Target::Method(MethodKind::Trait { body: false }), GenericRequirement::None;
IteratorNext, sym::next, next_fn, Target::Method(MethodKind::Trait { body: false}), GenericRequirement::None;
Iterator, sym::iterator, iterator, Target::Trait, GenericRequirement::None;
PinNewUnchecked, sym::new_unchecked, new_unchecked_fn, Target::Method(MethodKind::Inherent), GenericRequirement::None;


@ -241,7 +241,7 @@ pub type StaticLoc = AssocItemLoc<Static>;
impl_intern!(StaticId, StaticLoc, intern_static, lookup_intern_static);
impl_loc!(StaticLoc, id: Static, container: ItemContainerId);
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct TraitId(salsa::InternId);
pub type TraitLoc = ItemLoc<Trait>;
impl_intern!(TraitId, TraitLoc, intern_trait, lookup_intern_trait);


@ -25,7 +25,7 @@ use hir_expand::{
InFile, MacroFileId, MacroFileIdExt,
};
use intern::Symbol;
use span::Span;
use span::{Edition, Span};
use stdx::{format_to, format_to_acc};
use syntax::{
ast::{self, edit::IndentLevel},
@ -257,21 +257,25 @@ fn pretty_print_macro_expansion(
(T![;] | T!['{'] | T!['}'], _) => "\n",
(_, T!['}']) => "\n",
(IDENT | LIFETIME_IDENT, IDENT | LIFETIME_IDENT) => " ",
_ if prev_kind.is_keyword() && curr_kind.is_keyword() => " ",
(IDENT, _) if curr_kind.is_keyword() => " ",
(_, IDENT) if prev_kind.is_keyword() => " ",
_ if prev_kind.is_keyword(Edition::CURRENT)
&& curr_kind.is_keyword(Edition::CURRENT) =>
{
" "
}
(IDENT, _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
(_, IDENT) if prev_kind.is_keyword(Edition::CURRENT) => " ",
(T![>], IDENT) => " ",
(T![>], _) if curr_kind.is_keyword() => " ",
(T![>], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
(T![->], _) | (_, T![->]) => " ",
(T![&&], _) | (_, T![&&]) => " ",
(T![,], _) => " ",
(T![:], IDENT | T!['(']) => " ",
(T![:], _) if curr_kind.is_keyword() => " ",
(T![:], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
(T![fn], T!['(']) => "",
(T![']'], _) if curr_kind.is_keyword() => " ",
(T![']'], _) if curr_kind.is_keyword(Edition::CURRENT) => " ",
(T![']'], T![#]) => "\n",
(T![Self], T![::]) => "",
_ if prev_kind.is_keyword() => " ",
_ if prev_kind.is_keyword(Edition::CURRENT) => " ",
_ => "",
};


@ -328,6 +328,10 @@ impl DefMap {
/// The module id of a crate or block root.
pub const ROOT: LocalModuleId = LocalModuleId::from_raw(la_arena::RawIdx::from_u32(0));
pub fn edition(&self) -> Edition {
self.data.edition
}
pub(crate) fn crate_def_map_query(db: &dyn DefDatabase, crate_id: CrateId) -> Arc<DefMap> {
let crate_graph = db.crate_graph();
let krate = &crate_graph[crate_id];
@ -550,7 +554,7 @@ impl DefMap {
for (name, child) in
map.modules[module].children.iter().sorted_by(|a, b| Ord::cmp(&a.0, &b.0))
{
let path = format!("{path}::{}", name.display(db.upcast()));
let path = format!("{path}::{}", name.display(db.upcast(), Edition::LATEST));
buf.push('\n');
go(buf, db, map, &path, *child);
}


@ -548,7 +548,7 @@ impl DefCollector<'_> {
types => {
tracing::debug!(
"could not resolve prelude path `{}` to module (resolved to {:?})",
path.display(self.db.upcast()),
path.display(self.db.upcast(), Edition::LATEST),
types
);
}
@ -768,7 +768,7 @@ impl DefCollector<'_> {
}
fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast()))
let _p = tracing::info_span!("resolve_import", import_path = %import.path.display(self.db.upcast(), Edition::LATEST))
.entered();
tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.data.edition);
match import.source {
@ -1606,7 +1606,11 @@ impl ModCollector<'_, '_> {
// Prelude module is always considered to be `#[macro_use]`.
if let Some((prelude_module, _use)) = self.def_collector.def_map.prelude {
if prelude_module.krate != krate && is_crate_root {
// Don't insert macros from the prelude into blocks, as they can be shadowed by other macros.
if prelude_module.krate != krate
&& is_crate_root
&& self.def_collector.def_map.block.is_none()
{
cov_mark::hit!(prelude_is_macro_use);
self.def_collector.import_macros_from_extern_crate(
prelude_module.krate,
@ -2151,7 +2155,7 @@ impl ModCollector<'_, '_> {
}
tracing::debug!(
"non-builtin attribute {}",
attr.path.display(self.def_collector.db.upcast())
attr.path.display(self.def_collector.db.upcast(), Edition::LATEST)
);
let ast_id = AstIdWithPath::new(
@ -2286,8 +2290,8 @@ impl ModCollector<'_, '_> {
stdx::always!(
name == mac.name,
"built-in macro {} has #[rustc_builtin_macro] which declares different name {}",
mac.name.display(self.def_collector.db.upcast()),
name.display(self.def_collector.db.upcast())
mac.name.display(self.def_collector.db.upcast(), Edition::LATEST),
name.display(self.def_collector.db.upcast(), Edition::LATEST),
);
helpers_opt = Some(helpers);
}


@ -1,6 +1,7 @@
use expect_test::expect;
use itertools::Itertools;
use span::Edition;
use super::*;
@ -1100,7 +1101,7 @@ pub fn derive_macro_2(_item: TokenStream) -> TokenStream {
assert_eq!(def_map.data.exported_derives.len(), 1);
match def_map.data.exported_derives.values().next() {
Some(helpers) => match &**helpers {
[attr] => assert_eq!(attr.display(&db).to_string(), "helper_attr"),
[attr] => assert_eq!(attr.display(&db, Edition::CURRENT).to_string(), "helper_attr"),
_ => unreachable!(),
},
_ => unreachable!(),
@ -1456,7 +1457,7 @@ fn proc_attr(a: TokenStream, b: TokenStream) -> TokenStream { a }
let actual = def_map
.macro_use_prelude
.keys()
.map(|name| name.display(&db).to_string())
.map(|name| name.display(&db, Edition::CURRENT).to_string())
.sorted()
.join("\n");


@ -144,14 +144,14 @@ pub struct Baz;
crate::r#async
Bar: t v
foo: t
r#async: t
crate::r#async::foo
Foo: t v
foo: t
crate::r#async::r#async
Baz: t v
crate::r#async::foo
Foo: t v
"#]],
);
}


@ -13,7 +13,8 @@ use crate::{
};
use hir_expand::name::Name;
use intern::Interned;
use syntax::{ast, ToSmolStr};
use span::Edition;
use syntax::ast;
pub use hir_expand::mod_path::{path, ModPath, PathKind};
@ -25,11 +26,21 @@ pub enum ImportAlias {
Alias(Name),
}
impl Display for ImportAlias {
impl ImportAlias {
pub fn display(&self, edition: Edition) -> impl Display + '_ {
ImportAliasDisplay { value: self, edition }
}
}
struct ImportAliasDisplay<'a> {
value: &'a ImportAlias,
edition: Edition,
}
impl Display for ImportAliasDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
match self.value {
ImportAlias::Underscore => f.write_str("_"),
ImportAlias::Alias(name) => f.write_str(&name.display_no_db().to_smolstr()),
ImportAlias::Alias(name) => Display::fmt(&name.display_no_db(self.edition), f),
}
}
}


@ -5,6 +5,7 @@ use std::fmt::{self, Write};
use hir_expand::mod_path::PathKind;
use intern::Interned;
use itertools::Itertools;
use span::Edition;
use crate::{
db::DefDatabase,
@ -13,46 +14,51 @@ use crate::{
type_ref::{Mutability, TraitBoundModifier, TypeBound, TypeRef},
};
pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write) -> fmt::Result {
pub(crate) fn print_path(
db: &dyn DefDatabase,
path: &Path,
buf: &mut dyn Write,
edition: Edition,
) -> fmt::Result {
if let Path::LangItem(it, s) = path {
write!(buf, "builtin#lang(")?;
match *it {
LangItemTarget::ImplDef(it) => write!(buf, "{it:?}")?,
LangItemTarget::EnumId(it) => {
write!(buf, "{}", db.enum_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.enum_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::Function(it) => {
write!(buf, "{}", db.function_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.function_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::Static(it) => {
write!(buf, "{}", db.static_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.static_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::Struct(it) => {
write!(buf, "{}", db.struct_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.struct_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::Union(it) => {
write!(buf, "{}", db.union_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.union_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::TypeAlias(it) => {
write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.type_alias_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::Trait(it) => {
write!(buf, "{}", db.trait_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.trait_data(it).name.display(db.upcast(), edition))?
}
LangItemTarget::EnumVariant(it) => {
write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast()))?
write!(buf, "{}", db.enum_variant_data(it).name.display(db.upcast(), edition))?
}
}
if let Some(s) = s {
write!(buf, "::{}", s.display(db.upcast()))?;
write!(buf, "::{}", s.display(db.upcast(), edition))?;
}
return write!(buf, ")");
}
match path.type_anchor() {
Some(anchor) => {
write!(buf, "<")?;
print_type_ref(db, anchor, buf)?;
print_type_ref(db, anchor, buf, edition)?;
write!(buf, ">::")?;
}
None => match path.kind() {
@ -78,10 +84,10 @@ pub(crate) fn print_path(db: &dyn DefDatabase, path: &Path, buf: &mut dyn Write)
write!(buf, "::")?;
}
write!(buf, "{}", segment.name.display(db.upcast()))?;
write!(buf, "{}", segment.name.display(db.upcast(), edition))?;
if let Some(generics) = segment.args_and_bindings {
write!(buf, "::<")?;
print_generic_args(db, generics, buf)?;
print_generic_args(db, generics, buf, edition)?;
write!(buf, ">")?;
}
@ -94,12 +100,13 @@ pub(crate) fn print_generic_args(
db: &dyn DefDatabase,
generics: &GenericArgs,
buf: &mut dyn Write,
edition: Edition,
) -> fmt::Result {
let mut first = true;
let args = if generics.has_self_type {
let (self_ty, args) = generics.args.split_first().unwrap();
write!(buf, "Self=")?;
print_generic_arg(db, self_ty, buf)?;
print_generic_arg(db, self_ty, buf, edition)?;
first = false;
args
} else {
@ -110,21 +117,21 @@ pub(crate) fn print_generic_args(
write!(buf, ", ")?;
}
first = false;
print_generic_arg(db, arg, buf)?;
print_generic_arg(db, arg, buf, edition)?;
}
for binding in generics.bindings.iter() {
if !first {
write!(buf, ", ")?;
}
first = false;
write!(buf, "{}", binding.name.display(db.upcast()))?;
write!(buf, "{}", binding.name.display(db.upcast(), edition))?;
if !binding.bounds.is_empty() {
write!(buf, ": ")?;
print_type_bounds(db, &binding.bounds, buf)?;
print_type_bounds(db, &binding.bounds, buf, edition)?;
}
if let Some(ty) = &binding.type_ref {
write!(buf, " = ")?;
print_type_ref(db, ty, buf)?;
print_type_ref(db, ty, buf, edition)?;
}
}
Ok(())
@ -134,11 +141,12 @@ pub(crate) fn print_generic_arg(
db: &dyn DefDatabase,
arg: &GenericArg,
buf: &mut dyn Write,
edition: Edition,
) -> fmt::Result {
match arg {
GenericArg::Type(ty) => print_type_ref(db, ty, buf),
GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast())),
GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast())),
GenericArg::Type(ty) => print_type_ref(db, ty, buf, edition),
GenericArg::Const(c) => write!(buf, "{}", c.display(db.upcast(), edition)),
GenericArg::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition)),
}
}
@ -146,6 +154,7 @@ pub(crate) fn print_type_ref(
db: &dyn DefDatabase,
type_ref: &TypeRef,
buf: &mut dyn Write,
edition: Edition,
) -> fmt::Result {
// FIXME: deduplicate with `HirDisplay` impl
match type_ref {
@ -157,18 +166,18 @@ pub(crate) fn print_type_ref(
if i != 0 {
write!(buf, ", ")?;
}
print_type_ref(db, field, buf)?;
print_type_ref(db, field, buf, edition)?;
}
write!(buf, ")")?;
}
TypeRef::Path(path) => print_path(db, path, buf)?,
TypeRef::Path(path) => print_path(db, path, buf, edition)?,
TypeRef::RawPtr(pointee, mtbl) => {
let mtbl = match mtbl {
Mutability::Shared => "*const",
Mutability::Mut => "*mut",
};
write!(buf, "{mtbl} ")?;
print_type_ref(db, pointee, buf)?;
print_type_ref(db, pointee, buf, edition)?;
}
TypeRef::Reference(pointee, lt, mtbl) => {
let mtbl = match mtbl {
@ -177,19 +186,19 @@ pub(crate) fn print_type_ref(
};
write!(buf, "&")?;
if let Some(lt) = lt {
write!(buf, "{} ", lt.name.display(db.upcast()))?;
write!(buf, "{} ", lt.name.display(db.upcast(), edition))?;
}
write!(buf, "{mtbl}")?;
print_type_ref(db, pointee, buf)?;
print_type_ref(db, pointee, buf, edition)?;
}
TypeRef::Array(elem, len) => {
write!(buf, "[")?;
print_type_ref(db, elem, buf)?;
write!(buf, "; {}]", len.display(db.upcast()))?;
print_type_ref(db, elem, buf, edition)?;
write!(buf, "; {}]", len.display(db.upcast(), edition))?;
}
TypeRef::Slice(elem) => {
write!(buf, "[")?;
print_type_ref(db, elem, buf)?;
print_type_ref(db, elem, buf, edition)?;
write!(buf, "]")?;
}
TypeRef::Fn(args_and_ret, varargs, is_unsafe, abi) => {
@ -208,7 +217,7 @@ pub(crate) fn print_type_ref(
if i != 0 {
write!(buf, ", ")?;
}
print_type_ref(db, typeref, buf)?;
print_type_ref(db, typeref, buf, edition)?;
}
if *varargs {
if !args.is_empty() {
@ -217,7 +226,7 @@ pub(crate) fn print_type_ref(
write!(buf, "...")?;
}
write!(buf, ") -> ")?;
print_type_ref(db, return_type, buf)?;
print_type_ref(db, return_type, buf, edition)?;
}
TypeRef::Macro(_ast_id) => {
write!(buf, "<macro>")?;
@ -225,11 +234,11 @@ pub(crate) fn print_type_ref(
TypeRef::Error => write!(buf, "{{unknown}}")?,
TypeRef::ImplTrait(bounds) => {
write!(buf, "impl ")?;
print_type_bounds(db, bounds, buf)?;
print_type_bounds(db, bounds, buf, edition)?;
}
TypeRef::DynTrait(bounds) => {
write!(buf, "dyn ")?;
print_type_bounds(db, bounds, buf)?;
print_type_bounds(db, bounds, buf, edition)?;
}
}
@ -240,6 +249,7 @@ pub(crate) fn print_type_bounds(
db: &dyn DefDatabase,
bounds: &[Interned<TypeBound>],
buf: &mut dyn Write,
edition: Edition,
) -> fmt::Result {
for (i, bound) in bounds.iter().enumerate() {
if i != 0 {
@ -252,17 +262,17 @@ pub(crate) fn print_type_bounds(
TraitBoundModifier::None => (),
TraitBoundModifier::Maybe => write!(buf, "?")?,
}
print_path(db, path, buf)?;
print_path(db, path, buf, edition)?;
}
TypeBound::ForLifetime(lifetimes, path) => {
write!(
buf,
"for<{}> ",
lifetimes.iter().map(|it| it.display(db.upcast())).format(", ")
lifetimes.iter().map(|it| it.display(db.upcast(), edition)).format(", ")
)?;
print_path(db, path, buf)?;
print_path(db, path, buf, edition)?;
}
TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast()))?,
TypeBound::Lifetime(lt) => write!(buf, "{}", lt.name.display(db.upcast(), edition))?,
TypeBound::Error => write!(buf, "{{unknown}}")?,
}
}


@ -176,9 +176,10 @@ fn eager_macro_recur(
Some(path) => match macro_resolver(&path) {
Some(def) => def,
None => {
let edition = db.crate_graph()[krate].edition;
error = Some(ExpandError::other(
span_map.span_at(call.syntax().text_range().start()),
format!("unresolved macro {}", path.display(db)),
format!("unresolved macro {}", path.display(db, edition)),
));
offset += call.syntax().text_range().len();
continue;


@ -461,3 +461,12 @@ impl<N: AstNode> InFile<N> {
Some(InRealFile::new(file_id, value))
}
}
impl<T> InFile<T> {
pub fn into_real_file(self) -> Result<InRealFile<T>, InFile<T>> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => Ok(InRealFile { file_id, value: self.value }),
HirFileIdRepr::MacroFile(_) => Err(self),
}
}
}


@ -192,7 +192,7 @@ impl ExpandErrorKind {
("overflow expanding the original macro".to_owned(), true)
}
ExpandErrorKind::Other(e) => ((**e).to_owned(), true),
ExpandErrorKind::ProcMacroPanic(e) => ((**e).to_owned(), true),
ExpandErrorKind::ProcMacroPanic(e) => (format!("proc-macro panicked: {e}"), true),
}
}
}
@ -279,6 +279,7 @@ pub enum MacroCallKind {
}
pub trait HirFileIdExt {
fn edition(self, db: &dyn ExpandDatabase) -> Edition;
/// Returns the original file of this macro call hierarchy.
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId;
@ -293,6 +294,12 @@ pub trait HirFileIdExt {
}
impl HirFileIdExt for HirFileId {
fn edition(self, db: &dyn ExpandDatabase) -> Edition {
match self.repr() {
HirFileIdRepr::FileId(file_id) => file_id.edition(),
HirFileIdRepr::MacroFile(m) => m.macro_call_id.lookup(db).def.edition,
}
}
fn original_file(self, db: &dyn ExpandDatabase) -> EditionedFileId {
let mut file_id = self;
loop {


@ -14,7 +14,7 @@ use crate::{
use base_db::CrateId;
use intern::sym;
use smallvec::SmallVec;
use span::SyntaxContextId;
use span::{Edition, SyntaxContextId};
use syntax::{ast, AstNode};
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -140,8 +140,12 @@ impl ModPath {
UnescapedModPath(self)
}
pub fn display<'a>(&'a self, db: &'a dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
Display { db, path: self }
pub fn display<'a>(
&'a self,
db: &'a dyn crate::db::ExpandDatabase,
edition: Edition,
) -> impl fmt::Display + 'a {
Display { db, path: self, edition }
}
}
@ -154,11 +158,12 @@ impl Extend<Name> for ModPath {
struct Display<'a> {
db: &'a dyn ExpandDatabase,
path: &'a ModPath,
edition: Edition,
}
impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path, f, true)
display_fmt_path(self.db, self.path, f, Escape::IfNeeded(self.edition))
}
}
@ -169,7 +174,7 @@ struct UnescapedDisplay<'a> {
impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
display_fmt_path(self.db, self.path.0, f, false)
display_fmt_path(self.db, self.path.0, f, Escape::No)
}
}
@ -178,11 +183,17 @@ impl From<Name> for ModPath {
ModPath::from_segments(PathKind::Plain, iter::once(name))
}
}
enum Escape {
No,
IfNeeded(Edition),
}
fn display_fmt_path(
db: &dyn ExpandDatabase,
path: &ModPath,
f: &mut fmt::Formatter<'_>,
escaped: bool,
escaped: Escape,
) -> fmt::Result {
let mut first_segment = true;
let mut add_segment = |s| -> fmt::Result {
@ -210,10 +221,9 @@ fn display_fmt_path(
f.write_str("::")?;
}
first_segment = false;
if escaped {
segment.display(db).fmt(f)?;
} else {
segment.unescaped().display(db).fmt(f)?;
match escaped {
Escape::IfNeeded(edition) => segment.display(db, edition).fmt(f)?,
Escape::No => segment.unescaped().display(db).fmt(f)?,
}
}
Ok(())
@ -322,9 +332,11 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::self_ => PathKind::SELF,
tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::super_ => {
let mut deg = 1;
while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw })) = leaves.next() {
while let Some(tt::Leaf::Ident(tt::Ident { sym: text, span, is_raw: _ })) =
leaves.next()
{
if *text != sym::super_ {
segments.push(Name::new_symbol_maybe_raw(text.clone(), *is_raw, span.ctx));
segments.push(Name::new_symbol(text.clone(), span.ctx));
break;
}
deg += 1;
@ -333,19 +345,13 @@ fn convert_path_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree]) -> Option<ModP
}
tt::Leaf::Ident(tt::Ident { sym: text, .. }) if *text == sym::crate_ => PathKind::Crate,
tt::Leaf::Ident(ident) => {
segments.push(Name::new_symbol_maybe_raw(
ident.sym.clone(),
ident.is_raw,
ident.span.ctx,
));
segments.push(Name::new_symbol(ident.sym.clone(), ident.span.ctx));
PathKind::Plain
}
_ => return None,
};
segments.extend(leaves.filter_map(|leaf| match leaf {
::tt::Leaf::Ident(ident) => {
Some(Name::new_symbol_maybe_raw(ident.sym.clone(), ident.is_raw, ident.span.ctx))
}
::tt::Leaf::Ident(ident) => Some(Name::new_symbol(ident.sym.clone(), ident.span.ctx)),
_ => None,
}));
Some(ModPath { kind, segments })
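The hunks above replace the `escaped: bool` flag of `display_fmt_path` with an `Escape` enum so the escaping case can carry the edition it escapes for. A standalone sketch of that shape, with a stand-in `Edition` and keyword check rather than rust-analyzer's real API:

```rust
// Sketch only: `Escape::IfNeeded` carries the edition, because whether a
// segment needs an `r#` prefix depends on which edition's keyword list applies.
#[derive(Clone, Copy, PartialEq)]
enum Edition {
    Edition2021,
    Edition2024,
}

enum Escape {
    No,
    IfNeeded(Edition),
}

// Stand-in for the real keyword check: `gen` is reserved only in edition 2024.
fn needs_raw(segment: &str, edition: Edition) -> bool {
    segment == "gen" && edition == Edition::Edition2024
}

fn render_segment(segment: &str, escape: Escape) -> String {
    match escape {
        Escape::No => segment.to_owned(),
        Escape::IfNeeded(edition) if needs_raw(segment, edition) => format!("r#{segment}"),
        Escape::IfNeeded(_) => segment.to_owned(),
    }
}

fn main() {
    assert_eq!(render_segment("gen", Escape::IfNeeded(Edition::Edition2021)), "gen");
    assert_eq!(render_segment("gen", Escape::IfNeeded(Edition::Edition2024)), "r#gen");
    assert_eq!(render_segment("gen", Escape::No), "gen");
}
```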


@ -3,22 +3,22 @@
use std::fmt;
use intern::{sym, Symbol};
use span::SyntaxContextId;
use syntax::{ast, utils::is_raw_identifier};
use span::{Edition, SyntaxContextId};
use syntax::ast;
use syntax::utils::is_raw_identifier;
/// `Name` is a wrapper around string, which is used in hir for both references
/// and declarations. In theory, names should also carry hygiene info, but we are
/// not there yet!
///
/// Note that `Name` holds and prints escaped name i.e. prefixed with "r#" when it
/// is a raw identifier. Use [`unescaped()`][Name::unescaped] when you need the
/// name without "r#".
/// Note that the rawness (`r#`) of names does not depend on whether they are written raw.
/// This is because we want to show (in completions etc.) names as raw depending on the needs
/// of the current crate, for example if it is edition 2021 complete `gen` even if the defining
/// crate is in edition 2024 and wrote `r#gen`, and the opposite holds as well.
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Name {
symbol: Symbol,
ctx: (),
// FIXME: We should probably encode rawness as a property here instead, once we have hygiene
// in here we've got 4 bytes of padding to fill anyways
}
impl fmt::Debug for Name {
@ -42,6 +42,7 @@ impl PartialOrd for Name {
}
}
// No need to strip `r#`, all comparisons are done against well-known symbols.
impl PartialEq<Symbol> for Name {
fn eq(&self, sym: &Symbol) -> bool {
self.symbol == *sym
@ -55,16 +56,16 @@ impl PartialEq<Name> for Symbol {
}
/// Wrapper of `Name` to print the name without "r#" even when it is a raw identifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct UnescapedName<'a>(&'a Name);
impl UnescapedName<'_> {
pub fn display(&self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + '_ {
impl<'a> UnescapedName<'a> {
pub fn display(self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
_ = db;
UnescapedDisplay { name: self }
}
#[doc(hidden)]
pub fn display_no_db(&self) -> impl fmt::Display + '_ {
pub fn display_no_db(self) -> impl fmt::Display + 'a {
UnescapedDisplay { name: self }
}
}
@ -77,16 +78,9 @@ impl Name {
Name { symbol: Symbol::intern(text), ctx: () }
}
pub fn new(text: &str, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Name {
pub fn new(text: &str, ctx: SyntaxContextId) -> Name {
_ = ctx;
Name {
symbol: if raw.yes() {
Symbol::intern(&format!("{}{text}", raw.as_str()))
} else {
Symbol::intern(text)
},
ctx: (),
}
Self::new_text(text)
}
pub fn new_tuple_field(idx: usize) -> Name {
@ -97,23 +91,9 @@ impl Name {
Name { symbol: Symbol::intern(lt.text().as_str()), ctx: () }
}
/// Shortcut to create a name from a string literal.
fn new_ref(text: &str) -> Name {
Name { symbol: Symbol::intern(text), ctx: () }
}
/// Resolve a name from the text of token.
fn resolve(raw_text: &str) -> Name {
match raw_text.strip_prefix("r#") {
// When `raw_text` starts with "r#" but the name does not coincide with any
// keyword, we never need the prefix so we strip it.
Some(text) if !is_raw_identifier(text) => Name::new_ref(text),
// Keywords (in the current edition) *can* be used as a name in earlier editions of
// Rust, e.g. "try" in Rust 2015. Even in such cases, we keep track of them in their
// escaped form.
None if is_raw_identifier(raw_text) => Name::new_text(&format!("r#{}", raw_text)),
_ => Name::new_text(raw_text),
}
Name::new_text(raw_text.trim_start_matches("r#"))
}
/// A fake name for things missing in the source code.
@ -159,19 +139,23 @@ impl Name {
UnescapedName(self)
}
pub fn is_escaped(&self) -> bool {
self.symbol.as_str().starts_with("r#")
pub fn is_escaped(&self, edition: Edition) -> bool {
is_raw_identifier(self.symbol.as_str(), edition)
}
pub fn display<'a>(&'a self, db: &dyn crate::db::ExpandDatabase) -> impl fmt::Display + 'a {
pub fn display<'a>(
&'a self,
db: &dyn crate::db::ExpandDatabase,
edition: Edition,
) -> impl fmt::Display + 'a {
_ = db;
Display { name: self }
self.display_no_db(edition)
}
// FIXME: Remove this
#[doc(hidden)]
pub fn display_no_db(&self) -> impl fmt::Display + '_ {
Display { name: self }
pub fn display_no_db(&self, edition: Edition) -> impl fmt::Display + '_ {
Display { name: self, needs_escaping: is_raw_identifier(self.symbol.as_str(), edition) }
}
pub fn symbol(&self) -> &Symbol {
@ -183,39 +167,39 @@ impl Name {
Self { symbol, ctx: () }
}
pub fn new_symbol_maybe_raw(sym: Symbol, raw: tt::IdentIsRaw, ctx: SyntaxContextId) -> Self {
if raw.no() {
Self { symbol: sym, ctx: () }
} else {
Name::new(sym.as_str(), raw, ctx)
}
}
// FIXME: This needs to go once we have hygiene
pub const fn new_symbol_root(sym: Symbol) -> Self {
Self { symbol: sym, ctx: () }
}
#[inline]
pub fn eq_ident(&self, ident: &str) -> bool {
self.as_str() == ident.trim_start_matches("r#")
}
}
struct Display<'a> {
name: &'a Name,
needs_escaping: bool,
}
impl fmt::Display for Display<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if self.needs_escaping {
write!(f, "r#")?;
}
fmt::Display::fmt(self.name.symbol.as_str(), f)
}
}
struct UnescapedDisplay<'a> {
name: &'a UnescapedName<'a>,
name: UnescapedName<'a>,
}
impl fmt::Display for UnescapedDisplay<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let symbol = &self.name.0.symbol.as_str();
let text = symbol.strip_prefix("r#").unwrap_or(symbol);
fmt::Display::fmt(&text, f)
let symbol = self.name.0.symbol.as_str();
fmt::Display::fmt(symbol, f)
}
}
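
The escaping scheme above boils down to: store the name without `r#` and add the prefix at display time based on the current crate's edition. A minimal, self-contained sketch of that idea, where `Edition` and `is_raw_identifier` are simplified stand-ins rather than rust-analyzer's actual items:

// Sketch only: the name is stored unescaped; "r#" is decided per edition at display time.
#[derive(Clone, Copy, PartialEq)]
enum Edition {
    Edition2021,
    Edition2024,
}

fn is_raw_identifier(name: &str, edition: Edition) -> bool {
    // `gen` is a keyword starting with edition 2024.
    edition == Edition::Edition2024 && name == "gen"
}

fn display_name(name: &str, edition: Edition) -> String {
    if is_raw_identifier(name, edition) {
        format!("r#{name}")
    } else {
        name.to_owned()
    }
}

fn main() {
    assert_eq!(display_name("gen", Edition::Edition2021), "gen");
    assert_eq!(display_name("gen", Edition::Edition2024), "r#gen");
}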

View File

@ -29,7 +29,6 @@ chalk-ir.workspace = true
chalk-recursive.workspace = true
chalk-derive.workspace = true
la-arena.workspace = true
once_cell = "1.17.0"
triomphe.workspace = true
nohash-hasher.workspace = true
typed-arena = "2.0.1"

View File

@ -3,6 +3,8 @@
//! reference to a type with the field `bar`. This is an approximation of the
//! logic in rustc (which lives in rustc_hir_analysis/check/autoderef.rs).
use std::mem;
use chalk_ir::cast::Cast;
use hir_def::lang_item::LangItem;
use hir_expand::name::Name;
@ -37,7 +39,7 @@ pub fn autoderef(
) -> impl Iterator<Item = Ty> {
let mut table = InferenceTable::new(db, env);
let ty = table.instantiate_canonical(ty);
let mut autoderef = Autoderef::new(&mut table, ty, false);
let mut autoderef = Autoderef::new_no_tracking(&mut table, ty, false);
let mut v = Vec::new();
while let Some((ty, _steps)) = autoderef.next() {
// `ty` may contain unresolved inference variables. Since there's no chance they would be
@ -58,41 +60,76 @@ pub fn autoderef(
v.into_iter()
}
trait TrackAutoderefSteps {
fn len(&self) -> usize;
fn push(&mut self, kind: AutoderefKind, ty: &Ty);
}
impl TrackAutoderefSteps for usize {
fn len(&self) -> usize {
*self
}
fn push(&mut self, _: AutoderefKind, _: &Ty) {
*self += 1;
}
}
impl TrackAutoderefSteps for Vec<(AutoderefKind, Ty)> {
fn len(&self) -> usize {
self.len()
}
fn push(&mut self, kind: AutoderefKind, ty: &Ty) {
self.push((kind, ty.clone()));
}
}
#[derive(Debug)]
pub(crate) struct Autoderef<'a, 'db> {
pub(crate) table: &'a mut InferenceTable<'db>,
pub(crate) struct Autoderef<'table, 'db, T = Vec<(AutoderefKind, Ty)>> {
pub(crate) table: &'table mut InferenceTable<'db>,
ty: Ty,
at_start: bool,
steps: Vec<(AutoderefKind, Ty)>,
steps: T,
explicit: bool,
}
impl<'a, 'db> Autoderef<'a, 'db> {
pub(crate) fn new(table: &'a mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
impl<'table, 'db> Autoderef<'table, 'db> {
pub(crate) fn new(table: &'table mut InferenceTable<'db>, ty: Ty, explicit: bool) -> Self {
let ty = table.resolve_ty_shallow(&ty);
Autoderef { table, ty, at_start: true, steps: Vec::new(), explicit }
}
pub(crate) fn step_count(&self) -> usize {
self.steps.len()
}
pub(crate) fn steps(&self) -> &[(AutoderefKind, Ty)] {
&self.steps
}
}
impl<'table, 'db> Autoderef<'table, 'db, usize> {
pub(crate) fn new_no_tracking(
table: &'table mut InferenceTable<'db>,
ty: Ty,
explicit: bool,
) -> Self {
let ty = table.resolve_ty_shallow(&ty);
Autoderef { table, ty, at_start: true, steps: 0, explicit }
}
}
#[allow(private_bounds)]
impl<'table, 'db, T: TrackAutoderefSteps> Autoderef<'table, 'db, T> {
pub(crate) fn step_count(&self) -> usize {
self.steps.len()
}
pub(crate) fn final_ty(&self) -> Ty {
self.ty.clone()
}
}
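
The `T = Vec<(AutoderefKind, Ty)>` default together with the `usize` impl is a small pluggable-accounting pattern: the same iterator either records every step or merely counts them. A stripped-down, self-contained sketch of the same pattern with illustrative names:

// Generic step tracker: either a full log (`Vec`) or a bare counter (`usize`).
trait TrackSteps: Default {
    fn record(&mut self, step: u32);
    fn count(&self) -> usize;
}

impl TrackSteps for usize {
    fn record(&mut self, _step: u32) {
        *self += 1;
    }
    fn count(&self) -> usize {
        *self
    }
}

impl TrackSteps for Vec<u32> {
    fn record(&mut self, step: u32) {
        self.push(step);
    }
    fn count(&self) -> usize {
        self.len()
    }
}

struct Walker<T = Vec<u32>> {
    steps: T,
}

impl<T: TrackSteps> Walker<T> {
    fn new() -> Self {
        Walker { steps: T::default() }
    }
    fn step(&mut self, value: u32) {
        self.steps.record(value);
    }
}

fn main() {
    let mut tracking: Walker = Walker::new(); // records each step
    let mut counting: Walker<usize> = Walker::new(); // only counts
    tracking.step(1);
    counting.step(1);
    assert_eq!(tracking.steps.count(), counting.steps.count());
}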
impl Iterator for Autoderef<'_, '_> {
impl<T: TrackAutoderefSteps> Iterator for Autoderef<'_, '_, T> {
type Item = (Ty, usize);
#[tracing::instrument(skip_all)]
fn next(&mut self) -> Option<Self::Item> {
if self.at_start {
self.at_start = false;
if mem::take(&mut self.at_start) {
return Some((self.ty.clone(), 0));
}
@ -102,7 +139,7 @@ impl Iterator for Autoderef<'_, '_> {
let (kind, new_ty) = autoderef_step(self.table, self.ty.clone(), self.explicit)?;
self.steps.push((kind, self.ty.clone()));
self.steps.push(kind, &self.ty);
self.ty = new_ty;
Some((self.ty.clone(), self.step_count()))
@ -129,12 +166,8 @@ pub(crate) fn builtin_deref<'ty>(
match ty.kind(Interner) {
TyKind::Ref(.., ty) => Some(ty),
TyKind::Raw(.., ty) if explicit => Some(ty),
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) => {
if crate::lang_items::is_box(db, adt) {
substs.at(Interner, 0).ty(Interner)
} else {
None
}
&TyKind::Adt(chalk_ir::AdtId(adt), ref substs) if crate::lang_items::is_box(db, adt) => {
substs.at(Interner, 0).ty(Interner)
}
_ => None,
}

View File

@ -5,6 +5,7 @@ use std::{iter, ops::ControlFlow, sync::Arc};
use hir_expand::name::Name;
use intern::sym;
use span::Edition;
use tracing::debug;
use chalk_ir::{cast::Caster, fold::shift::Shift, CanonicalVarKinds};
@ -424,18 +425,19 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
fn trait_name(&self, trait_id: chalk_ir::TraitId<Interner>) -> String {
let id = from_chalk_trait_id(trait_id);
self.db.trait_data(id).name.display(self.db.upcast()).to_string()
self.db.trait_data(id).name.display(self.db.upcast(), self.edition()).to_string()
}
fn adt_name(&self, chalk_ir::AdtId(adt_id): AdtId) -> String {
let edition = self.edition();
match adt_id {
hir_def::AdtId::StructId(id) => {
self.db.struct_data(id).name.display(self.db.upcast()).to_string()
self.db.struct_data(id).name.display(self.db.upcast(), edition).to_string()
}
hir_def::AdtId::EnumId(id) => {
self.db.enum_data(id).name.display(self.db.upcast()).to_string()
self.db.enum_data(id).name.display(self.db.upcast(), edition).to_string()
}
hir_def::AdtId::UnionId(id) => {
self.db.union_data(id).name.display(self.db.upcast()).to_string()
self.db.union_data(id).name.display(self.db.upcast(), edition).to_string()
}
}
}
@ -445,7 +447,7 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
}
fn assoc_type_name(&self, assoc_ty_id: chalk_ir::AssocTypeId<Interner>) -> String {
let id = self.db.associated_ty_data(assoc_ty_id).name;
self.db.type_alias_data(id).name.display(self.db.upcast()).to_string()
self.db.type_alias_data(id).name.display(self.db.upcast(), self.edition()).to_string()
}
fn opaque_type_name(&self, opaque_ty_id: chalk_ir::OpaqueTyId<Interner>) -> String {
format!("Opaque_{}", opaque_ty_id.0)
@ -519,6 +521,10 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
}
impl<'a> ChalkContext<'a> {
fn edition(&self) -> Edition {
self.db.crate_graph()[self.krate].edition
}
fn for_trait_impls(
&self,
trait_id: hir_def::TraitId,
@ -843,7 +849,7 @@ fn impl_def_datum(
"impl {:?}: {}{} where {:?}",
chalk_id,
if negative { "!" } else { "" },
trait_ref.display(db),
trait_ref.display(db, db.crate_graph()[krate].edition),
where_clauses
);

View File

@ -1,3 +1,4 @@
use base_db::SourceDatabase;
use chalk_ir::Substitution;
use hir_def::db::DefDatabase;
use rustc_apfloat::{
@ -94,9 +95,10 @@ fn check_answer(ra_fixture: &str, check: impl FnOnce(&[u8], &MemoryMap)) {
fn pretty_print_err(e: ConstEvalError, db: TestDB) -> String {
let mut err = String::new();
let span_formatter = |file, range| format!("{file:?} {range:?}");
let edition = db.crate_graph()[db.test_crate()].edition;
match e {
ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter),
ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter),
ConstEvalError::MirLowerError(e) => e.pretty_print(&mut err, &db, span_formatter, edition),
ConstEvalError::MirEvalError(e) => e.pretty_print(&mut err, &db, span_formatter, edition),
}
.unwrap();
err
@ -110,7 +112,9 @@ fn eval_goal(db: &TestDB, file_id: EditionedFileId) -> Result<Const, ConstEvalEr
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::ConstId(x) => {
if db.const_data(x).name.as_ref()?.display(db).to_string() == "GOAL" {
if db.const_data(x).name.as_ref()?.display(db, file_id.edition()).to_string()
== "GOAL"
{
Some(x)
} else {
None
@ -243,6 +247,17 @@ fn casts() {
check_number(r#"const GOAL: i32 = -12i8 as i32"#, -12);
}
#[test]
fn floating_point_casts() {
check_number(r#"const GOAL: usize = 12i32 as f32 as usize"#, 12);
check_number(r#"const GOAL: i8 = -12i32 as f64 as i8"#, -12);
check_number(r#"const GOAL: i32 = (-1ui8 as f32 + 2u64 as f32) as i32"#, 1);
check_number(r#"const GOAL: i8 = (0./0.) as i8"#, 0);
check_number(r#"const GOAL: i8 = (1./0.) as i8"#, 127);
check_number(r#"const GOAL: i8 = (-1./0.) as i8"#, -128);
check_number(r#"const GOAL: i64 = 1e18f64 as f32 as i64"#, 999999984306749440);
}
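
These expectations follow Rust's saturating float-to-int `as` casts (NaN becomes 0, out-of-range values clamp to the target's min/max), which the const evaluator has to reproduce; the same values can be cross-checked at runtime:

// Runtime counterpart of the const-eval expectations above; `f64::NAN` and
// `f64::INFINITY` correspond to the `0./0.` and `1./0.` expressions in the tests.
fn main() {
    assert_eq!(f64::NAN as i8, 0); // NaN -> 0
    assert_eq!(f64::INFINITY as i8, 127); // +inf saturates to i8::MAX
    assert_eq!(f64::NEG_INFINITY as i8, -128); // -inf saturates to i8::MIN
    assert_eq!(1e18f64 as f32 as i64, 999_999_984_306_749_440);
}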
#[test]
fn raw_pointer_equality() {
check_number(

View File

@ -17,17 +17,18 @@ use std::fmt;
use hir_def::{
data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId,
EnumId, EnumVariantId, FunctionId, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId,
StructId, TraitId, TypeAliasId,
EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId,
StaticId, StructId, TraitId, TypeAliasId,
};
use hir_expand::{
name::{AsName, Name},
HirFileId, MacroFileIdExt,
HirFileId, HirFileIdExt, MacroFileIdExt,
};
use intern::sym;
use stdx::{always, never};
use syntax::{
ast::{self, HasName},
utils::is_raw_identifier,
AstNode, AstPtr, ToSmolStr,
};
@ -318,17 +319,21 @@ impl<'a> DeclValidator<'a> {
/// This includes function parameters except for trait implementation associated functions.
fn validate_func_body(&mut self, func: FunctionId) {
let body = self.db.body(func.into());
let edition = self.edition(func);
let mut pats_replacements = body
.pats
.iter()
.filter_map(|(pat_id, pat)| match pat {
Pat::Bind { id, .. } => {
let bind_name = &body.bindings[*id].name;
let mut suggested_text =
to_lower_snake_case(&bind_name.unescaped().display_no_db().to_smolstr())?;
if is_raw_identifier(&suggested_text, edition) {
suggested_text.insert_str(0, "r#");
}
let replacement = Replacement {
current_name: bind_name.clone(),
suggested_text: to_lower_snake_case(
&bind_name.display_no_db().to_smolstr(),
)?,
suggested_text,
expected_case: CaseType::LowerSnakeCase,
};
Some((pat_id, replacement))
@ -377,6 +382,11 @@ impl<'a> DeclValidator<'a> {
}
}
fn edition(&self, id: impl HasModule) -> span::Edition {
let krate = id.krate(self.db.upcast());
self.db.crate_graph()[krate].edition
}
fn validate_struct(&mut self, struct_id: StructId) {
// Check the structure name.
let non_camel_case_allowed =
@ -405,16 +415,17 @@ impl<'a> DeclValidator<'a> {
let VariantData::Record(fields) = data.variant_data.as_ref() else {
return;
};
let edition = self.edition(struct_id);
let mut struct_fields_replacements = fields
.iter()
.filter_map(|(_, field)| {
to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| {
Replacement {
to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
|new_name| Replacement {
current_name: field.name.clone(),
suggested_text: new_name,
expected_case: CaseType::LowerSnakeCase,
}
})
},
)
})
.peekable();
@ -498,14 +509,17 @@ impl<'a> DeclValidator<'a> {
self.validate_enum_variant_fields(*variant_id);
}
let edition = self.edition(enum_id);
let mut enum_variants_replacements = data
.variants
.iter()
.filter_map(|(_, name)| {
to_camel_case(&name.display_no_db().to_smolstr()).map(|new_name| Replacement {
current_name: name.clone(),
suggested_text: new_name,
expected_case: CaseType::UpperCamelCase,
to_camel_case(&name.display_no_db(edition).to_smolstr()).map(|new_name| {
Replacement {
current_name: name.clone(),
suggested_text: new_name,
expected_case: CaseType::UpperCamelCase,
}
})
})
.peekable();
@ -566,16 +580,17 @@ impl<'a> DeclValidator<'a> {
let VariantData::Record(fields) = variant_data.variant_data.as_ref() else {
return;
};
let edition = self.edition(variant_id);
let mut variant_field_replacements = fields
.iter()
.filter_map(|(_, field)| {
to_lower_snake_case(&field.name.display_no_db().to_smolstr()).map(|new_name| {
Replacement {
to_lower_snake_case(&field.name.display_no_db(edition).to_smolstr()).map(
|new_name| Replacement {
current_name: field.name.clone(),
suggested_text: new_name,
expected_case: CaseType::LowerSnakeCase,
}
})
},
)
})
.peekable();
@ -704,18 +719,22 @@ impl<'a> DeclValidator<'a> {
) where
N: AstNode + HasName + fmt::Debug,
S: HasSource<Value = N>,
L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a>,
L: Lookup<Data = S, Database<'a> = dyn DefDatabase + 'a> + HasModule + Copy,
{
let to_expected_case_type = match expected_case {
CaseType::LowerSnakeCase => to_lower_snake_case,
CaseType::UpperSnakeCase => to_upper_snake_case,
CaseType::UpperCamelCase => to_camel_case,
};
let Some(replacement) =
to_expected_case_type(&name.display(self.db.upcast()).to_smolstr()).map(|new_name| {
Replacement { current_name: name.clone(), suggested_text: new_name, expected_case }
})
else {
let edition = self.edition(item_id);
let Some(replacement) = to_expected_case_type(
&name.display(self.db.upcast(), edition).to_smolstr(),
)
.map(|new_name| Replacement {
current_name: name.clone(),
suggested_text: new_name,
expected_case,
}) else {
return;
};
@ -748,12 +767,13 @@ impl<'a> DeclValidator<'a> {
return;
};
let edition = file_id.original_file(self.db.upcast()).edition();
let diagnostic = IncorrectCase {
file: file_id,
ident_type,
ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.display(self.db.upcast()).to_string(),
ident_text: replacement.current_name.display(self.db.upcast(), edition).to_string(),
suggested_text: replacement.suggested_text,
};

View File

@ -4,6 +4,7 @@
use std::fmt;
use base_db::CrateId;
use chalk_solve::rust_ir::AdtKind;
use either::Either;
use hir_def::{
@ -15,6 +16,7 @@ use intern::sym;
use itertools::Itertools;
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::constructor::Constructor;
use span::Edition;
use syntax::{
ast::{self, UnaryOp},
AstNode,
@ -258,7 +260,13 @@ impl ExprValidator {
if !witnesses.is_empty() {
self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms {
match_expr,
uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, m_arms.is_empty()),
uncovered_patterns: missing_match_arms(
&cx,
scrut_ty,
witnesses,
m_arms.is_empty(),
self.owner.krate(db.upcast()),
),
});
}
}
@ -345,7 +353,13 @@ impl ExprValidator {
if !witnesses.is_empty() {
self.diagnostics.push(BodyValidationDiagnostic::NonExhaustiveLet {
pat,
uncovered_patterns: missing_match_arms(&cx, ty, witnesses, false),
uncovered_patterns: missing_match_arms(
&cx,
ty,
witnesses,
false,
self.owner.krate(db.upcast()),
),
});
}
}
@ -616,24 +630,26 @@ fn missing_match_arms<'p>(
scrut_ty: &Ty,
witnesses: Vec<WitnessPat<'p>>,
arms_is_empty: bool,
krate: CrateId,
) -> String {
struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>);
struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>, Edition);
impl fmt::Display for DisplayWitness<'_, '_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let DisplayWitness(witness, cx) = *self;
let DisplayWitness(witness, cx, edition) = *self;
let pat = cx.hoist_witness_pat(witness);
write!(f, "{}", pat.display(cx.db))
write!(f, "{}", pat.display(cx.db, edition))
}
}
let edition = cx.db.crate_graph()[krate].edition;
let non_empty_enum = match scrut_ty.as_adt() {
Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(),
_ => false,
};
if arms_is_empty && !non_empty_enum {
format!("type `{}` is non-empty", scrut_ty.display(cx.db))
format!("type `{}` is non-empty", scrut_ty.display(cx.db, edition))
} else {
let pat_display = |witness| DisplayWitness(witness, cx);
let pat_display = |witness| DisplayWitness(witness, cx, edition);
const LIMIT: usize = 3;
match &*witnesses {
[witness] => format!("`{}` not covered", pat_display(witness)),

View File

@ -14,6 +14,7 @@ use hir_def::{
body::Body, data::adt::VariantData, hir::PatId, AdtId, EnumVariantId, LocalFieldId, VariantId,
};
use hir_expand::name::Name;
use span::Edition;
use stdx::{always, never};
use crate::{
@ -151,7 +152,11 @@ impl<'a> PatCtxt<'a> {
match (bm, ty.kind(Interner)) {
(BindingMode::Ref(_), TyKind::Ref(.., rty)) => ty = rty,
(BindingMode::Ref(_), _) => {
never!("`ref {}` has wrong type {:?}", name.display(self.db.upcast()), ty);
never!(
"`ref {}` has wrong type {:?}",
name.display(self.db.upcast(), Edition::LATEST),
ty
);
self.errors.push(PatternError::UnexpectedType);
return Pat { ty: ty.clone(), kind: PatKind::Wild.into() };
}
@ -297,7 +302,7 @@ impl HirDisplay for Pat {
PatKind::Wild => write!(f, "_"),
PatKind::Never => write!(f, "!"),
PatKind::Binding { name, subpattern } => {
write!(f, "{}", name.display(f.db.upcast()))?;
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
if let Some(subpattern) = subpattern {
write!(f, " @ ")?;
subpattern.hir_fmt(f)?;
@ -317,14 +322,22 @@ impl HirDisplay for Pat {
if let Some(variant) = variant {
match variant {
VariantId::EnumVariantId(v) => {
write!(f, "{}", f.db.enum_variant_data(v).name.display(f.db.upcast()))?;
}
VariantId::StructId(s) => {
write!(f, "{}", f.db.struct_data(s).name.display(f.db.upcast()))?
}
VariantId::UnionId(u) => {
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))?
write!(
f,
"{}",
f.db.enum_variant_data(v).name.display(f.db.upcast(), f.edition())
)?;
}
VariantId::StructId(s) => write!(
f,
"{}",
f.db.struct_data(s).name.display(f.db.upcast(), f.edition())
)?,
VariantId::UnionId(u) => write!(
f,
"{}",
f.db.union_data(u).name.display(f.db.upcast(), f.edition())
)?,
};
let variant_data = variant.variant_data(f.db.upcast());
@ -341,7 +354,9 @@ impl HirDisplay for Pat {
write!(
f,
"{}: ",
rec_fields[p.field].name.display(f.db.upcast())
rec_fields[p.field]
.name
.display(f.db.upcast(), f.edition())
)?;
p.pattern.hir_fmt(f)
})

View File

@ -1,10 +1,10 @@
//! Interface with `rustc_pattern_analysis`.
use std::cell::LazyCell;
use std::fmt;
use hir_def::{DefWithBodyId, EnumId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId};
use intern::sym;
use once_cell::unsync::Lazy;
use rustc_pattern_analysis::{
constructor::{Constructor, ConstructorSet, VariantVisibility},
usefulness::{compute_match_usefulness, PlaceValidity, UsefulnessReport},
@ -384,8 +384,9 @@ impl<'db> PatCx for MatchCheckCtx<'db> {
let variant = Self::variant_id_for_adt(self.db, ctor, adt).unwrap();
// Whether we must not match the fields of this variant exhaustively.
let is_non_exhaustive = Lazy::new(|| self.is_foreign_non_exhaustive(adt));
let visibilities = Lazy::new(|| self.db.field_visibilities(variant));
let is_non_exhaustive =
LazyCell::new(|| self.is_foreign_non_exhaustive(adt));
let visibilities = LazyCell::new(|| self.db.field_visibilities(variant));
self.list_variant_fields(ty, variant)
.map(move |(fid, ty)| {

View File

@ -33,7 +33,8 @@ use rustc_apfloat::{
Float,
};
use smallvec::SmallVec;
use stdx::{never, IsNoneOr};
use span::Edition;
use stdx::never;
use triomphe::Arc;
use crate::{
@ -131,7 +132,11 @@ pub trait HirDisplay {
/// Returns a `Display`able type that is human-readable.
/// Use this for showing types to the user (e.g. diagnostics)
fn display<'a>(&'a self, db: &'a dyn HirDatabase) -> HirDisplayWrapper<'a, Self>
fn display<'a>(
&'a self,
db: &'a dyn HirDatabase,
edition: Edition,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
{
@ -142,7 +147,7 @@ pub trait HirDisplay {
limited_size: None,
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
display_target: DisplayTarget::Diagnostics { edition },
show_container_bounds: false,
}
}
@ -153,6 +158,7 @@ pub trait HirDisplay {
&'a self,
db: &'a dyn HirDatabase,
max_size: Option<usize>,
edition: Edition,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
@ -164,7 +170,7 @@ pub trait HirDisplay {
limited_size: None,
omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
display_target: DisplayTarget::Diagnostics { edition },
show_container_bounds: false,
}
}
@ -175,6 +181,7 @@ pub trait HirDisplay {
&'a self,
db: &'a dyn HirDatabase,
limited_size: Option<usize>,
edition: Edition,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
@ -186,7 +193,7 @@ pub trait HirDisplay {
limited_size,
omit_verbose_types: true,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
display_target: DisplayTarget::Diagnostics { edition },
show_container_bounds: false,
}
}
@ -242,6 +249,7 @@ pub trait HirDisplay {
&'a self,
db: &'a dyn HirDatabase,
show_container_bounds: bool,
edition: Edition,
) -> HirDisplayWrapper<'a, Self>
where
Self: Sized,
@ -253,13 +261,23 @@ pub trait HirDisplay {
limited_size: None,
omit_verbose_types: false,
closure_style: ClosureStyle::ImplFn,
display_target: DisplayTarget::Diagnostics,
display_target: DisplayTarget::Diagnostics { edition },
show_container_bounds,
}
}
}
impl HirFormatter<'_> {
pub fn edition(&self) -> Edition {
match self.display_target {
DisplayTarget::Diagnostics { edition } => edition,
DisplayTarget::SourceCode { module_id, .. } => {
self.db.crate_graph()[module_id.krate()].edition
}
DisplayTarget::Test => Edition::CURRENT,
}
}
pub fn write_joined<T: HirDisplay>(
&mut self,
iter: impl IntoIterator<Item = T>,
@ -324,7 +342,7 @@ pub enum DisplayTarget {
/// Display types for inlays, doc popups, autocompletion, etc...
/// Showing `{unknown}` or not qualifying paths is fine here.
/// There's no reason for this to fail.
Diagnostics,
Diagnostics { edition: Edition },
/// Display types for inserting them in source files.
/// The generated code should compile, so paths need to be qualified.
SourceCode { module_id: ModuleId, allow_opaque: bool },
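
Since `Diagnostics` now carries the edition, formatters can resolve it without consulting the crate graph. A simplified stand-alone analogue with illustrative types:

// Sketch only: an edition-carrying display target with an accessor.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Edition {
    Edition2021,
    Edition2024,
}

enum DisplayTarget {
    Diagnostics { edition: Edition },
    Test,
}

impl DisplayTarget {
    fn edition(&self) -> Edition {
        match self {
            DisplayTarget::Diagnostics { edition } => *edition,
            // A fixed edition for tests (the real code uses `Edition::CURRENT`).
            DisplayTarget::Test => Edition::Edition2024,
        }
    }
}

fn main() {
    let target = DisplayTarget::Diagnostics { edition: Edition::Edition2021 };
    assert_eq!(target.edition(), Edition::Edition2021);
}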
@ -460,7 +478,7 @@ impl HirDisplay for ProjectionTy {
">::{}",
f.db.type_alias_data(from_assoc_type_id(self.associated_ty_id))
.name
.display(f.db.upcast())
.display(f.db.upcast(), f.edition())
)?;
let proj_params_count =
self.substitution.len(Interner) - trait_ref.substitution.len(Interner);
@ -499,7 +517,7 @@ impl HirDisplay for Const {
let id = from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics[id.local_id];
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast()))?;
write!(f, "{}", param_data.name().unwrap().display(f.db.upcast(), f.edition()))?;
Ok(())
}
ConstValue::Concrete(c) => match &c.interned {
@ -633,7 +651,7 @@ fn render_const_scalar(
TyKind::Adt(adt, _) if b.len() == 2 * size_of::<usize>() => match adt.0 {
hir_def::AdtId::StructId(s) => {
let data = f.db.struct_data(s);
write!(f, "&{}", data.name.display(f.db.upcast()))?;
write!(f, "&{}", data.name.display(f.db.upcast(), f.edition()))?;
Ok(())
}
_ => f.write_str("<unsized-enum-or-union>"),
@ -691,7 +709,7 @@ fn render_const_scalar(
match adt.0 {
hir_def::AdtId::StructId(s) => {
let data = f.db.struct_data(s);
write!(f, "{}", data.name.display(f.db.upcast()))?;
write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
let field_types = f.db.field_types(s.into());
render_variant_after_name(
&data.variant_data,
@ -705,7 +723,7 @@ fn render_const_scalar(
)
}
hir_def::AdtId::UnionId(u) => {
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast()))
write!(f, "{}", f.db.union_data(u).name.display(f.db.upcast(), f.edition()))
}
hir_def::AdtId::EnumId(e) => {
let Ok(target_data_layout) = f.db.target_data_layout(trait_env.krate) else {
@ -717,7 +735,7 @@ fn render_const_scalar(
return f.write_str("<failed-to-detect-variant>");
};
let data = f.db.enum_variant_data(var_id);
write!(f, "{}", data.name.display(f.db.upcast()))?;
write!(f, "{}", data.name.display(f.db.upcast(), f.edition()))?;
let field_types = f.db.field_types(var_id.into());
render_variant_after_name(
&data.variant_data,
@ -802,11 +820,11 @@ fn render_variant_after_name(
if matches!(data, VariantData::Record(_)) {
write!(f, " {{")?;
if let Some((id, data)) = it.next() {
write!(f, " {}: ", data.name.display(f.db.upcast()))?;
write!(f, " {}: ", data.name.display(f.db.upcast(), f.edition()))?;
render_field(f, id)?;
}
for (id, data) in it {
write!(f, ", {}: ", data.name.display(f.db.upcast()))?;
write!(f, ", {}: ", data.name.display(f.db.upcast(), f.edition()))?;
render_field(f, id)?;
}
write!(f, " }}")?;
@ -1000,15 +1018,23 @@ impl HirDisplay for Ty {
CallableDefId::FunctionId(ff) => {
write!(f, "fn ")?;
f.start_location_link(def.into());
write!(f, "{}", db.function_data(ff).name.display(f.db.upcast()))?
write!(
f,
"{}",
db.function_data(ff).name.display(f.db.upcast(), f.edition())
)?
}
CallableDefId::StructId(s) => {
f.start_location_link(def.into());
write!(f, "{}", db.struct_data(s).name.display(f.db.upcast()))?
write!(f, "{}", db.struct_data(s).name.display(f.db.upcast(), f.edition()))?
}
CallableDefId::EnumVariantId(e) => {
f.start_location_link(def.into());
write!(f, "{}", db.enum_variant_data(e).name.display(f.db.upcast()))?
write!(
f,
"{}",
db.enum_variant_data(e).name.display(f.db.upcast(), f.edition())
)?
}
};
f.end_location_link();
@ -1019,26 +1045,25 @@ impl HirDisplay for Ty {
let (parent_len, self_param, type_, const_, impl_, lifetime) =
generics.provenance_split();
let parameters = parameters.as_slice(Interner);
debug_assert_eq!(
parameters.len(),
parent_len + self_param as usize + type_ + const_ + impl_ + lifetime
);
// We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
if parameters.len() - impl_ > 0 {
// `parameters` are in the order of fn's params (including impl traits), fn's lifetimes
let parameters =
generic_args_sans_defaults(f, Some(generic_def_id), parameters);
let without_impl = self_param as usize + type_ + const_ + lifetime;
// parent's params (those from enclosing impl or trait, if any).
let (fn_params, parent_params) = parameters.split_at(without_impl + impl_);
debug_assert_eq!(parent_params.len(), parent_len);
let parent_params =
generic_args_sans_defaults(f, Some(generic_def_id), parent_params);
let fn_params =
&generic_args_sans_defaults(f, Some(generic_def_id), fn_params)
[0..without_impl];
write!(f, "<")?;
hir_fmt_generic_arguments(f, parent_params, None)?;
if !parent_params.is_empty() && !fn_params.is_empty() {
write!(f, ", ")?;
}
hir_fmt_generic_arguments(f, fn_params, None)?;
hir_fmt_generic_arguments(f, &fn_params[0..without_impl], None)?;
write!(f, ">")?;
}
}
@ -1054,13 +1079,13 @@ impl HirDisplay for Ty {
TyKind::Adt(AdtId(def_id), parameters) => {
f.start_location_link((*def_id).into());
match f.display_target {
DisplayTarget::Diagnostics | DisplayTarget::Test => {
DisplayTarget::Diagnostics { .. } | DisplayTarget::Test => {
let name = match *def_id {
hir_def::AdtId::StructId(it) => db.struct_data(it).name.clone(),
hir_def::AdtId::UnionId(it) => db.union_data(it).name.clone(),
hir_def::AdtId::EnumId(it) => db.enum_data(it).name.clone(),
};
write!(f, "{}", name.display(f.db.upcast()))?;
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
}
DisplayTarget::SourceCode { module_id, allow_opaque: _ } => {
if let Some(path) = find_path::find_path(
@ -1076,7 +1101,7 @@ impl HirDisplay for Ty {
prefer_absolute: false,
},
) {
write!(f, "{}", path.display(f.db.upcast()))?;
write!(f, "{}", path.display(f.db.upcast(), f.edition()))?;
} else {
return Err(HirDisplayError::DisplaySourceCodeError(
DisplaySourceCodeError::PathNotFound,
@ -1102,12 +1127,12 @@ impl HirDisplay for Ty {
// Use placeholder associated types when the target is test (https://rust-lang.github.io/chalk/book/clauses/type_equality.html#placeholder-associated-types)
if f.display_target.is_test() {
f.start_location_link(trait_.into());
write!(f, "{}", trait_data.name.display(f.db.upcast()))?;
write!(f, "{}", trait_data.name.display(f.db.upcast(), f.edition()))?;
f.end_location_link();
write!(f, "::")?;
f.start_location_link(type_alias.into());
write!(f, "{}", type_alias_data.name.display(f.db.upcast()))?;
write!(f, "{}", type_alias_data.name.display(f.db.upcast(), f.edition()))?;
f.end_location_link();
// Note that the generic args for the associated type come before those for the
// trait (including the self type).
@ -1125,7 +1150,7 @@ impl HirDisplay for Ty {
let alias = from_foreign_def_id(*type_alias);
let type_alias = db.type_alias_data(alias);
f.start_location_link(alias.into());
write!(f, "{}", type_alias.name.display(f.db.upcast()))?;
write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
f.end_location_link();
}
TyKind::OpaqueType(opaque_ty_id, parameters) => {
@ -1257,7 +1282,10 @@ impl HirDisplay for Ty {
write!(
f,
"{}",
p.name.clone().unwrap_or_else(Name::missing).display(f.db.upcast())
p.name
.clone()
.unwrap_or_else(Name::missing)
.display(f.db.upcast(), f.edition())
)?
}
TypeParamProvenance::ArgumentImplTrait => {
@ -1290,7 +1318,7 @@ impl HirDisplay for Ty {
}
},
TypeOrConstParamData::ConstParamData(p) => {
write!(f, "{}", p.name.display(f.db.upcast()))?;
write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
}
}
}
@ -1410,17 +1438,7 @@ fn hir_fmt_generics(
let parameters_to_write = generic_args_sans_defaults(f, generic_def, parameters);
// FIXME: Remove this
// most of our lifetimes will be errors as we lack elision and inference
// so don't render them for now
let only_err_lifetimes = !cfg!(test)
&& parameters_to_write.iter().all(|arg| {
matches!(
arg.data(Interner),
chalk_ir::GenericArgData::Lifetime(it) if *it.data(Interner) == LifetimeData::Error
)
});
if !parameters_to_write.is_empty() && !only_err_lifetimes {
if !parameters_to_write.is_empty() {
write!(f, "<")?;
hir_fmt_generic_arguments(f, parameters_to_write, self_)?;
write!(f, ">")?;
@ -1461,12 +1479,14 @@ fn generic_args_sans_defaults<'ga>(
}
// otherwise, if the arg is equal to the param default, hide it (unless the
// default is an error which can happen for the trait Self type)
#[allow(unstable_name_collisions)]
IsNoneOr::is_none_or(default_parameters.get(i), |default_parameter| {
// !is_err(default_parameter.skip_binders())
// &&
arg != &default_parameter.clone().substitute(Interner, &parameters)
})
match default_parameters.get(i) {
None => true,
Some(default_parameter) => {
// !is_err(default_parameter.skip_binders())
// &&
arg != &default_parameter.clone().substitute(Interner, &parameters)
}
}
};
let mut default_from = 0;
for (i, parameter) in parameters.iter().enumerate() {
@ -1495,18 +1515,6 @@ fn hir_fmt_generic_arguments(
None => (parameters, &[][..]),
};
for generic_arg in lifetimes.iter().chain(ty_or_const) {
// FIXME: Remove this
// most of our lifetimes will be errors as we lack elision and inference
// so don't render them for now
if !cfg!(test)
&& matches!(
generic_arg.lifetime(Interner),
Some(l) if ***l.interned() == LifetimeData::Error
)
{
continue;
}
if !mem::take(&mut first) {
write!(f, ", ")?;
}
@ -1633,7 +1641,7 @@ fn write_bounds_like_dyn_trait(
// existential) here, which is the only thing that's
// possible in actual Rust, and hence don't print it
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?;
f.end_location_link();
if is_fn_trait {
if let [self_, params @ ..] = trait_ref.substitution.as_slice(Interner) {
@ -1707,7 +1715,7 @@ fn write_bounds_like_dyn_trait(
let assoc_ty_id = from_assoc_type_id(proj.associated_ty_id);
let type_alias = f.db.type_alias_data(assoc_ty_id);
f.start_location_link(assoc_ty_id.into());
write!(f, "{}", type_alias.name.display(f.db.upcast()))?;
write!(f, "{}", type_alias.name.display(f.db.upcast(), f.edition()))?;
f.end_location_link();
let proj_arg_count = generics(f.db.upcast(), assoc_ty_id.into()).len_self();
@ -1771,7 +1779,7 @@ fn fmt_trait_ref(
}
let trait_ = tr.hir_trait_id();
f.start_location_link(trait_.into());
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast()))?;
write!(f, "{}", f.db.trait_data(trait_).name.display(f.db.upcast(), f.edition()))?;
f.end_location_link();
let substs = tr.substitution.as_slice(Interner);
hir_fmt_generics(f, &substs[1..], None, substs[0].ty(Interner))
@ -1797,7 +1805,11 @@ impl HirDisplay for WhereClause {
write!(f, ">::",)?;
let type_alias = from_assoc_type_id(projection_ty.associated_ty_id);
f.start_location_link(type_alias.into());
write!(f, "{}", f.db.type_alias_data(type_alias).name.display(f.db.upcast()),)?;
write!(
f,
"{}",
f.db.type_alias_data(type_alias).name.display(f.db.upcast(), f.edition()),
)?;
f.end_location_link();
write!(f, " = ")?;
ty.hir_fmt(f)?;
@ -1833,14 +1845,20 @@ impl HirDisplay for LifetimeData {
let id = lt_from_placeholder_idx(f.db, *idx);
let generics = generics(f.db.upcast(), id.parent);
let param_data = &generics[id.local_id];
write!(f, "{}", param_data.name.display(f.db.upcast()))?;
write!(f, "{}", param_data.name.display(f.db.upcast(), f.edition()))?;
Ok(())
}
_ if f.display_target.is_source_code() => write!(f, "'_"),
LifetimeData::BoundVar(idx) => idx.hir_fmt(f),
LifetimeData::InferenceVar(_) => write!(f, "_"),
LifetimeData::Static => write!(f, "'static"),
LifetimeData::Error => write!(f, "'?"),
LifetimeData::Error => {
if cfg!(test) {
write!(f, "'?")
} else {
write!(f, "'_")
}
}
LifetimeData::Erased => write!(f, "'<erased>"),
LifetimeData::Phantom(void, _) => match *void {},
}
@ -1855,7 +1873,7 @@ impl HirDisplay for DomainGoal {
wc.hir_fmt(f)?;
write!(f, ")")?;
}
_ => write!(f, "?")?,
_ => write!(f, "_")?,
}
Ok(())
}
@ -1914,7 +1932,7 @@ impl HirDisplay for TypeRef {
};
write!(f, "&")?;
if let Some(lifetime) = lifetime {
write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?;
}
write!(f, "{mutability}")?;
inner.hir_fmt(f)?;
@ -1922,7 +1940,7 @@ impl HirDisplay for TypeRef {
TypeRef::Array(inner, len) => {
write!(f, "[")?;
inner.hir_fmt(f)?;
write!(f, "; {}]", len.display(f.db.upcast()))?;
write!(f, "; {}]", len.display(f.db.upcast(), f.edition()))?;
}
TypeRef::Slice(inner) => {
write!(f, "[")?;
@ -1943,7 +1961,7 @@ impl HirDisplay for TypeRef {
for index in 0..function_parameters.len() {
let (param_name, param_type) = &function_parameters[index];
if let Some(name) = param_name {
write!(f, "{}: ", name.display(f.db.upcast()))?;
write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?;
}
param_type.hir_fmt(f)?;
@ -2001,12 +2019,15 @@ impl HirDisplay for TypeBound {
}
path.hir_fmt(f)
}
TypeBound::Lifetime(lifetime) => write!(f, "{}", lifetime.name.display(f.db.upcast())),
TypeBound::Lifetime(lifetime) => {
write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))
}
TypeBound::ForLifetime(lifetimes, path) => {
let edition = f.edition();
write!(
f,
"for<{}> ",
lifetimes.iter().map(|it| it.display(f.db.upcast())).format(", ")
lifetimes.iter().map(|it| it.display(f.db.upcast(), edition)).format(", ")
)?;
path.hir_fmt(f)
}
@ -2072,7 +2093,7 @@ impl HirDisplay for Path {
if !matches!(self.kind(), PathKind::Plain) || seg_idx > 0 {
write!(f, "::")?;
}
write!(f, "{}", segment.name.display(f.db.upcast()))?;
write!(f, "{}", segment.name.display(f.db.upcast(), f.edition()))?;
if let Some(generic_args) = segment.args_and_bindings {
// We should be in type context, so format as `Foo<Bar>` instead of `Foo::<Bar>`.
// Do we actually format expressions?
@ -2117,7 +2138,7 @@ impl HirDisplay for Path {
} else {
write!(f, ", ")?;
}
write!(f, "{}", binding.name.display(f.db.upcast()))?;
write!(f, "{}", binding.name.display(f.db.upcast(), f.edition()))?;
match &binding.type_ref {
Some(ty) => {
write!(f, " = ")?;
@ -2151,9 +2172,11 @@ impl HirDisplay for hir_def::path::GenericArg {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
match self {
hir_def::path::GenericArg::Type(ty) => ty.hir_fmt(f),
hir_def::path::GenericArg::Const(c) => write!(f, "{}", c.display(f.db.upcast())),
hir_def::path::GenericArg::Const(c) => {
write!(f, "{}", c.display(f.db.upcast(), f.edition()))
}
hir_def::path::GenericArg::Lifetime(lifetime) => {
write!(f, "{}", lifetime.name.display(f.db.upcast()))
write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))
}
}
}

View File

@ -22,7 +22,7 @@ mod pat;
mod path;
pub(crate) mod unify;
use std::{convert::identity, iter, ops::Index};
use std::{cell::OnceCell, convert::identity, iter, ops::Index};
use chalk_ir::{
cast::Cast,
@ -49,17 +49,17 @@ use hir_expand::name::Name;
use indexmap::IndexSet;
use intern::sym;
use la_arena::{ArenaMap, Entry};
use once_cell::unsync::OnceCell;
use rustc_hash::{FxHashMap, FxHashSet};
use stdx::{always, never};
use triomphe::Arc;
use crate::{
db::HirDatabase,
error_lifetime, fold_tys,
fold_tys,
generics::Generics,
infer::{coerce::CoerceMany, unify::InferenceTable},
lower::ImplTraitLoweringMode,
mir::MirSpan,
to_assoc_type_id,
traits::FnTrait,
utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder},
@ -328,13 +328,13 @@ pub struct Adjustment {
}
impl Adjustment {
pub fn borrow(m: Mutability, ty: Ty) -> Self {
let ty = TyKind::Ref(m, error_lifetime(), ty).intern(Interner);
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty }
pub fn borrow(m: Mutability, ty: Ty, lt: Lifetime) -> Self {
let ty = TyKind::Ref(m, lt.clone(), ty).intern(Interner);
Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(lt, m)), target: ty }
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum Adjust {
/// Go from ! to any type.
NeverToAny,
@ -354,18 +354,18 @@ pub enum Adjust {
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct OverloadedDeref(pub Option<Mutability>);
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum AutoBorrow {
/// Converts from T to &T.
Ref(Mutability),
Ref(Lifetime, Mutability),
/// Converts from T to *T.
RawPtr(Mutability),
}
impl AutoBorrow {
fn mutability(self) -> Mutability {
let (AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) = self;
m
fn mutability(&self) -> Mutability {
let (AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) = self;
*m
}
}
@ -554,6 +554,12 @@ pub(crate) struct InferenceContext<'a> {
// fields related to closure capture
current_captures: Vec<CapturedItemWithoutTy>,
/// A stack that has an entry for each projection in the current capture.
///
/// For example, in `a.b.c`, we capture the spans of `a`, `a.b`, and `a.b.c`.
/// We do that because sometimes we truncate projections (when a closure captures
/// both `a.b` and `a.b.c`), and we want to provide accurate spans in this case.
current_capture_span_stack: Vec<MirSpan>,
current_closure: Option<ClosureId>,
/// Stores the list of closure ids that need to be analyzed before this closure. See the
/// comment on `InferenceContext::sort_closures`
@ -605,6 +611,11 @@ fn find_continuable(
}
}
enum ImplTraitReplacingMode {
ReturnPosition(FxHashSet<Ty>),
TypeAlias,
}
impl<'a> InferenceContext<'a> {
fn new(
db: &'a dyn HirDatabase,
@ -630,6 +641,7 @@ impl<'a> InferenceContext<'a> {
breakables: Vec::new(),
deferred_cast_checks: Vec::new(),
current_captures: Vec::new(),
current_capture_span_stack: Vec::new(),
current_closure: None,
deferred_closures: FxHashMap::default(),
closure_dependencies: FxHashMap::default(),
@ -826,13 +838,19 @@ impl<'a> InferenceContext<'a> {
self.write_binding_ty(self_param, ty);
}
}
let mut params_and_ret_tys = Vec::new();
let mut tait_candidates = FxHashSet::default();
for (ty, pat) in param_tys.zip(&*self.body.params) {
let ty = self.insert_type_vars(ty);
let ty = self.normalize_associated_types_in(ty);
self.infer_top_pat(*pat, &ty);
params_and_ret_tys.push(ty);
if ty
.data(Interner)
.flags
.intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER))
{
tait_candidates.insert(ty);
}
}
let return_ty = &*data.ret_type;
@ -845,7 +863,12 @@ impl<'a> InferenceContext<'a> {
let return_ty = if let Some(rpits) = self.db.return_type_impl_traits(func) {
// RPIT opaque types use substitution of their parent function.
let fn_placeholders = TyBuilder::placeholder_subst(self.db, func);
let result = self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders);
let mut mode = ImplTraitReplacingMode::ReturnPosition(FxHashSet::default());
let result =
self.insert_inference_vars_for_impl_trait(return_ty, fn_placeholders, &mut mode);
if let ImplTraitReplacingMode::ReturnPosition(taits) = mode {
tait_candidates.extend(taits);
}
let rpits = rpits.skip_binders();
for (id, _) in rpits.impl_traits.iter() {
if let Entry::Vacant(e) = self.result.type_of_rpit.entry(id) {
@ -864,11 +887,23 @@ impl<'a> InferenceContext<'a> {
// Functions might be defining usage sites of TAITs.
// To define a TAIT, that TAIT must appear in the function's signature.
// So, it suffices to check the params and the return type.
params_and_ret_tys.push(self.return_ty.clone());
self.make_tait_coercion_table(params_and_ret_tys.iter());
if self
.return_ty
.data(Interner)
.flags
.intersects(TypeFlags::HAS_TY_OPAQUE.union(TypeFlags::HAS_TY_INFER))
{
tait_candidates.insert(self.return_ty.clone());
}
self.make_tait_coercion_table(tait_candidates.iter());
}
fn insert_inference_vars_for_impl_trait<T>(&mut self, t: T, placeholders: Substitution) -> T
fn insert_inference_vars_for_impl_trait<T>(
&mut self,
t: T,
placeholders: Substitution,
mode: &mut ImplTraitReplacingMode,
) -> T
where
T: crate::HasInterner<Interner = Interner> + crate::TypeFoldable<Interner>,
{
@ -881,10 +916,31 @@ impl<'a> InferenceContext<'a> {
};
let (impl_traits, idx) =
match self.db.lookup_intern_impl_trait_id(opaque_ty_id.into()) {
// We don't replace opaque types of the other kind with inference vars,
// because `insert_inference_vars_for_impl_trait` is invoked separately for
// each kind, and unreplaced opaque types of the other kind are resolved
// during inference thanks to `tait_coercion_table`.
// Moreover, calling `insert_inference_vars_for_impl_trait` with the same
// `placeholders` for the other kind may cause trouble, because the substs
// for the bounds of the two impl trait kinds do not match.
ImplTraitId::ReturnTypeImplTrait(def, idx) => {
if matches!(mode, ImplTraitReplacingMode::TypeAlias) {
// RPITs don't have `tait_coercion_table`, so use inserted inference
// vars for them.
if let Some(ty) = self.result.type_of_rpit.get(idx) {
return ty.clone();
}
return ty;
}
(self.db.return_type_impl_traits(def), idx)
}
ImplTraitId::TypeAliasImplTrait(def, idx) => {
if let ImplTraitReplacingMode::ReturnPosition(taits) = mode {
// Gather TAITs while replacing RPITs, because TAITs inside RPITs
// may not be visited while replacing TAITs.
taits.insert(ty.clone());
return ty;
}
(self.db.type_alias_impl_traits(def), idx)
}
_ => unreachable!(),
@ -893,16 +949,20 @@ impl<'a> InferenceContext<'a> {
return ty;
};
let bounds = (*impl_traits)
.map_ref(|rpits| rpits.impl_traits[idx].bounds.map_ref(|it| it.iter()));
.map_ref(|its| its.impl_traits[idx].bounds.map_ref(|it| it.iter()));
let var = self.table.new_type_var();
let var_subst = Substitution::from1(Interner, var.clone());
for bound in bounds {
let predicate = bound.map(|it| it.cloned()).substitute(Interner, &placeholders);
let predicate = bound.map(|it| it.cloned());
let predicate = predicate.substitute(Interner, &placeholders);
let (var_predicate, binders) =
predicate.substitute(Interner, &var_subst).into_value_and_skipped_binders();
always!(binders.is_empty(Interner)); // quantified where clauses not yet handled
let var_predicate = self
.insert_inference_vars_for_impl_trait(var_predicate, placeholders.clone());
let var_predicate = self.insert_inference_vars_for_impl_trait(
var_predicate,
placeholders.clone(),
mode,
);
self.push_obligation(var_predicate.cast(Interner));
}
self.result.type_of_rpit.insert(idx, var.clone());
@ -1039,7 +1099,11 @@ impl<'a> InferenceContext<'a> {
self.db.lookup_intern_impl_trait_id(id.into())
{
let subst = TyBuilder::placeholder_subst(self.db, alias_id);
let ty = self.insert_inference_vars_for_impl_trait(ty, subst);
let ty = self.insert_inference_vars_for_impl_trait(
ty,
subst,
&mut ImplTraitReplacingMode::TypeAlias,
);
Some((id, ty))
} else {
None
@ -1436,7 +1500,8 @@ impl<'a> InferenceContext<'a> {
let remaining = unresolved.map(|it| path.segments()[it..].len()).filter(|it| it > &0);
let ty = match ty.kind(Interner) {
TyKind::Alias(AliasTy::Projection(proj_ty)) => {
self.db.normalize_projection(proj_ty.clone(), self.table.trait_env.clone())
let ty = self.table.normalize_projection_ty(proj_ty.clone());
self.table.resolve_ty_shallow(&ty)
}
_ => ty,
};

View File

@ -18,8 +18,9 @@ use hir_def::{
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::never;
use smallvec::{smallvec, SmallVec};
use stdx::{format_to, never};
use syntax::utils::is_raw_identifier;
use crate::{
db::{HirDatabase, InternedClosure},
@ -236,7 +237,13 @@ pub enum CaptureKind {
pub struct CapturedItem {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
pub(crate) span: MirSpan,
/// The outer vec has one entry per reference to the capture; each inner vec is that
/// reference's stack of spans.
///
/// Even though we always report only the last span (i.e. the most inclusive span),
/// we need to keep them all: when a closure occurs inside another closure, we copy
/// all captures of the inner closure to the outer closure and may then truncate
/// them, and we want the correct span to be reported.
span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
pub(crate) ty: Binders<Ty>,
}
@ -245,6 +252,11 @@ impl CapturedItem {
self.place.local
}
/// Returns whether this place has any field (i.e. non-deref) projections.
pub fn has_field_projections(&self) -> bool {
self.place.projections.iter().any(|it| !matches!(it, ProjectionElem::Deref))
}
pub fn ty(&self, subst: &Substitution) -> Ty {
self.ty.clone().substitute(Interner, utils::ClosureSubst(subst).parent_subst())
}
@ -253,9 +265,106 @@ impl CapturedItem {
self.kind
}
pub fn spans(&self) -> SmallVec<[MirSpan; 3]> {
self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
}
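
The span-stack bookkeeping can be sketched independently of rust-analyzer's types: one stack per reference to the capture, truncation keeps a prefix of each stack, and reporting takes only the last (most inclusive) span. `Span` below is a stand-in:

// Sketch only: span stacks per capture reference, with last-span reporting.
type Span = &'static str;

struct Capture {
    // Outer vec: one entry per place the capture was referenced from.
    // Inner vec: spans of `a`, `a.b`, `a.b.c`, ... for that reference.
    span_stacks: Vec<Vec<Span>>,
}

impl Capture {
    fn spans(&self) -> Vec<Span> {
        self.span_stacks.iter().map(|stack| *stack.last().expect("empty span stack")).collect()
    }

    fn truncate(&mut self, keep_projections: usize) {
        // Keep the identifier span plus `keep_projections` projection spans.
        for stack in &mut self.span_stacks {
            stack.truncate(keep_projections + 1);
        }
    }
}

fn main() {
    let mut capture = Capture { span_stacks: vec![vec!["a", "a.b", "a.b.c"]] };
    assert_eq!(capture.spans(), ["a.b.c"]);
    capture.truncate(1); // e.g. the capture got merged into one of `a.b`
    assert_eq!(capture.spans(), ["a.b"]);
}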
/// Converts the place to a name that can be inserted into source code.
pub fn place_to_name(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
let body = db.body(owner);
let mut result = body[self.place.local].name.unescaped().display(db.upcast()).to_string();
for proj in &self.place.projections {
match proj {
ProjectionElem::Deref => {}
ProjectionElem::Field(Either::Left(f)) => {
match &*f.parent.variant_data(db.upcast()) {
VariantData::Record(fields) => {
result.push('_');
result.push_str(fields[f.local_id].name.as_str())
}
VariantData::Tuple(fields) => {
let index = fields.iter().position(|it| it.0 == f.local_id);
if let Some(index) = index {
format_to!(result, "_{index}");
}
}
VariantData::Unit => {}
}
}
ProjectionElem::Field(Either::Right(f)) => format_to!(result, "_{}", f.index),
&ProjectionElem::ClosureField(field) => format_to!(result, "_{field}"),
ProjectionElem::Index(_)
| ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. }
| ProjectionElem::OpaqueCast(_) => {
never!("Not happen in closure capture");
continue;
}
}
}
if is_raw_identifier(&result, db.crate_graph()[owner.module(db.upcast()).krate()].edition) {
result.insert_str(0, "r#");
}
result
}
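
A toy, self-contained version of this flattening: a captured place like `a.b.0` becomes the identifier `a_b_0`, and `r#` is re-added only when the result collides with a keyword (the keyword list below stands in for `is_raw_identifier`):

// Sketch only: flatten a capture place into a valid identifier.
fn place_to_name(local: &str, projections: &[&str]) -> String {
    let mut result = local.trim_start_matches("r#").to_owned();
    for field in projections {
        result.push('_');
        result.push_str(field);
    }
    const KEYWORDS: &[&str] = &["match", "type", "gen"];
    if KEYWORDS.contains(&result.as_str()) {
        result.insert_str(0, "r#");
    }
    result
}

fn main() {
    assert_eq!(place_to_name("a", &["b", "0"]), "a_b_0");
    assert_eq!(place_to_name("r#gen", &[]), "r#gen");
}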
pub fn display_place_source_code(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
let body = db.body(owner);
let krate = owner.krate(db.upcast());
let edition = db.crate_graph()[krate].edition;
let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
for proj in &self.place.projections {
match proj {
// In source code autoderef kicks in.
ProjectionElem::Deref => {}
ProjectionElem::Field(Either::Left(f)) => {
let variant_data = f.parent.variant_data(db.upcast());
match &*variant_data {
VariantData::Record(fields) => format_to!(
result,
".{}",
fields[f.local_id].name.display(db.upcast(), edition)
),
VariantData::Tuple(fields) => format_to!(
result,
".{}",
fields.iter().position(|it| it.0 == f.local_id).unwrap_or_default()
),
VariantData::Unit => {}
}
}
ProjectionElem::Field(Either::Right(f)) => {
let field = f.index;
format_to!(result, ".{field}");
}
&ProjectionElem::ClosureField(field) => {
format_to!(result, ".{field}");
}
ProjectionElem::Index(_)
| ProjectionElem::ConstantIndex { .. }
| ProjectionElem::Subslice { .. }
| ProjectionElem::OpaqueCast(_) => {
never!("Not happen in closure capture");
continue;
}
}
}
let final_derefs_count = self
.place
.projections
.iter()
.rev()
.take_while(|proj| matches!(proj, ProjectionElem::Deref))
.count();
result.insert_str(0, &"*".repeat(final_derefs_count));
result
}
pub fn display_place(&self, owner: DefWithBodyId, db: &dyn HirDatabase) -> String {
let body = db.body(owner);
let mut result = body[self.place.local].name.display(db.upcast()).to_string();
let krate = owner.krate(db.upcast());
let edition = db.crate_graph()[krate].edition;
let mut result = body[self.place.local].name.display(db.upcast(), edition).to_string();
let mut field_need_paren = false;
for proj in &self.place.projections {
match proj {
@ -312,7 +421,8 @@ impl CapturedItem {
pub(crate) struct CapturedItemWithoutTy {
pub(crate) place: HirPlace,
pub(crate) kind: CaptureKind,
pub(crate) span: MirSpan,
/// The outer vec has one entry per reference to the capture; each inner vec is that
/// reference's stack of spans.
pub(crate) span_stacks: SmallVec<[SmallVec<[MirSpan; 3]>; 3]>,
}
impl CapturedItemWithoutTy {
@ -331,7 +441,7 @@ impl CapturedItemWithoutTy {
return CapturedItem {
place: self.place,
kind: self.kind,
span: self.span,
span_stacks: self.span_stacks,
ty: replace_placeholder_with_binder(ctx, ty),
};
@ -391,22 +501,26 @@ impl InferenceContext<'_> {
let r = self.place_of_expr_without_adjust(tgt_expr)?;
let default = vec![];
let adjustments = self.result.expr_adjustments.get(&tgt_expr).unwrap_or(&default);
apply_adjusts_to_place(r, adjustments)
apply_adjusts_to_place(&mut self.current_capture_span_stack, r, adjustments)
}
/// Changes `current_capture_span_stack` to contain the stack of spans for this expr.
fn place_of_expr_without_adjust(&mut self, tgt_expr: ExprId) -> Option<HirPlace> {
self.current_capture_span_stack.clear();
match &self.body[tgt_expr] {
Expr::Path(p) => {
let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
if let Some(ResolveValueResult::ValueNs(ValueNs::LocalBinding(b), _)) =
resolver.resolve_path_in_value_ns(self.db.upcast(), p)
{
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
return Some(HirPlace { local: b, projections: vec![] });
}
}
Expr::Field { expr, name: _ } => {
let mut place = self.place_of_expr(*expr)?;
let field = self.result.field_resolution(tgt_expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
place.projections.push(ProjectionElem::Field(field));
return Some(place);
}
@ -416,6 +530,7 @@ impl InferenceContext<'_> {
TyKind::Ref(..) | TyKind::Raw(..)
) {
let mut place = self.place_of_expr(*expr)?;
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
place.projections.push(ProjectionElem::Deref);
return Some(place);
}
@ -425,29 +540,57 @@ impl InferenceContext<'_> {
None
}
fn push_capture(&mut self, capture: CapturedItemWithoutTy) {
self.current_captures.push(capture);
fn push_capture(&mut self, place: HirPlace, kind: CaptureKind) {
self.current_captures.push(CapturedItemWithoutTy {
place,
kind,
span_stacks: smallvec![self.current_capture_span_stack.iter().copied().collect()],
});
}
fn ref_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared), expr.into());
fn truncate_capture_spans(&self, capture: &mut CapturedItemWithoutTy, mut truncate_to: usize) {
// The first span is the identifier, and it must always remain.
truncate_to += 1;
for span_stack in &mut capture.span_stacks {
let mut remained = truncate_to;
let mut actual_truncate_to = 0;
for &span in &*span_stack {
actual_truncate_to += 1;
if !span.is_ref_span(self.body) {
remained -= 1;
if remained == 0 {
break;
}
}
}
if actual_truncate_to < span_stack.len()
&& span_stack[actual_truncate_to].is_ref_span(self.body)
{
// Include the ref operator if there is one; we will fix it later (in `strip_captures_ref_span()`) if it's incorrect.
actual_truncate_to += 1;
}
span_stack.truncate(actual_truncate_to);
}
}
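
The truncation rule above, keep the identifier span plus a number of non-ref spans and also an immediately following `&`-operator span if present, can be sketched stand-alone (`Span` here is a simplified stand-in):

// Sketch only: truncate a span stack while always keeping the identifier span.
struct Span {
    text: &'static str,
    is_ref: bool,
}

fn truncate_spans(stack: &mut Vec<Span>, truncate_to: usize) {
    let mut remaining = truncate_to + 1; // +1 for the identifier span itself
    let mut keep = 0;
    for span in stack.iter() {
        keep += 1;
        if !span.is_ref {
            remaining -= 1;
            if remaining == 0 {
                break;
            }
        }
    }
    if keep < stack.len() && stack[keep].is_ref {
        // Keep a trailing ref-operator span; it may be stripped again later.
        keep += 1;
    }
    stack.truncate(keep);
}

fn main() {
    let s = |text: &'static str, is_ref: bool| Span { text, is_ref };
    let mut stack = vec![s("a", false), s("a.b", false), s("a.b.c", false)];
    truncate_spans(&mut stack, 1); // keep one projection beyond the identifier
    assert_eq!(stack.last().unwrap().text, "a.b");
}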
fn ref_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(place, CaptureKind::ByRef(BorrowKind::Shared));
}
self.walk_expr(expr);
}
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind, span: MirSpan) {
fn add_capture(&mut self, place: HirPlace, kind: CaptureKind) {
if self.is_upvar(&place) {
self.push_capture(CapturedItemWithoutTy { place, kind, span });
self.push_capture(place, kind);
}
}
fn mutate_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
fn mutate_expr(&mut self, expr: ExprId, place: Option<HirPlace>) {
if let Some(place) = place {
self.add_capture(
place,
CaptureKind::ByRef(BorrowKind::Mut { kind: MutBorrowKind::Default }),
expr.into(),
);
}
self.walk_expr(expr);
@ -455,12 +598,12 @@ impl InferenceContext<'_> {
fn consume_expr(&mut self, expr: ExprId) {
if let Some(place) = self.place_of_expr(expr) {
self.consume_place(place, expr.into());
self.consume_place(place);
}
self.walk_expr(expr);
}
fn consume_place(&mut self, place: HirPlace, span: MirSpan) {
fn consume_place(&mut self, place: HirPlace) {
if self.is_upvar(&place) {
let ty = place.ty(self);
let kind = if self.is_ty_copy(ty) {
@ -468,13 +611,13 @@ impl InferenceContext<'_> {
} else {
CaptureKind::ByValue
};
self.push_capture(CapturedItemWithoutTy { place, kind, span });
self.push_capture(place, kind);
}
}
fn walk_expr_with_adjust(&mut self, tgt_expr: ExprId, adjustment: &[Adjustment]) {
if let Some((last, rest)) = adjustment.split_last() {
match last.kind {
match &last.kind {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => {
self.walk_expr_with_adjust(tgt_expr, rest)
}
@ -499,8 +642,10 @@ impl InferenceContext<'_> {
Mutability::Not => CaptureKind::ByRef(BorrowKind::Shared),
};
if let Some(place) = self.place_of_expr_without_adjust(tgt_expr) {
if let Some(place) = apply_adjusts_to_place(place, rest) {
self.add_capture(place, capture_kind, tgt_expr.into());
if let Some(place) =
apply_adjusts_to_place(&mut self.current_capture_span_stack, place, rest)
{
self.add_capture(place, capture_kind);
}
}
self.walk_expr_with_adjust(tgt_expr, rest);
@ -582,11 +727,7 @@ impl InferenceContext<'_> {
self.walk_pat(&mut capture_mode, arm.pat);
}
if let Some(c) = capture_mode {
self.push_capture(CapturedItemWithoutTy {
place: discr_place,
kind: c,
span: (*expr).into(),
})
self.push_capture(discr_place, c);
}
}
}
@ -630,10 +771,11 @@ impl InferenceContext<'_> {
}
false
};
let place = self.place_of_expr(*expr);
if mutability {
self.mutate_expr(*expr);
self.mutate_expr(*expr, place);
} else {
self.ref_expr(*expr);
self.ref_expr(*expr, place);
}
} else {
self.select_from_expr(*expr);
@ -648,16 +790,22 @@ impl InferenceContext<'_> {
| Expr::Cast { expr, type_ref: _ } => {
self.consume_expr(*expr);
}
Expr::Ref { expr, rawness: _, mutability } => match mutability {
hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr),
hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr),
},
Expr::Ref { expr, rawness: _, mutability } => {
// We need to do this before we push the span so the order will be correct.
let place = self.place_of_expr(*expr);
self.current_capture_span_stack.push(MirSpan::ExprId(tgt_expr));
match mutability {
hir_def::type_ref::Mutability::Shared => self.ref_expr(*expr, place),
hir_def::type_ref::Mutability::Mut => self.mutate_expr(*expr, place),
}
}
Expr::BinaryOp { lhs, rhs, op } => {
let Some(op) = op else {
return;
};
if matches!(op, BinaryOp::Assignment { .. }) {
self.mutate_expr(*lhs);
let place = self.place_of_expr(*lhs);
self.mutate_expr(*lhs, place);
self.consume_expr(*rhs);
return;
}
@ -688,7 +836,11 @@ impl InferenceContext<'_> {
);
let mut cc = mem::take(&mut self.current_captures);
cc.extend(captures.iter().filter(|it| self.is_upvar(&it.place)).map(|it| {
CapturedItemWithoutTy { place: it.place.clone(), kind: it.kind, span: it.span }
CapturedItemWithoutTy {
place: it.place.clone(),
kind: it.kind,
span_stacks: it.span_stacks.clone(),
}
}));
self.current_captures = cc;
}
@ -810,10 +962,13 @@ impl InferenceContext<'_> {
}
fn restrict_precision_for_unsafe(&mut self) {
for capture in &mut self.current_captures {
// FIXME: Borrow checker problems without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
let mut ty = self.table.resolve_completely(self.result[capture.place.local].clone());
if ty.as_raw_ptr().is_some() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, 0);
capture.place.projections.truncate(0);
continue;
}
@ -828,29 +983,35 @@ impl InferenceContext<'_> {
);
if ty.as_raw_ptr().is_some() || ty.is_union() {
capture.kind = CaptureKind::ByRef(BorrowKind::Shared);
self.truncate_capture_spans(capture, i + 1);
capture.place.projections.truncate(i + 1);
break;
}
}
}
self.current_captures = current_captures;
}
fn adjust_for_move_closure(&mut self) {
for capture in &mut self.current_captures {
// FIXME: Borrow checker won't allow without this.
let mut current_captures = std::mem::take(&mut self.current_captures);
for capture in &mut current_captures {
if let Some(first_deref) =
capture.place.projections.iter().position(|proj| *proj == ProjectionElem::Deref)
{
self.truncate_capture_spans(capture, first_deref);
capture.place.projections.truncate(first_deref);
}
capture.kind = CaptureKind::ByValue;
}
self.current_captures = current_captures;
}
fn minimize_captures(&mut self) {
self.current_captures.sort_by_key(|it| it.place.projections.len());
self.current_captures.sort_unstable_by_key(|it| it.place.projections.len());
let mut hash_map = FxHashMap::<HirPlace, usize>::default();
let result = mem::take(&mut self.current_captures);
for item in result {
for mut item in result {
let mut lookup_place = HirPlace { local: item.place.local, projections: vec![] };
let mut it = item.place.projections.iter();
let prev_index = loop {
@ -858,12 +1019,17 @@ impl InferenceContext<'_> {
break Some(*k);
}
match it.next() {
Some(it) => lookup_place.projections.push(it.clone()),
Some(it) => {
lookup_place.projections.push(it.clone());
}
None => break None,
}
};
match prev_index {
Some(p) => {
let prev_projections_len = self.current_captures[p].place.projections.len();
self.truncate_capture_spans(&mut item, prev_projections_len);
self.current_captures[p].span_stacks.extend(item.span_stacks);
let len = self.current_captures[p].place.projections.len();
let kind_after_truncate =
item.place.capture_kind_of_truncated_place(item.kind, len);
@ -878,113 +1044,128 @@ impl InferenceContext<'_> {
}
}
fn consume_with_pat(&mut self, mut place: HirPlace, pat: PatId) {
let cnt = self.result.pat_adjustments.get(&pat).map(|it| it.len()).unwrap_or_default();
place.projections = place
.projections
.iter()
.cloned()
.chain((0..cnt).map(|_| ProjectionElem::Deref))
.collect::<Vec<_>>();
match &self.body[pat] {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let field_count = match self.result[pat].kind(Interner) {
TyKind::Tuple(_, s) => s.len(Interner),
_ => return,
};
let fields = 0..field_count;
let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (arg, i) in it {
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // dummy this, as it's unused anyway
index: i as u32,
})));
self.consume_with_pat(p, *arg);
}
}
Pat::Or(pats) => {
for pat in pats.iter() {
self.consume_with_pat(place.clone(), *pat);
}
}
Pat::Record { args, .. } => {
let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
return;
};
match variant {
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
self.consume_place(place, pat.into())
fn consume_with_pat(&mut self, mut place: HirPlace, tgt_pat: PatId) {
let adjustments_count =
self.result.pat_adjustments.get(&tgt_pat).map(|it| it.len()).unwrap_or_default();
place.projections.extend((0..adjustments_count).map(|_| ProjectionElem::Deref));
self.current_capture_span_stack
.extend((0..adjustments_count).map(|_| MirSpan::PatId(tgt_pat)));
'reset_span_stack: {
match &self.body[tgt_pat] {
Pat::Missing | Pat::Wild => (),
Pat::Tuple { args, ellipsis } => {
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let field_count = match self.result[tgt_pat].kind(Interner) {
TyKind::Tuple(_, s) => s.len(Interner),
_ => break 'reset_span_stack,
};
let fields = 0..field_count;
let it = al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (&arg, i) in it {
let mut p = place.clone();
self.current_capture_span_stack.push(MirSpan::PatId(arg));
p.projections.push(ProjectionElem::Field(Either::Right(TupleFieldId {
tuple: TupleId(!0), // dummy this, as it's unused anyway
index: i as u32,
})));
self.consume_with_pat(p, arg);
self.current_capture_span_stack.pop();
}
VariantId::StructId(s) => {
let vd = &*self.db.struct_data(s).variant_data;
for field_pat in args.iter() {
let arg = field_pat.pat;
let Some(local_id) = vd.field(&field_pat.name) else {
continue;
};
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant,
local_id,
})));
self.consume_with_pat(p, arg);
}
Pat::Or(pats) => {
for pat in pats.iter() {
self.consume_with_pat(place.clone(), *pat);
}
}
Pat::Record { args, .. } => {
let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
break 'reset_span_stack;
};
match variant {
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
self.consume_place(place)
}
VariantId::StructId(s) => {
let vd = &*self.db.struct_data(s).variant_data;
for field_pat in args.iter() {
let arg = field_pat.pat;
let Some(local_id) = vd.field(&field_pat.name) else {
continue;
};
let mut p = place.clone();
self.current_capture_span_stack.push(MirSpan::PatId(arg));
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant,
local_id,
})));
self.consume_with_pat(p, arg);
self.current_capture_span_stack.pop();
}
}
}
}
}
Pat::Range { .. }
| Pat::Slice { .. }
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_) => self.consume_place(place, pat.into()),
Pat::Bind { id: _, subpat: _ } => {
let mode = self.result.binding_modes[pat];
let capture_kind = match mode {
BindingMode::Move => {
self.consume_place(place, pat.into());
return;
}
BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
BindingMode::Ref(Mutability::Mut) => {
BorrowKind::Mut { kind: MutBorrowKind::Default }
}
};
self.add_capture(place, CaptureKind::ByRef(capture_kind), pat.into());
}
Pat::TupleStruct { path: _, args, ellipsis } => {
let Some(variant) = self.result.variant_resolution_for_pat(pat) else {
return;
};
match variant {
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
self.consume_place(place, pat.into())
}
VariantId::StructId(s) => {
let vd = &*self.db.struct_data(s).variant_data;
let (al, ar) = args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let fields = vd.fields().iter();
let it =
al.iter().zip(fields.clone()).chain(ar.iter().rev().zip(fields.rev()));
for (arg, (i, _)) in it {
let mut p = place.clone();
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant,
local_id: i,
})));
self.consume_with_pat(p, *arg);
Pat::Range { .. }
| Pat::Slice { .. }
| Pat::ConstBlock(_)
| Pat::Path(_)
| Pat::Lit(_) => self.consume_place(place),
&Pat::Bind { id, subpat: _ } => {
let mode = self.result.binding_modes[tgt_pat];
let capture_kind = match mode {
BindingMode::Move => {
self.consume_place(place);
break 'reset_span_stack;
}
BindingMode::Ref(Mutability::Not) => BorrowKind::Shared,
BindingMode::Ref(Mutability::Mut) => {
BorrowKind::Mut { kind: MutBorrowKind::Default }
}
};
self.current_capture_span_stack.push(MirSpan::BindingId(id));
self.add_capture(place, CaptureKind::ByRef(capture_kind));
self.current_capture_span_stack.pop();
}
Pat::TupleStruct { path: _, args, ellipsis } => {
let Some(variant) = self.result.variant_resolution_for_pat(tgt_pat) else {
break 'reset_span_stack;
};
match variant {
VariantId::EnumVariantId(_) | VariantId::UnionId(_) => {
self.consume_place(place)
}
VariantId::StructId(s) => {
let vd = &*self.db.struct_data(s).variant_data;
let (al, ar) =
args.split_at(ellipsis.map_or(args.len(), |it| it as usize));
let fields = vd.fields().iter();
let it = al
.iter()
.zip(fields.clone())
.chain(ar.iter().rev().zip(fields.rev()));
for (&arg, (i, _)) in it {
let mut p = place.clone();
self.current_capture_span_stack.push(MirSpan::PatId(arg));
p.projections.push(ProjectionElem::Field(Either::Left(FieldId {
parent: variant,
local_id: i,
})));
self.consume_with_pat(p, arg);
self.current_capture_span_stack.pop();
}
}
}
}
Pat::Ref { pat, mutability: _ } => {
self.current_capture_span_stack.push(MirSpan::PatId(tgt_pat));
place.projections.push(ProjectionElem::Deref);
self.consume_with_pat(place, *pat);
self.current_capture_span_stack.pop();
}
Pat::Box { .. } => (), // not supported
}
Pat::Ref { pat, mutability: _ } => {
place.projections.push(ProjectionElem::Deref);
self.consume_with_pat(place, *pat)
}
Pat::Box { .. } => (), // not supported
}
self.current_capture_span_stack
.truncate(self.current_capture_span_stack.len() - adjustments_count);
}
fn consume_exprs(&mut self, exprs: impl Iterator<Item = ExprId>) {
@ -1042,12 +1223,28 @@ impl InferenceContext<'_> {
CaptureBy::Ref => (),
}
self.minimize_captures();
self.strip_captures_ref_span();
let result = mem::take(&mut self.current_captures);
let captures = result.into_iter().map(|it| it.with_ty(self)).collect::<Vec<_>>();
self.result.closure_info.insert(closure, (captures, closure_kind));
closure_kind
}
fn strip_captures_ref_span(&mut self) {
// FIXME: Borrow checker won't allow without this.
let mut captures = std::mem::take(&mut self.current_captures);
for capture in &mut captures {
if matches!(capture.kind, CaptureKind::ByValue) {
for span_stack in &mut capture.span_stacks {
if span_stack[span_stack.len() - 1].is_ref_span(self.body) {
span_stack.truncate(span_stack.len() - 1);
}
}
}
}
self.current_captures = captures;
}
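
The capture changes above replace the single `span` field on captured items with per-capture span stacks that are pushed and popped while walking expressions and patterns, then snapshotted when a capture is recorded. A minimal, self-contained sketch of that bookkeeping pattern (names such as `Span`, `Capture`, and `Walker` are invented for illustration, not rust-analyzer's actual types):

// Sketch only: illustrates the span-stack idea behind `current_capture_span_stack`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

struct Capture {
    name: &'static str,
    // Each capture keeps every stack snapshot that produced it.
    span_stacks: Vec<Vec<Span>>,
}

struct Walker {
    span_stack: Vec<Span>,
    captures: Vec<Capture>,
}

impl Walker {
    fn push_capture(&mut self, name: &'static str) {
        // Snapshot the current stack so diagnostics can point at every step.
        self.captures.push(Capture { name, span_stacks: vec![self.span_stack.clone()] });
    }

    fn walk_ref_expr(&mut self, name: &'static str, ref_span: Span) {
        // Push the span of the `&`/`&mut` expression before recursing, pop afterwards,
        // mirroring how the inference context pushes `MirSpan`s while walking.
        self.span_stack.push(ref_span);
        self.push_capture(name);
        self.span_stack.pop();
    }
}

fn main() {
    let mut w = Walker { span_stack: vec![Span(0)], captures: Vec::new() };
    w.walk_ref_expr("a", Span(7));
    assert_eq!(w.captures[0].name, "a");
    assert_eq!(w.captures[0].span_stacks[0], vec![Span(0), Span(7)]);
}
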
pub(crate) fn infer_closures(&mut self) {
let deferred_closures = self.sort_closures();
for (closure, exprs) in deferred_closures.into_iter().rev() {
@ -1108,10 +1305,17 @@ impl InferenceContext<'_> {
}
}
fn apply_adjusts_to_place(mut r: HirPlace, adjustments: &[Adjustment]) -> Option<HirPlace> {
/// Call this only when the last span in the stack isn't a split.
fn apply_adjusts_to_place(
current_capture_span_stack: &mut Vec<MirSpan>,
mut r: HirPlace,
adjustments: &[Adjustment],
) -> Option<HirPlace> {
let span = *current_capture_span_stack.last().expect("empty capture span stack");
for adj in adjustments {
match &adj.kind {
Adjust::Deref(None) => {
current_capture_span_stack.push(span);
r.projections.push(ProjectionElem::Deref);
}
_ => return None,

View File

@ -18,14 +18,13 @@ use triomphe::Arc;
use crate::{
autoderef::{Autoderef, AutoderefKind},
db::HirDatabase,
error_lifetime,
infer::{
Adjust, Adjustment, AutoBorrow, InferOk, InferenceContext, OverloadedDeref, PointerCast,
TypeError, TypeMismatch,
},
utils::ClosureSubst,
Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Solution,
Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
Canonical, DomainGoal, FnAbi, FnPointer, FnSig, Guidance, InEnvironment, Interner, Lifetime,
Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
};
use super::unify::InferenceTable;
@ -301,7 +300,7 @@ impl InferenceTable<'_> {
// Examine the supertype and consider auto-borrowing.
match to_ty.kind(Interner) {
TyKind::Raw(mt, _) => return self.coerce_ptr(from_ty, to_ty, *mt),
TyKind::Ref(mt, _, _) => return self.coerce_ref(from_ty, to_ty, *mt),
TyKind::Ref(mt, lt, _) => return self.coerce_ref(from_ty, to_ty, *mt, lt),
_ => {}
}
@ -377,11 +376,17 @@ impl InferenceTable<'_> {
/// Reborrows `&mut A` to `&mut B` and `&(mut) A` to `&B`.
/// To match `A` with `B`, autoderef will be performed,
/// calling `deref`/`deref_mut` where necessary.
fn coerce_ref(&mut self, from_ty: Ty, to_ty: &Ty, to_mt: Mutability) -> CoerceResult {
let from_mt = match from_ty.kind(Interner) {
&TyKind::Ref(mt, _, _) => {
coerce_mutabilities(mt, to_mt)?;
mt
fn coerce_ref(
&mut self,
from_ty: Ty,
to_ty: &Ty,
to_mt: Mutability,
to_lt: &Lifetime,
) -> CoerceResult {
let (_from_lt, from_mt) = match from_ty.kind(Interner) {
TyKind::Ref(mt, lt, _) => {
coerce_mutabilities(*mt, to_mt)?;
(lt.clone(), *mt) // clone is probably not good?
}
_ => return self.unify_and(&from_ty, to_ty, identity),
};
@ -427,8 +432,8 @@ impl InferenceTable<'_> {
// compare those. Note that this means we use the target
// mutability [1], since it may be that we are coercing
// from `&mut T` to `&U`.
let lt = error_lifetime(); // FIXME: handle lifetimes correctly, see rustc
let derefd_from_ty = TyKind::Ref(to_mt, lt, referent_ty).intern(Interner);
let lt = to_lt; // FIXME: Involve rustc LUB and SUB flag checks
let derefd_from_ty = TyKind::Ref(to_mt, lt.clone(), referent_ty).intern(Interner);
match autoderef.table.try_unify(&derefd_from_ty, to_ty) {
Ok(result) => {
found = Some(result.map(|()| derefd_from_ty));
@ -472,8 +477,10 @@ impl InferenceTable<'_> {
}
let mut adjustments = auto_deref_adjust_steps(&autoderef);
adjustments
.push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)), target: ty.clone() });
adjustments.push(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(to_lt.clone(), to_mt)),
target: ty.clone(),
});
success(adjustments, ty, goals)
}
@ -621,11 +628,11 @@ impl InferenceTable<'_> {
(TyKind::Ref(from_mt, _, from_inner), &TyKind::Ref(to_mt, _, _)) => {
coerce_mutabilities(*from_mt, to_mt)?;
let lt = error_lifetime();
let lt = self.new_lifetime_var();
Some((
Adjustment { kind: Adjust::Deref(None), target: from_inner.clone() },
Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(to_mt)),
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), to_mt)),
target: TyKind::Ref(to_mt, lt, from_inner.clone()).intern(Interner),
},
))

View File

@ -635,7 +635,10 @@ impl InferenceContext<'_> {
let inner_ty = self.infer_expr_inner(*expr, &expectation);
match rawness {
Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
Rawness::Ref => TyKind::Ref(mutability, error_lifetime(), inner_ty),
Rawness::Ref => {
let lt = self.table.new_lifetime_var();
TyKind::Ref(mutability, lt, inner_ty)
}
}
.intern(Interner)
}
@ -786,18 +789,23 @@ impl InferenceContext<'_> {
adj.apply(&mut self.table, base_ty)
});
// mutability will be fixed up in `InferenceContext::infer_mut`;
adj.push(Adjustment::borrow(Mutability::Not, self_ty.clone()));
adj.push(Adjustment::borrow(
Mutability::Not,
self_ty.clone(),
self.table.new_lifetime_var(),
));
self.write_expr_adj(*base, adj);
if let Some(func) = self
.db
.trait_data(index_trait)
.method_by_name(&Name::new_symbol_root(sym::index.clone()))
{
let substs = TyBuilder::subst_for_def(self.db, index_trait, None)
.push(self_ty.clone())
.push(index_ty.clone())
.build();
self.write_method_resolution(tgt_expr, func, substs);
let subst = TyBuilder::subst_for_def(self.db, index_trait, None);
if subst.remaining() != 2 {
return self.err_ty();
}
let subst = subst.push(self_ty.clone()).push(index_ty.clone()).build();
self.write_method_resolution(tgt_expr, func, subst);
}
let assoc = self.resolve_ops_index_output();
let res = self.resolve_associated_type_with_params(
@ -990,7 +998,7 @@ impl InferenceContext<'_> {
match fn_x {
FnTrait::FnOnce => (),
FnTrait::FnMut => {
if let TyKind::Ref(Mutability::Mut, _, inner) = derefed_callee.kind(Interner) {
if let TyKind::Ref(Mutability::Mut, lt, inner) = derefed_callee.kind(Interner) {
if adjustments
.last()
.map(|it| matches!(it.kind, Adjust::Borrow(_)))
@ -999,15 +1007,27 @@ impl InferenceContext<'_> {
// prefer reborrow to move
adjustments
.push(Adjustment { kind: Adjust::Deref(None), target: inner.clone() });
adjustments.push(Adjustment::borrow(Mutability::Mut, inner.clone()))
adjustments.push(Adjustment::borrow(
Mutability::Mut,
inner.clone(),
lt.clone(),
))
}
} else {
adjustments.push(Adjustment::borrow(Mutability::Mut, derefed_callee.clone()));
adjustments.push(Adjustment::borrow(
Mutability::Mut,
derefed_callee.clone(),
self.table.new_lifetime_var(),
));
}
}
FnTrait::Fn => {
if !matches!(derefed_callee.kind(Interner), TyKind::Ref(Mutability::Not, _, _)) {
adjustments.push(Adjustment::borrow(Mutability::Not, derefed_callee.clone()));
adjustments.push(Adjustment::borrow(
Mutability::Not,
derefed_callee.clone(),
self.table.new_lifetime_var(),
));
}
}
}
@ -1295,10 +1315,12 @@ impl InferenceContext<'_> {
// HACK: We can use this substitution for the function because the function itself doesn't
// have its own generic parameters.
let subst = TyBuilder::subst_for_def(self.db, trait_, None)
.push(lhs_ty.clone())
.push(rhs_ty.clone())
.build();
let subst = TyBuilder::subst_for_def(self.db, trait_, None);
if subst.remaining() != 2 {
return Ty::new(Interner, TyKind::Error);
}
let subst = subst.push(lhs_ty.clone()).push(rhs_ty.clone()).build();
self.write_method_resolution(tgt_expr, func, subst.clone());
let method_ty = self.db.value_ty(func.into()).unwrap().substitute(Interner, &subst);
@ -1310,11 +1332,11 @@ impl InferenceContext<'_> {
Some(sig) => {
let p_left = &sig.params()[0];
if matches!(op, BinaryOp::CmpOp(..) | BinaryOp::Assignment { .. }) {
if let &TyKind::Ref(mtbl, _, _) = p_left.kind(Interner) {
if let TyKind::Ref(mtbl, lt, _) = p_left.kind(Interner) {
self.write_expr_adj(
lhs,
vec![Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)),
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
target: p_left.clone(),
}],
);
@ -1322,11 +1344,11 @@ impl InferenceContext<'_> {
}
let p_right = &sig.params()[1];
if matches!(op, BinaryOp::CmpOp(..)) {
if let &TyKind::Ref(mtbl, _, _) = p_right.kind(Interner) {
if let TyKind::Ref(mtbl, lt, _) = p_right.kind(Interner) {
self.write_expr_adj(
rhs,
vec![Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mtbl)),
kind: Adjust::Borrow(AutoBorrow::Ref(lt.clone(), *mtbl)),
target: p_right.clone(),
}],
);

View File

@ -28,7 +28,7 @@ impl InferenceContext<'_> {
Adjust::NeverToAny | Adjust::Deref(None) | Adjust::Pointer(_) => (),
Adjust::Deref(Some(d)) => *d = OverloadedDeref(Some(mutability)),
Adjust::Borrow(b) => match b {
AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m) => mutability = *m,
AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m) => mutability = *m,
},
}
}
@ -125,7 +125,7 @@ impl InferenceContext<'_> {
.get_mut(&base)
.and_then(|it| it.last_mut());
if let Some(Adjustment {
kind: Adjust::Borrow(AutoBorrow::Ref(mutability)),
kind: Adjust::Borrow(AutoBorrow::Ref(_, mutability)),
target,
}) = base_adjustments
{

View File

@ -12,7 +12,6 @@ use stdx::TupleExt;
use crate::{
consteval::{try_const_usize, usize_const},
error_lifetime,
infer::{BindingMode, Expectation, InferenceContext, TypeMismatch},
lower::lower_to_chalk_mutability,
primitive::UintTy,
@ -394,19 +393,20 @@ impl InferenceContext<'_> {
expected: &Ty,
default_bm: BindingMode,
) -> Ty {
let expectation = match expected.as_reference() {
Some((inner_ty, _lifetime, _exp_mut)) => inner_ty.clone(),
let (expectation_type, expectation_lt) = match expected.as_reference() {
Some((inner_ty, lifetime, _exp_mut)) => (inner_ty.clone(), lifetime.clone()),
None => {
let inner_ty = self.table.new_type_var();
let inner_lt = self.table.new_lifetime_var();
let ref_ty =
TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner);
TyKind::Ref(mutability, inner_lt.clone(), inner_ty.clone()).intern(Interner);
// Unification failure will be reported by the caller.
self.unify(&ref_ty, expected);
inner_ty
(inner_ty, inner_lt)
}
};
let subty = self.infer_pat(inner_pat, &expectation, default_bm);
TyKind::Ref(mutability, error_lifetime(), subty).intern(Interner)
let subty = self.infer_pat(inner_pat, &expectation_type, default_bm);
TyKind::Ref(mutability, expectation_lt, subty).intern(Interner)
}
fn infer_bind_pat(
@ -433,7 +433,8 @@ impl InferenceContext<'_> {
let bound_ty = match mode {
BindingMode::Ref(mutability) => {
TyKind::Ref(mutability, error_lifetime(), inner_ty.clone()).intern(Interner)
let inner_lt = self.table.new_lifetime_var();
TyKind::Ref(mutability, inner_lt, inner_ty.clone()).intern(Interner)
}
BindingMode::Move => inner_ty.clone(),
};

View File

@ -17,12 +17,12 @@ use triomphe::Arc;
use super::{InferOk, InferResult, InferenceContext, TypeError};
use crate::{
consteval::unknown_const, db::HirDatabase, error_lifetime, fold_generic_args,
fold_tys_and_consts, to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical,
Const, ConstValue, DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData,
Guidance, InEnvironment, InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy,
ProjectionTyExt, Scalar, Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt,
TyKind, VariableKind, WhereClause,
consteval::unknown_const, db::HirDatabase, fold_generic_args, fold_tys_and_consts,
to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue,
DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment,
InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar,
Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
WhereClause,
};
impl InferenceContext<'_> {
@ -105,7 +105,7 @@ impl<T: HasInterner<Interner = Interner>> Canonicalized<T> {
VariableKind::Ty(TyVariableKind::Float) => ctx.new_float_var().cast(Interner),
// Chalk can sometimes return new lifetime variables. We just replace them by errors
// for now.
VariableKind::Lifetime => error_lifetime().cast(Interner),
VariableKind::Lifetime => ctx.new_lifetime_var().cast(Interner),
VariableKind::Const(ty) => ctx.new_const_var(ty.clone()).cast(Interner),
}),
);

View File

@ -42,19 +42,20 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
hir_def::ModuleDefId::AdtId(x) => {
let name = match x {
hir_def::AdtId::StructId(x) => {
db.struct_data(x).name.display_no_db().to_smolstr()
db.struct_data(x).name.display_no_db(file_id.edition()).to_smolstr()
}
hir_def::AdtId::UnionId(x) => {
db.union_data(x).name.display_no_db().to_smolstr()
db.union_data(x).name.display_no_db(file_id.edition()).to_smolstr()
}
hir_def::AdtId::EnumId(x) => {
db.enum_data(x).name.display_no_db().to_smolstr()
db.enum_data(x).name.display_no_db(file_id.edition()).to_smolstr()
}
};
(name == "Goal").then_some(Either::Left(x))
}
hir_def::ModuleDefId::TypeAliasId(x) => {
let name = db.type_alias_data(x).name.display_no_db().to_smolstr();
let name =
db.type_alias_data(x).name.display_no_db(file_id.edition()).to_smolstr();
(name == "Goal").then_some(Either::Right(x))
}
_ => None,
@ -94,7 +95,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
let name = db.function_data(x).name.display_no_db().to_smolstr();
let name = db.function_data(x).name.display_no_db(file_id.edition()).to_smolstr();
(name == "main").then_some(x)
}
_ => None,
@ -104,7 +105,7 @@ fn eval_expr(ra_fixture: &str, minicore: &str) -> Result<Arc<Layout>, LayoutErro
let b = hir_body
.bindings
.iter()
.find(|x| x.1.name.display_no_db().to_smolstr() == "goal")
.find(|x| x.1.name.display_no_db(file_id.edition()).to_smolstr() == "goal")
.unwrap()
.0;
let infer = db.infer(function_id.into());

View File

@ -68,6 +68,7 @@ use intern::{sym, Symbol};
use la_arena::{Arena, Idx};
use mir::{MirEvalError, VTableMap};
use rustc_hash::{FxHashMap, FxHashSet};
use span::Edition;
use syntax::ast::{make, ConstArg};
use traits::FnTrait;
use triomphe::Arc;
@ -1027,7 +1028,11 @@ where
collector.placeholders.into_iter().collect()
}
pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option<ConstArg> {
pub fn known_const_to_ast(
konst: &Const,
db: &dyn HirDatabase,
edition: Edition,
) -> Option<ConstArg> {
if let ConstValue::Concrete(c) = &konst.interned().value {
match c.interned {
ConstScalar::UnevaluatedConst(GeneralConstId::InTypeConstId(cid), _) => {
@ -1037,5 +1042,5 @@ pub fn known_const_to_ast(konst: &Const, db: &dyn HirDatabase) -> Option<ConstAr
_ => (),
}
}
Some(make::expr_const_value(konst.display(db).to_string().as_str()))
Some(make::expr_const_value(konst.display(db, edition).to_string().as_str()))
}

View File

@ -6,7 +6,7 @@
//!
//! This usually involves resolving names, collecting generic arguments etc.
use std::{
cell::{Cell, RefCell, RefMut},
cell::{Cell, OnceCell, RefCell, RefMut},
iter,
ops::{self, Not as _},
};
@ -43,7 +43,6 @@ use hir_def::{
use hir_expand::{name::Name, ExpandResult};
use intern::Interned;
use la_arena::{Arena, ArenaMap};
use once_cell::unsync::OnceCell;
use rustc_hash::FxHashSet;
use rustc_pattern_analysis::Captures;
use smallvec::SmallVec;
@ -378,26 +377,25 @@ impl<'a> TyLoweringContext<'a> {
// Count the number of `impl Trait` things that appear within our bounds.
// Since those have been emitted as implicit type args already.
counter.set(idx + count_impl_traits(type_ref) as u16);
let (
_parent_params,
self_param,
type_params,
const_params,
_impl_trait_params,
lifetime_params,
) = self
let kind = self
.generics()
.expect("variable impl trait lowering must be in a generic def")
.provenance_split();
TyKind::BoundVar(BoundVar::new(
self.in_binders,
idx as usize
+ self_param as usize
+ type_params
+ const_params
+ lifetime_params,
))
.intern(Interner)
.iter()
.enumerate()
.filter_map(|(i, (id, data))| match (id, data) {
(
GenericParamId::TypeParamId(_),
GenericParamDataRef::TypeParamData(data),
) if data.provenance == TypeParamProvenance::ArgumentImplTrait => {
Some(i)
}
_ => None,
})
.nth(idx as usize)
.map_or(TyKind::Error, |id| {
TyKind::BoundVar(BoundVar { debruijn: self.in_binders, index: id })
});
kind.intern(Interner)
}
ImplTraitLoweringState::Disallowed => {
// FIXME: report error
@ -1553,6 +1551,10 @@ pub(crate) fn generic_predicates_for_param_query(
}
};
if invalid_target {
// If this is filtered out without lowering, `?Sized` is not gathered into `ctx.unsized_types`
if let TypeBound::Path(_, TraitBoundModifier::Maybe) = &**bound {
ctx.lower_where_predicate(pred, &def, true).for_each(drop);
}
return false;
}
@ -1741,15 +1743,39 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
substitution: &'subst Substitution,
resolver: &Resolver,
) -> Option<impl Iterator<Item = WhereClause> + Captures<'a> + Captures<'subst>> {
let is_trait_def = matches!(def, GenericDefId::TraitId(..));
let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
let sized_trait = db
.lang_item(resolver.krate(), LangItem::Sized)
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?;
sized_trait.map(move |sized_trait| {
generic_args
.iter()
let get_trait_self_idx = |container: ItemContainerId| {
if matches!(container, ItemContainerId::TraitId(_)) {
let generics = generics(db.upcast(), def);
Some(generics.len_self())
} else {
None
}
};
let trait_self_idx = match def {
GenericDefId::TraitId(_) => Some(0),
GenericDefId::FunctionId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
GenericDefId::ConstId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
GenericDefId::TypeAliasId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
_ => None,
};
Some(
substitution
.iter(Interner)
.enumerate()
.filter_map(
move |(idx, generic_arg)| {
if Some(idx) == trait_self_idx {
None
} else {
Some(generic_arg)
}
},
)
.filter_map(|generic_arg| generic_arg.ty(Interner))
.filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty))
.map(move |self_ty| {
@ -1757,8 +1783,8 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
trait_id: sized_trait,
substitution: Substitution::from1(Interner, self_ty.clone()),
})
})
})
}),
)
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -1978,13 +2004,13 @@ fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
.with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
.with_type_param_mode(ParamLoweringMode::Variable);
let type_alias_data = db.type_alias_data(t);
if type_alias_data.is_extern {
Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
let inner = if type_alias_data.is_extern {
TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner)
} else {
let type_ref = &type_alias_data.type_ref;
let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
make_binders(db, &generics, inner)
}
ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error))
};
make_binders(db, &generics, inner)
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]

View File

@ -35,7 +35,7 @@ use crate::{
};
/// This is used as a key for indexing impls.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub enum TyFingerprint {
// These are lang item impls:
Str,
@ -542,7 +542,8 @@ impl ReceiverAdjustments {
}
}
if let Some(m) = self.autoref {
let a = Adjustment::borrow(m, ty);
let lt = table.new_lifetime_var();
let a = Adjustment::borrow(m, ty, lt);
ty = a.target.clone();
adjust.push(a);
}
@ -1066,7 +1067,7 @@ fn iterate_method_candidates_by_receiver(
// be found in any of the derefs of receiver_ty, so we have to go through
// that, including raw derefs.
table.run_in_snapshot(|table| {
let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true);
let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true);
while let Some((self_ty, _)) = autoderef.next() {
iterate_inherent_methods(
&self_ty,
@ -1081,7 +1082,7 @@ fn iterate_method_candidates_by_receiver(
ControlFlow::Continue(())
})?;
table.run_in_snapshot(|table| {
let mut autoderef = autoderef::Autoderef::new(table, receiver_ty.clone(), true);
let mut autoderef = autoderef::Autoderef::new_no_tracking(table, receiver_ty.clone(), true);
while let Some((self_ty, _)) = autoderef.next() {
if matches!(self_ty.kind(Interner), TyKind::InferenceVar(_, TyVariableKind::General)) {
// don't try to resolve methods on unknown types
@ -1656,7 +1657,7 @@ fn autoderef_method_receiver(
ty: Ty,
) -> Vec<(Canonical<Ty>, ReceiverAdjustments)> {
let mut deref_chain: Vec<_> = Vec::new();
let mut autoderef = autoderef::Autoderef::new(table, ty, false);
let mut autoderef = autoderef::Autoderef::new_no_tracking(table, ty, false);
while let Some((ty, derefs)) = autoderef.next() {
deref_chain.push((
autoderef.table.canonicalize(ty),

View File

@ -16,7 +16,8 @@ use base_db::CrateId;
use chalk_ir::Mutability;
use either::Either;
use hir_def::{
hir::{BindingId, Expr, ExprId, Ordering, PatId},
body::Body,
hir::{BindingAnnotation, BindingId, Expr, ExprId, Ordering, PatId},
DefWithBodyId, FieldId, StaticId, TupleFieldId, UnionId, VariantId,
};
use la_arena::{Arena, ArenaMap, Idx, RawIdx};
@ -158,7 +159,10 @@ impl<V, T> ProjectionElem<V, T> {
subst.at(Interner, 0).assert_ty_ref(Interner).clone()
}
_ => {
never!("Overloaded deref on type {} is not a projection", base.display(db));
never!(
"Overloaded deref on type {} is not a projection",
base.display(db, db.crate_graph()[krate].edition)
);
TyKind::Error.intern(Interner)
}
},
@ -633,6 +637,7 @@ pub enum TerminatorKind {
},
}
// Order of variants in this enum matters: they are used to compare borrow kinds.
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum BorrowKind {
/// Data must be immutable and is aliasable.
@ -663,15 +668,16 @@ pub enum BorrowKind {
Mut { kind: MutBorrowKind },
}
// Order of variants in this enum matters: they are used to compare borrow kinds.
#[derive(Debug, PartialEq, Eq, Clone, Copy, PartialOrd, Ord)]
pub enum MutBorrowKind {
/// Data must be immutable but not aliasable. This kind of borrow cannot currently
/// be expressed by the user and is used only in implicit closure bindings.
ClosureCapture,
Default,
/// This borrow arose from method-call auto-ref
/// (i.e., adjustment::Adjust::Borrow).
TwoPhasedBorrow,
/// Data must be immutable but not aliasable. This kind of borrow cannot currently
/// be expressed by the user and is used only in implicit closure bindings.
ClosureCapture,
}
impl BorrowKind {
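
Since the comments above stress that variant order matters, recall that `#[derive(PartialOrd, Ord)]` on an enum compares by declaration order, which is why `ClosureCapture` now sits at the end of `MutBorrowKind` and thus compares as the strongest mutable-borrow kind. A tiny standalone illustration (the enum here is invented for the example, not part of the diff):

// Derived `Ord` follows declaration order: earlier variants compare as smaller.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
enum Strength {
    Weak,   // smallest
    Medium,
    Strong, // largest
}

fn main() {
    assert!(Strength::Weak < Strength::Medium);
    assert!(Strength::Medium < Strength::Strong);
}
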
@ -1169,6 +1175,20 @@ pub enum MirSpan {
Unknown,
}
impl MirSpan {
pub fn is_ref_span(&self, body: &Body) -> bool {
match *self {
MirSpan::ExprId(expr) => matches!(body[expr], Expr::Ref { .. }),
// FIXME: Figure out if this is correct wrt. match ergonomics.
MirSpan::BindingId(binding) => matches!(
body.bindings[binding].mode,
BindingAnnotation::Ref | BindingAnnotation::RefMut
),
MirSpan::PatId(_) | MirSpan::SelfParam | MirSpan::Unknown => false,
}
}
}
impl_from!(ExprId, PatId for MirSpan);
impl From<&ExprId> for MirSpan {

View File

@ -23,7 +23,7 @@ use rustc_apfloat::{
Float,
};
use rustc_hash::{FxHashMap, FxHashSet};
use span::FileId;
use span::{Edition, FileId};
use stdx::never;
use syntax::{SyntaxNodePtr, TextRange};
use triomphe::Arc;
@ -358,6 +358,7 @@ impl MirEvalError {
f: &mut String,
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
edition: Edition,
) -> std::result::Result<(), std::fmt::Error> {
writeln!(f, "Mir eval error:")?;
let mut err = self;
@ -370,7 +371,7 @@ impl MirEvalError {
writeln!(
f,
"In function {} ({:?})",
function_name.name.display(db.upcast()),
function_name.name.display(db.upcast(), edition),
func
)?;
}
@ -415,7 +416,7 @@ impl MirEvalError {
write!(
f,
"Layout for type `{}` is not available due {err:?}",
ty.display(db).with_closure_style(ClosureStyle::ClosureWithId)
ty.display(db, edition).with_closure_style(ClosureStyle::ClosureWithId)
)?;
}
MirEvalError::MirLowerError(func, err) => {
@ -423,16 +424,17 @@ impl MirEvalError {
writeln!(
f,
"MIR lowering for function `{}` ({:?}) failed due:",
function_name.name.display(db.upcast()),
function_name.name.display(db.upcast(), edition),
func
)?;
err.pretty_print(f, db, span_formatter)?;
err.pretty_print(f, db, span_formatter, edition)?;
}
MirEvalError::ConstEvalError(name, err) => {
MirLowerError::ConstEvalError((**name).into(), err.clone()).pretty_print(
f,
db,
span_formatter,
edition,
)?;
}
MirEvalError::UndefinedBehavior(_)
@ -1516,9 +1518,97 @@ impl Evaluator<'_> {
self.size_of_sized(target_ty, locals, "destination of int to int cast")?;
Owned(current[0..dest_size].to_vec())
}
CastKind::FloatToInt => not_supported!("float to int cast"),
CastKind::FloatToFloat => not_supported!("float to float cast"),
CastKind::IntToFloat => not_supported!("float to int cast"),
CastKind::FloatToInt => {
let ty = self.operand_ty(operand, locals)?;
let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else {
not_supported!("invalid float to int cast");
};
let value = self.eval_operand(operand, locals)?.get(self)?;
let value = match ty {
chalk_ir::FloatTy::F32 => {
let value = value.try_into().unwrap();
f32::from_le_bytes(value) as f64
}
chalk_ir::FloatTy::F64 => {
let value = value.try_into().unwrap();
f64::from_le_bytes(value)
}
chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
not_supported!("unstable floating point type f16 and f128");
}
};
let is_signed = matches!(
target_ty.kind(Interner),
TyKind::Scalar(chalk_ir::Scalar::Int(_))
);
let dest_size =
self.size_of_sized(target_ty, locals, "destination of float to int cast")?;
let dest_bits = dest_size * 8;
let (max, min) = if dest_bits == 128 {
(i128::MAX, i128::MIN)
} else if is_signed {
let max = 1i128 << (dest_bits - 1);
(max - 1, -max)
} else {
((1i128 << dest_bits) - 1, 0)
};
let value = (value as i128).min(max).max(min);
let result = value.to_le_bytes();
Owned(result[0..dest_size].to_vec())
}
CastKind::FloatToFloat => {
let ty = self.operand_ty(operand, locals)?;
let TyKind::Scalar(chalk_ir::Scalar::Float(ty)) = ty.kind(Interner) else {
not_supported!("invalid float to int cast");
};
let value = self.eval_operand(operand, locals)?.get(self)?;
let value = match ty {
chalk_ir::FloatTy::F32 => {
let value = value.try_into().unwrap();
f32::from_le_bytes(value) as f64
}
chalk_ir::FloatTy::F64 => {
let value = value.try_into().unwrap();
f64::from_le_bytes(value)
}
chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
not_supported!("unstable floating point type f16 and f128");
}
};
let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) =
target_ty.kind(Interner)
else {
not_supported!("invalid float to float cast");
};
match target_ty {
chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
not_supported!("unstable floating point type f16 and f128");
}
}
}
CastKind::IntToFloat => {
let current_ty = self.operand_ty(operand, locals)?;
let is_signed = matches!(
current_ty.kind(Interner),
TyKind::Scalar(chalk_ir::Scalar::Int(_))
);
let value = pad16(self.eval_operand(operand, locals)?.get(self)?, is_signed);
let value = i128::from_le_bytes(value);
let TyKind::Scalar(chalk_ir::Scalar::Float(target_ty)) =
target_ty.kind(Interner)
else {
not_supported!("invalid int to float cast");
};
match target_ty {
chalk_ir::FloatTy::F32 => Owned((value as f32).to_le_bytes().to_vec()),
chalk_ir::FloatTy::F64 => Owned((value as f64).to_le_bytes().to_vec()),
chalk_ir::FloatTy::F16 | chalk_ir::FloatTy::F128 => {
not_supported!("unstable floating point type f16 and f128");
}
}
}
CastKind::FnPtrToPtr => not_supported!("fn ptr to ptr cast"),
},
})
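
The new `CastKind::FloatToInt` arm above clamps the value to the destination integer's range before writing its little-endian bytes. A rough standalone sketch of that saturating conversion, simplified to plain Rust values rather than the evaluator's byte buffers (the function name and signature here are invented for illustration):

// Saturating float -> int conversion for a destination of `dest_size` bytes.
fn float_to_int_saturating(value: f64, dest_size: usize, is_signed: bool) -> Vec<u8> {
    let dest_bits = dest_size * 8;
    let (max, min) = if dest_bits == 128 {
        (i128::MAX, i128::MIN)
    } else if is_signed {
        let max = 1i128 << (dest_bits - 1);
        (max - 1, -max)
    } else {
        // Unsigned max is 2^bits - 1.
        ((1i128 << dest_bits) - 1, 0)
    };
    // `as` casts from float to integer already saturate and map NaN to 0.
    let clamped = (value as i128).clamp(min, max);
    clamped.to_le_bytes()[..dest_size].to_vec()
}

fn main() {
    // 300.0 does not fit in an i8, so it saturates to 127.
    assert_eq!(float_to_int_saturating(300.0, 1, true), vec![127]);
}
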
@ -2675,10 +2765,11 @@ impl Evaluator<'_> {
let db = self.db.upcast();
let loc = variant.lookup(db);
let enum_loc = loc.parent.lookup(db);
let edition = self.db.crate_graph()[self.crate_id].edition;
let name = format!(
"{}::{}",
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()),
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
);
Err(MirEvalError::ConstEvalError(name, Box::new(e)))
}

View File

@ -856,7 +856,11 @@ impl Evaluator<'_> {
Ok(ty_name) => ty_name,
// Fallback to human readable display in case of `Err`. Ideally we want to use `display_source_code` to
// render full paths.
Err(_) => ty.display(self.db).to_string(),
Err(_) => {
let krate = locals.body.owner.krate(self.db.upcast());
let edition = self.db.crate_graph()[krate].edition;
ty.display(self.db, edition).to_string()
}
};
let len = ty_name.len();
let addr = self.heap_allocate(len, 1)?;

View File

@ -1,5 +1,5 @@
use hir_def::db::DefDatabase;
use span::EditionedFileId;
use span::{Edition, EditionedFileId};
use syntax::{TextRange, TextSize};
use test_fixture::WithFixture;
@ -15,7 +15,7 @@ fn eval_main(db: &TestDB, file_id: EditionedFileId) -> Result<(String, String),
.declarations()
.find_map(|x| match x {
hir_def::ModuleDefId::FunctionId(x) => {
if db.function_data(x).name.display(db).to_string() == "main" {
if db.function_data(x).name.display(db, Edition::CURRENT).to_string() == "main" {
Some(x)
} else {
None
@ -63,7 +63,7 @@ fn check_pass_and_stdio(ra_fixture: &str, expected_stdout: &str, expected_stderr
let span_formatter = |file, range: TextRange| {
format!("{:?} {:?}..{:?}", file, line_index(range.start()), line_index(range.end()))
};
e.pretty_print(&mut err, &db, span_formatter).unwrap();
e.pretty_print(&mut err, &db, span_formatter, Edition::CURRENT).unwrap();
panic!("Error in interpreting: {err}");
}
Ok((stdout, stderr)) => {

View File

@ -21,7 +21,7 @@ use hir_expand::name::Name;
use la_arena::ArenaMap;
use rustc_apfloat::Float;
use rustc_hash::FxHashMap;
use span::FileId;
use span::{Edition, FileId};
use syntax::TextRange;
use triomphe::Arc;
@ -157,13 +157,18 @@ impl MirLowerError {
f: &mut String,
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
edition: Edition,
) -> std::result::Result<(), std::fmt::Error> {
match self {
MirLowerError::ConstEvalError(name, e) => {
writeln!(f, "In evaluating constant {name}")?;
match &**e {
ConstEvalError::MirLowerError(e) => e.pretty_print(f, db, span_formatter)?,
ConstEvalError::MirEvalError(e) => e.pretty_print(f, db, span_formatter)?,
ConstEvalError::MirLowerError(e) => {
e.pretty_print(f, db, span_formatter, edition)?
}
ConstEvalError::MirEvalError(e) => {
e.pretty_print(f, db, span_formatter, edition)?
}
}
}
MirLowerError::MissingFunctionDefinition(owner, it) => {
@ -171,15 +176,15 @@ impl MirLowerError {
writeln!(
f,
"Missing function definition for {}",
body.pretty_print_expr(db.upcast(), *owner, *it)
body.pretty_print_expr(db.upcast(), *owner, *it, edition)
)?;
}
MirLowerError::TypeMismatch(e) => match e {
Some(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db),
e.actual.display(db),
e.expected.display(db, edition),
e.actual.display(db, edition),
)?,
None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?,
},
@ -189,11 +194,11 @@ impl MirLowerError {
writeln!(
f,
"Generic arg not provided for {}",
param.name().unwrap_or(&Name::missing()).display(db.upcast())
param.name().unwrap_or(&Name::missing()).display(db.upcast(), edition)
)?;
writeln!(f, "Provided args: [")?;
for g in subst.iter(Interner) {
write!(f, " {},", g.display(db))?;
write!(f, " {},", g.display(db, edition))?;
}
writeln!(f, "]")?;
}
@ -242,8 +247,8 @@ impl From<LayoutError> for MirLowerError {
}
impl MirLowerError {
fn unresolved_path(db: &dyn HirDatabase, p: &Path) -> Self {
Self::UnresolvedName(p.display(db).to_string())
fn unresolved_path(db: &dyn HirDatabase, p: &Path, edition: Edition) -> Self {
Self::UnresolvedName(p.display(db, edition).to_string())
}
}
@ -337,7 +342,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.push_assignment(current, place, Operand::Copy(p).into(), expr_id.into());
Ok(Some(current))
}
Adjust::Borrow(AutoBorrow::Ref(m) | AutoBorrow::RawPtr(m)) => {
Adjust::Borrow(AutoBorrow::Ref(_, m) | AutoBorrow::RawPtr(m)) => {
let Some((p, current)) =
self.lower_expr_as_place_with_adjust(current, expr_id, true, rest)?
else {
@ -436,7 +441,8 @@ impl<'ctx> MirLowerCtx<'ctx> {
VariantId::UnionId(_) => implementation_error!("Union variant as path"),
}
} else {
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let unresolved_name =
|| MirLowerError::unresolved_path(self.db, p, self.edition());
let resolver = resolver_for_expr(self.db.upcast(), self.owner, expr_id);
resolver
.resolve_path_in_value_ns_fully(self.db.upcast(), p)
@ -662,7 +668,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
let (func_id, generic_args) =
self.infer.method_resolution(expr_id).ok_or_else(|| {
MirLowerError::UnresolvedMethod(
method_name.display(self.db.upcast()).to_string(),
method_name.display(self.db.upcast(), self.edition()).to_string(),
)
})?;
let func = Operand::from_fn(self.db, func_id, generic_args);
@ -803,7 +809,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
};
let variant_id =
self.infer.variant_resolution_for_expr(expr_id).ok_or_else(|| match path {
Some(p) => MirLowerError::UnresolvedName(p.display(self.db).to_string()),
Some(p) => MirLowerError::UnresolvedName(
p.display(self.db, self.edition()).to_string(),
),
None => MirLowerError::RecordLiteralWithoutPath,
})?;
let subst = match self.expr_ty_without_adjust(expr_id).kind(Interner) {
@ -1172,8 +1180,15 @@ impl<'ctx> MirLowerCtx<'ctx> {
let placeholder_subst = self.placeholder_subst();
let tmp_ty =
capture.ty.clone().substitute(Interner, &placeholder_subst);
let tmp: Place = self.temp(tmp_ty, current, capture.span)?.into();
self.push_assignment(current, tmp, Rvalue::Ref(*bk, p), capture.span);
// FIXME: Handle more than one span.
let capture_spans = capture.spans();
let tmp: Place = self.temp(tmp_ty, current, capture_spans[0])?.into();
self.push_assignment(
current,
tmp,
Rvalue::Ref(*bk, p),
capture_spans[0],
);
operands.push(Operand::Move(tmp));
}
CaptureKind::ByValue => operands.push(Operand::Move(p)),
@ -1378,7 +1393,9 @@ impl<'ctx> MirLowerCtx<'ctx> {
"only `char` and numeric types are allowed in range patterns"
),
};
let unresolved_name = || MirLowerError::unresolved_path(self.db, c.as_ref());
let edition = self.edition();
let unresolved_name =
|| MirLowerError::unresolved_path(self.db, c.as_ref(), edition);
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), c.as_ref())
@ -1904,19 +1921,25 @@ impl<'ctx> MirLowerCtx<'ctx> {
match r {
Ok(r) => Ok(r),
Err(e) => {
let edition = self.edition();
let db = self.db.upcast();
let loc = variant.lookup(db);
let enum_loc = loc.parent.lookup(db);
let name = format!(
"{}::{}",
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast()),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast()),
enum_loc.id.item_tree(db)[enum_loc.id.value].name.display(db.upcast(), edition),
loc.id.item_tree(db)[loc.id.value].name.display(db.upcast(), edition),
);
Err(MirLowerError::ConstEvalError(name.into(), Box::new(e)))
}
}
}
fn edition(&self) -> Edition {
let krate = self.owner.krate(self.db.upcast());
self.db.crate_graph()[krate].edition
}
fn drop_until_scope(
&mut self,
scope_index: usize,
@ -2121,18 +2144,24 @@ pub fn mir_body_for_closure_query(
}
pub fn mir_body_query(db: &dyn HirDatabase, def: DefWithBodyId) -> Result<Arc<MirBody>> {
let krate = def.krate(db.upcast());
let edition = db.crate_graph()[krate].edition;
let detail = match def {
DefWithBodyId::FunctionId(it) => db.function_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::StaticId(it) => db.static_data(it).name.display(db.upcast()).to_string(),
DefWithBodyId::FunctionId(it) => {
db.function_data(it).name.display(db.upcast(), edition).to_string()
}
DefWithBodyId::StaticId(it) => {
db.static_data(it).name.display(db.upcast(), edition).to_string()
}
DefWithBodyId::ConstId(it) => db
.const_data(it)
.name
.clone()
.unwrap_or_else(Name::missing)
.display(db.upcast())
.display(db.upcast(), edition)
.to_string(),
DefWithBodyId::VariantId(it) => {
db.enum_variant_data(it).name.display(db.upcast()).to_string()
db.enum_variant_data(it).name.display(db.upcast(), edition).to_string()
}
DefWithBodyId::InTypeConstId(it) => format!("in type const {it:?}"),
};

View File

@ -347,7 +347,8 @@ impl MirLowerCtx<'_> {
// A const don't bind anything. Only needs check.
return Ok((current, current_else));
}
let unresolved_name = || MirLowerError::unresolved_path(self.db, p);
let unresolved_name =
|| MirLowerError::unresolved_path(self.db, p, self.edition());
let resolver = self.owner.resolver(self.db.upcast());
let pr = resolver
.resolve_path_in_value_ns(self.db.upcast(), p)

View File

@ -9,6 +9,7 @@ use either::Either;
use hir_def::{body::Body, hir::BindingId};
use hir_expand::{name::Name, Lookup};
use la_arena::ArenaMap;
use span::Edition;
use crate::{
db::HirDatabase,
@ -44,18 +45,21 @@ impl MirBody {
ctx.for_body(|this| match ctx.body.owner {
hir_def::DefWithBodyId::FunctionId(id) => {
let data = db.function_data(id);
w!(this, "fn {}() ", data.name.display(db.upcast()));
w!(this, "fn {}() ", data.name.display(db.upcast(), Edition::LATEST));
}
hir_def::DefWithBodyId::StaticId(id) => {
let data = db.static_data(id);
w!(this, "static {}: _ = ", data.name.display(db.upcast()));
w!(this, "static {}: _ = ", data.name.display(db.upcast(), Edition::LATEST));
}
hir_def::DefWithBodyId::ConstId(id) => {
let data = db.const_data(id);
w!(
this,
"const {}: _ = ",
data.name.as_ref().unwrap_or(&Name::missing()).display(db.upcast())
data.name
.as_ref()
.unwrap_or(&Name::missing())
.display(db.upcast(), Edition::LATEST)
);
}
hir_def::DefWithBodyId::VariantId(id) => {
@ -64,8 +68,12 @@ impl MirBody {
w!(
this,
"enum {}::{} = ",
enum_loc.id.item_tree(db.upcast())[enum_loc.id.value].name.display(db.upcast()),
loc.id.item_tree(db.upcast())[loc.id.value].name.display(db.upcast()),
enum_loc.id.item_tree(db.upcast())[enum_loc.id.value]
.name
.display(db.upcast(), Edition::LATEST),
loc.id.item_tree(db.upcast())[loc.id.value]
.name
.display(db.upcast(), Edition::LATEST),
)
}
hir_def::DefWithBodyId::InTypeConstId(id) => {
@ -122,7 +130,7 @@ impl HirDisplay for LocalName {
match self {
LocalName::Unknown(l) => write!(f, "_{}", u32::from(l.into_raw())),
LocalName::Binding(n, l) => {
write!(f, "{}_{}", n.display(f.db.upcast()), u32::from(l.into_raw()))
write!(f, "{}_{}", n.display(f.db.upcast(), f.edition()), u32::from(l.into_raw()))
}
}
}
@ -200,7 +208,7 @@ impl<'a> MirPrettyCtx<'a> {
wln!(
self,
"let {}: {};",
self.local_name(id).display(self.db),
self.local_name(id).display_test(self.db),
self.hir_display(&local.ty)
);
}
@ -231,10 +239,18 @@ impl<'a> MirPrettyCtx<'a> {
wln!(this, ";");
}
StatementKind::StorageDead(p) => {
wln!(this, "StorageDead({})", this.local_name(*p).display(self.db));
wln!(
this,
"StorageDead({})",
this.local_name(*p).display_test(self.db)
);
}
StatementKind::StorageLive(p) => {
wln!(this, "StorageLive({})", this.local_name(*p).display(self.db));
wln!(
this,
"StorageLive({})",
this.local_name(*p).display_test(self.db)
);
}
StatementKind::Deinit(p) => {
w!(this, "Deinit(");
@ -297,7 +313,7 @@ impl<'a> MirPrettyCtx<'a> {
fn f(this: &mut MirPrettyCtx<'_>, local: LocalId, projections: &[PlaceElem]) {
let Some((last, head)) = projections.split_last() else {
// no projection
w!(this, "{}", this.local_name(local).display(this.db));
w!(this, "{}", this.local_name(local).display_test(this.db));
return;
};
match last {
@ -317,13 +333,13 @@ impl<'a> MirPrettyCtx<'a> {
w!(
this,
" as {}).{}",
variant_name.display(this.db.upcast()),
name.display(this.db.upcast())
variant_name.display(this.db.upcast(), Edition::LATEST),
name.display(this.db.upcast(), Edition::LATEST)
);
}
hir_def::VariantId::StructId(_) | hir_def::VariantId::UnionId(_) => {
f(this, local, head);
w!(this, ".{}", name.display(this.db.upcast()));
w!(this, ".{}", name.display(this.db.upcast(), Edition::LATEST));
}
}
}
@ -337,7 +353,7 @@ impl<'a> MirPrettyCtx<'a> {
}
ProjectionElem::Index(l) => {
f(this, local, head);
w!(this, "[{}]", this.local_name(*l).display(this.db));
w!(this, "[{}]", this.local_name(*l).display_test(this.db));
}
it => {
f(this, local, head);
@ -387,7 +403,7 @@ impl<'a> MirPrettyCtx<'a> {
Rvalue::Repeat(op, len) => {
w!(self, "[");
self.operand(op);
w!(self, "; {}]", len.display(self.db));
w!(self, "; {}]", len.display_test(self.db));
}
Rvalue::Aggregate(AggregateKind::Adt(_, _), it) => {
w!(self, "Adt(");
@ -458,6 +474,6 @@ impl<'a> MirPrettyCtx<'a> {
}
fn hir_display<T: HirDisplay>(&self, ty: &'a T) -> impl Display + 'a {
ty.display(self.db).with_closure_style(ClosureStyle::ClosureWithSubst)
ty.display_test(self.db).with_closure_style(ClosureStyle::ClosureWithSubst)
}
}

View File

@ -1,3 +1,4 @@
mod closure_captures;
mod coercion;
mod diagnostics;
mod display_source_code;
@ -12,6 +13,7 @@ mod traits;
mod type_alias_impl_traits;
use std::env;
use std::sync::LazyLock;
use base_db::SourceDatabaseFileInputExt as _;
use expect_test::Expect;
@ -25,7 +27,7 @@ use hir_def::{
AssocItemId, DefWithBodyId, HasModule, LocalModuleId, Lookup, ModuleDefId,
};
use hir_expand::{db::ExpandDatabase, FileRange, InFile};
use once_cell::race::OnceBool;
use itertools::Itertools;
use rustc_hash::FxHashMap;
use stdx::format_to;
use syntax::{
@ -50,8 +52,8 @@ use crate::{
// `env UPDATE_EXPECT=1 cargo test -p hir_ty` to update the snapshots.
fn setup_tracing() -> Option<tracing::subscriber::DefaultGuard> {
static ENABLE: OnceBool = OnceBool::new();
if !ENABLE.get_or_init(|| env::var("CHALK_DEBUG").is_ok()) {
static ENABLE: LazyLock<bool> = LazyLock::new(|| env::var("CHALK_DEBUG").is_ok());
if !*ENABLE {
return None;
}
@ -94,7 +96,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
let mut had_annotations = false;
let mut mismatches = FxHashMap::default();
let mut types = FxHashMap::default();
let mut adjustments = FxHashMap::<_, Vec<_>>::default();
let mut adjustments = FxHashMap::default();
for (file_id, annotations) in db.extract_annotations() {
for (range, expected) in annotations {
let file_range = FileRange { file_id, range };
@ -107,13 +109,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
} else if expected.starts_with("adjustments:") {
adjustments.insert(
file_range,
expected
.trim_start_matches("adjustments:")
.trim()
.split(',')
.map(|it| it.trim().to_owned())
.filter(|it| !it.is_empty())
.collect(),
expected.trim_start_matches("adjustments:").trim().to_owned(),
);
} else {
panic!("unexpected annotation: {expected}");
@ -200,7 +196,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
adjustments
.iter()
.map(|Adjustment { kind, .. }| format!("{kind:?}"))
.collect::<Vec<_>>()
.join(", ")
);
}
}

View File

@ -0,0 +1,433 @@
use base_db::salsa::InternKey;
use expect_test::{expect, Expect};
use hir_def::db::DefDatabase;
use hir_expand::files::InFileWrapper;
use itertools::Itertools;
use span::{HirFileId, TextRange};
use syntax::{AstNode, AstPtr};
use test_fixture::WithFixture;
use crate::db::{HirDatabase, InternedClosureId};
use crate::display::HirDisplay;
use crate::mir::MirSpan;
use crate::test_db::TestDB;
use super::visit_module;
fn check_closure_captures(ra_fixture: &str, expect: Expect) {
let (db, file_id) = TestDB::with_single_file(ra_fixture);
let module = db.module_for_file(file_id);
let def_map = module.def_map(&db);
let mut defs = Vec::new();
visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
let mut captures_info = Vec::new();
for def in defs {
let infer = db.infer(def);
let db = &db;
captures_info.extend(infer.closure_info.iter().flat_map(|(closure_id, (captures, _))| {
let closure = db.lookup_intern_closure(InternedClosureId::from_intern_id(closure_id.0));
let (_, source_map) = db.body_with_source_map(closure.0);
let closure_text_range = source_map
.expr_syntax(closure.1)
.expect("failed to map closure to SyntaxNode")
.value
.text_range();
captures.iter().map(move |capture| {
fn text_range<N: AstNode>(
db: &TestDB,
syntax: InFileWrapper<HirFileId, AstPtr<N>>,
) -> TextRange {
let root = syntax.file_syntax(db);
syntax.value.to_node(&root).syntax().text_range()
}
// FIXME: Deduplicate this with hir::Local::sources().
let (body, source_map) = db.body_with_source_map(closure.0);
let local_text_range = match body.self_param.zip(source_map.self_param_syntax()) {
Some((param, source)) if param == capture.local() => {
format!("{:?}", text_range(db, source))
}
_ => source_map
.patterns_for_binding(capture.local())
.iter()
.map(|&definition| {
text_range(db, source_map.pat_syntax(definition).unwrap())
})
.map(|it| format!("{it:?}"))
.join(", "),
};
let place = capture.display_place(closure.0, db);
let capture_ty = capture.ty.skip_binders().display_test(db).to_string();
let spans = capture
.spans()
.iter()
.flat_map(|span| match *span {
MirSpan::ExprId(expr) => {
vec![text_range(db, source_map.expr_syntax(expr).unwrap())]
}
MirSpan::PatId(pat) => {
vec![text_range(db, source_map.pat_syntax(pat).unwrap())]
}
MirSpan::BindingId(binding) => source_map
.patterns_for_binding(binding)
.iter()
.map(|pat| text_range(db, source_map.pat_syntax(*pat).unwrap()))
.collect(),
MirSpan::SelfParam => {
vec![text_range(db, source_map.self_param_syntax().unwrap())]
}
MirSpan::Unknown => Vec::new(),
})
.sorted_by_key(|it| it.start())
.map(|it| format!("{it:?}"))
.join(",");
(closure_text_range, local_text_range, spans, place, capture_ty, capture.kind())
})
}));
}
captures_info.sort_unstable_by_key(|(closure_text_range, local_text_range, ..)| {
(closure_text_range.start(), local_text_range.clone())
});
let rendered = captures_info
.iter()
.map(|(closure_text_range, local_text_range, spans, place, capture_ty, capture_kind)| {
format!(
"{closure_text_range:?};{local_text_range};{spans} {capture_kind:?} {place} {capture_ty}"
)
})
.join("\n");
expect.assert_eq(&rendered);
}
#[test]
fn deref_in_let() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let a = &mut true;
let closure = || { let b = *a; };
}
"#,
expect!["53..71;20..21;66..68 ByRef(Shared) *a &'? bool"],
);
}
#[test]
fn deref_then_ref_pattern() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let a = &mut true;
let closure = || { let &mut ref b = a; };
}
"#,
expect!["53..79;20..21;67..72 ByRef(Shared) *a &'? bool"],
);
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let a = &mut true;
let closure = || { let &mut ref mut b = a; };
}
"#,
expect!["53..83;20..21;67..76 ByRef(Mut { kind: Default }) *a &'? mut bool"],
);
}
#[test]
fn unique_borrow() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let a = &mut true;
let closure = || { *a = false; };
}
"#,
expect!["53..71;20..21;58..60 ByRef(Mut { kind: Default }) *a &'? mut bool"],
);
}
#[test]
fn deref_ref_mut() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let a = &mut true;
let closure = || { let ref mut b = *a; };
}
"#,
expect!["53..79;20..21;62..71 ByRef(Mut { kind: Default }) *a &'? mut bool"],
);
}
#[test]
fn let_else_not_consuming() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let a = &mut true;
let closure = || { let _ = *a else { return; }; };
}
"#,
expect!["53..88;20..21;66..68 ByRef(Shared) *a &'? bool"],
);
}
#[test]
fn consume() {
check_closure_captures(
r#"
//- minicore:copy
struct NonCopy;
fn main() {
let a = NonCopy;
let closure = || { let b = a; };
}
"#,
expect!["67..84;36..37;80..81 ByValue a NonCopy"],
);
}
#[test]
fn ref_to_upvar() {
check_closure_captures(
r#"
//- minicore:copy
struct NonCopy;
fn main() {
let mut a = NonCopy;
let closure = || { let b = &a; };
let closure = || { let c = &mut a; };
}
"#,
expect![[r#"
71..89;36..41;84..86 ByRef(Shared) a &'? NonCopy
109..131;36..41;122..128 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
);
}
#[test]
fn field() {
check_closure_captures(
r#"
//- minicore:copy
struct Foo { a: i32, b: i32 }
fn main() {
let a = Foo { a: 0, b: 0 };
let closure = || { let b = a.a; };
}
"#,
expect!["92..111;50..51;105..108 ByRef(Shared) a.a &'? i32"],
);
}
#[test]
fn fields_different_mode() {
check_closure_captures(
r#"
//- minicore:copy
struct NonCopy;
struct Foo { a: i32, b: i32, c: NonCopy, d: bool }
fn main() {
let mut a = Foo { a: 0, b: 0 };
let closure = || {
let b = &a.a;
let c = &mut a.b;
let d = a.c;
};
}
"#,
expect![[r#"
133..212;87..92;154..158 ByRef(Shared) a.a &'? i32
133..212;87..92;176..184 ByRef(Mut { kind: Default }) a.b &'? mut i32
133..212;87..92;202..205 ByValue a.c NonCopy"#]],
);
}
#[test]
fn autoref() {
check_closure_captures(
r#"
//- minicore:copy
struct Foo;
impl Foo {
fn imm(&self) {}
fn mut_(&mut self) {}
}
fn main() {
let mut a = Foo;
let closure = || a.imm();
let closure = || a.mut_();
}
"#,
expect![[r#"
123..133;92..97;126..127 ByRef(Shared) a &'? Foo
153..164;92..97;156..157 ByRef(Mut { kind: Default }) a &'? mut Foo"#]],
);
}
#[test]
fn captures_priority() {
check_closure_captures(
r#"
//- minicore:copy
struct NonCopy;
fn main() {
let mut a = &mut true;
// Max ByRef(Mut { kind: Default })
let closure = || {
*a = false;
let b = &mut a;
};
// Max ByValue
let mut a = NonCopy;
let closure = || {
let b = a;
let c = &mut a;
let d = &a;
};
}
"#,
expect![[r#"
113..167;36..41;127..128,154..160 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool
231..304;196..201;252..253,276..277,296..297 ByValue a NonCopy"#]],
);
}
#[test]
fn let_underscore() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let mut a = true;
let closure = || { let _ = a; };
}
"#,
expect![""],
);
}
#[test]
fn match_wildcard() {
check_closure_captures(
r#"
//- minicore:copy
struct NonCopy;
fn main() {
let mut a = NonCopy;
let closure = || match a {
_ => {}
};
let closure = || match a {
ref b => {}
};
let closure = || match a {
ref mut b => {}
};
}
"#,
expect![[r#"
125..163;36..41;134..135 ByRef(Shared) a &'? NonCopy
183..225;36..41;192..193 ByRef(Mut { kind: Default }) a &'? mut NonCopy"#]],
);
}
#[test]
fn multiple_bindings() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let mut a = false;
let mut closure = || { let (b | b) = a; };
}
"#,
expect!["57..80;20..25;76..77,76..77 ByRef(Shared) a &'? bool"],
);
}
#[test]
fn multiple_usages() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let mut a = false;
let mut closure = || {
let b = &a;
let c = &a;
let d = &mut a;
a = true;
};
}
"#,
expect!["57..149;20..25;78..80,98..100,118..124,134..135 ByRef(Mut { kind: Default }) a &'? mut bool"],
);
}
#[test]
fn ref_then_deref() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let mut a = false;
let mut closure = || { let b = *&mut a; };
}
"#,
expect!["57..80;20..25;71..77 ByRef(Mut { kind: Default }) a &'? mut bool"],
);
}
#[test]
fn ref_of_ref() {
check_closure_captures(
r#"
//- minicore:copy
fn main() {
let mut a = &false;
let closure = || { let b = &a; };
let closure = || { let b = &mut a; };
let a = &mut false;
let closure = || { let b = &a; };
let closure = || { let b = &mut a; };
}
"#,
expect![[r#"
54..72;20..25;67..69 ByRef(Shared) a &'? &'? bool
92..114;20..25;105..111 ByRef(Mut { kind: Default }) a &'? mut &'? bool
158..176;124..125;171..173 ByRef(Shared) a &'? &'? mut bool
196..218;124..125;209..215 ByRef(Mut { kind: Default }) a &'? mut &'? mut bool"#]],
);
}
#[test]
fn multiple_capture_usages() {
check_closure_captures(
r#"
//- minicore:copy
struct A { a: i32, b: bool }
fn main() {
let mut a = A { a: 123, b: false };
let closure = |$0| {
let b = a.b;
a = A { a: 456, b: true };
};
closure();
}
"#,
expect!["99..165;49..54;120..121,133..134 ByRef(Mut { kind: Default }) a &'? mut A"],
);
}

View File

@ -49,7 +49,7 @@ fn let_stmt_coerce() {
//- minicore: coerce_unsized
fn test() {
let x: &[isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)), Pointer(Unsize)
let x: *const [isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
}
@ -96,7 +96,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
fn test() {
let x = if true {
foo(&[1])
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref('?8, Not)), Pointer(Unsize)
} else {
&[1]
};
@ -148,7 +148,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
fn test(i: i32) {
let x = match i {
2 => foo(&[2]),
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref('?10, Not)), Pointer(Unsize)
1 => &[1],
_ => &[3],
};
@ -267,7 +267,7 @@ fn takes_ref_str(x: &str) {}
fn returns_string() -> String { loop {} }
fn test() {
takes_ref_str(&{ returns_string() });
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
// ^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(None), Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('{error}, Not))
}
"#,
);
@ -849,8 +849,8 @@ impl core::cmp::PartialEq for Struct {
}
fn test() {
Struct == Struct;
// ^^^^^^ adjustments: Borrow(Ref(Not))
// ^^^^^^ adjustments: Borrow(Ref(Not))
// ^^^^^^ adjustments: Borrow(Ref('{error}, Not))
// ^^^^^^ adjustments: Borrow(Ref('{error}, Not))
}",
);
}
@ -866,7 +866,7 @@ impl core::ops::AddAssign for Struct {
}
fn test() {
Struct += Struct;
// ^^^^^^ adjustments: Borrow(Ref(Mut))
// ^^^^^^ adjustments: Borrow(Ref('{error}, Mut))
// ^^^^^^ adjustments:
}",
);
@ -880,7 +880,7 @@ fn adjust_index() {
fn test() {
let x = [1, 2, 3];
x[2] = 6;
// ^ adjustments: Borrow(Ref(Mut))
// ^ adjustments: Borrow(Ref('?8, Mut))
}
",
);
@ -905,11 +905,11 @@ impl core::ops::IndexMut for StructMut {
}
fn test() {
Struct[0];
// ^^^^^^ adjustments: Borrow(Ref(Not))
// ^^^^^^ adjustments: Borrow(Ref('?2, Not))
StructMut[0];
// ^^^^^^^^^ adjustments: Borrow(Ref(Not))
// ^^^^^^^^^ adjustments: Borrow(Ref('?5, Not))
&mut StructMut[0];
// ^^^^^^^^^ adjustments: Borrow(Ref(Mut))
// ^^^^^^^^^ adjustments: Borrow(Ref('?8, Mut))
}",
);
}
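// (The borrow adjustments above now print their region: `'?N` appears to be an
// inference-variable lifetime and `'{error}` an error lifetime.)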

View File

@ -1186,11 +1186,11 @@ fn test() {
89..109 '{ ... }': bool
99..103 'true': bool
123..167 '{ ...o(); }': ()
133..134 's': &'? S
133..134 's': &'static S
137..151 'unsafe { f() }': &'static S
146..147 'f': fn f() -> &'static S
146..149 'f()': &'static S
157..158 's': &'? S
157..158 's': &'static S
157..164 's.foo()': bool
"#]],
);
@ -1847,9 +1847,9 @@ impl Foo {
}
fn test() {
Foo.foo();
//^^^ adjustments: Borrow(Ref(Not))
//^^^ adjustments: Borrow(Ref('?1, Not))
(&Foo).foo();
// ^^^^ adjustments: Deref(None), Borrow(Ref(Not))
// ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not))
}
"#,
);
@ -1863,7 +1863,7 @@ fn receiver_adjustment_unsize_array() {
fn test() {
let a = [1, 2, 3];
a.len();
} //^ adjustments: Borrow(Ref(Not)), Pointer(Unsize)
} //^ adjustments: Borrow(Ref('?7, Not)), Pointer(Unsize)
"#,
);
}
@ -2076,7 +2076,7 @@ impl Foo {
}
fn test() {
Box::new(Foo).foo();
//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref(Not))
//^^^^^^^^^^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not))
}
"#,
);
@ -2094,7 +2094,7 @@ impl Foo {
use core::mem::ManuallyDrop;
fn test() {
ManuallyDrop::new(Foo).foo();
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref(Not))
//^^^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Some(Not)))), Borrow(Ref('?4, Not))
}
"#,
);

View File

@ -2141,3 +2141,90 @@ fn test() {
}"#,
);
}
#[test]
fn issue_17866() {
check_infer(
r#"
trait T {
type A;
}
type Foo = <S as T>::A;
fn main() {
Foo {};
}
"#,
expect![[r#"
60..75 '{ Foo {}; }': ()
66..72 'Foo {}': {unknown}
"#]],
);
}
#[test]
fn issue_17711() {
check_infer(
r#"
//- minicore: deref
use core::ops::Deref;
struct Struct<'a, T>(&'a T);
trait Trait {}
impl<'a, T: Deref<Target = impl Trait>> Struct<'a, T> {
fn foo(&self) -> &Self { self }
fn bar(&self) {
let _ = self.foo();
}
}
"#,
expect![[r#"
137..141 'self': &'? Struct<'a, T>
152..160 '{ self }': &'? Struct<'a, T>
154..158 'self': &'? Struct<'a, T>
174..178 'self': &'? Struct<'a, T>
180..215 '{ ... }': ()
194..195 '_': &'? Struct<'?, T>
198..202 'self': &'? Struct<'a, T>
198..208 'self.foo()': &'? Struct<'?, T>
"#]],
);
}
#[test]
fn issue_17767() {
check_infer(
r#"
extern "C" {
type Foo<T>;
}
fn f() -> Foo {}
"#,
expect![[r#"
47..49 '{}': Foo
"#]],
);
}
#[test]
fn issue_17921() {
check_infer(
r#"
//- minicore: future
trait Foo {}
type Bar = impl Foo;
async fn f<A, B, C>() -> Bar {}
"#,
expect![[r#"
64..66 '{}': ()
64..66 '{}': impl Future<Output = ()>
"#]],
);
}

View File

@ -1201,8 +1201,8 @@ fn infer_array() {
209..215 '[1, 2]': [i32; 2]
210..211 '1': i32
213..214 '2': i32
225..226 'i': [&'? str; 2]
229..239 '["a", "b"]': [&'? str; 2]
225..226 'i': [&'static str; 2]
229..239 '["a", "b"]': [&'static str; 2]
230..233 '"a"': &'static str
235..238 '"b"': &'static str
250..251 'b': [[&'? str; 1]; 2]
@ -3686,3 +3686,36 @@ fn main() {
"#,
);
}
#[test]
fn infer_bad_lang_item() {
check_infer(
r#"
#[lang="eq"]
pub trait Eq {
fn eq(&self, ) -> bool;
}
#[lang="shr"]
pub trait Shr<RHS,Result> {
fn shr(&self, rhs: &RHS) -> Result;
}
fn test() -> bool {
1 >> 1;
1 == 1;
}
"#,
expect![[r#"
39..43 'self': &'? Self
114..118 'self': &'? Self
120..123 'rhs': &'? RHS
163..190 '{ ...= 1; }': bool
169..170 '1': i32
169..175 '1 >> 1': {unknown}
181..182 '1': i32
181..187 '1 == 1': {unknown}
"#]],
);
}

View File

@ -2,6 +2,7 @@
use std::fmt::{self, Display};
use itertools::Itertools;
use span::Edition;
use crate::{
chalk_db, db::HirDatabase, from_assoc_type_id, from_chalk_trait_id, mapping::from_chalk,
@ -24,7 +25,7 @@ impl DebugContext<'_> {
AdtId::UnionId(it) => self.0.union_data(it).name.clone(),
AdtId::EnumId(it) => self.0.enum_data(it).name.clone(),
};
name.display(self.0.upcast()).fmt(f)?;
name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
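// `Name::display` now takes an `Edition`, presumably so that names which are keywords
// in a later edition can be rendered as raw identifiers; debug output simply passes
// `Edition::LATEST`.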
Ok(())
}
@ -35,7 +36,7 @@ impl DebugContext<'_> {
) -> Result<(), fmt::Error> {
let trait_: hir_def::TraitId = from_chalk_trait_id(id);
let trait_data = self.0.trait_data(trait_);
trait_data.name.display(self.0.upcast()).fmt(f)?;
trait_data.name.display(self.0.upcast(), Edition::LATEST).fmt(f)?;
Ok(())
}
@ -54,8 +55,8 @@ impl DebugContext<'_> {
write!(
fmt,
"{}::{}",
trait_data.name.display(self.0.upcast()),
type_alias_data.name.display(self.0.upcast())
trait_data.name.display(self.0.upcast(), Edition::LATEST),
type_alias_data.name.display(self.0.upcast(), Edition::LATEST)
)?;
Ok(())
}
@ -75,7 +76,7 @@ impl DebugContext<'_> {
let trait_ref = projection_ty.trait_ref(self.0);
let trait_params = trait_ref.substitution.as_slice(Interner);
let self_ty = trait_ref.self_type_parameter(Interner);
write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast()))?;
write!(fmt, "<{self_ty:?} as {}", trait_name.display(self.0.upcast(), Edition::LATEST))?;
if trait_params.len() > 1 {
write!(
fmt,
@ -83,7 +84,7 @@ impl DebugContext<'_> {
trait_params[1..].iter().format_with(", ", |x, f| f(&format_args!("{x:?}"))),
)?;
}
write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast()))?;
write!(fmt, ">::{}", type_alias_data.name.display(self.0.upcast(), Edition::LATEST))?;
let proj_params_count = projection_ty.substitution.len(Interner) - trait_params.len();
let proj_params = &projection_ty.substitution.as_slice(Interner)[..proj_params_count];
@ -110,9 +111,11 @@ impl DebugContext<'_> {
CallableDefId::EnumVariantId(e) => self.0.enum_variant_data(e).name.clone(),
};
match def {
CallableDefId::FunctionId(_) => write!(fmt, "{{fn {}}}", name.display(self.0.upcast())),
CallableDefId::FunctionId(_) => {
write!(fmt, "{{fn {}}}", name.display(self.0.upcast(), Edition::LATEST))
}
CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {
write!(fmt, "{{ctor {}}}", name.display(self.0.upcast()))
write!(fmt, "{{ctor {}}}", name.display(self.0.upcast(), Edition::LATEST))
}
}
}

View File

@ -14,13 +14,14 @@ use hir_def::{
};
use hir_expand::name::Name;
use intern::sym;
use stdx::panic_context;
use span::Edition;
use stdx::{never, panic_context};
use triomphe::Arc;
use crate::{
db::HirDatabase, infer::unify::InferenceTable, utils::UnevaluatedConstEvaluatorFolder, AliasEq,
AliasTy, Canonical, DomainGoal, Goal, Guidance, InEnvironment, Interner, ProjectionTy,
ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, WhereClause,
ProjectionTyExt, Solution, TraitRefExt, Ty, TyKind, TypeFlags, WhereClause,
};
/// This controls how much 'time' we give the Chalk solver before giving up.
@ -90,6 +91,16 @@ pub(crate) fn normalize_projection_query(
projection: ProjectionTy,
env: Arc<TraitEnvironment>,
) -> Ty {
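// Bail out if the projection still contains inference variables: query results are
// cached, and inference variables are only meaningful inside the current inference
// table (assumed rationale; the `never!` below treats such a call as a caller bug).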
if projection.substitution.iter(Interner).any(|arg| {
arg.ty(Interner)
.is_some_and(|ty| ty.data(Interner).flags.intersects(TypeFlags::HAS_TY_INFER))
}) {
never!(
"Invoking `normalize_projection_query` with a projection type containing inference var"
);
return TyKind::Error.intern(Interner);
}
let mut table = InferenceTable::new(db, env);
let ty = table.normalize_projection_ty(projection);
table.resolve_completely(ty)
@ -104,7 +115,7 @@ pub(crate) fn trait_solve_query(
) -> Option<Solution> {
let detail = match &goal.value.goal.data(Interner) {
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
db.trait_data(it.hir_trait_id()).name.display(db.upcast(), Edition::LATEST).to_string()
}
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
_ => "??".to_owned(),

View File

@ -20,7 +20,6 @@ itertools.workspace = true
smallvec.workspace = true
tracing.workspace = true
triomphe.workspace = true
once_cell = "1.17.1"
# local deps
base-db.workspace = true

View File

@ -328,11 +328,9 @@ fn doc_modpath_from_str(link: &str) -> Option<ModPath> {
};
let parts = first_segment.into_iter().chain(parts).map(|segment| match segment.parse() {
Ok(idx) => Name::new_tuple_field(idx),
Err(_) => Name::new(
segment.split_once('<').map_or(segment, |it| it.0),
tt::IdentIsRaw::No,
SyntaxContextId::ROOT,
),
Err(_) => {
Name::new(segment.split_once('<').map_or(segment, |it| it.0), SyntaxContextId::ROOT)
}
});
Some(ModPath::from_segments(kind, parts))
};

View File

@ -84,7 +84,7 @@ impl HirDisplay for Function {
if let Some(abi) = &data.abi {
write!(f, "extern \"{}\" ", abi.as_str())?;
}
write!(f, "fn {}", data.name.display(f.db.upcast()))?;
write!(f, "fn {}", data.name.display(f.db.upcast(), f.edition()))?;
write_generic_params(GenericDefId::FunctionId(self.id), f)?;
@ -107,7 +107,7 @@ impl HirDisplay for Function {
first = false;
}
match local {
Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?,
Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
None => f.write_str("_: ")?,
}
type_ref.hir_fmt(f)?;
@ -177,7 +177,7 @@ fn write_impl_header(impl_: &Impl, f: &mut HirFormatter<'_>) -> Result<(), HirDi
if let Some(trait_) = impl_.trait_(db) {
let trait_data = db.trait_data(trait_.id);
write!(f, " {} for", trait_data.name.display(db.upcast()))?;
write!(f, " {} for", trait_data.name.display(db.upcast(), f.edition()))?;
}
f.write_char(' ')?;
@ -196,7 +196,7 @@ impl HirDisplay for SelfParam {
{
f.write_char('&')?;
if let Some(lifetime) = lifetime {
write!(f, "{} ", lifetime.name.display(f.db.upcast()))?;
write!(f, "{} ", lifetime.name.display(f.db.upcast(), f.edition()))?;
}
if let hir_def::type_ref::Mutability::Mut = mut_ {
f.write_str("mut ")?;
@ -227,7 +227,7 @@ impl HirDisplay for Struct {
// FIXME: Render repr if its set explicitly?
write_visibility(module_id, self.visibility(f.db), f)?;
f.write_str("struct ")?;
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
let def_id = GenericDefId::AdtId(AdtId::StructId(self.id));
write_generic_params(def_id, f)?;
@ -266,7 +266,7 @@ impl HirDisplay for Enum {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
f.write_str("enum ")?;
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
let def_id = GenericDefId::AdtId(AdtId::EnumId(self.id));
write_generic_params(def_id, f)?;
@ -283,7 +283,7 @@ impl HirDisplay for Union {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
f.write_str("union ")?;
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
let def_id = GenericDefId::AdtId(AdtId::UnionId(self.id));
write_generic_params(def_id, f)?;
@ -343,7 +343,7 @@ fn write_variants(
} else {
f.write_str("{\n")?;
for variant in &variants[..count] {
write!(f, " {}", variant.name(f.db).display(f.db.upcast()))?;
write!(f, " {}", variant.name(f.db).display(f.db.upcast(), f.edition()))?;
match variant.kind(f.db) {
StructKind::Tuple => {
let fields_str =
@ -372,21 +372,21 @@ fn write_variants(
impl HirDisplay for Field {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.parent.module(f.db).id, self.visibility(f.db), f)?;
write!(f, "{}: ", self.name(f.db).display(f.db.upcast()))?;
write!(f, "{}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
self.ty(f.db).hir_fmt(f)
}
}
impl HirDisplay for TupleField {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "pub {}: ", self.name().display(f.db.upcast()))?;
write!(f, "pub {}: ", self.name().display(f.db.upcast(), f.edition()))?;
self.ty(f.db).hir_fmt(f)
}
}
impl HirDisplay for Variant {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
let data = self.variant_data(f.db);
match &*data {
VariantData::Unit => {}
@ -424,9 +424,9 @@ impl HirDisplay for ExternCrateDecl {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
f.write_str("extern crate ")?;
write!(f, "{}", self.name(f.db).display(f.db.upcast()))?;
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))?;
if let Some(alias) = self.alias(f.db) {
write!(f, " as {alias}",)?;
write!(f, " as {}", alias.display(f.edition()))?;
}
Ok(())
}
@ -478,7 +478,7 @@ impl HirDisplay for TypeParam {
match param_data {
TypeOrConstParamData::TypeParamData(p) => match p.provenance {
TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast()))?
write!(f, "{}", p.name.clone().unwrap().display(f.db.upcast(), f.edition()))?
}
TypeParamProvenance::ArgumentImplTrait => {
return write_bounds_like_dyn_trait_with_prefix(
@ -491,7 +491,7 @@ impl HirDisplay for TypeParam {
}
},
TypeOrConstParamData::ConstParamData(p) => {
write!(f, "{}", p.name.display(f.db.upcast()))?;
write!(f, "{}", p.name.display(f.db.upcast(), f.edition()))?;
}
}
@ -525,13 +525,13 @@ impl HirDisplay for TypeParam {
impl HirDisplay for LifetimeParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "{}", self.name(f.db).display(f.db.upcast()))
write!(f, "{}", self.name(f.db).display(f.db.upcast(), f.edition()))
}
}
impl HirDisplay for ConstParam {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write!(f, "const {}: ", self.name(f.db).display(f.db.upcast()))?;
write!(f, "const {}: ", self.name(f.db).display(f.db.upcast(), f.edition()))?;
self.ty(f.db).hir_fmt(f)
}
}
@ -563,7 +563,7 @@ fn write_generic_params(
};
for (_, lifetime) in params.iter_lt() {
delim(f)?;
write!(f, "{}", lifetime.name.display(f.db.upcast()))?;
write!(f, "{}", lifetime.name.display(f.db.upcast(), f.edition()))?;
}
for (_, ty) in params.iter_type_or_consts() {
if let Some(name) = &ty.name() {
@ -573,7 +573,7 @@ fn write_generic_params(
continue;
}
delim(f)?;
write!(f, "{}", name.display(f.db.upcast()))?;
write!(f, "{}", name.display(f.db.upcast(), f.edition()))?;
if let Some(default) = &ty.default {
f.write_str(" = ")?;
default.hir_fmt(f)?;
@ -581,12 +581,12 @@ fn write_generic_params(
}
TypeOrConstParamData::ConstParamData(c) => {
delim(f)?;
write!(f, "const {}: ", name.display(f.db.upcast()))?;
write!(f, "const {}: ", name.display(f.db.upcast(), f.edition()))?;
c.ty.hir_fmt(f)?;
if let Some(default) = &c.default {
f.write_str(" = ")?;
write!(f, "{}", default.display(f.db.upcast()))?;
write!(f, "{}", default.display(f.db.upcast(), f.edition()))?;
}
}
}
@ -639,7 +639,7 @@ fn write_where_predicates(
let write_target = |target: &WherePredicateTypeTarget, f: &mut HirFormatter<'_>| match target {
WherePredicateTypeTarget::TypeRef(ty) => ty.hir_fmt(f),
WherePredicateTypeTarget::TypeOrConstParam(id) => match params[*id].name() {
Some(name) => write!(f, "{}", name.display(f.db.upcast())),
Some(name) => write!(f, "{}", name.display(f.db.upcast(), f.edition())),
None => f.write_str("{unnamed}"),
},
};
@ -668,12 +668,13 @@ fn write_where_predicates(
bound.hir_fmt(f)?;
}
Lifetime { target, bound } => {
let target = target.name.display(f.db.upcast());
let bound = bound.name.display(f.db.upcast());
let target = target.name.display(f.db.upcast(), f.edition());
let bound = bound.name.display(f.db.upcast(), f.edition());
write!(f, "{target}: {bound}")?;
}
ForLifetime { lifetimes, target, bound } => {
let lifetimes = lifetimes.iter().map(|it| it.display(f.db.upcast())).join(", ");
let lifetimes =
lifetimes.iter().map(|it| it.display(f.db.upcast(), f.edition())).join(", ");
write!(f, "for<{lifetimes}> ")?;
write_target(target, f)?;
f.write_str(": ")?;
@ -685,7 +686,9 @@ fn write_where_predicates(
f.write_str(" + ")?;
match nxt {
TypeBound { bound, .. } | ForLifetime { bound, .. } => bound.hir_fmt(f)?,
Lifetime { bound, .. } => write!(f, "{}", bound.name.display(f.db.upcast()))?,
Lifetime { bound, .. } => {
write!(f, "{}", bound.name.display(f.db.upcast(), f.edition()))?
}
}
}
f.write_str(",")?;
@ -707,7 +710,7 @@ impl HirDisplay for Const {
let data = db.const_data(self.id);
f.write_str("const ")?;
match &data.name {
Some(name) => write!(f, "{}: ", name.display(f.db.upcast()))?,
Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
None => f.write_str("_: ")?,
}
data.type_ref.hir_fmt(f)?;
@ -723,7 +726,7 @@ impl HirDisplay for Static {
if data.mutable {
f.write_str("mut ")?;
}
write!(f, "{}: ", data.name.display(f.db.upcast()))?;
write!(f, "{}: ", data.name.display(f.db.upcast(), f.edition()))?;
data.type_ref.hir_fmt(f)?;
Ok(())
}
@ -777,7 +780,7 @@ fn write_trait_header(trait_: &Trait, f: &mut HirFormatter<'_>) -> Result<(), Hi
if data.is_auto {
f.write_str("auto ")?;
}
write!(f, "trait {}", data.name.display(f.db.upcast()))?;
write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
write_generic_params(GenericDefId::TraitId(trait_.id), f)?;
Ok(())
}
@ -786,7 +789,7 @@ impl HirDisplay for TraitAlias {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
let data = f.db.trait_alias_data(self.id);
write!(f, "trait {}", data.name.display(f.db.upcast()))?;
write!(f, "trait {}", data.name.display(f.db.upcast(), f.edition()))?;
let def_id = GenericDefId::TraitAliasId(self.id);
write_generic_params(def_id, f)?;
f.write_str(" = ")?;
@ -802,7 +805,7 @@ impl HirDisplay for TypeAlias {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
write_visibility(self.module(f.db).id, self.visibility(f.db), f)?;
let data = f.db.type_alias_data(self.id);
write!(f, "type {}", data.name.display(f.db.upcast()))?;
write!(f, "type {}", data.name.display(f.db.upcast(), f.edition()))?;
let def_id = GenericDefId::TypeAliasId(self.id);
write_generic_params(def_id, f)?;
if !data.bounds.is_empty() {
@ -822,7 +825,7 @@ impl HirDisplay for Module {
fn hir_fmt(&self, f: &mut HirFormatter<'_>) -> Result<(), HirDisplayError> {
// FIXME: Module doesn't have visibility saved in data.
match self.name(f.db) {
Some(name) => write!(f, "mod {}", name.display(f.db.upcast())),
Some(name) => write!(f, "mod {}", name.display(f.db.upcast(), f.edition())),
None if self.is_crate_root() => match self.krate(f.db).display_name(f.db) {
Some(name) => write!(f, "extern crate {name}"),
None => f.write_str("extern crate {unknown}"),
@ -839,6 +842,6 @@ impl HirDisplay for Macro {
hir_def::MacroId::MacroRulesId(_) => f.write_str("macro_rules!"),
hir_def::MacroId::ProcMacroId(_) => f.write_str("proc_macro"),
}?;
write!(f, " {}", self.name(f.db).display(f.db.upcast()))
write!(f, " {}", self.name(f.db).display(f.db.upcast(), f.edition()))
}
}

View File

@ -78,7 +78,8 @@ use hir_ty::{
use itertools::Itertools;
use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use span::{Edition, EditionedFileId, FileId, MacroCallId};
use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId};
use stdx::{impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasGenericParams, HasName},
@ -93,8 +94,7 @@ pub use crate::{
diagnostics::*,
has_source::HasSource,
semantics::{
DescendPreference, PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo,
VisibleTraits,
PathResolution, Semantics, SemanticsImpl, SemanticsScope, TypeInfo, VisibleTraits,
},
};
pub use hir_ty::method_resolution::TyFingerprint;
@ -340,13 +340,13 @@ impl ModuleDef {
}
}
pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
pub fn canonical_path(&self, db: &dyn HirDatabase, edition: Edition) -> Option<String> {
let mut segments = vec![self.name(db)?];
for m in self.module(db)?.path_to_root(db) {
segments.extend(m.name(db))
}
segments.reverse();
Some(segments.iter().map(|it| it.display(db.upcast())).join("::"))
Some(segments.iter().map(|it| it.display(db.upcast(), edition)).join("::"))
}
pub fn canonical_module_path(
@ -556,13 +556,14 @@ impl Module {
style_lints: bool,
) {
let _p = tracing::info_span!("Module::diagnostics", name = ?self.name(db)).entered();
let edition = db.crate_graph()[self.id.krate()].edition;
let def_map = self.id.def_map(db.upcast());
for diag in def_map.diagnostics() {
if diag.in_module != self.id.local_id {
// FIXME: This is accidentally quadratic.
continue;
}
emit_def_diagnostic(db, acc, diag);
emit_def_diagnostic(db, acc, diag, edition);
}
if !self.id.is_block_module() {
@ -582,7 +583,7 @@ impl Module {
}
ModuleDef::Trait(t) => {
for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
emit_def_diagnostic(db, acc, diag, edition);
}
for item in t.items(db) {
@ -599,19 +600,19 @@ impl Module {
match adt {
Adt::Struct(s) => {
for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
emit_def_diagnostic(db, acc, diag, edition);
}
}
Adt::Union(u) => {
for diag in db.union_data_with_diagnostics(u.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
emit_def_diagnostic(db, acc, diag, edition);
}
}
Adt::Enum(e) => {
for v in e.variants(db) {
acc.extend(ModuleDef::Variant(v).diagnostics(db, style_lints));
for diag in db.enum_variant_data_with_diagnostics(v.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
emit_def_diagnostic(db, acc, diag, edition);
}
}
}
@ -645,7 +646,7 @@ impl Module {
let ast_id_map = db.ast_id_map(file_id);
for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
emit_def_diagnostic(db, acc, diag);
emit_def_diagnostic(db, acc, diag, edition);
}
if inherent_impls.invalid_impls().contains(&impl_def.id) {
@ -869,23 +870,32 @@ fn emit_macro_def_diagnostics(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>
never!("declarative expander for non decl-macro: {:?}", e);
return;
};
let krate = HasModule::krate(&m.id, db.upcast());
let edition = db.crate_graph()[krate].edition;
emit_def_diagnostic_(
db,
acc,
&DefDiagnosticKind::MacroDefError { ast, message: e.to_string() },
edition,
);
}
}
}
fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
emit_def_diagnostic_(db, acc, &diag.kind)
fn emit_def_diagnostic(
db: &dyn HirDatabase,
acc: &mut Vec<AnyDiagnostic>,
diag: &DefDiagnostic,
edition: Edition,
) {
emit_def_diagnostic_(db, acc, &diag.kind, edition)
}
fn emit_def_diagnostic_(
db: &dyn HirDatabase,
acc: &mut Vec<AnyDiagnostic>,
diag: &DefDiagnosticKind,
edition: Edition,
) {
match diag {
DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
@ -910,7 +920,7 @@ fn emit_def_diagnostic_(
MacroError {
node: InFile::new(ast.file_id, item.syntax_node_ptr()),
precise_location: None,
message: format!("{}: {message}", path.display(db.upcast())),
message: format!("{}: {message}", path.display(db.upcast(), edition)),
error,
}
.into(),
@ -1764,7 +1774,7 @@ impl DefWithBody {
/// A textual representation of the HIR of this def's body for debugging purposes.
pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
let body = db.body(self.id());
body.pretty_print(db.upcast(), self.id())
body.pretty_print(db.upcast(), self.id(), Edition::CURRENT)
}
/// A textual representation of the MIR of this def's body for debugging purposes.
@ -2259,6 +2269,8 @@ impl Function {
db: &dyn HirDatabase,
span_formatter: impl Fn(FileId, TextRange) -> String,
) -> String {
let krate = HasModule::krate(&self.id, db.upcast());
let edition = db.crate_graph()[krate].edition;
let body = match db.monomorphized_mir_body(
self.id.into(),
Substitution::empty(Interner),
@ -2267,7 +2279,7 @@ impl Function {
Ok(body) => body,
Err(e) => {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);
_ = e.pretty_print(&mut r, db, &span_formatter, edition);
return r;
}
};
@ -2276,7 +2288,7 @@ impl Function {
Ok(_) => "pass".to_owned(),
Err(e) => {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);
_ = e.pretty_print(&mut r, db, &span_formatter, edition);
r
}
};
@ -2510,7 +2522,11 @@ impl Const {
Type::from_value_def(db, self.id)
}
pub fn render_eval(self, db: &dyn HirDatabase) -> Result<String, ConstEvalError> {
pub fn render_eval(
self,
db: &dyn HirDatabase,
edition: Edition,
) -> Result<String, ConstEvalError> {
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
let data = &c.data(Interner);
if let TyKind::Scalar(s) = data.ty.kind(Interner) {
@ -2532,7 +2548,7 @@ impl Const {
if let Ok(s) = mir::render_const_using_debug_impl(db, self.id, &c) {
Ok(s)
} else {
Ok(format!("{}", c.display(db)))
Ok(format!("{}", c.display(db, edition)))
}
}
}
@ -3728,9 +3744,9 @@ impl ConstParam {
Type::new(db, self.id.parent(), db.const_param_ty(self.id))
}
pub fn default(self, db: &dyn HirDatabase) -> Option<ast::ConstArg> {
pub fn default(self, db: &dyn HirDatabase, edition: Edition) -> Option<ast::ConstArg> {
let arg = generic_arg_from_param(db, self.id.into())?;
known_const_to_ast(arg.constant(Interner)?, db)
known_const_to_ast(arg.constant(Interner)?, db, edition)
}
}
@ -4038,12 +4054,20 @@ impl Closure {
TyKind::Closure(self.id, self.subst).intern(Interner)
}
pub fn display_with_id(&self, db: &dyn HirDatabase) -> String {
self.clone().as_ty().display(db).with_closure_style(ClosureStyle::ClosureWithId).to_string()
pub fn display_with_id(&self, db: &dyn HirDatabase, edition: Edition) -> String {
self.clone()
.as_ty()
.display(db, edition)
.with_closure_style(ClosureStyle::ClosureWithId)
.to_string()
}
pub fn display_with_impl(&self, db: &dyn HirDatabase) -> String {
self.clone().as_ty().display(db).with_closure_style(ClosureStyle::ImplFn).to_string()
pub fn display_with_impl(&self, db: &dyn HirDatabase, edition: Edition) -> String {
self.clone()
.as_ty()
.display(db, edition)
.with_closure_style(ClosureStyle::ImplFn)
.to_string()
}
pub fn captured_items(&self, db: &dyn HirDatabase) -> Vec<ClosureCapture> {
@ -4090,6 +4114,15 @@ impl ClosureCapture {
Local { parent: self.owner, binding_id: self.capture.local() }
}
/// Returns whether this place has any field (aka. non-deref) projections.
pub fn has_field_projections(&self) -> bool {
self.capture.has_field_projections()
}
pub fn usages(&self) -> CaptureUsages {
CaptureUsages { parent: self.owner, spans: self.capture.spans() }
}
pub fn kind(&self) -> CaptureKind {
match self.capture.kind() {
hir_ty::CaptureKind::ByRef(
@ -4105,11 +4138,21 @@ impl ClosureCapture {
}
}
/// Converts the place to a name that can be inserted into source code.
pub fn place_to_name(&self, db: &dyn HirDatabase) -> String {
self.capture.place_to_name(self.owner, db)
}
pub fn display_place_source_code(&self, db: &dyn HirDatabase) -> String {
self.capture.display_place_source_code(self.owner, db)
}
pub fn display_place(&self, db: &dyn HirDatabase) -> String {
self.capture.display_place(self.owner, db)
}
}
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum CaptureKind {
SharedRef,
UniqueSharedRef,
@ -4117,6 +4160,74 @@ pub enum CaptureKind {
Move,
}
#[derive(Debug, Clone)]
pub struct CaptureUsages {
parent: DefWithBodyId,
spans: SmallVec<[mir::MirSpan; 3]>,
}
impl CaptureUsages {
pub fn sources(&self, db: &dyn HirDatabase) -> Vec<CaptureUsageSource> {
let (body, source_map) = db.body_with_source_map(self.parent);
let mut result = Vec::with_capacity(self.spans.len());
for &span in self.spans.iter() {
let is_ref = span.is_ref_span(&body);
match span {
mir::MirSpan::ExprId(expr) => {
if let Ok(expr) = source_map.expr_syntax(expr) {
result.push(CaptureUsageSource {
is_ref,
source: expr.map(AstPtr::wrap_left),
})
}
}
mir::MirSpan::PatId(pat) => {
if let Ok(pat) = source_map.pat_syntax(pat) {
result.push(CaptureUsageSource {
is_ref,
source: pat.map(AstPtr::wrap_right),
});
}
}
mir::MirSpan::BindingId(binding) => result.extend(
source_map
.patterns_for_binding(binding)
.iter()
.filter_map(|&pat| source_map.pat_syntax(pat).ok())
.map(|pat| CaptureUsageSource {
is_ref,
source: pat.map(AstPtr::wrap_right),
}),
),
mir::MirSpan::SelfParam | mir::MirSpan::Unknown => {
unreachable!("invalid capture usage span")
}
}
}
result
}
}
#[derive(Debug)]
pub struct CaptureUsageSource {
is_ref: bool,
source: InFile<AstPtr<Either<ast::Expr, ast::Pat>>>,
}
impl CaptureUsageSource {
pub fn source(&self) -> AstPtr<Either<ast::Expr, ast::Pat>> {
self.source.value
}
pub fn file_id(&self) -> HirFileId {
self.source.file_id
}
pub fn is_ref(&self) -> bool {
self.is_ref
}
}
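// Illustrative sketch for the `CaptureUsages`/`CaptureUsageSource` API above
// (assumes a `ClosureCapture` `capture` and a `db: &dyn HirDatabase` in scope):
//
//     for usage in capture.usages().sources(db) {
//         let _file: HirFileId = usage.file_id();
//         let _ptr: AstPtr<Either<ast::Expr, ast::Pat>> = usage.source();
//         let _is_ref: bool = usage.is_ref();
//     }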
#[derive(Clone, PartialEq, Eq, Debug, Hash)]
pub struct Type {
env: Arc<TraitEnvironment>,
@ -4355,6 +4466,22 @@ impl Type {
method_resolution::implements_trait(&canonical_ty, db, &self.env, trait_)
}
/// This does **not** resolve `IntoFuture`, only `Future`.
pub fn future_output(self, db: &dyn HirDatabase) -> Option<Type> {
let future_output =
db.lang_item(self.env.krate, LangItem::FutureOutput)?.as_type_alias()?;
self.normalize_trait_assoc_type(db, &[], future_output.into())
}
/// This does **not** resolve `IntoIterator`, only `Iterator`.
pub fn iterator_item(self, db: &dyn HirDatabase) -> Option<Type> {
let iterator_trait = db.lang_item(self.env.krate, LangItem::Iterator)?.as_trait()?;
let iterator_item = db
.trait_data(iterator_trait)
.associated_type_by_name(&Name::new_symbol(sym::Item.clone(), SyntaxContextId::ROOT))?;
self.normalize_trait_assoc_type(db, &[], iterator_item.into())
}
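// Illustrative only (assumes a `ty: Type` and `db: &dyn HirDatabase` in scope):
//
//     let output = ty.clone().future_output(db); // `impl Future<Output = T>` -> Some(T)
//     let item = ty.iterator_item(db);           // `impl Iterator<Item = T>` -> Some(T)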
/// Checks that particular type `ty` implements `std::ops::FnOnce`.
///
/// This function can be used to check if a particular type is callable, since FnOnce is a
@ -4704,18 +4831,20 @@ impl Type {
pub fn type_and_const_arguments<'a>(
&'a self,
db: &'a dyn HirDatabase,
edition: Edition,
) -> impl Iterator<Item = SmolStr> + 'a {
self.ty
.strip_references()
.as_adt()
.into_iter()
.flat_map(|(_, substs)| substs.iter(Interner))
.filter_map(|arg| {
.filter_map(move |arg| {
// arg can be either a `Ty` or `constant`
if let Some(ty) = arg.ty(Interner) {
Some(format_smolstr!("{}", ty.display(db)))
Some(format_smolstr!("{}", ty.display(db, edition)))
} else {
arg.constant(Interner).map(|const_| format_smolstr!("{}", const_.display(db)))
arg.constant(Interner)
.map(|const_| format_smolstr!("{}", const_.display(db, edition)))
}
})
}
@ -4724,13 +4853,17 @@ impl Type {
pub fn generic_parameters<'a>(
&'a self,
db: &'a dyn HirDatabase,
edition: Edition,
) -> impl Iterator<Item = SmolStr> + 'a {
// iterate the lifetime
self.as_adt()
.and_then(|a| a.lifetime(db).map(|lt| lt.name.display_no_db().to_smolstr()))
.and_then(|a| {
// Lifetimes do not need edition-specific handling as they cannot be escaped.
a.lifetime(db).map(|lt| lt.name.display_no_db(Edition::Edition2015).to_smolstr())
})
.into_iter()
// add the type and const parameters
.chain(self.type_and_const_arguments(db))
.chain(self.type_and_const_arguments(db, edition))
}
pub fn iterate_method_candidates_with_traits<T>(

View File

@ -4,6 +4,7 @@ mod source_to_def;
use std::{
cell::RefCell,
convert::Infallible,
fmt, iter, mem,
ops::{self, ControlFlow, Not},
};
@ -22,9 +23,11 @@ use hir_expand::{
builtin::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
files::InRealFile,
inert_attr_macro::find_builtin_attr_idx,
name::AsName,
FileRange, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt,
};
use intern::Symbol;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
@ -48,11 +51,7 @@ use crate::{
Variant, VariantDef,
};
pub enum DescendPreference {
SameText,
SameKind,
None,
}
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum PathResolution {
@ -182,6 +181,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
/// descend it and find again
// FIXME: Rethink this API
pub fn find_node_at_offset_with_descend<N: AstNode>(
&self,
node: &SyntaxNode,
@ -190,8 +190,9 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
}
/// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
/// Find an AstNode by offset inside SyntaxNode, if it is inside an attribute macro call,
/// descend it and find again
// FIXME: Rethink this API
pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
&'slf self,
node: &SyntaxNode,
@ -545,51 +546,53 @@ impl<'db> SemanticsImpl<'db> {
)
}
/// Retrieves all the formatting parts of the format_args! template string.
pub fn as_format_args_parts(
&self,
string: &ast::String,
) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
if let Some(quote) = string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, string.syntax().clone())
.into_iter()
.find_map(|token| {
let string = ast::String::cast(token)?;
let literal =
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
.map(|(range, res)| (range + quote.end(), res))
.collect();
Some(res)
});
}
None
let quote = string.open_quote_text_range()?;
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
self.descend_into_macros_breakable(token, |token| {
(|| {
let token = token.value;
let string = ast::String::cast(token)?;
let literal =
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
.map(|(range, res)| (range + quote.end(), res))
.collect();
Some(res)
})()
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
})
}
/// Retrieves the formatting part of the format_args! template string at the given offset.
pub fn check_for_format_args_template(
&self,
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
if let Some(original_string) = ast::String::cast(original_token.clone()) {
if let Some(quote) = original_string.open_quote_text_range() {
return self
.descend_into_macros(DescendPreference::SameText, original_token)
.into_iter()
.find_map(|token| {
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset.checked_sub(quote.end())?,
)
})
.map(|(range, res)| (range + quote.end(), res));
}
}
None
let original_string = ast::String::cast(original_token.clone())?;
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
let quote = original_string.open_quote_text_range()?;
self.descend_into_macros_breakable(original_token, |token| {
(|| {
let token = token.value;
self.resolve_offset_in_format_args(
ast::String::cast(token)?,
offset.checked_sub(quote.end())?,
)
.map(|(range, res)| (range + quote.end(), res))
})()
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
})
}
fn resolve_offset_in_format_args(
@ -619,30 +622,37 @@ impl<'db> SemanticsImpl<'db> {
Some(it) => it,
None => return res,
};
let file = self.find_file(node.syntax());
let Some(file_id) = file.file_id.file_id() else {
return res;
};
if first == last {
// node is just the token, so descend the token
self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
if let Some(node) = value
.parent_ancestors()
.take_while(|it| it.text_range() == value.text_range())
.find_map(N::cast)
{
res.push(node)
}
ControlFlow::Continue(())
});
self.descend_into_macros_impl(
InRealFile::new(file_id, first),
&mut |InFile { value, .. }| {
if let Some(node) = value
.parent_ancestors()
.take_while(|it| it.text_range() == value.text_range())
.find_map(N::cast)
{
res.push(node)
}
CONTINUE_NO_BREAKS
},
);
} else {
// Descend first and last token, then zip them to look for the node they belong to
let mut scratch: SmallVec<[_; 1]> = smallvec![];
self.descend_into_macros_impl(first, &mut |token| {
self.descend_into_macros_impl(InRealFile::new(file_id, first), &mut |token| {
scratch.push(token);
ControlFlow::Continue(())
CONTINUE_NO_BREAKS
});
let mut scratch = scratch.into_iter();
self.descend_into_macros_impl(
last,
InRealFile::new(file_id, last),
&mut |InFile { value: last, file_id: last_fid }| {
if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
if first_fid == last_fid {
@ -659,130 +669,151 @@ impl<'db> SemanticsImpl<'db> {
}
}
}
ControlFlow::Continue(())
CONTINUE_NO_BREAKS
},
);
}
res
}
/// Descend the token into its macro call if it is part of one, returning the tokens in the
/// expansion that it is associated with.
pub fn descend_into_macros(
fn is_inside_macro_call(token: &SyntaxToken) -> bool {
token.parent_ancestors().any(|ancestor| {
if ast::MacroCall::can_cast(ancestor.kind()) {
return true;
}
// Check if it is an item (only items can have macro attributes) that has a non-builtin attribute.
let Some(item) = ast::Item::cast(ancestor) else { return false };
item.attrs().any(|attr| {
let Some(meta) = attr.meta() else { return false };
let Some(path) = meta.path() else { return false };
let Some(attr_name) = path.as_single_name_ref() else { return true };
let attr_name = attr_name.text();
let attr_name = attr_name.as_str();
attr_name == "derive" || find_builtin_attr_idx(&Symbol::intern(attr_name)).is_none()
})
})
}
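/// Descends the token only if it is actually inside a macro call (or an item carrying a
/// non-builtin attribute), returning the token unchanged otherwise.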
pub fn descend_into_macros_exact_if_in_macro(
&self,
mode: DescendPreference,
token: SyntaxToken,
) -> SmallVec<[SyntaxToken; 1]> {
enum Dp<'t> {
SameText(&'t str),
SameKind(SyntaxKind),
None,
if Self::is_inside_macro_call(&token) {
self.descend_into_macros_exact(token)
} else {
smallvec![token]
}
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
let mode = match mode {
DescendPreference::SameText => Dp::SameText(token.text()),
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
DescendPreference::None => Dp::None,
};
}
pub fn descend_into_macros_cb(
&self,
token: SyntaxToken,
mut cb: impl FnMut(InFile<SyntaxToken>),
) {
if let Ok(token) = self.wrap_token_infile(token).into_real_file() {
self.descend_into_macros_impl(token, &mut |t| {
cb(t);
CONTINUE_NO_BREAKS
});
}
}
pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut res = smallvec![];
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode {
Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => {
let kind = fetch_kind(&value);
kind == preferred_kind
// special case for derive macros
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
}
Dp::None => true,
};
if is_a_match {
res.push(value);
}
ControlFlow::Continue(())
});
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
self.descend_into_macros_impl(token, &mut |t| {
res.push(t.value);
CONTINUE_NO_BREAKS
});
}
if res.is_empty() {
res.push(token);
}
res
}
pub fn descend_into_macros_single(
pub fn descend_into_macros_breakable<T>(
&self,
mode: DescendPreference,
token: SyntaxToken,
) -> SyntaxToken {
enum Dp<'t> {
SameText(&'t str),
SameKind(SyntaxKind),
None,
}
let fetch_kind = |token: &SyntaxToken| match token.parent() {
Some(node) => match node.kind() {
kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind,
_ => token.kind(),
},
None => token.kind(),
};
let mode = match mode {
DescendPreference::SameText => Dp::SameText(token.text()),
DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)),
DescendPreference::None => Dp::None,
};
let mut res = token.clone();
self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
let is_a_match = match mode {
Dp::SameText(text) => value.text() == text,
Dp::SameKind(preferred_kind) => {
let kind = fetch_kind(&value);
kind == preferred_kind
// special case for derive macros
|| (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF)
}
Dp::None => true,
};
res = value;
if is_a_match {
ControlFlow::Break(())
} else {
ControlFlow::Continue(())
}
});
res
token: InRealFile<SyntaxToken>,
mut cb: impl FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
) -> Option<T> {
self.descend_into_macros_impl(token.clone(), &mut cb)
}
fn descend_into_macros_impl(
/// Descends the token into expansions, returning the tokens that match the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_exact(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
let mut r = smallvec![];
let text = token.text();
let kind = token.kind();
self.descend_into_macros_cb(token.clone(), |InFile { value, file_id: _ }| {
let mapped_kind = value.kind();
let any_ident_match = || kind.is_any_identifier() && value.kind().is_any_identifier();
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
if matches {
r.push(value);
}
});
if r.is_empty() {
r.push(token);
}
r
}
/// Descends the token into expansions, returning the first token that matches the input
/// token's [`SyntaxKind`] and text.
pub fn descend_into_macros_single_exact(&self, token: SyntaxToken) -> SyntaxToken {
let text = token.text();
let kind = token.kind();
if let Ok(token) = self.wrap_token_infile(token.clone()).into_real_file() {
self.descend_into_macros_breakable(token.clone(), |InFile { value, file_id: _ }| {
let mapped_kind = value.kind();
let any_ident_match =
|| kind.is_any_identifier() && value.kind().is_any_identifier();
let matches = (kind == mapped_kind || any_ident_match()) && text == value.text();
if matches {
ControlFlow::Break(value)
} else {
ControlFlow::Continue(())
}
})
} else {
None
}
.unwrap_or(token)
}
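// The `descend_into_macros_*` family above replaces the old `DescendPreference`-based API:
// `descend_into_macros_cb` visits every mapped token, `descend_into_macros` collects them,
// `descend_into_macros_breakable` allows early exit via `ControlFlow`, and the `_exact`
// variants keep only tokens whose kind and text match the input token.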
fn descend_into_macros_impl<T>(
&self,
token: SyntaxToken,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<()>,
) {
InRealFile { value: token, file_id }: InRealFile<SyntaxToken>,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> ControlFlow<T>,
) -> Option<T> {
let _p = tracing::info_span!("descend_into_macros_impl").entered();
let (sa, span, file_id) =
match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(sa) => match sa.file_id.file_id() {
Some(file_id) => (
sa,
self.db.real_span_map(file_id).span_for_range(token.text_range()),
file_id.into(),
),
None => {
stdx::never!();
return;
}
},
None => return,
};
let (sa, span, file_id) = token
.parent()
.and_then(|parent| {
self.analyze_impl(InRealFile::new(file_id, &parent).into(), None, false)
})
.and_then(|sa| {
let file_id = sa.file_id.file_id()?;
Some((
sa,
self.db.real_span_map(file_id).span_for_range(token.text_range()),
HirFileId::from(file_id),
))
})?;
let mut m_cache = self.macro_call_cache.borrow_mut();
let def_map = sa.resolver.def_map();
// A stack of tokens to process, along with the file they came from
// These are tracked to know which macro calls we still have to look into;
// the tokens themselves aren't that interesting, as the span that is being used to map
// things down never changes.
let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])];
// Process the expansion of a call, pushing all tokens with our span in the expansion back onto our stack
let process_expansion_for_token = |stack: &mut Vec<_>, macro_file| {
let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
Some(
@ -809,7 +840,13 @@ impl<'db> SemanticsImpl<'db> {
res
};
while let Some((file_id, mut tokens)) = stack.pop() {
// Filters out all tokens that contain the given range (usually the macro call); any such
// token is redundant, as the corresponding macro call has already been processed
let filter_duplicates = |tokens: &mut SmallVec<_>, range: TextRange| {
tokens.retain(|t: &mut SyntaxToken| !range.contains_range(t.text_range()))
};
while let Some((expansion, ref mut tokens)) = stack.pop() {
while let Some(token) = tokens.pop() {
let was_not_remapped = (|| {
// First expand into attribute invocations
@ -817,7 +854,7 @@ impl<'db> SemanticsImpl<'db> {
token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
// Don't force populate the dyn cache for items that don't have an attribute anyways
item.attrs().next()?;
Some((ctx.item_to_macro_call(InFile::new(file_id, &item))?, item))
Some((ctx.item_to_macro_call(InFile::new(expansion, &item))?, item))
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
@ -849,9 +886,7 @@ impl<'db> SemanticsImpl<'db> {
})
.unwrap_or_else(|| text_range.start());
let text_range = TextRange::new(start, text_range.end());
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
filter_duplicates(tokens, text_range);
return process_expansion_for_token(&mut stack, file_id);
}
@ -862,6 +897,7 @@ impl<'db> SemanticsImpl<'db> {
.map_while(Either::<ast::TokenTree, ast::Meta>::cast)
.last()?;
match tt {
// function-like macro call
Either::Left(tt) => {
if tt.left_delimiter_token().map_or(false, |it| it == token) {
return None;
@ -870,7 +906,7 @@ impl<'db> SemanticsImpl<'db> {
return None;
}
let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
let mcall = InFile::new(file_id, macro_call);
let mcall = InFile::new(expansion, macro_call);
let file_id = match m_cache.get(&mcall) {
Some(&it) => it,
None => {
@ -888,9 +924,7 @@ impl<'db> SemanticsImpl<'db> {
}
};
let text_range = tt.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this one
tokens.retain(|t| !text_range.contains_range(t.text_range()));
filter_duplicates(tokens, text_range);
process_expansion_for_token(&mut stack, file_id).or(file_id
.eager_arg(self.db.upcast())
@ -899,6 +933,7 @@ impl<'db> SemanticsImpl<'db> {
process_expansion_for_token(&mut stack, arg.as_macro_file())
}))
}
// derive or derive helper
Either::Right(meta) => {
// attribute we failed expansion for earlier, this might be a derive invocation
// or derive helper attribute
@ -910,8 +945,8 @@ impl<'db> SemanticsImpl<'db> {
// so try downmapping the token into the pseudo derive expansion
// see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
ctx.attr_to_derive_macro_call(
InFile::new(file_id, &adt),
InFile::new(file_id, attr.clone()),
InFile::new(expansion, &adt),
InFile::new(expansion, attr.clone()),
)
.map(|(_, call_id, _)| call_id)
});
@ -945,28 +980,29 @@ impl<'db> SemanticsImpl<'db> {
)
}
}?;
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) {
if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(expansion, &adt))) {
return None;
}
let attr_name =
attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
// Not an attribute, nor a derive, so it's either a builtin or a derive helper
// Not an attribute, nor a derive, so it's either an inert attribute or a derive helper
// Try to resolve to a derive helper and downmap
let id = self.db.ast_id_map(file_id).ast_id(&adt);
let id = self.db.ast_id_map(expansion).ast_id(&adt);
let helpers =
def_map.derive_helpers_in_scope(InFile::new(file_id, id))?;
def_map.derive_helpers_in_scope(InFile::new(expansion, id))?;
if !helpers.is_empty() {
let text_range = attr.syntax().text_range();
// remove any other token in this macro input, all their mappings are the
// same as this
tokens.retain(|t| !text_range.contains_range(t.text_range()));
filter_duplicates(tokens, text_range);
}
let mut res = None;
for (.., derive) in
helpers.iter().filter(|(helper, ..)| *helper == attr_name)
{
// as there may be multiple derives registering the same helper
// name, we have to make sure to call this for all of them!
// FIXME: We need to call `f` for all of them as well though!
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_macro_file(),
@ -978,11 +1014,14 @@ impl<'db> SemanticsImpl<'db> {
})()
.is_none();
if was_not_remapped && f(InFile::new(file_id, token)).is_break() {
break;
if was_not_remapped {
if let ControlFlow::Break(b) = f(InFile::new(expansion, token)) {
return Some(b);
}
}
}
}
None
}
// Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
@ -995,7 +1034,7 @@ impl<'db> SemanticsImpl<'db> {
offset: TextSize,
) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
node.token_at_offset(offset)
.map(move |token| self.descend_into_macros(DescendPreference::None, token))
.map(move |token| self.descend_into_macros_exact(token))
.map(|descendants| {
descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
})
@ -1179,7 +1218,8 @@ impl<'db> SemanticsImpl<'db> {
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
}
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(_, m)) => {
// FIXME: Handle lifetimes here
Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
}
hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
@ -1413,11 +1453,13 @@ impl<'db> SemanticsImpl<'db> {
/// Returns `None` if the file of the node is not part of a crate.
fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
let node = self.find_file(node);
self.analyze_impl(node, None, true)
}
/// Returns `None` if the file of the node is not part of a crate.
fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
let node = self.find_file(node);
self.analyze_impl(node, None, false)
}
@ -1426,17 +1468,17 @@ impl<'db> SemanticsImpl<'db> {
node: &SyntaxNode,
offset: TextSize,
) -> Option<SourceAnalyzer> {
let node = self.find_file(node);
self.analyze_impl(node, Some(offset), false)
}
fn analyze_impl(
&self,
node: &SyntaxNode,
node: InFile<&SyntaxNode>,
offset: Option<TextSize>,
infer_body: bool,
) -> Option<SourceAnalyzer> {
let _p = tracing::info_span!("SemanticsImpl::analyze_impl").entered();
let node = self.find_file(node);
let container = self.with_ctx(|ctx| ctx.find_container(node))?;
@ -1481,6 +1523,11 @@ impl<'db> SemanticsImpl<'db> {
InFile::new(file_id, node)
}
fn wrap_token_infile(&self, token: SyntaxToken) -> InFile<SyntaxToken> {
let InFile { file_id, .. } = self.find_file(&token.parent().unwrap());
InFile::new(file_id, token)
}
/// Wraps the node in a [`InFile`] with the file id it belongs to.
fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
let root_node = find_root(node);

View File

@ -9,6 +9,7 @@ use hir_def::{
};
use hir_expand::HirFileId;
use hir_ty::{db::HirDatabase, display::HirDisplay};
use span::Edition;
use syntax::{ast::HasName, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, ToSmolStr};
use crate::{Module, ModuleDef, Semantics};
@ -54,6 +55,7 @@ pub struct SymbolCollector<'a> {
symbols: Vec<FileSymbol>,
work: Vec<SymbolCollectorWork>,
current_container_name: Option<SmolStr>,
edition: Edition,
}
/// Given a [`ModuleId`] and a [`HirDatabase`], use the DefMap for the module's crate to collect
@ -65,10 +67,13 @@ impl<'a> SymbolCollector<'a> {
symbols: Default::default(),
work: Default::default(),
current_container_name: None,
edition: Edition::Edition2015,
}
}
pub fn collect(&mut self, module: Module) {
self.edition = module.krate().edition(self.db);
// The initial work is the root module we're collecting, additional work will
// be populated as we traverse the module's definitions.
self.work.push(SymbolCollectorWork { module_id: module.into(), parent: None });
@ -209,7 +214,8 @@ impl<'a> SymbolCollector<'a> {
fn collect_from_impl(&mut self, impl_id: ImplId) {
let impl_data = self.db.impl_data(impl_id);
let impl_name = Some(SmolStr::new(impl_data.self_ty.display(self.db).to_string()));
let impl_name =
Some(SmolStr::new(impl_data.self_ty.display(self.db, self.edition).to_string()));
self.with_container_name(impl_name, |s| {
for &assoc_item_id in impl_data.items.iter() {
s.push_assoc_item(assoc_item_id)
@ -239,16 +245,16 @@ impl<'a> SymbolCollector<'a> {
fn def_with_body_id_name(&self, body_id: DefWithBodyId) -> Option<SmolStr> {
match body_id {
DefWithBodyId::FunctionId(id) => {
Some(self.db.function_data(id).name.display_no_db().to_smolstr())
Some(self.db.function_data(id).name.display_no_db(self.edition).to_smolstr())
}
DefWithBodyId::StaticId(id) => {
Some(self.db.static_data(id).name.display_no_db().to_smolstr())
Some(self.db.static_data(id).name.display_no_db(self.edition).to_smolstr())
}
DefWithBodyId::ConstId(id) => {
Some(self.db.const_data(id).name.as_ref()?.display_no_db().to_smolstr())
Some(self.db.const_data(id).name.as_ref()?.display_no_db(self.edition).to_smolstr())
}
DefWithBodyId::VariantId(id) => {
Some(self.db.enum_variant_data(id).name.display_no_db().to_smolstr())
Some(self.db.enum_variant_data(id).name.display_no_db(self.edition).to_smolstr())
}
DefWithBodyId::InTypeConstId(_) => Some("in type const".into()),
}

View File

@ -7,6 +7,7 @@ use hir_ty::{
display::{DisplaySourceCodeError, HirDisplay},
};
use itertools::Itertools;
use span::Edition;
use crate::{
Adt, AsAssocItem, AssocItemContainer, Const, ConstParam, Field, Function, Local, ModuleDef,
@ -29,9 +30,10 @@ fn mod_item_path_str(
sema_scope: &SemanticsScope<'_>,
def: &ModuleDef,
cfg: ImportPathConfig,
edition: Edition,
) -> Result<String, DisplaySourceCodeError> {
let path = mod_item_path(sema_scope, def, cfg);
path.map(|it| it.display(sema_scope.db.upcast()).to_string())
path.map(|it| it.display(sema_scope.db.upcast(), edition).to_string())
.ok_or(DisplaySourceCodeError::PathNotFound)
}
@ -97,37 +99,38 @@ impl Expr {
sema_scope: &SemanticsScope<'_>,
many_formatter: &mut dyn FnMut(&Type) -> String,
cfg: ImportPathConfig,
edition: Edition,
) -> Result<String, DisplaySourceCodeError> {
let db = sema_scope.db;
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg);
let mod_item_path_str = |s, def| mod_item_path_str(s, def, cfg, edition);
match self {
Expr::Const(it) => match it.as_assoc_item(db).map(|it| it.container(db)) {
Some(container) => {
let container_name = container_name(container, sema_scope, cfg)?;
let container_name = container_name(container, sema_scope, cfg, edition)?;
let const_name = it
.name(db)
.map(|c| c.display(db.upcast()).to_string())
.map(|c| c.display(db.upcast(), edition).to_string())
.unwrap_or(String::new());
Ok(format!("{container_name}::{const_name}"))
}
None => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)),
},
Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)),
Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()),
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()),
Expr::Local(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast(), edition).to_string()),
Expr::FamousType { value, .. } => Ok(value.to_string()),
Expr::Function { func, params, .. } => {
let args = params
.iter()
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
match func.as_assoc_item(db).map(|it| it.container(db)) {
Some(container) => {
let container_name = container_name(container, sema_scope, cfg)?;
let fn_name = func.name(db).display(db.upcast()).to_string();
let container_name = container_name(container, sema_scope, cfg, edition)?;
let fn_name = func.name(db).display(db.upcast(), edition).to_string();
Ok(format!("{container_name}::{fn_name}({args})"))
}
None => {
@ -141,12 +144,13 @@ impl Expr {
return Ok(many_formatter(&target.ty(db)));
}
let func_name = func.name(db).display(db.upcast()).to_string();
let func_name = func.name(db).display(db.upcast(), edition).to_string();
let self_param = func.self_param(db).unwrap();
let target_str = target.gen_source_code(sema_scope, many_formatter, cfg)?;
let target_str =
target.gen_source_code(sema_scope, many_formatter, cfg, edition)?;
let args = params
.iter()
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -176,7 +180,7 @@ impl Expr {
StructKind::Tuple => {
let args = params
.iter()
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg))
.map(|f| f.gen_source_code(sema_scope, many_formatter, cfg, edition))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -190,8 +194,8 @@ impl Expr {
.map(|(a, f)| {
let tmp = format!(
"{}: {}",
f.name(db).display(db.upcast()),
a.gen_source_code(sema_scope, many_formatter, cfg)?
f.name(db).display(db.upcast(), edition),
a.gen_source_code(sema_scope, many_formatter, cfg, edition)?
);
Ok(tmp)
})
@ -211,7 +215,7 @@ impl Expr {
StructKind::Tuple => {
let args = params
.iter()
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg, edition))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -225,8 +229,8 @@ impl Expr {
.map(|(a, f)| {
let tmp = format!(
"{}: {}",
f.name(db).display(db.upcast()),
a.gen_source_code(sema_scope, many_formatter, cfg)?
f.name(db).display(db.upcast(), edition),
a.gen_source_code(sema_scope, many_formatter, cfg, edition)?
);
Ok(tmp)
})
@ -244,7 +248,7 @@ impl Expr {
Expr::Tuple { params, .. } => {
let args = params
.iter()
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg))
.map(|a| a.gen_source_code(sema_scope, many_formatter, cfg, edition))
.collect::<Result<Vec<String>, DisplaySourceCodeError>>()?
.into_iter()
.join(", ");
@ -256,8 +260,8 @@ impl Expr {
return Ok(many_formatter(&expr.ty(db)));
}
let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
let field = field.name(db).display(db.upcast()).to_string();
let strukt = expr.gen_source_code(sema_scope, many_formatter, cfg, edition)?;
let field = field.name(db).display(db.upcast(), edition).to_string();
Ok(format!("{strukt}.{field}"))
}
Expr::Reference(expr) => {
@ -265,7 +269,7 @@ impl Expr {
return Ok(many_formatter(&expr.ty(db)));
}
let inner = expr.gen_source_code(sema_scope, many_formatter, cfg)?;
let inner = expr.gen_source_code(sema_scope, many_formatter, cfg, edition)?;
Ok(format!("&{inner}"))
}
Expr::Many(ty) => Ok(many_formatter(ty)),
@ -353,17 +357,18 @@ fn container_name(
container: AssocItemContainer,
sema_scope: &SemanticsScope<'_>,
cfg: ImportPathConfig,
edition: Edition,
) -> Result<String, DisplaySourceCodeError> {
let container_name = match container {
crate::AssocItemContainer::Trait(trait_) => {
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg)?
mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_), cfg, edition)?
}
crate::AssocItemContainer::Impl(imp) => {
let self_ty = imp.self_ty(sema_scope.db);
// Should it be guaranteed that `mod_item_path` always exists?
match self_ty.as_adt().and_then(|adt| mod_item_path(sema_scope, &adt.into(), cfg)) {
Some(path) => path.display(sema_scope.db.upcast()).to_string(),
None => self_ty.display(sema_scope.db).to_string(),
Some(path) => path.display(sema_scope.db.upcast(), edition).to_string(),
None => self_ty.display(sema_scope.db, edition).to_string(),
}
}
};

View File

@ -1,5 +1,8 @@
use hir::HasSource;
use syntax::ast::{self, make, AstNode};
use syntax::{
ast::{self, make, AstNode},
Edition,
};
use crate::{
assist_context::{AssistContext, Assists},
@ -150,14 +153,22 @@ fn add_missing_impl_members_inner(
&missing_items,
trait_,
&new_impl_def,
target_scope,
&target_scope,
);
if let Some(cap) = ctx.config.snippet_cap {
let mut placeholder = None;
if let DefaultMethods::No = mode {
if let ast::AssocItem::Fn(func) = &first_new_item {
if try_gen_trait_body(ctx, func, trait_ref, &impl_def).is_none() {
if try_gen_trait_body(
ctx,
func,
trait_ref,
&impl_def,
target_scope.krate().edition(ctx.sema.db),
)
.is_none()
{
if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
{
if m.syntax().text() == "todo!()" {
@ -182,9 +193,11 @@ fn try_gen_trait_body(
func: &ast::Fn,
trait_ref: hir::TraitRef,
impl_def: &ast::Impl,
edition: Edition,
) -> Option<()> {
let trait_path =
make::ext::ident_path(&trait_ref.trait_().name(ctx.db()).display(ctx.db()).to_string());
let trait_path = make::ext::ident_path(
&trait_ref.trait_().name(ctx.db()).display(ctx.db(), edition).to_string(),
);
let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
let adt = hir_ty.as_adt()?.source(ctx.db())?;
gen_trait_fn_body(func, &trait_path, &adt.value, Some(trait_ref))

View File

@ -445,7 +445,8 @@ fn build_pat(
) -> Option<ast::Pat> {
match var {
ExtendedVariant::Variant(var) => {
let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?);
let edition = module.krate().edition(db);
let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => {

View File

@ -8,7 +8,7 @@ use ide_db::{
insert_use::{insert_use, insert_use_as_alias, ImportScope},
},
};
use syntax::{ast, AstNode, NodeOrToken, SyntaxElement};
use syntax::{ast, AstNode, Edition, NodeOrToken, SyntaxElement};
use crate::{AssistContext, AssistId, AssistKind, Assists, GroupLabel};
@ -120,13 +120,14 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
// prioritize more relevant imports
proposed_imports
.sort_by_key(|import| Reverse(relevance_score(ctx, import, current_module.as_ref())));
let edition = current_module.map(|it| it.krate().edition(ctx.db())).unwrap_or(Edition::CURRENT);
let group_label = group_label(import_assets.import_candidate());
for import in proposed_imports {
let import_path = import.import_path;
let (assist_id, import_name) =
(AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db()));
(AssistId("auto_import", AssistKind::QuickFix), import_path.display(ctx.db(), edition));
acc.add_group(
&group_label,
assist_id,
@ -138,7 +139,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
};
insert_use(&scope, mod_path_to_ast(&import_path), &ctx.config.insert_use);
insert_use(&scope, mod_path_to_ast(&import_path, edition), &ctx.config.insert_use);
},
);
@ -165,7 +166,7 @@ pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<
};
insert_use_as_alias(
&scope,
mod_path_to_ast(&import_path),
mod_path_to_ast(&import_path, edition),
&ctx.config.insert_use,
);
},

View File

@ -339,6 +339,7 @@ fn augment_references_with_imports(
let cfg = ctx.config.import_path_config();
let edition = target_module.krate().edition(ctx.db());
references
.into_iter()
.filter_map(|FileReference { range, name, .. }| {
@ -361,7 +362,10 @@ fn augment_references_with_imports(
cfg,
)
.map(|mod_path| {
make::path_concat(mod_path_to_ast(&mod_path), make::path_from_text("Bool"))
make::path_concat(
mod_path_to_ast(&mod_path, edition),
make::path_from_text("Bool"),
)
});
import_scope.zip(path)

View File

@ -159,7 +159,7 @@ pub(crate) fn convert_bool_then_to_if(acc: &mut Assists, ctx: &AssistContext<'_>
};
// Verify this is `bool::then` that is being called.
let func = ctx.sema.resolve_method_call(&mcall)?;
if func.name(ctx.sema.db).display(ctx.db()).to_string() != "then" {
if !func.name(ctx.sema.db).eq_ident("then") {
return None;
}
let assoc = func.as_assoc_item(ctx.sema.db)?;
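The check above now compares the resolved method's name with `Name::eq_ident("then")` instead of rendering it via `display(...).to_string()` and comparing strings. A minimal sketch of that kind of comparison, with a hypothetical `Name` stand-in (the real method lives in `hir`; the `r#`-stripping detail is an assumption):

    // Hypothetical stand-in for `hir::Name::eq_ident`: compare the stored
    // symbol against a plain identifier without allocating a rendered string,
    // treating `r#ident` and `ident` as the same name.
    struct Name(String);

    impl Name {
        fn eq_ident(&self, ident: &str) -> bool {
            self.0.strip_prefix("r#").unwrap_or(&self.0) == ident
        }
    }

    fn main() {
        assert!(Name("then".into()).eq_ident("then"));
        assert!(Name("r#then".into()).eq_ident("then"));
        assert!(!Name("map".into()).eq_ident("then"));
    }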

File diff suppressed because it is too large

View File

@ -51,7 +51,10 @@ pub(crate) fn convert_into_to_from(acc: &mut Assists, ctx: &AssistContext<'_>) -
Some(hir::PathResolution::Def(module_def)) => module_def,
_ => return None,
};
mod_path_to_ast(&module.find_path(ctx.db(), src_type_def, cfg)?)
mod_path_to_ast(
&module.find_path(ctx.db(), src_type_def, cfg)?,
module.krate().edition(ctx.db()),
)
};
let dest_type = match &ast_trait {

View File

@ -114,12 +114,16 @@ pub(crate) fn convert_for_loop_with_for_each(
|builder| {
let mut buf = String::new();
if let Some((expr_behind_ref, method)) =
if let Some((expr_behind_ref, method, krate)) =
is_ref_and_impls_iter_method(&ctx.sema, &iterable)
{
// We have either "for x in &col" and col implements a method called iter
// or "for x in &mut col" and col implements a method called iter_mut
format_to!(buf, "{expr_behind_ref}.{}()", method.display(ctx.db()));
format_to!(
buf,
"{expr_behind_ref}.{}()",
method.display(ctx.db(), krate.edition(ctx.db()))
);
} else if let ast::Expr::RangeExpr(..) = iterable {
// range expressions need to be parenthesized for the syntax to be correct
format_to!(buf, "({iterable})");
@ -144,7 +148,7 @@ pub(crate) fn convert_for_loop_with_for_each(
fn is_ref_and_impls_iter_method(
sema: &hir::Semantics<'_, ide_db::RootDatabase>,
iterable: &ast::Expr,
) -> Option<(ast::Expr, hir::Name)> {
) -> Option<(ast::Expr, hir::Name, hir::Crate)> {
let ref_expr = match iterable {
ast::Expr::RefExpr(r) => r,
_ => return None,
@ -172,7 +176,7 @@ fn is_ref_and_impls_iter_method(
return None;
}
Some((expr_behind_ref, wanted_method))
Some((expr_behind_ref, wanted_method, krate))
}
/// Whether iterable implements core::Iterator

View File

@ -2,8 +2,8 @@ use either::Either;
use ide_db::{defs::Definition, search::FileReference};
use itertools::Itertools;
use syntax::{
ast::{self, AstNode, HasGenericParams, HasVisibility},
match_ast, SyntaxKind,
ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
match_ast, ted, SyntaxKind,
};
use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
@ -87,9 +87,14 @@ fn edit_struct_def(
) {
// Note that we don't need to consider macro files in this function because this is
// currently not triggered for struct definitions inside macro calls.
let tuple_fields = record_fields
.fields()
.filter_map(|f| Some(ast::make::tuple_field(f.visibility(), f.ty()?)));
let tuple_fields = record_fields.fields().filter_map(|f| {
let field = ast::make::tuple_field(f.visibility(), f.ty()?).clone_for_update();
ted::insert_all(
ted::Position::first_child_of(field.syntax()),
f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
);
Some(field)
});
let tuple_fields = ast::make::tuple_field_list(tuple_fields);
let record_fields_text_range = record_fields.syntax().text_range();
@ -975,6 +980,22 @@ impl HasAssoc for Struct {
let Self::Assoc { value } = a;
}
}
"#,
);
}
#[test]
fn fields_with_attrs() {
check_assist(
convert_named_struct_to_tuple_struct,
r#"
pub struct $0Foo {
#[my_custom_attr]
value: u32,
}
"#,
r#"
pub struct Foo(#[my_custom_attr] u32);
"#,
);
}

View File

@ -211,7 +211,7 @@ fn augment_references_with_imports(
)
.map(|mod_path| {
make::path_concat(
mod_path_to_ast(&mod_path),
mod_path_to_ast(&mod_path, target_module.krate().edition(ctx.db())),
make::path_from_text(struct_name),
)
});

View File

@ -1,8 +1,8 @@
use either::Either;
use ide_db::defs::{Definition, NameRefClass};
use syntax::{
ast::{self, AstNode, HasGenericParams, HasVisibility},
match_ast, SyntaxKind, SyntaxNode,
ast::{self, AstNode, HasAttrs, HasGenericParams, HasVisibility},
match_ast, ted, SyntaxKind, SyntaxNode,
};
use crate::{assist_context::SourceChangeBuilder, AssistContext, AssistId, AssistKind, Assists};
@ -83,10 +83,14 @@ fn edit_struct_def(
tuple_fields: ast::TupleFieldList,
names: Vec<ast::Name>,
) {
let record_fields = tuple_fields
.fields()
.zip(names)
.filter_map(|(f, name)| Some(ast::make::record_field(f.visibility(), name, f.ty()?)));
let record_fields = tuple_fields.fields().zip(names).filter_map(|(f, name)| {
let field = ast::make::record_field(f.visibility(), name, f.ty()?).clone_for_update();
ted::insert_all(
ted::Position::first_child_of(field.syntax()),
f.attrs().map(|attr| attr.syntax().clone_subtree().clone_for_update().into()).collect(),
);
Some(field)
});
let record_fields = ast::make::record_field_list(record_fields);
let tuple_fields_text_range = tuple_fields.syntax().text_range();
@ -904,6 +908,19 @@ where
T: Foo,
{ pub field1: T }
"#,
);
}
#[test]
fn fields_with_attrs() {
check_assist(
convert_tuple_struct_to_named_struct,
r#"
pub struct $0Foo(#[my_custom_attr] u32);
"#,
r#"
pub struct Foo { #[my_custom_attr] field1: u32 }
"#,
);
}

View File

@ -1,5 +1,6 @@
use std::iter::once;
use std::iter;
use either::Either;
use ide_db::syntax_helpers::node_ext::is_pattern_cond;
use syntax::{
ast::{
@ -52,18 +53,30 @@ pub(crate) fn convert_while_to_loop(acc: &mut Assists, ctx: &AssistContext<'_>)
|edit| {
let while_indent_level = IndentLevel::from_node(while_expr.syntax());
let break_block =
make::block_expr(once(make::expr_stmt(make::expr_break(None, None)).into()), None)
.indent(while_indent_level);
let break_block = make::block_expr(
iter::once(make::expr_stmt(make::expr_break(None, None)).into()),
None,
)
.indent(while_indent_level);
let block_expr = if is_pattern_cond(while_cond.clone()) {
let if_expr = make::expr_if(while_cond, while_body, Some(break_block.into()));
let stmts = once(make::expr_stmt(if_expr).into());
let stmts = iter::once(make::expr_stmt(if_expr).into());
make::block_expr(stmts, None)
} else {
let if_cond = invert_boolean_expression(while_cond);
let if_expr = make::expr_if(if_cond, break_block, None);
let stmts = once(make::expr_stmt(if_expr).into()).chain(while_body.statements());
make::block_expr(stmts, while_body.tail_expr())
let if_expr = make::expr_if(if_cond, break_block, None).syntax().clone().into();
let elements = while_body.stmt_list().map_or_else(
|| Either::Left(iter::empty()),
|stmts| {
Either::Right(stmts.syntax().children_with_tokens().filter(|node_or_tok| {
// Filter out the trailing expr
!node_or_tok
.as_node()
.is_some_and(|node| ast::Expr::can_cast(node.kind()))
}))
},
);
make::hacky_block_expr(iter::once(if_expr).chain(elements), while_body.tail_expr())
};
let replacement = make::expr_loop(block_expr.indent(while_indent_level));
@ -182,6 +195,74 @@ fn main() {
bar();
}
}
"#,
);
}
#[test]
fn preserve_comments() {
check_assist(
convert_while_to_loop,
r#"
fn main() {
let mut i = 0;
$0while i < 5 {
// comment 1
dbg!(i);
// comment 2
i += 1;
// comment 3
}
}
"#,
r#"
fn main() {
let mut i = 0;
loop {
if i >= 5 {
break;
}
// comment 1
dbg!(i);
// comment 2
i += 1;
// comment 3
}
}
"#,
);
check_assist(
convert_while_to_loop,
r#"
fn main() {
let v = vec![1, 2, 3];
let iter = v.iter();
$0while let Some(i) = iter.next() {
// comment 1
dbg!(i);
// comment 2
}
}
"#,
r#"
fn main() {
let v = vec![1, 2, 3];
let iter = v.iter();
loop {
if let Some(i) = iter.next() {
// comment 1
dbg!(i);
// comment 2
} else {
break;
}
}
}
"#,
);
}
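The rewrite above stops rebuilding the loop body from its statements alone and instead copies the body's child elements (minus the trailing expression) via `make::hacky_block_expr`, which is what keeps the comments in the tests above. A toy model of the difference, deliberately not using the real `syntax` crate:

    // Simplified stand-in for the syntax elements of a block body.
    #[derive(Clone)]
    enum Element {
        Stmt(&'static str),
        Comment(&'static str),
        TailExpr(&'static str),
    }

    // Old approach: rebuild from statements only; comment trivia is dropped.
    fn rebuild_from_statements(body: &[Element]) -> Vec<Element> {
        body.iter().filter(|e| matches!(e, Element::Stmt(_))).cloned().collect()
    }

    // New approach: copy every child element except the trailing expression,
    // which the assist re-attaches separately as the block's tail.
    fn rebuild_from_elements(body: &[Element]) -> Vec<Element> {
        body.iter().filter(|e| !matches!(e, Element::TailExpr(_))).cloned().collect()
    }

    fn main() {
        let body = [
            Element::Comment("// comment 1"),
            Element::Stmt("dbg!(i);"),
            Element::Comment("// comment 2"),
            Element::Stmt("i += 1;"),
            Element::TailExpr("i"),
        ];
        assert_eq!(rebuild_from_statements(&body).len(), 2); // comments lost
        assert_eq!(rebuild_from_elements(&body).len(), 4); // comments kept
    }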

View File

@ -7,7 +7,7 @@ use ide_db::{
FxHashMap, FxHashSet,
};
use itertools::Itertools;
use syntax::{ast, ted, AstNode, SmolStr, SyntaxNode, ToSmolStr};
use syntax::{ast, ted, AstNode, Edition, SmolStr, SyntaxNode, ToSmolStr};
use text_edit::TextRange;
use crate::{
@ -81,6 +81,7 @@ struct StructEditData {
has_private_members: bool,
is_nested: bool,
is_ref: bool,
edition: Edition,
}
fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<StructEditData> {
@ -145,6 +146,7 @@ fn collect_data(ident_pat: ast::IdentPat, ctx: &AssistContext<'_>) -> Option<Str
names_in_scope,
is_nested,
is_ref,
edition: module.krate().edition(ctx.db()),
})
}
@ -180,7 +182,7 @@ fn build_assignment_edit(
) -> AssignmentEdit {
let ident_pat = builder.make_mut(data.ident_pat.clone());
let struct_path = mod_path_to_ast(&data.struct_def_path);
let struct_path = mod_path_to_ast(&data.struct_def_path, data.edition);
let is_ref = ident_pat.ref_token().is_some();
let is_mut = ident_pat.mut_token().is_some();
@ -247,7 +249,7 @@ fn generate_field_names(ctx: &AssistContext<'_>, data: &StructEditData) -> Vec<(
.visible_fields
.iter()
.map(|field| {
let field_name = field.name(ctx.db()).display_no_db().to_smolstr();
let field_name = field.name(ctx.db()).display_no_db(data.edition).to_smolstr();
let new_name = new_field_name(field_name.clone(), &data.names_in_scope);
(field_name, new_name)
})

View File

@ -66,7 +66,9 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let names_to_import = find_names_to_import(ctx, refs_in_target, imported_defs);
let expanded = make::use_tree_list(names_to_import.iter().map(|n| {
let path = make::ext::ident_path(&n.display(ctx.db()).to_string());
let path = make::ext::ident_path(
&n.display(ctx.db(), current_module.krate().edition(ctx.db())).to_string(),
);
make::use_tree(path, None, None, false)
}))
.clone_for_update();

View File

@ -1,11 +1,7 @@
use crate::{utils, AssistContext, Assists};
use hir::DescendPreference;
use ide_db::{
assists::{AssistId, AssistKind},
syntax_helpers::{
format_string::is_format_string,
format_string_exprs::{parse_format_exprs, Arg},
},
syntax_helpers::format_string_exprs::{parse_format_exprs, Arg},
};
use itertools::Itertools;
use syntax::{
@ -40,13 +36,7 @@ pub(crate) fn extract_expressions_from_format_string(
let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
let tt_delimiter = tt.left_delimiter_token()?.kind();
let expanded_t = ast::String::cast(
ctx.sema
.descend_into_macros_single(DescendPreference::SameKind, fmt_string.syntax().clone()),
)?;
if !is_format_string(&expanded_t) {
return None;
}
let _ = ctx.sema.as_format_args_parts(&fmt_string)?;
let (new_fmt, extracted_args) = parse_format_exprs(fmt_string.text()).ok()?;
if extracted_args.is_empty() {

View File

@ -3,8 +3,8 @@ use std::{iter, ops::RangeInclusive};
use ast::make;
use either::Either;
use hir::{
DescendPreference, HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef,
PathResolution, Semantics, TypeInfo, TypeParam,
HasSource, HirDisplay, InFile, Local, LocalSource, ModuleDef, PathResolution, Semantics,
TypeInfo, TypeParam,
};
use ide_db::{
defs::{Definition, NameRefClass},
@ -23,7 +23,7 @@ use syntax::{
self, edit::IndentLevel, edit_in_place::Indent, AstNode, AstToken, HasGenericParams,
HasName,
},
match_ast, ted, SyntaxElement,
match_ast, ted, Edition, SyntaxElement,
SyntaxKind::{self, COMMENT},
SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
};
@ -84,7 +84,6 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
};
let body = extraction_target(&node, range)?;
let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema)?;
let (locals_used, self_param) = body.analyze(&ctx.sema);
@ -92,6 +91,9 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let insert_after = node_to_insert_after(&body, anchor)?;
let semantics_scope = ctx.sema.scope(&insert_after)?;
let module = semantics_scope.module();
let edition = semantics_scope.krate().edition(ctx.db());
let (container_info, contains_tail_expr) = body.analyze_container(&ctx.sema, edition)?;
let ret_ty = body.return_ty(ctx)?;
let control_flow = body.external_control_flow(ctx, &container_info)?;
@ -217,7 +219,11 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
);
if let Some(mod_path) = mod_path {
insert_use(&scope, mod_path_to_ast(&mod_path), &ctx.config.insert_use);
insert_use(
&scope,
mod_path_to_ast(&mod_path, edition),
&ctx.config.insert_use,
);
}
}
}
@ -238,7 +244,13 @@ pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
let mut names_in_scope = vec![];
semantics_scope.process_all_names(&mut |name, _| {
names_in_scope.push(name.display(semantics_scope.db.upcast()).to_string())
names_in_scope.push(
name.display(
semantics_scope.db.upcast(),
semantics_scope.krate().edition(semantics_scope.db),
)
.to_string(),
)
});
let default_name = "fun_name";
@ -366,6 +378,7 @@ struct ContainerInfo {
ret_type: Option<hir::Type>,
generic_param_lists: Vec<ast::GenericParamList>,
where_clauses: Vec<ast::WhereClause>,
edition: Edition,
}
/// Control flow that is exported from extracted function
@ -489,8 +502,8 @@ impl Param {
}
}
fn to_arg(&self, ctx: &AssistContext<'_>) -> ast::Expr {
let var = path_expr_from_local(ctx, self.var);
fn to_arg(&self, ctx: &AssistContext<'_>, edition: Edition) -> ast::Expr {
let var = path_expr_from_local(ctx, self.var, edition);
match self.kind() {
ParamKind::Value | ParamKind::MutValue => var,
ParamKind::SharedRef => make::expr_ref(var, false),
@ -498,8 +511,13 @@ impl Param {
}
}
fn to_param(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Param {
let var = self.var.name(ctx.db()).display(ctx.db()).to_string();
fn to_param(
&self,
ctx: &AssistContext<'_>,
module: hir::Module,
edition: Edition,
) -> ast::Param {
let var = self.var.name(ctx.db()).display(ctx.db(), edition).to_string();
let var_name = make::name(&var);
let pat = match self.kind() {
ParamKind::MutValue => make::ident_pat(false, true, var_name),
@ -520,7 +538,7 @@ impl Param {
}
impl TryKind {
fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>) -> Option<TryKind> {
fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>, edition: Edition) -> Option<TryKind> {
if ty.is_unknown() {
// We favour Result for `expr?`
return Some(TryKind::Result { ty });
@ -529,7 +547,7 @@ impl TryKind {
let name = adt.name(ctx.db());
// FIXME: use lang items to determine if it is a std type or user defined
// E.g. if the user happens to define a type named `Option`, we would get a false positive
let name = &name.display(ctx.db()).to_string();
let name = &name.display(ctx.db(), edition).to_string();
match name.as_str() {
"Option" => Some(TryKind::Option),
"Result" => Some(TryKind::Result { ty }),
@ -816,7 +834,7 @@ impl FunctionBody {
.descendants_with_tokens()
.filter_map(SyntaxElement::into_token)
.filter(|it| matches!(it.kind(), SyntaxKind::IDENT | T![self]))
.flat_map(|t| sema.descend_into_macros(DescendPreference::None, t))
.flat_map(|t| sema.descend_into_macros_exact(t))
.for_each(|t| add_name_if_local(t.parent().and_then(ast::NameRef::cast)));
}
}
@ -828,6 +846,7 @@ impl FunctionBody {
fn analyze_container(
&self,
sema: &Semantics<'_, RootDatabase>,
edition: Edition,
) -> Option<(ContainerInfo, bool)> {
let mut ancestors = self.parent()?.ancestors();
let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
@ -927,6 +946,7 @@ impl FunctionBody {
ret_type: ty,
generic_param_lists,
where_clauses,
edition,
},
contains_tail_expr,
))
@ -1015,7 +1035,7 @@ impl FunctionBody {
let kind = match (try_expr, ret_expr, break_expr, continue_expr) {
(Some(_), _, None, None) => {
let ret_ty = container_info.ret_type.clone()?;
let kind = TryKind::of_ty(ret_ty, ctx)?;
let kind = TryKind::of_ty(ret_ty, ctx, container_info.edition)?;
Some(FlowKind::Try { kind })
}
@ -1397,7 +1417,7 @@ fn fixup_call_site(builder: &mut SourceChangeBuilder, body: &FunctionBody) {
fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> SyntaxNode {
let ret_ty = fun.return_type(ctx);
let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx)));
let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx, fun.mods.edition)));
let name = fun.name.clone();
let mut call_expr = if fun.self_param.is_some() {
let self_arg = make::expr_path(make::ext::ident_path("self"));
@ -1420,13 +1440,13 @@ fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> Sy
[] => None,
[var] => {
let name = var.local.name(ctx.db());
let name = make::name(&name.display(ctx.db()).to_string());
let name = make::name(&name.display(ctx.db(), fun.mods.edition).to_string());
Some(ast::Pat::IdentPat(make::ident_pat(false, var.mut_usage_outside_body, name)))
}
vars => {
let binding_pats = vars.iter().map(|var| {
let name = var.local.name(ctx.db());
let name = make::name(&name.display(ctx.db()).to_string());
let name = make::name(&name.display(ctx.db(), fun.mods.edition).to_string());
make::ident_pat(false, var.mut_usage_outside_body, name).into()
});
Some(ast::Pat::TuplePat(make::tuple_pat(binding_pats)))
@ -1569,8 +1589,8 @@ impl FlowHandler {
}
}
fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local) -> ast::Expr {
let name = var.name(ctx.db()).display(ctx.db()).to_string();
fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local, edition: Edition) -> ast::Expr {
let name = var.name(ctx.db()).display(ctx.db(), edition).to_string();
make::expr_path(make::ext::ident_path(&name))
}
@ -1581,7 +1601,7 @@ fn format_function(
old_indent: IndentLevel,
) -> ast::Fn {
let fun_name = make::name(&fun.name.text());
let params = fun.make_param_list(ctx, module);
let params = fun.make_param_list(ctx, module, fun.mods.edition);
let ret_ty = fun.make_ret_ty(ctx, module);
let body = make_body(ctx, old_indent, fun);
let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
@ -1597,6 +1617,7 @@ fn format_function(
fun.control_flow.is_async,
fun.mods.is_const,
fun.control_flow.is_unsafe,
false,
)
}
@ -1707,9 +1728,14 @@ impl Function {
type_params_in_descendant_paths.chain(type_params_in_params).collect()
}
fn make_param_list(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::ParamList {
fn make_param_list(
&self,
ctx: &AssistContext<'_>,
module: hir::Module,
edition: Edition,
) -> ast::ParamList {
let self_param = self.self_param.clone();
let params = self.params.iter().map(|param| param.to_param(ctx, module));
let params = self.params.iter().map(|param| param.to_param(ctx, module, edition));
make::param_list(self_param, params)
}
@ -1842,10 +1868,12 @@ fn make_body(ctx: &AssistContext<'_>, old_indent: IndentLevel, fun: &Function) -
None => match fun.outliving_locals.as_slice() {
[] => {}
[var] => {
tail_expr = Some(path_expr_from_local(ctx, var.local));
tail_expr = Some(path_expr_from_local(ctx, var.local, fun.mods.edition));
}
vars => {
let exprs = vars.iter().map(|var| path_expr_from_local(ctx, var.local));
let exprs = vars
.iter()
.map(|var| path_expr_from_local(ctx, var.local, fun.mods.edition));
let expr = make::expr_tuple(exprs);
tail_expr = Some(expr);
}
@ -5622,7 +5650,7 @@ fn func<T: Debug>(i: Struct<'_, T>) {
fun_name(i);
}
fn $0fun_name(i: Struct<T>) {
fn $0fun_name(i: Struct<'_, T>) {
foo(i);
}
"#,

View File

@ -1,7 +1,7 @@
use std::iter;
use either::Either;
use hir::{Module, ModuleDef, Name, Variant};
use hir::{HasCrate, Module, ModuleDef, Name, Variant};
use ide_db::{
defs::Definition,
helpers::mod_path_to_ast,
@ -16,7 +16,7 @@ use syntax::{
self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasAttrs, HasGenericParams,
HasName, HasVisibility,
},
match_ast, ted, SyntaxElement,
match_ast, ted, Edition, SyntaxElement,
SyntaxKind::*,
SyntaxNode, T,
};
@ -58,6 +58,7 @@ pub(crate) fn extract_struct_from_enum_variant(
"Extract struct from enum variant",
target,
|builder| {
let edition = enum_hir.krate(ctx.db()).edition(ctx.db());
let variant_hir_name = variant_hir.name(ctx.db());
let enum_module_def = ModuleDef::from(enum_hir);
let usages = Definition::Variant(variant_hir).usages(&ctx.sema).all();
@ -82,7 +83,7 @@ pub(crate) fn extract_struct_from_enum_variant(
references,
);
processed.into_iter().for_each(|(path, node, import)| {
apply_references(ctx.config.insert_use, path, node, import)
apply_references(ctx.config.insert_use, path, node, import, edition)
});
}
builder.edit_file(ctx.file_id());
@ -98,7 +99,7 @@ pub(crate) fn extract_struct_from_enum_variant(
references,
);
processed.into_iter().for_each(|(path, node, import)| {
apply_references(ctx.config.insert_use, path, node, import)
apply_references(ctx.config.insert_use, path, node, import, edition)
});
}
@ -169,7 +170,7 @@ fn existing_definition(db: &RootDatabase, variant_name: &ast::Name, variant: &Va
),
_ => false,
})
.any(|(name, _)| name.display(db).to_string() == variant_name.to_string())
.any(|(name, _)| name.eq_ident(variant_name.text().as_str()))
}
fn extract_generic_params(
@ -359,9 +360,10 @@ fn apply_references(
segment: ast::PathSegment,
node: SyntaxNode,
import: Option<(ImportScope, hir::ModPath)>,
edition: Edition,
) {
if let Some((scope, path)) = import {
insert_use(&scope, mod_path_to_ast(&path), &insert_use_cfg);
insert_use(&scope, mod_path_to_ast(&path, edition), &insert_use_cfg);
}
// deep clone to prevent cycle
let path = make::path_from_segments(iter::once(segment.clone_subtree()), false);

View File

@ -20,7 +20,7 @@ use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
// ->
// ```
// fn main() {
// let $0var_name = (1 + 2);
// let $0var_name = 1 + 2;
// var_name * 4;
// }
// ```
@ -58,9 +58,30 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
}
let parent = to_extract.syntax().parent().and_then(ast::Expr::cast);
let needs_adjust = parent
.as_ref()
.map_or(false, |it| matches!(it, ast::Expr::FieldExpr(_) | ast::Expr::MethodCallExpr(_)));
// Any expression that autoderefs may need adjustment.
let mut needs_adjust = parent.as_ref().map_or(false, |it| match it {
ast::Expr::FieldExpr(_)
| ast::Expr::MethodCallExpr(_)
| ast::Expr::CallExpr(_)
| ast::Expr::AwaitExpr(_) => true,
ast::Expr::IndexExpr(index) if index.base().as_ref() == Some(&to_extract) => true,
_ => false,
});
let mut to_extract_no_ref = peel_parens(to_extract.clone());
let needs_ref = needs_adjust
&& match &to_extract_no_ref {
ast::Expr::FieldExpr(_)
| ast::Expr::IndexExpr(_)
| ast::Expr::MacroExpr(_)
| ast::Expr::ParenExpr(_)
| ast::Expr::PathExpr(_) => true,
ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(ast::UnaryOp::Deref) => {
to_extract_no_ref = prefix.expr()?;
needs_adjust = false;
false
}
_ => false,
};
let anchor = Anchor::from(&to_extract)?;
let target = to_extract.syntax().text_range();
@ -87,22 +108,28 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
Some(ast::Expr::RefExpr(expr)) if expr.mut_token().is_some() => {
make::ident_pat(false, true, make::name(&var_name))
}
_ if needs_adjust
&& !needs_ref
&& ty.as_ref().is_some_and(|ty| ty.is_mutable_reference()) =>
{
make::ident_pat(false, true, make::name(&var_name))
}
_ => make::ident_pat(false, false, make::name(&var_name)),
};
let to_extract = match ty.as_ref().filter(|_| needs_adjust) {
let to_extract_no_ref = match ty.as_ref().filter(|_| needs_ref) {
Some(receiver_type) if receiver_type.is_mutable_reference() => {
make::expr_ref(to_extract, true)
make::expr_ref(to_extract_no_ref, true)
}
Some(receiver_type) if receiver_type.is_reference() => {
make::expr_ref(to_extract, false)
make::expr_ref(to_extract_no_ref, false)
}
_ => to_extract,
_ => to_extract_no_ref,
};
let expr_replace = edit.make_syntax_mut(expr_replace);
let let_stmt =
make::let_stmt(ident_pat.into(), None, Some(to_extract)).clone_for_update();
make::let_stmt(ident_pat.into(), None, Some(to_extract_no_ref)).clone_for_update();
let name_expr = make::expr_path(make::ext::ident_path(&var_name)).clone_for_update();
match anchor {
@ -202,6 +229,14 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
)
}
fn peel_parens(mut expr: ast::Expr) -> ast::Expr {
while let ast::Expr::ParenExpr(parens) = &expr {
let Some(expr_inside) = parens.expr() else { break };
expr = expr_inside;
}
expr
}
/// Check whether the node is a valid expression which can be extracted to a variable.
/// In general that's true for any expression, but in some cases that would produce invalid code.
fn valid_target_expr(node: SyntaxNode) -> Option<ast::Expr> {
@ -1220,6 +1255,45 @@ fn foo(s: &S) {
);
}
#[test]
fn test_extract_var_index_deref() {
check_assist(
extract_variable,
r#"
//- minicore: index
struct X;
impl std::ops::Index<usize> for X {
type Output = i32;
fn index(&self) -> &Self::Output { 0 }
}
struct S {
sub: X
}
fn foo(s: &S) {
$0s.sub$0[0];
}"#,
r#"
struct X;
impl std::ops::Index<usize> for X {
type Output = i32;
fn index(&self) -> &Self::Output { 0 }
}
struct S {
sub: X
}
fn foo(s: &S) {
let $0sub = &s.sub;
sub[0];
}"#,
);
}
#[test]
fn test_extract_var_reference_parameter_deep_nesting() {
check_assist(
@ -1461,4 +1535,60 @@ fn foo() {
}"#,
);
}
#[test]
fn generates_no_ref_on_calls() {
check_assist(
extract_variable,
r#"
struct S;
impl S {
fn do_work(&mut self) {}
}
fn bar() -> S { S }
fn foo() {
$0bar()$0.do_work();
}"#,
r#"
struct S;
impl S {
fn do_work(&mut self) {}
}
fn bar() -> S { S }
fn foo() {
let mut $0bar = bar();
bar.do_work();
}"#,
);
}
#[test]
fn generates_no_ref_for_deref() {
check_assist(
extract_variable,
r#"
struct S;
impl S {
fn do_work(&mut self) {}
}
fn bar() -> S { S }
fn foo() {
let v = &mut &mut bar();
$0(**v)$0.do_work();
}
"#,
r#"
struct S;
impl S {
fn do_work(&mut self) {}
}
fn bar() -> S { S }
fn foo() {
let v = &mut &mut bar();
let $0s = *v;
s.do_work();
}
"#,
);
}
}
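The new `needs_adjust`/`needs_ref` logic above decides, from the extracted expression's position and shape, whether to borrow it (`&`/`&mut`) or move it into a (possibly `mut`) binding. A condensed model of that decision, mirroring the match arms in the hunk (the real code also special-cases `*expr` prefix derefs and mutability):

    // Expression shapes that matter for the decision; a simplification of `ast::Expr`.
    enum ExprShape {
        FieldAccess,
        Index,
        MacroCall,
        Paren,
        Path,
        Call,
    }

    // `parent_autoderefs` models "the extracted expression sits in a field /
    // method-call / call / await / index-base position" (the `needs_adjust`
    // flag); place-like shapes in such a position are extracted by reference.
    fn needs_ref(parent_autoderefs: bool, extracted: &ExprShape) -> bool {
        parent_autoderefs
            && matches!(
                extracted,
                ExprShape::FieldAccess
                    | ExprShape::Index
                    | ExprShape::MacroCall
                    | ExprShape::Paren
                    | ExprShape::Path
            )
    }

    fn main() {
        // `$0s.sub$0[0]` -> a field access used as an index base: extract as `&s.sub`.
        assert!(needs_ref(true, &ExprShape::FieldAccess));
        // `$0bar()$0.do_work()` -> a call result: extract by value (`let mut bar = bar();`).
        assert!(!needs_ref(true, &ExprShape::Call));
        // Not in an autoderef position at all -> a plain `let`.
        assert!(!needs_ref(false, &ExprShape::Path));
    }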

View File

@ -45,8 +45,9 @@ pub(crate) fn fill_record_pattern_fields(acc: &mut Assists, ctx: &AssistContext<
let new_field_list =
make::record_pat_field_list(old_field_list.fields(), None).clone_for_update();
for (f, _) in missing_fields.iter() {
let edition = ctx.sema.scope(record_pat.syntax())?.krate().edition(ctx.db());
let field = make::record_pat_field_shorthand(make::name_ref(
&f.name(ctx.sema.db).display_no_db().to_smolstr(),
&f.name(ctx.sema.db).display_no_db(edition).to_smolstr(),
));
new_field_list.add_field(field.clone_for_update());
}

View File

@ -4,7 +4,7 @@ use hir::{
use ide_db::FileId;
use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},
AstNode, TextRange, ToSmolStr,
AstNode, TextRange,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -48,7 +48,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
let (_, def) = module
.scope(ctx.db(), None)
.into_iter()
.find(|(name, _)| name.display_no_db().to_smolstr() == name_ref.text().as_str())?;
.find(|(name, _)| name.eq_ident(name_ref.text().as_str()))?;
let ScopeDef::ModuleDef(def) = def else {
return None;
};
@ -71,7 +71,10 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>)
let assist_label = match target_name {
None => format!("Change visibility to {missing_visibility}"),
Some(name) => {
format!("Change visibility of {} to {missing_visibility}", name.display(ctx.db()))
format!(
"Change visibility of {} to {missing_visibility}",
name.display(ctx.db(), current_module.krate().edition(ctx.db()))
)
}
};
@ -92,6 +95,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
let current_module = ctx.sema.scope(record_field.syntax())?.module();
let current_edition = current_module.krate().edition(ctx.db());
let visibility = record_field_def.visibility(ctx.db());
if visibility.is_visible_from(ctx.db(), current_module.into()) {
return None;
@ -123,8 +127,8 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>
let target_name = record_field_def.name(ctx.db());
let assist_label = format!(
"Change visibility of {}.{} to {missing_visibility}",
parent_name.display(ctx.db()),
target_name.display(ctx.db())
parent_name.display(ctx.db(), current_edition),
target_name.display(ctx.db(), current_edition)
);
acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |edit| {

View File

@ -51,6 +51,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let strukt = ctx.find_node_at_offset::<ast::Struct>()?;
let strukt_name = strukt.name()?;
let current_module = ctx.sema.scope(strukt.syntax())?.module();
let current_edition = current_module.krate().edition(ctx.db());
let (field_name, field_ty, target) = match ctx.find_node_at_offset::<ast::RecordField>() {
Some(field) => {
@ -89,7 +90,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
methods.sort_by(|(a, _), (b, _)| a.cmp(b));
for (name, method) in methods {
let adt = ast::Adt::Struct(strukt.clone());
let name = name.display(ctx.db()).to_string();
let name = name.display(ctx.db(), current_edition).to_string();
// if `find_struct_impl` returns None, that means that a function named `name` already exists.
let Some(impl_def) = find_struct_impl(ctx, &adt, std::slice::from_ref(&name)) else {
continue;
@ -121,6 +122,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
let is_async = method_source.async_token().is_some();
let is_const = method_source.const_token().is_some();
let is_unsafe = method_source.unsafe_token().is_some();
let is_gen = method_source.gen_token().is_some();
let fn_name = make::name(&name);
@ -153,6 +155,7 @@ pub(crate) fn generate_delegate_methods(acc: &mut Assists, ctx: &AssistContext<'
is_async,
is_const,
is_unsafe,
is_gen,
)
.clone_for_update();

View File

@ -22,7 +22,7 @@ use syntax::{
WherePred,
},
ted::{self, Position},
AstNode, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr,
AstNode, Edition, NodeOrToken, SmolStr, SyntaxKind, ToSmolStr,
};
// Assist: generate_delegate_trait
@ -109,6 +109,7 @@ struct Field {
ty: ast::Type,
range: syntax::TextRange,
impls: Vec<Delegee>,
edition: Edition,
}
impl Field {
@ -119,6 +120,7 @@ impl Field {
let db = ctx.sema.db;
let module = ctx.sema.file_to_module_def(ctx.file_id())?;
let edition = module.krate().edition(ctx.db());
let (name, range, ty) = match f {
Either::Left(f) => {
@ -147,7 +149,7 @@ impl Field {
}
}
Some(Field { name, ty, range, impls })
Some(Field { name, ty, range, impls, edition })
}
}
@ -163,18 +165,18 @@ enum Delegee {
}
impl Delegee {
fn signature(&self, db: &dyn HirDatabase) -> String {
fn signature(&self, db: &dyn HirDatabase, edition: Edition) -> String {
let mut s = String::new();
let (Delegee::Bound(it) | Delegee::Impls(it, _)) = self;
for m in it.module(db).path_to_root(db).iter().rev() {
if let Some(name) = m.name(db) {
s.push_str(&format!("{}::", name.display_no_db().to_smolstr()));
s.push_str(&format!("{}::", name.display_no_db(edition).to_smolstr()));
}
}
s.push_str(&it.name(db).display_no_db().to_smolstr());
s.push_str(&it.name(db).display_no_db(edition).to_smolstr());
s
}
}
@ -212,9 +214,11 @@ impl Struct {
// if self.hir_ty.impls_trait(db, trait_, &[]) {
// continue;
// }
let signature = delegee.signature(db);
let signature = delegee.signature(db, field.edition);
let Some(delegate) = generate_impl(ctx, self, &field.ty, &field.name, delegee) else {
let Some(delegate) =
generate_impl(ctx, self, &field.ty, &field.name, delegee, field.edition)
else {
continue;
};
@ -240,6 +244,7 @@ fn generate_impl(
field_ty: &ast::Type,
field_name: &str,
delegee: &Delegee,
edition: Edition,
) -> Option<ast::Impl> {
let delegate: ast::Impl;
let db = ctx.db();
@ -259,7 +264,7 @@ fn generate_impl(
strukt_params.clone(),
strukt_params.map(|params| params.to_generic_args()),
delegee.is_auto(db),
make::ty(&delegee.name(db).display_no_db().to_smolstr()),
make::ty(&delegee.name(db).display_no_db(edition).to_smolstr()),
strukt_ty,
bound_def.where_clause(),
ast_strukt.where_clause(),
@ -350,7 +355,7 @@ fn generate_impl(
let type_gen_args = strukt_params.clone().map(|params| params.to_generic_args());
let path_type =
make::ty(&trait_.name(db).display_no_db().to_smolstr()).clone_for_update();
make::ty(&trait_.name(db).display_no_db(edition).to_smolstr()).clone_for_update();
transform_impl(ctx, ast_strukt, &old_impl, &transform_args, path_type.syntax())?;
// 3) Generate delegate trait impl
@ -735,6 +740,7 @@ fn func_assoc_item(
item.async_token().is_some(),
item.const_token().is_some(),
item.unsafe_token().is_some(),
item.gen_token().is_some(),
)
.clone_for_update();

View File

@ -4,7 +4,7 @@ use hir::{ModPath, ModuleDef};
use ide_db::{famous_defs::FamousDefs, RootDatabase};
use syntax::{
ast::{self, HasName},
AstNode, SyntaxNode,
AstNode, Edition, SyntaxNode,
};
use crate::{
@ -77,6 +77,7 @@ fn generate_record_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
field_name.syntax(),
deref_type_to_generate,
trait_path,
module.krate().edition(ctx.db()),
)
},
)
@ -117,6 +118,7 @@ fn generate_tuple_deref(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()
field_list_index,
deref_type_to_generate,
trait_path,
module.krate().edition(ctx.db()),
)
},
)
@ -130,6 +132,7 @@ fn generate_edit(
field_name: impl Display,
deref_type: DerefType,
trait_path: ModPath,
edition: Edition,
) {
let start_offset = strukt.syntax().text_range().end();
let impl_code = match deref_type {
@ -147,8 +150,11 @@ fn generate_edit(
),
};
let strukt_adt = ast::Adt::Struct(strukt);
let deref_impl =
generate_trait_impl_text(&strukt_adt, &trait_path.display(db).to_string(), &impl_code);
let deref_impl = generate_trait_impl_text(
&strukt_adt,
&trait_path.display(db, edition).to_string(),
&impl_code,
);
edit.insert(start_offset, deref_impl);
}

View File

@ -5,7 +5,7 @@ use stdx::{format_to, to_lower_snake_case};
use syntax::{
algo::skip_whitespace_token,
ast::{self, edit::IndentLevel, HasDocComments, HasGenericArgs, HasName},
match_ast, AstNode, AstToken,
match_ast, AstNode, AstToken, Edition,
};
use crate::assist_context::{AssistContext, Assists};
@ -139,7 +139,8 @@ fn make_example_for_fn(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<St
let mut example = String::new();
let use_path = build_path(ast_func, ctx)?;
let edition = ctx.sema.scope(ast_func.syntax())?.krate().edition(ctx.db());
let use_path = build_path(ast_func, ctx, edition)?;
let is_unsafe = ast_func.unsafe_token().is_some();
let param_list = ast_func.param_list()?;
let ref_mut_params = ref_mut_params(&param_list);
@ -472,13 +473,13 @@ fn string_vec_from(string_array: &[&str]) -> Vec<String> {
}
/// Helper function to build the path of the module in which the node is located
fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>) -> Option<String> {
fn build_path(ast_func: &ast::Fn, ctx: &AssistContext<'_>, edition: Edition) -> Option<String> {
let crate_name = crate_name(ast_func, ctx)?;
let leaf = self_partial_type(ast_func)
.or_else(|| ast_func.name().map(|n| n.to_string()))
.unwrap_or_else(|| "*".into());
let module_def: ModuleDef = ctx.sema.to_def(ast_func)?.module(ctx.db()).into();
match module_def.canonical_path(ctx.db()) {
match module_def.canonical_path(ctx.db(), edition) {
Some(path) => Some(format!("{crate_name}::{path}::{leaf}")),
None => Some(format!("{crate_name}::{leaf}")),
}

View File

@ -17,7 +17,7 @@ use syntax::{
self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, BlockExpr, CallExpr,
HasArgList, HasGenericParams, HasModuleItem, HasTypeBounds,
},
ted, SyntaxKind, SyntaxNode, TextRange, T,
ted, Edition, SyntaxKind, SyntaxNode, TextRange, T,
};
use crate::{
@ -175,6 +175,7 @@ fn add_func_to_accumulator(
edit.edit_file(file);
let target = function_builder.target.clone();
let edition = function_builder.target_edition;
let func = function_builder.render(ctx.config.snippet_cap, edit);
if let Some(adt) =
@ -183,7 +184,7 @@ fn add_func_to_accumulator(
{
let name = make::ty_path(make::ext::ident_path(&format!(
"{}",
adt.name(ctx.db()).display(ctx.db())
adt.name(ctx.db()).display(ctx.db(), edition)
)));
// FIXME: adt may have generic params.
@ -222,6 +223,7 @@ struct FunctionBuilder {
should_focus_return_type: bool,
visibility: Visibility,
is_async: bool,
target_edition: Edition,
}
impl FunctionBuilder {
@ -237,6 +239,7 @@ impl FunctionBuilder {
) -> Option<Self> {
let target_module =
target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
let target_edition = target_module.krate().edition(ctx.db());
let current_module = ctx.sema.scope(call.syntax())?.module();
let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
@ -258,7 +261,9 @@ impl FunctionBuilder {
// If generated function has the name "new" and is an associated function, we generate fn body
// as a constructor and assume a "Self" return type.
if let Some(body) = make_fn_body_as_new_function(ctx, &fn_name.text(), adt_info) {
if let Some(body) =
make_fn_body_as_new_function(ctx, &fn_name.text(), adt_info, target_edition)
{
ret_type = Some(make::ret_type(make::ty_path(make::ext::ident_path("Self"))));
should_focus_return_type = false;
fn_body = body;
@ -288,6 +293,7 @@ impl FunctionBuilder {
should_focus_return_type,
visibility,
is_async,
target_edition,
})
}
@ -299,6 +305,8 @@ impl FunctionBuilder {
target_module: Module,
target: GeneratedFunctionTarget,
) -> Option<Self> {
let target_edition = target_module.krate().edition(ctx.db());
let current_module = ctx.sema.scope(call.syntax())?.module();
let visibility = calculate_necessary_visibility(current_module, target_module, ctx);
@ -336,6 +344,7 @@ impl FunctionBuilder {
should_focus_return_type,
visibility,
is_async,
target_edition,
})
}
@ -356,6 +365,7 @@ impl FunctionBuilder {
self.is_async,
false, // FIXME : const and unsafe are not handled yet.
false,
false,
)
.clone_for_update();
@ -425,6 +435,7 @@ fn make_fn_body_as_new_function(
ctx: &AssistContext<'_>,
fn_name: &str,
adt_info: &Option<AdtInfo>,
edition: Edition,
) -> Option<ast::BlockExpr> {
if fn_name != "new" {
return None;
@ -441,7 +452,10 @@ fn make_fn_body_as_new_function(
.iter()
.map(|field| {
make::record_expr_field(
make::name_ref(&format!("{}", field.name(ctx.db()).display(ctx.db()))),
make::name_ref(&format!(
"{}",
field.name(ctx.db()).display(ctx.db(), edition)
)),
Some(placeholder_expr.clone()),
)
})
@ -1102,8 +1116,9 @@ fn fn_arg_type(
if ty.is_reference() || ty.is_mutable_reference() {
let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
let target_edition = target_module.krate().edition(ctx.db());
convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
.map(|conversion| conversion.convert_type(ctx.db()).to_string())
.map(|conversion| conversion.convert_type(ctx.db(), target_edition).to_string())
.or_else(|| ty.display_source_code(ctx.db(), target_module.into(), true).ok())
} else {
ty.display_source_code(ctx.db(), target_module.into(), true).ok()

View File

@ -233,7 +233,7 @@ fn generate_getter_from_info(
.map(|conversion| {
cov_mark::hit!(convert_reference_type);
(
conversion.convert_type(ctx.db()),
conversion.convert_type(ctx.db(), krate.edition(ctx.db())),
conversion.getter(record_field_info.field_name.to_string()),
)
})
@ -261,7 +261,19 @@ fn generate_getter_from_info(
let ret_type = Some(make::ret_type(ty));
let body = make::block_expr([], Some(body));
make::fn_(strukt.visibility(), fn_name, None, None, params, body, ret_type, false, false, false)
make::fn_(
strukt.visibility(),
fn_name,
None,
None,
params,
body,
ret_type,
false,
false,
false,
false,
)
}
fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldInfo) -> ast::Fn {
@ -285,7 +297,19 @@ fn generate_setter_from_info(info: &AssistInfo, record_field_info: &RecordFieldI
let body = make::block_expr([assign_stmt.into()], None);
// Make the setter fn
make::fn_(strukt.visibility(), fn_name, None, None, params, body, None, false, false, false)
make::fn_(
strukt.visibility(),
fn_name,
None,
None,
params,
body,
None,
false,
false,
false,
false,
)
}
fn extract_and_parse(

View File

@ -64,10 +64,13 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
ctx.config.import_path_config(),
)?;
let edition = current_module.krate().edition(ctx.db());
let expr = use_trivial_constructor(
ctx.sema.db,
ide_db::helpers::mod_path_to_ast(&type_path),
ide_db::helpers::mod_path_to_ast(&type_path, edition),
&ty,
edition,
)?;
Some(make::record_expr_field(make::name_ref(&name.text()), Some(expr)))
@ -112,6 +115,7 @@ pub(crate) fn generate_new(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
false,
false,
false,
false,
)
.clone_for_update();
fn_.indent(1.into());

Some files were not shown because too many files have changed in this diff