Auto merge of #130812 - lnicola:sync-from-ra, r=lnicola

Subtree update of `rust-analyzer`

r? `@ghost`
This commit is contained in:
bors 2024-09-25 14:09:47 +00:00
commit b5117538e9
247 changed files with 14379 additions and 22640 deletions

View File

@ -15,6 +15,7 @@ extend-ignore-re = [
'"flate2"',
"raison d'être",
"inout",
"INOUT",
"optin"
]

View File

@ -96,6 +96,15 @@ version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de"
[[package]]
name = "borsh"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6362ed55def622cddc70a4746a68554d7b687713770de539e59a739b249f8ed"
dependencies = [
"cfg_aliases 0.2.1",
]
[[package]]
name = "byteorder"
version = "1.5.0"
@ -167,6 +176,12 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chalk-derive"
version = "0.98.0"
@ -982,7 +997,7 @@ checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24"
[[package]]
name = "lsp-server"
version = "0.7.6"
version = "0.7.7"
dependencies = [
"crossbeam-channel",
"ctrlc",
@ -994,9 +1009,9 @@ dependencies = [
[[package]]
name = "lsp-server"
version = "0.7.6"
version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095"
checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9"
dependencies = [
"crossbeam-channel",
"log",
@ -1029,8 +1044,10 @@ version = "0.0.0"
dependencies = [
"arrayvec",
"cov-mark",
"expect-test",
"intern",
"parser",
"ra-ap-rustc_lexer",
"rustc-hash",
"smallvec",
"span",
@ -1113,7 +1130,7 @@ checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4"
dependencies = [
"bitflags 2.6.0",
"cfg-if",
"cfg_aliases",
"cfg_aliases 0.1.1",
"libc",
]
@ -1468,9 +1485,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_abi"
version = "0.63.0"
version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b011c39d409940a890414e3a7b239762ac16d88029ad71b050a8374831b93790"
checksum = "2a8cb51bb4534ac3e9c74f1d9bd90e607e60f94f734b1cf1a66f753ad2af6ed7"
dependencies = [
"bitflags 2.6.0",
"ra-ap-rustc_index",
@ -1479,9 +1496,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index"
version = "0.63.0"
version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9027acdee649b0b27eb10b7db5be833efee3362d394935c5eed8f0745a9d43ce"
checksum = "8b640fba2b7ef4f875459e2e76daeb846ef341d1d376fa758962ac0eba79bce6"
dependencies = [
"arrayvec",
"ra-ap-rustc_index_macros",
@ -1490,9 +1507,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_index_macros"
version = "0.63.0"
version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "540b86dc0384141ac8e825fc2874cd44bffd4277d99d8ec63ee416f1a98d5997"
checksum = "faef502419ba5ac9d3079b1a835c6e5b4e605388254bbe55eb5683936f541be9"
dependencies = [
"proc-macro2",
"quote",
@ -1501,9 +1518,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_lexer"
version = "0.63.0"
version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bdf98bb457b47b9ae4aeebf867d0ca440c86925e0b6381658c4a02589748c9d"
checksum = "5da7f9d533b8d5be6704558da741ff20b982ad4647b1e9e08632853e4fecf9d5"
dependencies = [
"unicode-properties",
"unicode-xid",
@ -1511,9 +1528,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_parse_format"
version = "0.63.0"
version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8fe3556ab6311bb775220563a300e2bf62ec56404521fe0c511a583937683d5"
checksum = "94389cf81c651b1bda9ac45d3de6a2d851bb6fd4cb893875daa44e419c94205f"
dependencies = [
"ra-ap-rustc_index",
"ra-ap-rustc_lexer",
@ -1521,9 +1538,9 @@ dependencies = [
[[package]]
name = "ra-ap-rustc_pattern_analysis"
version = "0.63.0"
version = "0.68.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1709080fdeb5db630e1c2644026c2962aaa32416cd92f0190c04b0c21e114b91"
checksum = "3679d8dd0114ed6000918309f843782738e51c99d8e4baec0d0f706e4d948819"
dependencies = [
"ra-ap-rustc_index",
"rustc-hash",
@ -1636,7 +1653,7 @@ dependencies = [
"intern",
"itertools",
"load-cargo",
"lsp-server 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-server 0.7.7 (registry+https://github.com/rust-lang/crates.io-index)",
"lsp-types",
"memchr",
"mimalloc",
@ -1843,10 +1860,11 @@ checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
[[package]]
name = "smol_str"
version = "0.2.2"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd538fb6910ac1099850255cf94a94df6551fbdd602454387d0adb2d1ca6dead"
checksum = "66eaf762c5af19db3108300515c8aa7a50efc90ff745f4c62288052ebf9fdd25"
dependencies = [
"borsh",
"serde",
]
@ -2607,6 +2625,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"directories",
"either",
"flate2",
"itertools",
"proc-macro2",

View File

@ -85,11 +85,11 @@ tt = { path = "./crates/tt", version = "0.0.0" }
vfs-notify = { path = "./crates/vfs-notify", version = "0.0.0" }
vfs = { path = "./crates/vfs", version = "0.0.0" }
ra-ap-rustc_lexer = { version = "0.63.0", default-features = false }
ra-ap-rustc_parse_format = { version = "0.63.0", default-features = false }
ra-ap-rustc_index = { version = "0.63.0", default-features = false }
ra-ap-rustc_abi = { version = "0.63.0", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.63.0", default-features = false }
ra-ap-rustc_lexer = { version = "0.68.0", default-features = false }
ra-ap-rustc_parse_format = { version = "0.68.0", default-features = false }
ra-ap-rustc_index = { version = "0.68.0", default-features = false }
ra-ap-rustc_abi = { version = "0.68.0", default-features = false }
ra-ap-rustc_pattern_analysis = { version = "0.68.0", default-features = false }
# local crates that aren't published to crates.io. These should not have versions.
test-fixture = { path = "./crates/test-fixture" }
@ -145,7 +145,7 @@ smallvec = { version = "1.10.0", features = [
"union",
"const_generics",
] }
smol_str = "0.2.1"
smol_str = "0.3.1"
snap = "1.1.0"
text-size = "1.1.1"
tracing = "0.1.40"
@ -185,6 +185,7 @@ style = { level = "warn", priority = -1 }
suspicious = { level = "warn", priority = -1 }
## allow following lints
too_long_first_doc_paragraph = "allow"
# subjective
single_match = "allow"
# () makes a fine error in most cases

View File

@ -3,11 +3,15 @@
use std::fmt;
use rustc_hash::FxHashMap;
use salsa::Durability;
use triomphe::Arc;
use vfs::FileId;
use crate::{CrateGraph, SourceDatabaseFileInputExt, SourceRoot, SourceRootDatabase, SourceRootId};
use crate::{
CrateGraph, CrateId, CrateWorkspaceData, SourceDatabaseFileInputExt, SourceRoot,
SourceRootDatabase, SourceRootId,
};
/// Encapsulate a bunch of raw `.set` calls on the database.
#[derive(Default)]
@ -15,6 +19,7 @@ pub struct FileChange {
pub roots: Option<Vec<SourceRoot>>,
pub files_changed: Vec<(FileId, Option<String>)>,
pub crate_graph: Option<CrateGraph>,
pub ws_data: Option<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>,
}
impl fmt::Debug for FileChange {
@ -50,6 +55,10 @@ impl FileChange {
self.crate_graph = Some(graph);
}
pub fn set_ws_data(&mut self, data: FxHashMap<CrateId, Arc<CrateWorkspaceData>>) {
self.ws_data = Some(data);
}
pub fn apply(self, db: &mut dyn SourceRootDatabase) {
let _p = tracing::info_span!("FileChange::apply").entered();
if let Some(roots) = self.roots {
@ -74,6 +83,9 @@ impl FileChange {
if let Some(crate_graph) = self.crate_graph {
db.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH);
}
if let Some(data) = self.ws_data {
db.set_crate_workspace_data_with_durability(Arc::new(data), Durability::HIGH);
}
}
}

View File

@ -374,37 +374,6 @@ impl CrateGraph {
self.arena.alloc(data)
}
/// Remove the crate from crate graph. If any crates depend on this crate, the dependency would be replaced
/// with the second input.
pub fn remove_and_replace(
&mut self,
id: CrateId,
replace_with: CrateId,
) -> Result<(), CyclicDependenciesError> {
for (x, data) in self.arena.iter() {
if x == id {
continue;
}
for edge in &data.dependencies {
if edge.crate_id == id {
self.check_cycle_after_dependency(edge.crate_id, replace_with)?;
}
}
}
// if everything was ok, start to replace
for (x, data) in self.arena.iter_mut() {
if x == id {
continue;
}
for edge in &mut data.dependencies {
if edge.crate_id == id {
edge.crate_id = replace_with;
}
}
}
Ok(())
}
pub fn add_dep(
&mut self,
from: CrateId,
@ -412,26 +381,17 @@ impl CrateGraph {
) -> Result<(), CyclicDependenciesError> {
let _p = tracing::info_span!("add_dep").entered();
self.check_cycle_after_dependency(from, dep.crate_id)?;
self.arena[from].add_dep(dep);
Ok(())
}
/// Check if adding a dep from `from` to `to` creates a cycle. To figure
/// that out, look for a path in the *opposite* direction, from `to` to
/// `from`.
fn check_cycle_after_dependency(
&self,
from: CrateId,
to: CrateId,
) -> Result<(), CyclicDependenciesError> {
if let Some(path) = self.find_path(&mut FxHashSet::default(), to, from) {
// Check if adding a dep from `from` to `to` creates a cycle. To figure
// that out, look for a path in the *opposite* direction, from `to` to
// `from`.
if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
let err = CyclicDependenciesError { path };
assert!(err.from().0 == from && err.to().0 == to);
assert!(err.from().0 == from && err.to().0 == dep.crate_id);
return Err(err);
}
self.arena[from].add_dep(dep);
Ok(())
}
@ -531,22 +491,15 @@ impl CrateGraph {
.for_each(|(_, data)| data.dependencies.sort_by_key(|dep| dep.crate_id));
}
/// Extends this crate graph by adding a complete disjoint second crate
/// Extends this crate graph by adding a complete second crate
/// graph and adjust the ids in the [`ProcMacroPaths`] accordingly.
///
/// This will deduplicate the crates of the graph where possible.
/// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id.
/// If the crate dependencies were sorted, the resulting graph from this `extend` call will also
/// have the crate dependencies sorted.
///
/// Returns a mapping from `other`'s crate ids to the new crate ids in `self`.
/// Returns a map mapping `other`'s IDs to the new IDs in `self`.
pub fn extend(
&mut self,
mut other: CrateGraph,
proc_macros: &mut ProcMacroPaths,
merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool,
) -> FxHashMap<CrateId, CrateId> {
let m = self.len();
let topo = other.crates_in_topological_order();
let mut id_map: FxHashMap<CrateId, CrateId> = FxHashMap::default();
for topo in topo {
@ -554,20 +507,13 @@ impl CrateGraph {
crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]);
crate_data.dependencies.sort_by_key(|dep| dep.crate_id);
let res = self
.arena
.iter_mut()
.take(m)
.find_map(|(id, data)| merge((id, data), (topo, crate_data)).then_some(id));
let new_id =
if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) };
let new_id = self.arena.alloc(crate_data.clone());
id_map.insert(topo, new_id);
}
*proc_macros =
mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect();
id_map
}

View File

@ -5,11 +5,12 @@ mod input;
use std::panic;
use rustc_hash::FxHashMap;
use salsa::Durability;
use span::EditionedFileId;
use syntax::{ast, Parse, SourceFile, SyntaxError};
use triomphe::Arc;
use vfs::FileId;
use vfs::{AbsPathBuf, FileId};
pub use crate::{
change::FileChange,
@ -74,19 +75,30 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
#[salsa::input]
fn crate_graph(&self) -> Arc<CrateGraph>;
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
#[salsa::input]
fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult;
#[salsa::input]
fn toolchain(&self, krate: CrateId) -> Option<Version>;
fn crate_workspace_data(&self) -> Arc<FxHashMap<CrateId, Arc<CrateWorkspaceData>>>;
#[salsa::transparent]
fn toolchain_channel(&self, krate: CrateId) -> Option<ReleaseChannel>;
}
/// Crate related data shared by the whole workspace.
#[derive(Debug, PartialEq, Eq, Hash, Clone)]
pub struct CrateWorkspaceData {
/// The working directory to run proc-macros in. This is usually the workspace root of cargo workspaces.
pub proc_macro_cwd: Option<AbsPathBuf>,
// FIXME: Consider removing this, making HirDatabase::target_data_layout an input query
pub data_layout: TargetLayoutLoadResult,
/// Toolchain version used to compile the crate.
pub toolchain: Option<Version>,
}
fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option<ReleaseChannel> {
db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre))
db.crate_workspace_data()
.get(&krate)?
.toolchain
.as_ref()
.and_then(|v| ReleaseChannel::from_str(&v.pre))
}
fn parse(db: &dyn SourceDatabase, file_id: EditionedFileId) -> Parse<ast::SourceFile> {

View File

@ -100,7 +100,14 @@ pub struct BodySourceMap {
field_map_back: FxHashMap<ExprId, FieldSource>,
pat_field_map_back: FxHashMap<PatId, PatFieldSource>,
format_args_template_map: FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
template_map: Option<
Box<(
// format_args!
FxHashMap<ExprId, Vec<(syntax::TextRange, Name)>>,
// asm!
FxHashMap<ExprId, Vec<Vec<(syntax::TextRange, usize)>>>,
)>,
>,
expansions: FxHashMap<InFile<AstPtr<ast::MacroCall>>, MacroFileId>,
@ -220,6 +227,17 @@ impl Body {
pretty::print_expr_hir(db, self, owner, expr, edition)
}
pub fn pretty_print_pat(
&self,
db: &dyn DefDatabase,
owner: DefWithBodyId,
pat: PatId,
oneline: bool,
edition: Edition,
) -> String {
pretty::print_pat_hir(db, self, owner, pat, oneline, edition)
}
fn new(
db: &dyn DefDatabase,
owner: DefWithBodyId,
@ -426,7 +444,16 @@ impl BodySourceMap {
node: InFile<&ast::FormatArgsExpr>,
) -> Option<&[(syntax::TextRange, Name)]> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
self.format_args_template_map.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
self.template_map.as_ref()?.0.get(self.expr_map.get(&src)?).map(std::ops::Deref::deref)
}
pub fn asm_template_args(
&self,
node: InFile<&ast::AsmExpr>,
) -> Option<(ExprId, &[Vec<(syntax::TextRange, usize)>])> {
let src = node.map(AstPtr::new).map(AstPtr::upcast::<ast::Expr>);
let expr = self.expr_map.get(&src)?;
Some(*expr).zip(self.template_map.as_ref()?.1.get(expr).map(std::ops::Deref::deref))
}
/// Get a reference to the body source map's diagnostics.
@ -446,11 +473,14 @@ impl BodySourceMap {
field_map_back,
pat_field_map_back,
expansions,
format_args_template_map,
template_map,
diagnostics,
binding_definitions,
} = self;
format_args_template_map.shrink_to_fit();
if let Some(template_map) = template_map {
template_map.0.shrink_to_fit();
template_map.1.shrink_to_fit();
}
expr_map.shrink_to_fit();
expr_map_back.shrink_to_fit();
pat_map.shrink_to_fit();
@ -463,4 +493,13 @@ impl BodySourceMap {
diagnostics.shrink_to_fit();
binding_definitions.shrink_to_fit();
}
pub fn template_map(
&self,
) -> Option<&(
FxHashMap<Idx<Expr>, Vec<(tt::TextRange, Name)>>,
FxHashMap<Idx<Expr>, Vec<Vec<(tt::TextRange, usize)>>>,
)> {
self.template_map.as_deref()
}
}

View File

@ -1,6 +1,8 @@
//! Transforms `ast::Expr` into an equivalent `hir_def::expr::Expr`
//! representation.
mod asm;
use std::mem;
use base_db::CrateId;
@ -35,8 +37,8 @@ use crate::{
FormatPlaceholder, FormatSign, FormatTrait,
},
Array, Binding, BindingAnnotation, BindingId, BindingProblems, CaptureBy, ClosureKind,
Expr, ExprId, InlineAsm, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability,
OffsetOf, Pat, PatId, RecordFieldPat, RecordLitField, Statement,
Expr, ExprId, Label, LabelId, Literal, LiteralOrConst, MatchArm, Movability, OffsetOf, Pat,
PatId, RecordFieldPat, RecordLitField, Statement,
},
item_scope::BuiltinShadowMode,
lang_item::LangItem,
@ -693,10 +695,7 @@ impl ExprCollector<'_> {
}
}
ast::Expr::UnderscoreExpr(_) => self.alloc_expr(Expr::Underscore, syntax_ptr),
ast::Expr::AsmExpr(e) => {
let e = self.collect_expr_opt(e.expr());
self.alloc_expr(Expr::InlineAsm(InlineAsm { e }), syntax_ptr)
}
ast::Expr::AsmExpr(e) => self.lower_inline_asm(e, syntax_ptr),
ast::Expr::OffsetOfExpr(e) => {
let container = Interned::new(TypeRef::from_ast_opt(&self.ctx(), e.ty()));
let fields = e.fields().map(|it| it.as_name()).collect();
@ -737,7 +736,7 @@ impl ExprCollector<'_> {
/// `try { <stmts>; }` into `'<new_label>: { <stmts>; ::std::ops::Try::from_output(()) }`
/// and save the `<new_label>` to use it as a break target for desugaring of the `?` operator.
fn desugar_try_block(&mut self, e: BlockExpr) -> ExprId {
let Some(try_from_output) = LangItem::TryTraitFromOutput.path(self.db, self.krate) else {
let Some(try_from_output) = self.lang_path(LangItem::TryTraitFromOutput) else {
return self.collect_block(e);
};
let label = self
@ -840,10 +839,10 @@ impl ExprCollector<'_> {
fn collect_for_loop(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::ForExpr) -> ExprId {
let Some((into_iter_fn, iter_next_fn, option_some, option_none)) = (|| {
Some((
LangItem::IntoIterIntoIter.path(self.db, self.krate)?,
LangItem::IteratorNext.path(self.db, self.krate)?,
LangItem::OptionSome.path(self.db, self.krate)?,
LangItem::OptionNone.path(self.db, self.krate)?,
self.lang_path(LangItem::IntoIterIntoIter)?,
self.lang_path(LangItem::IteratorNext)?,
self.lang_path(LangItem::OptionSome)?,
self.lang_path(LangItem::OptionNone)?,
))
})() else {
// Some of the needed lang items are missing, so we can't desugar
@ -896,6 +895,15 @@ impl ExprCollector<'_> {
Expr::Match { expr: iter_next_expr, arms: Box::new([none_arm, some_arm]) },
syntax_ptr,
);
let loop_inner = self.alloc_expr(
Expr::Block {
id: None,
statements: Box::default(),
tail: Some(loop_inner),
label: None,
},
syntax_ptr,
);
let loop_outer = self.alloc_expr(Expr::Loop { body: loop_inner, label }, syntax_ptr);
let iter_binding = self.alloc_binding(iter_name, BindingAnnotation::Mutable);
let iter_pat = self.alloc_pat_desugared(Pat::Bind { id: iter_binding, subpat: None });
@ -923,10 +931,10 @@ impl ExprCollector<'_> {
fn collect_try_operator(&mut self, syntax_ptr: AstPtr<ast::Expr>, e: ast::TryExpr) -> ExprId {
let Some((try_branch, cf_continue, cf_break, try_from_residual)) = (|| {
Some((
LangItem::TryTraitBranch.path(self.db, self.krate)?,
LangItem::ControlFlowContinue.path(self.db, self.krate)?,
LangItem::ControlFlowBreak.path(self.db, self.krate)?,
LangItem::TryTraitFromResidual.path(self.db, self.krate)?,
self.lang_path(LangItem::TryTraitBranch)?,
self.lang_path(LangItem::ControlFlowContinue)?,
self.lang_path(LangItem::ControlFlowBreak)?,
self.lang_path(LangItem::TryTraitFromResidual)?,
))
})() else {
// Some of the needed lang items are missing, so we can't desugar
@ -1839,7 +1847,7 @@ impl ExprCollector<'_> {
},
syntax_ptr,
);
self.source_map.format_args_template_map.insert(idx, mappings);
self.source_map.template_map.get_or_insert_with(Default::default).0.insert(idx, mappings);
idx
}
@ -2052,7 +2060,12 @@ impl ExprCollector<'_> {
is_assignee_expr: false,
})
}
// endregion: format
fn lang_path(&self, lang: LangItem) -> Option<Path> {
lang.path(self.db, self.krate)
}
}
fn pat_literal_to_hir(lit: &ast::LiteralPat) -> Option<(Literal, ast::Literal)> {

View File

@ -0,0 +1,276 @@
//! Lowering of inline assembly.
use hir_expand::name::Name;
use intern::Symbol;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{
ast::{self, HasName, IsString},
AstNode, AstPtr, AstToken, T,
};
use tt::{TextRange, TextSize};
use crate::{
body::lower::{ExprCollector, FxIndexSet},
hir::{AsmOperand, AsmOptions, Expr, ExprId, InlineAsm, InlineAsmRegOrRegClass},
};
impl ExprCollector<'_> {
pub(super) fn lower_inline_asm(
&mut self,
asm: ast::AsmExpr,
syntax_ptr: AstPtr<ast::Expr>,
) -> ExprId {
let mut clobber_abis = FxIndexSet::default();
let mut operands = vec![];
let mut options = AsmOptions::empty();
let mut named_pos: FxHashMap<usize, Symbol> = Default::default();
let mut named_args: FxHashMap<Symbol, usize> = Default::default();
let mut reg_args: FxHashSet<usize> = Default::default();
for piece in asm.asm_pieces() {
let slot = operands.len();
let mut lower_reg = |reg: Option<ast::AsmRegSpec>| {
let reg = reg?;
if let Some(string) = reg.string_token() {
reg_args.insert(slot);
Some(InlineAsmRegOrRegClass::Reg(Symbol::intern(string.text())))
} else {
reg.name_ref().map(|name_ref| {
InlineAsmRegOrRegClass::RegClass(Symbol::intern(&name_ref.text()))
})
}
};
let op = match piece {
ast::AsmPiece::AsmClobberAbi(clobber_abi) => {
if let Some(abi_name) = clobber_abi.string_token() {
clobber_abis.insert(Symbol::intern(abi_name.text()));
}
continue;
}
ast::AsmPiece::AsmOptions(opt) => {
opt.asm_options().for_each(|opt| {
options |= match opt.syntax().first_token().map_or(T![$], |it| it.kind()) {
T![att_syntax] => AsmOptions::ATT_SYNTAX,
T![may_unwind] => AsmOptions::MAY_UNWIND,
T![nomem] => AsmOptions::NOMEM,
T![noreturn] => AsmOptions::NORETURN,
T![nostack] => AsmOptions::NOSTACK,
T![preserves_flags] => AsmOptions::PRESERVES_FLAGS,
T![pure] => AsmOptions::PURE,
T![raw] => AsmOptions::RAW,
T![readonly] => AsmOptions::READONLY,
_ => return,
}
});
continue;
}
ast::AsmPiece::AsmOperandNamed(op) => {
let name = op.name().map(|name| Symbol::intern(&name.text()));
if let Some(name) = &name {
named_args.insert(name.clone(), slot);
named_pos.insert(slot, name.clone());
}
let Some(op) = op.asm_operand() else { continue };
(
name.map(Name::new_symbol_root),
match op {
ast::AsmOperand::AsmRegOperand(op) => {
let Some(dir_spec) = op.asm_dir_spec() else {
continue;
};
let Some(reg) = lower_reg(op.asm_reg_spec()) else {
continue;
};
if dir_spec.in_token().is_some() {
let expr = self.collect_expr_opt(
op.asm_operand_expr().and_then(|it| it.in_expr()),
);
AsmOperand::In { reg, expr }
} else if dir_spec.out_token().is_some() {
let expr = op
.asm_operand_expr()
.and_then(|it| it.in_expr())
.filter(|it| !matches!(it, ast::Expr::UnderscoreExpr(_)))
.map(|expr| self.collect_expr(expr));
AsmOperand::Out { reg, expr, late: false }
} else if dir_spec.lateout_token().is_some() {
let expr = op
.asm_operand_expr()
.and_then(|it| it.in_expr())
.filter(|it| !matches!(it, ast::Expr::UnderscoreExpr(_)))
.map(|expr| self.collect_expr(expr));
AsmOperand::Out { reg, expr, late: true }
} else if dir_spec.inout_token().is_some() {
let Some(op_expr) = op.asm_operand_expr() else { continue };
let in_expr = self.collect_expr_opt(op_expr.in_expr());
match op_expr.fat_arrow_token().is_some() {
true => {
let out_expr = op_expr
.out_expr()
.filter(|it| {
!matches!(it, ast::Expr::UnderscoreExpr(_))
})
.map(|expr| self.collect_expr(expr));
AsmOperand::SplitInOut {
reg,
in_expr,
out_expr,
late: false,
}
}
false => {
AsmOperand::InOut { reg, expr: in_expr, late: false }
}
}
} else if dir_spec.inlateout_token().is_some() {
let Some(op_expr) = op.asm_operand_expr() else { continue };
let in_expr = self.collect_expr_opt(op_expr.in_expr());
match op_expr.fat_arrow_token().is_some() {
true => {
let out_expr = op_expr
.out_expr()
.filter(|it| {
!matches!(it, ast::Expr::UnderscoreExpr(_))
})
.map(|expr| self.collect_expr(expr));
AsmOperand::SplitInOut {
reg,
in_expr,
out_expr,
late: true,
}
}
false => {
AsmOperand::InOut { reg, expr: in_expr, late: true }
}
}
} else {
continue;
}
}
ast::AsmOperand::AsmLabel(l) => {
AsmOperand::Label(self.collect_block_opt(l.block_expr()))
}
ast::AsmOperand::AsmConst(c) => {
AsmOperand::Const(self.collect_expr_opt(c.expr()))
}
ast::AsmOperand::AsmSym(s) => {
let Some(path) =
s.path().and_then(|p| self.expander.parse_path(self.db, p))
else {
continue;
};
AsmOperand::Sym(path)
}
},
)
}
};
operands.push(op);
}
let mut mappings = vec![];
let mut curarg = 0;
if !options.contains(AsmOptions::RAW) {
// Don't treat raw asm as a format string.
asm.template()
.enumerate()
.filter_map(|(idx, it)| Some((idx, it.clone(), self.expand_macros_to_string(it)?)))
.for_each(|(idx, expr, (s, is_direct_literal))| {
mappings.resize_with(idx + 1, Vec::default);
let Ok(text) = s.value() else {
return;
};
let mappings = &mut mappings[idx];
let template_snippet = match expr {
ast::Expr::Literal(literal) => match literal.kind() {
ast::LiteralKind::String(s) => Some(s.text().to_owned()),
_ => None,
},
_ => None,
};
let str_style = match s.quote_offsets() {
Some(offsets) => {
let raw = usize::from(offsets.quotes.0.len()) - 1;
// subtract 1 for the `r` prefix
(raw != 0).then(|| raw - 1)
}
None => None,
};
let mut parser = rustc_parse_format::Parser::new(
&text,
str_style,
template_snippet,
false,
rustc_parse_format::ParseMode::InlineAsm,
);
parser.curarg = curarg;
let mut unverified_pieces = Vec::new();
while let Some(piece) = parser.next() {
if !parser.errors.is_empty() {
break;
} else {
unverified_pieces.push(piece);
}
}
curarg = parser.curarg;
let to_span = |inner_span: rustc_parse_format::InnerSpan| {
is_direct_literal.then(|| {
TextRange::new(
inner_span.start.try_into().unwrap(),
inner_span.end.try_into().unwrap(),
) - TextSize::from(str_style.map(|it| it + 1).unwrap_or(0) as u32 + 1)
})
};
for piece in unverified_pieces {
match piece {
rustc_parse_format::Piece::String(_) => {}
rustc_parse_format::Piece::NextArgument(arg) => {
// let span = arg_spans.next();
let (operand_idx, _name) = match arg.position {
rustc_parse_format::ArgumentIs(idx)
| rustc_parse_format::ArgumentImplicitlyIs(idx) => {
if idx >= operands.len()
|| named_pos.contains_key(&idx)
|| reg_args.contains(&idx)
{
(None, None)
} else {
(Some(idx), None)
}
}
rustc_parse_format::ArgumentNamed(name) => {
let name = Symbol::intern(name);
(
named_args.get(&name).copied(),
Some(Name::new_symbol_root(name)),
)
}
};
if let Some(operand_idx) = operand_idx {
if let Some(position_span) = to_span(arg.position_span) {
mappings.push((position_span, operand_idx));
}
}
}
}
}
})
};
let idx = self.alloc_expr(
Expr::InlineAsm(InlineAsm { operands: operands.into_boxed_slice(), options }),
syntax_ptr,
);
self.source_map.template_map.get_or_insert_with(Default::default).1.insert(idx, mappings);
idx
}
}

View File

@ -16,6 +16,13 @@ use crate::{
use super::*;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(super) enum LineFormat {
Oneline,
Newline,
Indentation,
}
pub(super) fn print_body_hir(
db: &dyn DefDatabase,
body: &Body,
@ -52,7 +59,14 @@ pub(super) fn print_body_hir(
}
};
let mut p = Printer { db, body, buf: header, indent_level: 0, needs_indent: false, edition };
let mut p = Printer {
db,
body,
buf: header,
indent_level: 0,
line_format: LineFormat::Newline,
edition,
};
if let DefWithBodyId::FunctionId(it) = owner {
p.buf.push('(');
let function_data = &db.function_data(it);
@ -95,12 +109,38 @@ pub(super) fn print_expr_hir(
expr: ExprId,
edition: Edition,
) -> String {
let mut p =
Printer { db, body, buf: String::new(), indent_level: 0, needs_indent: false, edition };
let mut p = Printer {
db,
body,
buf: String::new(),
indent_level: 0,
line_format: LineFormat::Newline,
edition,
};
p.print_expr(expr);
p.buf
}
pub(super) fn print_pat_hir(
db: &dyn DefDatabase,
body: &Body,
_owner: DefWithBodyId,
pat: PatId,
oneline: bool,
edition: Edition,
) -> String {
let mut p = Printer {
db,
body,
buf: String::new(),
indent_level: 0,
line_format: if oneline { LineFormat::Oneline } else { LineFormat::Newline },
edition,
};
p.print_pat(pat);
p.buf
}
macro_rules! w {
($dst:expr, $($arg:tt)*) => {
{ let _ = write!($dst, $($arg)*); }
@ -109,10 +149,10 @@ macro_rules! w {
macro_rules! wln {
($dst:expr) => {
{ let _ = writeln!($dst); }
{ $dst.newline(); }
};
($dst:expr, $($arg:tt)*) => {
{ let _ = writeln!($dst, $($arg)*); }
{ let _ = w!($dst, $($arg)*); $dst.newline(); }
};
}
@ -121,24 +161,30 @@ struct Printer<'a> {
body: &'a Body,
buf: String,
indent_level: usize,
needs_indent: bool,
line_format: LineFormat,
edition: Edition,
}
impl Write for Printer<'_> {
fn write_str(&mut self, s: &str) -> fmt::Result {
for line in s.split_inclusive('\n') {
if self.needs_indent {
if matches!(self.line_format, LineFormat::Indentation) {
match self.buf.chars().rev().find(|ch| *ch != ' ') {
Some('\n') | None => {}
_ => self.buf.push('\n'),
}
self.buf.push_str(&" ".repeat(self.indent_level));
self.needs_indent = false;
}
self.buf.push_str(line);
self.needs_indent = line.ends_with('\n');
if matches!(self.line_format, LineFormat::Newline | LineFormat::Indentation) {
self.line_format = if line.ends_with('\n') {
LineFormat::Indentation
} else {
LineFormat::Newline
};
}
}
Ok(())
@ -161,14 +207,28 @@ impl Printer<'_> {
}
}
// Add a newline if the current line is not empty.
// If the current line is empty, add a space instead.
//
// Do not use [`writeln!()`] or [`wln!()`] here, which will result in
// infinite recursive calls to this function.
fn newline(&mut self) {
match self.buf.chars().rev().find_position(|ch| *ch != ' ') {
Some((_, '\n')) | None => {}
Some((idx, _)) => {
if idx != 0 {
self.buf.drain(self.buf.len() - idx..);
if matches!(self.line_format, LineFormat::Oneline) {
match self.buf.chars().last() {
Some(' ') | None => {}
Some(_) => {
w!(self, " ");
}
}
} else {
match self.buf.chars().rev().find_position(|ch| *ch != ' ') {
Some((_, '\n')) | None => {}
Some((idx, _)) => {
if idx != 0 {
self.buf.drain(self.buf.len() - idx..);
}
w!(self, "\n");
}
writeln!(self).unwrap()
}
}
}
@ -539,12 +599,14 @@ impl Printer<'_> {
w!(self, ")");
}
Pat::Or(pats) => {
w!(self, "(");
for (i, pat) in pats.iter().enumerate() {
if i != 0 {
w!(self, " | ");
}
self.print_pat(*pat);
}
w!(self, ")");
}
Pat::Record { path, args, ellipsis } => {
match path {
@ -554,12 +616,37 @@ impl Printer<'_> {
w!(self, " {{");
let edition = self.edition;
let oneline = matches!(self.line_format, LineFormat::Oneline);
self.indented(|p| {
for arg in args.iter() {
w!(p, "{}: ", arg.name.display(self.db.upcast(), edition));
p.print_pat(arg.pat);
wln!(p, ",");
for (idx, arg) in args.iter().enumerate() {
let field_name = arg.name.display(self.db.upcast(), edition).to_string();
let mut same_name = false;
if let Pat::Bind { id, subpat: None } = &self.body[arg.pat] {
if let Binding { name, mode: BindingAnnotation::Unannotated, .. } =
&self.body.bindings[*id]
{
if name.as_str() == field_name {
same_name = true;
}
}
}
w!(p, "{}", field_name);
if !same_name {
w!(p, ": ");
p.print_pat(arg.pat);
}
// Do not print the extra comma if the line format is oneline
if oneline && idx == args.len() - 1 {
w!(p, " ");
} else {
wln!(p, ",");
}
}
if *ellipsis {
wln!(p, "..");
}

View File

@ -142,6 +142,41 @@ mod m {
);
}
#[test]
fn desugar_for_loop() {
let (db, body, def) = lower(
r#"
//- minicore: iterator
fn main() {
for ident in 0..10 {
foo();
bar()
}
}
"#,
);
expect![[r#"
fn main() -> () {
match builtin#lang(into_iter)(
(0) ..(10) ,
) {
mut <ra@gennew>11 => loop {
match builtin#lang(next)(
&mut <ra@gennew>11,
) {
builtin#lang(None) => break,
builtin#lang(Some)(ident) => {
foo();
bar()
},
}
},
}
}"#]]
.assert_eq(&body.pretty_print(&db, def, Edition::CURRENT))
}
#[test]
fn desugar_builtin_format_args() {
let (db, body, def) = lower(

View File

@ -14,6 +14,7 @@ use triomphe::Arc;
use crate::{
builtin_type::{BuiltinInt, BuiltinUint},
db::DefDatabase,
hir::Expr,
item_tree::{
AttrOwner, Field, FieldParent, FieldsShape, ItemTree, ModItem, RawVisibilityId, TreeId,
},
@ -317,6 +318,27 @@ impl EnumData {
_ => IntegerType::Pointer(true),
}
}
// [Adopted from rustc](https://github.com/rust-lang/rust/blob/bd53aa3bf7a24a70d763182303bd75e5fc51a9af/compiler/rustc_middle/src/ty/adt.rs#L446-L448)
/// Returns `true` when every variant of this enum carries no payload:
/// no fields, and no explicit discriminant/body expression.
///
/// NOTE(review): the discriminant check below only runs for non-unit
/// variants — confirm this matches rustc's intent for unit variants
/// with explicit discriminants.
pub fn is_payload_free(&self, db: &dyn DefDatabase) -> bool {
self.variants.iter().all(|(v, _)| {
// The condition check order is slightly modified from rustc
// to improve performance by early returning with relatively fast checks
let variant = &db.enum_variant_data(*v).variant_data;
if !variant.fields().is_empty() {
return false;
}
// The outer if condition is whether this variant has const ctor or not
if !matches!(variant.kind(), StructKind::Unit) {
let body = db.body((*v).into());
// A variant with explicit discriminant
if body.exprs[body.body_expr] != Expr::Missing {
return false;
}
}
true
})
}
}
impl EnumVariantData {

View File

@ -307,7 +307,120 @@ pub struct OffsetOf {
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InlineAsm {
pub e: ExprId,
pub operands: Box<[(Option<Name>, AsmOperand)]>,
pub options: AsmOptions,
}
/// Bit-set of the flags that may appear in the `options(...)` clause of an
/// `asm!` invocation (see `human_readable_names` for their source spellings).
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct AsmOptions(u16);
bitflags::bitflags! {
impl AsmOptions: u16 {
const PURE = 1 << 0;
const NOMEM = 1 << 1;
const READONLY = 1 << 2;
const PRESERVES_FLAGS = 1 << 3;
const NORETURN = 1 << 4;
const NOSTACK = 1 << 5;
const ATT_SYNTAX = 1 << 6;
const RAW = 1 << 7;
const MAY_UNWIND = 1 << 8;
}
}
impl AsmOptions {
/// Number of distinct option flags declared on this type.
pub const COUNT: usize = Self::all().bits().count_ones() as usize;

/// Options valid on `global_asm!`.
pub const GLOBAL_OPTIONS: Self = Self::ATT_SYNTAX.union(Self::RAW);
/// Options valid on naked-function assembly.
pub const NAKED_OPTIONS: Self = Self::ATT_SYNTAX.union(Self::RAW).union(Self::NORETURN);

/// Returns the source-level `options(...)` spelling of every flag that is
/// set, in flag-declaration order.
pub fn human_readable_names(&self) -> Vec<&'static str> {
    // Table-driven instead of a cascade of `if` checks: one (flag, name)
    // pair per declared option, in the same order as the declarations.
    const NAMES: [(AsmOptions, &'static str); 9] = [
        (AsmOptions::PURE, "pure"),
        (AsmOptions::NOMEM, "nomem"),
        (AsmOptions::READONLY, "readonly"),
        (AsmOptions::PRESERVES_FLAGS, "preserves_flags"),
        (AsmOptions::NORETURN, "noreturn"),
        (AsmOptions::NOSTACK, "nostack"),
        (AsmOptions::ATT_SYNTAX, "att_syntax"),
        (AsmOptions::RAW, "raw"),
        (AsmOptions::MAY_UNWIND, "may_unwind"),
    ];
    NAMES
        .iter()
        .filter(|(flag, _)| self.contains(*flag))
        .map(|&(_, name)| name)
        .collect()
}
}
// Manual `Debug` impl: delegates to bitflags' text format so set flags
// print as e.g. `PURE | NOMEM` instead of the opaque tuple-struct derive.
impl std::fmt::Debug for AsmOptions {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
bitflags::parser::to_writer(self, f)
}
}
/// One operand of an `asm!` invocation.
///
/// The `late` flags presumably distinguish the `lateout`/`inlateout`
/// spellings from `out`/`inout` — TODO(review) confirm against lowering.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum AsmOperand {
/// `in(reg) expr` — input operand.
In {
reg: InlineAsmRegOrRegClass,
expr: ExprId,
},
/// `out(reg) expr` — output operand; `expr: None` means the output is
/// discarded (`out(reg) _`, i.e. a clobber when `reg` is concrete).
Out {
reg: InlineAsmRegOrRegClass,
expr: Option<ExprId>,
late: bool,
},
/// `inout(reg) expr` — same expression is both read and written.
InOut {
reg: InlineAsmRegOrRegClass,
expr: ExprId,
late: bool,
},
/// `inout(reg) in_expr => out_expr` — distinct input and output places.
SplitInOut {
reg: InlineAsmRegOrRegClass,
in_expr: ExprId,
out_expr: Option<ExprId>,
late: bool,
},
/// `label { ... }` operand (block expression jumped to from asm).
Label(ExprId),
/// `const expr` operand.
Const(ExprId),
/// `sym path` operand.
Sym(Path),
}
impl AsmOperand {
/// The register (or register class) this operand is bound to, if any.
/// `const`, `sym` and `label` operands have no register.
pub fn reg(&self) -> Option<&InlineAsmRegOrRegClass> {
    match self {
        Self::In { reg, .. }
        | Self::Out { reg, .. }
        | Self::InOut { reg, .. }
        | Self::SplitInOut { reg, .. } => Some(reg),
        Self::Const(_) | Self::Sym(_) | Self::Label(_) => None,
    }
}

/// An output to a concrete register with no output place (`out(reg) _`)
/// merely clobbers that register.
pub fn is_clobber(&self) -> bool {
    match self {
        AsmOperand::Out { reg: InlineAsmRegOrRegClass::Reg(_), expr: None, .. } => true,
        _ => false,
    }
}
}
/// A register specifier in an `asm!` operand: either a concrete register
/// name (e.g. `"eax"`) or a register class (e.g. `reg`).
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum InlineAsmRegOrRegClass {
/// Explicit register, by name.
Reg(Symbol),
/// Register class; the actual register is chosen by the compiler.
RegClass(Symbol),
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@ -372,7 +485,21 @@ impl Expr {
match self {
Expr::Missing => {}
Expr::Path(_) | Expr::OffsetOf(_) => {}
Expr::InlineAsm(it) => f(it.e),
Expr::InlineAsm(it) => it.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
| AsmOperand::InOut { expr, .. } => f(*expr),
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
f(*in_expr);
if let Some(out_expr) = out_expr {
f(*out_expr);
}
}
AsmOperand::Out { expr: None, .. }
| AsmOperand::Const(_)
| AsmOperand::Label(_)
| AsmOperand::Sym(_) => (),
}),
Expr::If { condition, then_branch, else_branch } => {
f(*condition);
f(*then_branch);

View File

@ -50,11 +50,7 @@ fn main() {
let i: u64 = 3;
let o: u64;
unsafe {
builtin #asm ( {
$crate::format_args!("mov {0}, {1}");
$crate::format_args!("add {0}, 5");
}
);
builtin #asm ("mov {0}, {1}", "add {0}, 5", out (reg)o, in (reg)i, );
}
}
"##]],
@ -532,3 +528,21 @@ fn main() { foobar; }
"##]],
);
}
// The builtin `stringify!` must escape the quotes of a string-literal
// argument: `"hello"` stringifies to the literal `"\"hello\""`.
#[test]
fn test_quote_string() {
check(
r##"
#[rustc_builtin_macro]
macro_rules! stringify {}

fn main() { stringify!("hello"); }
"##,
expect![[r##"
#[rustc_builtin_macro]
macro_rules! stringify {}

fn main() { "\"hello\""; }
"##]],
);
}

View File

@ -389,7 +389,7 @@ m! { foo# bar }
m! { Foo,# Bar }
"#,
expect![[r##"
expect![[r#"
macro_rules! m {
($($i:ident),*) => ($(mod $i {} )*);
($($i:ident)#*) => ($(fn $i() {} )*);
@ -404,27 +404,29 @@ fn bar() {}
struct Foo;
struct Bar;
"##]],
"#]],
);
}
#[test]
fn test_match_group_pattern_with_multiple_defs() {
// FIXME: The pretty printer breaks by leaving whitespace here, +syntaxctxt is used to avoid that
check(
r#"
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
// +syntaxctxt
m! { foo, bar }
"#,
expect![[r#"
macro_rules! m {
($($i:ident),*) => ( impl Bar { $(fn $i() {})* } );
}
impl Bar {
fn foo() {}
fn bar() {}
}
impl#\1# Bar#\1# {#\1#
fn#\1# foo#\0#(#\1#)#\1# {#\1#}#\1#
fn#\1# bar#\0#(#\1#)#\1# {#\1#}#\1#
}#\1#
"#]],
);
}
@ -480,12 +482,12 @@ macro_rules! m {
}
m!{#abc}
"#,
expect![[r##"
expect![[r#"
macro_rules! m {
($($i:ident)* #abc) => ( fn baz() { $($i ();)* } );
}
fn baz() {}
"##]],
"#]],
)
}
@ -1189,13 +1191,13 @@ macro_rules! m {
m! { cfg(target_os = "windows") }
m! { hello::world }
"#,
expect![[r##"
expect![[r#"
macro_rules! m {
($m:meta) => ( #[$m] fn bar() {} )
}
#[cfg(target_os = "windows")] fn bar() {}
#[hello::world] fn bar() {}
"##]],
"#]],
);
}
@ -1213,7 +1215,7 @@ m! {
*/
}
"#,
expect![[r##"
expect![[r#"
macro_rules! m {
($(#[$m:meta])+) => ( $(#[$m])+ fn bar() {} )
}
@ -1221,7 +1223,7 @@ macro_rules! m {
#[doc = r"
MultiLines Doc
"] fn bar() {}
"##]],
"#]],
);
}
@ -1234,12 +1236,12 @@ macro_rules! m {
}
m! { #[doc = concat!("The `", "bla", "` lang item.")] }
"#,
expect![[r##"
expect![[r#"
macro_rules! m {
(#[$m:meta]) => ( #[$m] fn bar() {} )
}
#[doc = concat!("The `", "bla", "` lang item.")] fn bar() {}
"##]],
"#]],
);
}
@ -1257,7 +1259,7 @@ m! {
*/
}
"#,
expect![[r##"
expect![[r#"
macro_rules! m {
($(#[$ m:meta])+) => ( $(#[$m])+ fn bar() {} )
}
@ -1265,7 +1267,7 @@ macro_rules! m {
#[doc = r"
"] fn bar() {}
"##]],
"#]],
);
}
@ -1342,10 +1344,10 @@ fn test_tt_composite2() {
macro_rules! m { ($($tt:tt)*) => { abs!(=> $($tt)*); } }
m! {#}
"#,
expect![[r##"
expect![[r#"
macro_rules! m { ($($tt:tt)*) => { abs!(=> $($tt)*); } }
abs!( = > #);
"##]],
"#]],
);
}

View File

@ -311,3 +311,150 @@ fn test() {
"#]],
);
}
// `${concat(...)}` glues ident, literal, and bare-token pieces into one
// identifier; the snapshot shows e.g. `abc_123_foo456_123` (note how the
// `_foo` segment fuses with the preceding `"123"`).
#[test]
fn concat() {
// FIXME: Should this error? rustc currently accepts it.
check(
r#"
macro_rules! m {
( $a:ident, $b:literal ) => {
let ${concat($a, _, "123", _foo, $b, _, 123)};
};
}

fn test() {
m!( abc, 456 );
m!( def, "hello" );
}
"#,
expect![[r#"
macro_rules! m {
( $a:ident, $b:literal ) => {
let ${concat($a, _, "123", _foo, $b, _, 123)};
};
}

fn test() {
let abc_123_foo456_123;;
let def_123_foohello_123;;
}
"#]],
);
}
// A `${concat(...)}` with a single argument is rejected here as a macro
// definition parse error (rustc is more lenient; see FIXME).
#[test]
fn concat_less_than_two_elements() {
// FIXME: Should this error? rustc currently accepts it.
check(
r#"
macro_rules! m {
() => {
let ${concat(abc)};
};
}

fn test() {
m!()
}
"#,
expect![[r#"
macro_rules! m {
() => {
let ${concat(abc)};
};
}

fn test() {
/* error: macro definition has parse errors */
}
"#]],
);
}
// When the concatenation would not form a valid identifier (here a `'"'`
// char literal), an error is emitted and a dummy binding is produced so
// downstream analysis still sees a `let`.
#[test]
fn concat_invalid_ident() {
// FIXME: Should this error? rustc currently accepts it.
check(
r#"
macro_rules! m {
() => {
let ${concat(abc, '"')};
};
}

fn test() {
m!()
}
"#,
expect![[r#"
macro_rules! m {
() => {
let ${concat(abc, '"')};
};
}

fn test() {
/* error: `${concat(..)}` is not generating a valid identifier */let __ra_concat_dummy;
}
"#]],
);
}
// Metavariables inside `${concat(..)}` must be `ident`, `literal` or `tt`;
// an `expr` fragment is reported and the offending piece is dropped
// (expansion keeps only `abc`).
#[test]
fn concat_invalid_fragment() {
// FIXME: Should this error? rustc currently accepts it.
check(
r#"
macro_rules! m {
( $e:expr ) => {
let ${concat(abc, $e)};
};
}

fn test() {
m!(())
}
"#,
expect![[r#"
macro_rules! m {
( $e:expr ) => {
let ${concat(abc, $e)};
};
}

fn test() {
/* error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt` */let abc;
}
"#]],
);
}
// A metavariable bound inside a repetition (`$($i:ident)*`) cannot be used
// in `${concat(..)}`; a "nested binding" error is emitted and the piece is
// dropped from the expansion.
#[test]
fn concat_repetition() {
// FIXME: Should this error? rustc currently accepts it.
check(
r#"
macro_rules! m {
( $($i:ident)* ) => {
let ${concat(abc, $i)};
};
}

fn test() {
m!(a b c)
}
"#,
expect![[r#"
macro_rules! m {
( $($i:ident)* ) => {
let ${concat(abc, $i)};
};
}

fn test() {
/* error: expected simple binding, found nested binding `i` */let abc;
}
"#]],
);
}

View File

@ -139,7 +139,7 @@ STRUCT!{struct D3DVSHADERCAPS2_0 {Caps: u8,}}
STRUCT!{#[cfg_attr(target_arch = "x86", repr(packed))] struct D3DCONTENTPROTECTIONCAPS {Caps : u8 ,}}
"#,
expect![[r##"
expect![[r#"
macro_rules! STRUCT {
($(#[$attrs:meta])* struct $name:ident {
$($field:ident: $ftype:ty,)+
@ -194,7 +194,7 @@ impl Clone for D3DCONTENTPROTECTIONCAPS {
}
}
}
"##]],
"#]],
);
}
@ -214,7 +214,7 @@ macro_rules! int_base {
}
int_base!{Binary for isize as usize -> Binary}
"#,
expect![[r##"
expect![[r#"
macro_rules! int_base {
($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
#[stable(feature = "rust1", since = "1.0.0")]
@ -230,7 +230,7 @@ macro_rules! int_base {
Binary.fmt_int(*self as usize, f)
}
}
"##]],
"#]],
);
}
@ -318,7 +318,7 @@ impl_fn_for_zst ! {
}
"#,
expect![[r##"
expect![[r#"
macro_rules! impl_fn_for_zst {
{$( $( #[$attr: meta] )*
struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )? Fn =
@ -410,7 +410,7 @@ impl FnOnce<(char, )> for CharEscapeDefault {
}
}
"##]],
"#]],
);
}
@ -511,7 +511,7 @@ cfg_if! {
@__apply cfg(all(not(any(not(any(target_os = "solaris", target_os = "illumos")))))),
}
"#,
expect![[r##"
expect![[r#"
macro_rules! cfg_if {
($(if #[cfg($($meta:meta),*)] { $($it:item)* } )else* else { $($it2:item)* })
=> {
@ -534,7 +534,7 @@ __cfg_if_items! {
}
"##]],
"#]],
);
}
@ -618,7 +618,7 @@ RIDL!{interface ID3D11Asynchronous(ID3D11AsynchronousVtbl): ID3D11DeviceChild(ID
fn GetDataSize(&mut self) -> UINT
}}
"#,
expect![[r##"
expect![[r#"
#[macro_export]
macro_rules! RIDL {
(interface $interface:ident ($vtbl:ident) : $pinterface:ident ($pvtbl:ident)
@ -639,7 +639,7 @@ impl ID3D11Asynchronous {
((*self .lpVtbl).GetDataSize)(self )
}
}
"##]],
"#]],
);
}
@ -676,7 +676,7 @@ quick_error ! (
);
"#,
expect![[r##"
expect![[r#"
macro_rules! quick_error {
(SORT [enum $name:ident $( #[$meta:meta] )*]
items [$($( #[$imeta:meta] )*
@ -697,7 +697,7 @@ macro_rules! quick_error {
}
quick_error!(ENUMINITION[enum Wrapped#[derive(Debug)]]body[]queue[ = > One: UNIT[] = > Two: TUPLE[s: String]]);
"##]],
"#]],
)
}
@ -746,7 +746,7 @@ delegate_impl ! {
[G, &'a mut G, deref] pub trait Data: GraphBase {@section type type NodeWeight;}
}
"#,
expect![[r##"
expect![[r#"
macro_rules! delegate_impl {
([$self_type:ident, $self_wrap:ty, $self_map:ident]
pub trait $name:ident $(: $sup:ident)* $(+ $more_sup:ident)* {
@ -785,7 +785,7 @@ macro_rules! delegate_impl {
}
}
impl <> Data for &'amut G where G: Data {}
"##]],
"#]],
);
}
@ -959,14 +959,14 @@ macro_rules! with_std {
with_std! {mod m;mod f;}
"#,
expect![[r##"
expect![[r#"
macro_rules! with_std {
($($i:item)*) => ($(#[cfg(feature = "std")]$i)*)
}
#[cfg(feature = "std")] mod m;
#[cfg(feature = "std")] mod f;
"##]],
"#]],
)
}
@ -1144,3 +1144,27 @@ mod any {
"#]],
);
}
// Regression test (#18148): a raw-keyword identifier (`r#const`) passed to
// an `$e:expr` matcher must expand cleanly — the macro discards its input,
// leaving just the trailing `;`.
#[test]
fn regression_18148() {
check(
r#"
macro_rules! m {
( $e:expr ) => {};
}

fn foo() {
m!(r#const);
}
"#,
expect![[r#"
macro_rules! m {
( $e:expr ) => {};
}

fn foo() {
;
}
"#]],
);
}

View File

@ -1,6 +1,6 @@
//! This module contains tests for macro expansion. Effectively, it covers `tt`,
//! `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like a
//! wrong architecture at the first glance, but is intentional.
//! This module contains integration tests for macro expansion with name resolution. Effectively, it
//! covers `tt`, `mbe`, `proc_macro_api` and `hir_expand` crates. This might seem like a wrong
//! architecture at the first glance, but is intentional.
//!
//! Physically, macro expansion process is intertwined with name resolution. You
//! can not expand *just* the syntax. So, to be able to write integration tests
@ -320,6 +320,7 @@ impl ProcMacroExpander for IdentityWhenValidProcMacroExpander {
_: Span,
_: Span,
_: Span,
_: Option<String>,
) -> Result<Subtree, ProcMacroExpansionError> {
let (parse, _) = syntax_bridge::token_tree_to_syntax_node(
subtree,

View File

@ -16,12 +16,12 @@ fn attribute_macro_attr_censoring() {
#[attr1] #[proc_macros::identity] #[attr2]
struct S;
"#,
expect![[r##"
expect![[r#"
#[attr1] #[proc_macros::identity] #[attr2]
struct S;
#[attr1]
#[attr2] struct S;"##]],
#[attr2] struct S;"#]],
);
}
@ -39,7 +39,7 @@ fn derive_censoring() {
#[attr2]
struct S;
"#,
expect![[r##"
expect![[r#"
#[attr1]
#[derive(Foo)]
#[derive(proc_macros::DeriveIdentity)]
@ -49,7 +49,7 @@ struct S;
#[attr1]
#[derive(Bar)]
#[attr2] struct S;"##]],
#[attr2] struct S;"#]],
);
}
@ -62,14 +62,14 @@ fn attribute_macro_syntax_completion_1() {
#[proc_macros::identity_when_valid]
fn foo() { bar.baz(); blub }
"#,
expect![[r##"
expect![[r#"
#[proc_macros::identity_when_valid]
fn foo() { bar.baz(); blub }
fn foo() {
bar.baz();
blub
}"##]],
}"#]],
);
}

View File

@ -69,7 +69,7 @@ use la_arena::Arena;
use rustc_hash::{FxHashMap, FxHashSet};
use span::{Edition, EditionedFileId, FileAstId, FileId, ROOT_ERASED_FILE_AST_ID};
use stdx::format_to;
use syntax::{ast, SmolStr};
use syntax::{ast, AstNode, SmolStr, SyntaxNode};
use triomphe::Arc;
use tt::TextRange;
@ -291,7 +291,7 @@ impl ModuleOrigin {
/// Returns a node which defines this module.
/// That is, a file or a `mod foo {}` with items.
fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
pub fn definition_source(&self, db: &dyn DefDatabase) -> InFile<ModuleSource> {
match self {
&ModuleOrigin::File { definition, .. } | &ModuleOrigin::CrateRoot { definition } => {
let sf = db.parse(definition).tree();
@ -728,6 +728,16 @@ pub enum ModuleSource {
BlockExpr(ast::BlockExpr),
}
impl ModuleSource {
/// The underlying syntax node of this module source, whichever of the
/// three variants (whole file, `mod foo { .. }` item, block expression)
/// it happens to be.
pub fn node(&self) -> SyntaxNode {
    let syntax = match self {
        ModuleSource::SourceFile(file) => file.syntax(),
        ModuleSource::Module(module) => module.syntax(),
        ModuleSource::BlockExpr(block) => block.syntax(),
    };
    syntax.clone()
}
}
/// See `sub_namespace_match()`.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum MacroSubNs {

View File

@ -221,7 +221,7 @@ struct DefCollector<'a> {
deps: FxHashMap<Name, Dependency>,
glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility, UseId)>>,
unresolved_imports: Vec<ImportDirective>,
indeterminate_imports: Vec<ImportDirective>,
indeterminate_imports: Vec<(ImportDirective, PerNs)>,
unresolved_macros: Vec<MacroDirective>,
mod_dirs: FxHashMap<LocalModuleId, ModDir>,
cfg_options: &'a CfgOptions,
@ -415,16 +415,6 @@ impl DefCollector<'_> {
self.resolution_loop();
// Resolve all indeterminate resolved imports again
// As some of the macros will expand newly import shadowing partial resolved imports
// FIXME: We maybe could skip this, if we handle the indeterminate imports in `resolve_imports`
// correctly
let partial_resolved = self.indeterminate_imports.drain(..).map(|directive| {
ImportDirective { status: PartialResolvedImport::Unresolved, ..directive }
});
self.unresolved_imports.extend(partial_resolved);
self.resolve_imports();
let unresolved_imports = mem::take(&mut self.unresolved_imports);
// show unresolved imports in completion, etc
for directive in &unresolved_imports {
@ -749,9 +739,9 @@ impl DefCollector<'_> {
.filter_map(|mut directive| {
directive.status = self.resolve_import(directive.module_id, &directive.import);
match directive.status {
PartialResolvedImport::Indeterminate(_) => {
PartialResolvedImport::Indeterminate(resolved) => {
self.record_resolved_import(&directive);
self.indeterminate_imports.push(directive);
self.indeterminate_imports.push((directive, resolved));
res = ReachedFixedPoint::No;
None
}
@ -764,6 +754,33 @@ impl DefCollector<'_> {
}
})
.collect();
// Resolve all indeterminate resolved imports again
// As some of the macros will expand newly import shadowing partial resolved imports
// FIXME: We maybe could skip this, if we handle the indeterminate imports in `resolve_imports`
// correctly
let mut indeterminate_imports = std::mem::take(&mut self.indeterminate_imports);
indeterminate_imports.retain_mut(|(directive, partially_resolved)| {
let partially_resolved = partially_resolved.availability();
directive.status = self.resolve_import(directive.module_id, &directive.import);
match directive.status {
PartialResolvedImport::Indeterminate(import)
if partially_resolved != import.availability() =>
{
self.record_resolved_import(directive);
res = ReachedFixedPoint::No;
false
}
PartialResolvedImport::Resolved(_) => {
self.record_resolved_import(directive);
res = ReachedFixedPoint::No;
false
}
_ => true,
}
});
self.indeterminate_imports = indeterminate_imports;
res
}

View File

@ -3,6 +3,8 @@
//!
//! `PerNs` (per namespace) captures this.
use bitflags::bitflags;
use crate::{
item_scope::{ImportId, ImportOrExternCrate, ItemInNs},
visibility::Visibility,
@ -16,6 +18,16 @@ pub enum Namespace {
Macros,
}
bitflags! {
/// Describes only the presence/absence of each namespace, without its value.
// One bit per namespace of `PerNs` (types / values / macros); computed by
// `PerNs::availability` and compared to detect resolution changes.
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct NsAvailability : u32 {
const TYPES = 1 << 0;
const VALUES = 1 << 1;
const MACROS = 1 << 2;
}
}
#[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)]
pub struct PerNs {
pub types: Option<(ModuleDefId, Visibility, Option<ImportOrExternCrate>)>,
@ -24,6 +36,14 @@ pub struct PerNs {
}
impl PerNs {
/// Collapses this `PerNs` into a presence/absence bit-set: one bit per
/// namespace that currently holds a resolution.
pub(crate) fn availability(&self) -> NsAvailability {
    let mut avail = NsAvailability::empty();
    if self.types.is_some() {
        avail |= NsAvailability::TYPES;
    }
    if self.values.is_some() {
        avail |= NsAvailability::VALUES;
    }
    if self.macros.is_some() {
        avail |= NsAvailability::MACROS;
    }
    avail
}
pub fn none() -> PerNs {
PerNs { types: None, values: None, macros: None }
}

View File

@ -1,6 +1,6 @@
//! Builtin macros and attributes
#[macro_use]
mod quote;
pub mod quote;
mod attr_macro;
mod derive_macro;

View File

@ -119,9 +119,8 @@ register_builtin! {
(module_path, ModulePath) => module_path_expand,
(assert, Assert) => assert_expand,
(stringify, Stringify) => stringify_expand,
(llvm_asm, LlvmAsm) => asm_expand,
(asm, Asm) => asm_expand,
(global_asm, GlobalAsm) => global_asm_expand,
(global_asm, GlobalAsm) => asm_expand,
(cfg, Cfg) => cfg_expand,
(core_panic, CorePanic) => panic_expand,
(std_panic, StdPanic) => panic_expand,
@ -324,40 +323,15 @@ fn asm_expand(
tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
let dollar_krate = dollar_crate(span);
literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
builtin #pound asm #tt
};
ExpandResult::ok(expanded)
}
fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
@ -509,7 +483,7 @@ fn concat_expand(
match it.kind {
tt::LitKind::Char => {
if let Ok(c) = unescape_char(it.symbol.as_str()) {
text.extend(c.escape_default());
text.push(c);
}
record_span(it.span);
}
@ -517,11 +491,11 @@ fn concat_expand(
format_to!(text, "{}", it.symbol.as_str())
}
tt::LitKind::Str => {
text.push_str(it.symbol.as_str());
text.push_str(unescape_str(&it.symbol).as_str());
record_span(it.span);
}
tt::LitKind::StrRaw(_) => {
format_to!(text, "{}", it.symbol.as_str().escape_debug());
format_to!(text, "{}", it.symbol.as_str());
record_span(it.span);
}
tt::LitKind::Byte
@ -839,7 +813,7 @@ fn include_str_expand(
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &Symbol) -> Option<String> {
let krate = db.lookup_intern_macro_call(arg_id).krate;
db.crate_graph()[krate].env.get(key.as_str()).map(|it| it.escape_debug().to_string())
db.crate_graph()[krate].env.get(key.as_str())
}
fn env_expand(

View File

@ -3,6 +3,7 @@
use intern::{sym, Symbol};
use span::Span;
use syntax::ToSmolStr;
use tt::IdentIsRaw;
use crate::name::Name;
@ -17,6 +18,7 @@ pub(crate) fn dollar_crate(span: Span) -> tt::Ident<Span> {
// 2. #()* pattern repetition not supported now
// * But we can do it manually, see `test_quote_derive_copy_hack`
#[doc(hidden)]
#[macro_export]
macro_rules! quote_impl__ {
($span:ident) => {
Vec::<$crate::tt::TokenTree>::new()
@ -26,8 +28,8 @@ macro_rules! quote_impl__ {
{
let children = $crate::builtin::quote::__quote!($span $($tt)*);
$crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
delimiter: $crate::tt::Delimiter {
kind: $crate::tt::DelimiterKind::$delim,
open: $span,
close: $span,
},
@ -39,9 +41,9 @@ macro_rules! quote_impl__ {
( @PUNCT($span:ident) $first:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
$crate::tt::Leaf::Punct($crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
spacing: $crate::tt::Spacing::Alone,
span: $span,
}).into()
]
@ -51,14 +53,14 @@ macro_rules! quote_impl__ {
( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
$crate::tt::Leaf::Punct($crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
spacing: $crate::tt::Spacing::Joint,
span: $span,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
$crate::tt::Leaf::Punct($crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
spacing: $crate::tt::Spacing::Alone,
span: $span,
}).into()
]
@ -97,7 +99,7 @@ macro_rules! quote_impl__ {
// Ident
($span:ident $tt:ident ) => {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
$crate::tt::Leaf::Ident($crate::tt::Ident {
sym: intern::Symbol::intern(stringify!($tt)),
span: $span,
is_raw: tt::IdentIsRaw::No,
@ -108,6 +110,7 @@ macro_rules! quote_impl__ {
// Puncts
// FIXME: Not all puncts are handled
($span:ident -> ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '-', '>')};
($span:ident => ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '=', '>')};
($span:ident & ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '&')};
($span:ident , ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ',')};
($span:ident : ) => {$crate::builtin::quote::__quote!(@PUNCT($span) ':')};
@ -117,6 +120,9 @@ macro_rules! quote_impl__ {
($span:ident < ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '<')};
($span:ident > ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '>')};
($span:ident ! ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '!')};
($span:ident # ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '#')};
($span:ident $ ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '$')};
($span:ident * ) => {$crate::builtin::quote::__quote!(@PUNCT($span) '*')};
($span:ident $first:tt $($tail:tt)+ ) => {
{
@ -128,18 +134,19 @@ macro_rules! quote_impl__ {
}
};
}
pub(super) use quote_impl__ as __quote;
pub use quote_impl__ as __quote;
/// FIXME:
/// It probably should implement in proc-macro
macro_rules! quote_impl {
#[macro_export]
macro_rules! quote {
($span:ident=> $($tt:tt)* ) => {
$crate::builtin::quote::IntoTt::to_subtree($crate::builtin::quote::__quote!($span $($tt)*), $span)
}
}
pub(super) use quote_impl as quote;
pub(super) use quote;
pub(crate) trait IntoTt {
pub trait IntoTt {
fn to_subtree(self, span: Span) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}
@ -167,7 +174,7 @@ impl IntoTt for crate::tt::Subtree {
}
}
pub(crate) trait ToTokenTree {
pub trait ToTokenTree {
fn to_token(self, span: Span) -> crate::tt::TokenTree;
}
@ -211,8 +218,8 @@ impl_to_to_tokentrees! {
_span: crate::tt::Literal => self { self };
_span: crate::tt::Ident => self { self };
_span: crate::tt::Punct => self { self };
span: &str => self { crate::tt::Literal{symbol: Symbol::intern(self), span, kind: tt::LitKind::Str, suffix: None }};
span: String => self { crate::tt::Literal{symbol: Symbol::intern(&self), span, kind: tt::LitKind::Str, suffix: None }};
span: &str => self { crate::tt::Literal{symbol: Symbol::intern(&self.escape_default().to_smolstr()), span, kind: tt::LitKind::Str, suffix: None }};
span: String => self { crate::tt::Literal{symbol: Symbol::intern(&self.escape_default().to_smolstr()), span, kind: tt::LitKind::Str, suffix: None }};
span: Name => self {
let (is_raw, s) = IdentIsRaw::split_from_symbol(self.as_str());
crate::tt::Ident{sym: Symbol::intern(s), span, is_raw }

View File

@ -6,7 +6,7 @@ use cfg::{CfgAtom, CfgExpr};
use intern::{sym, Symbol};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, Attr, HasAttrs, Meta, VariantList},
ast::{self, Attr, HasAttrs, Meta, TokenTree, VariantList},
AstNode, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, T,
};
use tracing::{debug, warn};
@ -17,7 +17,7 @@ fn check_cfg(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Option<boo
if !attr.simple_name().as_deref().map(|v| v == "cfg")? {
return None;
}
let cfg = parse_from_attr_meta(attr.meta()?)?;
let cfg = parse_from_attr_token_tree(&attr.meta()?.token_tree()?)?;
let enabled = db.crate_graph()[krate].cfg_options.check(&cfg) != Some(false);
Some(enabled)
}
@ -26,7 +26,15 @@ fn check_cfg_attr(db: &dyn ExpandDatabase, attr: &Attr, krate: CrateId) -> Optio
if !attr.simple_name().as_deref().map(|v| v == "cfg_attr")? {
return None;
}
let cfg_expr = parse_from_attr_meta(attr.meta()?)?;
check_cfg_attr_value(db, &attr.token_tree()?, krate)
}
pub fn check_cfg_attr_value(
db: &dyn ExpandDatabase,
attr: &TokenTree,
krate: CrateId,
) -> Option<bool> {
let cfg_expr = parse_from_attr_token_tree(attr)?;
let enabled = db.crate_graph()[krate].cfg_options.check(&cfg_expr) != Some(false);
Some(enabled)
}
@ -238,8 +246,7 @@ pub(crate) fn process_cfg_attrs(
Some(remove)
}
/// Parses a `cfg` attribute from the meta
fn parse_from_attr_meta(meta: Meta) -> Option<CfgExpr> {
let tt = meta.token_tree()?;
fn parse_from_attr_token_tree(tt: &TokenTree) -> Option<CfgExpr> {
let mut iter = tt
.token_trees_and_tokens()
.filter(is_not_whitespace)
@ -328,7 +335,7 @@ mod tests {
use expect_test::{expect, Expect};
use syntax::{ast::Attr, AstNode, SourceFile};
use crate::cfg_process::parse_from_attr_meta;
use crate::cfg_process::parse_from_attr_token_tree;
fn check_dnf_from_syntax(input: &str, expect: Expect) {
let parse = SourceFile::parse(input, span::Edition::CURRENT);
@ -342,7 +349,7 @@ mod tests {
let node = node.clone_subtree();
assert_eq!(node.syntax().text_range().start(), 0.into());
let cfg = parse_from_attr_meta(node.meta().unwrap()).unwrap();
let cfg = parse_from_attr_token_tree(&node.meta().unwrap().token_tree().unwrap()).unwrap();
let actual = format!("#![cfg({})]", DnfExpr::new(&cfg));
expect.assert_eq(&actual);
}

View File

@ -1,10 +1,10 @@
//! Defines a unit of change that can applied to the database to get the next
//! state. Changes are transactional.
use base_db::{
salsa::Durability, CrateGraph, CrateId, FileChange, SourceRoot, SourceRootDatabase,
TargetLayoutLoadResult, Version,
salsa::Durability, CrateGraph, CrateId, CrateWorkspaceData, FileChange, SourceRoot,
SourceRootDatabase,
};
use la_arena::RawIdx;
use rustc_hash::FxHashMap;
use span::FileId;
use triomphe::Arc;
@ -14,8 +14,6 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros};
pub struct ChangeWithProcMacros {
pub source_change: FileChange,
pub proc_macros: Option<ProcMacros>,
pub toolchains: Option<Vec<Option<Version>>>,
pub target_data_layouts: Option<Vec<TargetLayoutLoadResult>>,
}
impl ChangeWithProcMacros {
@ -28,46 +26,25 @@ impl ChangeWithProcMacros {
if let Some(proc_macros) = self.proc_macros {
db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH);
}
if let Some(target_data_layouts) = self.target_data_layouts {
for (id, val) in target_data_layouts.into_iter().enumerate() {
db.set_data_layout_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
if let Some(toolchains) = self.toolchains {
for (id, val) in toolchains.into_iter().enumerate() {
db.set_toolchain_with_durability(
CrateId::from_raw(RawIdx::from(id as u32)),
val,
Durability::HIGH,
);
}
}
}
pub fn change_file(&mut self, file_id: FileId, new_text: Option<String>) {
self.source_change.change_file(file_id, new_text)
}
pub fn set_crate_graph(&mut self, graph: CrateGraph) {
self.source_change.set_crate_graph(graph)
pub fn set_crate_graph(
&mut self,
graph: CrateGraph,
ws_data: FxHashMap<CrateId, Arc<CrateWorkspaceData>>,
) {
self.source_change.set_crate_graph(graph);
self.source_change.set_ws_data(ws_data);
}
pub fn set_proc_macros(&mut self, proc_macros: ProcMacros) {
self.proc_macros = Some(proc_macros);
}
pub fn set_toolchains(&mut self, toolchains: Vec<Option<Version>>) {
self.toolchains = Some(toolchains);
}
pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec<TargetLayoutLoadResult>) {
self.target_data_layouts = Some(target_data_layouts);
}
pub fn set_roots(&mut self, roots: Vec<SourceRoot>) {
self.source_change.set_roots(roots)
}

View File

@ -1,4 +1,4 @@
//! Compiled declarative macro expanders (`macro_rules!`` and `macro`)
//! Compiled declarative macro expanders (`macro_rules!` and `macro`)
use base_db::CrateId;
use intern::sym;

View File

@ -21,6 +21,7 @@ pub mod span_map;
mod cfg_process;
mod fixup;
mod prettify_macro_expansion_;
use attrs::collect_attrs;
use rustc_hash::FxHashMap;
@ -51,7 +52,11 @@ use crate::{
span_map::{ExpansionSpanMap, SpanMap},
};
pub use crate::files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile};
pub use crate::{
cfg_process::check_cfg_attr_value,
files::{AstId, ErasedAstId, FileRange, InFile, InMacroFile, InRealFile},
prettify_macro_expansion_::prettify_macro_expansion,
};
pub use mbe::{DeclarativeMacro, ValueResult};
pub use span::{HirFileId, MacroCallId, MacroFileId};

View File

@ -0,0 +1,60 @@
//! Pretty printing of macros output.
use base_db::CrateId;
use rustc_hash::FxHashMap;
use syntax::NodeOrToken;
use syntax::{ast::make, SyntaxNode};
use crate::{db::ExpandDatabase, span_map::ExpansionSpanMap};
/// Inserts whitespace and replaces `$crate` in macro expansions.
///
/// Each `$crate` token is rewritten to a path usable from `target_crate_id`:
/// `crate`, a dependency name, or the defining crate's display name (see the
/// decision comments inside). Replacements are cached per syntax context.
#[expect(deprecated)]
pub fn prettify_macro_expansion(
db: &dyn ExpandDatabase,
syn: SyntaxNode,
span_map: &ExpansionSpanMap,
target_crate_id: CrateId,
) -> SyntaxNode {
let crate_graph = db.crate_graph();
let target_crate = &crate_graph[target_crate_id];
// Cache: syntax context -> the token we substitute for `$crate` in it.
let mut syntax_ctx_id_to_dollar_crate_replacement = FxHashMap::default();
syntax_bridge::prettify_macro_expansion::prettify_macro_expansion(syn, &mut |dollar_crate| {
let ctx = span_map.span_at(dollar_crate.text_range().start()).ctx;
let replacement =
syntax_ctx_id_to_dollar_crate_replacement.entry(ctx).or_insert_with(|| {
let ctx_data = db.lookup_intern_syntax_context(ctx);
let macro_call_id =
ctx_data.outer_expn.expect("`$crate` cannot come from `SyntaxContextId::ROOT`");
let macro_call = db.lookup_intern_macro_call(macro_call_id);
let macro_def_crate = macro_call.def.krate;
// First, if this is the same crate as the macro, nothing will work but `crate`.
// If not, if the target trait has the macro's crate as a dependency, using the dependency name
// will work in inserted code and match the user's expectation.
// If not, the crate's display name is what the dependency name is likely to be once such dependency
// is inserted, and also understandable to the user.
// Lastly, if nothing else found, resort to leaving `$crate`.
if target_crate_id == macro_def_crate {
make::tokens::crate_kw()
} else if let Some(dep) =
target_crate.dependencies.iter().find(|dep| dep.crate_id == macro_def_crate)
{
make::tokens::ident(&dep.name)
} else if let Some(crate_name) = &crate_graph[macro_def_crate].display_name {
make::tokens::ident(crate_name.crate_name())
} else {
return dollar_crate.clone();
}
});
if replacement.text() == "$crate" {
// The parent may have many children, and looking for the token may yield incorrect results.
return dollar_crate.clone();
}
// We need to `clone_subtree()` but rowan doesn't provide such operation for tokens.
let parent = replacement.parent().unwrap().clone_subtree().clone_for_update();
parent
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.find(|it| it.kind() == replacement.kind())
.unwrap()
})
}

View File

@ -29,6 +29,7 @@ pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
def_site: Span,
call_site: Span,
mixed_site: Span,
current_dir: Option<String>,
) -> Result<tt::Subtree, ProcMacroExpansionError>;
}
@ -234,8 +235,18 @@ impl CustomProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
{
match proc_macro.expander.expand(
tt,
attr_arg,
env,
def_site,
call_site,
mixed_site,
db.crate_workspace_data()[&calling_crate]
.proc_macro_cwd
.as_ref()
.map(ToString::to_string),
) {
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes

View File

@ -381,9 +381,9 @@ impl chalk_solve::RustIrDatabase<Interner> for ChalkContext<'_> {
TyKind::Error.intern(Interner)
}
fn is_object_safe(&self, _trait_id: chalk_ir::TraitId<Interner>) -> bool {
// FIXME: implement actual object safety
true
fn is_object_safe(&self, trait_id: chalk_ir::TraitId<Interner>) -> bool {
let trait_ = from_chalk_trait_id(trait_id);
crate::object_safety::object_safety(self.db, trait_).is_none()
}
fn closure_kind(

View File

@ -11,7 +11,7 @@ use hir_def::{
ConstBlockLoc, EnumVariantId, GeneralConstId, StaticId,
};
use hir_expand::Lookup;
use stdx::never;
use stdx::{never, IsNoneOr};
use triomphe::Arc;
use crate::{
@ -184,6 +184,22 @@ pub fn try_const_usize(db: &dyn HirDatabase, c: &Const) -> Option<u128> {
}
}
/// Evaluates `c` to a signed 128-bit integer, if it is (or evaluates to) a
/// concrete scalar constant. Returns `None` for bound/inference/placeholder
/// variables and for non-scalar concrete values.
pub fn try_const_isize(db: &dyn HirDatabase, c: &Const) -> Option<i128> {
    // Only concrete constants can yield an integer value.
    let chalk_ir::ConstValue::Concrete(concrete) = &c.data(Interner).value else {
        return None;
    };
    match &concrete.interned {
        // Sign-extend the little-endian byte representation to 16 bytes.
        ConstScalar::Bytes(bytes, _) => Some(i128::from_le_bytes(pad16(bytes, true))),
        // Evaluate the constant first, then retry on the result.
        ConstScalar::UnevaluatedConst(const_id, subst) => {
            let evaluated = db.const_eval(*const_id, subst.clone(), None).ok()?;
            try_const_isize(db, &evaluated)
        }
        _ => None,
    }
}
pub(crate) fn const_eval_recover(
_: &dyn HirDatabase,
_: &Cycle,
@ -256,8 +272,8 @@ pub(crate) fn const_eval_discriminant_variant(
) -> Result<i128, ConstEvalError> {
let def = variant_id.into();
let body = db.body(def);
let loc = variant_id.lookup(db.upcast());
if body.exprs[body.body_expr] == Expr::Missing {
let loc = variant_id.lookup(db.upcast());
let prev_idx = loc.index.checked_sub(1);
let value = match prev_idx {
Some(prev_idx) => {
@ -269,13 +285,21 @@ pub(crate) fn const_eval_discriminant_variant(
};
return Ok(value);
}
let repr = db.enum_data(loc.parent).repr;
let is_signed = IsNoneOr::is_none_or(repr.and_then(|repr| repr.int), |int| int.is_signed());
let mir_body = db.monomorphized_mir_body(
def,
Substitution::empty(Interner),
db.trait_environment_for_body(def),
)?;
let c = interpret_mir(db, mir_body, false, None).0?;
let c = try_const_usize(db, &c).unwrap() as i128;
let c = if is_signed {
try_const_isize(db, &c).unwrap()
} else {
try_const_usize(db, &c).unwrap() as i128
};
Ok(c)
}

View File

@ -186,7 +186,13 @@ fn floating_point() {
#[test]
fn casts() {
check_number(r#"const GOAL: usize = 12 as *const i32 as usize"#, 12);
check_number(
r#"
//- minicore: sized
const GOAL: usize = 12 as *const i32 as usize
"#,
12,
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
@ -204,7 +210,7 @@ fn casts() {
r#"
//- minicore: coerce_unsized, index, slice
const GOAL: i16 = {
let a = &mut 5;
let a = &mut 5_i16;
let z = a as *mut _;
unsafe { *z }
};
@ -244,7 +250,13 @@ fn casts() {
"#,
4,
);
check_number(r#"const GOAL: i32 = -12i8 as i32"#, -12);
check_number(
r#"
//- minicore: sized
const GOAL: i32 = -12i8 as i32
"#,
-12,
);
}
#[test]
@ -1544,7 +1556,7 @@ fn builtin_derive_macro() {
Bar,
}
#[derive(Clone)]
struct X(i32, Z, i64)
struct X(i32, Z, i64);
#[derive(Clone)]
struct Y {
field1: i32,
@ -1562,20 +1574,20 @@ fn builtin_derive_macro() {
);
check_number(
r#"
//- minicore: default, derive, builtin_impls
#[derive(Default)]
struct X(i32, Y, i64)
#[derive(Default)]
struct Y {
field1: i32,
field2: u8,
}
//- minicore: default, derive, builtin_impls
#[derive(Default)]
struct X(i32, Y, i64);
#[derive(Default)]
struct Y {
field1: i32,
field2: u8,
}
const GOAL: u8 = {
let x = X::default();
x.1.field2
};
"#,
const GOAL: u8 = {
let x = X::default();
x.1.field2
};
"#,
0,
);
}
@ -1911,6 +1923,7 @@ fn function_pointer() {
);
check_number(
r#"
//- minicore: sized
fn add2(x: u8) -> u8 {
x + 2
}
@ -2007,7 +2020,7 @@ fn function_traits() {
);
check_number(
r#"
//- minicore: coerce_unsized, fn
//- minicore: coerce_unsized, fn, dispatch_from_dyn
fn add2(x: u8) -> u8 {
x + 2
}
@ -2062,7 +2075,7 @@ fn function_traits() {
fn dyn_trait() {
check_number(
r#"
//- minicore: coerce_unsized, index, slice
//- minicore: coerce_unsized, index, slice, dispatch_from_dyn
trait Foo {
fn foo(&self) -> u8 { 10 }
}
@ -2085,7 +2098,7 @@ fn dyn_trait() {
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
//- minicore: coerce_unsized, index, slice, dispatch_from_dyn
trait Foo {
fn foo(&self) -> i32 { 10 }
}
@ -2109,7 +2122,7 @@ fn dyn_trait() {
);
check_number(
r#"
//- minicore: coerce_unsized, index, slice
//- minicore: coerce_unsized, index, slice, dispatch_from_dyn
trait A {
fn x(&self) -> i32;
}
@ -2422,6 +2435,7 @@ fn statics() {
fn extern_weak_statics() {
check_number(
r#"
//- minicore: sized
extern "C" {
#[linkage = "extern_weak"]
static __dso_handle: *mut u8;
@ -2716,6 +2730,7 @@ fn const_trait_assoc() {
);
check_number(
r#"
//- minicore: sized
struct S<T>(*mut T);
trait MySized: Sized {
@ -2813,7 +2828,7 @@ fn type_error() {
y.0
};
"#,
|e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::TypeMismatch(_))),
|e| matches!(e, ConstEvalError::MirLowerError(MirLowerError::HasErrors)),
);
}

View File

@ -89,7 +89,7 @@ fn size_of_val() {
);
check_number(
r#"
//- minicore: coerce_unsized, fmt, builtin_impls
//- minicore: coerce_unsized, fmt, builtin_impls, dispatch_from_dyn
extern "rust-intrinsic" {
pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
}
@ -311,6 +311,7 @@ fn saturating() {
fn allocator() {
check_number(
r#"
//- minicore: sized
extern "Rust" {
#[rustc_allocator]
fn __rust_alloc(size: usize, align: usize) -> *mut u8;

View File

@ -11,7 +11,7 @@ use base_db::{
use hir_def::{
db::DefDatabase, hir::ExprId, layout::TargetDataLayout, AdtId, BlockId, CallableDefId,
ConstParamId, DefWithBodyId, EnumVariantId, FunctionId, GeneralConstId, GenericDefId, ImplId,
LifetimeParamId, LocalFieldId, StaticId, TypeAliasId, TypeOrConstParamId, VariantId,
LifetimeParamId, LocalFieldId, StaticId, TraitId, TypeAliasId, TypeOrConstParamId, VariantId,
};
use la_arena::ArenaMap;
use smallvec::SmallVec;
@ -24,6 +24,7 @@ use crate::{
lower::{GenericDefaults, GenericPredicates},
method_resolution::{InherentImpls, TraitImpls, TyFingerprint},
mir::{BorrowckResult, MirBody, MirLowerError},
object_safety::ObjectSafetyViolation,
Binders, ClosureId, Const, FnDefId, ImplTraitId, ImplTraits, InferenceResult, Interner,
PolyFnSig, Substitution, TraitEnvironment, TraitRef, Ty, TyDefId, ValueTyDefId,
};
@ -107,6 +108,9 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::layout::target_data_layout_query)]
fn target_data_layout(&self, krate: CrateId) -> Result<Arc<TargetDataLayout>, Arc<str>>;
#[salsa::invoke(crate::object_safety::object_safety_of_trait_query)]
fn object_safety_of_trait(&self, trait_: TraitId) -> Option<ObjectSafetyViolation>;
#[salsa::invoke(crate::lower::ty_query)]
#[salsa::cycle(crate::lower::ty_recover)]
fn ty(&self, def: TyDefId) -> Binders<Ty>;
@ -150,6 +154,9 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
#[salsa::invoke(crate::lower::generic_predicates_query)]
fn generic_predicates(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::generic_predicates_without_parent_query)]
fn generic_predicates_without_parent(&self, def: GenericDefId) -> GenericPredicates;
#[salsa::invoke(crate::lower::trait_environment_for_body_query)]
#[salsa::transparent]
fn trait_environment_for_body(&self, def: DefWithBodyId) -> Arc<TraitEnvironment>;

View File

@ -16,13 +16,13 @@ mod case_conv;
use std::fmt;
use hir_def::{
data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, AttrDefId, ConstId,
EnumId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId,
StaticId, StructId, TraitId, TypeAliasId,
data::adt::VariantData, db::DefDatabase, hir::Pat, src::HasSource, AdtId, ConstId, EnumId,
EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup, ModuleDefId, ModuleId, StaticId,
StructId, TraitId, TypeAliasId,
};
use hir_expand::{
name::{AsName, Name},
HirFileId, HirFileIdExt, MacroFileIdExt,
HirFileId, HirFileIdExt,
};
use intern::sym;
use stdx::{always, never};
@ -36,14 +36,6 @@ use crate::db::HirDatabase;
use self::case_conv::{to_camel_case, to_lower_snake_case, to_upper_snake_case};
mod allow {
pub(super) const BAD_STYLE: &str = "bad_style";
pub(super) const NONSTANDARD_STYLE: &str = "nonstandard_style";
pub(super) const NON_SNAKE_CASE: &str = "non_snake_case";
pub(super) const NON_UPPER_CASE_GLOBAL: &str = "non_upper_case_globals";
pub(super) const NON_CAMEL_CASE_TYPES: &str = "non_camel_case_types";
}
pub fn incorrect_case(db: &dyn HirDatabase, owner: ModuleDefId) -> Vec<IncorrectCase> {
let _p = tracing::info_span!("incorrect_case").entered();
let mut validator = DeclValidator::new(db);
@ -160,92 +152,7 @@ impl<'a> DeclValidator<'a> {
}
}
/// Checks whether not following the convention is allowed for this item.
fn allowed(&self, id: AttrDefId, allow_name: &str, recursing: bool) -> bool {
let is_allowed = |def_id| {
let attrs = self.db.attrs(def_id);
// don't bug the user about directly no_mangle annotated stuff, they can't do anything about it
(!recursing && attrs.by_key(&sym::no_mangle).exists())
|| attrs.by_key(&sym::allow).tt_values().any(|tt| {
let allows = tt.to_string();
allows.contains(allow_name)
|| allows.contains(allow::BAD_STYLE)
|| allows.contains(allow::NONSTANDARD_STYLE)
})
};
let db = self.db.upcast();
let file_id_is_derive = || {
match id {
AttrDefId::ModuleId(m) => {
m.def_map(db)[m.local_id].origin.file_id().map(Into::into)
}
AttrDefId::FunctionId(f) => Some(f.lookup(db).id.file_id()),
AttrDefId::StaticId(sid) => Some(sid.lookup(db).id.file_id()),
AttrDefId::ConstId(cid) => Some(cid.lookup(db).id.file_id()),
AttrDefId::TraitId(tid) => Some(tid.lookup(db).id.file_id()),
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(db).id.file_id()),
AttrDefId::ImplId(iid) => Some(iid.lookup(db).id.file_id()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(db).id.file_id()),
AttrDefId::ExternCrateId(id) => Some(id.lookup(db).id.file_id()),
AttrDefId::UseId(id) => Some(id.lookup(db).id.file_id()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
AdtId::StructId(sid) => Some(sid.lookup(db).id.file_id()),
AdtId::EnumId(eid) => Some(eid.lookup(db).id.file_id()),
// Unions aren't yet supported
AdtId::UnionId(_) => None,
},
AttrDefId::FieldId(_) => None,
AttrDefId::EnumVariantId(_) => None,
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
.map_or(false, |file_id| {
matches!(file_id.macro_file(), Some(file_id) if file_id.is_custom_derive(db.upcast()) || file_id.is_builtin_derive(db.upcast()))
})
};
let parent = || {
match id {
AttrDefId::ModuleId(m) => m.containing_module(db).map(|v| v.into()),
AttrDefId::FunctionId(f) => Some(f.lookup(db).container.into()),
AttrDefId::StaticId(sid) => Some(sid.lookup(db).container.into()),
AttrDefId::ConstId(cid) => Some(cid.lookup(db).container.into()),
AttrDefId::TraitId(tid) => Some(tid.lookup(db).container.into()),
AttrDefId::TraitAliasId(taid) => Some(taid.lookup(db).container.into()),
AttrDefId::ImplId(iid) => Some(iid.lookup(db).container.into()),
AttrDefId::ExternBlockId(id) => Some(id.lookup(db).container.into()),
AttrDefId::ExternCrateId(id) => Some(id.lookup(db).container.into()),
AttrDefId::UseId(id) => Some(id.lookup(db).container.into()),
// These warnings should not explore macro definitions at all
AttrDefId::MacroId(_) => None,
AttrDefId::AdtId(aid) => match aid {
AdtId::StructId(sid) => Some(sid.lookup(db).container.into()),
AdtId::EnumId(eid) => Some(eid.lookup(db).container.into()),
// Unions aren't yet supported
AdtId::UnionId(_) => None,
},
AttrDefId::FieldId(_) => None,
AttrDefId::EnumVariantId(_) => None,
AttrDefId::TypeAliasId(_) => None,
AttrDefId::GenericParamId(_) => None,
}
.is_some_and(|mid| self.allowed(mid, allow_name, true))
};
is_allowed(id)
// FIXME: this is a hack to avoid false positives in derive macros currently
|| file_id_is_derive()
// go upwards one step or give up
|| parent()
}
fn validate_module(&mut self, module_id: ModuleId) {
// Check whether non-snake case identifiers are allowed for this module.
if self.allowed(module_id.into(), allow::NON_SNAKE_CASE, false) {
return;
}
// Check the module name.
let Some(module_name) = module_id.name(self.db.upcast()) else { return };
let Some(module_name_replacement) =
@ -270,11 +177,6 @@ impl<'a> DeclValidator<'a> {
}
fn validate_trait(&mut self, trait_id: TraitId) {
// Check whether non-snake case identifiers are allowed for this trait.
if self.allowed(trait_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
}
// Check the trait name.
let data = self.db.trait_data(trait_id);
self.create_incorrect_case_diagnostic_for_item_name(
@ -292,21 +194,24 @@ impl<'a> DeclValidator<'a> {
return;
}
// Check whether non-snake case identifiers are allowed for this function.
if self.allowed(func.into(), allow::NON_SNAKE_CASE, false) {
return;
}
// Check the function name.
// Skipped if function is an associated item of a trait implementation.
if !self.is_trait_impl_container(container) {
let data = self.db.function_data(func);
self.create_incorrect_case_diagnostic_for_item_name(
func,
&data.name,
CaseType::LowerSnakeCase,
IdentType::Function,
);
// Don't run the lint on extern "[not Rust]" fn items with the
// #[no_mangle] attribute.
let no_mangle = data.attrs.by_key(&sym::no_mangle).exists();
if no_mangle && data.abi.as_ref().is_some_and(|abi| *abi != sym::Rust) {
cov_mark::hit!(extern_func_no_mangle_ignored);
} else {
self.create_incorrect_case_diagnostic_for_item_name(
func,
&data.name,
CaseType::LowerSnakeCase,
IdentType::Function,
);
}
} else {
cov_mark::hit!(trait_impl_assoc_func_name_incorrect_case_ignored);
}
@ -389,17 +294,13 @@ impl<'a> DeclValidator<'a> {
fn validate_struct(&mut self, struct_id: StructId) {
// Check the structure name.
let non_camel_case_allowed =
self.allowed(struct_id.into(), allow::NON_CAMEL_CASE_TYPES, false);
if !non_camel_case_allowed {
let data = self.db.struct_data(struct_id);
self.create_incorrect_case_diagnostic_for_item_name(
struct_id,
&data.name,
CaseType::UpperCamelCase,
IdentType::Structure,
);
}
let data = self.db.struct_data(struct_id);
self.create_incorrect_case_diagnostic_for_item_name(
struct_id,
&data.name,
CaseType::UpperCamelCase,
IdentType::Structure,
);
// Check the field names.
self.validate_struct_fields(struct_id);
@ -407,10 +308,6 @@ impl<'a> DeclValidator<'a> {
/// Check incorrect names for struct fields.
fn validate_struct_fields(&mut self, struct_id: StructId) {
if self.allowed(struct_id.into(), allow::NON_SNAKE_CASE, false) {
return;
}
let data = self.db.struct_data(struct_id);
let VariantData::Record(fields) = data.variant_data.as_ref() else {
return;
@ -484,11 +381,6 @@ impl<'a> DeclValidator<'a> {
fn validate_enum(&mut self, enum_id: EnumId) {
let data = self.db.enum_data(enum_id);
// Check whether non-camel case names are allowed for this enum.
if self.allowed(enum_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
}
// Check the enum name.
self.create_incorrect_case_diagnostic_for_item_name(
enum_id,
@ -653,10 +545,6 @@ impl<'a> DeclValidator<'a> {
return;
}
if self.allowed(const_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
let data = self.db.const_data(const_id);
let Some(name) = &data.name else {
return;
@ -676,10 +564,6 @@ impl<'a> DeclValidator<'a> {
return;
}
if self.allowed(static_id.into(), allow::NON_UPPER_CASE_GLOBAL, false) {
return;
}
self.create_incorrect_case_diagnostic_for_item_name(
static_id,
&data.name,
@ -695,11 +579,6 @@ impl<'a> DeclValidator<'a> {
return;
}
// Check whether non-snake case identifiers are allowed for this type alias.
if self.allowed(type_alias_id.into(), allow::NON_CAMEL_CASE_TYPES, false) {
return;
}
// Check the type alias name.
let data = self.db.type_alias_data(type_alias_id);
self.create_incorrect_case_diagnostic_for_item_name(

View File

@ -5,6 +5,7 @@ use hir_def::{
body::Body,
hir::{Expr, ExprId, UnaryOp},
resolver::{resolver_for_expr, ResolveValueResult, Resolver, ValueNs},
type_ref::Rawness,
DefWithBodyId,
};
@ -12,7 +13,10 @@ use crate::{
db::HirDatabase, utils::is_fn_unsafe_to_call, InferenceResult, Interner, TyExt, TyKind,
};
pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
/// Returns `(unsafe_exprs, fn_is_unsafe)`.
///
/// If `fn_is_unsafe` is false, `unsafe_exprs` are hard errors. If true, they're `unsafe_op_in_unsafe_fn`.
pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> (Vec<ExprId>, bool) {
let _p = tracing::info_span!("missing_unsafe").entered();
let mut res = Vec::new();
@ -23,9 +27,6 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
| DefWithBodyId::VariantId(_)
| DefWithBodyId::InTypeConstId(_) => false,
};
if is_unsafe {
return res;
}
let body = db.body(def);
let infer = db.infer(def);
@ -35,7 +36,7 @@ pub fn missing_unsafe(db: &dyn HirDatabase, def: DefWithBodyId) -> Vec<ExprId> {
}
});
res
(res, is_unsafe)
}
pub struct UnsafeExpr {
@ -87,12 +88,20 @@ fn walk_unsafe(
let g = resolver.update_to_inner_scope(db.upcast(), def, current);
let value_or_partial = resolver.resolve_path_in_value_ns(db.upcast(), path);
if let Some(ResolveValueResult::ValueNs(ValueNs::StaticId(id), _)) = value_or_partial {
if db.static_data(id).mutable {
let static_data = db.static_data(id);
if static_data.mutable || static_data.is_extern {
unsafe_expr_cb(UnsafeExpr { expr: current, inside_unsafe_block });
}
}
resolver.reset_to_guard(g);
}
Expr::Ref { expr, rawness: Rawness::RawPtr, mutability: _ } => {
if let Expr::Path(_) = body.exprs[*expr] {
// Do not report unsafe for `addr_of[_mut]!(EXTERN_OR_MUT_STATIC)`,
// see https://github.com/rust-lang/rust/pull/125834.
return;
}
}
Expr::MethodCall { .. } => {
if infer
.method_resolution(current)

View File

@ -225,6 +225,23 @@ impl Generics {
}
}
/// Returns the index of the trait's implicit `Self` type parameter within the
/// generics of `def`, if `def` is a trait (alias) or an item nested in one.
pub(crate) fn trait_self_param_idx(db: &dyn DefDatabase, def: GenericDefId) -> Option<usize> {
    match def {
        // Traits and trait aliases carry their `Self` parameter directly.
        GenericDefId::TraitId(_) | GenericDefId::TraitAliasId(_) => {
            let own_params = db.generic_params(def);
            let self_idx = own_params.trait_self_param()?;
            Some(self_idx.into_raw().into_u32() as usize)
        }
        // Impls never have a trait `Self` parameter.
        GenericDefId::ImplId(_) => None,
        // Other items inherit `Self` from their parent; its index comes after
        // the item's own parameters.
        _ => {
            let parent = parent_generic_def(db, def)?;
            let parent_self_idx =
                db.generic_params(parent).trait_self_param()?.into_raw().into_u32() as usize;
            let own_param_count = db.generic_params(def).len();
            Some(own_param_count + parent_self_idx)
        }
    }
}
fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
let container = match def {
GenericDefId::FunctionId(it) => it.lookup(db).container,

View File

@ -13,7 +13,7 @@
//! to certain types. To record this, we use the union-find implementation from
//! the `ena` crate, which is extracted from rustc.
mod cast;
pub(crate) mod cast;
pub(crate) mod closure;
mod coerce;
mod expr;
@ -76,7 +76,7 @@ pub use coerce::could_coerce;
#[allow(unreachable_pub)]
pub use unify::{could_unify, could_unify_deeply};
use cast::CastCheck;
use cast::{CastCheck, CastError};
pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy};
/// The entry point of type inference.
@ -254,6 +254,16 @@ pub enum InferenceDiagnostic {
expr: ExprId,
expected: Ty,
},
CastToUnsized {
expr: ExprId,
cast_ty: Ty,
},
InvalidCast {
expr: ExprId,
error: CastError,
expr_ty: Ty,
cast_ty: Ty,
},
}
/// A mismatch between an expected and an inferred type.
@ -456,6 +466,7 @@ pub struct InferenceResult {
pub(crate) closure_info: FxHashMap<ClosureId, (Vec<CapturedItem>, FnTrait)>,
// FIXME: remove this field
pub mutated_bindings_in_closure: FxHashSet<BindingId>,
pub coercion_casts: FxHashSet<ExprId>,
}
impl InferenceResult {
@ -666,7 +677,7 @@ impl<'a> InferenceContext<'a> {
let InferenceContext {
mut table,
mut result,
deferred_cast_checks,
mut deferred_cast_checks,
tuple_field_accesses_rev,
..
} = self;
@ -695,15 +706,25 @@ impl<'a> InferenceContext<'a> {
closure_info: _,
mutated_bindings_in_closure: _,
tuple_field_access_types: _,
coercion_casts,
} = &mut result;
table.fallback_if_possible();
// Comment from rustc:
// Even though coercion casts provide type hints, we check casts after fallback for
// backwards compatibility. This makes fallback a stronger type hint than a cast coercion.
for cast in deferred_cast_checks {
cast.check(&mut table);
let mut apply_adjustments = |expr, adj| {
expr_adjustments.insert(expr, adj);
};
let mut set_coercion_cast = |expr| {
coercion_casts.insert(expr);
};
for cast in deferred_cast_checks.iter_mut() {
if let Err(diag) =
cast.check(&mut table, &mut apply_adjustments, &mut set_coercion_cast)
{
diagnostics.push(diag);
}
}
// FIXME resolve obligations as well (use Guidance if necessary)
@ -732,7 +753,7 @@ impl<'a> InferenceContext<'a> {
*has_errors = *has_errors || ty.contains_unknown();
}
*has_errors = !type_mismatches.is_empty();
*has_errors |= !type_mismatches.is_empty();
type_mismatches.retain(|_, mismatch| {
mismatch.expected = table.resolve_completely(mismatch.expected.clone());
@ -775,20 +796,30 @@ impl<'a> InferenceContext<'a> {
});
for (_, subst) in method_resolutions.values_mut() {
*subst = table.resolve_completely(subst.clone());
*has_errors =
*has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
}
for (_, subst) in assoc_resolutions.values_mut() {
*subst = table.resolve_completely(subst.clone());
*has_errors =
*has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
}
for adjustment in expr_adjustments.values_mut().flatten() {
adjustment.target = table.resolve_completely(adjustment.target.clone());
*has_errors = *has_errors || adjustment.target.contains_unknown();
}
for adjustment in pat_adjustments.values_mut().flatten() {
*adjustment = table.resolve_completely(adjustment.clone());
*has_errors = *has_errors || adjustment.contains_unknown();
}
result.tuple_field_access_types = tuple_field_accesses_rev
.into_iter()
.enumerate()
.map(|(idx, subst)| (TupleId(idx as u32), table.resolve_completely(subst)))
.inspect(|(_, subst)| {
*has_errors =
*has_errors || subst.type_parameters(Interner).any(|ty| ty.contains_unknown());
})
.collect();
result
}

View File

@ -1,47 +1,451 @@
//! Type cast logic. Basically coercion + additional casts.
use crate::{infer::unify::InferenceTable, Interner, Ty, TyExt, TyKind};
use chalk_ir::{Mutability, Scalar, TyVariableKind, UintTy};
use hir_def::{hir::ExprId, AdtId};
use stdx::never;
use crate::{
infer::unify::InferenceTable, Adjustment, Binders, DynTy, InferenceDiagnostic, Interner,
PlaceholderIndex, QuantifiedWhereClauses, Ty, TyExt, TyKind, TypeFlags, WhereClause,
};
/// Integer-like categories a type can fall into for cast checking;
/// produced by `CastTy::from_ty`.
#[derive(Debug)]
pub(crate) enum Int {
// A signed integer type.
I,
// An unsigned integer type, keeping its exact kind.
U(UintTy),
// `bool`.
Bool,
// `char`.
Char,
// A payload-free ("C-like") enum.
CEnum,
// An unresolved integer inference variable (`{integer}`).
InferenceVar,
}
/// Classification of the types that may appear as source or target of an
/// `as` cast; types outside these categories cannot be cast (non-coercively).
#[derive(Debug)]
pub(crate) enum CastTy {
// Integer-like types; see `Int`.
Int(Int),
// Floating-point types, including float inference variables.
Float,
// Function pointers.
FnPtr,
// Raw pointers, carrying the (shallow-resolved) pointee type and mutability.
Ptr(Ty, Mutability),
// `DynStar` is not supported yet in r-a
}
impl CastTy {
/// Classifies `t` for cast checking; returns `None` for types that can
/// never participate in a primitive cast (structs, references, slices, ...).
pub(crate) fn from_ty(table: &mut InferenceTable<'_>, t: &Ty) -> Option<Self> {
match t.kind(Interner) {
TyKind::Scalar(Scalar::Bool) => Some(Self::Int(Int::Bool)),
TyKind::Scalar(Scalar::Char) => Some(Self::Int(Int::Char)),
TyKind::Scalar(Scalar::Int(_)) => Some(Self::Int(Int::I)),
TyKind::Scalar(Scalar::Uint(it)) => Some(Self::Int(Int::U(*it))),
TyKind::InferenceVar(_, TyVariableKind::Integer) => Some(Self::Int(Int::InferenceVar)),
TyKind::InferenceVar(_, TyVariableKind::Float) => Some(Self::Float),
TyKind::Scalar(Scalar::Float(_)) => Some(Self::Float),
TyKind::Adt(..) => {
// Among ADTs, only payload-free ("C-like") enums are castable.
let (AdtId::EnumId(id), _) = t.as_adt()? else {
return None;
};
let enum_data = table.db.enum_data(id);
if enum_data.is_payload_free(table.db.upcast()) {
Some(Self::Int(Int::CEnum))
} else {
None
}
}
// Resolve the pointee shallowly so later checks see through inference vars.
TyKind::Raw(m, ty) => Some(Self::Ptr(table.resolve_ty_shallow(ty), *m)),
TyKind::Function(_) => Some(Self::FnPtr),
_ => None,
}
}
}
/// The reason a cast expression is invalid; surfaced to the user through
/// `InferenceDiagnostic::InvalidCast` (see `into_diagnostic`).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum CastError {
Unknown,
// Nothing casts to `bool`.
CastToBool,
// Only `u8` casts to `char`.
CastToChar,
// e.g. fat pointers whose principal traits differ (and aren't both auto-only).
DifferingKinds,
// Thin pointer cast to a fat (unsized-pointee) pointer.
SizedUnsizedCast,
IllegalCast,
// An integer cannot be cast to a fat (vtable- or length-carrying) pointer.
IntToFatCast,
NeedDeref,
NeedViaPtr,
NeedViaThinPtr,
NeedViaInt,
NonScalar,
UnknownCastPtrKind,
UnknownExprPtrKind,
}
impl CastError {
/// Attaches the offending expression and the source/target types, producing
/// a reportable `InferenceDiagnostic::InvalidCast`.
fn into_diagnostic(self, expr: ExprId, expr_ty: Ty, cast_ty: Ty) -> InferenceDiagnostic {
InferenceDiagnostic::InvalidCast { expr, error: self, expr_ty, cast_ty }
}
}
/// A deferred check of one `expr as Ty` cast. Casts are collected during
/// inference and checked afterwards (after fallback), so coercions act as a
/// weaker hint than fallback.
#[derive(Clone, Debug)]
pub(super) struct CastCheck {
// The whole cast expression — diagnostics are attached to this.
expr: ExprId,
// The operand being cast — coercion adjustments are applied to this.
source_expr: ExprId,
expr_ty: Ty,
cast_ty: Ty,
}
impl CastCheck {
pub(super) fn new(expr_ty: Ty, cast_ty: Ty) -> Self {
Self { expr_ty, cast_ty }
pub(super) fn new(expr: ExprId, source_expr: ExprId, expr_ty: Ty, cast_ty: Ty) -> Self {
Self { expr, source_expr, expr_ty, cast_ty }
}
pub(super) fn check(self, table: &mut InferenceTable<'_>) {
// FIXME: This function currently only implements the bits that influence the type
// inference. We should return the adjustments on success and report diagnostics on error.
let expr_ty = table.resolve_ty_shallow(&self.expr_ty);
let cast_ty = table.resolve_ty_shallow(&self.cast_ty);
pub(super) fn check<F, G>(
&mut self,
table: &mut InferenceTable<'_>,
apply_adjustments: &mut F,
set_coercion_cast: &mut G,
) -> Result<(), InferenceDiagnostic>
where
F: FnMut(ExprId, Vec<Adjustment>),
G: FnMut(ExprId),
{
table.resolve_obligations_as_possible();
self.expr_ty = table.resolve_ty_shallow(&self.expr_ty);
self.cast_ty = table.resolve_ty_shallow(&self.cast_ty);
if table.coerce(&expr_ty, &cast_ty).is_ok() {
return;
if self.expr_ty.contains_unknown() || self.cast_ty.contains_unknown() {
return Ok(());
}
if check_ref_to_ptr_cast(expr_ty, cast_ty, table) {
// Note that this type of cast is actually split into a coercion to a
// pointer type and a cast:
// &[T; N] -> *[T; N] -> *T
if !self.cast_ty.data(Interner).flags.contains(TypeFlags::HAS_TY_INFER)
&& !table.is_sized(&self.cast_ty)
{
return Err(InferenceDiagnostic::CastToUnsized {
expr: self.expr,
cast_ty: self.cast_ty.clone(),
});
}
// FIXME: Check other kinds of non-coercion casts and report error if any?
// Chalk doesn't support trait upcasting and fails to solve some obvious goals
// when the trait environment contains some recursive traits (See issue #18047)
// We skip cast checks for such cases for now, until the next-gen solver.
if contains_dyn_trait(&self.cast_ty) {
return Ok(());
}
if let Ok((adj, _)) = table.coerce(&self.expr_ty, &self.cast_ty) {
apply_adjustments(self.source_expr, adj);
set_coercion_cast(self.source_expr);
return Ok(());
}
self.do_check(table, apply_adjustments)
.map_err(|e| e.into_diagnostic(self.expr, self.expr_ty.clone(), self.cast_ty.clone()))
}
/// The core cast-kind table, mirroring rustc's cast check: classify the
/// source and target types, handle the `FnDef`-to-fn-pointer and
/// reference-source special cases, then accept or reject the (from, to) pair.
fn do_check<F>(
&self,
table: &mut InferenceTable<'_>,
apply_adjustments: &mut F,
) -> Result<(), CastError>
where
F: FnMut(ExprId, Vec<Adjustment>),
{
let (t_from, t_cast) =
match (CastTy::from_ty(table, &self.expr_ty), CastTy::from_ty(table, &self.cast_ty)) {
(Some(t_from), Some(t_cast)) => (t_from, t_cast),
(None, Some(t_cast)) => match self.expr_ty.kind(Interner) {
// A `fn` item is first coerced to a fn pointer; the coercion is
// recorded as an adjustment on the source expression.
TyKind::FnDef(..) => {
let sig = self.expr_ty.callable_sig(table.db).expect("FnDef had no sig");
let sig = table.normalize_associated_types_in(sig);
let fn_ptr = TyKind::Function(sig.to_fn_ptr()).intern(Interner);
if let Ok((adj, _)) = table.coerce(&self.expr_ty, &fn_ptr) {
apply_adjustments(self.source_expr, adj);
} else {
return Err(CastError::IllegalCast);
}
(CastTy::FnPtr, t_cast)
}
// A reference source: either the user should deref/go via a raw
// pointer, or this is the array-to-pointer special case.
TyKind::Ref(mutbl, _, inner_ty) => {
let inner_ty = table.resolve_ty_shallow(inner_ty);
return match t_cast {
CastTy::Int(_) | CastTy::Float => match inner_ty.kind(Interner) {
TyKind::Scalar(
Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_),
)
| TyKind::InferenceVar(
_,
TyVariableKind::Integer | TyVariableKind::Float,
) => Err(CastError::NeedDeref),
_ => Err(CastError::NeedViaPtr),
},
// array-ptr-cast
CastTy::Ptr(t, m) => {
let t = table.resolve_ty_shallow(&t);
if !table.is_sized(&t) {
return Err(CastError::IllegalCast);
}
self.check_ref_cast(
table,
&inner_ty,
*mutbl,
&t,
m,
apply_adjustments,
)
}
_ => Err(CastError::NonScalar),
};
}
_ => return Err(CastError::NonScalar),
},
_ => return Err(CastError::NonScalar),
};
// rustc checks whether the `expr_ty` is foreign adt with `non_exhaustive` sym
match (t_from, t_cast) {
(_, CastTy::Int(Int::CEnum) | CastTy::FnPtr) => Err(CastError::NonScalar),
(_, CastTy::Int(Int::Bool)) => Err(CastError::CastToBool),
// `u8 as char` is the only valid cast into `char`.
(CastTy::Int(Int::U(UintTy::U8)), CastTy::Int(Int::Char)) => Ok(()),
(_, CastTy::Int(Int::Char)) => Err(CastError::CastToChar),
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char), CastTy::Float) => {
Err(CastError::NeedViaInt)
}
(CastTy::Int(Int::Bool | Int::CEnum | Int::Char) | CastTy::Float, CastTy::Ptr(..))
| (CastTy::Ptr(..) | CastTy::FnPtr, CastTy::Float) => Err(CastError::IllegalCast),
(CastTy::Ptr(src, _), CastTy::Ptr(dst, _)) => {
self.check_ptr_ptr_cast(table, &src, &dst)
}
(CastTy::Ptr(src, _), CastTy::Int(_)) => self.check_ptr_addr_cast(table, &src),
(CastTy::Int(_), CastTy::Ptr(dst, _)) => self.check_addr_ptr_cast(table, &dst),
(CastTy::FnPtr, CastTy::Ptr(dst, _)) => self.check_fptr_ptr_cast(table, &dst),
(CastTy::Int(Int::CEnum), CastTy::Int(_)) => Ok(()),
(CastTy::Int(Int::Char | Int::Bool), CastTy::Int(_)) => Ok(()),
(CastTy::Int(_) | CastTy::Float, CastTy::Int(_) | CastTy::Float) => Ok(()),
(CastTy::FnPtr, CastTy::Int(_)) => Ok(()),
}
}
/// Checks the array-to-pointer special case `&[T; N] as *T`: the reference is
/// coerced to `*[T; N]` (recorded as an adjustment so MIR sees a raw pointer)
/// and the element type must coerce to the cast pointee. Mutability may only
/// be weakened (`&mut _ as *const _` is fine, `& _ as *mut _` is not).
fn check_ref_cast<F>(
&self,
table: &mut InferenceTable<'_>,
t_expr: &Ty,
m_expr: Mutability,
t_cast: &Ty,
m_cast: Mutability,
apply_adjustments: &mut F,
) -> Result<(), CastError>
where
F: FnMut(ExprId, Vec<Adjustment>),
{
// Mutability order is opposite to rustc. `Mut < Not`
if m_expr <= m_cast {
if let TyKind::Array(ety, _) = t_expr.kind(Interner) {
// Coerce to a raw pointer so that we generate RawPtr in MIR.
let array_ptr_type = TyKind::Raw(m_expr, t_expr.clone()).intern(Interner);
if let Ok((adj, _)) = table.coerce(&self.expr_ty, &array_ptr_type) {
apply_adjustments(self.source_expr, adj);
} else {
never!(
"could not cast from reference to array to pointer to array ({:?} to {:?})",
self.expr_ty,
array_ptr_type
);
}
// This is a less strict condition than rustc's `demand_eqtype`,
// but false negative is better than false positive
if table.coerce(ety, t_cast).is_ok() {
return Ok(());
}
}
}
Err(CastError::IllegalCast)
}
/// Pointer-to-pointer casts: anything may be cast to a thin pointer; a thin
/// pointer cannot be cast to a fat one; `dyn`-to-`dyn` casts require the same
/// principal trait (or both sides having none, i.e. auto traits only).
fn check_ptr_ptr_cast(
&self,
table: &mut InferenceTable<'_>,
src: &Ty,
dst: &Ty,
) -> Result<(), CastError> {
let src_kind = pointer_kind(src, table).map_err(|_| CastError::Unknown)?;
let dst_kind = pointer_kind(dst, table).map_err(|_| CastError::Unknown)?;
match (src_kind, dst_kind) {
(Some(PointerKind::Error), _) | (_, Some(PointerKind::Error)) => Ok(()),
(_, None) => Err(CastError::UnknownCastPtrKind),
(_, Some(PointerKind::Thin)) => Ok(()),
(None, _) => Err(CastError::UnknownExprPtrKind),
(Some(PointerKind::Thin), _) => Err(CastError::SizedUnsizedCast),
(Some(PointerKind::VTable(src_tty)), Some(PointerKind::VTable(dst_tty))) => {
// The principal trait is taken from the first `Implemented` clause
// of the dyn type's bounds (presumably listed first by construction
// — TODO confirm against the dyn-type lowering).
let principal = |tty: &Binders<QuantifiedWhereClauses>| {
tty.skip_binders().as_slice(Interner).first().and_then(|pred| {
if let WhereClause::Implemented(tr) = pred.skip_binders() {
Some(tr.trait_id)
} else {
None
}
})
};
match (principal(&src_tty), principal(&dst_tty)) {
(Some(src_principal), Some(dst_principal)) => {
if src_principal == dst_principal {
return Ok(());
}
let src_principal =
table.db.trait_datum(table.trait_env.krate, src_principal);
let dst_principal =
table.db.trait_datum(table.trait_env.krate, dst_principal);
if src_principal.is_auto_trait() && dst_principal.is_auto_trait() {
Ok(())
} else {
Err(CastError::DifferingKinds)
}
}
_ => Err(CastError::Unknown),
}
}
(Some(src_kind), Some(dst_kind)) if src_kind == dst_kind => Ok(()),
(_, _) => Err(CastError::DifferingKinds),
}
}
fn check_ptr_addr_cast(
&self,
table: &mut InferenceTable<'_>,
expr_ty: &Ty,
) -> Result<(), CastError> {
match pointer_kind(expr_ty, table).map_err(|_| CastError::Unknown)? {
None => Err(CastError::UnknownExprPtrKind),
Some(PointerKind::Error) => Ok(()),
Some(PointerKind::Thin) => Ok(()),
_ => Err(CastError::NeedViaThinPtr),
}
}
fn check_addr_ptr_cast(
&self,
table: &mut InferenceTable<'_>,
cast_ty: &Ty,
) -> Result<(), CastError> {
match pointer_kind(cast_ty, table).map_err(|_| CastError::Unknown)? {
None => Err(CastError::UnknownCastPtrKind),
Some(PointerKind::Error) => Ok(()),
Some(PointerKind::Thin) => Ok(()),
Some(PointerKind::VTable(_)) => Err(CastError::IntToFatCast),
Some(PointerKind::Length) => Err(CastError::IntToFatCast),
Some(PointerKind::OfAlias | PointerKind::OfParam(_)) => Err(CastError::IntToFatCast),
}
}
fn check_fptr_ptr_cast(
&self,
table: &mut InferenceTable<'_>,
cast_ty: &Ty,
) -> Result<(), CastError> {
match pointer_kind(cast_ty, table).map_err(|_| CastError::Unknown)? {
None => Err(CastError::UnknownCastPtrKind),
Some(PointerKind::Error) => Ok(()),
Some(PointerKind::Thin) => Ok(()),
_ => Err(CastError::IllegalCast),
}
}
}
fn check_ref_to_ptr_cast(expr_ty: Ty, cast_ty: Ty, table: &mut InferenceTable<'_>) -> bool {
let Some((expr_inner_ty, _, _)) = expr_ty.as_reference() else {
return false;
};
let Some((cast_inner_ty, _)) = cast_ty.as_raw_ptr() else {
return false;
};
let TyKind::Array(expr_elt_ty, _) = expr_inner_ty.kind(Interner) else {
return false;
};
table.coerce(expr_elt_ty, cast_inner_ty).is_ok()
/// Classification of a pointee type, used to decide which pointer casts are
/// legal (see `pointer_kind` below for the mapping from `TyKind`).
#[derive(PartialEq, Eq)]
enum PointerKind {
    /// Sized pointee: the pointer is thin (carries no metadata).
    Thin,
    /// Trait-object pointee: the pointer carries a vtable. The payload is the
    /// `dyn` type's bound clauses, whose first `Implemented` bound is the
    /// principal trait.
    VTable(Binders<QuantifiedWhereClauses>),
    /// Slice or `str` pointee: the pointer carries a length.
    Length,
    /// The pointee is an unresolved alias (associated type, opaque type, or
    /// other projection), so its kind is not known here.
    OfAlias,
    /// The pointee is a generic placeholder; its kind depends on the
    /// instantiation.
    OfParam(PlaceholderIndex),
    /// The pointee contains a type error.
    Error,
}
/// Classifies the pointee type `ty` for cast checking.
///
/// Returns `Ok(None)` when the kind cannot be determined yet (the type is
/// still an unresolved inference/bound variable), and `Err(())` when `ty` is
/// in a state that should be impossible for an unsized type.
fn pointer_kind(ty: &Ty, table: &mut InferenceTable<'_>) -> Result<Option<PointerKind>, ()> {
    let ty = table.resolve_ty_shallow(ty);

    // Every `Sized` pointee yields a thin pointer.
    if table.is_sized(&ty) {
        return Ok(Some(PointerKind::Thin));
    }

    match ty.kind(Interner) {
        TyKind::Slice(_) | TyKind::Str => Ok(Some(PointerKind::Length)),
        TyKind::Dyn(DynTy { bounds, .. }) => Ok(Some(PointerKind::VTable(bounds.clone()))),
        TyKind::Adt(chalk_ir::AdtId(adt), args) => {
            let AdtId::StructId(struct_id) = *adt else {
                never!("`{:?}` should be sized but is not?", ty);
                return Err(());
            };
            // A struct is unsized exactly when its trailing field is, so
            // recurse into that field's type.
            let struct_data = table.db.struct_data(struct_id);
            match struct_data.variant_data.fields().iter().last() {
                Some((tail_field, _)) => {
                    let tail_ty = table.db.field_types(struct_id.into())[tail_field]
                        .clone()
                        .substitute(Interner, args);
                    pointer_kind(&tail_ty, table)
                }
                None => Ok(Some(PointerKind::Thin)),
            }
        }
        // Likewise, a tuple's sizedness is that of its last element.
        TyKind::Tuple(_, args) => {
            match args.iter(Interner).last().and_then(|arg| arg.ty(Interner)) {
                Some(tail_ty) => pointer_kind(tail_ty, table),
                None => Ok(Some(PointerKind::Thin)),
            }
        }
        TyKind::Foreign(_) => Ok(Some(PointerKind::Thin)),
        TyKind::Alias(_) | TyKind::AssociatedType(..) | TyKind::OpaqueType(..) => {
            Ok(Some(PointerKind::OfAlias))
        }
        TyKind::Error => Ok(Some(PointerKind::Error)),
        TyKind::Placeholder(idx) => Ok(Some(PointerKind::OfParam(*idx))),
        TyKind::BoundVar(_) | TyKind::InferenceVar(..) => Ok(None),
        // All of these are always sized, so the `is_sized` fast path above
        // should already have returned.
        TyKind::Scalar(_)
        | TyKind::Array(..)
        | TyKind::CoroutineWitness(..)
        | TyKind::Raw(..)
        | TyKind::Ref(..)
        | TyKind::FnDef(..)
        | TyKind::Function(_)
        | TyKind::Closure(..)
        | TyKind::Coroutine(..)
        | TyKind::Never => {
            never!("`{:?}` should be sized but is not?", ty);
            Err(())
        }
    }
}
/// Returns `true` if `ty` contains a `dyn Trait` type anywhere inside it.
fn contains_dyn_trait(ty: &Ty) -> bool {
    use std::ops::ControlFlow;

    use chalk_ir::{
        visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
        DebruijnIndex,
    };

    // Visitor that breaks out as soon as any `dyn` type is encountered.
    struct FindDyn;

    impl TypeVisitor<Interner> for FindDyn {
        type BreakTy = ();

        fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
            self
        }

        fn interner(&self) -> Interner {
            Interner
        }

        fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow<Self::BreakTy> {
            if matches!(ty.kind(Interner), TyKind::Dyn(_)) {
                return ControlFlow::Break(());
            }
            ty.super_visit_with(self.as_dyn(), outer_binder)
        }
    }

    ty.visit_with(FindDyn.as_dyn(), DebruijnIndex::INNERMOST).is_break()
}

View File

@ -10,7 +10,10 @@ use chalk_ir::{
use either::Either;
use hir_def::{
data::adt::VariantData,
hir::{Array, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement, UnaryOp},
hir::{
Array, AsmOperand, BinaryOp, BindingId, CaptureBy, Expr, ExprId, Pat, PatId, Statement,
UnaryOp,
},
lang_item::LangItem,
resolver::{resolver_for_expr, ResolveValueResult, ValueNs},
DefWithBodyId, FieldId, HasModule, TupleFieldId, TupleId, VariantId,
@ -666,7 +669,21 @@ impl InferenceContext<'_> {
fn walk_expr_without_adjust(&mut self, tgt_expr: ExprId) {
match &self.body[tgt_expr] {
Expr::OffsetOf(_) => (),
Expr::InlineAsm(e) => self.walk_expr_without_adjust(e.e),
Expr::InlineAsm(e) => e.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
| AsmOperand::InOut { expr, .. } => self.walk_expr_without_adjust(*expr),
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
self.walk_expr_without_adjust(*in_expr);
if let Some(out_expr) = out_expr {
self.walk_expr_without_adjust(*out_expr);
}
}
AsmOperand::Out { expr: None, .. }
| AsmOperand::Const(_)
| AsmOperand::Label(_)
| AsmOperand::Sym(_) => (),
}),
Expr::If { condition, then_branch, else_branch } => {
self.consume_expr(*condition);
self.consume_expr(*then_branch);

View File

@ -9,7 +9,8 @@ use chalk_ir::{cast::Cast, fold::Shift, DebruijnIndex, Mutability, TyVariableKin
use either::Either;
use hir_def::{
hir::{
ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp,
ArithOp, Array, AsmOperand, AsmOptions, BinaryOp, ClosureKind, Expr, ExprId, LabelId,
Literal, Statement, UnaryOp,
},
lang_item::{LangItem, LangItemTarget},
path::{GenericArg, GenericArgs, Path},
@ -41,9 +42,9 @@ use crate::{
primitive::{self, UintTy},
static_lifetime, to_chalk_trait_id,
traits::FnTrait,
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, FnAbi, FnPointer, FnSig,
FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder,
TyExt, TyKind,
Adjust, Adjustment, AdtId, AutoBorrow, Binders, CallableDefId, CallableSig, FnAbi, FnPointer,
FnSig, FnSubst, Interner, Rawness, Scalar, Substitution, TraitEnvironment, TraitRef, Ty,
TyBuilder, TyExt, TyKind,
};
use super::{
@ -610,7 +611,12 @@ impl InferenceContext<'_> {
Expr::Cast { expr, type_ref } => {
let cast_ty = self.make_ty(type_ref);
let expr_ty = self.infer_expr(*expr, &Expectation::Castable(cast_ty.clone()));
self.deferred_cast_checks.push(CastCheck::new(expr_ty, cast_ty.clone()));
self.deferred_cast_checks.push(CastCheck::new(
tgt_expr,
*expr,
expr_ty,
cast_ty.clone(),
));
cast_ty
}
Expr::Ref { expr, rawness, mutability } => {
@ -845,7 +851,7 @@ impl InferenceContext<'_> {
};
for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
*ty = self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
}
TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
@ -889,21 +895,52 @@ impl InferenceContext<'_> {
TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
.intern(Interner)
}
None => self.table.new_integer_var(),
None => {
let expected_ty = expected.to_option(&mut self.table);
let opt_ty = match expected_ty.as_ref().map(|it| it.kind(Interner)) {
Some(TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))) => expected_ty,
Some(TyKind::Scalar(Scalar::Char)) => {
Some(TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner))
}
Some(TyKind::Raw(..) | TyKind::FnDef(..) | TyKind::Function(..)) => {
Some(TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner))
}
_ => None,
};
opt_ty.unwrap_or_else(|| self.table.new_integer_var())
}
},
Literal::Uint(_v, ty) => match ty {
Some(int_ty) => {
TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
.intern(Interner)
}
None => self.table.new_integer_var(),
None => {
let expected_ty = expected.to_option(&mut self.table);
let opt_ty = match expected_ty.as_ref().map(|it| it.kind(Interner)) {
Some(TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))) => expected_ty,
Some(TyKind::Scalar(Scalar::Char)) => {
Some(TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner))
}
Some(TyKind::Raw(..) | TyKind::FnDef(..) | TyKind::Function(..)) => {
Some(TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner))
}
_ => None,
};
opt_ty.unwrap_or_else(|| self.table.new_integer_var())
}
},
Literal::Float(_v, ty) => match ty {
Some(float_ty) => {
TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
.intern(Interner)
}
None => self.table.new_float_var(),
None => {
let opt_ty = expected.to_option(&mut self.table).filter(|ty| {
matches!(ty.kind(Interner), TyKind::Scalar(Scalar::Float(_)))
});
opt_ty.unwrap_or_else(|| self.table.new_float_var())
}
},
},
Expr::Underscore => {
@ -919,9 +956,61 @@ impl InferenceContext<'_> {
expected
}
Expr::OffsetOf(_) => TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
Expr::InlineAsm(it) => {
self.infer_expr_no_expect(it.e);
self.result.standard_types.unit.clone()
Expr::InlineAsm(asm) => {
let mut check_expr_asm_operand = |expr, is_input: bool| {
let ty = self.infer_expr_no_expect(expr);
// If this is an input value, we require its type to be fully resolved
// at this point. This allows us to provide helpful coercions which help
// pass the type candidate list in a later pass.
//
// We don't require output types to be resolved at this point, which
// allows them to be inferred based on how they are used later in the
// function.
if is_input {
let ty = self.resolve_ty_shallow(&ty);
match ty.kind(Interner) {
TyKind::FnDef(def, parameters) => {
let fnptr_ty = TyKind::Function(
CallableSig::from_def(self.db, *def, parameters).to_fn_ptr(),
)
.intern(Interner);
_ = self.coerce(Some(expr), &ty, &fnptr_ty);
}
TyKind::Ref(mutbl, _, base_ty) => {
let ptr_ty = TyKind::Raw(*mutbl, base_ty.clone()).intern(Interner);
_ = self.coerce(Some(expr), &ty, &ptr_ty);
}
_ => {}
}
}
};
let diverge = asm.options.contains(AsmOptions::NORETURN);
asm.operands.iter().for_each(|(_, operand)| match *operand {
AsmOperand::In { expr, .. } => check_expr_asm_operand(expr, true),
AsmOperand::Out { expr: Some(expr), .. } | AsmOperand::InOut { expr, .. } => {
check_expr_asm_operand(expr, false)
}
AsmOperand::Out { expr: None, .. } => (),
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
check_expr_asm_operand(in_expr, true);
if let Some(out_expr) = out_expr {
check_expr_asm_operand(out_expr, false);
}
}
// FIXME
AsmOperand::Label(_) => (),
// FIXME
AsmOperand::Const(_) => (),
// FIXME
AsmOperand::Sym(_) => (),
});
if diverge {
self.result.standard_types.never.clone()
} else {
self.result.standard_types.unit.clone()
}
}
};
// use a new type variable if we got unknown here

View File

@ -3,7 +3,9 @@
use chalk_ir::{cast::Cast, Mutability};
use hir_def::{
hir::{Array, BinaryOp, BindingAnnotation, Expr, ExprId, PatId, Statement, UnaryOp},
hir::{
Array, AsmOperand, BinaryOp, BindingAnnotation, Expr, ExprId, PatId, Statement, UnaryOp,
},
lang_item::LangItem,
};
use hir_expand::name::Name;
@ -39,7 +41,25 @@ impl InferenceContext<'_> {
fn infer_mut_expr_without_adjust(&mut self, tgt_expr: ExprId, mutability: Mutability) {
match &self.body[tgt_expr] {
Expr::Missing => (),
Expr::InlineAsm(e) => self.infer_mut_expr_without_adjust(e.e, Mutability::Not),
Expr::InlineAsm(e) => {
e.operands.iter().for_each(|(_, op)| match op {
AsmOperand::In { expr, .. }
| AsmOperand::Out { expr: Some(expr), .. }
| AsmOperand::InOut { expr, .. } => {
self.infer_mut_expr_without_adjust(*expr, Mutability::Not)
}
AsmOperand::SplitInOut { in_expr, out_expr, .. } => {
self.infer_mut_expr_without_adjust(*in_expr, Mutability::Not);
if let Some(out_expr) = out_expr {
self.infer_mut_expr_without_adjust(*out_expr, Mutability::Not);
}
}
AsmOperand::Out { expr: None, .. }
| AsmOperand::Label(_)
| AsmOperand::Sym(_)
| AsmOperand::Const(_) => (),
});
}
Expr::OffsetOf(_) => (),
&Expr::If { condition, then_branch, else_branch } => {
self.infer_mut_expr(condition, Mutability::Not);
@ -129,7 +149,7 @@ impl InferenceContext<'_> {
target,
}) = base_adjustments
{
// For assignee exprs `IndexMut` obiligations are already applied
// For assignee exprs `IndexMut` obligations are already applied
if !is_assignee_expr {
if let TyKind::Ref(_, _, ty) = target.kind(Interner) {
base_ty = Some(ty.clone());

View File

@ -247,8 +247,12 @@ impl InferenceContext<'_> {
&self.resolver,
self.owner.into(),
);
let trait_ref =
ctx.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
let trait_ref = ctx.lower_trait_ref_from_resolved_path(
trait_,
resolved_segment,
self.table.new_type_var(),
);
self.resolve_trait_assoc_item(trait_ref, segment, id)
}
(def, _) => {

View File

@ -9,6 +9,7 @@ use chalk_ir::{
use chalk_solve::infer::ParameterEnaVariableExt;
use either::Either;
use ena::unify::UnifyKey;
use hir_def::{lang_item::LangItem, AdtId};
use hir_expand::name::Name;
use intern::sym;
use rustc_hash::FxHashMap;
@ -21,7 +22,7 @@ use crate::{
to_chalk_trait_id, traits::FnTrait, AliasEq, AliasTy, BoundVar, Canonical, Const, ConstValue,
DebruijnIndex, DomainGoal, GenericArg, GenericArgData, Goal, GoalData, Guidance, InEnvironment,
InferenceVar, Interner, Lifetime, OpaqueTyId, ParamKind, ProjectionTy, ProjectionTyExt, Scalar,
Solution, Substitution, TraitEnvironment, Ty, TyBuilder, TyExt, TyKind, VariableKind,
Solution, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, TyKind, VariableKind,
WhereClause,
};
@ -265,14 +266,16 @@ impl<'a> InferenceTable<'a> {
}
let v = InferenceVar::from(i as u32);
let root = self.var_unification_table.inference_var_root(v);
if let Some(data) = self.type_variable_table.get_mut(root.index() as usize) {
*data |= TypeVariableFlags::DIVERGING;
}
self.modify_type_variable_flag(root, |f| {
*f |= TypeVariableFlags::DIVERGING;
});
}
}
pub(super) fn set_diverging(&mut self, iv: InferenceVar, diverging: bool) {
self.type_variable_table[iv.index() as usize].set(TypeVariableFlags::DIVERGING, diverging);
self.modify_type_variable_flag(iv, |f| {
f.set(TypeVariableFlags::DIVERGING, diverging);
});
}
fn fallback_value(&self, iv: InferenceVar, kind: TyVariableKind) -> Ty {
@ -369,6 +372,18 @@ impl<'a> InferenceTable<'a> {
var
}
fn modify_type_variable_flag<F>(&mut self, var: InferenceVar, cb: F)
where
F: FnOnce(&mut TypeVariableFlags),
{
let idx = var.index() as usize;
if self.type_variable_table.len() <= idx {
self.extend_type_variable_table(idx);
}
if let Some(f) = self.type_variable_table.get_mut(idx) {
cb(f);
}
}
fn extend_type_variable_table(&mut self, to_index: usize) {
let count = to_index - self.type_variable_table.len() + 1;
self.type_variable_table.extend(iter::repeat(TypeVariableFlags::default()).take(count));
@ -898,6 +913,37 @@ impl<'a> InferenceTable<'a> {
_ => c,
}
}
/// Check if given type is `Sized` or not
pub(crate) fn is_sized(&mut self, ty: &Ty) -> bool {
// Early return for some obvious types
if matches!(ty.kind(Interner), TyKind::Scalar(..) | TyKind::Ref(..) | TyKind::Raw(..)) {
return true;
}
if let Some((AdtId::StructId(id), subst)) = ty.as_adt() {
let struct_data = self.db.struct_data(id);
if let Some((last_field, _)) = struct_data.variant_data.fields().iter().last() {
let last_field_ty =
self.db.field_types(id.into())[last_field].clone().substitute(Interner, subst);
// Structs can have DST as its last field and such cases are not handled
// as unsized by the chalk, so we do this manually
return self.is_sized(&last_field_ty);
}
}
let Some(sized) = self
.db
.lang_item(self.trait_env.krate, LangItem::Sized)
.and_then(|sized| sized.as_trait())
else {
return false;
};
let sized_pred = WhereClause::Implemented(TraitRef {
trait_id: to_chalk_trait_id(sized),
substitution: Substitution::from1(Interner, ty.clone()),
});
let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(sized_pred)).intern(Interner);
matches!(self.try_obligation(goal), Some(Solution::Unique(_)))
}
}
impl fmt::Debug for InferenceTable<'_> {

View File

@ -72,6 +72,7 @@ pub type Variants = hir_def::layout::Variants<RustcFieldIdx, RustcEnumVariantIdx
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LayoutError {
EmptyUnion,
HasErrorConst,
HasErrorType,
HasPlaceholder,
@ -80,6 +81,7 @@ pub enum LayoutError {
RecursiveTypeWithoutIndirection,
SizeOverflow,
TargetLayoutNotAvailable,
UnexpectedUnsized,
Unknown,
UserReprTooSmall,
}
@ -88,6 +90,7 @@ impl std::error::Error for LayoutError {}
impl fmt::Display for LayoutError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
LayoutError::EmptyUnion => write!(f, "type is an union with no fields"),
LayoutError::HasErrorConst => write!(f, "type contains an unevaluatable const"),
LayoutError::HasErrorType => write!(f, "type contains an error"),
LayoutError::HasPlaceholder => write!(f, "type contains placeholders"),
@ -98,6 +101,9 @@ impl fmt::Display for LayoutError {
}
LayoutError::SizeOverflow => write!(f, "size overflow"),
LayoutError::TargetLayoutNotAvailable => write!(f, "target layout not available"),
LayoutError::UnexpectedUnsized => {
write!(f, "an unsized type was found where a sized type was expected")
}
LayoutError::Unknown => write!(f, "unknown"),
LayoutError::UserReprTooSmall => {
write!(f, "the `#[repr]` hint is too small to hold the discriminants of the enum")
@ -109,9 +115,8 @@ impl fmt::Display for LayoutError {
impl<F> From<LayoutCalculatorError<F>> for LayoutError {
fn from(err: LayoutCalculatorError<F>) -> Self {
match err {
LayoutCalculatorError::UnexpectedUnsized(_) | LayoutCalculatorError::EmptyUnion => {
LayoutError::Unknown
}
LayoutCalculatorError::EmptyUnion => LayoutError::EmptyUnion,
LayoutCalculatorError::UnexpectedUnsized(_) => LayoutError::UnexpectedUnsized,
LayoutCalculatorError::SizeOverflow => LayoutError::SizeOverflow,
}
}

View File

@ -11,8 +11,8 @@ pub fn target_data_layout_query(
db: &dyn HirDatabase,
krate: CrateId,
) -> Result<Arc<TargetDataLayout>, Arc<str>> {
match db.data_layout(krate) {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) {
match &db.crate_workspace_data()[&krate].data_layout {
Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) {
Ok(it) => Ok(Arc::new(it)),
Err(e) => {
Err(match e {
@ -42,6 +42,6 @@ pub fn target_data_layout_query(
}.into())
}
},
Err(e) => Err(e),
Err(e) => Err(e.clone()),
}
}

View File

@ -42,6 +42,7 @@ pub mod lang_items;
pub mod layout;
pub mod method_resolution;
pub mod mir;
pub mod object_safety;
pub mod primitive;
pub mod traits;
@ -82,6 +83,7 @@ pub use autoderef::autoderef;
pub use builder::{ParamKind, TyBuilder};
pub use chalk_ext::*;
pub use infer::{
cast::CastError,
closure::{CaptureKind, CapturedItem},
could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode,
InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast,

View File

@ -58,7 +58,7 @@ use crate::{
},
db::HirDatabase,
error_lifetime,
generics::{generics, Generics},
generics::{generics, trait_self_param_idx, Generics},
make_binders,
mapping::{from_chalk_trait_id, lt_to_placeholder_idx, ToChalk},
static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
@ -516,8 +516,11 @@ impl<'a> TyLoweringContext<'a> {
TypeNs::TraitId(trait_) => {
let ty = match remaining_segments.len() {
1 => {
let trait_ref =
self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
let trait_ref = self.lower_trait_ref_from_resolved_path(
trait_,
resolved_segment,
TyKind::Error.intern(Interner),
);
let segment = remaining_segments.first().unwrap();
let found = self
.db
@ -952,11 +955,17 @@ impl<'a> TyLoweringContext<'a> {
Substitution::from_iter(Interner, substs)
}
fn lower_trait_ref_from_path(
pub(crate) fn lower_trait_ref_from_resolved_path(
&self,
path: &Path,
explicit_self_ty: Option<Ty>,
) -> Option<TraitRef> {
resolved: TraitId,
segment: PathSegment<'_>,
explicit_self_ty: Ty,
) -> TraitRef {
let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
}
fn lower_trait_ref_from_path(&self, path: &Path, explicit_self_ty: Ty) -> Option<TraitRef> {
let resolved = match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path)? {
// FIXME(trait_alias): We need to handle trait alias here.
TypeNs::TraitId(tr) => tr,
@ -966,21 +975,7 @@ impl<'a> TyLoweringContext<'a> {
Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
}
pub(crate) fn lower_trait_ref_from_resolved_path(
&self,
resolved: TraitId,
segment: PathSegment<'_>,
explicit_self_ty: Option<Ty>,
) -> TraitRef {
let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
}
fn lower_trait_ref(
&self,
trait_ref: &HirTraitRef,
explicit_self_ty: Option<Ty>,
) -> Option<TraitRef> {
fn lower_trait_ref(&self, trait_ref: &HirTraitRef, explicit_self_ty: Ty) -> Option<TraitRef> {
self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
}
@ -988,9 +983,9 @@ impl<'a> TyLoweringContext<'a> {
&self,
segment: PathSegment<'_>,
resolved: TraitId,
explicit_self_ty: Option<Ty>,
explicit_self_ty: Ty,
) -> Substitution {
self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
self.substs_from_path_segment(segment, Some(resolved.into()), false, Some(explicit_self_ty))
}
pub(crate) fn lower_where_predicate<'b>(
@ -1041,7 +1036,7 @@ impl<'a> TyLoweringContext<'a> {
let mut trait_ref = None;
let clause = match bound.as_ref() {
TypeBound::Path(path, TraitBoundModifier::None) => {
trait_ref = self.lower_trait_ref_from_path(path, Some(self_ty));
trait_ref = self.lower_trait_ref_from_path(path, self_ty);
trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
}
TypeBound::Path(path, TraitBoundModifier::Maybe) => {
@ -1053,7 +1048,7 @@ impl<'a> TyLoweringContext<'a> {
// `?Sized` has no of them.
// If we got another trait here ignore the bound completely.
let trait_id = self
.lower_trait_ref_from_path(path, Some(self_ty.clone()))
.lower_trait_ref_from_path(path, self_ty.clone())
.map(|trait_ref| trait_ref.hir_trait_id());
if trait_id == sized_trait {
self.unsized_types.borrow_mut().insert(self_ty);
@ -1062,7 +1057,7 @@ impl<'a> TyLoweringContext<'a> {
}
TypeBound::ForLifetime(_, path) => {
// FIXME Don't silently drop the hrtb lifetimes here
trait_ref = self.lower_trait_ref_from_path(path, Some(self_ty));
trait_ref = self.lower_trait_ref_from_path(path, self_ty);
trait_ref.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
}
TypeBound::Lifetime(l) => {
@ -1700,6 +1695,28 @@ pub(crate) fn generic_predicates_query(
db: &dyn HirDatabase,
def: GenericDefId,
) -> GenericPredicates {
generic_predicates_filtered_by(db, def, |_, _| true)
}
/// Resolve the where clause(s) of an item with generics,
/// except the ones inherited from the parent
pub(crate) fn generic_predicates_without_parent_query(
db: &dyn HirDatabase,
def: GenericDefId,
) -> GenericPredicates {
generic_predicates_filtered_by(db, def, |_, d| *d == def)
}
/// Resolve the where clause(s) of an item with generics,
/// except the ones inherited from the parent
fn generic_predicates_filtered_by<F>(
db: &dyn HirDatabase,
def: GenericDefId,
filter: F,
) -> GenericPredicates
where
F: Fn(&WherePredicate, &GenericDefId) -> bool,
{
let resolver = def.resolver(db.upcast());
let (impl_trait_lowering, param_lowering) = match def {
GenericDefId::FunctionId(_) => {
@ -1714,6 +1731,7 @@ pub(crate) fn generic_predicates_query(
let mut predicates = resolver
.where_predicates_in_scope()
.filter(|(pred, def)| filter(pred, def))
.flat_map(|(pred, def)| {
ctx.lower_where_predicate(pred, def, false).map(|p| make_binders(db, &generics, p))
})
@ -1747,21 +1765,7 @@ fn implicitly_sized_clauses<'a, 'subst: 'a>(
.lang_item(resolver.krate(), LangItem::Sized)
.and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id))?;
let get_trait_self_idx = |container: ItemContainerId| {
if matches!(container, ItemContainerId::TraitId(_)) {
let generics = generics(db.upcast(), def);
Some(generics.len_self())
} else {
None
}
};
let trait_self_idx = match def {
GenericDefId::TraitId(_) => Some(0),
GenericDefId::FunctionId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
GenericDefId::ConstId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
GenericDefId::TypeAliasId(it) => get_trait_self_idx(it.lookup(db.upcast()).container),
_ => None,
};
let trait_self_idx = trait_self_param_idx(db.upcast(), def);
Some(
substitution
@ -2117,7 +2121,7 @@ pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<
.with_type_param_mode(ParamLoweringMode::Variable);
let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
let target_trait = impl_data.target_trait.as_ref()?;
Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, self_ty)?))
}
pub(crate) fn return_type_impl_traits(

View File

@ -185,8 +185,8 @@ impl<V, T> ProjectionElem<V, T> {
never!("Out of bound tuple field");
TyKind::Error.intern(Interner)
}),
_ => {
never!("Only tuple has tuple field");
ty => {
never!("Only tuple has tuple field: {:?}", ty);
TyKind::Error.intern(Interner)
}
},
@ -837,7 +837,9 @@ pub enum CastKind {
PointerFromExposedAddress,
/// All sorts of pointer-to-pointer casts. Note that reference-to-raw-ptr casts are
/// translated into `&raw mut/const *r`, i.e., they are not actually casts.
Pointer(PointerCast),
PtrToPtr,
/// Pointer related casts that are done by coercions.
PointerCoercion(PointerCast),
/// Cast into a dyn* object.
DynStar,
IntToInt,

View File

@ -421,9 +421,25 @@ impl MirEvalError {
}
MirEvalError::MirLowerError(func, err) => {
let function_name = db.function_data(*func);
let self_ = match func.lookup(db.upcast()).container {
ItemContainerId::ImplId(impl_id) => Some({
let generics = crate::generics::generics(db.upcast(), impl_id.into());
let substs = generics.placeholder_subst(db);
db.impl_self_ty(impl_id)
.substitute(Interner, &substs)
.display(db, edition)
.to_string()
}),
ItemContainerId::TraitId(it) => {
Some(db.trait_data(it).name.display(db.upcast(), edition).to_string())
}
_ => None,
};
writeln!(
f,
"MIR lowering for function `{}` ({:?}) failed due:",
"MIR lowering for function `{}{}{}` ({:?}) failed due:",
self_.as_deref().unwrap_or_default(),
if self_.is_some() { "::" } else { "" },
function_name.name.display(db.upcast(), edition),
func
)?;
@ -1475,7 +1491,7 @@ impl Evaluator<'_> {
}
}
Rvalue::Cast(kind, operand, target_ty) => match kind {
CastKind::Pointer(cast) => match cast {
CastKind::PointerCoercion(cast) => match cast {
PointerCast::ReifyFnPointer | PointerCast::ClosureFnPointer(_) => {
let current_ty = self.operand_ty(operand, locals)?;
if let TyKind::FnDef(_, _) | TyKind::Closure(_, _) =
@ -1506,6 +1522,7 @@ impl Evaluator<'_> {
},
CastKind::DynStar => not_supported!("dyn star cast"),
CastKind::IntToInt
| CastKind::PtrToPtr
| CastKind::PointerExposeAddress
| CastKind::PointerFromExposedAddress => {
let current_ty = self.operand_ty(operand, locals)?;

View File

@ -399,7 +399,7 @@ extern "C" {
fn memcmp(s1: *const u8, s2: *const u8, n: usize) -> i32;
}
fn my_cmp(x: &[u8], y: &[u8]) -> i32 {
fn my_cmp(x: &[u8; 3], y: &[u8; 3]) -> i32 {
memcmp(x as *const u8, y as *const u8, x.len())
}
@ -779,6 +779,7 @@ fn main() {
fn posix_getenv() {
check_pass(
r#"
//- minicore: sized
//- /main.rs env:foo=bar
type c_char = u8;
@ -849,7 +850,7 @@ fn main() {
fn regression_14966() {
check_pass(
r#"
//- minicore: fn, copy, coerce_unsized
//- minicore: fn, copy, coerce_unsized, dispatch_from_dyn
trait A<T> {
fn a(&self) {}
}

View File

@ -31,7 +31,7 @@ use crate::{
display::HirDisplay,
error_lifetime,
generics::generics,
infer::{CaptureKind, CapturedItem, TypeMismatch},
infer::{cast::CastTy, unify::InferenceTable, CaptureKind, CapturedItem, TypeMismatch},
inhabitedness::is_ty_uninhabited_from,
layout::LayoutError,
mapping::ToChalk,
@ -94,7 +94,8 @@ pub enum MirLowerError {
UnresolvedField,
UnsizedTemporary(Ty),
MissingFunctionDefinition(DefWithBodyId, ExprId),
TypeMismatch(Option<TypeMismatch>),
TypeMismatch(TypeMismatch),
HasErrors,
/// This should never happen. Type mismatch should catch everything.
TypeError(&'static str),
NotSupported(String),
@ -179,15 +180,13 @@ impl MirLowerError {
body.pretty_print_expr(db.upcast(), *owner, *it, edition)
)?;
}
MirLowerError::TypeMismatch(e) => match e {
Some(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db, edition),
e.actual.display(db, edition),
)?,
None => writeln!(f, "Type mismatch: types mismatch with {{unknown}}",)?,
},
MirLowerError::HasErrors => writeln!(f, "Type inference result contains errors")?,
MirLowerError::TypeMismatch(e) => writeln!(
f,
"Type mismatch: Expected {}, found {}",
e.expected.display(db, edition),
e.actual.display(db, edition),
)?,
MirLowerError::GenericArgNotProvided(id, subst) => {
let parent = id.parent;
let param = &db.generic_params(parent)[id.local_id];
@ -362,7 +361,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
current,
place,
Rvalue::Cast(
CastKind::Pointer(*cast),
CastKind::PointerCoercion(*cast),
Operand::Copy(p),
last.target.clone(),
),
@ -898,14 +897,26 @@ impl<'ctx> MirLowerCtx<'ctx> {
let Some((it, current)) = self.lower_expr_to_some_operand(*expr, current)? else {
return Ok(None);
};
let source_ty = self.infer[*expr].clone();
let target_ty = self.infer[expr_id].clone();
self.push_assignment(
current,
place,
Rvalue::Cast(cast_kind(&source_ty, &target_ty)?, it, target_ty),
expr_id.into(),
);
// Since we don't have THIR, this is the "zipped" version of [rustc's HIR lowering](https://github.com/rust-lang/rust/blob/e71f9529121ca8f687e4b725e3c9adc3f1ebab4d/compiler/rustc_mir_build/src/thir/cx/expr.rs#L165-L178)
// and [THIR lowering as RValue](https://github.com/rust-lang/rust/blob/a4601859ae3875732797873612d424976d9e3dd0/compiler/rustc_mir_build/src/build/expr/as_rvalue.rs#L193-L313)
let rvalue = if self.infer.coercion_casts.contains(expr) {
Rvalue::Use(it)
} else {
let source_ty = self.infer[*expr].clone();
let target_ty = self.infer[expr_id].clone();
let cast_kind = if source_ty.as_reference().is_some() {
CastKind::PointerCoercion(PointerCast::ArrayToPointer)
} else {
let mut table = InferenceTable::new(
self.db,
self.db.trait_environment_for_body(self.owner),
);
cast_kind(&mut table, &source_ty, &target_ty)?
};
Rvalue::Cast(cast_kind, it, target_ty)
};
self.push_assignment(current, place, rvalue, expr_id.into());
Ok(Some(current))
}
Expr::Ref { expr, rawness: _, mutability } => {
@ -2005,40 +2016,21 @@ impl<'ctx> MirLowerCtx<'ctx> {
}
}
fn cast_kind(source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
Ok(match (source_ty.kind(Interner), target_ty.kind(Interner)) {
(TyKind::FnDef(..), TyKind::Function(_)) => CastKind::Pointer(PointerCast::ReifyFnPointer),
(TyKind::Scalar(s), TyKind::Scalar(t)) => match (s, t) {
(chalk_ir::Scalar::Float(_), chalk_ir::Scalar::Float(_)) => CastKind::FloatToFloat,
(chalk_ir::Scalar::Float(_), _) => CastKind::FloatToInt,
(_, chalk_ir::Scalar::Float(_)) => CastKind::IntToFloat,
(_, _) => CastKind::IntToInt,
},
(TyKind::Scalar(_), TyKind::Raw(..)) => CastKind::PointerFromExposedAddress,
(TyKind::Raw(..), TyKind::Scalar(_)) => CastKind::PointerExposeAddress,
(TyKind::Raw(_, a) | TyKind::Ref(_, _, a), TyKind::Raw(_, b) | TyKind::Ref(_, _, b)) => {
CastKind::Pointer(if a == b {
PointerCast::MutToConstPointer
} else if matches!(b.kind(Interner), TyKind::Slice(_))
&& matches!(a.kind(Interner), TyKind::Array(_, _))
|| matches!(b.kind(Interner), TyKind::Dyn(_))
{
PointerCast::Unsize
} else if matches!(a.kind(Interner), TyKind::Slice(s) if s == b) {
PointerCast::ArrayToPointer
} else {
// cast between two sized pointer, like *const i32 to *const i8, or two unsized pointer, like
// slice to slice, slice to str, ... . These are no-ops (even in the unsized case, no metadata
// will be touched) but there is no specific variant
// for it in `PointerCast` so we use `MutToConstPointer`
PointerCast::MutToConstPointer
})
/// Maps the source/target type pair of an explicit `as` cast to the MIR
/// [`CastKind`] that implements it.
///
/// Both types are classified via `CastTy::from_ty` (which may consult the
/// inference `table` to resolve them) and the pair of classifications selects
/// the cast kind. Arm order is significant: the pointer/fn-pointer-to-int arm
/// must match before the plain int-to-int arm.
fn cast_kind(table: &mut InferenceTable<'_>, source_ty: &Ty, target_ty: &Ty) -> Result<CastKind> {
    let from = CastTy::from_ty(table, source_ty);
    let cast = CastTy::from_ty(table, target_ty);
    Ok(match (from, cast) {
        // Casting a (fn) pointer to an integer exposes the pointer's address.
        (Some(CastTy::Ptr(..) | CastTy::FnPtr), Some(CastTy::Int(_))) => {
            CastKind::PointerExposeAddress
        }
        (Some(CastTy::Int(_)), Some(CastTy::Ptr(..))) => CastKind::PointerFromExposedAddress,
        (Some(CastTy::Int(_)), Some(CastTy::Int(_))) => CastKind::IntToInt,
        (Some(CastTy::FnPtr), Some(CastTy::Ptr(..))) => CastKind::FnPtrToPtr,
        (Some(CastTy::Float), Some(CastTy::Int(_))) => CastKind::FloatToInt,
        (Some(CastTy::Int(_)), Some(CastTy::Float)) => CastKind::IntToFloat,
        (Some(CastTy::Float), Some(CastTy::Float)) => CastKind::FloatToFloat,
        (Some(CastTy::Ptr(..)), Some(CastTy::Ptr(..))) => CastKind::PtrToPtr,
        // Anything else (unclassifiable types, unsupported enum casts, ...)
        // is not lowered yet.
        _ => not_supported!("Unknown cast between {source_ty:?} and {target_ty:?}"),
    })
}
@ -2191,7 +2183,7 @@ pub fn lower_to_mir(
root_expr: ExprId,
) -> Result<MirBody> {
if infer.has_errors {
return Err(MirLowerError::TypeMismatch(None));
return Err(MirLowerError::HasErrors);
}
let mut ctx = MirLowerCtx::new(db, owner, body, infer);
// 0 is return local

View File

@ -0,0 +1,612 @@
//! Compute the object-safety of a trait
use std::ops::ControlFlow;
use chalk_ir::{
cast::Cast,
visit::{TypeSuperVisitable, TypeVisitable, TypeVisitor},
DebruijnIndex,
};
use chalk_solve::rust_ir::InlineBound;
use hir_def::{
lang_item::LangItem, AssocItemId, ConstId, FunctionId, GenericDefId, HasModule, TraitId,
TypeAliasId,
};
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use crate::{
all_super_traits,
db::HirDatabase,
from_assoc_type_id, from_chalk_trait_id,
generics::{generics, trait_self_param_idx},
lower::callable_item_sig,
to_assoc_type_id, to_chalk_trait_id,
utils::elaborate_clause_supertraits,
AliasEq, AliasTy, Binders, BoundVar, CallableSig, GoalData, ImplTraitId, Interner, OpaqueTyId,
ProjectionTyExt, Solution, Substitution, TraitRef, Ty, TyKind, WhereClause,
};
/// A reason why a trait cannot be used as a trait object (`dyn Trait`).
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum ObjectSafetyViolation {
    /// The trait (transitively) requires `Self: Sized`.
    SizedSelf,
    /// A predicate or item bound of the trait references `Self`.
    SelfReferential,
    /// A method is not dispatchable on a trait object; the code explains why.
    Method(FunctionId, MethodViolationCode),
    /// Trait objects cannot have associated constants.
    AssocConst(ConstId),
    /// Trait objects cannot have generic associated types.
    GAT(TypeAliasId),
    // This doesn't exist in rustc, but added for better visualization
    HasNonSafeSuperTrait(TraitId),
}
/// Why a specific trait method cannot be called on a trait object.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum MethodViolationCode {
    /// The method has no `self` parameter.
    StaticMethod,
    /// A non-receiver parameter type references `Self`.
    ReferencesSelfInput,
    /// The return type references `Self`.
    ReferencesSelfOutput,
    /// The signature contains return-position `impl Trait`.
    ReferencesImplTraitInTrait,
    /// `async fn`s are not dispatchable.
    AsyncFn,
    /// A where-clause of the method references `Self`.
    WhereClauseReferencesSelf,
    /// The method has its own type or const generic parameters.
    Generic,
    /// The receiver type cannot be dispatched on.
    UndispatchableReceiver,
}
/// Returns the first object-safety violation of `trait_`, if any.
///
/// Super traits (excluding `trait_` itself) are inspected first, in reverse
/// order; an object-unsafe super trait is reported as
/// [`ObjectSafetyViolation::HasNonSafeSuperTrait`]. Otherwise the trait's own
/// cached violations are consulted.
pub fn object_safety(db: &dyn HirDatabase, trait_: TraitId) -> Option<ObjectSafetyViolation> {
    let unsafe_super_trait = all_super_traits(db.upcast(), trait_)
        .into_iter()
        .skip(1)
        .rev()
        .find(|&super_trait| db.object_safety_of_trait(super_trait).is_some());
    if let Some(super_trait) = unsafe_super_trait {
        return Some(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait));
    }
    db.object_safety_of_trait(trait_)
}
/// Like [`object_safety`], but streams *every* violation through `cb` until
/// the callback breaks.
///
/// Super traits (excluding `trait_` itself) are visited first, in reverse
/// order; any object-unsafe super trait is reported as
/// `HasNonSafeSuperTrait`, then `trait_`'s own violations follow via
/// [`object_safety_of_trait_with_callback`].
pub fn object_safety_with_callback<F>(
    db: &dyn HirDatabase,
    trait_: TraitId,
    cb: &mut F,
) -> ControlFlow<()>
where
    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
{
    for super_trait in all_super_traits(db.upcast(), trait_).into_iter().skip(1).rev() {
        if db.object_safety_of_trait(super_trait).is_some() {
            // Report the offending super trait itself, consistent with
            // `object_safety` above; reporting `trait_` here would hide which
            // super trait is the culprit.
            cb(ObjectSafetyViolation::HasNonSafeSuperTrait(super_trait))?;
        }
    }
    object_safety_of_trait_with_callback(db, trait_, cb)
}
/// Streams the object-safety violations of `trait_` itself (super traits are
/// not considered) into `cb`, stopping early if the callback breaks.
pub fn object_safety_of_trait_with_callback<F>(
    db: &dyn HirDatabase,
    trait_: TraitId,
    cb: &mut F,
) -> ControlFlow<()>
where
    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
{
    // Check whether this has a `Sized` bound
    if generics_require_sized_self(db, trait_.into()) {
        cb(ObjectSafetyViolation::SizedSelf)?;
    }
    // Check if there exist bounds that reference `Self`
    if predicates_reference_self(db, trait_) {
        cb(ObjectSafetyViolation::SelfReferential)?;
    }
    if bounds_reference_self(db, trait_) {
        cb(ObjectSafetyViolation::SelfReferential)?;
    }
    // rustc checks for non-lifetime binders here, but we don't support HRTB yet
    let trait_data = db.trait_data(trait_);
    for (_, assoc_item) in &trait_data.items {
        // Each associated item may contribute method/const/GAT violations.
        object_safety_violation_for_assoc_item(db, trait_, *assoc_item, cb)?;
    }
    ControlFlow::Continue(())
}
/// Query implementation: returns the first object-safety violation of
/// `trait_` itself (super traits are not considered here).
pub fn object_safety_of_trait_query(
    db: &dyn HirDatabase,
    trait_: TraitId,
) -> Option<ObjectSafetyViolation> {
    let mut res = None;
    // Only the first violation is wanted, so break immediately. Explicitly
    // discard the `#[must_use]` `ControlFlow` result — `res` already captures
    // everything we need.
    _ = object_safety_of_trait_with_callback(db, trait_, &mut |osv| {
        res = Some(osv);
        ControlFlow::Break(())
    });
    res
}
/// Does `def` (the trait itself, or one of its associated items) carry a
/// `Self: Sized` requirement, either written directly or implied through
/// super trait elaboration?
fn generics_require_sized_self(db: &dyn HirDatabase, def: GenericDefId) -> bool {
    let krate = def.module(db.upcast()).krate();
    let Some(sized) = db.lang_item(krate, LangItem::Sized).and_then(|l| l.as_trait()) else {
        // No `Sized` lang item in this crate graph: nothing can require it.
        return false;
    };
    let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else {
        // `def` has no trait `Self` parameter at all.
        return false;
    };
    let predicates = &*db.generic_predicates(def);
    let predicates = predicates.iter().map(|p| p.skip_binders().skip_binders().clone());
    elaborate_clause_supertraits(db, predicates).any(|pred| match pred {
        WhereClause::Implemented(trait_ref) => {
            if from_chalk_trait_id(trait_ref.trait_id) == sized {
                if let TyKind::BoundVar(it) =
                    *trait_ref.self_type_parameter(Interner).kind(Interner)
                {
                    // Since `generic_predicates` is `Binder<Binder<..>>`, the `DebrujinIndex` of
                    // self-parameter is `1`
                    return it
                        .index_if_bound_at(DebruijnIndex::ONE)
                        .is_some_and(|idx| idx == trait_self_param_idx);
                }
            }
            false
        }
        _ => false,
    })
}
// rustc gathers all the spans that reference `Self` for error rendering, but
// we have no good way to render such locations, so we only report whether
// such a `Self` reference exists at all.
fn predicates_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
    let predicates = db.generic_predicates(trait_.into());
    for pred in predicates.iter() {
        if predicate_references_self(db, trait_, pred, AllowSelfProjection::No) {
            return true;
        }
    }
    false
}
// Same as the above, `predicates_reference_self`, but for the item bounds of
// the trait's associated types, where projections on (super traits of) the
// trait itself are permitted.
fn bounds_reference_self(db: &dyn HirDatabase, trait_: TraitId) -> bool {
    let trait_data = db.trait_data(trait_);
    trait_data
        .items
        .iter()
        // Only type aliases carry item bounds that need inspection.
        .filter_map(|(_, it)| match *it {
            AssocItemId::TypeAliasId(id) => {
                let assoc_ty_id = to_assoc_type_id(id);
                let assoc_ty_data = db.associated_ty_data(assoc_ty_id);
                Some(assoc_ty_data)
            }
            _ => None,
        })
        .any(|assoc_ty_data| {
            assoc_ty_data.binders.skip_binders().bounds.iter().any(|bound| {
                let def = from_assoc_type_id(assoc_ty_data.id).into();
                match bound.skip_binders() {
                    // `Assoc: Trait<Args>`-style bound: check the non-`Self` args.
                    InlineBound::TraitBound(it) => it.args_no_self.iter().any(|arg| {
                        contains_illegal_self_type_reference(
                            db,
                            def,
                            trait_,
                            arg,
                            DebruijnIndex::ONE,
                            AllowSelfProjection::Yes,
                        )
                    }),
                    // `Assoc: Trait<Item = X>`-style bound: check all parameters.
                    InlineBound::AliasEqBound(it) => it.parameters.iter().any(|arg| {
                        contains_illegal_self_type_reference(
                            db,
                            def,
                            trait_,
                            arg,
                            DebruijnIndex::ONE,
                            AllowSelfProjection::Yes,
                        )
                    }),
                }
            })
        })
}
/// Whether a projection on the checked trait (or one of its super traits),
/// e.g. `Self::Assoc`, is tolerated without counting as an illegal `Self`
/// reference.
#[derive(Clone, Copy)]
enum AllowSelfProjection {
    Yes,
    No,
}
/// Does `predicate` (one where clause of the trait) reference `Self` in any
/// of its arguments? The `Self` parameter itself (the first substitution
/// element) is skipped.
fn predicate_references_self(
    db: &dyn HirDatabase,
    trait_: TraitId,
    predicate: &Binders<Binders<WhereClause>>,
    allow_self_projection: AllowSelfProjection,
) -> bool {
    // Both interesting clause kinds carry a substitution whose first element
    // is the `Self` parameter; only the remaining arguments are inspected.
    let substitution = match predicate.skip_binders().skip_binders() {
        WhereClause::Implemented(trait_ref) => &trait_ref.substitution,
        WhereClause::AliasEq(AliasEq { alias: AliasTy::Projection(proj), .. }) => {
            &proj.substitution
        }
        _ => return false,
    };
    substitution.iter(Interner).skip(1).any(|arg| {
        contains_illegal_self_type_reference(
            db,
            trait_.into(),
            trait_,
            arg,
            DebruijnIndex::ONE,
            allow_self_projection,
        )
    })
}
/// Visits `t` and reports whether it contains a reference to the trait's
/// `Self` parameter — i.e. the bound variable with index
/// `trait_self_param_idx` at depth `outer_binder`.
///
/// Depending on `allow_self_projection`, projections whose trait is the
/// checked trait or one of its super traits (`Self::Assoc`) are tolerated.
fn contains_illegal_self_type_reference<T: TypeVisitable<Interner>>(
    db: &dyn HirDatabase,
    def: GenericDefId,
    trait_: TraitId,
    t: &T,
    outer_binder: DebruijnIndex,
    allow_self_projection: AllowSelfProjection,
) -> bool {
    let Some(trait_self_param_idx) = trait_self_param_idx(db.upcast(), def) else {
        // `def` has no `Self` parameter, so nothing can reference it.
        return false;
    };
    struct IllegalSelfTypeVisitor<'a> {
        db: &'a dyn HirDatabase,
        trait_: TraitId,
        /// Super traits of `trait_`, computed lazily on the first projection
        /// encountered (used to whitelist `Self` projections).
        super_traits: Option<SmallVec<[TraitId; 4]>>,
        trait_self_param_idx: usize,
        allow_self_projection: AllowSelfProjection,
    }
    impl<'a> TypeVisitor<Interner> for IllegalSelfTypeVisitor<'a> {
        type BreakTy = ();
        fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
            self
        }
        fn interner(&self) -> Interner {
            Interner
        }
        fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow<Self::BreakTy> {
            match ty.kind(Interner) {
                TyKind::BoundVar(BoundVar { debruijn, index }) => {
                    if *debruijn == outer_binder && *index == self.trait_self_param_idx {
                        // Found a direct `Self` reference.
                        ControlFlow::Break(())
                    } else {
                        ty.super_visit_with(self.as_dyn(), outer_binder)
                    }
                }
                TyKind::Alias(AliasTy::Projection(proj)) => match self.allow_self_projection {
                    AllowSelfProjection::Yes => {
                        let trait_ = proj.trait_(self.db);
                        if self.super_traits.is_none() {
                            self.super_traits =
                                Some(all_super_traits(self.db.upcast(), self.trait_));
                        }
                        if self.super_traits.as_ref().is_some_and(|s| s.contains(&trait_)) {
                            // Projection on (a super trait of) the checked
                            // trait: allowed, don't descend into it.
                            ControlFlow::Continue(())
                        } else {
                            ty.super_visit_with(self.as_dyn(), outer_binder)
                        }
                    }
                    AllowSelfProjection::No => ty.super_visit_with(self.as_dyn(), outer_binder),
                },
                _ => ty.super_visit_with(self.as_dyn(), outer_binder),
            }
        }
        fn visit_const(
            &mut self,
            constant: &chalk_ir::Const<Interner>,
            outer_binder: DebruijnIndex,
        ) -> std::ops::ControlFlow<Self::BreakTy> {
            // Only the constant's type can carry a `Self` reference.
            constant.data(Interner).ty.super_visit_with(self.as_dyn(), outer_binder)
        }
    }
    let mut visitor = IllegalSelfTypeVisitor {
        db,
        trait_,
        super_traits: None,
        trait_self_param_idx,
        allow_self_projection,
    };
    t.visit_with(visitor.as_dyn(), outer_binder).is_break()
}
/// Checks one associated item of `trait_` for object-safety violations,
/// forwarding each to `cb`.
fn object_safety_violation_for_assoc_item<F>(
    db: &dyn HirDatabase,
    trait_: TraitId,
    item: AssocItemId,
    cb: &mut F,
) -> ControlFlow<()>
where
    F: FnMut(ObjectSafetyViolation) -> ControlFlow<()>,
{
    // Any item that has a `Self : Sized` requisite is otherwise
    // exempt from the regulations.
    if generics_require_sized_self(db, item.into()) {
        return ControlFlow::Continue(());
    }
    match item {
        AssocItemId::ConstId(it) => cb(ObjectSafetyViolation::AssocConst(it)),
        AssocItemId::FunctionId(it) => {
            // Translate per-method violations into the item-level variant.
            virtual_call_violations_for_method(db, trait_, it, &mut |mvc| {
                cb(ObjectSafetyViolation::Method(it, mvc))
            })
        }
        AssocItemId::TypeAliasId(it) => {
            // GATs are a violation unless the unstable
            // `generic_associated_type_extended` feature is enabled.
            let def_map = db.crate_def_map(trait_.krate(db.upcast()));
            if def_map.is_unstable_feature_enabled(&intern::sym::generic_associated_type_extended) {
                ControlFlow::Continue(())
            } else {
                let generic_params = db.generic_params(item.into());
                if !generic_params.is_empty() {
                    cb(ObjectSafetyViolation::GAT(it))
                } else {
                    ControlFlow::Continue(())
                }
            }
        }
    }
}
/// Computes every reason `func` could not be called through a `dyn Trait`
/// vtable, streaming each [`MethodViolationCode`] into `cb`.
fn virtual_call_violations_for_method<F>(
    db: &dyn HirDatabase,
    trait_: TraitId,
    func: FunctionId,
    cb: &mut F,
) -> ControlFlow<()>
where
    F: FnMut(MethodViolationCode) -> ControlFlow<()>,
{
    let func_data = db.function_data(func);
    if !func_data.has_self_param() {
        cb(MethodViolationCode::StaticMethod)?;
    }
    if func_data.is_async() {
        cb(MethodViolationCode::AsyncFn)?;
    }
    let sig = callable_item_sig(db, func.into());
    // Skip the receiver (`self`) when looking for `Self` in parameter types.
    if sig.skip_binders().params().iter().skip(1).any(|ty| {
        contains_illegal_self_type_reference(
            db,
            func.into(),
            trait_,
            ty,
            DebruijnIndex::INNERMOST,
            AllowSelfProjection::Yes,
        )
    }) {
        cb(MethodViolationCode::ReferencesSelfInput)?;
    }
    if contains_illegal_self_type_reference(
        db,
        func.into(),
        trait_,
        sig.skip_binders().ret(),
        DebruijnIndex::INNERMOST,
        AllowSelfProjection::Yes,
    ) {
        cb(MethodViolationCode::ReferencesSelfOutput)?;
    }
    // NOTE(review): async fns were already reported as `AsyncFn` above;
    // presumably the RPIT check is skipped for them to avoid reporting the
    // desugared future twice — confirm.
    if !func_data.is_async() {
        if let Some(mvc) = contains_illegal_impl_trait_in_trait(db, &sig) {
            cb(mvc)?;
        }
    }
    let generic_params = db.generic_params(func.into());
    // Only type/const parameters count; lifetime parameters are fine.
    if generic_params.len_type_or_consts() > 0 {
        cb(MethodViolationCode::Generic)?;
    }
    if func_data.has_self_param() && !receiver_is_dispatchable(db, trait_, func, &sig) {
        cb(MethodViolationCode::UndispatchableReceiver)?;
    }
    let predicates = &*db.generic_predicates_without_parent(func.into());
    let trait_self_idx = trait_self_param_idx(db.upcast(), func.into());
    for pred in predicates {
        let pred = pred.skip_binders().skip_binders();
        if matches!(pred, WhereClause::TypeOutlives(_)) {
            continue;
        }
        // Allow `impl AutoTrait` predicates
        if let WhereClause::Implemented(TraitRef { trait_id, substitution }) = pred {
            let trait_data = db.trait_data(from_chalk_trait_id(*trait_id));
            if trait_data.is_auto
                && substitution
                    .as_slice(Interner)
                    .first()
                    .and_then(|arg| arg.ty(Interner))
                    .and_then(|ty| ty.bound_var(Interner))
                    .is_some_and(|b| {
                        b.debruijn == DebruijnIndex::ONE && Some(b.index) == trait_self_idx
                    })
            {
                continue;
            }
        }
        if contains_illegal_self_type_reference(
            db,
            func.into(),
            trait_,
            pred,
            DebruijnIndex::ONE,
            AllowSelfProjection::Yes,
        ) {
            // One where-clause violation is enough; stop scanning predicates.
            cb(MethodViolationCode::WhereClauseReferencesSelf)?;
            break;
        }
    }
    ControlFlow::Continue(())
}
/// Checks whether the receiver type of `func` can be dispatched on.
///
/// Mirrors rustc's algorithm: substitute `Self` with an arbitrary sized type
/// `U` (here `u32`), assume `Self: Unsize<U>` and `U: Trait<..>`, then ask
/// the trait solver whether `Receiver: DispatchFromDyn<Receiver[Self => U]>`
/// holds uniquely.
fn receiver_is_dispatchable(
    db: &dyn HirDatabase,
    trait_: TraitId,
    func: FunctionId,
    sig: &Binders<CallableSig>,
) -> bool {
    let Some(trait_self_idx) = trait_self_param_idx(db.upcast(), func.into()) else {
        return false;
    };
    // `self: Self` can't be dispatched on, but this is already considered object safe.
    // See rustc's comment on https://github.com/rust-lang/rust/blob/3f121b9461cce02a703a0e7e450568849dfaa074/compiler/rustc_trait_selection/src/traits/object_safety.rs#L433-L437
    if sig
        .skip_binders()
        .params()
        .first()
        .and_then(|receiver| receiver.bound_var(Interner))
        .is_some_and(|b| {
            b == BoundVar { debruijn: DebruijnIndex::INNERMOST, index: trait_self_idx }
        })
    {
        return true;
    }
    let placeholder_subst = generics(db.upcast(), func.into()).placeholder_subst(db);
    let substituted_sig = sig.clone().substitute(Interner, &placeholder_subst);
    let Some(receiver_ty) = substituted_sig.params().first() else {
        // No receiver at all: nothing to dispatch on.
        return false;
    };
    let krate = func.module(db.upcast()).krate();
    let traits = (
        db.lang_item(krate, LangItem::Unsize).and_then(|it| it.as_trait()),
        db.lang_item(krate, LangItem::DispatchFromDyn).and_then(|it| it.as_trait()),
    );
    let (Some(unsize_did), Some(dispatch_from_dyn_did)) = traits else {
        // Without these lang items dispatchability cannot be proven.
        return false;
    };
    // Type `U`
    let unsized_self_ty =
        TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(Interner);
    // `Receiver[Self => U]`
    let Some(unsized_receiver_ty) = receiver_for_self_ty(db, func, unsized_self_ty.clone()) else {
        return false;
    };
    let self_ty = placeholder_subst.as_slice(Interner)[trait_self_idx].assert_ty_ref(Interner);
    // Assumption: `Self: Unsize<U>`.
    let unsized_predicate = WhereClause::Implemented(TraitRef {
        trait_id: to_chalk_trait_id(unsize_did),
        substitution: Substitution::from_iter(Interner, [self_ty.clone(), unsized_self_ty.clone()]),
    });
    // Assumption: `U: Trait<..>` with all other generic args unchanged.
    let trait_predicate = WhereClause::Implemented(TraitRef {
        trait_id: to_chalk_trait_id(trait_),
        substitution: Substitution::from_iter(
            Interner,
            std::iter::once(unsized_self_ty.clone().cast(Interner))
                .chain(placeholder_subst.iter(Interner).skip(1).cloned()),
        ),
    });
    let generic_predicates = &*db.generic_predicates(func.into());
    let clauses = std::iter::once(unsized_predicate)
        .chain(std::iter::once(trait_predicate))
        .chain(generic_predicates.iter().map(|pred| {
            pred.clone().substitute(Interner, &placeholder_subst).into_value_and_skipped_binders().0
        }))
        .map(|pred| {
            pred.cast::<chalk_ir::ProgramClause<Interner>>(Interner).into_from_env_clause(Interner)
        });
    let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
    // Goal: `Receiver: DispatchFromDyn<Receiver[Self => U]>`.
    let obligation = WhereClause::Implemented(TraitRef {
        trait_id: to_chalk_trait_id(dispatch_from_dyn_did),
        substitution: Substitution::from_iter(Interner, [receiver_ty.clone(), unsized_receiver_ty]),
    });
    let goal = GoalData::DomainGoal(chalk_ir::DomainGoal::Holds(obligation)).intern(Interner);
    let in_env = chalk_ir::InEnvironment::new(&env, goal);
    let mut table = chalk_solve::infer::InferenceTable::<Interner>::new();
    let canonicalized = table.canonicalize(Interner, in_env);
    let solution = db.trait_solve(krate, None, canonicalized.quantified);
    // Only an unambiguous (unique) solution counts as dispatchable.
    matches!(solution, Some(Solution::Unique(_)))
}
/// Rebuilds the receiver type of `func`'s signature with the trait's `Self`
/// parameter substituted by `ty`. Returns `None` when `func` has no trait
/// `Self` parameter or no parameters at all.
fn receiver_for_self_ty(db: &dyn HirDatabase, func: FunctionId, ty: Ty) -> Option<Ty> {
    let generics = generics(db.upcast(), func.into());
    let self_idx = trait_self_param_idx(db.upcast(), func.into())?;
    let placeholders = generics.placeholder_subst(db);
    // Replace only the `Self` slot; every other generic arg stays as-is.
    let args = placeholders.iter(Interner).enumerate().map(|(idx, arg)| {
        if idx == self_idx {
            ty.clone().cast(Interner)
        } else {
            arg.clone()
        }
    });
    let subst = Substitution::from_iter(Interner, args);
    let sig = callable_item_sig(db, func.into()).substitute(Interner, &subst);
    sig.params_and_return.first().cloned()
}
/// Returns a violation if the return type of `sig` contains return-position
/// `impl Trait` (RPITIT).
fn contains_illegal_impl_trait_in_trait(
    db: &dyn HirDatabase,
    sig: &Binders<CallableSig>,
) -> Option<MethodViolationCode> {
    struct OpaqueTypeCollector(FxHashSet<OpaqueTyId>);
    impl TypeVisitor<Interner> for OpaqueTypeCollector {
        type BreakTy = ();
        fn as_dyn(&mut self) -> &mut dyn TypeVisitor<Interner, BreakTy = Self::BreakTy> {
            self
        }
        fn interner(&self) -> Interner {
            Interner
        }
        fn visit_ty(&mut self, ty: &Ty, outer_binder: DebruijnIndex) -> ControlFlow<Self::BreakTy> {
            // Collect every opaque type; never break, so all are gathered.
            if let TyKind::OpaqueType(opaque_ty_id, _) = ty.kind(Interner) {
                self.0.insert(*opaque_ty_id);
            }
            ty.super_visit_with(self.as_dyn(), outer_binder)
        }
    }
    let ret = sig.skip_binders().ret();
    let mut visitor = OpaqueTypeCollector(FxHashSet::default());
    ret.visit_with(visitor.as_dyn(), DebruijnIndex::INNERMOST);
    // Since we haven't implemented RPITIT in proper way like rustc yet,
    // just check whether `ret` contains RPIT for now
    for opaque_ty in visitor.0 {
        let impl_trait_id = db.lookup_intern_impl_trait_id(opaque_ty.into());
        if matches!(impl_trait_id, ImplTraitId::ReturnTypeImplTrait(..)) {
            return Some(MethodViolationCode::ReferencesImplTraitInTrait);
        }
    }
    None
}
#[cfg(test)]
mod tests;

View File

@ -0,0 +1,393 @@
use std::ops::ControlFlow;
use hir_def::db::DefDatabase;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::ToSmolStr;
use test_fixture::WithFixture;
use crate::{object_safety::object_safety_with_callback, test_db::TestDB};
use super::{
MethodViolationCode::{self, *},
ObjectSafetyViolation,
};
use ObjectSafetyViolationKind::*;
// Mirrors `ObjectSafetyViolation` with the id payloads stripped, so the
// expectations in the tests below are easy to write.
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
enum ObjectSafetyViolationKind {
    SizedSelf,
    SelfReferential,
    Method(MethodViolationCode),
    AssocConst,
    GAT,
    HasNonSafeSuperTrait,
}
/// Test harness: collects the object-safety violations of every trait
/// declared in `ra_fixture` and compares them — as order-insensitive sets of
/// [`ObjectSafetyViolationKind`] — against `expected`, keyed by trait name.
///
/// Panics if a trait's violations differ from its expectation, or if
/// `expected` names a trait that does not exist in the fixture.
fn check_object_safety<'a>(
    ra_fixture: &str,
    expected: impl IntoIterator<Item = (&'a str, Vec<ObjectSafetyViolationKind>)>,
) {
    let mut expected: FxHashMap<_, _> =
        expected.into_iter().map(|(id, osvs)| (id, FxHashSet::from_iter(osvs))).collect();
    let (db, file_ids) = TestDB::with_many_files(ra_fixture);
    for (trait_id, name) in file_ids.into_iter().flat_map(|file_id| {
        // Gather every trait declared at the top level of each fixture file.
        let module_id = db.module_for_file(file_id);
        let def_map = module_id.def_map(&db);
        let scope = &def_map[module_id.local_id].scope;
        scope
            .declarations()
            .filter_map(|def| {
                if let hir_def::ModuleDefId::TraitId(trait_id) = def {
                    let name =
                        db.trait_data(trait_id).name.display_no_db(file_id.edition()).to_smolstr();
                    Some((trait_id, name))
                } else {
                    None
                }
            })
            .collect::<Vec<_>>()
    }) {
        let Some(expected) = expected.remove(name.as_str()) else {
            // Traits without an expectation are simply ignored.
            continue;
        };
        let mut osvs = FxHashSet::default();
        object_safety_with_callback(&db, trait_id, &mut |osv| {
            // Strip ids so expectations don't need to mention them.
            osvs.insert(match osv {
                ObjectSafetyViolation::SizedSelf => SizedSelf,
                ObjectSafetyViolation::SelfReferential => SelfReferential,
                ObjectSafetyViolation::Method(_, mvc) => Method(mvc),
                ObjectSafetyViolation::AssocConst(_) => AssocConst,
                ObjectSafetyViolation::GAT(_) => GAT,
                ObjectSafetyViolation::HasNonSafeSuperTrait(_) => HasNonSafeSuperTrait,
            });
            ControlFlow::Continue(())
        });
        assert_eq!(osvs, expected, "Object safety violations for `{name}` do not match;");
    }
    let remains: Vec<_> = expected.keys().collect();
    assert!(remains.is_empty(), "Following traits do not exist in the test fixture; {remains:?}");
}
#[test]
fn item_bounds_can_reference_self() {
    // Associated-type bounds may mention `Self` without making the trait
    // object-unsafe.
    check_object_safety(
        r#"
//- minicore: eq
pub trait Foo {
    type X: PartialEq;
    type Y: PartialEq<Self::Y>;
    type Z: PartialEq<Self::Y>;
}
"#,
        [("Foo", vec![])],
    );
}
#[test]
fn associated_consts() {
    // An associated const makes a trait object-unsafe.
    check_object_safety(
        r#"
trait Bar {
    const X: usize;
}
"#,
        [("Bar", vec![AssocConst])],
    );
}
#[test]
fn bounds_reference_self() {
    // An item bound referencing `Self` (outside a projection on the trait
    // itself) is self-referential.
    check_object_safety(
        r#"
//- minicore: eq
trait X {
    type U: PartialEq<Self>;
}
"#,
        [("X", vec![SelfReferential])],
    );
}
#[test]
fn by_value_self() {
    // `self: Self` receivers are considered object safe (dispatch is checked
    // elsewhere), with or without an explicit `Self: Sized` clause.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Bar {
    fn bar(self);
}
trait Baz {
    fn baz(self: Self);
}
trait Quux {
    // Legal because of the where clause:
    fn baz(self: Self) where Self : Sized;
}
"#,
        [("Bar", vec![]), ("Baz", vec![]), ("Quux", vec![])],
    );
}
#[test]
fn generic_methods() {
    // Type-generic methods are violations unless gated by `Self: Sized`;
    // lifetime-only generics are fine.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Bar {
    fn bar<T>(&self, t: T);
}
trait Quux {
    fn bar<T>(&self, t: T)
        where Self : Sized;
}
trait Qax {
    fn bar<'a>(&self, t: &'a ());
}
"#,
        [("Bar", vec![Method(Generic)]), ("Quux", vec![]), ("Qax", vec![])],
    );
}
#[test]
fn mentions_self() {
    // `Self` in a parameter or return type is a violation unless the method
    // requires `Self: Sized`.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Bar {
    fn bar(&self, x: &Self);
}
trait Baz {
    fn baz(&self) -> Self;
}
trait Quux {
    fn quux(&self, s: &Self) -> Self where Self : Sized;
}
"#,
        [
            ("Bar", vec![Method(ReferencesSelfInput)]),
            ("Baz", vec![Method(ReferencesSelfOutput)]),
            ("Quux", vec![]),
        ],
    );
}
#[test]
fn no_static() {
    // Methods without a `self` parameter are not dispatchable.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Foo {
    fn foo() {}
}
"#,
        [("Foo", vec![Method(StaticMethod)])],
    );
}
#[test]
fn sized_self() {
    // `Self: Sized` — whether as a supertrait bound or a where clause —
    // makes the whole trait object-unsafe.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Bar: Sized {
    fn bar<T>(&self, t: T);
}
"#,
        [("Bar", vec![SizedSelf])],
    );
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Bar
    where Self : Sized
{
    fn bar<T>(&self, t: T);
}
"#,
        [("Bar", vec![SizedSelf])],
    );
}
#[test]
fn supertrait_gat() {
    // An unsafe super trait (here: one with a GAT) is reported as
    // `HasNonSafeSuperTrait` on the subtrait.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait GatTrait {
    type Gat<T>;
}
trait SuperTrait<T>: GatTrait {}
"#,
        [("GatTrait", vec![GAT]), ("SuperTrait", vec![HasNonSafeSuperTrait])],
    );
}
#[test]
fn supertrait_mentions_self() {
    // A supertrait bound instantiated with `Self` is self-referential.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Bar<T> {
    fn bar(&self, x: &T);
}
trait Baz : Bar<Self> {
}
"#,
        [("Bar", vec![]), ("Baz", vec![SizedSelf, SelfReferential])],
    );
}
#[test]
fn rustc_issue_19538() {
    // Ported from rust-lang/rust#19538: inheriting from a generic-method
    // trait makes the subtrait unsafe via its super trait.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Foo {
    fn foo<T>(&self, val: T);
}
trait Bar: Foo {}
"#,
        [("Foo", vec![Method(Generic)]), ("Bar", vec![HasNonSafeSuperTrait])],
    );
}
#[test]
fn rustc_issue_22040() {
    // Ported from rust-lang/rust#22040: `PartialEq` as a supertrait implies
    // `PartialEq<Self>`, which is self-referential.
    check_object_safety(
        r#"
//- minicore: fmt, eq, dispatch_from_dyn
use core::fmt::Debug;
trait Expr: Debug + PartialEq {
    fn print_element_count(&self);
}
"#,
        [("Expr", vec![SelfReferential])],
    );
}
#[test]
fn rustc_issue_102762() {
    // Ported from rust-lang/rust#102762: a `self: &Box<Self>` receiver is
    // not dispatchable.
    check_object_safety(
        r#"
//- minicore: future, send, sync, dispatch_from_dyn, deref
use core::pin::Pin;
struct Box<T: ?Sized> {}
impl<T: ?Sized> core::ops::Deref for Box<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        loop {}
    }
}
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T> {}
struct Vec<T> {}
pub trait Fetcher: Send + Sync {
    fn get<'a>(self: &'a Box<Self>) -> Pin<Box<dyn Future<Output = Vec<u8>> + 'a>>
    where
        Self: Sync,
    {
        loop {}
    }
}
"#,
        [("Fetcher", vec![Method(UndispatchableReceiver)])],
    );
}
#[test]
fn rustc_issue_102933() {
    // Ported from rust-lang/rust#102933: projections on super traits with
    // specified associated types must stay object safe.
    check_object_safety(
        r#"
//- minicore: future, dispatch_from_dyn, deref
use core::future::Future;
struct Box<T: ?Sized> {}
impl<T: ?Sized> core::ops::Deref for Box<T> {
    type Target = T;
    fn deref(&self) -> &Self::Target {
        loop {}
    }
}
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T> {}
pub trait Service {
    type Response;
    type Future: Future<Output = Self::Response>;
}
pub trait A1: Service<Response = i32> {}
pub trait A2: Service<Future = Box<dyn Future<Output = i32>>> + A1 {
    fn foo(&self) {}
}
pub trait B1: Service<Future = Box<dyn Future<Output = i32>>> {}
pub trait B2: Service<Response = i32> + B1 {
    fn foo(&self) {}
}
"#,
        [("A2", vec![]), ("B2", vec![])],
    );
}
#[test]
fn rustc_issue_106247() {
    // Ported from rust-lang/rust#106247: an auto-trait bound on `Self` in a
    // method's where clause is allowed.
    check_object_safety(
        r#"
//- minicore: sync, dispatch_from_dyn
pub trait Trait {
    fn method(&self) where Self: Sync;
}
"#,
        [("Trait", vec![])],
    );
}
#[test]
fn std_error_is_object_safe() {
    // Shape of `std::error::Error` (with `provide`) must remain object safe.
    check_object_safety(
        r#"
//- minicore: fmt, dispatch_from_dyn
trait Erased<'a>: 'a {}
pub struct Request<'a>(dyn Erased<'a> + 'a);
pub trait Error: core::fmt::Debug + core::fmt::Display {
    fn provide<'a>(&'a self, request: &mut Request<'a>);
}
"#,
        [("Error", vec![])],
    );
}
#[test]
fn lifetime_gat_is_object_unsafe() {
    // Even a lifetime-only GAT makes the trait object-unsafe.
    check_object_safety(
        r#"
//- minicore: dispatch_from_dyn
trait Foo {
    type Bar<'a>;
}
"#,
        [("Foo", vec![ObjectSafetyViolationKind::GAT])],
    );
}

View File

@ -49,7 +49,7 @@ fn let_stmt_coerce() {
//- minicore: coerce_unsized
fn test() {
let x: &[isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(Ref('?3, Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref('?2, Not)), Pointer(Unsize)
let x: *const [isize] = &[1];
// ^^^^ adjustments: Deref(None), Borrow(RawPtr(Not)), Pointer(Unsize)
}
@ -148,7 +148,7 @@ fn foo<T>(x: &[T]) -> &[T] { x }
fn test(i: i32) {
let x = match i {
2 => foo(&[2]),
// ^^^^ adjustments: Deref(None), Borrow(Ref('?10, Not)), Pointer(Unsize)
// ^^^^ adjustments: Deref(None), Borrow(Ref('?8, Not)), Pointer(Unsize)
1 => &[1],
_ => &[3],
};

View File

@ -1,7 +1,7 @@
use expect_test::expect;
use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::tests::check_infer_with_mismatches;
use crate::tests::{check_infer_with_mismatches, check_no_mismatches};
use super::{check_infer, check_types};
@ -206,6 +206,7 @@ fn expr_macro_def_expanded_in_various_places() {
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
100..119 'for _ ...!() {}': ()
104..105 '_': IntoIterator::Item<isize>
117..119 '{}': ()
124..134 '|| spam!()': impl Fn() -> isize
@ -299,6 +300,7 @@ fn expr_macro_rules_expanded_in_various_places() {
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
114..133 'for _ ...!() {}': ()
118..119 '_': IntoIterator::Item<isize>
131..133 '{}': ()
138..148 '|| spam!()': impl Fn() -> isize
@ -1404,3 +1406,105 @@ fn foo(t: Tensor) {
"#,
);
}
#[test]
fn asm_unit() {
    // A plain `asm!` expression infers to `()`.
    check_no_mismatches(
        r#"
//- minicore: asm
fn unit() {
    core::arch::asm!("")
}
"#,
    );
}
#[test]
fn asm_no_return() {
    // With `options(noreturn)`, `asm!` infers to `!` and satisfies a `-> !`
    // function.
    check_no_mismatches(
        r#"
//- minicore: asm
fn unit() -> ! {
    core::arch::asm!("", options(noreturn))
}
"#,
    );
}
#[test]
fn asm_things() {
check_infer(
r#"
//- minicore: asm, concat
fn main() {
unsafe {
let foo = 1;
let mut o = 0;
core::arch::asm!(
"%input = OpLoad _ {0}",
concat!("%result = ", bar, " _ %input"),
"OpStore {1} %result",
in(reg) &foo,
in(reg) &mut o,
);
o
let thread_id: usize;
core::arch::asm!("
mov {0}, gs:[0x30]
mov {0}, [{0}+0x48]
", out(reg) thread_id, options(pure, readonly, nostack));
static UNMAP_BASE: usize;
const MEM_RELEASE: usize;
static VirtualFree: usize;
const OffPtr: usize;
const OffFn: usize;
core::arch::asm!("
push {free_type}
push {free_size}
push {base}
mov eax, fs:[30h]
mov eax, [eax+8h]
add eax, {off_fn}
mov [eax-{off_fn}+{off_ptr}], eax
push eax
jmp {virtual_free}
",
off_ptr = const OffPtr,
off_fn = const OffFn,
free_size = const 0,
free_type = const MEM_RELEASE,
virtual_free = sym VirtualFree,
base = sym UNMAP_BASE,
options(noreturn),
);
}
}
"#,
expect![[r#"
!0..122 'builti...muto,)': ()
!0..136 'builti...tack))': ()
!0..449 'builti...urn),)': !
10..1236 '{ ... } }': ()
16..1234 'unsafe... }': ()
37..40 'foo': i32
43..44 '1': i32
58..63 'mut o': i32
66..67 '0': i32
!95..104 'thread_id': usize
!103..107 '&foo': &'? i32
!104..107 'foo': i32
!115..120 '&muto': &'? mut i32
!119..120 'o': i32
293..294 'o': i32
308..317 'thread_id': usize
"#]],
)
}

View File

@ -1286,6 +1286,7 @@ fn main() {
fn method_on_dyn_impl() {
check_types(
r#"
//- minicore: coerce_unsized
trait Foo {}
impl Foo for u32 {}

View File

@ -371,6 +371,7 @@ fn diverging_expression_3_break() {
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
151..172 'for a ...eak; }': ()
155..156 'a': {unknown}
160..161 'b': {unknown}
162..172 '{ break; }': ()
@ -387,6 +388,7 @@ fn diverging_expression_3_break() {
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
237..250 'for a in b {}': ()
241..242 'a': {unknown}
246..247 'b': {unknown}
248..250 '{}': ()
@ -402,6 +404,7 @@ fn diverging_expression_3_break() {
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
315..337 'for a ...urn; }': ()
319..320 'a': {unknown}
324..325 'b': {unknown}
326..337 '{ return; }': ()

View File

@ -57,6 +57,7 @@ fn infer_pattern() {
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
101..151 'for (e... }': ()
105..111 '(e, f)': ({unknown}, {unknown})
106..107 'e': {unknown}
109..110 'f': {unknown}

View File

@ -275,6 +275,7 @@ fn infer_std_crash_5() {
32..320 'for co... }': ()
32..320 'for co... }': ()
32..320 'for co... }': ()
32..320 'for co... }': ()
36..43 'content': {unknown}
47..60 'doesnt_matter': {unknown}
61..320 '{ ... }': ()
@ -1065,7 +1066,7 @@ fn test() {
fn bare_dyn_trait_binders_9639() {
check_no_mismatches(
r#"
//- minicore: fn, coerce_unsized
//- minicore: fn, coerce_unsized, dispatch_from_dyn
fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
loop {}
}
@ -1244,6 +1245,7 @@ fn test() {
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
16..66 'for _ ... }': ()
20..21 '_': IntoIterator::Item<()>
25..39 '{ let x = 0; }': ()
31..32 'x': i32
@ -1907,6 +1909,7 @@ fn dont_unify_on_casts() {
// #15246
check_types(
r#"
//- minicore: sized
fn unify(_: [bool; 1]) {}
fn casted(_: *const bool) {}
fn default<T>() -> T { loop {} }
@ -1926,6 +1929,7 @@ fn test() {
fn rustc_test_issue_52437() {
check_types(
r#"
//- minicore: sized
fn main() {
let x = [(); &(&'static: loop { |x| {}; }) as *const _ as usize]
//^ [(); _]
@ -2228,3 +2232,66 @@ async fn f<A, B, C>() -> Bar {}
"#]],
);
}
#[test]
fn issue_18109() {
check_infer(
r#"
//- minicore: option
struct Map<T, U>(T, U);
impl<T, U> Map<T, U> {
fn new() -> Self { loop {} }
fn get(&self, _: &T) -> Option<&U> { loop {} }
}
fn test(x: bool) {
let map = Map::new();
let _ = match x {
true => {
let Some(val) = map.get(&8) else { return };
*val
}
false => return,
_ => 42,
};
}
"#,
expect![[r#"
69..80 '{ loop {} }': Map<T, U>
71..78 'loop {}': !
76..78 '{}': ()
93..97 'self': &'? Map<T, U>
99..100 '_': &'? T
120..131 '{ loop {} }': Option<&'? U>
122..129 'loop {}': !
127..129 '{}': ()
143..144 'x': bool
152..354 '{ ... }; }': ()
162..165 'map': Map<i32, i32>
168..176 'Map::new': fn new<i32, i32>() -> Map<i32, i32>
168..178 'Map::new()': Map<i32, i32>
188..189 '_': i32
192..351 'match ... }': i32
198..199 'x': bool
210..214 'true': bool
210..214 'true': bool
218..303 '{ ... }': i32
236..245 'Some(val)': Option<&'? i32>
241..244 'val': &'? i32
248..251 'map': Map<i32, i32>
248..259 'map.get(&8)': Option<&'? i32>
256..258 '&8': &'? i32
257..258 '8': i32
265..275 '{ return }': !
267..273 'return': !
289..293 '*val': i32
290..293 'val': &'? i32
312..317 'false': bool
312..317 'false': bool
321..327 'return': !
337..338 '_': bool
342..344 '42': i32
"#]],
);
}

View File

@ -917,7 +917,7 @@ fn test(a: A<i32>) {
278..279 'A': extern "rust-call" A<i32>(*mut i32) -> A<i32>
278..292 'A(0 as *mut _)': A<i32>
278..307 'A(0 as...B(a)))': &'? i32
280..281 '0': i32
280..281 '0': usize
280..291 '0 as *mut _': *mut i32
297..306 '&&B(B(a))': &'? &'? B<B<A<i32>>>
298..306 '&B(B(a))': &'? B<B<A<i32>>>
@ -3572,6 +3572,7 @@ fn f<T>(t: Ark<T>) {
fn ref_to_array_to_ptr_cast() {
check_types(
r#"
//- minicore: sized
fn default<T>() -> T { loop {} }
fn foo() {
let arr = [default()];

View File

@ -1448,14 +1448,20 @@ fn foo<X>() -> Foo<impl Future<Output = ()>> {
fn dyn_trait() {
check_infer(
r#"
//- minicore: sized
//- minicore: deref, dispatch_from_dyn
trait Trait<T> {
fn foo(&self) -> T;
fn foo2(&self) -> i64;
}
fn bar() -> dyn Trait<u64> {}
fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
struct Box<T: ?Sized> {}
impl<T: ?Sized> core::ops::Deref for Box<T> {
type Target = T;
}
fn bar() -> Box<dyn Trait<u64>> {}
fn test(x: Box<dyn Trait<u64>>, y: &dyn Trait<u64>) {
x;
y;
let z = bar();
@ -1469,27 +1475,27 @@ fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
expect![[r#"
29..33 'self': &'? Self
54..58 'self': &'? Self
97..99 '{}': dyn Trait<u64>
109..110 'x': dyn Trait<u64>
128..129 'y': &'? dyn Trait<u64>
148..265 '{ ...2(); }': ()
154..155 'x': dyn Trait<u64>
161..162 'y': &'? dyn Trait<u64>
172..173 'z': dyn Trait<u64>
176..179 'bar': fn bar() -> dyn Trait<u64>
176..181 'bar()': dyn Trait<u64>
187..188 'x': dyn Trait<u64>
187..194 'x.foo()': u64
200..201 'y': &'? dyn Trait<u64>
200..207 'y.foo()': u64
213..214 'z': dyn Trait<u64>
213..220 'z.foo()': u64
226..227 'x': dyn Trait<u64>
226..234 'x.foo2()': i64
240..241 'y': &'? dyn Trait<u64>
240..248 'y.foo2()': i64
254..255 'z': dyn Trait<u64>
254..262 'z.foo2()': i64
198..200 '{}': Box<dyn Trait<u64>>
210..211 'x': Box<dyn Trait<u64>>
234..235 'y': &'? dyn Trait<u64>
254..371 '{ ...2(); }': ()
260..261 'x': Box<dyn Trait<u64>>
267..268 'y': &'? dyn Trait<u64>
278..279 'z': Box<dyn Trait<u64>>
282..285 'bar': fn bar() -> Box<dyn Trait<u64>>
282..287 'bar()': Box<dyn Trait<u64>>
293..294 'x': Box<dyn Trait<u64>>
293..300 'x.foo()': u64
306..307 'y': &'? dyn Trait<u64>
306..313 'y.foo()': u64
319..320 'z': Box<dyn Trait<u64>>
319..326 'z.foo()': u64
332..333 'x': Box<dyn Trait<u64>>
332..340 'x.foo2()': i64
346..347 'y': &'? dyn Trait<u64>
346..354 'y.foo2()': i64
360..361 'z': Box<dyn Trait<u64>>
360..368 'z.foo2()': i64
"#]],
);
}
@ -1534,7 +1540,7 @@ fn test(s: S<u32, i32>) {
fn dyn_trait_bare() {
check_infer(
r#"
//- minicore: sized
//- minicore: sized, dispatch_from_dyn
trait Trait {
fn foo(&self) -> u64;
}
@ -1570,7 +1576,7 @@ fn test(x: Trait, y: &Trait) -> u64 {
check_infer_with_mismatches(
r#"
//- minicore: fn, coerce_unsized
//- minicore: fn, coerce_unsized, dispatch_from_dyn
struct S;
impl S {
fn foo(&self) {}
@ -3106,7 +3112,7 @@ fn dyn_fn_param_informs_call_site_closure_signature() {
cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature);
check_types(
r#"
//- minicore: fn, coerce_unsized
//- minicore: fn, coerce_unsized, dispatch_from_dyn
struct S;
impl S {
fn inherent(&self) -> u8 { 0 }
@ -3151,7 +3157,7 @@ fn infer_box_fn_arg() {
// The type mismatch is because we don't define Unsize and CoerceUnsized
check_infer_with_mismatches(
r#"
//- minicore: fn, deref, option
//- minicore: fn, deref, option, dispatch_from_dyn
#[lang = "owned_box"]
pub struct Box<T: ?Sized> {
inner: *mut T,

View File

@ -4,7 +4,9 @@
//! This probably isn't the best way to do this -- ideally, diagnostics should
//! be expressed in terms of hir types themselves.
pub use hir_ty::diagnostics::{CaseType, IncorrectCase};
use hir_ty::{db::HirDatabase, diagnostics::BodyValidationDiagnostic, InferenceDiagnostic};
use hir_ty::{
db::HirDatabase, diagnostics::BodyValidationDiagnostic, CastError, InferenceDiagnostic,
};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
@ -50,10 +52,12 @@ macro_rules! diagnostics {
diagnostics![
AwaitOutsideOfAsync,
BreakOutsideOfLoop,
CastToUnsized,
ExpectedFunction,
InactiveCode,
IncoherentImpl,
IncorrectCase,
InvalidCast,
InvalidDeriveTarget,
MacroDefError,
MacroError,
@ -254,6 +258,8 @@ pub struct PrivateField {
#[derive(Debug)]
pub struct MissingUnsafe {
pub expr: InFile<AstPtr<ast::Expr>>,
/// If true, the diagnostics is an `unsafe_op_in_unsafe_fn` lint instead of a hard error.
pub only_lint: bool,
}
#[derive(Debug)]
@ -364,6 +370,20 @@ pub struct RemoveUnnecessaryElse {
pub if_expr: InFile<AstPtr<ast::IfExpr>>,
}
#[derive(Debug)]
pub struct CastToUnsized {
pub expr: InFile<AstPtr<ast::Expr>>,
pub cast_ty: Type,
}
#[derive(Debug)]
pub struct InvalidCast {
pub expr: InFile<AstPtr<ast::Expr>>,
pub error: CastError,
pub expr_ty: Type,
pub cast_ty: Type,
}
impl AnyDiagnostic {
pub(crate) fn body_validation_diagnostic(
db: &dyn HirDatabase,
@ -620,6 +640,16 @@ impl AnyDiagnostic {
};
MismatchedTupleStructPatArgCount { expr_or_pat, expected, found }.into()
}
InferenceDiagnostic::CastToUnsized { expr, cast_ty } => {
let expr = expr_syntax(*expr)?;
CastToUnsized { expr, cast_ty: Type::new(db, def, cast_ty.clone()) }.into()
}
InferenceDiagnostic::InvalidCast { expr, error, expr_ty, cast_ty } => {
let expr = expr_syntax(*expr)?;
let expr_ty = Type::new(db, def, expr_ty.clone());
let cast_ty = Type::new(db, def, cast_ty.clone());
InvalidCast { expr, error: *error, expr_ty, cast_ty }.into()
}
})
}
}

View File

@ -99,17 +99,20 @@ impl HirDisplay for Function {
}
// FIXME: Use resolved `param.ty` once we no longer discard lifetimes
let body = db.body(self.id.into());
for (type_ref, param) in data.params.iter().zip(self.assoc_fn_params(db)).skip(skip_self) {
let local = param.as_local(db).map(|it| it.name(db));
if !first {
f.write_str(", ")?;
} else {
first = false;
}
match local {
Some(name) => write!(f, "{}: ", name.display(f.db.upcast(), f.edition()))?,
None => f.write_str("_: ")?,
}
let pat_id = body.params[param.idx - body.self_param.is_some() as usize];
let pat_str =
body.pretty_print_pat(db.upcast(), self.id.into(), pat_id, true, f.edition());
f.write_str(&pat_str)?;
f.write_str(": ")?;
type_ref.hir_fmt(f)?;
}

View File

@ -14,8 +14,8 @@ use tt::TextRange;
use crate::{
db::HirDatabase, Adt, Callee, Const, Enum, ExternCrateDecl, Field, FieldSource, Function, Impl,
Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static, Struct, Trait,
TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant,
InlineAsmOperand, Label, LifetimeParam, LocalSource, Macro, Module, Param, SelfParam, Static,
Struct, Trait, TraitAlias, TypeAlias, TypeOrConstParam, Union, Variant,
};
pub trait HasSource {
@ -292,3 +292,26 @@ impl HasSource for ExternCrateDecl {
Some(self.id.lookup(db.upcast()).source(db.upcast()))
}
}
impl HasSource for InlineAsmOperand {
type Ast = ast::AsmOperandNamed;
fn source(self, db: &dyn HirDatabase) -> Option<InFile<Self::Ast>> {
let (_body, source_map) = db.body_with_source_map(self.owner);
if let Ok(src) = source_map.expr_syntax(self.expr) {
let root = src.file_syntax(db.upcast());
return src
.map(|ast| match ast.to_node(&root) {
ast::Expr::AsmExpr(asm) => asm
.asm_pieces()
.filter_map(|it| match it {
ast::AsmPiece::AsmOperandNamed(it) => Some(it),
_ => None,
})
.nth(self.index),
_ => None,
})
.transpose();
}
None
}
}

View File

@ -43,7 +43,7 @@ use hir_def::{
body::{BodyDiagnostic, SyntheticSyntax},
data::adt::VariantData,
generics::{LifetimeParamData, TypeOrConstParamData, TypeParamProvenance},
hir::{BindingAnnotation, BindingId, ExprOrPatId, LabelId, Pat},
hir::{BindingAnnotation, BindingId, ExprId, ExprOrPatId, LabelId, Pat},
item_tree::{AttrOwner, FieldParent, ItemTreeFieldId, ItemTreeNode},
lang_item::LangItemTarget,
layout::{self, ReprOptions, TargetDataLayout},
@ -66,7 +66,7 @@ use hir_ty::{
diagnostics::BodyValidationDiagnostic,
error_lifetime, known_const_to_ast,
layout::{Layout as TyLayout, RustcEnumVariantIdx, RustcFieldIdx, TagEncoding},
method_resolution::{self},
method_resolution,
mir::{interpret_mir, MutBorrowKind},
primitive::UintTy,
traits::FnTrait,
@ -80,7 +80,7 @@ use nameres::diagnostics::DefDiagnosticKind;
use rustc_hash::FxHashSet;
use smallvec::SmallVec;
use span::{Edition, EditionedFileId, FileId, MacroCallId, SyntaxContextId};
use stdx::{impl_from, never};
use stdx::{format_to, impl_from, never};
use syntax::{
ast::{self, HasAttrs as _, HasGenericParams, HasName},
format_smolstr, AstNode, AstPtr, SmolStr, SyntaxNode, SyntaxNodePtr, TextRange, ToSmolStr, T,
@ -137,6 +137,7 @@ pub use {
hygiene::{marks_rev, SyntaxContextExt},
inert_attr_macro::AttributeTemplate,
name::Name,
prettify_macro_expansion,
proc_macro::{ProcMacros, ProcMacrosBuilder},
tt, ExpandResult, HirFileId, HirFileIdExt, MacroFileId, MacroFileIdExt,
},
@ -145,7 +146,8 @@ pub use {
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
layout::LayoutError,
mir::{MirEvalError, MirLowerError},
FnAbi, PointerCast, Safety,
object_safety::{MethodViolationCode, ObjectSafetyViolation},
CastError, FnAbi, PointerCast, Safety,
},
// FIXME: Properly encapsulate mir
hir_ty::{mir, Interner as ChalkTyInterner},
@ -1882,9 +1884,10 @@ impl DefWithBody {
);
}
for expr in hir_ty::diagnostics::missing_unsafe(db, self.into()) {
let (unafe_exprs, only_lint) = hir_ty::diagnostics::missing_unsafe(db, self.into());
for expr in unafe_exprs {
match source_map.expr_syntax(expr) {
Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
Ok(expr) => acc.push(MissingUnsafe { expr, only_lint }.into()),
Err(SyntheticSyntax) => {
// FIXME: Here and elsewhere in this file, the `expr` was
// desugared, report or assert that this doesn't happen.
@ -2206,6 +2209,35 @@ impl Function {
db.function_data(self.id).is_async()
}
pub fn returns_impl_future(self, db: &dyn HirDatabase) -> bool {
if self.is_async(db) {
return true;
}
let Some(impl_traits) = self.ret_type(db).as_impl_traits(db) else { return false };
let Some(future_trait_id) =
db.lang_item(self.ty(db).env.krate, LangItem::Future).and_then(|t| t.as_trait())
else {
return false;
};
let Some(sized_trait_id) =
db.lang_item(self.ty(db).env.krate, LangItem::Sized).and_then(|t| t.as_trait())
else {
return false;
};
let mut has_impl_future = false;
impl_traits
.filter(|t| {
let fut = t.id == future_trait_id;
has_impl_future |= fut;
!fut && t.id != sized_trait_id
})
// all traits but the future trait must be auto traits
.all(|t| t.is_auto(db))
&& has_impl_future
}
/// Does this function have `#[test]` attribute?
pub fn is_test(self, db: &dyn HirDatabase) -> bool {
db.function_data(self.id).attrs.is_test()
@ -2522,6 +2554,17 @@ impl Const {
Type::from_value_def(db, self.id)
}
/// Evaluate the constant and return the result as a string.
///
/// This function is intended for IDE assistance, different from [`Const::render_eval`].
pub fn eval(self, db: &dyn HirDatabase, edition: Edition) -> Result<String, ConstEvalError> {
let c = db.const_eval(self.id.into(), Substitution::empty(Interner), None)?;
Ok(format!("{}", c.display(db, edition)))
}
/// Evaluate the constant and return the result as a string, with more detailed information.
///
/// This function is intended for user-facing display.
pub fn render_eval(
self,
db: &dyn HirDatabase,
@ -2536,10 +2579,16 @@ impl Const {
let value = u128::from_le_bytes(mir::pad16(b, false));
let value_signed =
i128::from_le_bytes(mir::pad16(b, matches!(s, Scalar::Int(_))));
if value >= 10 {
return Ok(format!("{value_signed} ({value:#X})"));
let mut result = if let Scalar::Int(_) = s {
value_signed.to_string()
} else {
return Ok(format!("{value_signed}"));
value.to_string()
};
if value >= 10 {
format_to!(result, " ({value:#X})");
return Ok(result);
} else {
return Ok(result);
}
}
}
@ -2641,6 +2690,10 @@ impl Trait {
.count()
}
pub fn object_safety(&self, db: &dyn HirDatabase) -> Option<ObjectSafetyViolation> {
hir_ty::object_safety::object_safety(db, self.id)
}
fn all_macro_calls(&self, db: &dyn HirDatabase) -> Box<[(AstId<ast::Item>, MacroCallId)]> {
db.trait_data(self.id)
.macro_calls
@ -5211,6 +5264,26 @@ impl Type {
}
}
#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
pub struct InlineAsmOperand {
owner: DefWithBodyId,
expr: ExprId,
index: usize,
}
impl InlineAsmOperand {
pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
self.owner.into()
}
pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
match &db.body(self.owner)[self.expr] {
hir_def::hir::Expr::InlineAsm(e) => e.operands.get(self.index)?.0.clone(),
_ => None,
}
}
}
// FIXME: Document this
#[derive(Debug)]
pub struct Callable {

View File

@ -13,7 +13,8 @@ use either::Either;
use hir_def::{
hir::Expr,
lower::LowerCtx,
nameres::MacroSubNs,
nameres::{MacroSubNs, ModuleOrigin},
path::ModPath,
resolver::{self, HasResolver, Resolver, TypeNs},
type_ref::Mutability,
AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId,
@ -31,7 +32,7 @@ use intern::Symbol;
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use span::{EditionedFileId, FileId};
use span::{EditionedFileId, FileId, HirFileIdRepr};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
@ -46,9 +47,9 @@ use crate::{
source_analyzer::{resolve_hir_path, SourceAnalyzer},
Access, Adjust, Adjustment, Adt, AutoBorrow, BindingMode, BuiltinAttr, Callable, Const,
ConstParam, Crate, DeriveHelper, Enum, Field, Function, HasSource, HirFileId, Impl, InFile,
Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef,
Static, Struct, ToolModule, Trait, TraitAlias, TupleField, Type, TypeAlias, TypeParam, Union,
Variant, VariantDef,
InlineAsmOperand, ItemInNs, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name,
OverloadedDeref, Path, ScopeDef, Static, Struct, ToolModule, Trait, TraitAlias, TupleField,
Type, TypeAlias, TypeParam, Union, Variant, VariantDef,
};
const CONTINUE_NO_BREAKS: ControlFlow<Infallible, ()> = ControlFlow::Continue(());
@ -322,6 +323,47 @@ impl<'db> SemanticsImpl<'db> {
tree
}
pub fn find_parent_file(&self, file_id: HirFileId) -> Option<InFile<SyntaxNode>> {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
let module = self.file_to_module_defs(file_id.file_id()).next()?;
let def_map = self.db.crate_def_map(module.krate().id);
match def_map[module.id.local_id].origin {
ModuleOrigin::CrateRoot { .. } => None,
ModuleOrigin::File { declaration, declaration_tree_id, .. } => {
let file_id = declaration_tree_id.file_id();
let in_file = InFile::new(file_id, declaration);
let node = in_file.to_node(self.db.upcast());
let root = find_root(node.syntax());
self.cache(root, file_id);
Some(in_file.with_value(node.syntax().clone()))
}
_ => unreachable!("FileId can only belong to a file module"),
}
}
HirFileIdRepr::MacroFile(macro_file) => {
let node = self
.db
.lookup_intern_macro_call(macro_file.macro_call_id)
.to_node(self.db.upcast());
let root = find_root(&node.value);
self.cache(root, node.file_id);
Some(node)
}
}
}
/// Returns the `SyntaxNode` of the module. If this is a file module, returns
/// the `SyntaxNode` of the *definition* file, not of the *declaration*.
pub fn module_definition_node(&self, module: Module) -> InFile<SyntaxNode> {
let def_map = module.id.def_map(self.db.upcast());
let definition = def_map[module.id.local_id].origin.definition_source(self.db.upcast());
let definition = definition.map(|it| it.node());
let root_node = find_root(&definition.value);
self.cache(root_node, definition.file_id);
definition
}
pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
let node = self.db.parse_or_expand(file_id);
self.cache(node.clone(), file_id);
@ -344,6 +386,19 @@ impl<'db> SemanticsImpl<'db> {
Some(node)
}
pub fn check_cfg_attr(&self, attr: &ast::TokenTree) -> Option<bool> {
let file_id = self.find_file(attr.syntax()).file_id;
let krate = match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
self.file_to_module_defs(file_id.file_id()).next()?.krate().id
}
HirFileIdRepr::MacroFile(macro_file) => {
self.db.lookup_intern_macro_call(macro_file.macro_call_id).krate
}
};
hir_expand::check_cfg_attr_value(self.db.upcast(), attr, krate)
}
/// Expands the macro if it isn't one of the built-in ones that expand to custom syntax or dummy
/// expansions.
pub fn expand_allowed_builtins(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
@ -367,7 +422,6 @@ impl<'db> SemanticsImpl<'db> {
| BuiltinFnLikeExpander::File
| BuiltinFnLikeExpander::ModulePath
| BuiltinFnLikeExpander::Asm
| BuiltinFnLikeExpander::LlvmAsm
| BuiltinFnLikeExpander::GlobalAsm
| BuiltinFnLikeExpander::LogSyntax
| BuiltinFnLikeExpander::TraceMacros
@ -408,7 +462,7 @@ impl<'db> SemanticsImpl<'db> {
Some(
calls
.into_iter()
.map(|call| macro_call_to_macro_id(ctx, call?).map(|id| Macro { id }))
.map(|call| macro_call_to_macro_id(self, ctx, call?).map(|id| Macro { id }))
.collect(),
)
})
@ -546,11 +600,11 @@ impl<'db> SemanticsImpl<'db> {
)
}
/// Retrieves all the formatting parts of the format_args! template string.
/// Retrieves all the formatting parts of the format_args! (or `asm!`) template string.
pub fn as_format_args_parts(
&self,
string: &ast::String,
) -> Option<Vec<(TextRange, Option<PathResolution>)>> {
) -> Option<Vec<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)>> {
let quote = string.open_quote_text_range()?;
let token = self.wrap_token_infile(string.syntax().clone()).into_real_file().ok()?;
@ -560,14 +614,33 @@ impl<'db> SemanticsImpl<'db> {
let string = ast::String::cast(token)?;
let literal =
string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
.map(|(range, res)| (range + quote.end(), res))
.collect();
Some(res)
let parent = literal.parent()?;
if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
let source_analyzer = self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
let res = source_analyzer
.as_format_args_parts(self.db, format_args.as_ref())?
.map(|(range, res)| (range + quote.end(), res.map(Either::Left)))
.collect();
Some(res)
} else {
let asm = ast::AsmExpr::cast(parent)?;
let source_analyzer = self.analyze_no_infer(asm.syntax())?;
let line = asm.template().position(|it| *it.syntax() == literal)?;
let asm = self.wrap_node_infile(asm);
let (owner, (expr, asm_parts)) = source_analyzer.as_asm_parts(asm.as_ref())?;
let res = asm_parts
.get(line)?
.iter()
.map(|&(range, index)| {
(
range + quote.end(),
Some(Either::Right(InlineAsmOperand { owner, expr, index })),
)
})
.collect();
Some(res)
}
})()
.map_or(ControlFlow::Continue(()), ControlFlow::Break)
})
@ -578,7 +651,7 @@ impl<'db> SemanticsImpl<'db> {
&self,
original_token: SyntaxToken,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
let original_string = ast::String::cast(original_token.clone())?;
let original_token = self.wrap_token_infile(original_token).into_real_file().ok()?;
let quote = original_string.open_quote_text_range()?;
@ -599,13 +672,27 @@ impl<'db> SemanticsImpl<'db> {
&self,
string: ast::String,
offset: TextSize,
) -> Option<(TextRange, Option<PathResolution>)> {
) -> Option<(TextRange, Option<Either<PathResolution, InlineAsmOperand>>)> {
debug_assert!(offset <= string.syntax().text_range().len());
let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?;
let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?;
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
let parent = literal.parent()?;
if let Some(format_args) = ast::FormatArgsExpr::cast(parent.clone()) {
let source_analyzer = &self.analyze_no_infer(format_args.syntax())?;
let format_args = self.wrap_node_infile(format_args);
source_analyzer
.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset)
.map(|(range, res)| (range, res.map(Either::Left)))
} else {
let asm = ast::AsmExpr::cast(parent)?;
let source_analyzer = &self.analyze_no_infer(asm.syntax())?;
let line = asm.template().position(|it| *it.syntax() == literal)?;
let asm = self.wrap_node_infile(asm);
source_analyzer.resolve_offset_in_asm_template(asm.as_ref(), line, offset).map(
|(owner, (expr, range, index))| {
(range, Some(Either::Right(InlineAsmOperand { owner, expr, index })))
},
)
}
}
/// Maps a node down by mapping its first and last token down.
@ -818,16 +905,7 @@ impl<'db> SemanticsImpl<'db> {
let InMacroFile { file_id, value: mapped_tokens } = self.with_ctx(|ctx| {
Some(
ctx.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| {
let exp_info = macro_file.expansion_info(self.db.upcast());
let InMacroFile { file_id, value } = exp_info.expanded();
self.cache(value, file_id.into());
exp_info
})
.get_or_insert_expansion(self, macro_file)
.map_range_down(span)?
.map(SmallVec::<[_; 2]>::from_iter),
)
@ -1113,11 +1191,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_file = file_id.macro_file()?;
self.with_ctx(|ctx| {
let expansion_info = ctx
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
let expansion_info = ctx.cache.get_or_insert_expansion(self, macro_file);
expansion_info.arg().map(|node| node?.parent()).transpose()
})
}
@ -1333,7 +1407,7 @@ impl<'db> SemanticsImpl<'db> {
let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
self.with_ctx(|ctx| {
ctx.macro_call_to_macro_call(macro_call)
.and_then(|call| macro_call_to_macro_id(ctx, call))
.and_then(|call| macro_call_to_macro_id(self, ctx, call))
.map(Into::into)
})
.or_else(|| {
@ -1375,7 +1449,7 @@ impl<'db> SemanticsImpl<'db> {
let item_in_file = self.wrap_node_infile(item.clone());
let id = self.with_ctx(|ctx| {
let macro_call_id = ctx.item_to_macro_call(item_in_file.as_ref())?;
macro_call_to_macro_id(ctx, macro_call_id)
macro_call_to_macro_id(self, ctx, macro_call_id)
})?;
Some(Macro { id })
}
@ -1384,6 +1458,16 @@ impl<'db> SemanticsImpl<'db> {
self.analyze(path.syntax())?.resolve_path(self.db, path)
}
pub fn resolve_mod_path(
&self,
scope: &SyntaxNode,
path: &ModPath,
) -> Option<impl Iterator<Item = ItemInNs>> {
let analyze = self.analyze(scope)?;
let items = analyze.resolver.resolve_module_path_in_items(self.db.upcast(), path);
Some(items.iter_items().map(|(item, _)| item.into()))
}
fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
}
@ -1685,6 +1769,7 @@ impl<'db> SemanticsImpl<'db> {
}
fn macro_call_to_macro_id(
sema: &SemanticsImpl<'_>,
ctx: &mut SourceToDefCtx<'_, '_>,
macro_call_id: MacroCallId,
) -> Option<MacroId> {
@ -1700,11 +1785,7 @@ fn macro_call_to_macro_id(
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
let expansion_info = ctx.cache.get_or_insert_expansion(sema, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
@ -1716,11 +1797,7 @@ fn macro_call_to_macro_id(
it.to_ptr(db).to_node(&db.parse(file_id).syntax_node())
}
HirFileIdRepr::MacroFile(macro_file) => {
let expansion_info = ctx
.cache
.expansion_info_cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(ctx.db.upcast()));
let expansion_info = ctx.cache.get_or_insert_expansion(sema, macro_file);
it.to_ptr(db).to_node(&expansion_info.expanded().value)
}
};
@ -1771,6 +1848,7 @@ to_def_impls![
(crate::Label, ast::Label, label_to_def),
(crate::Adt, ast::Adt, adt_to_def),
(crate::ExternCrateDecl, ast::ExternCrate, extern_crate_to_def),
(crate::InlineAsmOperand, ast::AsmOperandNamed, asm_operand_to_def),
(MacroCallId, ast::MacroCall, macro_call_to_macro_call),
];

View File

@ -99,7 +99,8 @@ use hir_def::{
VariantId,
};
use hir_expand::{
attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, MacroCallId,
attrs::AttrId, name::AsName, ExpansionInfo, HirFileId, HirFileIdExt, InMacroFile, MacroCallId,
MacroFileIdExt,
};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
@ -110,15 +111,32 @@ use syntax::{
AstNode, AstPtr, SyntaxNode,
};
use crate::{db::HirDatabase, InFile};
use crate::{db::HirDatabase, InFile, InlineAsmOperand, SemanticsImpl};
#[derive(Default)]
pub(super) struct SourceToDefCache {
pub(super) dynmap_cache: FxHashMap<(ChildContainer, HirFileId), DynMap>,
pub(super) expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
expansion_info_cache: FxHashMap<MacroFileId, ExpansionInfo>,
pub(super) file_to_def_cache: FxHashMap<FileId, SmallVec<[ModuleId; 1]>>,
}
impl SourceToDefCache {
pub(super) fn get_or_insert_expansion(
&mut self,
sema: &SemanticsImpl<'_>,
macro_file: MacroFileId,
) -> &ExpansionInfo {
self.expansion_info_cache.entry(macro_file).or_insert_with(|| {
let exp_info = macro_file.expansion_info(sema.db.upcast());
let InMacroFile { file_id, value } = exp_info.expanded();
sema.cache(value, file_id.into());
exp_info
})
}
}
pub(super) struct SourceToDefCtx<'db, 'cache> {
pub(super) db: &'db dyn HirDatabase,
pub(super) cache: &'cache mut SourceToDefCache,
@ -273,6 +291,25 @@ impl SourceToDefCtx<'_, '_> {
ast::Adt::Union(it) => self.union_to_def(InFile::new(file_id, it)).map(AdtId::UnionId),
}
}
pub(super) fn asm_operand_to_def(
&mut self,
src: InFile<&ast::AsmOperandNamed>,
) -> Option<InlineAsmOperand> {
let asm = src.value.syntax().parent().and_then(ast::AsmExpr::cast)?;
let index = asm
.asm_pieces()
.filter_map(|it| match it {
ast::AsmPiece::AsmOperandNamed(it) => Some(it),
_ => None,
})
.position(|it| it == *src.value)?;
let container = self.find_pat_or_label_container(src.syntax_ref())?;
let (_, source_map) = self.db.body_with_source_map(container);
let expr = source_map.node_expr(src.with_value(&ast::Expr::AsmExpr(asm)))?;
Some(InlineAsmOperand { owner: container, expr, index })
}
pub(super) fn bind_pat_to_def(
&mut self,
src: InFile<&ast::IdentPat>,
@ -281,7 +318,7 @@ impl SourceToDefCtx<'_, '_> {
let (body, source_map) = self.db.body_with_source_map(container);
let src = src.cloned().map(ast::Pat::from);
let pat_id = source_map.node_pat(src.as_ref())?;
// the pattern could resolve to a constant, verify that that is not the case
// the pattern could resolve to a constant, verify that this is not the case
if let crate::Pat::Bind { id, .. } = body[pat_id] {
Some((container, id))
} else {

View File

@ -904,6 +904,22 @@ impl SourceAnalyzer {
})
}
pub(crate) fn resolve_offset_in_asm_template(
&self,
asm: InFile<&ast::AsmExpr>,
line: usize,
offset: TextSize,
) -> Option<(DefWithBodyId, (ExprId, TextRange, usize))> {
let (def, _, body_source_map) = self.def.as_ref()?;
let (expr, args) = body_source_map.asm_template_args(asm)?;
Some(*def).zip(
args.get(line)?
.iter()
.find(|(range, _)| range.contains_inclusive(offset))
.map(|(range, idx)| (expr, *range, *idx)),
)
}
pub(crate) fn as_format_args_parts<'a>(
&'a self,
db: &'a dyn HirDatabase,
@ -927,6 +943,14 @@ impl SourceAnalyzer {
))
}
pub(crate) fn as_asm_parts(
&self,
asm: InFile<&ast::AsmExpr>,
) -> Option<(DefWithBodyId, (ExprId, &[Vec<(TextRange, usize)>]))> {
let (def, _, body_source_map) = self.def.as_ref()?;
Some(*def).zip(body_source_map.asm_template_args(asm))
}
fn resolve_impl_method_or_trait_def(
&self,
db: &dyn HirDatabase,

View File

@ -34,5 +34,8 @@ expect-test = "1.4.0"
test-utils.workspace = true
test-fixture.workspace = true
[features]
in-rust-tree = []
[lints]
workspace = true

View File

@ -1,12 +1,13 @@
use std::iter::{self, Peekable};
use either::Either;
use hir::{sym, Adt, Crate, HasAttrs, HasSource, ImportPathConfig, ModuleDef, Semantics};
use hir::{sym, Adt, Crate, HasAttrs, ImportPathConfig, ModuleDef, Semantics};
use ide_db::syntax_helpers::suggest_name;
use ide_db::RootDatabase;
use ide_db::{famous_defs::FamousDefs, helpers::mod_path_to_ast};
use itertools::Itertools;
use syntax::ast::edit_in_place::Removable;
use syntax::ast::{self, make, AstNode, HasName, MatchArmList, MatchExpr, Pat};
use syntax::ast::{self, make, AstNode, MatchArmList, MatchExpr, Pat};
use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
@ -90,7 +91,7 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
.into_iter()
.filter_map(|variant| {
Some((
build_pat(ctx.db(), module, variant, cfg)?,
build_pat(ctx, module, variant, cfg)?,
variant.should_be_hidden(ctx.db(), module.krate()),
))
})
@ -141,9 +142,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants
.into_iter()
.filter_map(|variant| build_pat(ctx.db(), module, variant, cfg));
let patterns =
variants.into_iter().filter_map(|variant| build_pat(ctx, module, variant, cfg));
(ast::Pat::from(make::tuple_pat(patterns)), is_hidden)
})
@ -174,9 +174,8 @@ pub(crate) fn add_missing_match_arms(acc: &mut Assists, ctx: &AssistContext<'_>)
let is_hidden = variants
.iter()
.any(|variant| variant.should_be_hidden(ctx.db(), module.krate()));
let patterns = variants
.into_iter()
.filter_map(|variant| build_pat(ctx.db(), module, variant, cfg));
let patterns =
variants.into_iter().filter_map(|variant| build_pat(ctx, module, variant, cfg));
(ast::Pat::from(make::slice_pat(patterns)), is_hidden)
})
.filter(|(variant_pat, _)| is_variant_missing(&top_lvl_pats, variant_pat));
@ -438,33 +437,39 @@ fn resolve_array_of_enum_def(
}
fn build_pat(
db: &RootDatabase,
ctx: &AssistContext<'_>,
module: hir::Module,
var: ExtendedVariant,
cfg: ImportPathConfig,
) -> Option<ast::Pat> {
let db = ctx.db();
match var {
ExtendedVariant::Variant(var) => {
let edition = module.krate().edition(db);
let path = mod_path_to_ast(&module.find_path(db, ModuleDef::from(var), cfg)?, edition);
// FIXME: use HIR for this; it doesn't currently expose struct vs. tuple vs. unit variants though
Some(match var.source(db)?.value.kind() {
ast::StructKind::Tuple(field_list) => {
let pats =
iter::repeat(make::wildcard_pat().into()).take(field_list.fields().count());
let fields = var.fields(db);
let pat = match var.kind(db) {
hir::StructKind::Tuple => {
let mut name_generator = suggest_name::NameGenerator::new();
let pats = fields.into_iter().map(|f| {
let name = name_generator.for_type(&f.ty(db), db, edition);
match name {
Some(name) => make::ext::simple_ident_pat(make::name(&name)).into(),
None => make::wildcard_pat().into(),
}
});
make::tuple_struct_pat(path, pats).into()
}
ast::StructKind::Record(field_list) => {
let pats = field_list.fields().map(|f| {
make::ext::simple_ident_pat(
f.name().expect("Record field must have a name"),
)
.into()
});
hir::StructKind::Record => {
let pats = fields
.into_iter()
.map(|f| make::name(f.name(db).as_str()))
.map(|name| make::ext::simple_ident_pat(name).into());
make::record_pat(path, pats).into()
}
ast::StructKind::Unit => make::path_pat(path),
})
hir::StructKind::Unit => make::path_pat(path),
};
Some(pat)
}
ExtendedVariant::True => Some(ast::Pat::from(make::literal_pat("true"))),
ExtendedVariant::False => Some(ast::Pat::from(make::literal_pat("false"))),
@ -1976,4 +1981,81 @@ fn a() {
}"#,
)
}
#[test]
fn suggest_name_for_tuple_struct_patterns() {
// single tuple struct
check_assist(
add_missing_match_arms,
r#"
struct S;
pub enum E {
A
B(S),
}
fn f() {
let value = E::A;
match value {
$0
}
}
"#,
r#"
struct S;
pub enum E {
A
B(S),
}
fn f() {
let value = E::A;
match value {
$0E::A => todo!(),
E::B(s) => todo!(),
}
}
"#,
);
// multiple tuple struct patterns
check_assist(
add_missing_match_arms,
r#"
struct S1;
struct S2;
pub enum E {
A
B(S1, S2),
}
fn f() {
let value = E::A;
match value {
$0
}
}
"#,
r#"
struct S1;
struct S2;
pub enum E {
A
B(S1, S2),
}
fn f() {
let value = E::A;
match value {
$0E::A => todo!(),
E::B(s1, s2) => todo!(),
}
}
"#,
);
}
}

View File

@ -0,0 +1,206 @@
use hir::Semantics;
use ide_db::{
assists::{AssistId, AssistKind},
source_change::SourceChangeBuilder,
RootDatabase,
};
use syntax::{ast, AstNode};
use crate::{AssistContext, Assists};
// Assist: explicit_enum_discriminant
//
// Adds explicit discriminant to all enum variants.
//
// ```
// enum TheEnum$0 {
// Foo,
// Bar,
// Baz = 42,
// Quux,
// }
// ```
// ->
// ```
// enum TheEnum {
// Foo = 0,
// Bar = 1,
// Baz = 42,
// Quux = 43,
// }
// ```
/// Assist entry point: offers "Add explicit enum discriminants" on the enum
/// under the cursor, filling in `= <value>` for every variant that lacks one.
///
/// Returns `None` (assist not applicable) when:
/// - the cursor is not on an enum, or the enum cannot be resolved to HIR;
/// - the enum carries data but has no primitive `#[repr(..)]` (such enums
///   have no stable discriminant values to spell out);
/// - the enum has no variants, or every variant already has an explicit
///   discriminant expression.
pub(crate) fn explicit_enum_discriminant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
    let enum_node = ctx.find_node_at_offset::<ast::Enum>()?;
    let enum_def = ctx.sema.to_def(&enum_node)?;

    let is_data_carrying = enum_def.is_data_carrying(ctx.db());
    // `repr.int` is `Some` only for primitive reprs like `#[repr(u8)]`.
    let has_primitive_repr = enum_def.repr(ctx.db()).and_then(|repr| repr.int).is_some();

    // Data carrying enums without a primitive repr have no stable discriminants.
    if is_data_carrying && !has_primitive_repr {
        return None;
    }

    let variant_list = enum_node.variant_list()?;

    // Don't offer the assist if the enum has no variants or if all variants already have an
    // explicit discriminant.
    if variant_list.variants().all(|variant_node| variant_node.expr().is_some()) {
        return None;
    }

    acc.add(
        AssistId("explicit_enum_discriminant", AssistKind::RefactorRewrite),
        "Add explicit enum discriminants",
        enum_node.syntax().text_range(),
        |builder| {
            // Each variant is handled independently; ones that already have a
            // discriminant (or fail to resolve/evaluate) are left untouched.
            for variant_node in variant_list.variants() {
                add_variant_discriminant(&ctx.sema, builder, &variant_node);
            }
        },
    );

    Some(())
}
/// Appends an explicit ` = <value>` discriminant to `variant_node`.
///
/// Does nothing when the variant already spells out a discriminant, when it
/// cannot be resolved to a HIR variant, or when its discriminant cannot be
/// evaluated.
fn add_variant_discriminant(
    sema: &Semantics<'_, RootDatabase>,
    builder: &mut SourceChangeBuilder,
    variant_node: &ast::Variant,
) {
    // An explicit discriminant is already present — leave it alone.
    if variant_node.expr().is_some() {
        return;
    }

    // Resolve AST -> HIR and evaluate the discriminant; bail out silently if
    // either step fails, since there is then no value to insert.
    let evaluated = sema.to_def(variant_node).and_then(|variant_def| variant_def.eval(sema.db).ok());

    if let Some(discriminant) = evaluated {
        let insert_at = variant_node.syntax().text_range().end();
        builder.insert(insert_at, format!(" = {discriminant}"));
    }
}
#[cfg(test)]
mod tests {
    use crate::tests::{check_assist, check_assist_not_applicable};

    use super::explicit_enum_discriminant;

    // No repr, no data-carrying variants: values continue from the last
    // explicit discriminant (including negative ones).
    #[test]
    fn non_primitive_repr_non_data_bearing_add_discriminant() {
        check_assist(
            explicit_enum_discriminant,
            r#"
enum TheEnum$0 {
Foo,
Bar,
Baz = 42,
Quux,
FooBar = -5,
FooBaz,
}
"#,
            r#"
enum TheEnum {
Foo = 0,
Bar = 1,
Baz = 42,
Quux = 43,
FooBar = -5,
FooBaz = -4,
}
"#,
        );
    }

    // A primitive repr makes discriminants of data-carrying variants stable,
    // so the assist applies even to struct- and tuple-like variants.
    #[test]
    fn primitive_repr_data_bearing_add_discriminant() {
        check_assist(
            explicit_enum_discriminant,
            r#"
#[repr(u8)]
$0enum TheEnum {
Foo { x: u32 },
Bar,
Baz(String),
Quux,
}
"#,
            r#"
#[repr(u8)]
enum TheEnum {
Foo { x: u32 } = 0,
Bar = 1,
Baz(String) = 2,
Quux = 3,
}
"#,
        );
    }

    // Data-carrying enum without a primitive repr: no stable discriminants,
    // the assist must not be offered.
    #[test]
    fn non_primitive_repr_data_bearing_not_applicable() {
        check_assist_not_applicable(
            explicit_enum_discriminant,
            r#"
enum TheEnum$0 {
Foo,
Bar(u16),
Baz,
}
"#,
        );
    }

    // i64 repr with extreme values: successor discriminants are computed in
    // the repr's integer type (note wrap from 1 << 63 to the negative range).
    #[test]
    fn primitive_repr_non_data_bearing_add_discriminant() {
        check_assist(
            explicit_enum_discriminant,
            r#"
#[repr(i64)]
enum TheEnum {
Foo = 1 << 63,
Bar,
Baz$0 = 0x7fff_ffff_ffff_fffe,
Quux,
}
"#,
            r#"
#[repr(i64)]
enum TheEnum {
Foo = 1 << 63,
Bar = -9223372036854775807,
Baz = 0x7fff_ffff_ffff_fffe,
Quux = 9223372036854775807,
}
"#,
        );
    }

    // Nothing to do when every variant already has an explicit discriminant.
    #[test]
    fn discriminants_already_explicit_not_applicable() {
        check_assist_not_applicable(
            explicit_enum_discriminant,
            r#"
enum TheEnum$0 {
Foo = 0,
Bar = 4,
}
"#,
        );
    }

    // An empty enum has no variants to annotate.
    #[test]
    fn empty_enum_not_applicable() {
        check_assist_not_applicable(
            explicit_enum_discriminant,
            r#"
enum TheEnum$0 {}
"#,
        );
    }
}

View File

@ -2,7 +2,7 @@ use either::Either;
use ide_db::syntax_helpers::node_ext::walk_ty;
use syntax::{
ast::{self, edit::IndentLevel, make, AstNode, HasGenericArgs, HasGenericParams, HasName},
ted,
syntax_editor,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -43,9 +43,8 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
AssistId("extract_type_alias", AssistKind::RefactorExtract),
"Extract type as type alias",
target,
|edit| {
let node = edit.make_syntax_mut(node.clone());
let target_ty = edit.make_mut(ty.clone());
|builder| {
let mut edit = builder.make_editor(node);
let mut known_generics = match item.generic_param_list() {
Some(it) => it.generic_params().collect(),
@ -67,25 +66,28 @@ pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) ->
.map_or(String::new(), |it| it.to_generic_args().to_string());
// FIXME: replace with a `ast::make` constructor
let new_ty = make::ty(&format!("Type{ty_args}")).clone_for_update();
ted::replace(target_ty.syntax(), new_ty.syntax());
edit.replace(ty.syntax(), new_ty.syntax());
// Insert new alias
let indent = IndentLevel::from_node(&node);
let ty_alias = make::ty_alias("Type", generic_params, None, None, Some((ty, None)))
.clone_for_update();
ted::insert_all(
ted::Position::before(node),
if let Some(cap) = ctx.config.snippet_cap {
if let Some(name) = ty_alias.name() {
edit.add_annotation(name.syntax(), builder.make_tabstop_before(cap));
}
}
let indent = IndentLevel::from_node(node);
edit.insert_all(
syntax_editor::Position::before(node),
vec![
ty_alias.syntax().clone().into(),
make::tokens::whitespace(&format!("\n\n{indent}")).into(),
],
);
if let Some(cap) = ctx.config.snippet_cap {
if let Some(name) = ty_alias.name() {
edit.add_tabstop_before(cap, name);
}
}
builder.add_file_edits(ctx.file_id(), edit);
},
)
}

View File

@ -1,4 +1,5 @@
use hir::TypeInfo;
use ide_db::syntax_helpers::suggest_name;
use syntax::{
ast::{self, edit::IndentLevel, edit_in_place::Indent, make, AstNode, HasName},
ted, NodeOrToken,
@ -6,7 +7,7 @@ use syntax::{
SyntaxNode, T,
};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: extract_variable
//

View File

@ -1,4 +1,8 @@
use syntax::{algo::non_trivia_sibling, Direction, SyntaxKind, T};
use ide_db::base_db::SourceDatabase;
use syntax::TextSize;
use syntax::{
algo::non_trivia_sibling, ast, AstNode, Direction, SyntaxKind, SyntaxToken, TextRange, T,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -21,6 +25,8 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
let comma = ctx.find_token_syntax_at_offset(T![,])?;
let prev = non_trivia_sibling(comma.clone().into(), Direction::Prev)?;
let next = non_trivia_sibling(comma.clone().into(), Direction::Next)?;
let (mut prev_text, mut next_text) = (prev.to_string(), next.to_string());
let (mut prev_range, mut next_range) = (prev.text_range(), next.text_range());
// Don't apply a "flip" in case of a last comma
// that typically comes before punctuation
@ -34,17 +40,55 @@ pub(crate) fn flip_comma(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<(
return None;
}
if let Some(parent) = comma.parent().and_then(ast::TokenTree::cast) {
// An attribute. It often contains a path followed by a token tree (e.g. `align(2)`), so we have
// to be smarter.
let prev_start =
match comma.siblings_with_tokens(Direction::Prev).skip(1).find(|it| it.kind() == T![,])
{
Some(it) => position_after_token(it.as_token().unwrap()),
None => position_after_token(&parent.left_delimiter_token()?),
};
let prev_end = prev.text_range().end();
let next_start = next.text_range().start();
let next_end =
match comma.siblings_with_tokens(Direction::Next).skip(1).find(|it| it.kind() == T![,])
{
Some(it) => position_before_token(it.as_token().unwrap()),
None => position_before_token(&parent.right_delimiter_token()?),
};
prev_range = TextRange::new(prev_start, prev_end);
next_range = TextRange::new(next_start, next_end);
let file_text = ctx.db().file_text(ctx.file_id().file_id());
prev_text = file_text[prev_range].to_owned();
next_text = file_text[next_range].to_owned();
}
acc.add(
AssistId("flip_comma", AssistKind::RefactorRewrite),
"Flip comma",
comma.text_range(),
|edit| {
edit.replace(prev.text_range(), next.to_string());
edit.replace(next.text_range(), prev.to_string());
edit.replace(prev_range, next_text);
edit.replace(next_range, prev_text);
},
)
}
/// Position just before `token`, with any trivia (whitespace/comments) in
/// front of it excluded: the end of the previous non-trivia sibling when one
/// exists, otherwise the start of `token` itself.
fn position_before_token(token: &SyntaxToken) -> TextSize {
    non_trivia_sibling(token.clone().into(), Direction::Prev)
        .map_or_else(|| token.text_range().start(), |prev| prev.text_range().end())
}
/// Position just after `token`, with any trivia (whitespace/comments) behind
/// it excluded: the start of the next non-trivia sibling when one exists,
/// otherwise the end of `token` itself.
fn position_after_token(token: &SyntaxToken) -> TextSize {
    non_trivia_sibling(token.clone().into(), Direction::Next)
        .map_or_else(|| token.text_range().end(), |next| next.text_range().start())
}
#[cfg(test)]
mod tests {
use super::*;
@ -89,4 +133,18 @@ mod tests {
// See https://github.com/rust-lang/rust-analyzer/issues/7693
check_assist_not_applicable(flip_comma, r#"bar!(a,$0 b)"#);
}
#[test]
fn flip_comma_attribute() {
check_assist(
flip_comma,
r#"#[repr(align(2),$0 C)] struct Foo;"#,
r#"#[repr(C, align(2))] struct Foo;"#,
);
check_assist(
flip_comma,
r#"#[foo(bar, baz(1 + 1),$0 qux, other)] struct Foo;"#,
r#"#[foo(bar, qux, baz(1 + 1), other)] struct Foo;"#,
);
}
}

View File

@ -2,13 +2,14 @@ use std::ops::Not;
use crate::{
assist_context::{AssistContext, Assists},
utils::{convert_param_list_to_arg_list, suggest_name},
utils::convert_param_list_to_arg_list,
};
use either::Either;
use hir::{db::HirDatabase, HasVisibility};
use ide_db::{
assists::{AssistId, GroupLabel},
path_transform::PathTransform,
syntax_helpers::suggest_name,
FxHashMap, FxHashSet,
};
use itertools::Itertools;
@ -281,8 +282,11 @@ fn generate_impl(
ai.assoc_items()
.filter(|item| matches!(item, AssocItem::MacroCall(_)).not())
.for_each(|item| {
let assoc =
process_assoc_item(item, qualified_path_type.clone(), field_name);
let assoc = process_assoc_item(
item.clone_for_update(),
qualified_path_type.clone(),
field_name,
);
if let Some(assoc) = assoc {
delegate_assoc_items.add_item(assoc);
}
@ -583,7 +587,7 @@ fn resolve_name_conflicts(
for old_strukt_param in old_strukt_params.generic_params() {
// Get old name from `strukt`
let mut name = SmolStr::from(match &old_strukt_param {
let name = SmolStr::from(match &old_strukt_param {
ast::GenericParam::ConstParam(c) => c.name()?.to_string(),
ast::GenericParam::LifetimeParam(l) => {
l.lifetime()?.lifetime_ident_token()?.to_string()
@ -592,8 +596,19 @@ fn resolve_name_conflicts(
});
// The new name cannot be conflicted with generics in trait, and the renamed names.
name = suggest_name::for_unique_generic_name(&name, old_impl_params);
name = suggest_name::for_unique_generic_name(&name, &params);
let param_list_to_names = |param_list: &GenericParamList| {
param_list.generic_params().flat_map(|param| match param {
ast::GenericParam::TypeParam(t) => t.name().map(|name| name.to_string()),
p => Some(p.to_string()),
})
};
let existing_names = param_list_to_names(old_impl_params)
.chain(param_list_to_names(&params))
.collect_vec();
let mut name_generator = suggest_name::NameGenerator::new_with_names(
existing_names.iter().map(|s| s.as_str()),
);
let name = name_generator.suggest_name(&name);
match old_strukt_param {
ast::GenericParam::ConstParam(c) => {
if let Some(const_ty) = c.ty() {
@ -1212,9 +1227,9 @@ struct S<T> {
b : B<T>,
}
impl<T0> Trait<T0> for S<T0> {
fn f(&self, a: T0) -> T0 {
<B<T0> as Trait<T0>>::f(&self.b, a)
impl<T1> Trait<T1> for S<T1> {
fn f(&self, a: T1) -> T1 {
<B<T1> as Trait<T1>>::f(&self.b, a)
}
}
"#,
@ -1526,12 +1541,12 @@ where
b : B<T, T1>,
}
impl<T, T2, T10> Trait<T> for S<T2, T10>
impl<T, T2, T3> Trait<T> for S<T2, T3>
where
T10: AnotherTrait
T3: AnotherTrait
{
fn f(&self, a: T) -> T {
<B<T2, T10> as Trait<T>>::f(&self.b, a)
<B<T2, T3> as Trait<T>>::f(&self.b, a)
}
}"#,
);
@ -1588,12 +1603,12 @@ where
b : B<T>,
}
impl<T, T0> Trait<T> for S<T0>
impl<T, T2> Trait<T> for S<T2>
where
T0: AnotherTrait
T2: AnotherTrait
{
fn f(&self, a: T) -> T {
<B<T0> as Trait<T>>::f(&self.b, a)
<B<T2> as Trait<T>>::f(&self.b, a)
}
}"#,
);
@ -1785,4 +1800,40 @@ impl T for B {
"#,
);
}
#[test]
fn assoc_items_attributes_mutably_cloned() {
check_assist(
generate_delegate_trait,
r#"
pub struct A;
pub trait C<D> {
#[allow(clippy::dead_code)]
fn a_funk(&self) -> &D;
}
pub struct B<T: C<A>> {
has_dr$0ain: T,
}
"#,
r#"
pub struct A;
pub trait C<D> {
#[allow(clippy::dead_code)]
fn a_funk(&self) -> &D;
}
pub struct B<T: C<A>> {
has_drain: T,
}
impl<D, T: C<A>> C<D> for B<T> {
#[allow(clippy::dead_code)]
fn a_funk(&self) -> &D {
<T as C<D>>::a_funk(&self.has_drain)
}
}
"#,
)
}
}

View File

@ -1,10 +1,22 @@
use syntax::{
ast::{self, make, AstNode, HasName},
ast::{self, edit_in_place::Indent, make, AstNode, HasName},
ted,
};
use crate::{utils, AssistContext, AssistId, AssistKind, Assists};
/// Inserts `impl_` directly after `nominal`, separated by a blank line and
/// indented to line up with the ADT it was generated for.
fn insert_impl(impl_: ast::Impl, nominal: &ast::Adt) {
    let indent = nominal.indent_level();
    // Blank line plus the ADT's indentation, then the impl itself.
    let separator = make::tokens::whitespace(&format!("\n\n{indent}"));
    let to_insert = vec![separator.into(), impl_.syntax().clone().into()];
    ted::insert_all_raw(ted::Position::after(nominal.syntax()), to_insert);
}
// Assist: generate_impl
//
// Adds a new inherent impl for a type.
@ -46,12 +58,7 @@ pub(crate) fn generate_impl(acc: &mut Assists, ctx: &AssistContext<'_>) -> Optio
}
}
// Add the impl after the adt
let nominal = edit.make_mut(nominal);
ted::insert_all_raw(
ted::Position::after(nominal.syntax()),
vec![make::tokens::blank_line().into(), impl_.syntax().clone().into()],
);
insert_impl(impl_, &edit.make_mut(nominal));
},
)
}
@ -97,12 +104,7 @@ pub(crate) fn generate_trait_impl(acc: &mut Assists, ctx: &AssistContext<'_>) ->
}
}
// Add the impl after the adt
let nominal = edit.make_mut(nominal);
ted::insert_all_raw(
ted::Position::after(nominal.syntax()),
vec![make::tokens::blank_line().into(), impl_.syntax().clone().into()],
);
insert_impl(impl_, &edit.make_mut(nominal));
},
)
}
@ -418,4 +420,65 @@ mod tests {
"/// Has a lifetime parameter\nstruct Foo<'a, T: Foo<'a>> {}",
);
}
#[test]
fn add_impl_with_indent() {
check_assist(
generate_impl,
r#"
mod foo {
struct Bar$0 {}
}
"#,
r#"
mod foo {
struct Bar {}
impl Bar {$0}
}
"#,
);
}
#[test]
fn add_impl_with_multiple_indent() {
check_assist(
generate_impl,
r#"
mod foo {
fn bar() {
struct Baz$0 {}
}
}
"#,
r#"
mod foo {
fn bar() {
struct Baz {}
impl Baz {$0}
}
}
"#,
);
}
#[test]
fn add_trait_impl_with_indent() {
check_assist(
generate_trait_impl,
r#"
mod foo {
struct Bar$0 {}
}
"#,
r#"
mod foo {
struct Bar {}
impl ${0:_} for Bar {}
}
"#,
);
}
}

View File

@ -2,14 +2,18 @@ use std::collections::BTreeSet;
use ast::make;
use either::Either;
use hir::{db::HirDatabase, sym, FileRange, PathResolution, Semantics, TypeInfo};
use hir::{
db::{ExpandDatabase, HirDatabase},
sym, FileRange, PathResolution, Semantics, TypeInfo,
};
use ide_db::{
base_db::CrateId,
defs::Definition,
imports::insert_use::remove_path_if_in_use_stmt,
path_transform::PathTransform,
search::{FileReference, FileReferenceNode, SearchScope},
source_change::SourceChangeBuilder,
syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
syntax_helpers::{node_ext::expr_as_name_ref, prettify_macro_expansion},
EditionedFileId, RootDatabase,
};
use itertools::{izip, Itertools};
@ -102,12 +106,13 @@ pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let mut remove_def = true;
let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
builder.edit_file(file_id);
let call_krate = ctx.sema.file_to_module_def(file_id).map(|it| it.krate());
let count = refs.len();
// The collects are required as we are otherwise iterating while mutating 🙅‍♀️🙅‍♂️
let (name_refs, name_refs_use) = split_refs_and_uses(builder, refs, Some);
let call_infos: Vec<_> = name_refs
.into_iter()
.filter_map(CallInfo::from_name_ref)
.filter_map(|it| CallInfo::from_name_ref(it, call_krate?.into()))
// FIXME: do not handle callsites in macros' parameters, because
// directly inlining into macros may cause errors.
.filter(|call_info| !ctx.sema.hir_file_for(call_info.node.syntax()).is_macro())
@ -185,7 +190,10 @@ pub(super) fn split_refs_and_uses<T: ast::AstNode>(
// ```
pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
let call_info = CallInfo::from_name_ref(name_ref.clone())?;
let call_info = CallInfo::from_name_ref(
name_ref.clone(),
ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
)?;
let (function, label) = match &call_info.node {
ast::CallableExpr::Call(call) => {
let path = match call.expr()? {
@ -243,10 +251,11 @@ struct CallInfo {
node: ast::CallableExpr,
arguments: Vec<ast::Expr>,
generic_arg_list: Option<ast::GenericArgList>,
krate: CrateId,
}
impl CallInfo {
fn from_name_ref(name_ref: ast::NameRef) -> Option<CallInfo> {
fn from_name_ref(name_ref: ast::NameRef, krate: CrateId) -> Option<CallInfo> {
let parent = name_ref.syntax().parent()?;
if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) {
let receiver = call.receiver()?;
@ -256,6 +265,7 @@ impl CallInfo {
generic_arg_list: call.generic_arg_list(),
node: ast::CallableExpr::MethodCall(call),
arguments,
krate,
})
} else if let Some(segment) = ast::PathSegment::cast(parent) {
let path = segment.syntax().parent().and_then(ast::Path::cast)?;
@ -266,6 +276,7 @@ impl CallInfo {
arguments: call.arg_list()?.args().collect(),
node: ast::CallableExpr::Call(call),
generic_arg_list: segment.generic_arg_list(),
krate,
})
} else {
None
@ -307,11 +318,15 @@ fn inline(
function: hir::Function,
fn_body: &ast::BlockExpr,
params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
CallInfo { node, arguments, generic_arg_list }: &CallInfo,
CallInfo { node, arguments, generic_arg_list, krate }: &CallInfo,
) -> ast::Expr {
let mut body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
let file_id = sema.hir_file_for(fn_body.syntax());
let mut body = if let Some(macro_file) = file_id.macro_file() {
cov_mark::hit!(inline_call_defined_in_macro);
if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
let span_map = sema.db.expansion_span_map(macro_file);
let body_prettified =
prettify_macro_expansion(sema.db, fn_body.syntax().clone(), &span_map, *krate);
if let Some(body) = ast::BlockExpr::cast(body_prettified) {
body
} else {
fn_body.clone_for_update()
@ -420,8 +435,16 @@ fn inline(
let mut insert_let_stmt = || {
let param_ty = param_ty.clone().map(|param_ty| {
if sema.hir_file_for(param_ty.syntax()).is_macro() {
ast::Type::cast(insert_ws_into(param_ty.syntax().clone())).unwrap_or(param_ty)
let file_id = sema.hir_file_for(param_ty.syntax());
if let Some(macro_file) = file_id.macro_file() {
let span_map = sema.db.expansion_span_map(macro_file);
let param_ty_prettified = prettify_macro_expansion(
sema.db,
param_ty.syntax().clone(),
&span_map,
*krate,
);
ast::Type::cast(param_ty_prettified).unwrap_or(param_ty)
} else {
param_ty
}
@ -1020,6 +1043,7 @@ fn main() {
check_assist(
inline_call,
r#"
//- minicore: sized
fn foo(x: *const u32) -> u32 {
x as u32
}

View File

@ -53,10 +53,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
| ast::Expr::BinExpr(_)
| ast::Expr::CallExpr(_) => {
let edition = ctx.sema.scope(variable.syntax())?.krate().edition(ctx.db());
match konst.render_eval(ctx.sema.db, edition) {
Ok(result) => result,
Err(_) => return None,
}
konst.eval(ctx.sema.db, edition).ok()?
}
_ => return None,
};
@ -127,12 +124,14 @@ mod tests {
("u64", "0", NUMBER),
("u128", "0", NUMBER),
("usize", "0", NUMBER),
("usize", "16", NUMBER),
("i8", "0", NUMBER),
("i16", "0", NUMBER),
("i32", "0", NUMBER),
("i64", "0", NUMBER),
("i128", "0", NUMBER),
("isize", "0", NUMBER),
("isize", "16", NUMBER),
("bool", "false", BOOL),
("&str", "\"str\"", STR),
("char", "'c'", CHAR),

View File

@ -333,7 +333,8 @@ fn foo() {
check_assist(
inline_local_variable,
r"
fn bar(a: usize): usize { a }
//- minicore: sized
fn bar(a: usize) -> usize { a }
fn foo() {
let a$0 = bar(1) as u64;
a + 1;
@ -347,7 +348,7 @@ fn foo() {
bar(a);
}",
r"
fn bar(a: usize): usize { a }
fn bar(a: usize) -> usize { a }
fn foo() {
(bar(1) as u64) + 1;
if (bar(1) as u64) > 10 {

View File

@ -1,4 +1,5 @@
use ide_db::syntax_helpers::insert_whitespace_into_node::insert_ws_into;
use hir::db::ExpandDatabase;
use ide_db::syntax_helpers::prettify_macro_expansion;
use syntax::ast::{self, AstNode};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -36,7 +37,15 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// ```
pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let unexpanded = ctx.find_node_at_offset::<ast::MacroCall>()?;
let expanded = insert_ws_into(ctx.sema.expand(&unexpanded)?.clone_for_update());
let macro_call = ctx.sema.to_def(&unexpanded)?;
let expanded = ctx.sema.parse_or_expand(macro_call.as_file());
let span_map = ctx.sema.db.expansion_span_map(macro_call.as_macro_file());
let expanded = prettify_macro_expansion(
ctx.db(),
expanded,
&span_map,
ctx.sema.file_to_module_def(ctx.file_id())?.krate().into(),
);
let text_range = unexpanded.syntax().text_range();
acc.add(
@ -295,6 +304,75 @@ fn main() {
}
};
}
"#,
);
}
#[test]
fn dollar_crate() {
check_assist(
inline_macro,
r#"
pub struct Foo;
#[macro_export]
macro_rules! m {
() => { $crate::Foo };
}
fn bar() {
m$0!();
}
"#,
r#"
pub struct Foo;
#[macro_export]
macro_rules! m {
() => { $crate::Foo };
}
fn bar() {
crate::Foo;
}
"#,
);
check_assist(
inline_macro,
r#"
//- /a.rs crate:a
pub struct Foo;
#[macro_export]
macro_rules! m {
() => { $crate::Foo };
}
//- /b.rs crate:b deps:a
fn bar() {
a::m$0!();
}
"#,
r#"
fn bar() {
a::Foo;
}
"#,
);
check_assist(
inline_macro,
r#"
//- /a.rs crate:a
pub struct Foo;
#[macro_export]
macro_rules! m {
() => { $crate::Foo };
}
//- /b.rs crate:b deps:a
pub use a::m;
//- /c.rs crate:c deps:b
fn bar() {
b::m$0!();
}
"#,
r#"
fn bar() {
a::Foo;
}
"#,
);
}

View File

@ -43,6 +43,7 @@ use super::inline_call::split_refs_and_uses;
// fn foo() {
// let _: i32 = 3;
// }
// ```
pub(crate) fn inline_type_alias_uses(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
let name = ctx.find_node_at_offset::<ast::Name>()?;
let ast_alias = name.syntax().parent().and_then(ast::TypeAlias::cast)?;

View File

@ -1,9 +1,11 @@
use ide_db::syntax_helpers::suggest_name;
use itertools::Itertools;
use syntax::{
ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, HasGenericParams},
ast::{self, edit_in_place::GenericParamsOwnerEdit, make, AstNode, HasGenericParams, HasName},
ted,
};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: introduce_named_generic
//
@ -32,8 +34,18 @@ pub(crate) fn introduce_named_generic(acc: &mut Assists, ctx: &AssistContext<'_>
let impl_trait_type = edit.make_mut(impl_trait_type);
let fn_ = edit.make_mut(fn_);
let fn_generic_param_list = fn_.get_or_create_generic_param_list();
let type_param_name =
suggest_name::for_impl_trait_as_generic(&impl_trait_type, &fn_generic_param_list);
let existing_names = fn_generic_param_list
.generic_params()
.flat_map(|param| match param {
ast::GenericParam::TypeParam(t) => t.name().map(|name| name.to_string()),
p => Some(p.to_string()),
})
.collect_vec();
let type_param_name = suggest_name::NameGenerator::new_with_names(
existing_names.iter().map(|s| s.as_str()),
)
.for_impl_trait_as_generic(&impl_trait_type);
let type_param = make::type_param(make::name(&type_param_name), Some(type_bound_list))
.clone_for_update();
@ -115,7 +127,7 @@ fn foo<$0B: Bar
check_assist(
introduce_named_generic,
r#"fn foo<B>(bar: $0impl Bar) {}"#,
r#"fn foo<B, $0B0: Bar>(bar: B0) {}"#,
r#"fn foo<B, $0B1: Bar>(bar: B1) {}"#,
);
}
@ -124,7 +136,7 @@ fn foo<$0B: Bar
check_assist(
introduce_named_generic,
r#"fn foo<B, B0, B1, B3>(bar: $0impl Bar) {}"#,
r#"fn foo<B, B0, B1, B3, $0B2: Bar>(bar: B2) {}"#,
r#"fn foo<B, B0, B1, B3, $0B4: Bar>(bar: B4) {}"#,
);
}

View File

@ -6,7 +6,10 @@ use ide_db::{
search::{FileReference, ReferenceCategory, SearchScope},
FxHashMap, RootDatabase,
};
use syntax::{ast, AstNode};
use syntax::{
ast::{self, Rename},
AstNode,
};
use text_edit::TextRange;
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -100,19 +103,19 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
hir::ScopeDef::ModuleDef(d) => Some(Definition::from(*d)),
_ => None,
})
.any(|d| used_once_in_scope(ctx, d, scope))
.any(|d| used_once_in_scope(ctx, d, u.rename(), scope))
{
return Some(u);
}
} else if let Definition::Trait(ref t) = def {
// If the trait or any item is used.
if !std::iter::once(def)
.chain(t.items(ctx.db()).into_iter().map(Definition::from))
.any(|d| used_once_in_scope(ctx, d, scope))
if !std::iter::once((def, u.rename()))
.chain(t.items(ctx.db()).into_iter().map(|item| (item.into(), None)))
.any(|(d, rename)| used_once_in_scope(ctx, d, rename, scope))
{
return Some(u);
}
} else if !used_once_in_scope(ctx, def, scope) {
} else if !used_once_in_scope(ctx, def, u.rename(), scope) {
return Some(u);
}
@ -138,7 +141,12 @@ pub(crate) fn remove_unused_imports(acc: &mut Assists, ctx: &AssistContext<'_>)
}
}
fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<SearchScope>) -> bool {
fn used_once_in_scope(
ctx: &AssistContext<'_>,
def: Definition,
rename: Option<Rename>,
scopes: &Vec<SearchScope>,
) -> bool {
let mut found = false;
for scope in scopes {
@ -151,7 +159,10 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
false
}
};
def.usages(&ctx.sema).in_scope(scope).search(&mut search_non_import);
def.usages(&ctx.sema)
.in_scope(scope)
.with_rename(rename.as_ref())
.search(&mut search_non_import);
if found {
break;
}
@ -330,7 +341,7 @@ fn w() {
}
#[test]
fn ranamed_trait_item_use_is_use() {
fn renamed_trait_item_use_is_use() {
check_assist_not_applicable(
remove_unused_imports,
r#"
@ -356,7 +367,7 @@ fn w() {
}
#[test]
fn ranamed_underscore_trait_item_use_is_use() {
fn renamed_underscore_trait_item_use_is_use() {
check_assist_not_applicable(
remove_unused_imports,
r#"
@ -942,6 +953,62 @@ pub struct X();
mod z {
mod foo;
}
"#,
);
}
#[test]
fn use_as_alias() {
check_assist_not_applicable(
remove_unused_imports,
r#"
mod foo {
pub struct Foo {}
}
use foo::Foo as Bar$0;
fn test(_: Bar) {}
"#,
);
check_assist(
remove_unused_imports,
r#"
mod foo {
pub struct Foo {}
pub struct Bar {}
pub struct Qux {}
pub trait Quux {
fn quxx(&self) {}
}
impl<T> Quxx for T {}
}
use foo::{Foo as Bar, Bar as Baz, Qux as _, Quxx as _}$0;
fn test(_: Bar) {
let a = ();
a.quxx();
}
"#,
r#"
mod foo {
pub struct Foo {}
pub struct Bar {}
pub struct Qux {}
pub trait Quux {
fn quxx(&self) {}
}
impl<T> Quxx for T {}
}
use foo::{Foo as Bar, Quxx as _};
fn test(_: Bar) {
let a = ();
a.quxx();
}
"#,
);
}

View File

@ -1,9 +1,10 @@
use ide_db::syntax_helpers::suggest_name;
use syntax::{
ast::{self, make, AstNode},
ted,
};
use crate::{utils::suggest_name, AssistContext, AssistId, AssistKind, Assists};
use crate::{AssistContext, AssistId, AssistKind, Assists};
// Assist: replace_is_some_with_if_let_some
//

View File

@ -29,7 +29,7 @@ pub(crate) fn replace_qualified_name_with_use(
acc: &mut Assists,
ctx: &AssistContext<'_>,
) -> Option<()> {
let original_path: ast::Path = ctx.find_node_at_offset()?;
let mut original_path: ast::Path = ctx.find_node_at_offset()?;
// We don't want to mess with use statements
if original_path.syntax().ancestors().find_map(ast::UseTree::cast).is_some() {
cov_mark::hit!(not_applicable_in_use);
@ -37,8 +37,7 @@ pub(crate) fn replace_qualified_name_with_use(
}
if original_path.qualifier().is_none() {
cov_mark::hit!(dont_import_trivial_paths);
return None;
original_path = original_path.parent_path()?;
}
// only offer replacement for non assoc items
@ -236,12 +235,6 @@ fs::Path
);
}
#[test]
fn dont_import_trivial_paths() {
cov_mark::check!(dont_import_trivial_paths);
check_assist_not_applicable(replace_qualified_name_with_use, r"impl foo$0 for () {}");
}
#[test]
fn test_replace_not_applicable_in_use() {
cov_mark::check!(not_applicable_in_use);
@ -271,6 +264,29 @@ fn main() {
);
}
#[test]
fn assist_runs_on_first_segment() {
check_assist(
replace_qualified_name_with_use,
r"
mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
$0std::fmt::Debug;
let x: std::fmt::Debug = std::fmt::Debug;
}
",
r"
use std::fmt;
mod std { pub mod fmt { pub trait Debug {} } }
fn main() {
fmt::Debug;
let x: fmt::Debug = fmt::Debug;
}
",
);
}
#[test]
fn does_not_replace_in_submodules() {
check_assist(

View File

@ -64,12 +64,9 @@ pub(crate) fn toggle_macro_delimiter(acc: &mut Assists, ctx: &AssistContext<'_>)
acc.add(
AssistId("toggle_macro_delimiter", AssistKind::Refactor),
match token {
MacroDelims::LPar => "Replace delimiters with braces",
MacroDelims::RPar => "Replace delimiters with braces",
MacroDelims::LBra => "Replace delimiters with parentheses",
MacroDelims::RBra => "Replace delimiters with parentheses",
MacroDelims::LCur => "Replace delimiters with brackets",
MacroDelims::RCur => "Replace delimiters with brackets",
MacroDelims::LPar | MacroDelims::RPar => "Replace delimiters with braces",
MacroDelims::LBra | MacroDelims::RBra => "Replace delimiters with parentheses",
MacroDelims::LCur | MacroDelims::RCur => "Replace delimiters with brackets",
},
token_tree.syntax().text_range(),
|builder| {

View File

@ -1,12 +1,14 @@
use std::iter;
use hir::HasSource;
use ide_db::{
famous_defs::FamousDefs,
syntax_helpers::node_ext::{for_each_tail_expr, walk_expr},
};
use itertools::Itertools;
use syntax::{
ast::{self, make, Expr},
match_ast, ted, AstNode,
ast::{self, make, Expr, HasGenericParams},
match_ast, ted, AstNode, ToSmolStr,
};
use crate::{AssistContext, AssistId, AssistKind, Assists};
@ -39,25 +41,22 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
};
let type_ref = &ret_type.ty()?;
let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
let result_enum =
let core_result =
FamousDefs(&ctx.sema, ctx.sema.scope(type_ref.syntax())?.krate()).core_result_Result()?;
if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == result_enum) {
let ty = ctx.sema.resolve_type(type_ref)?.as_adt();
if matches!(ty, Some(hir::Adt::Enum(ret_type)) if ret_type == core_result) {
// The return type is already wrapped in a Result
cov_mark::hit!(wrap_return_type_in_result_simple_return_type_already_result);
return None;
}
let new_result_ty =
make::ext::ty_result(type_ref.clone(), make::ty_placeholder()).clone_for_update();
let generic_args = new_result_ty.syntax().descendants().find_map(ast::GenericArgList::cast)?;
let last_genarg = generic_args.generic_args().last()?;
acc.add(
AssistId("wrap_return_type_in_result", AssistKind::RefactorRewrite),
"Wrap return type in Result",
type_ref.syntax().text_range(),
|edit| {
let new_result_ty = result_type(ctx, &core_result, type_ref).clone_for_update();
let body = edit.make_mut(ast::Expr::BlockExpr(body));
let mut exprs_to_wrap = Vec::new();
@ -81,16 +80,72 @@ pub(crate) fn wrap_return_type_in_result(acc: &mut Assists, ctx: &AssistContext<
}
let old_result_ty = edit.make_mut(type_ref.clone());
ted::replace(old_result_ty.syntax(), new_result_ty.syntax());
if let Some(cap) = ctx.config.snippet_cap {
edit.add_placeholder_snippet(cap, last_genarg);
// Add a placeholder snippet at the first generic argument that doesn't equal the return type.
// This is normally the error type, but that may not be the case when we inserted a type alias.
let args = new_result_ty.syntax().descendants().find_map(ast::GenericArgList::cast);
let error_type_arg = args.and_then(|list| {
list.generic_args().find(|arg| match arg {
ast::GenericArg::TypeArg(_) => arg.syntax().text() != type_ref.syntax().text(),
ast::GenericArg::LifetimeArg(_) => false,
_ => true,
})
});
if let Some(error_type_arg) = error_type_arg {
if let Some(cap) = ctx.config.snippet_cap {
edit.add_placeholder_snippet(cap, error_type_arg);
}
}
},
)
}
/// Picks the `Result` type to wrap the return type in.
///
/// Prefers a `Result` type alias that is in scope at `ret_type` (i.e. one that
/// shadows `core::result::Result`) and resolves to the `core_result` enum;
/// falls back to the plain `Result<T, _>` form when no such alias exists.
///
/// The returned type is a freshly built `ast::Type`, detached from any tree.
fn result_type(
ctx: &AssistContext<'_>,
core_result: &hir::Enum,
ret_type: &ast::Type,
) -> ast::Type {
// Try to find a Result<T, ...> type alias in the current scope (shadowing the default).
let result_path = hir::ModPath::from_segments(
hir::PathKind::Plain,
iter::once(hir::Name::new_symbol_root(hir::sym::Result.clone())),
);
let alias = ctx.sema.resolve_mod_path(ret_type.syntax(), &result_path).and_then(|def| {
// Only keep type aliases whose underlying ADT is the real core Result enum.
def.filter_map(|def| match def.as_module_def()? {
hir::ModuleDef::TypeAlias(alias) => {
let enum_ty = alias.ty(ctx.db()).as_adt()?.as_enum()?;
(&enum_ty == core_result).then_some(alias)
}
_ => None,
})
.find_map(|alias| {
// Build the generic-argument list for the alias from its declared
// parameters: first type param becomes the wrapped return type,
// lifetimes become `'_`, everything else becomes a `_` placeholder.
let mut inserted_ret_type = false;
let generic_params = alias
.source(ctx.db())?
.value
.generic_param_list()?
.generic_params()
.map(|param| match param {
// Replace the very first type parameter with the function's return type.
ast::GenericParam::TypeParam(_) if !inserted_ret_type => {
inserted_ret_type = true;
ret_type.to_smolstr()
}
ast::GenericParam::LifetimeParam(_) => make::lifetime("'_").to_smolstr(),
_ => make::ty_placeholder().to_smolstr(),
})
.join(", ");
let name = alias.name(ctx.db());
let name = name.as_str();
Some(make::ty(&format!("{name}<{generic_params}>")))
})
});
// If there is no applicable alias in scope use the default Result type.
alias.unwrap_or_else(|| make::ext::ty_result(ret_type.clone(), make::ty_placeholder()))
}
fn tail_cb_impl(acc: &mut Vec<ast::Expr>, e: &ast::Expr) {
match e {
Expr::BreakExpr(break_expr) => {
@ -998,4 +1053,216 @@ fn foo(the_field: u32) -> Result<u32, ${0:_}> {
"#,
);
}
// A `Result<T>` alias in scope is used for the wrapped return type, and — since
// the alias fixes the error type — no `${0:_}` error placeholder is emitted.
// An alias with a different name (`Result2`) is ignored and the default
// `Result<T, ${0:_}>` form is produced instead.
#[test]
fn wrap_return_type_in_local_result_type() {
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
type Result<T> = core::result::Result<T, ()>;
fn foo() -> i3$02 {
return 42i32;
}
"#,
r#"
type Result<T> = core::result::Result<T, ()>;
fn foo() -> Result<i32> {
return Ok(42i32);
}
"#,
);
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
type Result2<T> = core::result::Result<T, ()>;
fn foo() -> i3$02 {
return 42i32;
}
"#,
r#"
type Result2<T> = core::result::Result<T, ()>;
fn foo() -> Result<i32, ${0:_}>  {
return Ok(42i32);
}
"#,
);
}
// The local `Result` alias is also picked up when it is brought into scope via
// `use` — both by an explicit import and by a glob import.
#[test]
fn wrap_return_type_in_imported_local_result_type() {
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
mod some_module {
pub type Result<T> = core::result::Result<T, ()>;
}
use some_module::Result;
fn foo() -> i3$02 {
return 42i32;
}
"#,
r#"
mod some_module {
pub type Result<T> = core::result::Result<T, ()>;
}
use some_module::Result;
fn foo() -> Result<i32> {
return Ok(42i32);
}
"#,
);
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
mod some_module {
pub type Result<T> = core::result::Result<T, ()>;
}
use some_module::*;
fn foo() -> i3$02 {
return 42i32;
}
"#,
r#"
mod some_module {
pub type Result<T> = core::result::Result<T, ()>;
}
use some_module::*;
fn foo() -> Result<i32> {
return Ok(42i32);
}
"#,
);
}
// An alias declared *inside* the function body is not in scope at the return
// type position, so the default `Result<T, ${0:_}>` form is used.
#[test]
fn wrap_return_type_in_local_result_type_from_function_body() {
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
fn foo() -> i3$02 {
type Result<T> = core::result::Result<T, ()>;
0
}
"#,
r#"
fn foo() -> Result<i32, ${0:_}>  {
type Result<T> = core::result::Result<T, ()>;
Ok(0)
}
"#,
);
}
// The assist must not offer to wrap a return type that is already the local
// `Result` alias (mirrors the existing "already a Result" bail-out).
#[test]
fn wrap_return_type_in_local_result_type_already_using_alias() {
check_assist_not_applicable(
wrap_return_type_in_result,
r#"
//- minicore: result
pub type Result<T> = core::result::Result<T, ()>;
fn foo() -> Result<i3$02> {
return Ok(42i32);
}
"#,
);
}
// Aliases with extra generic parameters: only the first type parameter is
// replaced by the return type; remaining type params become `${0:_}`
// placeholders, lifetimes become `'_`, and const params become `_`.
#[test]
fn wrap_return_type_in_local_result_type_multiple_generics() {
// Two type params: second becomes the snippet placeholder.
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
type Result<T, E> = core::result::Result<T, E>;
fn foo() -> i3$02 {
0
}
"#,
r#"
type Result<T, E> = core::result::Result<T, E>;
fn foo() -> Result<i32, ${0:_}>  {
Ok(0)
}
"#,
);
// The alias target's own structure does not matter, only its params.
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
type Result<T, E> = core::result::Result<Foo<T, E>, ()>;
fn foo() -> i3$02 {
0
}
"#,
r#"
type Result<T, E> = core::result::Result<Foo<T, E>, ()>;
fn foo() -> Result<i32, ${0:_}>  {
Ok(0)
}
"#,
);
// A leading lifetime param is filled with `'_`.
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
type Result<'a, T, E> = core::result::Result<Foo<T, E>, &'a ()>;
fn foo() -> i3$02 {
0
}
"#,
r#"
type Result<'a, T, E> = core::result::Result<Foo<T, E>, &'a ()>;
fn foo() -> Result<'_, i32, ${0:_}>  {
Ok(0)
}
"#,
);
// A const param is filled with a `_` placeholder.
check_assist(
wrap_return_type_in_result,
r#"
//- minicore: result
type Result<T, const N: usize> = core::result::Result<Foo<T>, Bar<N>>;
fn foo() -> i3$02 {
0
}
"#,
r#"
type Result<T, const N: usize> = core::result::Result<Foo<T>, Bar<N>>;
fn foo() -> Result<i32, ${0:_}>  {
Ok(0)
}
"#,
);
}
}

View File

@ -25,6 +25,7 @@ use crate::{AssistContext, AssistId, AssistKind, Assists};
// struct S {
// field: i32
// }
// ```
enum WrapUnwrapOption {
WrapDerive { derive: TextRange, attr: ast::Attr },

View File

@ -58,6 +58,8 @@
//! See also this post:
//! <https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html>
#![cfg_attr(feature = "in-rust-tree", feature(rustc_private))]
mod assist_config;
mod assist_context;
#[cfg(test)]
@ -136,6 +138,7 @@ mod handlers {
mod destructure_tuple_binding;
mod desugar_doc_comment;
mod expand_glob_import;
mod explicit_enum_discriminant;
mod extract_expressions_from_format_string;
mod extract_function;
mod extract_module;
@ -266,6 +269,7 @@ mod handlers {
destructure_tuple_binding::destructure_tuple_binding,
destructure_struct_binding::destructure_struct_binding,
expand_glob_import::expand_glob_import,
explicit_enum_discriminant::explicit_enum_discriminant,
extract_expressions_from_format_string::extract_expressions_from_format_string,
extract_struct_from_enum_variant::extract_struct_from_enum_variant,
extract_type_alias::extract_type_alias,

View File

@ -909,6 +909,29 @@ fn qux(bar: Bar, baz: Baz) {}
)
}
// Generated doc-test for the new `explicit_enum_discriminant` assist: implicit
// discriminants are made explicit, continuing from the last explicit value.
#[test]
fn doctest_explicit_enum_discriminant() {
check_doc_test(
"explicit_enum_discriminant",
r#####"
enum TheEnum$0 {
Foo,
Bar,
Baz = 42,
Quux,
}
"#####,
r#####"
enum TheEnum {
Foo = 0,
Bar = 1,
Baz = 42,
Quux = 43,
}
"#####,
)
}
#[test]
fn doctest_extract_expressions_from_format_string() {
check_doc_test(

View File

@ -1,10 +1,13 @@
//! Assorted functions shared by several assists.
pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
use hir::{db::HirDatabase, HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics};
use hir::{
db::{ExpandDatabase, HirDatabase},
HasAttrs as HirHasAttrs, HirDisplay, InFile, Semantics,
};
use ide_db::{
famous_defs::FamousDefs, path_transform::PathTransform,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
syntax_helpers::prettify_macro_expansion, RootDatabase,
};
use stdx::format_to;
use syntax::{
@ -23,7 +26,6 @@ use crate::assist_context::{AssistContext, SourceChangeBuilder};
mod gen_trait_fn_body;
pub(crate) mod ref_field_expr;
pub(crate) mod suggest_name;
pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr {
extract_trivial_expression(&block_expr)
@ -179,10 +181,15 @@ pub fn add_trait_assoc_items_to_impl(
let new_indent_level = IndentLevel::from_node(impl_.syntax()) + 1;
let items = original_items.iter().map(|InFile { file_id, value: original_item }| {
let cloned_item = {
if file_id.is_macro() {
if let Some(formatted) =
ast::AssocItem::cast(insert_ws_into(original_item.syntax().clone()))
{
if let Some(macro_file) = file_id.macro_file() {
let span_map = sema.db.expansion_span_map(macro_file);
let item_prettified = prettify_macro_expansion(
sema.db,
original_item.syntax().clone(),
&span_map,
target_scope.krate().into(),
);
if let Some(formatted) = ast::AssocItem::cast(item_prettified) {
return formatted;
} else {
stdx::never!("formatted `AssocItem` could not be cast back to `AssocItem`");

View File

@ -617,6 +617,16 @@ impl Completions {
}
self.add_opt(render_struct_pat(RenderContext::new(ctx), pattern_ctx, strukt, local_name));
}
/// Adds a plain binding-name suggestion (`CompletionItemKind::Binding`) for
/// `name` at the current source range — used to suggest identifier names,
/// e.g. for a pattern position, rather than to complete an existing item.
pub(crate) fn suggest_name(&mut self, ctx: &CompletionContext<'_>, name: &str) {
let item = CompletionItem::new(
CompletionItemKind::Binding,
ctx.source_range(),
SmolStr::from(name),
ctx.edition,
);
item.add_to(self, ctx.db);
}
}
/// Calls the callback for each variant of the provided enum with the path to the variant.

View File

@ -56,7 +56,7 @@ pub(crate) fn complete_known_attribute_input(
&parse_tt_as_comma_sep_paths(tt, ctx.edition)?,
FEATURES,
),
"allow" | "warn" | "deny" | "forbid" => {
"allow" | "expect" | "deny" | "forbid" | "warn" => {
let existing_lints = parse_tt_as_comma_sep_paths(tt, ctx.edition)?;
let lints: Vec<Lint> = CLIPPY_LINT_GROUPS
@ -222,7 +222,7 @@ macro_rules! attrs {
[@ {} {$($tt:tt)*}] => { &[$($tt)*] as _ };
// starting matcher
[$($tt:tt),*] => {
attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "forbid", "warn" })
attrs!(@ { $($tt)* } { "allow", "cfg", "cfg_attr", "deny", "expect", "forbid", "warn" })
};
}
@ -303,6 +303,7 @@ const ATTRIBUTES: &[AttrCompletion] = &[
attr(r#"doc = "…""#, Some("doc"), Some(r#"doc = "${0:docs}""#)),
attr(r#"doc(alias = "…")"#, Some("docalias"), Some(r#"doc(alias = "${0:docs}")"#)),
attr(r#"doc(hidden)"#, Some("dochidden"), Some(r#"doc(hidden)"#)),
attr("expect(…)", Some("expect"), Some("expect(${0:lint})")),
attr(
r#"export_name = "…""#,
Some("export_name"),

View File

@ -600,7 +600,7 @@ fn foo(a: A) { a.$0 }
struct A {}
trait Trait { fn the_method(&self); }
impl Trait for A {}
fn foo(a: A) { a.the_method()$0 }
fn foo(a: A) { a.the_method();$0 }
"#,
);
}

View File

@ -31,14 +31,14 @@
//! }
//! ```
use hir::HasAttrs;
use hir::{db::ExpandDatabase, HasAttrs, MacroFileId, Name};
use ide_db::{
documentation::HasDocs, path_transform::PathTransform,
syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind,
syntax_helpers::prettify_macro_expansion, traits::get_missing_assoc_items, SymbolKind,
};
use syntax::{
ast::{self, edit_in_place::AttrsOwnerEdit, HasTypeBounds},
format_smolstr, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, ToSmolStr, T,
ast::{self, edit_in_place::AttrsOwnerEdit, make, HasGenericArgs, HasTypeBounds},
format_smolstr, ted, AstNode, SmolStr, SyntaxElement, SyntaxKind, TextRange, ToSmolStr, T,
};
use text_edit::TextEdit;
@ -178,12 +178,36 @@ fn add_function_impl(
func: hir::Function,
impl_def: hir::Impl,
) {
let fn_name = func.name(ctx.db);
let fn_name = &func.name(ctx.db);
let sugar: &[_] = if func.is_async(ctx.db) {
&[AsyncSugaring::Async, AsyncSugaring::Desugar]
} else if func.returns_impl_future(ctx.db) {
&[AsyncSugaring::Plain, AsyncSugaring::Resugar]
} else {
&[AsyncSugaring::Plain]
};
for &sugaring in sugar {
add_function_impl_(acc, ctx, replacement_range, func, impl_def, fn_name, sugaring);
}
}
let is_async = func.is_async(ctx.db);
fn add_function_impl_(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
replacement_range: TextRange,
func: hir::Function,
impl_def: hir::Impl,
fn_name: &Name,
async_sugaring: AsyncSugaring,
) {
let async_ = if let AsyncSugaring::Async | AsyncSugaring::Resugar = async_sugaring {
"async "
} else {
""
};
let label = format_smolstr!(
"{}fn {}({})",
if is_async { "async " } else { "" },
async_,
fn_name.display(ctx.db, ctx.edition),
if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." }
);
@ -195,23 +219,16 @@ fn add_function_impl(
});
let mut item = CompletionItem::new(completion_kind, replacement_range, label, ctx.edition);
item.lookup_by(format!(
"{}fn {}",
if is_async { "async " } else { "" },
fn_name.display(ctx.db, ctx.edition)
))
.set_documentation(func.docs(ctx.db))
.set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
item.lookup_by(format!("{}fn {}", async_, fn_name.display(ctx.db, ctx.edition)))
.set_documentation(func.docs(ctx.db))
.set_relevance(CompletionRelevance { exact_name_match: true, ..Default::default() });
if let Some(source) = ctx.sema.source(func) {
let assoc_item = ast::AssocItem::Fn(source.value);
if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
let transformed_fn = match transformed_item {
ast::AssocItem::Fn(func) => func,
_ => unreachable!(),
};
let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro());
if let Some(transformed_fn) =
get_transformed_fn(ctx, source.value, impl_def, async_sugaring)
{
let function_decl =
function_declaration(ctx, &transformed_fn, source.file_id.macro_file());
match ctx.config.snippet_cap {
Some(cap) => {
let snippet = format!("{function_decl} {{\n $0\n}}");
@ -227,6 +244,14 @@ fn add_function_impl(
}
}
/// How to render a trait fn's asyncness when completing its impl stub
/// (see `get_transformed_fn`, which consumes these variants).
#[derive(Copy, Clone)]
enum AsyncSugaring {
// Rewrite an `async fn` as `fn … -> impl Future<Output = …>`.
Desugar,
// Rewrite `fn … -> impl Future<Output = T>` as `async fn … -> T`.
Resugar,
// Keep the `async fn` form as written in the trait.
Async,
// Keep the non-async form as written in the trait.
Plain,
}
/// Transform a relevant associated item to inline generics from the impl, remove attrs and docs, etc.
fn get_transformed_assoc_item(
ctx: &CompletionContext<'_>,
@ -251,6 +276,82 @@ fn get_transformed_assoc_item(
Some(assoc_item)
}
/// Transforms a trait fn for insertion into `impl_def`: inlines generics from
/// the impl via `PathTransform`, strips attrs/docs, and rewrites the fn's
/// asyncness per `async_`. Returns `None` when any required piece (trait,
/// scopes, return type, …) cannot be resolved.
fn get_transformed_fn(
ctx: &CompletionContext<'_>,
fn_: ast::Fn,
impl_def: hir::Impl,
async_: AsyncSugaring,
) -> Option<ast::Fn> {
let trait_ = impl_def.trait_(ctx.db)?;
let source_scope = &ctx.sema.scope(fn_.syntax())?;
let target_scope = &ctx.sema.scope(ctx.sema.source(impl_def)?.syntax().value)?;
let transform = PathTransform::trait_impl(
target_scope,
source_scope,
trait_,
ctx.sema.source(impl_def)?.value,
);
// Work on a mutable copy so `ted` edits below don't touch the original tree.
let fn_ = fn_.clone_for_update();
// FIXME: Paths in nested macros are not handled well. See
// `macro_generated_assoc_item2` test.
transform.apply(fn_.syntax());
fn_.remove_attrs_and_docs();
match async_ {
AsyncSugaring::Desugar => {
// `async fn f() -> T` becomes `fn f() -> impl Future<Output = T>`;
// with no return type, the output is `()`.
match fn_.ret_type() {
Some(ret_ty) => {
let ty = ret_ty.ty()?;
ted::replace(
ty.syntax(),
make::ty(&format!("impl Future<Output = {ty}>"))
.syntax()
.clone_for_update(),
);
}
None => ted::append_child(
fn_.param_list()?.syntax(),
make::ret_type(make::ty("impl Future<Output = ()>"))
.syntax()
.clone_for_update(),
),
}
fn_.async_token().unwrap().detach();
}
AsyncSugaring::Resugar => {
let ty = fn_.ret_type()?.ty()?;
match &ty {
// best effort guessing here
// `impl Future<Output = T> + …` → async fn returning `T`; the
// extra bounds (e.g. `+ Send`) are dropped along with the impl type.
ast::Type::ImplTraitType(t) => {
let output = t.type_bound_list()?.bounds().find_map(|b| match b.ty()? {
ast::Type::PathType(p) => {
let p = p.path()?.segment()?;
if p.name_ref()?.text() != "Future" {
return None;
}
match p.generic_arg_list()?.generic_args().next()? {
ast::GenericArg::AssocTypeArg(a)
if a.name_ref()?.text() == "Output" =>
{
a.ty()
}
_ => None,
}
}
_ => None,
})?;
ted::replace(ty.syntax(), output.syntax());
}
_ => (),
}
ted::prepend_child(fn_.syntax(), make::token(T![async]));
}
// Keep the declared form unchanged.
AsyncSugaring::Async | AsyncSugaring::Plain => (),
}
Some(fn_)
}
fn add_type_alias_impl(
acc: &mut Completions,
ctx: &CompletionContext<'_>,
@ -266,7 +367,7 @@ fn add_type_alias_impl(
CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label, ctx.edition);
item.lookup_by(format!("type {alias_name}"))
.set_documentation(type_alias.docs(ctx.db))
.set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
.set_relevance(CompletionRelevance { exact_name_match: true, ..Default::default() });
if let Some(source) = ctx.sema.source(type_alias) {
let assoc_item = ast::AssocItem::TypeAlias(source.value);
@ -332,7 +433,8 @@ fn add_const_impl(
_ => unreachable!(),
};
let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro());
let label =
make_const_compl_syntax(ctx, &transformed_const, source.file_id.macro_file());
let replacement = format!("{label} ");
let mut item =
@ -340,7 +442,7 @@ fn add_const_impl(
item.lookup_by(format_smolstr!("const {const_name}"))
.set_documentation(const_.docs(ctx.db))
.set_relevance(CompletionRelevance {
is_item_from_trait: true,
exact_name_match: true,
..Default::default()
});
match ctx.config.snippet_cap {
@ -356,9 +458,14 @@ fn add_const_impl(
}
}
fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> SmolStr {
let const_ = if needs_whitespace {
insert_whitespace_into_node::insert_ws_into(const_.syntax().clone())
fn make_const_compl_syntax(
ctx: &CompletionContext<'_>,
const_: &ast::Const,
macro_file: Option<MacroFileId>,
) -> SmolStr {
let const_ = if let Some(macro_file) = macro_file {
let span_map = ctx.db.expansion_span_map(macro_file);
prettify_macro_expansion(ctx.db, const_.syntax().clone(), &span_map, ctx.krate.into())
} else {
const_.syntax().clone()
};
@ -379,9 +486,14 @@ fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> SmolS
format_smolstr!("{} =", syntax.trim_end())
}
fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
let node = if needs_whitespace {
insert_whitespace_into_node::insert_ws_into(node.syntax().clone())
fn function_declaration(
ctx: &CompletionContext<'_>,
node: &ast::Fn,
macro_file: Option<MacroFileId>,
) -> String {
let node = if let Some(macro_file) = macro_file {
let span_map = ctx.db.expansion_span_map(macro_file);
prettify_macro_expansion(ctx.db, node.syntax().clone(), &span_map, ctx.krate.into())
} else {
node.syntax().clone()
};
@ -1401,6 +1513,134 @@ trait Tr {
impl Tr for () {
type Item = $0;
}
"#,
);
}
// Completing a desugared-async trait fn with the plain label keeps the
// `impl Future<Output = …> + Send` form verbatim.
#[test]
fn impl_fut() {
check_edit(
"fn foo",
r#"
//- minicore: future, send, sized
use core::future::Future;
trait DesugaredAsyncTrait {
fn foo(&self) -> impl Future<Output = usize> + Send;
}
impl DesugaredAsyncTrait for () {
$0
}
"#,
r#"
use core::future::Future;
trait DesugaredAsyncTrait {
fn foo(&self) -> impl Future<Output = usize> + Send;
}
impl DesugaredAsyncTrait for () {
fn foo(&self) -> impl Future<Output = usize> + Send {
    $0
}
}
"#,
);
}
// Choosing the `async fn` label for a desugared-async trait fn resugars
// `impl Future<Output = usize> + Send` into `async fn foo(&self) -> usize`.
#[test]
fn impl_fut_resugared() {
check_edit(
"async fn foo",
r#"
//- minicore: future, send, sized
use core::future::Future;
trait DesugaredAsyncTrait {
fn foo(&self) -> impl Future<Output = usize> + Send;
}
impl DesugaredAsyncTrait for () {
$0
}
"#,
r#"
use core::future::Future;
trait DesugaredAsyncTrait {
fn foo(&self) -> impl Future<Output = usize> + Send;
}
impl DesugaredAsyncTrait for () {
async fn foo(&self) -> usize {
    $0
}
}
"#,
);
}
// Choosing the plain `fn` label for an `async fn` trait method desugars it to
// `fn foo(&self) -> impl Future<Output = usize>`.
#[test]
fn async_desugared() {
check_edit(
"fn foo",
r#"
//- minicore: future, send, sized
use core::future::Future;
trait DesugaredAsyncTrait {
async fn foo(&self) -> usize;
}
impl DesugaredAsyncTrait for () {
$0
}
"#,
r#"
use core::future::Future;
trait DesugaredAsyncTrait {
async fn foo(&self) -> usize;
}
impl DesugaredAsyncTrait for () {
fn foo(&self) -> impl Future<Output = usize> {
    $0
}
}
"#,
);
}
// Choosing the `async fn` label for an `async fn` trait method keeps the
// declared async form unchanged.
#[test]
fn async_() {
check_edit(
"async fn foo",
r#"
//- minicore: future, send, sized
use core::future::Future;
trait DesugaredAsyncTrait {
async fn foo(&self) -> usize;
}
impl DesugaredAsyncTrait for () {
$0
}
"#,
r#"
use core::future::Future;
trait DesugaredAsyncTrait {
async fn foo(&self) -> usize;
}
impl DesugaredAsyncTrait for () {
async fn foo(&self) -> usize {
    $0
}
}
"#,
);
}

View File

@ -150,6 +150,68 @@ fn foo(a: A) { a.$0 }
);
}
// The `for` keyword completion must produce `impl X for $0` in an impl header
// position — both at top level, inside a fn body, and with an unclosed impl —
// but inside a method body it must still expand to the `for … in …` loop snippet.
#[test]
fn for_in_impl() {
check_edit(
"for",
r#"
struct X;
impl X $0 {}
"#,
r#"
struct X;
impl X for $0 {}
"#,
);
check_edit(
"for",
r#"
fn foo() {
struct X;
impl X $0 {}
}
"#,
r#"
fn foo() {
struct X;
impl X for $0 {}
}
"#,
);
check_edit(
"for",
r#"
fn foo() {
struct X;
impl X $0
}
"#,
r#"
fn foo() {
struct X;
impl X for $0
}
"#,
);
check_edit(
"for",
r#"
fn foo() {
struct X;
impl X { fn bar() { $0 } }
}
"#,
r#"
fn foo() {
struct X;
impl X { fn bar() { for $1 in $2 {
    $0
} } }
}
"#,
);
}
#[test]
fn let_semi() {
cov_mark::check!(let_semi);

View File

@ -1,6 +1,7 @@
//! Completes constants and paths in unqualified patterns.
use hir::{db::DefDatabase, AssocItem, ScopeDef};
use ide_db::syntax_helpers::suggest_name;
use syntax::ast::Pat;
use crate::{
@ -45,6 +46,19 @@ pub(crate) fn complete_pattern(
return;
}
// Suggest name only in let-stmt and fn param
if pattern_ctx.should_suggest_name {
let mut name_generator = suggest_name::NameGenerator::new();
if let Some(suggested) = ctx
.expected_type
.as_ref()
.map(|ty| ty.strip_references())
.and_then(|ty| name_generator.for_type(&ty, ctx.db, ctx.edition))
{
acc.suggest_name(ctx, &suggested);
}
}
let refutable = pattern_ctx.refutability == PatternRefutability::Refutable;
let single_variant_enum = |enum_: hir::Enum| ctx.db.enum_data(enum_.into()).variants.len() == 1;

Some files were not shown because too many files have changed in this diff Show More