Auto merge of #15959 - Veykril:macro-shower3, r=lnicola

TokenMap -> SpanMap rewrite

Opening this early so I can get an overview of the full diff more easily; it is still very unfinished and there is a lot of work left to do.

The gist of what this PR does is to move away from assigning IDs to tokens in arguments and expansions, and to instead give the subtrees the text ranges they are sourced from (made relative to some item for incrementality). This means we now have only a single map per expansion, as opposed to one map for the expansion and one for the arguments.
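
To illustrate the shape of this, here is a minimal self-contained sketch; the real types are `tt::SpanData<SpanAnchor, SyntaxContextId>` in `crates/base-db/src/span.rs` further down, and the field names here are only illustrative:

```rust
// Sketch of the new span representation (simplified stand-ins, not the real types).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

/// Index of the anchoring item's AST node within its file.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct ErasedFileAstId(u32);

/// Hygiene information for the token.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SyntaxContextId(u32);

/// What a span is anchored to: a file plus an item inside it. Making the
/// range relative to an item (rather than the file start) keeps spans stable
/// when unrelated parts of the file are edited, which is what buys incrementality.
#[derive(Clone, Copy, Debug)]
struct SpanAnchor {
    file_id: FileId,
    ast_id: ErasedFileAstId,
}

/// The span attached to every token tree leaf/subtree: a text range relative
/// to the anchor, plus a syntax context for hygiene. With this in place, one
/// span map per expansion suffices; no separate argument token map is needed.
#[derive(Clone, Copy, Debug)]
struct SpanData {
    range: (u32, u32), // start..end, relative to `anchor`
    anchor: SpanAnchor,
    ctx: SyntaxContextId,
}

fn main() {
    let span = SpanData {
        range: (4, 9),
        anchor: SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId(1) },
        ctx: SyntaxContextId(0),
    };
    println!("{span:?}");
}
```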

A few of the things that are not done yet (in arbitrary order):
- [x] generally clean up the current mess
- [x] proc-macros, which have been completely ignored so far
- [x] syntax fixups; these have been commented out for the time being and need to be rewritten on top of some marker SyntaxContextId
- [x] macro invocation syntax contexts are not properly passed around yet, so `$crate` hygiene does not work in all cases (but it does in most)
  - [x] builtin macros do not set spans properly, so `$crate` basically does not work with them right now (which we use)
~~- [ ] remove all uses of dummy spans (or, if that does not work, change the dummy entries for dummy spans so that tests will not silently pass due to having a file id for the dummy file)~~
  - [x] de-queryfy `macro_expand`; its sole caller is `parse_macro_expansion`, and both of these are lru-cached with the same limit, so having it be a query is pointless
- [x] docs and more docs
- [x] fix eager macro spans and other stuff
  - [x] simplify include! handling
- [x] Figure out how to undo the sudden `()` expression wrapping in expansions / alternatively, prioritize getting invisible delimiters working again
- [x] Simplify InFile stuff and `HirFileId` extensions
~~- [ ] span crate containing all the file ids, span stuff, and ast ids. Then remove the dependency-injection generics from tt and mbe~~

Fixes https://github.com/rust-lang/rust-analyzer/issues/10300
Fixes https://github.com/rust-lang/rust-analyzer/issues/15685
Merged by bors, 2023-12-04 19:59:53 +00:00, commit e91fdf7860.
133 changed files with 5403 additions and 4205 deletions.

Cargo.lock (generated, 6 changes)

@@ -1255,6 +1255,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
 name = "proc-macro-api"
 version = "0.0.0"
 dependencies = [
+ "base-db",
+ "indexmap",
+ "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "memmap2",
  "object 0.32.0",
  "paths",
@@ -1263,6 +1266,7 @@ dependencies = [
  "serde_json",
  "snap",
  "stdx",
+ "text-size",
  "tracing",
  "triomphe",
  "tt",
@@ -1751,6 +1755,7 @@ dependencies = [
  "always-assert",
  "backtrace",
  "crossbeam-channel",
+ "itertools",
  "jod-thread",
  "libc",
  "miow",
@@ -2010,6 +2015,7 @@ version = "0.0.0"
 dependencies = [
  "smol_str",
  "stdx",
+ "text-size",
 ]

 [[package]]


@@ -12,7 +12,7 @@ authors = ["rust-analyzer team"]
 [profile.dev]
 # Disabling debug info speeds up builds a bunch,
 # and we don't rely on it for debugging that much.
-debug = 0
+debug = 1

 [profile.dev.package]
 # These speed up local tests.


@@ -8,11 +8,12 @@ use test_utils::{
     ESCAPED_CURSOR_MARKER,
 };
 use triomphe::Arc;
-use tt::token_id::{Leaf, Subtree, TokenTree};
+use tt::{Leaf, Subtree, TokenTree};
 use vfs::{file_set::FileSet, VfsPath};

 use crate::{
     input::{CrateName, CrateOrigin, LangCrateOrigin},
+    span::SpanData,
     Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
     FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
     ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
@@ -539,10 +540,13 @@ struct IdentityProcMacroExpander;
 impl ProcMacroExpander for IdentityProcMacroExpander {
     fn expand(
         &self,
-        subtree: &Subtree,
-        _: Option<&Subtree>,
+        subtree: &Subtree<SpanData>,
+        _: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         Ok(subtree.clone())
     }
 }
@@ -553,10 +557,13 @@ struct AttributeInputReplaceProcMacroExpander;
 impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
     fn expand(
         &self,
-        _: &Subtree,
-        attrs: Option<&Subtree>,
+        _: &Subtree<SpanData>,
+        attrs: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         attrs
             .cloned()
             .ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@@ -568,11 +575,14 @@ struct MirrorProcMacroExpander;
 impl ProcMacroExpander for MirrorProcMacroExpander {
     fn expand(
         &self,
-        input: &Subtree,
-        _: Option<&Subtree>,
+        input: &Subtree<SpanData>,
+        _: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
-        fn traverse(input: &Subtree) -> Subtree {
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
+        fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
             let mut token_trees = vec![];
             for tt in input.token_trees.iter().rev() {
                 let tt = match tt {
@@ -595,13 +605,16 @@ struct ShortenProcMacroExpander;
 impl ProcMacroExpander for ShortenProcMacroExpander {
     fn expand(
         &self,
-        input: &Subtree,
-        _: Option<&Subtree>,
+        input: &Subtree<SpanData>,
+        _: Option<&Subtree<SpanData>>,
         _: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError> {
+        _: SpanData,
+        _: SpanData,
+        _: SpanData,
+    ) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
         return Ok(traverse(input));

-        fn traverse(input: &Subtree) -> Subtree {
+        fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
             let token_trees = input
                 .token_trees
                 .iter()
@@ -613,7 +626,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
             Subtree { delimiter: input.delimiter, token_trees }
         }

-        fn modify_leaf(leaf: &Leaf) -> Leaf {
+        fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
             let mut leaf = leaf.clone();
             match &mut leaf {
                 Leaf::Literal(it) => {


@@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
 use rustc_hash::{FxHashMap, FxHashSet};
 use syntax::SmolStr;
 use triomphe::Arc;
-use tt::token_id::Subtree;
 use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};

+use crate::span::SpanData;
+
 // Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
 // then the crate for the proc-macro hasn't been build yet as the build data is missing.
 pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@@ -242,6 +243,9 @@ impl CrateDisplayName {
     }
 }

+// FIXME: These should not be defined in here? Why does base db know about proc-macros
+// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub struct ProcMacroId(pub u32);
@@ -255,10 +259,13 @@ pub enum ProcMacroKind {
 pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
     fn expand(
         &self,
-        subtree: &Subtree,
-        attrs: Option<&Subtree>,
+        subtree: &tt::Subtree<SpanData>,
+        attrs: Option<&tt::Subtree<SpanData>>,
         env: &Env,
-    ) -> Result<Subtree, ProcMacroExpansionError>;
+        def_site: SpanData,
+        call_site: SpanData,
+        mixed_site: SpanData,
+    ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
 }

 #[derive(Debug)]
@@ -323,7 +330,9 @@ pub struct CrateData {
     pub dependencies: Vec<Dependency>,
     pub origin: CrateOrigin,
     pub is_proc_macro: bool,
-    // FIXME: These things should not be per crate! These are more per workspace crate graph level things
+    // FIXME: These things should not be per crate! These are more per workspace crate graph level
+    // things. This info does need to be somewhat present though as to prevent deduplication from
+    // happening across different workspaces with different layouts.
     pub target_layout: TargetLayoutLoadResult,
     pub channel: Option<ReleaseChannel>,
 }


@@ -5,6 +5,7 @@
 mod input;
 mod change;
 pub mod fixture;
+pub mod span;

 use std::panic;
@@ -12,14 +13,13 @@ use rustc_hash::FxHashSet;
 use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
 use triomphe::Arc;

-pub use crate::input::DependencyKind;
 pub use crate::{
     change::Change,
     input::{
         CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
-        Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
-        ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
-        ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
+        DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
+        ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
+        ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
     },
 };
 pub use salsa::{self, Cancelled};
@@ -68,8 +68,7 @@ pub trait FileLoader {
 /// model. Everything else in rust-analyzer is derived from these queries.
 #[salsa::query_group(SourceDatabaseStorage)]
 pub trait SourceDatabase: FileLoader + std::fmt::Debug {
-    // Parses the file into the syntax tree.
-    #[salsa::invoke(parse_query)]
+    /// Parses the file into the syntax tree.
     fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;

     /// The crate graph.
@@ -81,7 +80,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
     fn proc_macros(&self) -> Arc<ProcMacros>;
 }

-fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
     let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
     let text = db.file_text(file_id);
     SourceFile::parse(&text)

crates/base-db/src/span.rs (new file, 209 lines)

@@ -0,0 +1,209 @@
//! File and span related types.
// FIXME: This should probably be moved into its own crate.
use std::fmt;
use salsa::InternId;
use tt::SyntaxContext;
use vfs::FileId;
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
// The first index is always the root node's AstId
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContextId(InternId);
impl fmt::Debug for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if *self == Self::SELF_REF {
f.debug_tuple("SyntaxContextId")
.field(&{
#[derive(Debug)]
#[allow(non_camel_case_types)]
struct SELF_REF;
SELF_REF
})
.finish()
} else {
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
}
}
}
crate::impl_intern_key!(SyntaxContextId);
impl fmt::Display for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.as_u32())
}
}
impl SyntaxContext for SyntaxContextId {
const DUMMY: Self = Self::ROOT;
}
// inherent trait impls please tyvm
impl SyntaxContextId {
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
// veykril(HACK): FIXME salsa doesn't allow us to fetch the id of the current input to be allocated, so
// we need a special value that behaves as the current context.
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
pub fn is_root(self) -> bool {
self == Self::ROOT
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpanAnchor {
pub file_id: FileId,
pub ast_id: ErasedFileAstId,
}
impl fmt::Debug for SpanAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
}
}
impl tt::SpanAnchor for SpanAnchor {
const DUMMY: Self = SpanAnchor { file_id: FileId(0), ast_id: ROOT_ERASED_FILE_AST_ID };
}
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size_of of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interning. You can check details here:
/// <https://en.wikipedia.org/wiki/String_interning>).
///
/// The two variants are encoded in a single u32 which are differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(u32);
impl From<HirFileId> for u32 {
fn from(value: HirFileId) -> Self {
value.0
}
}
impl From<u32> for HirFileId {
fn from(value: u32) -> Self {
HirFileId(value)
}
}
impl From<MacroCallId> for HirFileId {
fn from(value: MacroCallId) -> Self {
value.as_file()
}
}
impl fmt::Debug for HirFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.repr().fmt(f)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFileId {
pub macro_call_id: MacroCallId,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(salsa::InternId);
crate::impl_intern_key!(MacroCallId);
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFileId { macro_call_id: self }.into()
}
pub fn as_macro_file(self) -> MacroFileId {
MacroFileId { macro_call_id: self }
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFileId),
}
impl fmt::Debug for HirFileIdRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.0).finish(),
Self::MacroFile(arg0) => {
f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
}
}
}
}
impl From<FileId> for HirFileId {
fn from(FileId(id): FileId) -> Self {
assert!(id < Self::MAX_FILE_ID);
HirFileId(id)
}
}
impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
let id = id.as_u32();
assert!(id < Self::MAX_FILE_ID);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
#[inline]
pub fn is_macro(self) -> bool {
self.0 & Self::MACRO_FILE_TAG_MASK != 0
}
#[inline]
pub fn macro_file(self) -> Option<MacroFileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => None,
_ => Some(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
#[inline]
pub fn file_id(self) -> Option<FileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => Some(FileId(self.0)),
_ => None,
}
}
#[inline]
pub fn repr(self) -> HirFileIdRepr {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => HirFileIdRepr::FileId(FileId(self.0)),
_ => HirFileIdRepr::MacroFile(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
}
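
As an aside, the MSB-tagging scheme that `HirFileId` uses above can be demonstrated in isolation; this is a self-contained sketch of the technique, not the actual rust-analyzer code:

```rust
// Standalone sketch of the tagging scheme used by `HirFileId`: one u32 where
// a clear MSB means "real FileId" and a set MSB means "MacroCallId" stored in
// the low 31 bits.
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

fn pack_file(file_id: u32) -> u32 {
    assert!(file_id < MACRO_FILE_TAG_MASK);
    file_id
}

fn pack_macro(macro_call_id: u32) -> u32 {
    assert!(macro_call_id < MACRO_FILE_TAG_MASK);
    macro_call_id | MACRO_FILE_TAG_MASK
}

/// Mirrors `HirFileId::repr`: Ok = a real file, Err = a macro expansion "file".
fn unpack(raw: u32) -> Result<u32, u32> {
    if raw & MACRO_FILE_TAG_MASK == 0 {
        Ok(raw)
    } else {
        Err(raw ^ MACRO_FILE_TAG_MASK)
    }
}

fn main() {
    assert_eq!(unpack(pack_file(42)), Ok(42));
    assert_eq!(unpack(pack_macro(7)), Err(7));
}
```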


@@ -1,37 +1,31 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
-use mbe::syntax_node_to_token_tree;
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
 use syntax::{ast, AstNode};

 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};

 fn assert_parse_result(input: &str, expected: CfgExpr) {
-    let (tt, _) = {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        syntax_node_to_token_tree(tt.syntax())
-    };
+    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }

 fn check_dnf(input: &str, expect: Expect) {
-    let (tt, _) = {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        syntax_node_to_token_tree(tt.syntax())
-    };
+    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
 }

 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
-    let (tt, _) = {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        syntax_node_to_token_tree(tt.syntax())
-    };
+    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {

 #[track_caller]
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
-    let (tt, _) = {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        syntax_node_to_token_tree(tt.syntax())
-    };
+    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();


@@ -421,6 +421,7 @@ impl AttrsWithOwner {
                 RawAttrs::from_attrs_owner(
                     db.upcast(),
                     src.with_value(&src.value[it.local_id()]),
+                    db.span_map(src.file_id).as_ref(),
                 )
             }
             GenericParamId::TypeParamId(it) => {
@@ -428,11 +429,16 @@ impl AttrsWithOwner {
                 RawAttrs::from_attrs_owner(
                     db.upcast(),
                     src.with_value(&src.value[it.local_id()]),
+                    db.span_map(src.file_id).as_ref(),
                 )
             }
             GenericParamId::LifetimeParamId(it) => {
                 let src = it.parent.child_source(db);
-                RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
+                RawAttrs::from_attrs_owner(
+                    db.upcast(),
+                    src.with_value(&src.value[it.local_id]),
+                    db.span_map(src.file_id).as_ref(),
+                )
             }
         },
         AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),


@@ -1,17 +1,20 @@
 //! This module contains tests for doc-expression parsing.
 //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.

+use base_db::FileId;
+use hir_expand::span::{RealSpanMap, SpanMapRef};
 use mbe::syntax_node_to_token_tree;
 use syntax::{ast, AstNode};

 use crate::attr::{DocAtom, DocExpr};

 fn assert_parse_result(input: &str, expected: DocExpr) {
-    let (tt, _) = {
-        let source_file = ast::SourceFile::parse(input).ok().unwrap();
-        let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-        syntax_node_to_token_tree(tt.syntax())
-    };
+    let source_file = ast::SourceFile::parse(input).ok().unwrap();
+    let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let tt = syntax_node_to_token_tree(
+        tt.syntax(),
+        SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))),
+    );
     let cfg = DocExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }


@@ -1025,7 +1025,7 @@ impl ExprCollector<'_> {
                 let id = collector(self, Some(expansion.tree()));
                 self.ast_id_map = prev_ast_id_map;
-                self.expander.exit(self.db, mark);
+                self.expander.exit(mark);
                 id
             }
             None => collector(self, None),


@@ -143,7 +143,6 @@ mod m {
 #[test]
 fn desugar_builtin_format_args() {
-    // Regression test for a path resolution bug introduced with inner item handling.
     let (db, body, def) = lower(
         r#"
 //- minicore: fmt
@@ -221,3 +220,115 @@ fn main() {
 }"#]]
     .assert_eq(&body.pretty_print(&db, def))
 }
+
+#[test]
+fn test_macro_hygiene() {
+    let (db, body, def) = lower(
+        r##"
+//- minicore: fmt, from
+//- /main.rs
+mod error;
+
+use crate::error::error;
+
+fn main() {
+    // _ = forces body expansion instead of block def map expansion
+    _ = error!("Failed to resolve path `{}`", node.text());
+}
+//- /error.rs
+macro_rules! _error {
+    ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+macro_rules! _intermediate {
+    ($arg:expr) => {$crate::error::SsrError::new($arg)}
+}
+pub(crate) use _intermediate as intermediate;
+
+pub struct SsrError(pub(crate) core::fmt::Arguments);
+
+impl SsrError {
+    pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
+        SsrError(message.into())
+    }
+}
+"##,
+    );
+
+    assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+    expect![[r#"
+        fn main() {
+            _ = $crate::error::SsrError::new(
+                builtin#lang(Arguments::new_v1_formatted)(
+                    &[
+                        "\"Failed to resolve path `", "`\"",
+                    ],
+                    &[
+                        builtin#lang(Argument::new_display)(
+                            &node.text(),
+                        ),
+                    ],
+                    &[
+                        builtin#lang(Placeholder::new)(
+                            0usize,
+                            ' ',
+                            builtin#lang(Alignment::Unknown),
+                            0u32,
+                            builtin#lang(Count::Implied),
+                            builtin#lang(Count::Implied),
+                        ),
+                    ],
+                    unsafe {
+                        builtin#lang(UnsafeArg::new)()
+                    },
+                ),
+            );
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def))
+}
+
+#[test]
+fn regression_10300() {
+    let (db, body, def) = lower(
+        r#"
+//- minicore: concat, panic
+mod private {
+    pub use core::concat;
+}
+
+macro_rules! m {
+    () => {
+        panic!(concat!($crate::private::concat!("cc")));
+    };
+}
+
+fn f() {
+    m!();
+}
+"#,
+    );
+
+    let (_, source_map) = db.body_with_source_map(def.into());
+    assert_eq!(source_map.diagnostics(), &[]);
+
+    for (_, def_map) in body.blocks(&db) {
+        assert_eq!(def_map.diagnostics(), &[]);
+    }
+
+    expect![[r#"
+        fn f() {
+            $crate::panicking::panic_fmt(
+                builtin#lang(Arguments::new_v1_formatted)(
+                    &[
+                        "\"cc\"",
+                    ],
+                    &[],
+                    &[],
+                    unsafe {
+                        builtin#lang(UnsafeArg::new)()
+                    },
+                ),
+            );
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def))
+}


@@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
                     self.module_id.local_id,
                     MacroCallKind::Attr {
                         ast_id,
-                        attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
+                        attr_args: None,
                         invoc_attr_index: attr.id,
                     },
                     attr.path().clone(),
@@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
                 }
                 AssocItem::MacroCall(call) => {
                     let file_id = self.expander.current_file_id();
-                    let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
+                    let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
                     let module = self.expander.module.local_id;

                     let resolver = |path| {
@@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
                     match macro_call_as_call_id(
                         self.db.upcast(),
                         &AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
+                        call_site,
                         expand_to,
                         self.expander.module.krate(),
                         resolver,
@@ -793,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
             self.collect(&item_tree, tree_id, &iter);

-            self.expander.exit(self.db, mark);
+            self.expander.exit(mark);
         }
     }


@@ -4,21 +4,21 @@ use base_db::CrateId;
 use cfg::CfgOptions;
 use drop_bomb::DropBomb;
 use hir_expand::{
-    attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
-    InFile, MacroCallId, UnresolvedMacro,
+    attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId,
+    InFile, MacroCallId,
 };
 use limit::Limit;
 use syntax::{ast, Parse, SyntaxNode};

 use crate::{
     attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
-    MacroId, ModuleId,
+    MacroId, ModuleId, UnresolvedMacro,
 };

 #[derive(Debug)]
 pub struct Expander {
     cfg_options: CfgOptions,
-    hygiene: Hygiene,
+    span_map: SpanMap,
     krate: CrateId,
     pub(crate) current_file_id: HirFileId,
     pub(crate) module: ModuleId,
@@ -41,7 +41,7 @@ impl Expander {
             recursion_depth: 0,
             recursion_limit,
             cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
-            hygiene: Hygiene::new(db.upcast(), current_file_id),
+            span_map: db.span_map(current_file_id),
             krate: module.krate,
         }
     }
@@ -94,8 +94,8 @@ impl Expander {
         ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
     }

-    pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
-        self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
+    pub fn exit(&mut self, mut mark: Mark) {
+        self.span_map = mark.span_map;
         self.current_file_id = mark.file_id;

         if self.recursion_depth == u32::MAX {
             // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -110,7 +110,7 @@ impl Expander {
     }

     pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
-        LowerCtx::new(db, &self.hygiene, self.current_file_id)
+        LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
     }

     pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@@ -118,7 +118,7 @@ impl Expander {
     }

     pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
-        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
+        Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
     }

     pub(crate) fn cfg_options(&self) -> &CfgOptions {
@@ -130,8 +130,8 @@ impl Expander {
     }

     pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
-        let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
-        Path::from_src(path, &ctx)
+        let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
+        Path::from_src(&ctx, path)
     }

     fn within_limit<F, T: ast::AstNode>(
@@ -174,10 +174,11 @@ impl Expander {
         let parse = value.cast::<T>()?;

         self.recursion_depth += 1;
-        self.hygiene = Hygiene::new(db.upcast(), file_id);
+        let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
         let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
         let mark = Mark {
             file_id: old_file_id,
+            span_map: old_span_map,
             bomb: DropBomb::new("expansion mark dropped"),
         };
         Some((mark, parse))
@@ -190,5 +191,6 @@ impl Expander {
 #[derive(Debug)]
 pub struct Mark {
     file_id: HirFileId,
+    span_map: SpanMap,
     bomb: DropBomb,
 }


@@ -586,7 +586,7 @@ fn find_local_import_locations(
 #[cfg(test)]
 mod tests {
     use base_db::fixture::WithFixture;
-    use hir_expand::hygiene::Hygiene;
+    use hir_expand::db::ExpandDatabase;
     use syntax::ast::AstNode;

     use crate::test_db::TestDB;
@@ -608,7 +608,8 @@ mod tests {
         let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
         let ast_path =
             parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
-        let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
+        let mod_path =
+            ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();

         let def_map = module.def_map(&db);
         let resolved = def_map


@@ -21,7 +21,7 @@ use crate::{
     db::DefDatabase,
     dyn_map::{keys, DynMap},
     expander::Expander,
-    item_tree::{AttrOwner, ItemTree},
+    item_tree::ItemTree,
     lower::LowerCtx,
     nameres::{DefMap, MacroSubNs},
     src::{HasChildSource, HasSource},
@@ -250,7 +250,10 @@ impl GenericParams {
         &mut self,
         lower_ctx: &LowerCtx<'_>,
         node: &dyn HasGenericParams,
-        add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+        add_param_attrs: impl FnMut(
+            Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+            ast::GenericParam,
+        ),
     ) {
         if let Some(params) = node.generic_param_list() {
             self.fill_params(lower_ctx, params, add_param_attrs)
@@ -275,7 +278,10 @@ impl GenericParams {
         &mut self,
         lower_ctx: &LowerCtx<'_>,
         params: ast::GenericParamList,
-        mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
+        mut add_param_attrs: impl FnMut(
+            Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
+            ast::GenericParam,
+        ),
     ) {
         for type_or_const_param in params.type_or_const_params() {
             match type_or_const_param {
@@ -297,7 +303,7 @@ impl GenericParams {
                         type_param.type_bound_list(),
                         Either::Left(type_ref),
                     );
-                    add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
+                    add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
                 }
                 ast::TypeOrConstParam::Const(const_param) => {
                     let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@@ -310,7 +316,7 @@ impl GenericParams {
                         default: ConstRef::from_const_param(lower_ctx, &const_param),
                     };
                     let idx = self.type_or_consts.alloc(param.into());
-                    add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
+                    add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
                 }
             }
         }
@@ -325,7 +331,7 @@ impl GenericParams {
                 lifetime_param.type_bound_list(),
                 Either::Right(lifetime_ref),
             );
-            add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
+            add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
         }
     }
@@ -433,7 +439,7 @@ impl GenericParams {
                     let ctx = expander.ctx(db);
                     let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
                     self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
-                    exp.1.exit(db, mark);
+                    exp.1.exit(mark);
                 }
             }
         });


@@ -112,6 +112,7 @@ pub struct ItemScope {
 #[derive(Debug, PartialEq, Eq)]
 struct DeriveMacroInvocation {
     attr_id: AttrId,
+    /// The `#[derive]` call
     attr_call_id: MacroCallId,
     derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
 }
@@ -401,6 +402,14 @@ impl ItemScope {
         })
     }

+    pub fn derive_macro_invoc(
+        &self,
+        ast_id: AstId<ast::Adt>,
+        attr_id: AttrId,
+    ) -> Option<MacroCallId> {
+        Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id)
+    }
+
     // FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
     pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
         self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)


@@ -42,12 +42,11 @@ use std::{
 };

 use ast::{AstNode, HasName, StructKind};
-use base_db::CrateId;
+use base_db::{span::SyntaxContextId, CrateId};
 use either::Either;
 use hir_expand::{
     ast_id_map::{AstIdNode, FileAstId},
     attrs::RawAttrs,
-    hygiene::Hygiene,
     name::{name, AsName, Name},
     ExpandTo, HirFileId, InFile,
 };
@@ -118,7 +117,7 @@ impl ItemTree {
         let mut item_tree = match_ast! {
             match syntax {
                 ast::SourceFile(file) => {
-                    top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
+                    top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
                     ctx.lower_module_items(&file)
                 },
                 ast::MacroItems(items) => {
@@ -749,6 +748,7 @@ pub struct MacroCall {
     pub path: Interned<ModPath>,
     pub ast_id: FileAstId<ast::MacroCall>,
     pub expand_to: ExpandTo,
+    pub call_site: SyntaxContextId,
 }

 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -778,9 +778,9 @@ impl Use {
         // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
         let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
         let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
-        let hygiene = Hygiene::new(db.upcast(), file_id);
-        let (_, source_map) =
-            lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+        let span_map = db.span_map(file_id);
+        let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
+            .expect("failed to lower use tree");
         source_map[index].clone()
     }
     /// Maps a `UseTree` contained in this import back to its AST node.
@@ -793,8 +793,10 @@ impl Use {
         // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
         let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
         let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
-        let hygiene = Hygiene::new(db.upcast(), file_id);
-        lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
+        let span_map = db.span_map(file_id);
+        lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
+            .expect("failed to lower use tree")
+            .1
     }
 }


@ -2,12 +2,13 @@
use std::collections::hash_map::Entry; use std::collections::hash_map::Entry;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId}; use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds}; use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{ use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance}, generics::{GenericParams, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
LocalLifetimeParamId, LocalTypeOrConstParamId,
}; };
use super::*; use super::*;
@ -33,8 +34,8 @@ impl<'a> Ctx<'a> {
} }
} }
pub(super) fn hygiene(&self) -> &Hygiene { pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.body_ctx.hygiene() self.body_ctx.span_map()
} }
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree { pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@ -79,7 +80,7 @@ impl<'a> Ctx<'a> {
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree { pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree self.tree
.attrs .attrs
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene())); .insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
self.tree.top_level = block self.tree.top_level = block
.statements() .statements()
.filter_map(|stmt| match stmt { .filter_map(|stmt| match stmt {
@ -109,8 +110,7 @@ impl<'a> Ctx<'a> {
} }
fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> { fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene()); let mod_item: ModItem = match item {
let item: ModItem = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(), ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(), ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(), ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@ -129,10 +129,10 @@ impl<'a> Ctx<'a> {
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(), ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(), ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
}; };
let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
self.add_attrs(item.into(), attrs); Some(mod_item)
Some(item)
} }
fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) { fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@ -146,21 +146,32 @@ impl<'a> Ctx<'a> {
} }
} }
fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> { fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
match item { let item: AssocItem = match item_node {
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into), ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into), ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()), ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into), ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
} }?;
let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
self.add_attrs(
match item {
AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
},
attrs,
);
Some(item)
} }
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> { fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt); let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name(); let name = strukt.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(strukt);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt); let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind()); let fields = self.lower_fields(&strukt.kind());
let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id }; let res = Struct { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().structs.alloc(res))) Some(id(self.data().structs.alloc(res)))
} }
@ -184,7 +195,10 @@ impl<'a> Ctx<'a> {
for field in fields.fields() { for field in fields.fields() {
if let Some(data) = self.lower_record_field(&field) { if let Some(data) = self.lower_record_field(&field) {
let idx = self.data().fields.alloc(data); let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &field, self.span_map()),
);
} }
} }
let end = self.next_field_idx(); let end = self.next_field_idx();
@ -205,7 +219,7 @@ impl<'a> Ctx<'a> {
for (i, field) in fields.fields().enumerate() { for (i, field) in fields.fields().enumerate() {
let data = self.lower_tuple_field(i, &field); let data = self.lower_tuple_field(i, &field);
let idx = self.data().fields.alloc(data); let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene())); self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map()));
} }
let end = self.next_field_idx(); let end = self.next_field_idx();
IdxRange::new(start..end) IdxRange::new(start..end)
@ -222,12 +236,12 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> { fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union); let visibility = self.lower_visibility(union);
let name = union.name()?.as_name(); let name = union.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(union);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union); let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() { let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)), Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())), None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
}; };
let ast_id = self.source_ast_id_map.ast_id(union);
let res = Union { name, visibility, generic_params, fields, ast_id }; let res = Union { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().unions.alloc(res))) Some(id(self.data().unions.alloc(res)))
} }
@ -235,12 +249,12 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> { fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_); let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name(); let name = enum_.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(enum_);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_); let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() { let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list), Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()), None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
}; };
let ast_id = self.source_ast_id_map.ast_id(enum_);
let res = Enum { name, visibility, generic_params, variants, ast_id }; let res = Enum { name, visibility, generic_params, variants, ast_id };
Some(id(self.data().enums.alloc(res))) Some(id(self.data().enums.alloc(res)))
} }
@ -252,7 +266,7 @@ impl<'a> Ctx<'a> {
let idx = self.data().variants.alloc(data); let idx = self.data().variants.alloc(data);
self.add_attrs( self.add_attrs(
idx.into(), idx.into(),
RawAttrs::new(self.db.upcast(), &variant, self.hygiene()), RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
); );
} }
} }
@ -303,28 +317,29 @@ impl<'a> Ctx<'a> {
}); });
self.add_attrs( self.add_attrs(
idx.into(), idx.into(),
RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()), RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
); );
has_self_param = true; has_self_param = true;
} }
for param in param_list.params() { for param in param_list.params() {
let idx = match param.dotdotdot_token() {
Some(_) => {
let ast_id = self.source_ast_id_map.ast_id(&param); let ast_id = self.source_ast_id_map.ast_id(&param);
self.data() let idx = match param.dotdotdot_token() {
Some(_) => self
.data()
.params .params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }) .alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
}
None => { None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty()); let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref); let ty = Interned::new(type_ref);
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data() self.data()
.params .params
.alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) }) .alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
} }
}; };
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene())); self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &param, self.span_map()),
);
} }
} }
let end_param = self.next_param_idx(); let end_param = self.next_param_idx();
@ -394,8 +409,8 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it)); let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias); let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias); let bounds = self.lower_type_bounds(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias); let ast_id = self.source_ast_id_map.ast_id(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id }; let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
Some(id(self.data().type_aliases.alloc(res))) Some(id(self.data().type_aliases.alloc(res)))
} }
@ -443,23 +458,17 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> { fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name(); let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def); let visibility = self.lower_visibility(trait_def);
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let generic_params = let generic_params =
self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def); self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some(); let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some(); let is_unsafe = trait_def.unsafe_token().is_some();
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let items = trait_def let items = trait_def
.assoc_item_list() .assoc_item_list()
.into_iter() .into_iter()
.flat_map(|list| list.assoc_items()) .flat_map(|list| list.assoc_items())
.filter_map(|item| { .filter_map(|item_node| self.lower_assoc_item(&item_node))
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.lower_assoc_item(&item).map(|item| {
self.add_attrs(ModItem::from(item).into(), attrs);
item
})
})
.collect(); .collect();
let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id }; let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@ -472,17 +481,18 @@ impl<'a> Ctx<'a> {
) -> Option<FileItemTreeId<TraitAlias>> { ) -> Option<FileItemTreeId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name(); let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def); let visibility = self.lower_visibility(trait_alias_def);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let generic_params = self.lower_generic_params( let generic_params = self.lower_generic_params(
HasImplicitSelf::Yes(trait_alias_def.type_bound_list()), HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
trait_alias_def, trait_alias_def,
); );
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let alias = TraitAlias { name, visibility, generic_params, ast_id }; let alias = TraitAlias { name, visibility, generic_params, ast_id };
Some(id(self.data().trait_aliases.alloc(alias))) Some(id(self.data().trait_aliases.alloc(alias)))
} }
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> { fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
let ast_id = self.source_ast_id_map.ast_id(impl_def);
// Note that trait impls don't get implicit `Self` unlike traits, because here they are a // Note that trait impls don't get implicit `Self` unlike traits, because here they are a
// type alias rather than a type parameter, so this is handled by the resolver. // type alias rather than a type parameter, so this is handled by the resolver.
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def); let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
@ -499,14 +509,8 @@ impl<'a> Ctx<'a> {
.assoc_item_list() .assoc_item_list()
.into_iter() .into_iter()
.flat_map(|it| it.assoc_items()) .flat_map(|it| it.assoc_items())
.filter_map(|item| { .filter_map(|item| self.lower_assoc_item(&item))
let assoc = self.lower_assoc_item(&item)?;
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.add_attrs(ModItem::from(assoc).into(), attrs);
Some(assoc)
})
.collect(); .collect();
let ast_id = self.source_ast_id_map.ast_id(impl_def);
let res = let res =
Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id }; Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res))) Some(id(self.data().impls.alloc(res)))
@ -515,7 +519,7 @@ impl<'a> Ctx<'a> {
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> { fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item); let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item); let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?; let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
let res = Use { visibility, ast_id, use_tree }; let res = Use { visibility, ast_id, use_tree };
Some(id(self.data().uses.alloc(res))) Some(id(self.data().uses.alloc(res)))
@ -537,10 +541,16 @@ impl<'a> Ctx<'a> {
} }
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> { fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?); let span_map = self.span_map();
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
let ast_id = self.source_ast_id_map.ast_id(m); let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m); let expand_to = hir_expand::ExpandTo::from_call_site(m);
let res = MacroCall { path, ast_id, expand_to }; let res = MacroCall {
path,
ast_id,
expand_to,
call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
};
Some(id(self.data().macro_calls.alloc(res))) Some(id(self.data().macro_calls.alloc(res)))
} }
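In effect, every lowered MacroCall now remembers the SyntaxContextId under which it was written, looked up from the span map by text range. A minimal sketch of that lookup, using simplified stand-in types rather than the real rust-analyzer definitions:

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct SyntaxContextId(pub u32);

    #[derive(Clone, Copy, Debug)]
    pub struct Span {
        pub start: u32,
        pub end: u32,
        pub ctx: SyntaxContextId,
    }

    /// Maps offsets in an item's text to spans; entries are sorted by `end`.
    pub struct SpanMap {
        spans: Vec<(u32, Span)>,
    }

    impl SpanMap {
        /// Span covering the start of `range` (a binary search in the real
        /// implementation; linear here for brevity).
        pub fn span_for_range(&self, range: (u32, u32)) -> Span {
            self.spans
                .iter()
                .find(|&&(end, _)| range.0 < end)
                .map(|&(_, span)| span)
                .expect("range not covered by span map")
        }
    }

    pub struct MacroCallItem {
        pub path: String,
        pub call_site: SyntaxContextId,
    }

    /// Mirrors `span_map.span_for_range(m.syntax().text_range()).ctx` above.
    pub fn lower_macro_call(map: &SpanMap, path: String, range: (u32, u32)) -> MacroCallItem {
        MacroCallItem { path, call_site: map.span_for_range(range).ctx }
    }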
@ -572,15 +582,15 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used). // (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items // This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too. // should be considered to be in an extern block too.
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene()); let mod_item: ModItem = match &item {
let id: ModItem = match item { ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(), ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(), ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(), ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
}; };
self.add_attrs(id.into(), attrs); let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
Some(id) self.add_attrs(mod_item.into(), attrs);
Some(mod_item)
}) })
.collect() .collect()
}); });
@ -612,12 +622,16 @@ impl<'a> Ctx<'a> {
generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param)); generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
} }
let add_param_attrs = |item, param| { let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene()); param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.span_map());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here // This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also // because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`. // references `self`.
match self.tree.attrs.entry(item) { match self.tree.attrs.entry(match item {
Either::Right(id) => id.into(),
Either::Left(id) => id.into(),
}) {
Entry::Occupied(mut entry) => { Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs); *entry.get_mut() = entry.get().merge(attrs);
} }
@ -643,7 +657,8 @@ impl<'a> Ctx<'a> {
} }
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId { fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene()); let vis =
RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map());
self.data().vis.alloc(vis) self.data().vis.alloc(vis)
} }
@ -721,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
struct UseTreeLowering<'a> { struct UseTreeLowering<'a> {
db: &'a dyn DefDatabase, db: &'a dyn DefDatabase,
hygiene: &'a Hygiene, span_map: SpanMapRef<'a>,
mapping: Arena<ast::UseTree>, mapping: Arena<ast::UseTree>,
} }
@ -734,7 +749,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`) // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`) // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => { Some(path) => {
match ModPath::from_src(self.db.upcast(), path, self.hygiene) { match ModPath::from_src(self.db.upcast(), path, self.span_map) {
Some(it) => Some(it), Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere None => return None, // FIXME: report errors somewhere
} }
@ -753,7 +768,7 @@ impl UseTreeLowering<'_> {
} else { } else {
let is_glob = tree.star_token().is_some(); let is_glob = tree.star_token().is_some();
let path = match tree.path() { let path = match tree.path() {
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?), Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
None => None, None => None,
}; };
let alias = tree.rename().map(|a| { let alias = tree.rename().map(|a| {
@ -789,10 +804,10 @@ impl UseTreeLowering<'_> {
pub(crate) fn lower_use_tree( pub(crate) fn lower_use_tree(
db: &dyn DefDatabase, db: &dyn DefDatabase,
hygiene: &Hygiene, span_map: SpanMapRef<'_>,
tree: ast::UseTree, tree: ast::UseTree,
) -> Option<(UseTree, Arena<ast::UseTree>)> { ) -> Option<(UseTree, Arena<ast::UseTree>)> {
let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() }; let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
let tree = lowering.lower_use_tree(tree)?; let tree = lowering.lower_use_tree(tree)?;
Some((tree, lowering.mapping)) Some((tree, lowering.mapping))
} }
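The same substitution runs through the whole lowering layer: a borrowed, Copy span-map reference replaces the owned Hygiene value that used to be cloned into each helper. A rough sketch of the pattern, with simplified payloads (the variant names come from the test support code further down):

    /// Cheap to copy, so lowering helpers can pass it down by value.
    #[derive(Clone, Copy)]
    pub enum SpanMapRef<'a> {
        /// Span map of a macro expansion.
        ExpansionSpanMap(&'a [u32]),
        /// Anchored span map of an on-disk file.
        RealSpanMap(&'a [(u32, u32)]),
    }

    pub struct UseTreeLowering<'a> {
        span_map: SpanMapRef<'a>,
    }

    impl<'a> UseTreeLowering<'a> {
        pub fn new(span_map: SpanMapRef<'a>) -> Self {
            UseTreeLowering { span_map }
        }

        fn span_map(&self) -> SpanMapRef<'a> {
            // No clone needed: the reference is Copy, unlike the old Hygiene.
            self.span_map
        }
    }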

View File

@ -457,7 +457,7 @@ impl Printer<'_> {
} }
} }
ModItem::MacroCall(it) => { ModItem::MacroCall(it) => {
let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it]; let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db.upcast())); wln!(self, "{}!(...);", path.display(self.db.upcast()));
} }
ModItem::MacroRules(it) => { ModItem::MacroRules(it) => {

View File

@ -63,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe}, panic::{RefUnwindSafe, UnwindSafe},
}; };
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind}; use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdNode, FileAstId}, ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput}, attrs::{Attr, AttrId, AttrInput},
@ -72,19 +72,18 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase, db::ExpandDatabase,
eager::expand_eager_macro_input, eager::expand_eager_macro_input,
hygiene::Hygiene,
name::Name, name::Name,
proc_macro::ProcMacroExpander, proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind, UnresolvedMacro, MacroDefId, MacroDefKind,
}; };
use item_tree::ExternBlock; use item_tree::ExternBlock;
use la_arena::Idx; use la_arena::Idx;
use nameres::DefMap; use nameres::DefMap;
use stdx::impl_from; use stdx::impl_from;
use syntax::ast; use syntax::{ast, AstNode};
use ::tt::token_id as tt; pub use hir_expand::tt;
use crate::{ use crate::{
builtin_type::BuiltinType, builtin_type::BuiltinType,
@ -1166,16 +1165,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> { ) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value); let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value)); let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
let h = Hygiene::new(db, self.file_id); let span_map = db.span_map(self.file_id);
let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h)); let path =
self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
let Some(path) = path else { let Some(path) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation"))); return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
}; };
let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
macro_call_as_call_id_with_eager( macro_call_as_call_id_with_eager(
db, db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path), &AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
call_site,
expands_to, expands_to,
krate, krate,
resolver, resolver,
@ -1200,17 +1203,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id( fn macro_call_as_call_id(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>, call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo, expand_to: ExpandTo,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy, resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> { ) -> Result<Option<MacroCallId>, UnresolvedMacro> {
macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver) macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
.map(|res| res.value) .map(|res| res.value)
} }
fn macro_call_as_call_id_with_eager( fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>, call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo, expand_to: ExpandTo,
krate: CrateId, krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>, resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@ -1222,7 +1227,7 @@ fn macro_call_as_call_id_with_eager(
let res = match def.kind { let res = match def.kind {
MacroDefKind::BuiltInEager(..) => { MacroDefKind::BuiltInEager(..) => {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db)); let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
expand_eager_macro_input(db, krate, macro_call, def, &|path| { expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
eager_resolver(path).filter(MacroDefId::is_fn_like) eager_resolver(path).filter(MacroDefId::is_fn_like)
}) })
} }
@ -1231,6 +1236,7 @@ fn macro_call_as_call_id_with_eager(
db, db,
krate, krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to }, MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
call_site,
)), )),
err: None, err: None,
}, },
@ -1315,6 +1321,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>, item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId, derive_attr_index: AttrId,
derive_pos: u32, derive_pos: u32,
call_site: SyntaxContextId,
krate: CrateId, krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>, resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> { ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@ -1329,6 +1336,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos, derive_index: derive_pos,
derive_attr_index, derive_attr_index,
}, },
call_site,
); );
Ok((macro_id, def_id, call_id)) Ok((macro_id, def_id, call_id))
} }
@ -1341,15 +1349,13 @@ fn attr_macro_as_call_id(
def: MacroDefId, def: MacroDefId,
) -> MacroCallId { ) -> MacroCallId {
let arg = match macro_attr.input.as_deref() { let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => ( Some(AttrInput::TokenTree(tt)) => {
{ let mut tt = tt.as_ref().clone();
let mut tt = tt.0.clone(); tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
tt.delimiter = tt::Delimiter::UNSPECIFIED; Some(tt)
tt }
},
tt.1.clone(), _ => None,
),
_ => (tt::Subtree::empty(), Default::default()),
}; };
def.as_lazy_macro( def.as_lazy_macro(
@ -1357,11 +1363,18 @@ fn attr_macro_as_call_id(
krate, krate,
MacroCallKind::Attr { MacroCallKind::Attr {
ast_id: item_attr.ast_id, ast_id: item_attr.ast_id,
attr_args: Arc::new(arg), attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id, invoc_attr_index: macro_attr.id,
}, },
macro_attr.ctxt,
) )
} }
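The attribute-argument change above is worth spelling out: a missing token tree is now a real None instead of a default empty subtree paired with an empty map, and a present one gets its delimiter blanked. A sketch with simplified stand-in types:

    use std::sync::Arc;

    #[derive(Clone, Copy, Debug, PartialEq)]
    pub enum Delimiter {
        /// Stand-in for `tt::Delimiter::DUMMY_INVISIBLE`.
        Invisible,
        Paren,
    }

    #[derive(Clone, Debug)]
    pub struct Subtree {
        pub delimiter: Delimiter,
        pub token_trees: Vec<String>, // stand-in for real token trees
    }

    pub enum AttrInput {
        TokenTree(Arc<Subtree>),
        Literal(String),
    }

    pub fn attr_args(input: Option<&AttrInput>) -> Option<Arc<Subtree>> {
        match input {
            Some(AttrInput::TokenTree(tt)) => {
                let mut tt = tt.as_ref().clone();
                // The call-site delimiter is irrelevant for attribute input.
                tt.delimiter = Delimiter::Invisible;
                Some(Arc::new(tt))
            }
            // Previously: `(tt::Subtree::empty(), Default::default())`.
            _ => None,
        }
    }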
#[derive(Debug)]
pub struct UnresolvedMacro {
pub path: hir_expand::mod_path::ModPath,
}
intern::impl_internable!( intern::impl_internable!(
crate::type_ref::TypeRef, crate::type_ref::TypeRef,
crate::type_ref::TraitRef, crate::type_ref::TraitRef,

View File

@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{ use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode}, ast_id_map::{AstIdMap, AstIdNode},
hygiene::Hygiene, span::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile, AstId, HirFileId, InFile,
}; };
use syntax::ast; use syntax::ast;
@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path};
pub struct LowerCtx<'a> { pub struct LowerCtx<'a> {
pub db: &'a dyn DefDatabase, pub db: &'a dyn DefDatabase,
hygiene: Hygiene, span_map: SpanMap,
// FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways.
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>, ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
} }
impl<'a> LowerCtx<'a> { impl<'a> LowerCtx<'a> {
pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self { pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) } LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
} }
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self { pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx { LowerCtx {
db, db,
hygiene: Hygiene::new(db.upcast(), file_id), span_map: db.span_map(file_id),
ast_id_map: Some((file_id, OnceCell::new())), ast_id_map: Some((file_id, OnceCell::new())),
} }
} }
pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self { pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None } LowerCtx { db, span_map, ast_id_map: None }
} }
pub(crate) fn hygiene(&self) -> &Hygiene { pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
&self.hygiene self.span_map.as_ref()
} }
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> { pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, self) Path::from_src(self, ast)
} }
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> { pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {

View File

@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
fn token_mapping_smoke_test() { fn token_mapping_smoke_test() {
check( check(
r#" r#"
// +tokenids
macro_rules! f { macro_rules! f {
( struct $ident:ident ) => { ( struct $ident:ident ) => {
struct $ident { struct $ident {
@ -24,26 +23,22 @@ macro_rules! f {
}; };
} }
// +tokenids // +spans+syntaxctxt
f!(struct MyTraitMap2); f!(struct MyTraitMap2);
"#, "#,
expect![[r##" expect![[r#"
// call ids will be shifted by Shift(30) macro_rules! f {
// +tokenids ( struct $ident:ident ) => {
macro_rules! f {#0 struct $ident {
(#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9 map: ::std::collections::HashSet<()>,
struct#10 $#11ident#12 {#13 }
map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28 };
}#13 }
}#9;#29
}#0
// // +tokenids struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2#
// f!(struct#1 MyTraitMap2#2); map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
struct#10 MyTraitMap2#32 {#13 }#FileId(0):1@132..133\2#
map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28 "#]],
}#13
"##]],
); );
} }
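Decoding the new annotations: struct#FileId(0):1@58..64\2# means the token is anchored to file 0, to the node with erased AST id 1 within that file, covers the text range 58..64 relative to that anchor, and carries syntax context 2 (\0 being the root context of user-written code in these tests). A sketch of the formatter, matching the pretty_print_macro_expansion change further down:

    /// Sketch of the test renderer for one token's span (simplified types);
    /// the real code formats `span.anchor.file_id`, `span.anchor.ast_id`,
    /// `span.range`, and `span.ctx` in the same shape.
    fn render_span(file_id: u32, ast_id: u32, range: (u32, u32), ctx: u32) -> String {
        // Produces e.g. "#FileId(0):1@58..64\2#".
        format!("#FileId({file_id}):{ast_id}@{}..{}\\{ctx}#", range.0, range.1)
    }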
@ -53,49 +48,42 @@ fn token_mapping_floats() {
// (and related issues) // (and related issues)
check( check(
r#" r#"
// +tokenids // +spans+syntaxctxt
macro_rules! f { macro_rules! f {
($($tt:tt)*) => { ($($tt:tt)*) => {
$($tt)* $($tt)*
}; };
} }
// +tokenids // +spans+syntaxctxt
f! { f! {
fn main() { fn main() {
1; 1;
1.0; 1.0;
((1,),).0.0;
let x = 1; let x = 1;
} }
} }
"#, "#,
expect![[r##" expect![[r#"
// call ids will be shifted by Shift(18) // +spans+syntaxctxt
// +tokenids macro_rules! f {
macro_rules! f {#0 ($($tt:tt)*) => {
(#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11 $($tt)*
$#12(#13$#14tt#15)#13*#16 };
}#11;#17 }
}#0
// // +tokenids fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0#
// f! { 1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0#
// fn#1 main#2() { 1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0#
// 1#5;#6 (#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0#
// 1.0#7;#8 let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0#
// let#9 x#10 =#11 1#12;#13 }#FileId(0):2@110..111\0#
// }
// }
fn#19 main#20(#21)#21 {#22
1#23;#24
1.0#25;#26
let#27 x#28 =#29 1#30;#31
}#22
"##]], "#]],
); );
} }
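The float case is the reason this test exists: after `).`, the sequence `0.0` lexes as a single float literal that the parser must split back into two tuple-index accesses, which is why the three resulting tokens `0`, `.`, `0` all map to the same source range 82..85 in the expectation above. The same expression in plain Rust:

    fn main() {
        // `((1,),)` is a 1-tuple holding the 1-tuple `(1,)`; chained tuple
        // indexing digs out the inner 1. `.0.0` is tokenized as the float
        // `0.0` and re-split by the parser.
        let x = ((1,),).0.0;
        assert_eq!(x, 1);
    }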
@ -105,59 +93,115 @@ fn eager_expands_with_unresolved_within() {
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
#[macro_export] #[macro_export]
macro_rules! format_args {} macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) { fn main(foo: ()) {
format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") concat!("hello", identity!("world"), unresolved!(), identity!("!"));
} }
"#, "#,
expect![[r##" expect![[r##"
#[rustc_builtin_macro] #[rustc_builtin_macro]
#[macro_export] #[macro_export]
macro_rules! format_args {} macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) { fn main(foo: ()) {
builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") /* error: unresolved macro unresolved */"helloworld!";
} }
"##]], "##]],
); );
} }
#[test] #[test]
fn token_mapping_eager() { fn concat_spans() {
check( check(
r#" r#"
#[rustc_builtin_macro] #[rustc_builtin_macro]
#[macro_export] #[macro_export]
macro_rules! format_args {} macro_rules! concat {}
macro_rules! identity { macro_rules! identity {
($expr:expr) => { $expr }; ($tt:tt) => {
$tt
}
} }
fn main(foo: ()) { fn main(foo: ()) {
format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar") #[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!"));
}
} }
"#, "#,
expect![[r##" expect![[r##"
#[rustc_builtin_macro] #[rustc_builtin_macro]
#[macro_export] #[macro_export]
macro_rules! format_args {} macro_rules! concat {}
macro_rules! identity { macro_rules! identity {
($expr:expr) => { $expr }; ($tt:tt) => {
$tt
}
} }
fn main(foo: ()) { fn main(foo: ()) {
// format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17) #[rustc_builtin_macro]
builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0 #[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#;
}
} }
"##]], "##]],
); );
} }
#[test]
fn token_mapping_across_files() {
check(
r#"
//- /lib.rs
#[macro_use]
mod foo;
mk_struct/*+spans+syntaxctxt*/!(Foo with u32);
//- /foo.rs
macro_rules! mk_struct {
($foo:ident with $ty:ty) => { struct $foo($ty); }
}
"#,
expect![[r#"
#[macro_use]
mod foo;
struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
"#]],
);
}
#[test] #[test]
fn float_field_access_macro_input() { fn float_field_access_macro_input() {
check( check(

View File

@ -16,21 +16,16 @@ mod proc_macros;
use std::{iter, ops::Range, sync}; use std::{iter, ops::Range, sync};
use ::mbe::TokenMap; use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use expect_test::Expect; use expect_test::Expect;
use hir_expand::{ use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId};
db::{DeclarativeMacroExpander, ExpandDatabase},
AstId, InFile, MacroFile,
};
use stdx::format_to; use stdx::format_to;
use syntax::{ use syntax::{
ast::{self, edit::IndentLevel}, ast::{self, edit::IndentLevel},
AstNode, SyntaxElement, AstNode,
SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT}, SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, TextRange, T, SyntaxNode, T,
}; };
use tt::token_id::{Subtree, TokenId};
use crate::{ use crate::{
db::DefDatabase, db::DefDatabase,
@ -39,6 +34,7 @@ use crate::{
resolver::HasResolver, resolver::HasResolver,
src::HasSource, src::HasSource,
test_db::TestDB, test_db::TestDB,
tt::Subtree,
AdtId, AsMacroCall, Lookup, ModuleDefId, AdtId, AsMacroCall, Lookup, ModuleDefId,
}; };
@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let mut text_edits = Vec::new(); let mut text_edits = Vec::new();
let mut expansions = Vec::new(); let mut expansions = Vec::new();
for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
let mut show_token_ids = false;
for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_token_ids |= comment.to_string().contains("+tokenids");
}
if !show_token_ids {
continue;
}
let call_offset = macro_.syntax().text_range().start().into();
let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
let DeclarativeMacroExpander { mac, def_site_token_map } =
&*db.decl_macro_expander(krate, ast_id);
assert_eq!(mac.err(), None);
let tt = match &macro_ {
ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
ast::Macro::MacroDef(_) => unimplemented!(""),
};
let tt_start = tt.syntax().text_range().start();
tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
|token| {
let range = token.text_range().checked_sub(tt_start).unwrap();
if let Some(id) = def_site_token_map.token_by_range(range) {
let offset = (range.end() + tt_start).into();
text_edits.push((offset..offset, format!("#{}", id.0)));
}
},
);
text_edits.push((
call_offset..call_offset,
format!("// call ids will be shifted by {:?}\n", mac.shift()),
));
}
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) { for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let macro_call = InFile::new(source.file_id, &macro_call); let macro_call = InFile::new(source.file_id, &macro_call);
let res = macro_call let res = macro_call
@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}) })
.unwrap(); .unwrap();
let macro_call_id = res.value.unwrap(); let macro_call_id = res.value.unwrap();
let macro_file = MacroFile { macro_call_id }; let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file); let mut expansion_result = db.parse_macro_expansion(macro_file);
expansion_result.err = expansion_result.err.or(res.err); expansion_result.err = expansion_result.err.or(res.err);
expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id))); expansions.push((macro_call.value.clone(), expansion_result));
} }
for (call, exp, arg) in expansions.into_iter().rev() { for (call, exp) in expansions.into_iter().rev() {
let mut tree = false; let mut tree = false;
let mut expect_errors = false; let mut expect_errors = false;
let mut show_token_ids = false; let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) { for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree"); tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors"); expect_errors |= comment.to_string().contains("+errors");
show_token_ids |= comment.to_string().contains("+tokenids"); show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
} }
let mut expn_text = String::new(); let mut expn_text = String::new();
@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} else { } else {
assert!( assert!(
parse.errors().is_empty(), parse.errors().is_empty(),
"parse errors in expansion: \n{:#?}", "parse errors in expansion: \n{:#?}\n```\n{}\n```",
parse.errors() parse.errors(),
parse.syntax_node(),
); );
} }
let pp = pretty_print_macro_expansion( let pp = pretty_print_macro_expansion(
parse.syntax_node(), parse.syntax_node(),
show_token_ids.then_some(&*token_map), SpanMapRef::ExpansionSpanMap(&token_map),
show_spans,
show_ctxt,
); );
let indent = IndentLevel::from_node(call.syntax()); let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp); let pp = reindent(indent, pp);
@ -185,28 +149,8 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} }
let range = call.syntax().text_range(); let range = call.syntax().text_range();
let range: Range<usize> = range.into(); let range: Range<usize> = range.into();
if show_token_ids {
if let Some((tree, map, _)) = arg.value.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, map, tree);
for (range, id) in ranges {
let idx = (tt_range.start() + range.end()).into();
text_edits.push((idx..idx, format!("#{}", id.0)));
}
}
text_edits.push((range.start..range.start, "// ".into()));
call.to_string().match_indices('\n').for_each(|(offset, _)| {
let offset = offset + 1 + range.start;
text_edits.push((offset..offset, "// ".into()));
});
text_edits.push((range.end..range.end, "\n".into()));
text_edits.push((range.end..range.end, expn_text));
} else {
text_edits.push((range, expn_text)); text_edits.push((range, expn_text));
} }
}
text_edits.sort_by_key(|(range, _)| range.start); text_edits.sort_by_key(|(range, _)| range.start);
text_edits.reverse(); text_edits.reverse();
@ -226,9 +170,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} }
_ => None, _ => None,
}; };
if let Some(src) = src { if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) { if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
let pp = pretty_print_macro_expansion(src.value, None); let call = src.file_id.call_node(&db).expect("macro file");
let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp) format_to!(expanded_text, "\n{}", pp)
} }
} }
@ -237,7 +194,12 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
for impl_id in def_map[local_id].scope.impls() { for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db); let src = impl_id.lookup(&db).source(&db);
if src.file_id.is_builtin_derive(&db) { if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None); let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(src.file_id).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp) format_to!(expanded_text, "\n{}", pp)
} }
} }
@ -246,20 +208,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text); expect.assert_eq(&expanded_text);
} }
fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
tree.token_trees.iter().for_each(|tree| match tree {
tt::TokenTree::Leaf(leaf) => {
let id = match leaf {
tt::Leaf::Literal(it) => it.span,
tt::Leaf::Punct(it) => it.span,
tt::Leaf::Ident(it) => it.span,
};
ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
}
tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
});
}
fn reindent(indent: IndentLevel, pp: String) -> String { fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') { if !pp.contains('\n') {
return pp; return pp;
@ -276,7 +224,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
res res
} }
fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String { fn pretty_print_macro_expansion(
expn: SyntaxNode,
map: SpanMapRef<'_>,
show_spans: bool,
show_ctxt: bool,
) -> String {
let mut res = String::new(); let mut res = String::new();
let mut prev_kind = EOF; let mut prev_kind = EOF;
let mut indent_level = 0; let mut indent_level = 0;
@ -322,10 +275,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
} }
prev_kind = curr_kind; prev_kind = curr_kind;
format_to!(res, "{}", token); format_to!(res, "{}", token);
if let Some(map) = map { if show_spans || show_ctxt {
if let Some(id) = map.token_by_range(token.text_range()) { let span = map.span_for_range(token.text_range());
format_to!(res, "#{}", id.0); format_to!(res, "#");
if show_spans {
format_to!(
res,
"{:?}:{:?}@{:?}",
span.anchor.file_id,
span.anchor.ast_id.into_raw(),
span.range,
);
} }
if show_ctxt {
format_to!(res, "\\{}", span.ctx);
}
format_to!(res, "#");
} }
} }
res res
@ -342,6 +307,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree, subtree: &Subtree,
_: Option<&Subtree>, _: Option<&Subtree>,
_: &base_db::Env, _: &base_db::Env,
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> { ) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) = let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);
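The proc-macro expander interface grows three SpanData parameters. The diff does not name them at this call site; presumably they are the definition-site, call-site, and mixed-site spans that rustc's proc-macro bridge also deals in. A self-contained sketch of the shape, with stand-in types:

    // Stand-in types so the sketch compiles on its own.
    pub struct Subtree;
    pub struct Env;
    #[derive(Clone, Copy)]
    pub struct SpanData;
    pub struct ExpansionError;

    pub trait ProcMacroExpander {
        // Which span is which is an assumption here (definition site,
        // call site, mixed site).
        fn expand(
            &self,
            subtree: &Subtree,
            attr_args: Option<&Subtree>,
            env: &Env,
            def_site: SpanData,
            call_site: SpanData,
            mixed_site: SpanData,
        ) -> Result<Subtree, ExpansionError>;
    }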

View File

@ -93,6 +93,41 @@ fn foo() {
); );
} }
#[test]
fn macro_rules_in_attr() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
r#"
//- proc_macros: identity
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
id! {
#[proc_macros::identity]
impl Foo for WrapBj {
async fn foo(&self) {
self.id().await;
}
}
}
"#,
expect![[r#"
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
#[proc_macros::identity] impl Foo for WrapBj {
async fn foo(&self ) {
self .id().await ;
}
}
"#]],
);
}
#[test] #[test]
fn float_parsing_panic() { fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211 // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
@ -127,3 +162,27 @@ macro_rules! id {
"#]], "#]],
); );
} }
#[test]
fn float_attribute_mapping() {
check(
r#"
//- proc_macros: identity
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
"#,
expect![[r#"
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0#
self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0#
}#FileId(0):1@76..77\0#"#]],
);
}

View File

@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem}; use std::{cmp::Ordering, iter, mem};
use base_db::{CrateId, Dependency, Edition, FileId}; use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions}; use cfg::{CfgExpr, CfgOptions};
use either::Either; use either::Either;
use hir_expand::{ use hir_expand::{
@ -14,7 +14,6 @@ use hir_expand::{
builtin_attr_macro::find_builtin_attr, builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive, builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro, builtin_fn_macro::find_builtin_macro,
hygiene::Hygiene,
name::{name, AsName, Name}, name::{name, AsName, Name},
proc_macro::ProcMacroExpander, proc_macro::ProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
.enumerate() .enumerate()
.map(|(idx, it)| { .map(|(idx, it)| {
// FIXME: a hacky way to create a Name from string. // FIXME: a hacky way to create a Name from string.
let name = let name = tt::Ident {
tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() }; text: it.name.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
};
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32))) (name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
}) })
.collect()) .collect())
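This dummy-span construction, an empty range anchored to a bogus file with the root context, recurs below for other identifiers synthesized from plain strings. A small helper capturing the pattern, with simplified stand-in types (the BOGUS sentinel value is an assumption; the real one is internal):

    #[derive(Clone, Copy, Debug)]
    pub struct FileId(pub u32);
    impl FileId {
        /// Assumption: some reserved sentinel value.
        pub const BOGUS: FileId = FileId(u32::MAX);
    }

    #[derive(Clone, Copy, Debug)]
    pub struct SpanAnchor {
        pub file_id: FileId,
        /// Stand-in for `ROOT_ERASED_FILE_AST_ID`.
        pub ast_id: u32,
    }

    #[derive(Clone, Copy, Debug)]
    pub struct SpanData {
        pub range: (u32, u32),
        pub anchor: SpanAnchor,
        /// Stand-in for `SyntaxContextId::ROOT`.
        pub ctx: u32,
    }

    /// A span for names synthesized from strings, which have no real
    /// source location.
    pub fn dummy_span() -> SpanData {
        SpanData {
            range: (0, 0), // `TextRange::empty(0)`
            anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: 0 },
            ctx: 0,
        }
    }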
@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
from_glob_import: Default::default(), from_glob_import: Default::default(),
skip_attrs: Default::default(), skip_attrs: Default::default(),
is_proc_macro, is_proc_macro,
hygienes: FxHashMap::default(),
}; };
if tree_id.is_block() { if tree_id.is_block() {
collector.seed_with_inner(tree_id); collector.seed_with_inner(tree_id);
@ -212,9 +219,23 @@ struct MacroDirective {
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
enum MacroDirectiveKind { enum MacroDirectiveKind {
FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo }, FnLike {
Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize }, ast_id: AstIdWithPath<ast::MacroCall>,
Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId }, expand_to: ExpandTo,
call_site: SyntaxContextId,
},
Derive {
ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
derive_pos: usize,
call_site: SyntaxContextId,
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr: Attr,
mod_item: ModItem,
/* is this needed? */ tree: TreeId,
},
} }
/// Walks the tree of module recursively /// Walks the tree of module recursively
@ -242,12 +263,6 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro /// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general. /// non-builtin attributes in general.
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>, skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
/// `Hygiene` cache, because `Hygiene` construction is expensive.
///
/// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
/// However, `DefCollector` still needs to lower paths in attributes, in particular those in
/// derive meta item list.
hygienes: FxHashMap<HirFileId, Hygiene>,
} }
impl DefCollector<'_> { impl DefCollector<'_> {
@ -315,12 +330,11 @@ impl DefCollector<'_> {
} }
if *attr_name == hir_expand::name![feature] { if *attr_name == hir_expand::name![feature] {
let hygiene = &Hygiene::new_unhygienic();
let features = attr let features = attr
.parse_path_comma_token_tree(self.db.upcast(), hygiene) .parse_path_comma_token_tree(self.db.upcast())
.into_iter() .into_iter()
.flatten() .flatten()
.filter_map(|feat| match feat.segments() { .filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()), [name] => Some(name.to_smol_str()),
_ => None, _ => None,
}); });
@ -471,7 +485,7 @@ impl DefCollector<'_> {
directive.module_id, directive.module_id,
MacroCallKind::Attr { MacroCallKind::Attr {
ast_id: ast_id.ast_id, ast_id: ast_id.ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())), attr_args: None,
invoc_attr_index: attr.id, invoc_attr_index: attr.id,
}, },
attr.path().clone(), attr.path().clone(),
@ -1119,10 +1133,11 @@ impl DefCollector<'_> {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it); let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind { match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => { MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
let call_id = macro_call_as_call_id( let call_id = macro_call_as_call_id(
self.db.upcast(), self.db.upcast(),
ast_id, ast_id,
*call_site,
*expand_to, *expand_to,
self.def_map.krate, self.def_map.krate,
resolver_def_id, resolver_def_id,
@ -1134,12 +1149,13 @@ impl DefCollector<'_> {
return false; return false;
} }
} }
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id( let id = derive_macro_as_call_id(
self.db, self.db,
ast_id, ast_id,
*derive_attr, *derive_attr,
*derive_pos as u32, *derive_pos as u32,
*call_site,
self.def_map.krate, self.def_map.krate,
resolver, resolver,
); );
@ -1234,22 +1250,10 @@ impl DefCollector<'_> {
}; };
let ast_id = ast_id.with_value(ast_adt_id); let ast_id = ast_id.with_value(ast_adt_id);
let extend_unhygenic; match attr.parse_path_comma_token_tree(self.db.upcast()) {
let hygiene = if file_id.is_macro() {
self.hygienes
.entry(file_id)
.or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
} else {
// Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
// when we're in an ordinary (non-macro) file.

extend_unhygenic = Hygiene::new_unhygienic();
&extend_unhygenic
};
match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
Some(derive_macros) => { Some(derive_macros) => {
let mut len = 0; let mut len = 0;
for (idx, path) in derive_macros.enumerate() { for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path); let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective { self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id, module_id: directive.module_id,
@ -1258,6 +1262,7 @@ impl DefCollector<'_> {
ast_id, ast_id,
derive_attr: attr.id, derive_attr: attr.id,
derive_pos: idx, derive_pos: idx,
call_site,
}, },
container: directive.container, container: directive.container,
}); });
@ -1414,11 +1419,12 @@ impl DefCollector<'_> {
for directive in &self.unresolved_macros { for directive in &self.unresolved_macros {
match &directive.kind { match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => { MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error! // FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id( let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(), self.db.upcast(),
ast_id, ast_id,
*call_site,
*expand_to, *expand_to,
self.def_map.krate, self.def_map.krate,
|path| { |path| {
@ -1444,7 +1450,7 @@ impl DefCollector<'_> {
)); ));
} }
} }
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => { MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call( self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id, directive.module_id,
MacroCallKind::Derive { MacroCallKind::Derive {
@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> {
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use); cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
let mut single_imports = Vec::new(); let mut single_imports = Vec::new();
let hygiene = Hygiene::new_unhygienic();
for attr in macro_use_attrs { for attr in macro_use_attrs {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else { let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import // `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros. // all visible macros.
self.def_collector.import_macros_from_extern_crate( self.def_collector.import_macros_from_extern_crate(
@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> {
); );
return; return;
}; };
for path in paths { for (path, _) in paths {
if let Some(name) = path.as_ident() { if let Some(name) = path.as_ident() {
single_imports.push(name.clone()); single_imports.push(name.clone());
} }
@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> {
let name = match attrs.by_key("rustc_builtin_macro").string_value() { let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => { Some(it) => {
// FIXME: a hacky way to create a Name from string. // FIXME: a hacky way to create a Name from string.
name = name = tt::Ident {
tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name(); text: it.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
}
.as_name();
&name &name
} }
None => { None => {
@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> {
} }
} }
fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) { fn collect_macro_call(
let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path)); &mut self,
&MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
container: ItemContainerId,
) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
let db = self.def_collector.db; let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define // FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> {
if let Ok(res) = macro_call_as_call_id_with_eager( if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(), db.upcast(),
&ast_id, &ast_id,
mac.expand_to, call_site,
expand_to,
self.def_collector.def_map.krate, self.def_collector.def_map.krate,
|path| { |path| {
path.as_ident().and_then(|name| { path.as_ident().and_then(|name| {
@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective { self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id, module_id: self.module_id,
depth: self.macro_depth + 1, depth: self.macro_depth + 1,
kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to }, kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
container, container,
}); });
} }
@ -2363,7 +2383,6 @@ mod tests {
from_glob_import: Default::default(), from_glob_import: Default::default(),
skip_attrs: Default::default(), skip_attrs: Default::default(),
is_proc_macro: false, is_proc_macro: false,
hygienes: FxHashMap::default(),
}; };
collector.seed_with_top_level(); collector.seed_with_top_level();
collector.collect(); collector.collect();

View File

@ -1,7 +1,7 @@
//! This module resolves `mod foo;` declaration to file. //! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec; use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId}; use base_db::{AnchoredPath, FileId};
use hir_expand::name::Name; use hir_expand::{name::Name, HirFileIdExt};
use limit::Limit; use limit::Limit;
use syntax::SmolStr; use syntax::SmolStr;
@ -66,7 +66,7 @@ impl ModDir {
attr_path: Option<&SmolStr>, attr_path: Option<&SmolStr>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> { ) -> Result<(FileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped(); let name = name.unescaped();
let orig_file_id = file_id.original_file(db.upcast()); let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
let mut candidate_files = ArrayVec::<_, 2>::new(); let mut candidate_files = ArrayVec::<_, 2>::new();
match attr_path { match attr_path {
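original_file_respecting_includes differs from plain original_file in one way that matters here: when a mod declaration was produced by an include!, candidate paths presumably need to be computed relative to the included file itself, not the file containing the include! call. A purely hypothetical model of that walk (not the real API):

    pub enum HirFile {
        /// An on-disk file.
        Real(u32),
        /// A macro expansion; an `include!` expansion also knows the file
        /// it textually pulled in.
        Macro { call_file: Box<HirFile>, included_file: Option<u32> },
    }

    pub fn original_file_respecting_includes(mut file: HirFile) -> u32 {
        loop {
            match file {
                HirFile::Real(id) => return id,
                // Stop at the included file itself instead of walking on
                // to the site of the `include!` call.
                HirFile::Macro { included_file: Some(id), .. } => return id,
                HirFile::Macro { call_file, .. } => file = *call_file,
            }
        }
    }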

View File

@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use triomphe::Arc; use triomphe::Arc;
use crate::{db::DefDatabase, test_db::TestDB}; use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
use super::DefMap;
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> { fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture); let db = TestDB::with_files(ra_fixture);

View File

@ -1,13 +1,19 @@
use base_db::SourceDatabaseExt; use base_db::{SourceDatabase, SourceDatabaseExt};
use triomphe::Arc; use triomphe::Arc;
use crate::{db::DefDatabase, AdtId, ModuleDefId}; use crate::{
db::DefDatabase,
use super::*; nameres::tests::{TestDB, WithFixture},
AdtId, ModuleDefId,
};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) { fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial); let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = db.test_crate(); let krate = {
let crate_graph = db.crate_graph();
// Some of these tests use minicore/proc-macros which will be injected as the first crate
crate_graph.iter().last().unwrap()
};
{ {
let events = db.log_executed(|| { let events = db.log_executed(|| {
db.crate_def_map(krate); db.crate_def_map(krate);
@ -66,7 +72,7 @@ fn typing_inside_a_function_should_not_invalidate_def_map() {
#[test] #[test]
fn typing_inside_a_macro_should_not_invalidate_def_map() { fn typing_inside_a_macro_should_not_invalidate_def_map() {
let (mut db, pos) = TestDB::with_position( check_def_map_is_not_recomputed(
r" r"
//- /lib.rs //- /lib.rs
macro_rules! m { macro_rules! m {
@ -84,27 +90,142 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
//- /foo/bar.rs //- /foo/bar.rs
$0 $0
m!(X); m!(X);
pub struct S {}
",
r"
m!(Y);
pub struct S {}
", ",
); );
let krate = db.test_crate();
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
} }
db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
{ #[test]
let events = db.log_executed(|| { fn typing_inside_an_attribute_should_not_invalidate_def_map() {
let crate_def_map = db.crate_def_map(krate); check_def_map_is_not_recomputed(
let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); r"
assert_eq!(module_data.scope.resolutions().count(), 1); //- proc_macros: identity
}); //- /lib.rs
assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}") mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity]
fn f() { foo }
",
);
} }
#[test]
fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity(foo)]
fn f() {}
",
);
}
#[test]
fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity, derive_identity
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn fm() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
fn f() {}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
r"
fn f() {0}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
);
}
#[test]
fn typing_inside_a_derive_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: derive_identity
//- minicore:derive
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[derive(proc_macros::DeriveIdentity)]
#[allow()]
struct S;
",
r"
#[derive(proc_macros::DeriveIdentity)]
#[allow(dead_code)]
struct S;
",
);
} }
#[test] #[test]

View File

@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path { impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call. /// It correctly handles `$crate` based path from macro call.
pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> { pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
lower::lower_path(path, ctx) lower::lower_path(ctx, path)
} }
/// Converts a known mod path to `Path`. /// Converts a known mod path to `Path`.

View File

@ -4,8 +4,10 @@ use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef}; use crate::{lower::LowerCtx, type_ref::ConstRef};
use either::Either; use hir_expand::{
use hir_expand::name::{name, AsName}; mod_path::resolve_crate_root,
name::{name, AsName},
};
use intern::Interned; use intern::Interned;
use syntax::ast::{self, AstNode, HasTypeBounds}; use syntax::ast::{self, AstNode, HasTypeBounds};
@ -16,12 +18,12 @@ use crate::{
/// Converts an `ast::Path` to `Path`. Works with use trees. /// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call. /// It correctly handles `$crate` based path from macro call.
pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> { pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain; let mut kind = PathKind::Plain;
let mut type_anchor = None; let mut type_anchor = None;
let mut segments = Vec::new(); let mut segments = Vec::new();
let mut generic_args = Vec::new(); let mut generic_args = Vec::new();
let hygiene = ctx.hygiene(); let span_map = ctx.span_map();
loop { loop {
let segment = path.segment()?; let segment = path.segment()?;
@ -31,9 +33,15 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? { match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => { ast::PathSegmentKind::Name(name_ref) => {
// FIXME: this should just return name if name_ref.text() == "$crate" {
match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) { break kind = resolve_crate_root(
Either::Left(name) => { ctx.db.upcast(),
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate);
}
let name = name_ref.as_name();
let args = segment let args = segment
.generic_arg_list() .generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it)) .and_then(|it| lower_generic_args(ctx, it))
@ -51,12 +59,6 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
} }
segments.push(name); segments.push(name);
} }
Either::Right(crate_id) => {
kind = PathKind::DollarCrate(crate_id);
break;
}
}
}
ast::PathSegmentKind::SelfTypeKw => { ast::PathSegmentKind::SelfTypeKw => {
segments.push(name![Self]); segments.push(name![Self]);
} }
@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo // <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => { Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } = let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(trait_ref.path()?, ctx)? Path::from_src(ctx, trait_ref.path()?)?
else { else {
return None; return None;
}; };
@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// We follow what it did anyway :) // We follow what it did anyway :)
if segments.len() == 1 && kind == PathKind::Plain { if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) { if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) { let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
kind = PathKind::DollarCrate(crate_id); if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
}
}
} }
} }
} }
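$crate hygiene now falls out of syntax contexts instead of a separate Hygiene table: the context of the $crate token records which expansion introduced it, and that macro's defining crate is what the path resolves to. A minimal model with hypothetical flattened data (the real implementation interns contexts and macro calls in the database):

    #[derive(Clone, Copy, Debug, PartialEq, Eq)]
    pub struct CrateId(pub u32);

    pub struct SyntaxContext {
        /// The macro call whose expansion produced tokens with this
        /// context, if any (`None` for the root context of user code).
        pub outer_expn: Option<usize>,
    }

    pub struct MacroCallLoc {
        /// Crate in which the macro itself is defined.
        pub def_crate: CrateId,
    }

    /// `resolve_crate_root` in miniature: `$crate` written inside a macro
    /// definition names the crate that defines the macro.
    pub fn resolve_crate_root(
        contexts: &[SyntaxContext],
        calls: &[MacroCallLoc],
        ctx: usize,
    ) -> Option<CrateId> {
        contexts[ctx].outer_expn.map(|call| calls[call].def_crate)
    }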

View File

@ -34,6 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB { impl Default for TestDB {
fn default() -> Self { fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() }; let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH); this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this this
} }

View File

@ -2,7 +2,7 @@
use std::iter; use std::iter;
use hir_expand::{hygiene::Hygiene, InFile}; use hir_expand::{span::SpanMapRef, InFile};
use la_arena::ArenaMap; use la_arena::ArenaMap;
use syntax::ast; use syntax::ast;
use triomphe::Arc; use triomphe::Arc;
@ -34,22 +34,22 @@ impl RawVisibility {
db: &dyn DefDatabase, db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>, node: InFile<Option<ast::Visibility>>,
) -> RawVisibility { ) -> RawVisibility {
Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id)) Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
} }
pub(crate) fn from_ast_with_hygiene( pub(crate) fn from_ast_with_span_map(
db: &dyn DefDatabase, db: &dyn DefDatabase,
node: Option<ast::Visibility>, node: Option<ast::Visibility>,
hygiene: &Hygiene, span_map: SpanMapRef<'_>,
) -> RawVisibility { ) -> RawVisibility {
Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene) Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map)
} }
pub(crate) fn from_ast_with_hygiene_and_default( pub(crate) fn from_ast_with_span_map_and_default(
db: &dyn DefDatabase, db: &dyn DefDatabase,
node: Option<ast::Visibility>, node: Option<ast::Visibility>,
default: RawVisibility, default: RawVisibility,
hygiene: &Hygiene, span_map: SpanMapRef<'_>,
) -> RawVisibility { ) -> RawVisibility {
let node = match node { let node = match node {
None => return default, None => return default,
@ -57,7 +57,7 @@ impl RawVisibility {
}; };
match node.kind() { match node.kind() {
ast::VisibilityKind::In(path) => { ast::VisibilityKind::In(path) => {
let path = ModPath::from_src(db.upcast(), path, hygiene); let path = ModPath::from_src(db.upcast(), path, span_map);
let path = match path { let path = match path {
None => return RawVisibility::private(), None => return RawVisibility::private(),
Some(path) => path, Some(path) => path,
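A hedged usage sketch of the renamed entry points: callers fetch the file's span map once instead of constructing a `Hygiene` per node (this mirrors `from_ast` above):

```rust
// Lower a visibility node from a given file, reusing the file-wide span map.
fn lower_visibility(
    db: &dyn DefDatabase,
    vis: InFile<Option<ast::Visibility>>,
) -> RawVisibility {
    let span_map = db.span_map(vis.file_id);
    RawVisibility::from_ast_with_span_map(db, vis.value, span_map.as_ref())
}
```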

View File

@ -12,11 +12,40 @@ use std::{
marker::PhantomData, marker::PhantomData,
}; };
use la_arena::{Arena, Idx}; use la_arena::{Arena, Idx, RawIdx};
use profile::Count; use profile::Count;
use rustc_hash::FxHasher; use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr}; use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::db;
pub use base_db::span::ErasedFileAstId;
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = crate::InFile<FileAstId<N>>;
impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
db.ast_id_map(self.file_id).get(self.value)
}
}
pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId {
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value)
}
}
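For orientation, a hedged usage sketch of the new `AstId` surface (the database handle and a concrete `AstId<ast::Fn>` are assumed to come from the caller):

```rust
use syntax::ast::HasName;

// An AstId resolves back to syntax either directly (`to_node`) or via the
// stable pointer; the pointer survives reparses, the resolved node does not.
fn fn_name(db: &dyn db::ExpandDatabase, id: AstId<ast::Fn>) -> Option<ast::Name> {
    let ptr = id.to_ptr(db);
    let node = ptr.to_node(&db.parse_or_expand(id.file_id));
    node.name()
}
```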
/// `FileAstId` points to an AST node in a specific file. /// `FileAstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> { pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId, raw: ErasedFileAstId,
@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
} }
} }
pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
pub trait AstIdNode: AstNode {} pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node { macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => { (impl AstIdNode for $($ident:ident),+ ) => {
@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap { pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none()); assert!(node.parent().is_none());
let mut res = AstIdMap::default(); let mut res = AstIdMap::default();
// make sure the root node is allocated, even when the walk below would skip its kind
if !should_alloc_id(node.kind()) {
res.alloc(node);
}
// By walking the tree in breadth-first order we make sure that parents // By walking the tree in breadth-first order we make sure that parents
// get lower ids then children. That is, adding a new child does not // get lower ids then children. That is, adding a new child does not
// change parent's id. This means that, say, adding a new function to a // change parent's id. This means that, say, adding a new function to a
@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| { bdfs(node, |it| {
if should_alloc_id(it.kind()) { if should_alloc_id(it.kind()) {
res.alloc(&it); res.alloc(&it);
true TreeOrder::BreadthFirst
} else { } else {
false TreeOrder::DepthFirst
} }
}); });
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ()); res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@ -155,6 +187,11 @@ impl AstIdMap {
res res
} }
/// The [`AstId`] of the root node
pub fn root(&self) -> SyntaxNodePtr {
self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
}
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> { pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax()); let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData } FileAstId { raw, covariant: PhantomData }
@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap() AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
} }
pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr { pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone() self.arena[id].clone()
} }
@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish() hasher.finish()
} }
#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
BreadthFirst,
DepthFirst,
}
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs /// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth first orders. Nodes for which /// order? It is a mix of breadth-first and depth first orders. Nodes for which
/// `f` returns true are visited breadth-first, all the other nodes are explored /// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
/// depth-first. /// in [`TreeOrder::DepthFirst`] order.
/// ///
/// In other words, the size of the bfs queue is bound by the number of "true" /// In other words, the size of the bfs queue is bound by the number of
/// nodes. /// [`TreeOrder::BreadthFirst`] nodes.
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) { fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()]; let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![]; let mut next_layer = vec![];
while !curr_layer.is_empty() { while !curr_layer.is_empty() {
@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() { while let Some(event) = preorder.next() {
match event { match event {
syntax::WalkEvent::Enter(node) => { syntax::WalkEvent::Enter(node) => {
if f(node.clone()) { if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children()); next_layer.extend(node.children());
preorder.skip_subtree(); preorder.skip_subtree();
} }
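The traversal contract can be modeled on a toy tree (a hedged sketch: a bare `Node` stands in for `SyntaxNode`, and sibling order is not preserved):

```rust
#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
    BreadthFirst,
    DepthFirst,
}

struct Node {
    children: Vec<Node>,
}

// Toy model of `bdfs`: BreadthFirst nodes hand their children to the next
// BFS layer and skip the rest of their subtree for now, while DepthFirst
// nodes are explored immediately within the current layer.
fn bdfs(root: &Node, mut f: impl FnMut(&Node) -> TreeOrder) {
    let mut curr_layer = vec![root];
    let mut next_layer = Vec::new();
    while !curr_layer.is_empty() {
        for node in std::mem::take(&mut curr_layer) {
            let mut stack = vec![node];
            while let Some(n) = stack.pop() {
                match f(n) {
                    TreeOrder::BreadthFirst => next_layer.extend(&n.children),
                    TreeOrder::DepthFirst => stack.extend(&n.children),
                }
            }
        }
        std::mem::swap(&mut curr_layer, &mut next_layer);
    }
}
```

Allocating ids layer by layer is what keeps parent ids stable when a child is added, which is the property the breadth-first comment in `from_source` relies on.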

View File

@ -1,19 +1,19 @@
//! A higher level attributes based on TokenTree, with also some shortcuts. //! A higher level attributes based on TokenTree, with also some shortcuts.
use std::{fmt, ops}; use std::{fmt, ops};
use base_db::CrateId; use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr; use cfg::CfgExpr;
use either::Either; use either::Either;
use intern::Interned; use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct}; use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode}; use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
db::ExpandDatabase, db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath, mod_path::ModPath,
span::SpanMapRef,
tt::{self, Subtree}, tt::{self, Subtree},
InFile, InFile,
}; };
@ -39,16 +39,21 @@ impl ops::Deref for RawAttrs {
impl RawAttrs { impl RawAttrs {
pub const EMPTY: Self = Self { entries: None }; pub const EMPTY: Self = Self { entries: None };
pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self { pub fn new(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> Self {
let entries = collect_attrs(owner) let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr { .filter_map(|(id, attr)| match attr {
Either::Left(attr) => { Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id)) attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
} }
Either::Right(comment) => comment.doc_comment().map(|doc| Attr { Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id, id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))), input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))), path: Interned::new(ModPath::from(crate::name!(doc))),
ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
}), }),
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
@ -58,9 +63,12 @@ impl RawAttrs {
Self { entries: if entries.is_empty() { None } else { Some(entries) } } Self { entries: if entries.is_empty() { None } else { Some(entries) } }
} }
pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self { pub fn from_attrs_owner(
let hygiene = Hygiene::new(db, owner.file_id); db: &dyn ExpandDatabase,
Self::new(db, owner.value, &hygiene) owner: InFile<&dyn ast::HasAttrs>,
span_map: SpanMapRef<'_>,
) -> Self {
Self::new(db, owner.value, span_map)
} }
pub fn merge(&self, other: Self) -> Self { pub fn merge(&self, other: Self) -> Self {
@ -122,12 +130,10 @@ impl RawAttrs {
let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map( let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
|(idx, attr)| { |(idx, attr)| {
let tree = Subtree { let tree = Subtree {
delimiter: tt::Delimiter::unspecified(), delimiter: tt::Delimiter::dummy_invisible(),
token_trees: attr.to_vec(), token_trees: attr.to_vec(),
}; };
// FIXME hygiene Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
let hygiene = Hygiene::new_unhygienic();
Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
}, },
); );
@ -185,21 +191,23 @@ pub struct Attr {
pub id: AttrId, pub id: AttrId,
pub path: Interned<ModPath>, pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>, pub input: Option<Interned<AttrInput>>,
pub ctxt: SyntaxContextId,
} }
#[derive(Debug, Clone, PartialEq, Eq, Hash)] #[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput { pub enum AttrInput {
/// `#[attr = "string"]` /// `#[attr = "string"]`
// FIXME: This is losing the span
Literal(SmolStr), Literal(SmolStr),
/// `#[attr(subtree)]` /// `#[attr(subtree)]`
TokenTree(Box<(tt::Subtree, mbe::TokenMap)>), TokenTree(Box<tt::Subtree>),
} }
impl fmt::Display for AttrInput { impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()), AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
AttrInput::TokenTree(tt) => tt.0.fmt(f), AttrInput::TokenTree(tt) => tt.fmt(f),
} }
} }
} }
@ -208,10 +216,10 @@ impl Attr {
fn from_src( fn from_src(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
ast: ast::Meta, ast: ast::Meta,
hygiene: &Hygiene, span_map: SpanMapRef<'_>,
id: AttrId, id: AttrId,
) -> Option<Attr> { ) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?); let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() { let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() { let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(), ast::LiteralKind::String(string) => string.value()?.into(),
@ -219,24 +227,20 @@ impl Attr {
}; };
Some(Interned::new(AttrInput::Literal(value))) Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() { } else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax()); let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map))))) Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else { } else {
None None
}; };
Some(Attr { id, path, input }) Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
} }
fn from_tt( fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
db: &dyn ExpandDatabase, // FIXME: Unnecessary roundtrip tt -> ast -> tt
tt: &tt::Subtree, let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?; let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id) Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
} }
pub fn path(&self) -> &ModPath { pub fn path(&self) -> &ModPath {
@ -256,7 +260,7 @@ impl Attr {
/// #[path(ident)] /// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> { pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? { match self.input.as_deref()? {
AttrInput::TokenTree(tt) => match &*tt.0.token_trees { AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident), [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None, _ => None,
}, },
@ -267,7 +271,7 @@ impl Attr {
/// #[path TokenTree] /// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> { pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? { match self.input.as_deref()? {
AttrInput::TokenTree(tt) => Some(&tt.0), AttrInput::TokenTree(tt) => Some(tt),
_ => None, _ => None,
} }
} }
@ -276,8 +280,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>( pub fn parse_path_comma_token_tree<'a>(
&'a self, &'a self,
db: &'a dyn ExpandDatabase, db: &'a dyn ExpandDatabase,
hygiene: &'a Hygiene, ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
) -> Option<impl Iterator<Item = ModPath> + 'a> {
let args = self.token_tree_value()?; let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis { if args.delimiter.kind != DelimiterKind::Parenthesis {
@ -290,12 +293,13 @@ impl Attr {
if tts.is_empty() { if tts.is_empty() {
return None; return None;
} }
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here. // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree { let subtree = tt::Subtree {
delimiter: tt::Delimiter::unspecified(), delimiter: tt::Delimiter::dummy_invisible(),
token_trees: tts.into_iter().cloned().collect(), token_trees: tts.to_vec(),
}; };
let (parse, _) = let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem); mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?; let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed. // Only simple paths are allowed.
@ -304,7 +308,11 @@ impl Attr {
return None; return None;
} }
let path = meta.path()?; let path = meta.path()?;
ModPath::from_src(db, path, hygiene) let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site,
))
}); });
Some(paths) Some(paths)
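A hedged caller's-eye sketch of the changed iterator: each derive path now comes paired with the syntax context of its call site, so hygiene information survives into name resolution:

```rust
// Collect the comma-separated paths of e.g. `#[derive(Foo, bar::Bar)]`
// together with their call-site contexts; yields an empty Vec when the
// attribute has no parenthesized token tree.
fn derive_paths(db: &dyn ExpandDatabase, attr: &Attr) -> Vec<(ModPath, SyntaxContextId)> {
    attr.parse_path_comma_token_tree(db).into_iter().flatten().collect()
}
```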

View File

@ -1,16 +1,22 @@
//! Builtin attributes. //! Builtin attributes.
use base_db::{
span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{TextRange, TextSize};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind}; use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin { macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => { ($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander { pub enum BuiltinAttrExpander {
$($variant),* $($variant),*
} }
impl BuiltinAttrExpander { impl BuiltinAttrExpander {
pub fn expand( pub fn $expand_fn(
&self, &self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
} }
} }
register_builtin! { register_builtin! { expand:
(bench, Bench) => dummy_attr_expand, (bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand, (cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand, (cfg_eval, CfgEval) => dummy_attr_expand,
@ -77,9 +83,8 @@ fn dummy_attr_expand(
/// ///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into /// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ``` /// ```
/// #[Foo] /// #![Foo]
/// #[bar::Bar] /// #![bar::Bar]
/// ();
/// ``` /// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives. /// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item. /// Since we do not expand the attribute in nameres though, we keep the original item.
@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind { let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0, MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
_ => return ExpandResult::ok(tt::Subtree::empty()), attr_args
}
_ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
}; };
pseudo_derive_attr_expansion(tt, derives) pseudo_derive_attr_expansion(tt, derives, loc.call_site)
} }
pub fn pseudo_derive_attr_expansion( pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree, tt: &tt::Subtree,
args: &tt::Subtree, args: &tt::Subtree,
call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| { let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char, char,
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(), span: tt::SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: call_site,
},
})) }))
}; };
@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. })))) .split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{ {
token_trees.push(mk_leaf('#')); token_trees.push(mk_leaf('#'));
token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('[')); token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned()); token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']')); token_trees.push(mk_leaf(']'));
} }
token_trees.push(mk_leaf('('));
token_trees.push(mk_leaf(')'));
token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees }) ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
} }
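A hedged invocation sketch; `item_tt` and `derive_args` stand for the token trees stored on the macro call, and `call_site` is the syntax context recorded on it:

```rust
// Produces the `#![Foo] #![bar::Bar]`-style inner attributes described in the
// doc comment above, one per comma-separated derive path, with every
// synthesized token carrying `call_site`.
fn expand_pseudo_derive(
    item_tt: &tt::Subtree,
    derive_args: &tt::Subtree,
    call_site: SyntaxContextId,
) -> tt::Subtree {
    pseudo_derive_attr_expansion(item_tt, derive_args, call_site).value
}
```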

View File

@ -1,16 +1,16 @@
//! Builtin derives. //! Builtin derives.
use ::tt::Ident; use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use base_db::{CrateOrigin, LangCrateOrigin};
use itertools::izip; use itertools::izip;
use mbe::TokenMap;
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use stdx::never; use stdx::never;
use tracing::debug; use tracing::debug;
use crate::{ use crate::{
hygiene::span_with_def_site_ctxt,
name::{AsName, Name}, name::{AsName, Name},
tt::{self, TokenId}, span::SpanMapRef,
tt,
}; };
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds}; use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &ast::Adt, tt: &ast::Adt,
token_map: &TokenMap, token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let expander = match *self { let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )* $( BuiltinDeriveExpander::$trait => $expand, )*
}; };
expander(db, id, tt, token_map)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, span, tt, token_map)
} }
fn find_by_name(name: &name::Name) -> Option<Self> { fn find_by_name(name: &name::Name) -> Option<Self> {
@ -70,19 +73,19 @@ enum VariantShape {
Unit, Unit,
} }
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> { fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified())) (0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
} }
impl VariantShape { impl VariantShape {
fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree { fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
self.as_pattern_map(path, |it| quote!(#it)) self.as_pattern_map(path, span, |it| quote!(span => #it))
} }
fn field_names(&self) -> Vec<tt::Ident> { fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self { match self {
VariantShape::Struct(s) => s.clone(), VariantShape::Struct(s) => s.clone(),
VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(), VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![], VariantShape::Unit => vec![],
} }
} }
@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map( fn as_pattern_map(
&self, &self,
path: tt::Subtree, path: tt::Subtree,
span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree, field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree { ) -> tt::Subtree {
match self { match self {
VariantShape::Struct(fields) => { VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| { let fields = fields.iter().map(|it| {
let mapped = field_map(it); let mapped = field_map(it);
quote! { #it : #mapped , } quote! {span => #it : #mapped , }
}); });
quote! { quote! {span =>
#path { ##fields } #path { ##fields }
} }
} }
&VariantShape::Tuple(n) => { &VariantShape::Tuple(n) => {
let fields = tuple_field_iterator(n).map(|it| { let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it); let mapped = field_map(&it);
quote! { quote! {span =>
#mapped , #mapped ,
} }
}); });
quote! { quote! {span =>
#path ( ##fields ) #path ( ##fields )
} }
} }
@ -117,7 +121,7 @@ impl VariantShape {
} }
} }
fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> { fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value { let r = match value {
None => VariantShape::Unit, None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct( Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@ -139,17 +143,17 @@ enum AdtShape {
} }
impl AdtShape { impl AdtShape {
fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> { fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(#it)) self.as_pattern_map(name, |it| quote!(span =>#it), span)
} }
fn field_names(&self) -> Vec<Vec<tt::Ident>> { fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self { match self {
AdtShape::Struct(s) => { AdtShape::Struct(s) => {
vec![s.field_names()] vec![s.field_names(span)]
} }
AdtShape::Enum { variants, .. } => { AdtShape::Enum { variants, .. } => {
variants.iter().map(|(_, fields)| fields.field_names()).collect() variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
} }
AdtShape::Union => { AdtShape::Union => {
never!("using fields of union in derive is always wrong"); never!("using fields of union in derive is always wrong");
@ -162,18 +166,21 @@ impl AdtShape {
&self, &self,
name: &tt::Ident, name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree, field_map: impl Fn(&tt::Ident) -> tt::Subtree,
span: SpanData,
) -> Vec<tt::Subtree> { ) -> Vec<tt::Subtree> {
match self { match self {
AdtShape::Struct(s) => { AdtShape::Struct(s) => {
vec![s.as_pattern_map(quote! { #name }, field_map)] vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
} }
AdtShape::Enum { variants, .. } => variants AdtShape::Enum { variants, .. } => variants
.iter() .iter()
.map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map)) .map(|(v, fields)| {
fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
})
.collect(), .collect(),
AdtShape::Union => { AdtShape::Union => {
never!("pattern matching on union is always wrong"); never!("pattern matching on union is always wrong");
vec![quote! { un }] vec![quote! {span => un }]
} }
} }
} }
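Every `quote!` call in this file now leads with the span to stamp onto the tokens it produces. A hedged micro-example of the new form (interpolation itself is unchanged):

```rust
// `span` is whatever SpanData the synthesized tokens should carry, typically
// the invocation span adjusted to the definition-site context.
fn unit_impl(trait_path: tt::Subtree, span: SpanData) -> tt::Subtree {
    let name = tt::Ident::new("Unit".to_owned(), span);
    quote! {span =>
        impl #trait_path for #name {}
    }
}
```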
@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>, associated_types: Vec<tt::Subtree>,
} }
fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> { fn parse_adt(
let (name, generic_param_list, shape) = match &adt { tm: SpanMapRef<'_>,
adt: &ast::Adt,
call_site: SpanData,
) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => ( ast::Adt::Struct(it) => (
it.name(), it.name(),
it.generic_param_list(), it.generic_param_list(),
@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this { match this {
Some(it) => { Some(it) => {
param_type_set.insert(it.as_name()); param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(it.syntax()).0 mbe::syntax_node_to_token_tree(it.syntax(), tm)
}
None => {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
} }
None => tt::Subtree::empty(),
} }
}; };
let bounds = match &param { let bounds = match &param {
ast::TypeOrConstParam::Type(it) => { ast::TypeOrConstParam::Type(it) => {
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
} }
ast::TypeOrConstParam::Const(_) => None, ast::TypeOrConstParam::Const(_) => None,
}; };
let ty = if let ast::TypeOrConstParam::Const(param) = param { let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param let ty = param
.ty() .ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0) .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
.unwrap_or_else(tt::Subtree::empty); .unwrap_or_else(|| {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
});
Some(ty) Some(ty)
} else { } else {
None None
@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name(); let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p) param_type_set.contains(&name).then_some(p)
}) })
.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0) .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect(); .collect();
let name_token = name_to_token(&tm, name)?; let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types }) Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
} }
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> { fn name_to_token(
token_map: SpanMapRef<'_>,
name: Option<ast::Name>,
) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| { let name = name.ok_or_else(|| {
debug!("parsed item has no name"); debug!("parsed item has no name");
ExpandError::other("missing name") ExpandError::other("missing name")
})?; })?;
let name_token_id = let span = token_map.span_for_range(name.syntax().text_range());
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified); let name_token = tt::Ident { span, text: name.text().into() };
let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
Ok(name_token) Ok(name_token)
} }
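The same pattern in isolation: where the old code fell back to `TokenId::unspecified()` when the token map had no entry, span lookup is now total (a hedged micro-sketch):

```rust
// Every text range in the node maps to some span, so the lossy fallback of
// the TokenMap days has no equivalent here.
fn ident_for(name: &ast::Name, token_map: SpanMapRef<'_>) -> tt::Ident {
    let span = token_map.span_for_range(name.syntax().text_range());
    tt::Ident { text: name.text().into(), span }
}
```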
@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and /// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait. /// therefore does not get bound by the derived trait.
fn expand_simple_derive( fn expand_simple_derive(
// FIXME: use
invoc_span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
trait_path: tt::Subtree, trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree, make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let info = match parse_adt(tm, tt) { let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info, Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), Err(e) => {
return ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
e,
)
}
}; };
let trait_body = make_trait_body(&info); let trait_body = make_trait_body(&info);
let mut where_block = vec![]; let mut where_block = vec![];
@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone(); let ident_ = ident.clone();
if let Some(b) = bound { if let Some(b) = bound {
let ident = ident.clone(); let ident = ident.clone();
where_block.push(quote! { #ident : #b , }); where_block.push(quote! {invoc_span => #ident : #b , });
} }
if let Some(ty) = param_ty { if let Some(ty) = param_ty {
(quote! { const #ident : #ty , }, quote! { #ident_ , }) (quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else { } else {
let bound = trait_path.clone(); let bound = trait_path.clone();
(quote! { #ident : #bound , }, quote! { #ident_ , }) (quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
} }
}) })
.unzip(); .unzip();
@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| { where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone(); let it = it.clone();
let bound = trait_path.clone(); let bound = trait_path.clone();
quote! { #it : #bound , } quote! {invoc_span => #it : #bound , }
})); }));
let name = info.name; let name = info.name;
let expanded = quote! { let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body } impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
}; };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
} }
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree { fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene work for builtin derive macros // FIXME: make hygiene work for builtin derive macros
// such that $crate can be used here. // such that $crate can be used here.
let cg = db.crate_graph(); let cg = db.crate_graph();
@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) { let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core); cov_mark::hit!(test_copy_expand_in_core);
quote! { crate } quote! {span => crate }
} else { } else {
quote! { core } quote! {span => core }
}; };
tt.token_trees[0].clone() tt.token_trees[0].clone()
@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand( fn copy_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id); let krate = find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {}) expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
} }
fn clone_expand( fn clone_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id); let krate = find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) { if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct { let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
char: '*', return quote! {span =>
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
fn clone(&self) -> Self { fn clone(&self) -> Self {
#star self #star self
} }
}; };
} }
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
char: '*', return quote! {span =>
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
fn clone(&self) -> Self { fn clone(&self) -> Self {
match #star self {} match #star self {}
} }
}; };
} }
let name = &adt.name; let name = &adt.name;
let patterns = adt.shape.as_pattern(name); let patterns = adt.shape.as_pattern(span, name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() }); let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| { let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
quote! { quote! {span =>
#pat #fat_arrow #expr, #pat #fat_arrow #expr,
} }
}); });
quote! { quote! {span =>
fn clone(&self) -> Self { fn clone(&self) -> Self {
match self { match self {
##arms ##arms
@ -451,53 +469,56 @@ fn clone_expand(
}) })
} }
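For reference, the output this produces for a plain `struct Point { x: i32, y: i32 }` looks roughly like this (illustrative; spans elided, and `core` is the token chosen by `find_builtin_crate`):

```rust
impl<> core::clone::Clone for Point<> where {
    fn clone(&self) -> Self {
        match self {
            Point { x: x, y: y, } => Point { x: x.clone(), y: y.clone(), },
        }
    }
}
```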
/// This function exists since `quote! { => }` doesn't work. /// This function exists since `quote! {span => => }` doesn't work.
fn fat_arrow() -> ::tt::Subtree<TokenId> { fn fat_arrow(span: SpanData) -> tt::Subtree {
let eq = let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; quote! {span => #eq> }
quote! { #eq> }
} }
/// This function exists since `quote! { && }` doesn't work. /// This function exists since `quote! {span => && }` doesn't work.
fn and_and() -> ::tt::Subtree<TokenId> { fn and_and(span: SpanData) -> tt::Subtree {
let and = let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() }; quote! {span => #and& }
quote! { #and& }
} }
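These helpers are then interpolated wherever the raw token sequence would trip up `quote!`, e.g. (hedged; `pat` and `expr` are assumed to be prebuilt subtrees):

```rust
// Splice a prebuilt `=>` into a match arm; see the doc comments above for why
// `quote!` cannot contain the sequence literally.
fn match_arm(pat: tt::Subtree, expr: tt::Subtree, span: SpanData) -> tt::Subtree {
    let fat_arrow = fat_arrow(span);
    quote! {span => #pat #fat_arrow #expr , }
}
```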
fn default_expand( fn default_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id); let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape { let body = match &adt.shape {
AdtShape::Struct(fields) => { AdtShape::Struct(fields) => {
let name = &adt.name; let name = &adt.name;
fields fields.as_pattern_map(
.as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default())) quote!(span =>#name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
} }
AdtShape::Enum { default_variant, variants } => { AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant { if let Some(d) = default_variant {
let (name, fields) = &variants[*d]; let (name, fields) = &variants[*d];
let adt_name = &adt.name; let adt_name = &adt.name;
fields.as_pattern_map( fields.as_pattern_map(
quote!(#adt_name :: #name), quote!(span =>#adt_name :: #name),
|_| quote!(#krate::default::Default::default()), span,
|_| quote!(span =>#krate::default::Default::default()),
) )
} else { } else {
// FIXME: Return expand error here // FIXME: Return expand error here
quote!() quote!(span =>)
} }
} }
AdtShape::Union => { AdtShape::Union => {
// FIXME: Return expand error here // FIXME: Return expand error here
quote!() quote!(span =>)
} }
}; };
quote! { quote! {span =>
fn default() -> Self { fn default() -> Self {
#body #body
} }
@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand( fn debug_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id); let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v { let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => { VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| { let for_fields = fields.iter().map(|it| {
let x_string = it.to_string(); let x_string = it.to_string();
quote! { quote! {span =>
.field(#x_string, & #it) .field(#x_string, & #it)
} }
}); });
quote! { quote! {span =>
f.debug_struct(#name) ##for_fields .finish() f.debug_struct(#name) ##for_fields .finish()
} }
} }
VariantShape::Tuple(n) => { VariantShape::Tuple(n) => {
let for_fields = tuple_field_iterator(*n).map(|it| { let for_fields = tuple_field_iterator(span, *n).map(|it| {
quote! { quote! {span =>
.field( & #it) .field( & #it)
} }
}); });
quote! { quote! {span =>
f.debug_tuple(#name) ##for_fields .finish() f.debug_tuple(#name) ##for_fields .finish()
} }
} }
VariantShape::Unit => quote! { VariantShape::Unit => quote! {span =>
f.write_str(#name) f.write_str(#name)
}, },
}; };
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
char: '*', return quote! {span =>
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {} match #star self {}
} }
@ -553,20 +571,20 @@ fn debug_expand(
} }
let arms = match &adt.shape { let arms = match &adt.shape {
AdtShape::Struct(fields) => { AdtShape::Struct(fields) => {
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
let name = &adt.name; let name = &adt.name;
let pat = fields.as_pattern(quote!(#name)); let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields); let expr = for_variant(name.to_string(), fields);
vec![quote! { #pat #fat_arrow #expr }] vec![quote! {span => #pat #fat_arrow #expr }]
} }
AdtShape::Enum { variants, .. } => variants AdtShape::Enum { variants, .. } => variants
.iter() .iter()
.map(|(name, v)| { .map(|(name, v)| {
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
let adt_name = &adt.name; let adt_name = &adt.name;
let pat = v.as_pattern(quote!(#adt_name :: #name)); let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v); let expr = for_variant(name.to_string(), v);
quote! { quote! {span =>
#pat #fat_arrow #expr , #pat #fat_arrow #expr ,
} }
}) })
@ -576,7 +594,7 @@ fn debug_expand(
vec![] vec![]
} }
}; };
quote! { quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result { fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self { match self {
##arms ##arms
@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand( fn hash_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id); let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) { if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here // FIXME: Return expand error here
return quote! {}; return quote! {span =>};
} }
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) { if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct { let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
char: '*', return quote! {span =>
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) { fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {} match #star self {}
} }
}; };
} }
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map( let arms =
adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
|(pat, names)| { |(pat, names)| {
let expr = { let expr = {
let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); }); let it =
quote! { { names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
quote! {span => {
##it ##it
} } } }
}; };
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
quote! { quote! {span =>
#pat #fat_arrow #expr , #pat #fat_arrow #expr ,
} }
}, },
); );
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) { let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
quote! { #krate::mem::discriminant(self).hash(ra_expand_state); } quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else { } else {
quote! {} quote! {span =>}
}; };
quote! { quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) { fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant #check_discriminant
match self { match self {
@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand( fn eq_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id); let krate = find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {}) expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
} }
fn partial_eq_expand( fn partial_eq_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id); let krate = find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) { if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here // FIXME: Return expand error here
return quote! {}; return quote! {span =>};
} }
let name = &adt.name; let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name); let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| { |(pat1, pat2, names)| {
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
let body = match &*names { let body = match &*names {
[] => { [] => {
quote!(true) quote!(span =>true)
} }
[first, rest @ ..] => { [first, rest @ ..] => {
let rest = rest.iter().map(|it| { let rest = rest.iter().map(|it| {
let t1 = Ident::new(format!("{}_self", it.text), it.span); let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
let t2 = Ident::new(format!("{}_other", it.text), it.span); let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and(); let and_and = and_and(span);
quote!(#and_and #t1 .eq( #t2 )) quote!(span =>#and_and #t1 .eq( #t2 ))
}); });
let first = { let first = {
let t1 = Ident::new(format!("{}_self", first.text), first.span); let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
let t2 = Ident::new(format!("{}_other", first.text), first.span); let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
quote!(#t1 .eq( #t2 )) quote!(span =>#t1 .eq( #t2 ))
}; };
quote!(#first ##rest) quote!(span =>#first ##rest)
} }
}; };
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
}, },
); );
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
quote! { quote! {span =>
fn eq(&self, other: &Self) -> bool { fn eq(&self, other: &Self) -> bool {
match (self, other) { match (self, other) {
##arms ##arms
@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns( fn self_and_other_patterns(
adt: &BasicAdtInfo, adt: &BasicAdtInfo,
name: &tt::Ident, name: &tt::Ident,
span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) { ) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
let self_patterns = adt.shape.as_pattern_map(name, |it| { let self_patterns = adt.shape.as_pattern_map(
let t = Ident::new(format!("{}_self", it.text), it.span); name,
quote!(#t) |it| {
}); let t = tt::Ident::new(format!("{}_self", it.text), it.span);
let other_patterns = adt.shape.as_pattern_map(name, |it| { quote!(span =>#t)
let t = Ident::new(format!("{}_other", it.text), it.span); },
quote!(#t) span,
}); );
let other_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_other", it.text), it.span);
quote!(span =>#t)
},
span,
);
(self_patterns, other_patterns) (self_patterns, other_patterns)
} }
fn ord_expand( fn ord_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id); let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare( fn compare(
krate: &tt::TokenTree, krate: &tt::TokenTree,
left: tt::Subtree, left: tt::Subtree,
right: tt::Subtree, right: tt::Subtree,
rest: tt::Subtree, rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree { ) -> tt::Subtree {
let fat_arrow1 = fat_arrow(); let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(); let fat_arrow2 = fat_arrow(span);
quote! { quote! {span =>
match #left.cmp(&#right) { match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 { #krate::cmp::Ordering::Equal #fat_arrow1 {
#rest #rest
@ -745,34 +775,34 @@ fn ord_expand(
} }
if matches!(adt.shape, AdtShape::Union) { if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here // FIXME: Return expand error here
return quote!(); return quote!(span =>);
} }
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| { |(pat1, pat2, fields)| {
let mut body = quote!(#krate::cmp::Ordering::Equal); let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() { for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span); let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span); let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body); body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
} }
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
}, },
); );
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
let mut body = quote! { let mut body = quote! {span =>
match (self, other) { match (self, other) {
##arms ##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal _unused #fat_arrow #krate::cmp::Ordering::Equal
} }
}; };
if matches!(&adt.shape, AdtShape::Enum { .. }) { if matches!(&adt.shape, AdtShape::Enum { .. }) {
let left = quote!(#krate::intrinsics::discriminant_value(self)); let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other)); let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body); body = compare(krate, left, right, body, span);
} }
quote! { quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering { fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body #body
} }
@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand( fn partial_ord_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
span: SpanData,
tt: &ast::Adt, tt: &ast::Adt,
tm: &TokenMap, tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id); let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| { expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare( fn compare(
krate: &tt::TokenTree, krate: &tt::TokenTree,
left: tt::Subtree, left: tt::Subtree,
right: tt::Subtree, right: tt::Subtree,
rest: tt::Subtree, rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree { ) -> tt::Subtree {
let fat_arrow1 = fat_arrow(); let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(); let fat_arrow2 = fat_arrow(span);
quote! { quote! {span =>
match #left.partial_cmp(&#right) { match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 { #krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest #rest
@ -807,37 +839,39 @@ fn partial_ord_expand(
} }
if matches!(adt.shape, AdtShape::Union) { if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here // FIXME: Return expand error here
return quote!(); return quote!(span =>);
} }
let left = quote!(#krate::intrinsics::discriminant_value(self)); let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other)); let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name); let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map( let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| { |(pat1, pat2, fields)| {
let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal)); let mut body =
quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() { for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span); let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span); let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body); body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
} }
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , } quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
}, },
); );
let fat_arrow = fat_arrow(); let fat_arrow = fat_arrow(span);
let body = compare( let body = compare(
krate, krate,
left, left,
right, right,
quote! { quote! {span =>
match (self, other) { match (self, other) {
##arms ##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal) _unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
} }
}, },
span,
); );
quote! { quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> { fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body #body
} }

View File

@ -1,17 +1,23 @@
//! Builtin macro //! Builtin macro
use base_db::{AnchoredPath, Edition, FileId}; use base_db::{
span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
};
use cfg::CfgExpr; use cfg::CfgExpr;
use either::Either; use either::Either;
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap}; use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{ use syntax::{
ast::{self, AstToken}, ast::{self, AstToken},
SmolStr, SmolStr,
}; };
use crate::{ use crate::{
db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId, db::ExpandDatabase,
MacroCallLoc, hygiene::span_with_def_site_ctxt,
name, quote,
tt::{self, DelimSpan},
ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
}; };
macro_rules! register_builtin { macro_rules! register_builtin {
@ -36,7 +42,10 @@ macro_rules! register_builtin {
let expander = match *self { let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )* $( BuiltinFnLikeExpander::$kind => $expand, )*
}; };
expander(db, id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
} }
} }
@ -44,13 +53,16 @@ macro_rules! register_builtin {
pub fn expand( pub fn expand(
&self, &self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let expander = match *self { let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )* $( EagerExpander::$e_kind => $e_expand, )*
}; };
expander(db, arg_id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
} }
} }
@ -109,29 +121,42 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand (option_env, OptionEnv) => option_env_expand
} }
const DOLLAR_CRATE: tt::Ident = fn mk_pound(span: SpanData) -> tt::Subtree {
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() }; crate::quote::IntoTt::to_subtree(
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#',
spacing: crate::tt::Spacing::Alone,
span,
})
.into()],
span,
)
}
fn module_path_expand( fn module_path_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// Just return a dummy result. // Just return a dummy result.
ExpandResult::ok(quote! { "module::path" }) ExpandResult::ok(quote! {span =>
"module::path"
})
} }
fn line_expand( fn line_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes // dummy implementation for type-checking purposes
ExpandResult::ok(tt::Subtree { ExpandResult::ok(tt::Subtree {
delimiter: tt::Delimiter::unspecified(), delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: "0u32".into(), text: "0u32".into(),
span: tt::Span::UNSPECIFIED, span,
}))], }))],
}) })
} }
@ -140,26 +165,29 @@ fn log_syntax_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {}) ExpandResult::ok(quote! {span =>})
} }
fn trace_macros_expand( fn trace_macros_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {}) ExpandResult::ok(quote! {span =>})
} }
fn stringify_expand( fn stringify_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees); let pretty = ::tt::pretty(&tt.token_trees);
let expanded = quote! { let expanded = quote! {span =>
#pretty #pretty
}; };
@ -170,27 +198,29 @@ fn assert_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ','); let args = parse_exprs_with_sep(tt, ',');
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args { let expanded = match &*args {
[cond, panic_args @ ..] => { [cond, panic_args @ ..] => {
let comma = tt::Subtree { let comma = tt::Subtree {
delimiter: tt::Delimiter::unspecified(), delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',', char: ',',
spacing: tt::Spacing::Alone, spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(), span,
}))], }))],
}; };
let cond = cond.clone(); let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma); let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
quote! {{ quote! {span =>{
if !(#cond) { if !(#cond) {
#DOLLAR_CRATE::panic!(##panic_args); #dollar_crate::panic!(##panic_args);
} }
}} }}
} }
[] => quote! {{}}, [] => quote! {span =>{}},
}; };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
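For illustration, `assert!(x < y, "oops")` expands along this path, with every token spanned at the call site, to roughly:

{
    if !(x < y) {
        $crate::panic!("oops");
    }
}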
@ -200,12 +230,13 @@ fn file_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names // FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "". // so just return "".
let file_name = ""; let file_name = "";
let expanded = quote! { let expanded = quote! {span =>
#file_name #file_name
}; };
@ -216,16 +247,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "") format_args_expand_general(db, id, tt, "", span)
} }
fn format_args_nl_expand( fn format_args_nl_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "\\n") format_args_expand_general(db, id, tt, "\\n", span)
} }
fn format_args_expand_general( fn format_args_expand_general(
@ -234,11 +267,12 @@ fn format_args_expand_general(
tt: &tt::Subtree, tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly // FIXME: Make use of this so that mir interpretation works properly
_end_string: &str, _end_string: &str,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let pound = quote! {@PUNCT '#'}; let pound = mk_pound(span);
let mut tt = tt.clone(); let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis; tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
return ExpandResult::ok(quote! { return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt builtin #pound format_args #tt
}); });
} }
@ -247,25 +281,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax // We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them. // highlighting for them.
let mut literals = Vec::new(); let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) { for tt in tt.token_trees.chunks(2) {
match tt { match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))] [tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] => | [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{ {
let krate = DOLLAR_CRATE.clone(); let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
literals.push(quote!(#krate::format_args!(#lit);)); literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
} }
_ => break, _ => break,
} }
} }
let pound = quote! {@PUNCT '#'}; let pound = mk_pound(span);
let expanded = quote! { let expanded = quote! {span =>
builtin #pound asm ( builtin #pound asm (
{##literals} {##literals}
) )
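So for an invocation like `asm!("mov {0}, {1}", out(reg) a, in(reg) b)`, each leading string literal is re-emitted as

$crate::format_args!("mov {0}, {1}");

which is what gives the template string format-specifier highlighting, even though the expansion itself is only a stub.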
@ -277,20 +311,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
_tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level) // Expand to nothing (at item-level)
ExpandResult::ok(quote! {}) ExpandResult::ok(quote! {span =>})
} }
fn cfg_expand( fn cfg_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt); let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false); let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
let expanded = if enabled { quote!(true) } else { quote!(false) }; let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)
} }
@ -298,13 +334,15 @@ fn panic_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id); let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}` // Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::panic_2021!) quote!(span =>#dollar_crate::panic::panic_2021!)
} else { } else {
quote!(#DOLLAR_CRATE::panic::panic_2015!) quote!(span =>#dollar_crate::panic::panic_2015!)
}; };
// Pass the original arguments // Pass the original arguments
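For example, on edition 2021 or later, `panic!("boom")` now expands to

$crate::panic::panic_2021!("boom")

with `$crate` spanned in the definition-site context computed by the dispatcher, so it resolves to the macro's defining crate rather than the caller's namespace.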
@ -316,13 +354,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id); let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}` // Expand to a macro call `$crate::panic::unreachable_{edition}`
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 { let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::unreachable_2021!) quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else { } else {
quote!(#DOLLAR_CRATE::panic::unreachable_2015!) quote!(span =>#dollar_crate::panic::unreachable_2015!)
}; };
// Pass the original arguments // Pass the original arguments
@ -352,6 +392,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_id: MacroCallId, _id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees { let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@ -361,13 +402,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"), _ => ExpandError::other("`compile_error!` argument must be a string"),
}; };
ExpandResult { value: quote! {}, err: Some(err) } ExpandResult { value: quote! {span =>}, err: Some(err) }
} }
fn concat_expand( fn concat_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut err = None; let mut err = None;
let mut text = String::new(); let mut text = String::new();
@ -407,13 +449,14 @@ fn concat_expand(
} }
} }
} }
ExpandResult { value: quote!(#text), err } ExpandResult { value: quote!(span =>#text), err }
} }
fn concat_bytes_expand( fn concat_bytes_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new(); let mut bytes = Vec::new();
let mut err = None; let mut err = None;
@ -446,8 +489,8 @@ fn concat_bytes_expand(
} }
} }
} }
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() }; let ident = tt::Ident { text: bytes.join(", ").into(), span };
ExpandResult { value: quote!([#ident]), err } ExpandResult { value: quote!(span =>[#ident]), err }
} }
fn concat_bytes_expand_subtree( fn concat_bytes_expand_subtree(
@ -480,6 +523,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let mut err = None; let mut err = None;
let mut ident = String::new(); let mut ident = String::new();
@ -494,8 +538,9 @@ fn concat_idents_expand(
} }
} }
} }
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() }; // FIXME merge spans
ExpandResult { value: quote!(#ident), err } let ident = tt::Ident { text: ident.into(), span };
ExpandResult { value: quote!(span =>#ident), err }
} }
fn relative_file( fn relative_file(
@ -530,45 +575,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand( fn include_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
_tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
match db.include_expand(arg_id) { let file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok((res, _)) => ExpandResult::ok(res.0.clone()), Ok(it) => it,
Err(e) => ExpandResult::new(tt::Subtree::empty(), e), Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
match parse_to_token_tree(
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
SyntaxContextId::ROOT,
&db.file_text(file_id),
) {
Some(it) => ExpandResult::ok(it),
None => ExpandResult::new(
tt::Subtree::empty(DelimSpan { open: span, close: span }),
ExpandError::other("failed to parse included file"),
),
} }
} }
pub(crate) fn include_arg_to_tt( pub fn include_input_to_file_id(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> { arg: &tt::Subtree,
let loc = db.lookup_intern_macro_call(arg_id); ) -> Result<FileId, ExpandError> {
let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else { relative_file(db, arg_id, &parse_string(arg)?, false)
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
let file_id = relative_file(db, *arg_id, &path, false)?;
let (subtree, map) =
parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
Ok((triomphe::Arc::new((subtree, map)), file_id))
} }
fn include_bytes_expand( fn include_bytes_expand(
_db: &dyn ExpandDatabase, _db: &dyn ExpandDatabase,
_arg_id: MacroCallId, _arg_id: MacroCallId,
tt: &tt::Subtree, _tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
if let Err(e) = parse_string(tt) {
return ExpandResult::new(tt::Subtree::empty(), e);
}
// FIXME: actually read the file here if the user asked for macro expansion // FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree { let res = tt::Subtree {
delimiter: tt::Delimiter::unspecified(), delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal { token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(), text: r#"b"""#.into(),
span: tt::TokenId::unspecified(), span,
}))], }))],
}; };
ExpandResult::ok(res) ExpandResult::ok(res)
@ -578,10 +626,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) { let path = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
}; };
// FIXME: we're not able to read excluded files (which is most of them because // FIXME: we're not able to read excluded files (which is most of them because
@ -591,14 +642,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) { let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id, Ok(file_id) => file_id,
Err(_) => { Err(_) => {
return ExpandResult::ok(quote!("")); return ExpandResult::ok(quote!(span =>""));
} }
}; };
let text = db.file_text(file_id); let text = db.file_text(file_id);
let text = &*text; let text = &*text;
ExpandResult::ok(quote!(#text)) ExpandResult::ok(quote!(span =>#text))
} }
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> { fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@ -610,10 +661,13 @@ fn env_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) { let key = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
}; };
let mut err = None; let mut err = None;
@ -630,7 +684,7 @@ fn env_expand(
// `include!("foo.rs")`, which might lead to an infinite loop // `include!("foo.rs")`, which might lead to an infinite loop
"UNRESOLVED_ENV_VAR".to_string() "UNRESOLVED_ENV_VAR".to_string()
}); });
let expanded = quote! { #s }; let expanded = quote! {span => #s };
ExpandResult { value: expanded, err } ExpandResult { value: expanded, err }
} }
@ -639,15 +693,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
arg_id: MacroCallId, arg_id: MacroCallId,
tt: &tt::Subtree, tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> { ) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) { let key = match parse_string(tt) {
Ok(it) => it, Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e), Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
}; };
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros. // FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) { let expanded = match get_env_inner(db, arg_id, &key) {
None => quote! { ::core::option::Option::None::<&str> }, None => quote! {span => ::core::option::Option::None::<&str> },
Some(s) => quote! { ::core::option::Option::Some(#s) }, Some(s) => quote! {span => ::core::option::Option::Some(#s) },
}; };
ExpandResult::ok(expanded) ExpandResult::ok(expanded)

View File

@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion. //! Defines database & queries for macro expansion.
use base_db::{salsa, CrateId, Edition, SourceDatabase}; use base_db::{
salsa::{self, debug::DebugQueryTable},
span::SyntaxContextId,
CrateId, Edition, FileId, SourceDatabase,
};
use either::Either; use either::Either;
use limit::Limit; use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult}; use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet; use rustc_hash::FxHashSet;
use syntax::{ use syntax::{
ast::{self, HasAttrs, HasDocComments}, ast::{self, HasAttrs, HasDocComments},
AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T, AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
}; };
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion, ast_id_map::AstIdMap,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander, attrs::RawAttrs,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult, builtin_attr_macro::pseudo_derive_attr_expansion,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId, builtin_fn_macro::EagerExpander,
MacroDefKind, MacroFile, ProcMacroExpander, fixup::{self, SyntaxFixupUndoInfo},
hygiene::{apply_mark, SyntaxContextData, Transparency},
span::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
}; };
/// Total limit on the number of tokens produced by any macro invocation. /// Total limit on the number of tokens produced by any macro invocation.
@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0 /// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander { pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro, pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
pub def_site_token_map: mbe::TokenMap, pub transparency: Transparency,
} }
impl DeclarativeMacroExpander { impl DeclarativeMacroExpander {
pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> { pub fn expand(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
match self.mac.err() { match self.mac.err() {
Some(e) => ExpandResult::new( Some(e) => ExpandResult::new(
tt::Subtree::empty(), tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")), ExpandError::other(format!("invalid macro definition: {e}")),
), ),
None => self.mac.expand(tt).map_err(Into::into), None => self
.mac
.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
.map_err(Into::into),
} }
} }
pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId { pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
self.mac.map_id_down(token_id) match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
} }
pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
self.mac.map_id_up(token_id)
} }
} }
#[derive(Debug, Clone, Eq, PartialEq)] #[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander { pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>), DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`. /// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander), BuiltIn(BuiltinFnLikeExpander),
@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander), ProcMacro(ProcMacroExpander),
} }
// FIXME: Get rid of these methods
impl TokenExpander {
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
#[salsa::query_group(ExpandDatabaseStorage)] #[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase { pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>; fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached // This query is LRU cached
fn parse_macro_expansion( fn parse_macro_expansion(
&self, &self,
macro_file: MacroFile, macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>; ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all. /// reason why we use salsa at all.
@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental. /// to be incremental.
#[salsa::interned] #[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId; fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
/// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent] #[salsa::transparent]
fn setup_syntax_context_root(&self) -> ();
#[salsa::transparent]
fn dump_syntax_contexts(&self) -> String;
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg( fn macro_arg(
&self, &self,
id: MacroCallId, id: MacroCallId,
) -> ValueResult< ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
>;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg_node(
&self,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro. /// Fetches the expander for this macro.
#[salsa::transparent] #[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander; fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId, def_crate: CrateId,
id: AstId<ast::Macro>, id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>; ) -> Arc<DeclarativeMacroExpander>;
/// Expand macro call to a token tree.
// This query is LRU cached
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
#[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
fn include_expand(
&self,
arg_id: MacroCallId,
) -> Result<
(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
ExpandError,
>;
/// Special case of the previous query for procedural macros. We can't LRU /// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and /// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way. /// non-determinism breaks salsa in a very, very, very bad way.
@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self, &self,
macro_call: MacroCallId, macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>; ) -> ExpandResult<Box<[SyntaxError]>>;
}
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>; #[inline]
pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
}
pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
Arc::new(RealSpanMap::from_file(db, file_id))
} }
/// This expands the given macro call, but with different arguments. This is /// This expands the given macro call, but with different arguments. This is
@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken, token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> { ) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call); let loc = db.lookup_intern_macro_call(actual_macro_call);
let token_range = token_to_map.text_range();
let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args // Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, speculative_args); let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args); let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); fixups.append.retain(|it, _| match it {
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
speculative_args, speculative_args,
fixups.token_map, span_map,
fixups.next_id,
fixups.replace,
fixups.append, fixups.append,
); fixups.remove,
),
fixups.undo_info,
)
}
};
let (attr_arg, token_id) = match loc.kind { let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => { MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() { let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only // for pseudo-derive expansion we actually pass the attribute itself only
@ -210,59 +222,45 @@ pub fn expand_speculative(
}?; }?;
match attr.token_tree() { match attr.token_tree() {
Some(token_tree) => { Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax()); let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
tree.delimiter = tt::Delimiter::unspecified(); tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let shift = mbe::Shift::new(&tt); Some(tree)
shift.shift_all(&mut tree);
let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let range = token_range.checked_sub(attr_input_start)?;
let token_id = shift.shift(map.token_by_range(range)?);
Some(token_id)
} else {
None
};
(Some(tree), token_id)
} }
_ => (None, None), _ => None,
}
}
_ => (None, None),
};
let token_id = match token_id {
Some(token_id) => token_id,
// token wasn't inside an attribute input so it has to be in the general macro input
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
match loc.def.kind {
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
}
_ => token_id,
} }
} }
_ => None,
}; };
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args // Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead. // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind { let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => { MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified(); tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) let call_site = loc.span(db);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
call_site,
call_site,
call_site,
)
} }
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
} }
MacroDefKind::BuiltInDerive(expander, ..) => { MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here? // this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap(); let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(db, actual_macro_call, &adt, &spec_args_tmap) expander.expand(db, actual_macro_call, &adt, span_map)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
} }
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into), MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => { MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into) it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt), MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
}; };
let expand_to = macro_expand_to(db, actual_macro_call); let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to); let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node(); let syntax_node = node.syntax_node();
let token = rev_tmap let token = rev_tmap
.ranges_by_token(token_id, token_to_map.kind()) .ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token()) .filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| { .min_by_key(|t| {
// prefer tokens of the same kind and text // prefer tokens of the same kind and text
@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode { fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() { match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(), HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => { HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node() db.parse_macro_expansion(macro_file).value.0.syntax_node()
} }
@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
} }
} }
// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion( fn parse_macro_expansion(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_file: MacroFile, macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion"); let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id); let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let expand_to = loc.expand_to();
let expand_to = macro_expand_to(db, macro_file.macro_call_id); let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to); let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@ -333,51 +331,129 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_call_id: MacroCallId, macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> { ) -> ExpandResult<Box<[SyntaxError]>> {
db.parse_macro_expansion(MacroFile { macro_call_id }) db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice()) .map(|it| it.0.errors().to_vec().into_boxed_slice())
} }
fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
}
}
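Callers fetch the parse tree together with its span map and feed both into token-tree lowering; the pattern, as used by `macro_arg` and `decl_macro_expander` below:

let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let tt = mbe::syntax_node_to_token_tree(&root, map.as_ref());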
fn macro_arg( fn macro_arg(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
id: MacroCallId, id: MacroCallId,
) -> ValueResult< // FIXME: consider the following by putting fixup info into eager call info args
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>, // ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
Arc<Box<[SyntaxError]>>, ) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
> { let mismatched_delimiters = |arg: &SyntaxNode| {
let loc = db.lookup_intern_macro_call(id); let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() { let well_formed_tt =
return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default())))); matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
Some(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)]) as Box<[_]>))
} else {
None
} }
let ValueResult { value, err } = db.macro_arg_node(id);
let Some(arg) = value else {
return ValueResult { value: None, err };
}; };
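Restated as a standalone predicate (sketch; `SyntaxKind` and `T!` come from the `syntax` crate): an input like `m!(1, 2` starts with `(` but ends on a literal, fails the check, and is reported as an unbalanced token tree instead of being expanded.

fn is_balanced_tt(first: SyntaxKind, last: SyntaxKind) -> bool {
    matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']))
}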
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref())
.flatten()
{
ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
} else {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let node = SyntaxNode::new_root(arg); let syntax = match loc.kind {
let censor = censor_for_macro_input(&loc, &node); MacroCallKind::FnLike { ast_id, .. } => {
let mut fixups = fixup::fixup_syntax(&node); let node = &ast_id.to_ptr(db).to_node(&root);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); let offset = node.syntax().text_range().start();
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( match node.token_tree() {
&node, Some(tt) => {
fixups.token_map, let tt = tt.syntax();
fixups.next_id, if let Some(e) = mismatched_delimiters(tt) {
fixups.replace, return ValueResult::only_err(e);
}
tt.clone()
}
None => {
return ValueResult::only_err(Arc::new(Box::new([
SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
])));
}
}
}
MacroCallKind::Derive { ast_id, .. } => {
ast_id.to_ptr(db).to_node(&root).syntax().clone()
}
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
};
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, &syntax);
let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
&syntax,
map,
fixups.append, fixups.append,
); fixups.remove,
),
fixups.undo_info,
)
}
};
if loc.def.is_proc_macro() { if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included // proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = tt::Delimiter::unspecified(); tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
}
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
[] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
errors => ValueResult::new(
Some((Arc::new(tt), undo_info)),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok(Some((Arc::new(tt), undo_info)))
} }
let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
match err {
Some(err) => ValueResult::new(val, err),
None => ValueResult::ok(val),
} }
} }
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely: /// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped /// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped /// - attributes expect the invoking attribute to be stripped
@ -417,103 +493,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default() .unwrap_or_default()
} }
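Illustrative input (censoring per the doc comment above): when the `Bar` derive here is expanded, the earlier `#[derive(Foo)]` and the invoking `#[derive(Bar)]` both land in the censor set, while unrelated attributes survive:

#[derive(Foo)] // censored
#[derive(Bar)] // censored: the derive being expanded
#[repr(C)]     // kept
struct S;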
fn macro_arg_node(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
let err = || -> Arc<Box<[_]>> {
Arc::new(Box::new([SyntaxError::new_at_offset(
"invalid macro call".to_owned(),
syntax::TextSize::from(0),
)]))
};
let loc = db.lookup_intern_macro_call(id);
let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
} else {
loc.kind
.arg(db)
.and_then(|arg| ast::TokenTree::cast(arg.value))
.map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
};
match res {
Some(res) if res.errors().is_empty() => res.syntax_node(),
Some(res) => {
return ValueResult::new(
Some(res.syntax_node().green().into()),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(res.errors().to_vec().into_boxed_slice()),
);
}
None => return ValueResult::only_err(err()),
}
} else {
match loc.kind.arg(db) {
Some(res) => res.value,
None => return ValueResult::only_err(err()),
}
};
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)])));
}
}
ValueResult::ok(Some(arg.green().into()))
}
fn decl_macro_expander( fn decl_macro_expander(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
def_crate: CrateId, def_crate: CrateId,
id: AstId<ast::Macro>, id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> { ) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021; let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let (mac, def_site_token_map) = match id.to_node(db) { let (root, map) = parse_with_map(db, id.file_id);
ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() { let root = root.syntax_node();
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax()); let transparency = |node| {
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021); // ... would be nice to have the item tree here
(mac, def_site_token_map) let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
} match &*attrs
None => ( .iter()
mbe::DeclarativeMacro::from_err( .find(|it| {
mbe::ParseError::Expected("expected a token tree".into()), it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
is_2021, })?
), .token_tree_value()?
Default::default(), .token_trees
), {
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
"transparent" => Some(Transparency::Transparent),
"semitransparent" => Some(Transparency::SemiTransparent),
"opaque" => Some(Transparency::Opaque),
_ => None,
}, },
ast::Macro::MacroDef(macro_def) => match macro_def.body() { _ => None,
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
(mac, def_site_token_map)
} }
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
}; };
Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
} }
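When no `rustc_macro_transparency` attribute is present, two defaults fall out of the match above: `macro_rules!` definitions get semi-transparent hygiene, `macro` 2.0 definitions get opaque hygiene. As a sketch:

fn default_transparency(is_macro_rules: bool) -> Transparency {
    if is_macro_rules { Transparency::SemiTransparent } else { Transparency::Opaque }
}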
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander { fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@ -529,39 +569,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
} }
} }
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand"); let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind { let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id), MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => { MacroDefKind::BuiltInDerive(expander, ..) => {
let arg = db.macro_arg_node(id).value.unwrap(); let (root, map) = parse_with_map(db, loc.kind.file_id());
let root = root.syntax_node();
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
let node = ast_id.to_ptr(db).to_node(&root);
let node = SyntaxNode::new_root(arg); // FIXME: Use censoring
let censor = censor_for_macro_input(&loc, &node); let _censor = censor_for_macro_input(&loc, node.syntax());
let mut fixups = fixup::fixup_syntax(&node); expander.expand(db, macro_call_id, &node, map.as_ref())
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(node).unwrap();
let mut res = expander.expand(db, id, &adt, &tmap);
fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
res
} }
_ => { _ => {
let ValueResult { value, err } = db.macro_arg(id); let ValueResult { value, err } = db.macro_arg(macro_call_id);
let Some(macro_arg) = value else { let Some((macro_arg, undo_info)) = value else {
return ExpandResult { return ExpandResult {
value: Arc::new(tt::Subtree { value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED, delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(), token_trees: Vec::new(),
}), }),
// FIXME: We should make sure to enforce an invariant that invalid macro // FIXME: We should make sure to enforce an invariant that invalid macro
@ -570,12 +602,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}; };
}; };
let (arg, arg_tm, undo_info) = &*macro_arg; let arg = &*macro_arg;
let mut res = match loc.def.kind { match loc.def.kind {
MacroDefKind::Declarative(id) => { MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone()) db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
} }
MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here. // This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls. // Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through // That kind of expansion uses the ast id map of an eager macro's input though, which goes through
@ -583,11 +617,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas we just want to inspect the raw input. // will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here. // As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
let mut arg = arg.clone();
fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
return ExpandResult { return ExpandResult {
value: Arc::new(arg), value: macro_arg.clone(),
err: err.map(|err| { err: err.map(|err| {
let mut buf = String::new(); let mut buf = String::new();
for err in &**err { for err in &**err {
@ -600,13 +631,17 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}), }),
}; };
} }
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into), MacroDefKind::BuiltInEager(it, _) => {
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg), it.expand(db, macro_call_id, &arg).map_err(Into::into)
_ => unreachable!(), }
}; MacroDefKind::BuiltInAttr(it, _) => {
fixup::reverse_fixups(&mut res.value, arg_tm, undo_info); let mut res = it.expand(db, macro_call_id, &arg);
fixup::reverse_fixups(&mut res.value, &undo_info);
res res
} }
_ => unreachable!(),
}
}
}; };
if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() { if let Some(EagerCallInfo { error, .. }) = loc.eager.as_deref() {
@ -627,10 +662,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id); let loc = db.lookup_intern_macro_call(id);
let Some(macro_arg) = db.macro_arg(id).value else { let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult { return ExpandResult {
value: Arc::new(tt::Subtree { value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED, delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(), token_trees: Vec::new(),
}), }),
// FIXME: We should make sure to enforce an invariant that invalid macro // FIXME: We should make sure to enforce an invariant that invalid macro
@ -639,47 +674,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
}; };
}; };
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
let expander = match loc.def.kind { let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander, MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(), _ => unreachable!(),
}; };
let attr_arg = match &loc.kind { let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => { MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
let mut attr_args = attr_args.0.clone();
mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
_ => None, _ => None,
}; };
let ExpandResult { value: mut tt, err } = let call_site = loc.span(db);
expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref()); let ExpandResult { value: mut tt, err } = expander.expand(
db,
loc.def.krate,
loc.krate,
&macro_arg,
attr_arg,
// FIXME
call_site,
call_site,
// FIXME
call_site,
);
// Set a hard limit for the expanded tt // Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) { if let Err(value) = check_tt_count(&tt) {
return value; return value;
} }
fixup::reverse_fixups(&mut tt, arg_tm, undo_info); fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err } ExpandResult { value: Arc::new(tt), err }
} }
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
db.lookup_intern_macro_call(id).expand_to()
}
fn token_tree_to_syntax_node( fn token_tree_to_syntax_node(
tt: &tt::Subtree, tt: &tt::Subtree,
expand_to: ExpandTo, expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) { ) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to { let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts, ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems, ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@ -695,7 +727,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() { if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult { Err(ExpandResult {
value: Arc::new(tt::Subtree { value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED, delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![], token_trees: vec![],
}), }),
err: Some(ExpandError::other(format!( err: Some(ExpandError::other(format!(
@ -708,3 +740,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(()) Ok(())
} }
} }
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root());
}
fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
let mut s = String::from("Expansions:");
let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
let id = e.key;
let expn_data = e.value.as_ref().unwrap();
s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
id,
expn_data.kind.file_id(),
expn_data.call_site,
SyntaxContextId::ROOT, // FIXME expn_data.def_site,
expn_data.kind.descr(),
));
}
s.push_str("\n\nSyntaxContexts:\n");
let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
struct SyntaxContextDebug<'a>(
&'a dyn ExpandDatabase,
SyntaxContextId,
&'a SyntaxContextData,
);
impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.2.fancy_debug(self.1, self.0, f)
}
}
stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
}
s
}


@ -18,18 +18,17 @@
//! //!
//! //!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros> //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId; use base_db::{span::SyntaxContextId, CrateId};
use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use triomphe::Arc; use triomphe::Arc;
use crate::{ use crate::{
ast::{self, AstNode}, ast::{self, AstNode},
db::ExpandDatabase, db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath, mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind, span::SpanMapRef,
MacroCallLoc, MacroDefId, MacroDefKind, EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
}; };
pub fn expand_eager_macro_input( pub fn expand_eager_macro_input(
@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId, krate: CrateId,
macro_call: InFile<ast::MacroCall>, macro_call: InFile<ast::MacroCall>,
def: MacroDefId, def: MacroDefId,
call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> { ) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id); let ast_map = db.ast_id_map(macro_call.file_id);
@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate, krate,
eager: None, eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr }, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
call_site,
}); });
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } = let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file()); db.parse_macro_expansion(arg_id.as_macro_file());
// we need this map here as the expansion of the eager input fake file loses whitespace ...
let mut ws_mapping = FxHashMap::default(); let mut arg_map = ExpansionSpanMap::empty();
if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
}));
}
let ExpandResult { value: expanded_eager_input, err } = { let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur( eager_macro_recur(
db, db,
&Hygiene::new(db, macro_call.file_id), &arg_exp_map,
&mut arg_map,
TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()), InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate, krate,
call_site,
resolver, resolver,
) )
}; };
let err = parse_err.or(err); let err = parse_err.or(err);
if cfg!(debug_assertions) {
arg_map.finish();
}
let Some((expanded_eager_input, mapping)) = expanded_eager_input else { let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err }; return ExpandResult { value: None, err };
}; };
let (mut subtree, expanded_eager_input_token_map) = let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
mbe::syntax_node_to_token_tree(&expanded_eager_input);
let og_tmap = if let Some(tt) = macro_call.value.token_tree() { subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let mut ids_used = FxHashSet::default();
let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
// The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
// so we need to remap them to the original input of the eager macro.
subtree.visit_ids(&mut |id| {
// Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
if let Some(range) = expanded_eager_input_token_map
.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
{
// remap from expanded eager input to eager input expansion
if let Some(og_range) = mapping.get(&range) {
// remap from eager input expansion to original eager input
if let Some(&og_range) = ws_mapping.get(og_range) {
if let Some(og_token) = og_tmap.token_by_range(og_range) {
ids_used.insert(og_token);
return og_token;
}
}
}
}
tt::TokenId::UNSPECIFIED
});
og_tmap.filter(|id| ids_used.contains(&id));
og_tmap
} else {
Default::default()
};
subtree.delimiter = crate::tt::Delimiter::unspecified();
let loc = MacroCallLoc { let loc = MacroCallLoc {
def, def,
krate, krate,
eager: Some(Box::new(EagerCallInfo { eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
arg: Arc::new((subtree, og_tmap)),
arg_id,
error: err.clone(),
})),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to }, kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
call_site,
}; };
ExpandResult { value: Some(db.intern_macro_call(loc)), err } ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId, def: &MacroDefId,
macro_call: InFile<ast::MacroCall>, macro_call: InFile<ast::MacroCall>,
krate: CrateId, krate: CrateId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> { call_site: SyntaxContextId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value); let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value); let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id); let ast_id = macro_call.with_value(ast_id);
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }); let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file(); let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file) db.parse_macro_expansion(macro_file)
@ -146,57 +116,59 @@ fn lazy_expand(
fn eager_macro_recur( fn eager_macro_recur(
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
hygiene: &Hygiene, span_map: &ExpansionSpanMap,
expanded_map: &mut ExpansionSpanMap,
mut offset: TextSize,
curr: InFile<SyntaxNode>, curr: InFile<SyntaxNode>,
krate: CrateId, krate: CrateId,
call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>, macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> { ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update(); let original = curr.value.clone_for_update();
let mut mapping = FxHashMap::default();
let mut replacements = Vec::new(); let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions // FIXME: We only report a single error inside of eager expansions
let mut error = None; let mut error = None;
let mut offset = 0i32;
let apply_offset = |it: TextSize, offset: i32| {
TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
};
let mut children = original.preorder_with_tokens(); let mut children = original.preorder_with_tokens();
// Collect replacement // Collect replacement
while let Some(child) = children.next() { while let Some(child) = children.next() {
let WalkEvent::Enter(child) = child else { continue };
let call = match child { let call = match child {
syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) { WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => { Some(it) => {
children.skip_subtree(); children.skip_subtree();
it it
} }
None => continue, _ => continue,
}, },
syntax::NodeOrToken::Token(t) => { WalkEvent::Enter(_) => continue,
mapping.insert( WalkEvent::Leave(child) => {
TextRange::new( if let SyntaxElement::Token(t) = child {
apply_offset(t.text_range().start(), offset), let start = t.text_range().start();
apply_offset(t.text_range().end(), offset), offset += t.text_range().len();
), expanded_map.push(offset, span_map.span_at(start));
t.text_range(), }
);
continue; continue;
} }
}; };
let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
let def = match call
.path()
.and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
{
Some(path) => match macro_resolver(path.clone()) { Some(path) => match macro_resolver(path.clone()) {
Some(def) => def, Some(def) => def,
None => { None => {
error = error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db)))); Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
offset += call.syntax().text_range().len();
continue; continue;
} }
}, },
None => { None => {
error = Some(ExpandError::other("malformed macro invocation")); error = Some(ExpandError::other("malformed macro invocation"));
offset += call.syntax().text_range().len();
continue; continue;
} }
}; };
@ -207,29 +179,22 @@ fn eager_macro_recur(
krate, krate,
curr.with_value(call.clone()), curr.with_value(call.clone()),
def, def,
call_site,
macro_resolver, macro_resolver,
); );
match value { match value {
Some(call_id) => { Some(call_id) => {
let ExpandResult { value, err: err2 } = let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file()); db.parse_macro_expansion(call_id.as_macro_file());
if let Some(tt) = call.token_tree() { map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
let call_tt_start = tt.syntax().text_range().start();
let call_start =
apply_offset(call.syntax().text_range().start(), offset);
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
value
.1
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
let syntax_node = parse.syntax_node();
ExpandResult { ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()), value: Some((
syntax_node.clone_for_update(),
offset + syntax_node.text_range().len(),
)),
err: err.or(err2), err: err.or(err2),
} }
} }
@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..) | MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => { | MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } = let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate); lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
Some(db.decl_macro_expander(def.krate, ast_id))
} else {
None
};
// replace macro inside // replace macro inside
let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur( let ExpandResult { value, err: error } = eager_macro_recur(
db, db,
&hygiene, &tm,
expanded_map,
offset,
// FIXME: We discard parse errors here // FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()), parse.as_ref().map(|it| it.syntax_node()),
krate, krate,
call_site,
macro_resolver, macro_resolver,
); );
let err = err.or(error); let err = err.or(error);
if let Some(tt) = call.token_tree() { ExpandResult { value, err }
let call_tt_start = tt.syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_tt, arg_map, _)) = parse
.file_id
.macro_file()
.and_then(|id| db.macro_arg(id.macro_call_id).value)
.as_deref()
{
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
tm.first_range_by_token(
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
syntax::SyntaxKind::TOMBSTONE,
)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
// FIXME: Do we need to re-use _m here?
ExpandResult { value: value.map(|(n, _m)| n), err }
} }
}; };
if err.is_some() { if err.is_some() {
@ -288,16 +231,18 @@ fn eager_macro_recur(
} }
// check if the whole original syntax is replaced // check if the whole original syntax is replaced
if call.syntax() == &original { if call.syntax() == &original {
return ExpandResult { value: value.zip(Some(mapping)), err: error }; return ExpandResult { value, err: error };
} }
if let Some(insert) = value { match value {
offset += u32::from(insert.text_range().len()) as i32 Some((insert, new_offset)) => {
- u32::from(call.syntax().text_range().len()) as i32;
replacements.push((call, insert)); replacements.push((call, insert));
offset = new_offset;
}
None => offset += call.syntax().text_range().len(),
} }
} }
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new)); replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
ExpandResult { value: Some((original, mapping)), err: error } ExpandResult { value: Some((original, offset)), err: error }
} }
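The rewritten eager_macro_recur drops the old range-to-range hash map in favour of a running `offset` plus an ExpansionSpanMap built in output order: the walker advances `offset` past every token it keeps and records the source span at each token boundary. A minimal sketch of that bookkeeping, with simplified stand-ins for SpanData/ExpansionSpanMap:

// Sketch only: `Span` and `SpanMapSketch` are simplified stand-ins.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32);

// Entries are (end offset in the output, span of the source), in output order.
struct SpanMapSketch(Vec<(u32, Span)>);

impl SpanMapSketch {
    fn push(&mut self, end_offset: u32, span: Span) {
        self.0.push((end_offset, span));
    }
    // The span covering `offset` is the first entry ending past it
    // (panics past the last recorded offset -- fine for a sketch).
    fn span_at(&self, offset: u32) -> Span {
        self.0[self.0.partition_point(|&(end, _)| end <= offset)].1
    }
}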


@ -0,0 +1,340 @@
//! Things to wrap other things in file ids.
use std::iter;
use base_db::{
span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
FileId, FileRange,
};
use either::Either;
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange};
use crate::{db, ExpansionInfo, HirFileIdExt as _};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFileWrapper<FileKind, T> {
pub file_id: FileKind,
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InRealFile<T> = InFileWrapper<FileId, T>;
impl<FileKind, T> InFileWrapper<FileKind, T> {
pub fn new(file_id: FileKind, value: T) -> Self {
Self { file_id, value }
}
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, f(self.value))
}
}
impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, value)
}
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
self.with_value(&self.value)
}
}
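A small usage sketch of the wrapper (hypothetical values, using only the methods defined above):

// Works for any Copy file-id kind (HirFileId, MacroFileId, FileId).
fn demo<FileKind: Copy>(file_id: FileKind) {
    let name = InFileWrapper::new(file_id, "foo");
    let sibling = name.with_value(42u32); // reuses the same file_id
    let len = name.map(|it| it.len());    // InFileWrapper<FileKind, usize>
    let _ = (sibling, len);
}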
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
self.with_value(self.value.clone())
}
}
impl<T> From<InMacroFile<T>> for InFile<T> {
fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
impl<T> From<InRealFile<T>> for InFile<T> {
fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
// region:transpose impls
impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
Some(InFileWrapper::new(self.file_id, self.value?))
}
}
impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
match self.value {
Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
}
}
}
// endregion:transpose impls
trait FileIdToSyntax: Copy {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
}
impl FileIdToSyntax for FileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse(self).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
}
impl FileIdToSyntax for HirFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_or_expand(self)
}
}
#[allow(private_bounds)]
impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
FileIdToSyntax::file_syntax(self.file_id, db)
}
}
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}
// region:specific impls
impl InFile<&SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => node.file_id.call_node(db),
})
}
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let parent_node = node.file_id.call_node(db)?;
if node.file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
} else {
Some(parent_node)
}
}
};
iter::successors(succ(&self.cloned()), succ)
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
}
}
}
pub fn original_syntax_node(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !self.file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) =
ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind();
// FIXME: This heuristic is brittle and, with the right macro, may select completely unrelated nodes.
let value = anc.ancestors().find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value))
}
}
impl InFile<SyntaxToken> {
pub fn upmap_once(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<smallvec::SmallVec<[TextRange; 1]>>> {
Some(self.file_id.expansion_info(db)?.map_range_up_once(db, self.value.text_range()))
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.map_token_range_up(db, self.value.text_range());
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return range;
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.map_token_range_up(db, self.value.text_range());
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
Some(range)
} else {
None
}
}
}
}
}
impl InFile<TextRange> {
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
let (range, _ctxt) = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(m) => {
ExpansionInfo::new(db, m).map_token_range_up(db, self.value)
}
};
range
}
}
impl<N: AstNode> InFile<N> {
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !self.file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
.map_node_range_up(db, self.value.syntax().text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and, with the right macro, may select completely unrelated nodes.
let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(file_id, value))
}
}


@ -1,111 +1,122 @@
//! To make attribute macros work reliably when typing, we need to take care to //! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them. //! fix up syntax errors in the code we're passing to them.
use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap}; use base_db::{
use rustc_hash::FxHashMap; span::{ErasedFileAstId, SpanAnchor, SpanData},
FileId,
};
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec; use smallvec::SmallVec;
use syntax::{ use syntax::{
ast::{self, AstNode, HasLoopBody}, ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use tt::Spacing;
use crate::{
span::SpanMapRef,
tt::{Ident, Leaf, Punct, Subtree},
}; };
use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes /// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to /// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map. /// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)] #[derive(Debug, Default)]
pub(crate) struct SyntaxFixups { pub(crate) struct SyntaxFixups {
pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>, pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>, pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo, pub(crate) undo_info: SyntaxFixupUndoInfo,
pub(crate) token_map: TokenMap,
pub(crate) next_id: u32,
} }
/// This is the information needed to reverse the fixups. /// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)] #[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo { pub struct SyntaxFixupUndoInfo {
original: Box<[Subtree]>, // FIXME: ThinArc<[Subtree]>
original: Option<Arc<Box<[Subtree]>>>,
} }
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0); impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { // censoring -> just don't convert the node
// replacement -> censor + append
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later
pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default(); let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut replace = FxHashMap::<SyntaxElement, _>::default(); let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder(); let mut preorder = node.preorder();
let mut original = Vec::new(); let mut original = Vec::new();
let mut token_map = TokenMap::default(); let dummy_range = TextRange::empty(TextSize::new(0));
let mut next_id = 0; // we use a file id of `FileId(!0)` to signal a fake node, and the text range's start offset as
while let Some(event) = preorder.next() { // the index into the replacement vec but only if the end points to !0
let node = match event { let dummy_anchor =
syntax::WalkEvent::Enter(node) => node, SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)) };
syntax::WalkEvent::Leave(_) => continue, let fake_span = |range| SpanData {
range: dummy_range,
anchor: dummy_anchor,
ctx: span_map.span_for_range(range).ctx,
}; };
while let Some(event) = preorder.next() {
let syntax::WalkEvent::Enter(node) = event else { continue };
let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) { if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid // the node contains an error node, we have to completely replace it by something valid
let (original_tree, new_tmap, new_next_id) = let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
mbe::syntax_node_to_token_tree_with_modifications(
&node,
mem::take(&mut token_map),
next_id,
Default::default(),
Default::default(),
);
token_map = new_tmap;
next_id = new_next_id;
let idx = original.len() as u32; let idx = original.len() as u32;
original.push(original_tree); original.push(original_tree);
let replacement = SyntheticToken { let replacement = Leaf::Ident(Ident {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
range: node.text_range(), span: SpanData {
id: SyntheticTokenId(idx), range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
}; anchor: dummy_anchor,
replace.insert(node.clone().into(), vec![replacement]); ctx: span_map.span_for_range(node_range).ctx,
},
});
append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree(); preorder.skip_subtree();
continue; continue;
} }
// In some other situations, we can fix things by just appending some tokens. // In some other situations, we can fix things by just appending some tokens.
let end_range = TextRange::empty(node.text_range().end());
match_ast! { match_ast! {
match node { match node {
ast::FieldExpr(it) => { ast::FieldExpr(it) => {
if it.name_ref().is_none() { if it.name_ref().is_none() {
// incomplete field access: some_expr.| // incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { Leaf::Ident(Ident {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
range: end_range, span: fake_span(node_range),
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
ast::ExprStmt(it) => { ast::ExprStmt(it) => {
if it.semicolon_token().is_none() { if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::SEMICOLON, char: ';',
text: ";".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range),
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
ast::LetStmt(it) => { ast::LetStmt(it) => {
if it.semicolon_token().is_none() { if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::SEMICOLON, char: ';',
text: ";".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
@ -117,28 +128,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue, None => continue,
}; };
append.insert(if_token.into(), vec![ append.insert(if_token.into(), vec![
SyntheticToken { Leaf::Ident(Ident {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
if it.then_branch().is_none() { if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { // FIXME: This should be a subtree, no?
kind: SyntaxKind::L_CURLY, Leaf::Punct(Punct {
text: "{".into(), char: '{',
range: end_range, spacing: Spacing::Alone,
id: EMPTY_ID, span: fake_span(node_range)
}, }),
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::R_CURLY, char: '}',
text: "}".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
@ -150,46 +158,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue, None => continue,
}; };
append.insert(while_token.into(), vec![ append.insert(while_token.into(), vec![
SyntheticToken { Leaf::Ident(Ident {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
if it.loop_body().is_none() { if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { // FIXME: This should be a subtree, no?
kind: SyntaxKind::L_CURLY, Leaf::Punct(Punct {
text: "{".into(), char: '{',
range: end_range, spacing: Spacing::Alone,
id: EMPTY_ID, span: fake_span(node_range)
}, }),
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::R_CURLY, char: '}',
text: "}".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
ast::LoopExpr(it) => { ast::LoopExpr(it) => {
if it.loop_body().is_none() { if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { // FIXME: This should be a subtree, no?
kind: SyntaxKind::L_CURLY, Leaf::Punct(Punct {
text: "{".into(), char: '{',
range: end_range, spacing: Spacing::Alone,
id: EMPTY_ID, span: fake_span(node_range)
}, }),
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::R_CURLY, char: '}',
text: "}".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
@ -201,29 +205,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue None => continue
}; };
append.insert(match_token.into(), vec![ append.insert(match_token.into(), vec![
SyntheticToken { Leaf::Ident(Ident {
kind: SyntaxKind::IDENT,
text: "__ra_fixup".into(), text: "__ra_fixup".into(),
range: end_range, span: fake_span(node_range)
id: EMPTY_ID }),
},
]); ]);
} }
if it.match_arm_list().is_none() { if it.match_arm_list().is_none() {
// No match arms // No match arms
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { // FIXME: This should be a subtree, no?
kind: SyntaxKind::L_CURLY, Leaf::Punct(Punct {
text: "{".into(), char: '{',
range: end_range, spacing: Spacing::Alone,
id: EMPTY_ID, span: fake_span(node_range)
}, }),
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::R_CURLY, char: '}',
text: "}".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
@ -234,10 +235,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}; };
let [pat, in_token, iter] = [ let [pat, in_token, iter] = [
(SyntaxKind::UNDERSCORE, "_"), "_",
(SyntaxKind::IN_KW, "in"), "in",
(SyntaxKind::IDENT, "__ra_fixup") "__ra_fixup"
].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID}); ].map(|text|
Leaf::Ident(Ident {
text: text.into(),
span: fake_span(node_range)
}),
);
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]); append.insert(for_token.into(), vec![pat, in_token, iter]);
@ -248,18 +254,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() { if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![ append.insert(node.clone().into(), vec![
SyntheticToken { // FIXME: This should be a subtree, no?
kind: SyntaxKind::L_CURLY, Leaf::Punct(Punct {
text: "{".into(), char: '{',
range: end_range, spacing: Spacing::Alone,
id: EMPTY_ID, span: fake_span(node_range)
}, }),
SyntheticToken { Leaf::Punct(Punct {
kind: SyntaxKind::R_CURLY, char: '}',
text: "}".into(), spacing: Spacing::Alone,
range: end_range, span: fake_span(node_range)
id: EMPTY_ID, }),
},
]); ]);
} }
}, },
@ -267,12 +272,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
} }
} }
} }
let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups { SyntaxFixups {
append, append,
replace, remove,
token_map, undo_info: SyntaxFixupUndoInfo {
next_id, original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() }, },
} }
} }
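The fake spans above encode two separate facts: the dummy anchor (FileId(!0)) marks a token as synthetic, and for error replacements the index into `original` rides in `range.start()` while `range.end() == !0` distinguishes it from plain filler. A stand-in sketch of that encoding, with bare u32 pairs instead of FileId/TextRange:

// Sketch only: u32 pairs stand in for the real FileId/TextRange types.
const DUMMY: u32 = !0;

fn fake_filler() -> (u32, (u32, u32)) {
    (DUMMY, (0, 0)) // dummy anchor + empty range: dropped by reverse_fixups
}

fn replacement_marker(idx: u32) -> (u32, (u32, u32)) {
    (DUMMY, (idx, DUMMY)) // end == !0 means "look up undo_info.original[idx]"
}

fn decode_replacement(anchor: u32, range: (u32, u32)) -> Option<u32> {
    (anchor == DUMMY && range.1 == DUMMY).then_some(range.0)
}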
@ -288,30 +294,32 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c)) has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
} }
pub(crate) fn reverse_fixups( pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
tt: &mut Subtree, let Some(undo_info) = undo_info.original.as_deref() else { return };
token_map: &TokenMap, let undo_info = &**undo_info;
undo_info: &SyntaxFixupUndoInfo, reverse_fixups_(tt, undo_info);
) { }
fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees); let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts tt.token_trees = tts
.into_iter() .into_iter()
// delete all fake nodes
.filter(|tt| match tt { .filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => { tt::TokenTree::Leaf(leaf) => {
token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID) let span = leaf.span();
} span.anchor.file_id != FileId(!0) || span.range.end() == TextSize::new(!0)
tt::TokenTree::Subtree(st) => {
token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
} }
tt::TokenTree::Subtree(_) => true,
}) })
.flat_map(|tt| match tt { .flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => { tt::TokenTree::Subtree(mut tt) => {
reverse_fixups(&mut tt, token_map, undo_info); reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()]) SmallVec::from_const([tt.into()])
} }
tt::TokenTree::Leaf(leaf) => { tt::TokenTree::Leaf(leaf) => {
if let Some(id) = token_map.synthetic_token_id(*leaf.span()) { if leaf.span().anchor.file_id == FileId(!0) {
let original = undo_info.original[id.0 as usize].clone(); let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible { if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into() original.token_trees.into()
} else { } else {
@ -327,11 +335,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use base_db::FileId;
use expect_test::{expect, Expect}; use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::tt; use crate::{
fixup::reverse_fixups,
use super::reverse_fixups; span::{RealSpanMap, SpanMap},
tt,
};
// The following three functions are only meant to check partial structural equivalence of // The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`. // `TokenTree`s, see the last assertion in `check()`.
@ -361,13 +373,13 @@ mod tests {
#[track_caller] #[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) { fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture); let parsed = syntax::SourceFile::parse(ra_fixture);
let fixups = super::fixup_syntax(&parsed.syntax_node()); let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId(0))));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(), &parsed.syntax_node(),
fixups.token_map, span_map.as_ref(),
fixups.next_id,
fixups.replace,
fixups.append, fixups.append,
fixups.remove,
); );
let actual = format!("{tt}\n"); let actual = format!("{tt}\n");
@ -383,14 +395,15 @@ mod tests {
parse.syntax_node() parse.syntax_node()
); );
reverse_fixups(&mut tt, &tmap, &fixups.undo_info); reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input // the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing. // modulo token IDs and `Punct`s' spacing.
let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node()); let original_as_tt =
mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!( assert!(
check_subtree_eq(&tt, &original_as_tt), check_subtree_eq(&tt, &original_as_tt),
"different token tree: {tt:?},\n{original_as_tt:?}" "different token tree:\n{tt:?}\n\n{original_as_tt:?}"
); );
} }


@ -2,252 +2,241 @@
//! //!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at //! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`. //! this moment, this is horribly incomplete and handles only `$crate`.
use base_db::CrateId; use std::iter;
use db::TokenExpander;
use either::Either;
use mbe::Origin;
use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use crate::{ use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
};
#[derive(Clone, Debug)] use crate::db::ExpandDatabase;
pub struct Hygiene {
frames: Option<HygieneFrames>, #[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,
pub parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
pub opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
pub opaque_and_semitransparent: SyntaxContextId,
} }
impl Hygiene { impl std::fmt::Debug for SyntaxContextData {
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) } f.debug_struct("SyntaxContextData")
.field("outer_expn", &self.outer_expn)
.field("outer_transparency", &self.outer_transparency)
.field("parent", &self.parent)
.field("opaque", &self.opaque)
.field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
.finish()
}
} }
pub fn new_unhygienic() -> Hygiene { impl SyntaxContextData {
Hygiene { frames: None } pub fn root() -> Self {
SyntaxContextData {
outer_expn: None,
outer_transparency: Transparency::Opaque,
parent: SyntaxContextId::ROOT,
opaque: SyntaxContextId::ROOT,
opaque_and_semitransparent: SyntaxContextId::ROOT,
}
} }
// FIXME: this should just return name pub fn fancy_debug(
pub fn name_ref_to_name( self,
&self, self_id: SyntaxContextId,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
name_ref: ast::NameRef, f: &mut std::fmt::Formatter<'_>,
) -> Either<Name, CrateId> { ) -> std::fmt::Result {
if let Some(frames) = &self.frames { write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
if name_ref.text() == "$crate" { match self.outer_expn {
if let Some(krate) = frames.root_crate(db, name_ref.syntax()) { Some(id) => {
return Either::Right(krate); write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
} }
None => write!(f, "root")?,
}
write!(f, ", {:?})", self.outer_transparency)
} }
} }
Either::Left(name_ref.as_name()) /// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
} }
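As a plain-Rust illustration of SemiTransparent hygiene (an example, not part of this diff): a local introduced by a `macro_rules!` expansion resolves at the definition site, so it can neither shadow nor collide with call-site names.

macro_rules! inc {
    ($x:ident) => {
        let hidden = 1; // def-site local: hygienic, invisible at the call site
        $x += hidden;
    };
}

fn main() {
    let mut x = 0;
    let hidden = 10; // untouched by the macro's `hidden`
    inc!(x);
    assert_eq!((x, hidden), (1, 10));
}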
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> { pub fn span_with_def_site_ctxt(
let mut token = path.syntax().first_token()?.text_range(); db: &dyn ExpandDatabase,
let frames = self.frames.as_ref()?; span: SpanData,
let mut current = &frames.0; expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
}
loop { pub fn span_with_call_site_ctxt(
let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?; db: &dyn ExpandDatabase,
if origin == Origin::Def { span: SpanData,
return if current.local_inner { expn_id: MacroCallId,
frames.root_crate(db, path.syntax()) ) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
}
pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
}
fn span_with_ctxt_from_mark(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
transparency: Transparency,
) -> SpanData {
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
pub(super) fn apply_mark(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
) -> SyntaxContextId {
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else { } else {
None call_site_ctxt.normalize_to_macro_rules(db)
};
}
current = current.call_site.as_ref()?;
token = mapped.value;
}
}
}
#[derive(Clone, Debug)]
struct HygieneFrames(Arc<HygieneFrame>);
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HygieneFrame {
expansion: Option<HygieneInfo>,
// Indicate this is a local inner macro
local_inner: bool,
krate: Option<CrateId>,
call_site: Option<Arc<HygieneFrame>>,
def_site: Option<Arc<HygieneFrame>>,
}
impl HygieneFrames {
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
while let Some((mapped, origin)) =
current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
{
result = current.krate;
let site = match origin {
Origin::Def => &current.def_site,
Origin::Call => &current.call_site,
}; };
let site = match site { if call_site_ctxt.is_root() {
None => break, return apply_mark_internal(db, ctxt, Some(call_id), transparency);
Some(it) => it,
};
current = site.clone();
token = mapped.value;
} }
result // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
// macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
//
// In this case, the tokens from the macros 1.0 definition inherit the hygiene
// at their invocation. That is, we pretend that the macros 1.0 definition
// was defined at its invocation (i.e., inside the macros 2.0 definition)
// so that the macros 2.0 definition remains hygienic.
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
} }
apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
} }
#[derive(Debug, Clone, PartialEq, Eq)] fn apply_mark_internal(
struct HygieneInfo {
file: MacroFile,
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
}
impl HygieneInfo {
fn map_ident_up(
&self,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
token: TextRange, ctxt: SyntaxContextId,
) -> Option<(InFile<TextRange>, Origin)> { call_id: Option<MacroCallId>,
let token_id = self.exp_map.token_by_range(token)?; transparency: Transparency,
let (mut token_id, origin) = self.macro_def.map_id_up(token_id); ) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
let loc = db.lookup_intern_macro_call(self.file.macro_call_id); if transparency >= Transparency::Opaque {
let parent = opaque;
let new_opaque = SyntaxContextId::SELF_REF;
// But we can't just grab the to-be-allocated ID either, as that would not deduplicate
// things!
// So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
});
}
let (token_map, tt) = match &loc.kind { if transparency >= Transparency::SemiTransparent {
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) { let parent = opaque_and_semitransparent;
Some(unshifted) => { let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
token_id = unshifted; opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
(&attr_args.1, self.attr_input_or_mac_def_start?) outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
});
} }
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
mbe::Origin::Call => {
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
}
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
},
};
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?; let parent = ctxt;
Some((tt.with_value(range + tt.value), origin)) db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent,
})
}
pub trait SyntaxContextExt {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
match n {
SyntaxContextId::SELF_REF => p,
_ => n,
} }
} }
fn make_hygiene_info( impl SyntaxContextExt for SyntaxContextId {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent
}
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(self);
(data.outer_expn, data.outer_transparency)
}
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
let mut marks = marks_rev(self, db).collect::<Vec<_>>();
marks.reverse();
marks
}
}
// FIXME: Make this a SyntaxContextExt method once we have RPIT
pub fn marks_rev(
ctxt: SyntaxContextId,
db: &dyn ExpandDatabase, db: &dyn ExpandDatabase,
macro_file: MacroFile, ) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
loc: &MacroCallLoc, iter::successors(Some(ctxt), move |&mark| {
) -> HygieneInfo { Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
let def = loc.def.ast_id().left().and_then(|id| { })
let def_tt = match id.to_node(db) { .map(|ctx| ctx.outer_mark(db))
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
});
HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
}
}
impl HygieneFrame {
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
}
MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
MacroDefKind::BuiltInAttr(..) => (info, None, false),
MacroDefKind::BuiltInDerive(..) => (info, None, false),
MacroDefKind::BuiltInEager(..) => (info, None, false),
MacroDefKind::ProcMacro(..) => (info, None, false),
}
}
};
let Some((info, calling_file)) = info else {
return HygieneFrame {
expansion: None,
local_inner,
krate,
call_site: None,
def_site: None,
};
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
let call_site = Some(db.hygiene_frame(calling_file));
HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
}
} }

File diff suppressed because it is too large.


@ -7,11 +7,11 @@ use std::{
 use crate::{
     db::ExpandDatabase,
-    hygiene::Hygiene,
-    name::{known, Name},
+    hygiene::{marks_rev, SyntaxContextExt, Transparency},
+    name::{known, AsName, Name},
+    span::SpanMapRef,
 };
-use base_db::CrateId;
-use either::Either;
+use base_db::{span::SyntaxContextId, CrateId};
 use smallvec::SmallVec;
 use syntax::{ast, AstNode};
@ -38,6 +38,7 @@ pub enum PathKind {
     Crate,
     /// Absolute path (::foo)
     Abs,
+    // FIXME: Remove this
     /// `$crate` from macro expansion
     DollarCrate(CrateId),
 }
@ -46,9 +47,9 @@ impl ModPath {
     pub fn from_src(
         db: &dyn ExpandDatabase,
         path: ast::Path,
-        hygiene: &Hygiene,
+        span_map: SpanMapRef<'_>,
     ) -> Option<ModPath> {
-        convert_path(db, None, path, hygiene)
+        convert_path(db, None, path, span_map)
     }

     pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@ -193,34 +194,37 @@ fn convert_path(
     db: &dyn ExpandDatabase,
     prefix: Option<ModPath>,
     path: ast::Path,
-    hygiene: &Hygiene,
+    span_map: SpanMapRef<'_>,
 ) -> Option<ModPath> {
     let prefix = match path.qualifier() {
-        Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
+        Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
         None => prefix,
     };

     let segment = path.segment()?;
     let mut mod_path = match segment.kind()? {
         ast::PathSegmentKind::Name(name_ref) => {
-            match hygiene.name_ref_to_name(db, name_ref) {
-                Either::Left(name) => {
-                    // no type args in use
-                    let mut res = prefix.unwrap_or_else(|| {
-                        ModPath::from_kind(
-                            segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
-                        )
-                    });
-                    res.segments.push(name);
-                    res
-                }
-                Either::Right(crate_id) => {
-                    return Some(ModPath::from_segments(
-                        PathKind::DollarCrate(crate_id),
-                        iter::empty(),
-                    ))
-                }
-            }
+            if name_ref.text() == "$crate" {
+                if prefix.is_some() {
+                    return None;
+                }
+                ModPath::from_kind(
+                    resolve_crate_root(
+                        db,
+                        span_map.span_for_range(name_ref.syntax().text_range()).ctx,
+                    )
+                    .map(PathKind::DollarCrate)
+                    .unwrap_or(PathKind::Crate),
+                )
+            } else {
+                let mut res = prefix.unwrap_or_else(|| {
+                    ModPath::from_kind(
+                        segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
+                    )
+                });
+                res.segments.push(name_ref.as_name());
+                res
+            }
         }
         ast::PathSegmentKind::SelfTypeKw => {
             if prefix.is_some() {
@ -261,8 +265,14 @@ fn convert_path(
     // We follow what it did anyway :)
     if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
         if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
-            if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
-                mod_path.kind = PathKind::DollarCrate(crate_id);
+            let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
+            if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
+                if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
+                    mod_path.kind = match resolve_crate_root(db, syn_ctx) {
+                        Some(crate_root) => PathKind::DollarCrate(crate_root),
+                        None => PathKind::Crate,
+                    }
+                }
             }
         }
     }
@ -270,6 +280,29 @@ fn convert_path(
     Some(mod_path)
 }

+pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
+    // When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
+    // we don't want to pretend that the `macro_rules!` definition is in the `macro`
+    // as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
+    // FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
+    // definitions actually produced by `macro` and `macro` definitions produced by
+    // `macro_rules!`, but at least such configurations are not stable yet.
+    ctxt = ctxt.normalize_to_macro_rules(db);
+    let mut iter = marks_rev(ctxt, db).peekable();
+    let mut result_mark = None;
+    // Find the last opaque mark from the end if it exists.
+    while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
+        result_mark = Some(mark);
+        iter.next();
+    }
+    // Then find the last semi-transparent mark from the end if it exists.
+    while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
+        result_mark = Some(mark);
+    }
+
+    result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
+}
+
 pub use crate::name as __name;

 #[macro_export]
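
For intuition, the mark walk in `resolve_crate_root` can be modeled with a plain slice standing in for the interned `SyntaxContextId` chain. This is a sketch only: a "mark" here is an `Option<u32>` call id paired with its `Transparency`, in the order the real `marks_rev` iterator yields them, and none of these names are the crate's API.

#[derive(Clone, Copy)]
#[allow(dead_code)]
enum Transparency { Transparent, SemiTransparent, Opaque }

/// Mirrors the two loops in `resolve_crate_root`: skim the leading run of
/// opaque marks, then keep the last mark of the following semi-transparent run.
fn pick_dollar_crate_mark(marks: &[(Option<u32>, Transparency)]) -> Option<u32> {
    let mut iter = marks.iter().copied().peekable();
    let mut result_mark = None;
    while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
        result_mark = Some(mark);
        iter.next();
    }
    while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
        result_mark = Some(mark);
    }
    result_mark.flatten()
}

fn main() {
    use Transparency::*;
    // An opaque mark (from a `macro` definition) followed by a semi-transparent
    // one (from a `macro_rules!` expansion): the semi-transparent mark wins,
    // so `$crate` resolves to the defining crate of macro call 7.
    assert_eq!(pick_dollar_crate_mark(&[(Some(3), Opaque), (Some(7), SemiTransparent)]), Some(7));
}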

View File

@ -470,6 +470,7 @@ pub mod known {
     pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
     pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+    pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");

 #[macro_export]
 macro_rules! name {

View File

@ -1,6 +1,6 @@
 //! Proc Macro Expander stub
-use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
+use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
 use stdx::never;

 use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@ -33,11 +33,15 @@ impl ProcMacroExpander {
         calling_crate: CrateId,
         tt: &tt::Subtree,
         attr_arg: Option<&tt::Subtree>,
+        def_site: SpanData,
+        call_site: SpanData,
+        mixed_site: SpanData,
     ) -> ExpandResult<tt::Subtree> {
         match self.proc_macro_id {
-            ProcMacroId(DUMMY_ID) => {
-                ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
-            }
+            ProcMacroId(DUMMY_ID) => ExpandResult::new(
+                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+                ExpandError::UnresolvedProcMacro(def_crate),
+            ),
             ProcMacroId(id) => {
                 let proc_macros = db.proc_macros();
                 let proc_macros = match proc_macros.get(&def_crate) {
@ -45,7 +49,7 @@ impl ProcMacroExpander {
                     Some(Err(_)) | None => {
                         never!("Non-dummy expander even though there are no proc macros");
                         return ExpandResult::new(
-                            tt::Subtree::empty(),
+                            tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                             ExpandError::other("Internal error"),
                         );
                     }
@ -59,7 +63,7 @@ impl ProcMacroExpander {
                             id
                         );
                         return ExpandResult::new(
-                            tt::Subtree::empty(),
+                            tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                             ExpandError::other("Internal error"),
                         );
                     }
@ -68,7 +72,8 @@ impl ProcMacroExpander {
                 let krate_graph = db.crate_graph();
                 // Proc macros have access to the environment variables of the invoking crate.
                 let env = &krate_graph[calling_crate].env;
-                match proc_macro.expander.expand(tt, attr_arg, env) {
+                match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
+                {
                     Ok(t) => ExpandResult::ok(t),
                     Err(err) => match err {
                         // Don't discard the item in case something unexpected happened while expanding attributes
@ -78,9 +83,10 @@ impl ProcMacroExpander {
                             ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
                         }
                         ProcMacroExpansionError::System(text)
-                        | ProcMacroExpansionError::Panic(text) => {
-                            ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
-                        }
+                        | ProcMacroExpansionError::Panic(text) => ExpandResult::new(
+                            tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
+                            ExpandError::other(text),
+                        ),
                     },
                 }
             }
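
Net effect of this hunk: the expander now threads explicit def-site, call-site and mixed-site spans through to expansion, and every failure path returns an empty subtree whose invisible delimiters still carry the call-site span instead of a span-less placeholder. A sketch of that recurring error shape, written against the APIs visible in this diff (`tt::DelimSpan`, `tt::Subtree::empty`, `ExpandResult::new`) rather than as a standalone program; the helper itself is hypothetical, the diff inlines this expression at each error site:

fn empty_at_call_site(call_site: SpanData, err: ExpandError) -> ExpandResult<tt::Subtree> {
    // Even an empty result stays anchored at the macro call it came from.
    ExpandResult::new(
        tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
        err,
    )
}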

View File

@ -1,5 +1,7 @@
 //! A simplified version of quote-crate like quasi quote macro
+use base_db::span::SpanData;

 // A helper macro quote macro
 // FIXME:
 // 1. Not all puncts are handled
@ -8,109 +10,109 @@
 #[doc(hidden)]
 #[macro_export]
 macro_rules! __quote {
-    () => {
+    ($span:ident) => {
         Vec::<crate::tt::TokenTree>::new()
     };
-    ( @SUBTREE $delim:ident $($tt:tt)* ) => {
+    ( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
         {
-            let children = $crate::__quote!($($tt)*);
+            let children = $crate::__quote!($span $($tt)*);
             crate::tt::Subtree {
                 delimiter: crate::tt::Delimiter {
                     kind: crate::tt::DelimiterKind::$delim,
-                    open: crate::tt::TokenId::unspecified(),
-                    close: crate::tt::TokenId::unspecified(),
+                    open: $span,
+                    close: $span,
                 },
                 token_trees: $crate::quote::IntoTt::to_tokens(children),
             }
         }
     };
-    ( @PUNCT $first:literal ) => {
+    ( @PUNCT($span:ident) $first:literal ) => {
         {
             vec![
                 crate::tt::Leaf::Punct(crate::tt::Punct {
                     char: $first,
                     spacing: crate::tt::Spacing::Alone,
-                    span: crate::tt::TokenId::unspecified(),
+                    span: $span,
                 }).into()
             ]
         }
     };
-    ( @PUNCT $first:literal, $sec:literal ) => {
+    ( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
         {
             vec![
                 crate::tt::Leaf::Punct(crate::tt::Punct {
                     char: $first,
                     spacing: crate::tt::Spacing::Joint,
-                    span: crate::tt::TokenId::unspecified(),
+                    span: $span,
                 }).into(),
                 crate::tt::Leaf::Punct(crate::tt::Punct {
                     char: $sec,
                     spacing: crate::tt::Spacing::Alone,
-                    span: crate::tt::TokenId::unspecified(),
+                    span: $span,
                 }).into()
             ]
         }
     };
     // hash variable
-    ( # $first:ident $($tail:tt)* ) => {
+    ($span:ident # $first:ident $($tail:tt)* ) => {
         {
-            let token = $crate::quote::ToTokenTree::to_token($first);
+            let token = $crate::quote::ToTokenTree::to_token($first, $span);
             let mut tokens = vec![token.into()];
-            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
             tokens.append(&mut tail_tokens);
             tokens
         }
     };
-    ( ## $first:ident $($tail:tt)* ) => {
+    ($span:ident ## $first:ident $($tail:tt)* ) => {
         {
-            let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
-            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+            let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
+            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
             tokens.append(&mut tail_tokens);
             tokens
         }
     };
     // Brace
-    ( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
+    ($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
     // Bracket
-    ( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
+    ($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
     // Parenthesis
-    ( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
+    ($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
     // Literal
-    ( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
+    ($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
     // Ident
-    ( $tt:ident ) => {
+    ($span:ident $tt:ident ) => {
         vec![ {
             crate::tt::Leaf::Ident(crate::tt::Ident {
                 text: stringify!($tt).into(),
-                span: crate::tt::TokenId::unspecified(),
+                span: $span,
             }).into()
         }]
     };

     // Puncts
     // FIXME: Not all puncts are handled
-    ( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
-    ( & ) => {$crate::__quote!(@PUNCT '&')};
-    ( , ) => {$crate::__quote!(@PUNCT ',')};
-    ( : ) => {$crate::__quote!(@PUNCT ':')};
-    ( ; ) => {$crate::__quote!(@PUNCT ';')};
-    ( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
-    ( . ) => {$crate::__quote!(@PUNCT '.')};
-    ( < ) => {$crate::__quote!(@PUNCT '<')};
-    ( > ) => {$crate::__quote!(@PUNCT '>')};
-    ( ! ) => {$crate::__quote!(@PUNCT '!')};
+    ($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
+    ($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
+    ($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
+    ($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
+    ($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
+    ($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
+    ($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
+    ($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
+    ($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
+    ($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};

-    ( $first:tt $($tail:tt)+ ) => {
+    ($span:ident $first:tt $($tail:tt)+ ) => {
         {
-            let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
-            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
+            let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
+            let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
             tokens.append(&mut tail_tokens);
             tokens
@ -122,19 +124,22 @@ macro_rules! __quote {
 /// It probably should implement in proc-macro
 #[macro_export]
 macro_rules! quote {
-    ( $($tt:tt)* ) => {
-        $crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
+    ($span:ident=> $($tt:tt)* ) => {
+        $crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
     }
 }

 pub(crate) trait IntoTt {
-    fn to_subtree(self) -> crate::tt::Subtree;
+    fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
     fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
 }

 impl IntoTt for Vec<crate::tt::TokenTree> {
-    fn to_subtree(self) -> crate::tt::Subtree {
-        crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
+    fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
+        crate::tt::Subtree {
+            delimiter: crate::tt::Delimiter::invisible_spanned(span),
+            token_trees: self,
+        }
     }

     fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
 }

 impl IntoTt for crate::tt::Subtree {
-    fn to_subtree(self) -> crate::tt::Subtree {
+    fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
         self
     }
@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
 }

 pub(crate) trait ToTokenTree {
-    fn to_token(self) -> crate::tt::TokenTree;
+    fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
 }

 impl ToTokenTree for crate::tt::TokenTree {
-    fn to_token(self) -> crate::tt::TokenTree {
+    fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
         self
     }
 }

 impl ToTokenTree for &crate::tt::TokenTree {
-    fn to_token(self) -> crate::tt::TokenTree {
+    fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
         self.clone()
     }
 }

 impl ToTokenTree for crate::tt::Subtree {
-    fn to_token(self) -> crate::tt::TokenTree {
+    fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
         self.into()
     }
 }

 macro_rules! impl_to_to_tokentrees {
-    ($($ty:ty => $this:ident $im:block);*) => {
+    ($($span:ident: $ty:ty => $this:ident $im:block);*) => {
         $(
             impl ToTokenTree for $ty {
-                fn to_token($this) -> crate::tt::TokenTree {
+                fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
                     let leaf: crate::tt::Leaf = $im.into();
                     leaf.into()
                 }
             }

             impl ToTokenTree for &$ty {
-                fn to_token($this) -> crate::tt::TokenTree {
+                fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
                     let leaf: crate::tt::Leaf = $im.clone().into();
                     leaf.into()
                 }
@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
 }

 impl_to_to_tokentrees! {
-    u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
-    crate::tt::Leaf => self { self };
-    crate::tt::Literal => self { self };
-    crate::tt::Ident => self { self };
-    crate::tt::Punct => self { self };
-    &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
-    String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
+    span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+    span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
+    span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
+    span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
+    _span: crate::tt::Leaf => self { self };
+    _span: crate::tt::Literal => self { self };
+    _span: crate::tt::Ident => self { self };
+    _span: crate::tt::Punct => self { self };
+    span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
+    span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
 }

 #[cfg(test)]
 mod tests {
+    use crate::tt;
+    use base_db::{
+        span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
+        FileId,
+    };
+    use expect_test::expect;
+    use syntax::{TextRange, TextSize};
+
+    const DUMMY: tt::SpanData = tt::SpanData {
+        range: TextRange::empty(TextSize::new(0)),
+        anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
+        ctx: SyntaxContextId::ROOT,
+    };
+
     #[test]
     fn test_quote_delimiters() {
-        assert_eq!(quote!({}).to_string(), "{}");
-        assert_eq!(quote!(()).to_string(), "()");
-        assert_eq!(quote!([]).to_string(), "[]");
+        assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
+        assert_eq!(quote!(DUMMY =>()).to_string(), "()");
+        assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
     }

     #[test]
     fn test_quote_idents() {
-        assert_eq!(quote!(32).to_string(), "32");
-        assert_eq!(quote!(struct).to_string(), "struct");
+        assert_eq!(quote!(DUMMY =>32).to_string(), "32");
+        assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
     }

     #[test]
     fn test_quote_hash_simple_literal() {
         let a = 20;
-        assert_eq!(quote!(#a).to_string(), "20");
+        assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
         let s: String = "hello".into();
-        assert_eq!(quote!(#s).to_string(), "\"hello\"");
+        assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
     }

     fn mk_ident(name: &str) -> crate::tt::Ident {
-        crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
+        crate::tt::Ident { text: name.into(), span: DUMMY }
     }

     #[test]
     fn test_quote_hash_token_tree() {
         let a = mk_ident("hello");

-        let quoted = quote!(#a);
+        let quoted = quote!(DUMMY =>#a);
         assert_eq!(quoted.to_string(), "hello");
         let t = format!("{quoted:?}");
-        assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n  IDENT   hello 4294967295");
+        expect![[r#"
+            SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }
+              IDENT   hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
     }

     #[test]
     fn test_quote_simple_derive_copy() {
         let name = mk_ident("Foo");

-        let quoted = quote! {
+        let quoted = quote! {DUMMY =>
             impl Clone for #name {
                 fn clone(&self) -> Self {
                     Self {}
@ -268,18 +289,19 @@ mod tests {
         // }
         let struct_name = mk_ident("Foo");
         let fields = [mk_ident("name"), mk_ident("id")];
-        let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
+        let fields =
+            fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);

         let list = crate::tt::Subtree {
             delimiter: crate::tt::Delimiter {
                 kind: crate::tt::DelimiterKind::Brace,
-                open: crate::tt::TokenId::unspecified(),
-                close: crate::tt::TokenId::unspecified(),
+                open: DUMMY,
+                close: DUMMY,
             },
             token_trees: fields.collect(),
         };
-        let quoted = quote! {
+        let quoted = quote! {DUMMY =>
             impl Clone for #struct_name {
                 fn clone(&self) -> Self {
                     Self #list

View File

@ -0,0 +1,111 @@
//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
//! as associating spans with text ranges in a particular file.
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
use triomphe::Arc;
use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SpanMap {
/// Spanmap for a macro file
ExpansionSpanMap(Arc<ExpansionSpanMap>),
/// Spanmap for a real file
RealSpanMap(Arc<RealSpanMap>),
}
#[derive(Copy, Clone)]
pub enum SpanMapRef<'a> {
/// Spanmap for a macro file
ExpansionSpanMap(&'a ExpansionSpanMap),
/// Spanmap for a real file
RealSpanMap(&'a RealSpanMap),
}
impl mbe::SpanMapper<SpanData> for SpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for RealSpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl SpanMap {
pub fn span_for_range(&self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
pub fn as_ref(&self) -> SpanMapRef<'_> {
match self {
Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
}
}
}
impl SpanMapRef<'_> {
pub fn span_for_range(self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
}
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
file_id: FileId,
/// Invariant: Sorted vec over TextSize
// FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
pairs: Box<[(TextSize, ErasedFileAstId)]>,
}
impl RealSpanMap {
/// Creates a real file span map that returns absolute ranges (relative ranges to the root ast id).
pub fn absolute(file_id: FileId) -> Self {
RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
}
pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
pairs.extend(
db.parse(file_id)
.tree()
.items()
.map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
);
RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
}
pub fn span_for_range(&self, range: TextRange) -> SpanData {
let start = range.start();
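// The comparator below never returns `Equal`, so `binary_search_by` always
// yields `Err(partition_point)`: the index of the first pair whose offset lies
// strictly after `start`. `idx - 1` is then the closest anchor at or before the
// range; `pairs` always begins with the root entry at offset 0, so `idx >= 1`.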
let idx = self
.pairs
.binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
.unwrap_err();
let (offset, ast_id) = self.pairs[idx - 1];
SpanData {
range: range - offset,
anchor: SpanAnchor { file_id: self.file_id, ast_id },
ctx: SyntaxContextId::ROOT,
}
}
}
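
To make the anchoring concrete, here is a small self-contained model of this lookup, with plain `u32` ast ids and offsets standing in for `ErasedFileAstId` and `TextSize` (illustrative types and names, not the real ones):

use std::ops::Range;

/// `pairs` mirrors `RealSpanMap::pairs`: (absolute offset, ast id), sorted by
/// offset and always starting with the root entry at offset 0.
fn span_for(pairs: &[(u32, u32)], range: Range<u32>) -> (u32, Range<u32>) {
    let start = range.start;
    let idx = pairs
        .binary_search_by(|&(offset, _)| offset.cmp(&start).then(std::cmp::Ordering::Less))
        .unwrap_err();
    let (offset, ast_id) = pairs[idx - 1];
    // The stored range is relative to the anchoring item, so edits earlier in
    // the file move the anchor without invalidating the span itself.
    (ast_id, range.start - offset..range.end - offset)
}

fn main() {
    // Root at offset 0, items starting at offsets 10 and 50.
    let pairs = [(0, 0), (10, 1), (50, 2)];
    // A token at 55..60 anchors to the item at offset 50: relative range 5..10.
    assert_eq!(span_for(&pairs, 55..60), (2, 5..10));
}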

View File

@ -24,7 +24,7 @@ use hir_def::{
 };
 use hir_expand::{
     name::{AsName, Name},
-    HirFileId,
+    HirFileId, HirFileIdExt,
 };
 use stdx::{always, never};
 use syntax::{

View File

@ -23,7 +23,7 @@ use hir_def::{
     EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
     TraitId,
 };
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::name::Name;
 use intern::{Internable, Interned};
 use itertools::Itertools;
 use la_arena::ArenaMap;
@ -1732,13 +1732,13 @@ impl HirDisplay for TypeRef {
                     f.write_joined(bounds, " + ")?;
                 }
                 TypeRef::Macro(macro_call) => {
-                    let macro_call = macro_call.to_node(f.db.upcast());
-                    let ctx = hir_def::lower::LowerCtx::with_hygiene(
+                    let ctx = hir_def::lower::LowerCtx::with_span_map(
                         f.db.upcast(),
-                        &Hygiene::new_unhygienic(),
+                        f.db.span_map(macro_call.file_id),
                     );
+                    let macro_call = macro_call.to_node(f.db.upcast());
                     match macro_call.path() {
-                        Some(path) => match Path::from_src(path, &ctx) {
+                        Some(path) => match Path::from_src(&ctx, path) {
                             Some(path) => path.hir_fmt(f)?,
                             None => write!(f, "{{macro}}")?,
                         },

View File

@ -390,6 +390,7 @@ impl InferenceContext<'_> {
         }
     }

+#[derive(Debug)]
 enum ValuePathResolution {
     // It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
     // conversion between them + `unwrap()`.

View File

@ -407,11 +407,7 @@ impl<'a> TyLoweringContext<'a> {
                 drop(expander);
                 let ty = self.lower_ty(&type_ref);

-                self.expander
-                    .borrow_mut()
-                    .as_mut()
-                    .unwrap()
-                    .exit(self.db.upcast(), mark);
+                self.expander.borrow_mut().as_mut().unwrap().exit(mark);
                 Some(ty)
             }
             _ => {

View File

@ -21,7 +21,7 @@ use hir_def::{
     AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
     StaticId, VariantId,
 };
-use hir_expand::{mod_path::ModPath, InFile};
+use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
 use intern::Interned;
 use la_arena::ArenaMap;
 use rustc_hash::{FxHashMap, FxHashSet};

View File

@ -30,6 +30,7 @@ pub(crate) struct TestDB {
 impl Default for TestDB {
     fn default() -> Self {
         let mut this = Self { storage: Default::default(), events: Default::default() };
+        this.setup_syntax_context_root();
         this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
         this
     }

View File

@ -1,5 +1,6 @@
 //! Attributes & documentation for hir types.

+use base_db::FileId;
 use hir_def::{
     attr::AttrsWithOwner,
     item_scope::ItemInNs,
@ -8,7 +9,10 @@ use hir_def::{
     resolver::{HasResolver, Resolver, TypeNs},
     AssocItemId, AttrDefId, ModuleDefId,
 };
-use hir_expand::{hygiene::Hygiene, name::Name};
+use hir_expand::{
+    name::Name,
+    span::{RealSpanMap, SpanMapRef},
+};
 use hir_ty::db::HirDatabase;
 use syntax::{ast, AstNode};
@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
         if ast_path.syntax().text() != link {
             return None;
         }
-        ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
+        ModPath::from_src(
+            db.upcast(),
+            ast_path,
+            SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))),
+        )
     };

     let full = try_get_modpath(link);

View File

@ -3,10 +3,27 @@
 //! we didn't do that.
 //!
 //! But we need this for at least LRU caching at the query level.
-pub use hir_def::db::*;
+pub use hir_def::db::{
+    AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
+    ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
+    CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
+    EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
+    FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
+    FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
+    ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
+    InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
+    InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
+    InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
+    InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
+    InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
+    Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
+    StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
+    TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
+    UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
+};
 pub use hir_expand::db::{
     AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
-    ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
-    MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
+    ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
+    ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
 };
 pub use hir_ty::db::*;

View File

@ -124,7 +124,7 @@ pub use {
     hir_expand::{
         attrs::{Attr, AttrId},
         name::{known, Name},
-        ExpandResult, HirFileId, InFile, MacroFile, Origin,
+        tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
     },
     hir_ty::{
         display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
@ -140,7 +140,10 @@ pub use {
 #[allow(unused)]
 use {
     hir_def::path::Path,
-    hir_expand::{hygiene::Hygiene, name::AsName},
+    hir_expand::{
+        name::AsName,
+        span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
+    },
 };

 /// hir::Crate describes a single crate. It's the main interface with which
@ -3490,9 +3493,34 @@ impl Impl {
         self.id.lookup(db.upcast()).container.into()
     }

-    pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+    pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
         let src = self.source(db)?;
-        src.file_id.as_builtin_derive_attr_node(db.upcast())
+
+        let macro_file = src.file_id.macro_file()?;
+        let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+        let (derive_attr, derive_index) = match loc.kind {
+            MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+                let module_id = self.id.lookup(db.upcast()).container;
+                (
+                    db.crate_def_map(module_id.krate())[module_id.local_id]
+                        .scope
+                        .derive_macro_invoc(ast_id, derive_attr_index)?,
+                    derive_index,
+                )
+            }
+            _ => return None,
+        };
+        let file_id = MacroFileId { macro_call_id: derive_attr };
+        let path = db
+            .parse_macro_expansion(file_id)
+            .value
+            .0
+            .syntax_node()
+            .children()
+            .nth(derive_index as usize)
+            .and_then(<ast::Attr as AstNode>::cast)
+            .and_then(|it| it.path())?;
+        Some(InMacroFile { file_id, value: path })
     }

     pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {

View File

@ -15,10 +15,14 @@ use hir_def::{
     type_ref::Mutability,
     AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
 };
-use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
+use hir_expand::{
+    db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId,
+    MacroFileId, MacroFileIdExt,
+};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::{smallvec, SmallVec};
+use stdx::TupleExt;
 use syntax::{
     algo::skip_trivia_token,
     ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
@ -114,11 +118,11 @@ pub struct Semantics<'db, DB> {
 pub struct SemanticsImpl<'db> {
     pub db: &'db dyn HirDatabase,
     s2d_cache: RefCell<SourceToDefCache>,
-    expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
-    // Rootnode to HirFileId cache
+    expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
+    /// Rootnode to HirFileId cache
     cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
-    // MacroCall to its expansion's HirFileId cache
-    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+    /// MacroCall to its expansion's MacroFileId cache
+    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
 }

 impl<DB> fmt::Debug for Semantics<'_, DB> {
@ -255,7 +259,7 @@ impl<'db> SemanticsImpl<'db> {
     pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
         let sa = self.analyze_no_infer(macro_call.syntax())?;
         let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
-        let node = self.parse_or_expand(file_id);
+        let node = self.parse_or_expand(file_id.into());
         Some(node)
     }
@ -524,52 +528,54 @@ impl<'db> SemanticsImpl<'db> {
         res
     }

+    // FIXME: should only take real file inputs for simplicity
     fn descend_into_macros_impl(
         &self,
         token: SyntaxToken,
         // FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
         // mapping, specifically for node downmapping
-        offset: TextSize,
+        _offset: TextSize,
         f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
     ) {
+        // FIXME: Clean this up
         let _p = profile::span("descend_into_macros");
-        let relative_token_offset = token.text_range().start().checked_sub(offset);
-        let parent = match token.parent() {
+        let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
             Some(it) => it,
             None => return,
         };
-        let sa = match self.analyze_no_infer(&parent) {
-            Some(it) => it,
-            None => return,
-        };
-        let def_map = sa.resolver.def_map();
-        let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
         let mut cache = self.expansion_info_cache.borrow_mut();
         let mut mcache = self.macro_call_cache.borrow_mut();
+        let span = match sa.file_id.repr() {
+            base_db::span::HirFileIdRepr::FileId(file_id) => {
+                self.db.real_span_map(file_id).span_for_range(token.text_range())
+            }
+            base_db::span::HirFileIdRepr::MacroFile(macro_file) => cache
+                .entry(macro_file)
+                .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+                .exp_map
+                .span_at(token.text_range().start()),
+        };
+
+        let def_map = sa.resolver.def_map();
+        let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];

         let mut process_expansion_for_token =
-            |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+            |stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| {
                 let expansion_info = cache
                     .entry(macro_file)
-                    .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
-                    .as_ref()?;
+                    .or_insert_with(|| macro_file.expansion_info(self.db.upcast()));

                 {
                     let InFile { file_id, value } = expansion_info.expanded();
                     self.cache(value, file_id);
                 }

-                let mapped_tokens = expansion_info.map_token_down(
-                    self.db.upcast(),
-                    item,
-                    token,
-                    relative_token_offset,
-                )?;
+                let mapped_tokens = expansion_info.map_range_down(span, None)?;
                 let len = stack.len();

                 // requeue the tokens we got from mapping our current token down
-                stack.extend(mapped_tokens);
+                stack.extend(mapped_tokens.map(Into::into));
                 // if the length changed we have found a mapping for the token
                 (stack.len() != len).then_some(())
             };
@ -578,26 +584,21 @@ impl<'db> SemanticsImpl<'db> {
         // either due to not being in a macro-call or because its unused push it into the result vec,
        // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
         while let Some(token) = stack.pop() {
-            self.db.unwind_if_cancelled();
             let was_not_remapped = (|| {
                 // First expand into attribute invocations
                 let containing_attribute_macro_call = self.with_ctx(|ctx| {
                     token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
                         if item.attrs().next().is_none() {
                             // Don't force populate the dyn cache for items that don't have an attribute anyways
                             return None;
                         }
-                        Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+                        Some(ctx.item_to_macro_call(token.with_value(item.clone()))?)
                     })
                 });
-                if let Some((call_id, item)) = containing_attribute_macro_call {
-                    let file_id = call_id.as_file();
-                    return process_expansion_for_token(
-                        &mut stack,
-                        file_id,
-                        Some(item),
-                        token.as_ref(),
-                    );
+                if let Some(call_id) = containing_attribute_macro_call {
+                    let file_id = call_id.as_macro_file();
+                    return process_expansion_for_token(&mut stack, file_id, token.as_ref());
                 }

                 // Then check for token trees, that means we are either in a function-like macro or
@ -613,7 +614,8 @@ impl<'db> SemanticsImpl<'db> {
                 }

                 if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
-                    let mcall = token.with_value(macro_call);
+                    let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
+                        token.with_value(macro_call);
                     let file_id = match mcache.get(&mcall) {
                         Some(&it) => it,
                         None => {
@ -622,7 +624,7 @@ impl<'db> SemanticsImpl<'db> {
                             it
                         }
                     };
-                    process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+                    process_expansion_for_token(&mut stack, file_id, token.as_ref())
                 } else if let Some(meta) = ast::Meta::cast(parent) {
                     // attribute we failed expansion for earlier, this might be a derive invocation
                     // or derive helper attribute
@ -643,11 +645,10 @@ impl<'db> SemanticsImpl<'db> {
                     match derive_call {
                         Some(call_id) => {
                             // resolved to a derive
-                            let file_id = call_id.as_file();
+                            let file_id = call_id.as_macro_file();
                             return process_expansion_for_token(
                                 &mut stack,
                                 file_id,
-                                Some(adt.into()),
                                 token.as_ref(),
                             );
                         }
@ -679,13 +680,11 @@ impl<'db> SemanticsImpl<'db> {
                     let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
                     let helpers =
                         def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
-                    let item = Some(adt.into());
                     let mut res = None;
                     for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
                         res = res.or(process_expansion_for_token(
                             &mut stack,
-                            derive.as_file(),
-                            item.clone(),
+                            derive.as_macro_file(),
                             token.as_ref(),
                         ));
                     }
@ -737,14 +736,16 @@ impl<'db> SemanticsImpl<'db> {
     pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
         let node = self.find_file(node);
         node.original_file_range_opt(self.db.upcast())
+            .filter(|(_, ctx)| ctx.is_root())
+            .map(TupleExt::head)
     }

     /// Attempts to map the node out of macro expanded files.
     /// This only work for attribute expansions, as other ones do not have nodes as input.
     pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
         self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
-            |InFile { file_id, value }| {
-                self.cache(find_root(value.syntax()), file_id);
+            |InRealFile { file_id, value }| {
+                self.cache(find_root(value.syntax()), file_id.into());
                 value
             },
         )
@ -755,8 +756,8 @@ impl<'db> SemanticsImpl<'db> {
     pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
         let InFile { file_id, .. } = self.find_file(node);
         InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
-            |InFile { file_id, value }| {
-                self.cache(find_root(&value), file_id);
+            |InRealFile { file_id, value }| {
+                self.cache(find_root(&value), file_id.into());
                 value
             },
         )
@ -851,9 +852,9 @@ impl<'db> SemanticsImpl<'db> {
     pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
         let analyze = self.analyze(path.syntax())?;
-        let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
-        let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
-        let hir_path = Path::from_src(path.clone(), &ctx)?;
+        let span_map = self.db.span_map(analyze.file_id);
+        let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
+        let hir_path = Path::from_src(&ctx, path.clone())?;
         match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
             TypeNs::TraitId(id) => Some(Trait { id }),
             _ => None,
@ -1037,7 +1038,7 @@ impl<'db> SemanticsImpl<'db> {
     fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
         let mut cache = self.s2d_cache.borrow_mut();
-        let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
+        let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
         f(&mut ctx)
     }
@ -1451,7 +1452,7 @@ impl SemanticsScope<'_> {
     /// necessary a heuristic, as it doesn't take hygiene into account.
     pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
         let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
-        let path = Path::from_src(path.clone(), &ctx)?;
+        let path = Path::from_src(&ctx, path.clone())?;
         resolve_hir_path(self.db, &self.resolver, &path)
     }
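
For orientation, the expansion descent above boils down to a small worklist algorithm; a toy model with invented names (none of this is the crate's API):

/// Pop a token and try to map it down into a macro expansion. Remapped tokens
/// are requeued, since they may map down further; tokens that cannot be
/// remapped are handed to `sink`, which returns `true` to stop the walk early,
/// like the `f` callback above.
fn descend<T>(
    mut stack: Vec<T>,
    mut map_down: impl FnMut(&T) -> Option<Vec<T>>,
    mut sink: impl FnMut(T) -> bool,
) {
    while let Some(token) = stack.pop() {
        match map_down(&token) {
            Some(mapped) => stack.extend(mapped),
            None => {
                if sink(token) {
                    break;
                }
            }
        }
    }
}

fn main() {
    // "Tokens" are just expansion depths here; anything below depth 2 maps
    // down one more level before it is yielded.
    descend(
        vec![0u32],
        |&depth| (depth < 2).then(|| vec![depth + 1]),
        |token| {
            println!("fully descended token at depth {token}");
            false
        },
    );
}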

View File

@ -97,7 +97,7 @@ use hir_def::{
     FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
     StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
 };
-use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
+use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
 use stdx::{impl_from, never};
@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap
 pub(super) struct SourceToDefCtx<'a, 'b> {
     pub(super) db: &'b dyn HirDatabase,
-    pub(super) cache: &'a mut SourceToDefCache,
+    pub(super) dynmap_cache: &'a mut SourceToDefCache,
 }

 impl SourceToDefCtx<'_, '_> {
@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> {
     fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
         let db = self.db;
-        self.cache
+        self.dynmap_cache
             .entry((container, file_id))
             .or_insert_with(|| container.child_by_source(db, file_id))
     }

View File

@ -26,11 +26,10 @@ use hir_def::{
 };
 use hir_expand::{
     builtin_fn_macro::BuiltinFnLikeExpander,
-    hygiene::Hygiene,
     mod_path::path,
     name,
     name::{AsName, Name},
-    HirFileId, InFile,
+    HirFileId, HirFileIdExt, InFile, MacroFileId, MacroFileIdExt,
 };
 use hir_ty::{
     diagnostics::{
@ -484,7 +483,7 @@ impl SourceAnalyzer {
         macro_call: InFile<&ast::MacroCall>,
     ) -> Option<Macro> {
         let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
-        let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+        let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
         self.resolver
             .resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
             .map(|(it, _)| it.into())
@ -596,9 +595,8 @@ impl SourceAnalyzer {
         }

         // This must be a normal source file rather than macro file.
-        let hygiene = Hygiene::new(db.upcast(), self.file_id);
-        let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
-        let hir_path = Path::from_src(path.clone(), &ctx)?;
+        let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
+        let hir_path = Path::from_src(&ctx, path.clone())?;

         // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
         // trying to resolve foo::bar.
@ -755,14 +753,15 @@ impl SourceAnalyzer {
         &self,
         db: &dyn HirDatabase,
         macro_call: InFile<&ast::MacroCall>,
-    ) -> Option<HirFileId> {
+    ) -> Option<MacroFileId> {
         let krate = self.resolver.krate();
         let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
             self.resolver
                 .resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
                 .map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
         })?;
-        Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+        // why the 64?
+        Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
     }

     pub(crate) fn resolve_variant(

View File

@ -50,13 +50,8 @@ impl DeclarationLocation {
         node.as_ref().original_file_range(db.upcast())
     }

-    pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
-        if let Some(file_id) = self.hir_file_id.file_id() {
-            // fast path to prevent parsing
-            return Some(FileRange { file_id, range: self.name_ptr.text_range() });
-        }
-        let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
-        node.as_ref().original_file_range_opt(db.upcast())
+    pub fn original_name_range(&self, db: &dyn HirDatabase) -> FileRange {
+        InFile::new(self.hir_file_id, self.name_ptr.text_range()).original_file_range(db.upcast())
     }
 }

View File

@ -3,7 +3,7 @@ use std::{
     iter,
 };

-use hir::{HasSource, ModuleSource};
+use hir::{HasSource, HirFileIdExt, ModuleSource};
 use ide_db::{
     assists::{AssistId, AssistKind},
     base_db::FileId,

View File

@ -1,4 +1,6 @@
use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef}; use hir::{
db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
};
use ide_db::base_db::FileId; use ide_db::base_db::FileId;
use syntax::{ use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _}, ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},

View File

@ -1,5 +1,5 @@
 use crate::assist_context::{AssistContext, Assists};
-use hir::{HasVisibility, HirDisplay, Module};
+use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
 use ide_db::{
     assists::{AssistId, AssistKind},
     base_db::{FileId, Upcast},

View File

@ -1,4 +1,4 @@
-use hir::{HasSource, HirDisplay, InFile};
+use hir::{HasSource, HirDisplay, InRealFile};
 use ide_db::assists::{AssistId, AssistKind};
 use syntax::{
     ast::{self, make, HasArgList},
@ -114,14 +114,14 @@ fn add_variant_to_accumulator(
     parent: PathParent,
 ) -> Option<()> {
     let db = ctx.db();
-    let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+    let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;

     acc.add(
         AssistId("generate_enum_variant", AssistKind::Generate),
         "Generate variant",
         target,
         |builder| {
-            builder.edit_file(file_id.original_file(db));
+            builder.edit_file(file_id);
             let node = builder.make_mut(enum_node);
             let variant = make_variant(ctx, name_ref, parent);
             node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));

View File

@ -1,5 +1,6 @@
 use hir::{
-    Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
+    Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type,
+    TypeInfo,
 };
 use ide_db::{
     base_db::FileId,
@ -510,7 +511,7 @@ fn assoc_fn_target_info(
 }

 fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
-    match &target {
+    match target {
         GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
         GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
     }

View File

@ -1,6 +1,6 @@
 use std::collections::{hash_map::Entry, HashMap};

-use hir::{InFile, Module, ModuleSource};
+use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
 use ide_db::{
     base_db::FileRange,
     defs::Definition,
@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
 fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
     let (file_id, range) = {
         let InFile { file_id, value } = module.definition_source(db);
-        if let Some((file_id, call_source)) = file_id.original_call_node(db) {
+        if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) {
             (file_id, Some(call_source.text_range()))
         } else {
             (

View File

@ -1,4 +1,4 @@
-use hir::{InFile, ModuleDef};
+use hir::{HirFileIdExt, InFile, ModuleDef};
 use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
 use itertools::Itertools;
 use syntax::{

View File

@ -2,7 +2,7 @@
 use std::iter;

-use hir::{Module, ModuleSource};
+use hir::{HirFileIdExt, Module, ModuleSource};
 use ide_db::{
     base_db::{SourceDatabaseExt, VfsPath},
     FxHashSet, RootDatabase, SymbolKind,

View File

@ -97,13 +97,13 @@ impl RootDatabase {
// ExpandDatabase // ExpandDatabase
hir::db::AstIdMapQuery hir::db::AstIdMapQuery
hir::db::ParseMacroExpansionQuery
hir::db::InternMacroCallQuery
hir::db::MacroArgNodeQuery
hir::db::DeclMacroExpanderQuery hir::db::DeclMacroExpanderQuery
hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery hir::db::ExpandProcMacroQuery
hir::db::HygieneFrameQuery hir::db::InternMacroCallQuery
hir::db::InternSyntaxContextQuery
hir::db::MacroArgQuery
hir::db::ParseMacroExpansionQuery
hir::db::RealSpanMapQuery
// DefDatabase // DefDatabase
hir::db::FileItemTreeQuery hir::db::FileItemTreeQuery
@ -143,6 +143,13 @@ impl RootDatabase {
hir::db::FunctionVisibilityQuery hir::db::FunctionVisibilityQuery
hir::db::ConstVisibilityQuery hir::db::ConstVisibilityQuery
hir::db::CrateSupportsNoStdQuery hir::db::CrateSupportsNoStdQuery
hir::db::BlockItemTreeQueryQuery
hir::db::ExternCrateDeclDataQuery
hir::db::LangAttrQuery
hir::db::InternAnonymousConstQuery
hir::db::InternExternCrateQuery
hir::db::InternInTypeConstQuery
hir::db::InternUseQuery
// HirDatabase // HirDatabase
hir::db::InferQueryQuery hir::db::InferQueryQuery
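The added `InternSyntaxContextQuery` and `RealSpanMapQuery` entries are the database side of the span rewrite. A sketch of the span shape they support, assuming the layout outlined in the PR summary (names illustrative):

#[derive(Clone, Copy)]
struct ErasedAstId(u32); // stable id of a nearby item, the anchor
#[derive(Clone, Copy)]
struct SpanAnchor { file_id: u32, ast_id: ErasedAstId }
#[derive(Clone, Copy)]
struct TextRange { start: u32, end: u32 }
#[derive(Clone, Copy)]
struct SyntaxContextId(u32); // interned hygiene data

// Rather than numbering tokens and keeping one map for the argument and one
// for the expansion, each token carries the range it was sourced from,
// relative to its anchor so unrelated edits leave it unchanged.
#[derive(Clone, Copy)]
struct SpanData {
    range: TextRange,
    anchor: SpanAnchor,
    ctx: SyntaxContextId,
}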

View File

@ -144,6 +144,7 @@ impl RootDatabase {
db.set_library_roots_with_durability(Default::default(), Durability::HIGH); db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH); db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_parse_query_lru_capacity(lru_capacity); db.update_parse_query_lru_capacity(lru_capacity);
db.setup_syntax_context_root();
db db
} }
@ -156,7 +157,6 @@ impl RootDatabase {
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity); base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// macro expansions are usually rather small, so we can afford to keep more of them alive // macro expansions are usually rather small, so we can afford to keep more of them alive
hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity); hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
} }
pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) { pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
@ -174,12 +174,6 @@ impl RootDatabase {
.copied() .copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP), .unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
); );
hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(
lru_capacities
.get(stringify!(MacroExpandQuery))
.copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
);
macro_rules! update_lru_capacity_per_query { macro_rules! update_lru_capacity_per_query {
($( $module:ident :: $query:ident )*) => {$( ($( $module:ident :: $query:ident )*) => {$(
@ -204,11 +198,10 @@ impl RootDatabase {
hir_db::AstIdMapQuery hir_db::AstIdMapQuery
// hir_db::ParseMacroExpansionQuery // hir_db::ParseMacroExpansionQuery
// hir_db::InternMacroCallQuery // hir_db::InternMacroCallQuery
hir_db::MacroArgNodeQuery hir_db::MacroArgQuery
hir_db::DeclMacroExpanderQuery hir_db::DeclMacroExpanderQuery
// hir_db::MacroExpandQuery // hir_db::MacroExpandQuery
hir_db::ExpandProcMacroQuery hir_db::ExpandProcMacroQuery
hir_db::HygieneFrameQuery
hir_db::ParseMacroExpansionErrorQuery hir_db::ParseMacroExpansionErrorQuery
// DefDatabase // DefDatabase
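Dropping `MacroExpandQuery` from the LRU setup follows from its de-queryfication: its sole caller is `parse_macro_expansion`, and both were cached at the same limit. A toy model of why the inner cache was redundant (stand-in types, not the real salsa machinery):

use std::collections::HashMap;

struct Memo { cap: usize, map: HashMap<u32, String> } // crude cache stand-in

fn macro_expand(id: u32) -> String {
    format!("expansion of call {id}") // stand-in for the real expansion work
}

// Every entry this cache holds used to imply a duplicate entry in a second
// cache inside `macro_expand`; with `macro_expand` demoted to a plain
// function, each expansion is stored once.
fn parse_macro_expansion(cache: &mut Memo, id: u32) -> String {
    if let Some(hit) = cache.map.get(&id) {
        return hit.clone();
    }
    let expanded = macro_expand(id);
    if cache.map.len() < cache.cap {
        cache.map.insert(id, expanded.clone());
    }
    expanded
}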

View File

@ -22,10 +22,10 @@
//! Our current behavior is ¯\_(ツ)_/¯. //! Our current behavior is ¯\_(ツ)_/¯.
use std::fmt; use std::fmt;
use base_db::{AnchoredPathBuf, FileId, FileRange}; use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange};
use either::Either; use either::Either;
use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics}; use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
use stdx::never; use stdx::{never, TupleExt};
use syntax::{ use syntax::{
ast::{self, HasName}, ast::{self, HasName},
AstNode, SyntaxKind, TextRange, T, AstNode, SyntaxKind, TextRange, T,
@ -103,6 +103,7 @@ impl Definition {
/// renamed and extern crate names will report its range, though a rename will introduce /// renamed and extern crate names will report its range, though a rename will introduce
/// an alias instead. /// an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> { pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range);
let res = match self { let res = match self {
Definition::Macro(mac) => { Definition::Macro(mac) => {
let src = mac.source(sema.db)?; let src = mac.source(sema.db)?;
@ -110,14 +111,18 @@ impl Definition {
Either::Left(it) => it.name()?, Either::Left(it) => it.name()?,
Either::Right(it) => it.name()?, Either::Right(it) => it.name()?,
}; };
src.with_value(name.syntax()).original_file_range_opt(sema.db) src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
Definition::Field(field) => { Definition::Field(field) => {
let src = field.source(sema.db)?; let src = field.source(sema.db)?;
match &src.value { match &src.value {
FieldSource::Named(record_field) => { FieldSource::Named(record_field) => {
let name = record_field.name()?; let name = record_field.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db) src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
FieldSource::Pos(_) => None, FieldSource::Pos(_) => None,
} }
@ -125,25 +130,31 @@ impl Definition {
Definition::Module(module) => { Definition::Module(module) => {
let src = module.declaration_source(sema.db)?; let src = module.declaration_source(sema.db)?;
let name = src.value.name()?; let name = src.value.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db) src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
Definition::Function(it) => name_range(it, sema), Definition::Function(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Adt(adt) => match adt { Definition::Adt(adt) => match adt {
hir::Adt::Struct(it) => name_range(it, sema), hir::Adt::Struct(it) => name_range(it, sema).and_then(syn_ctx_is_root),
hir::Adt::Union(it) => name_range(it, sema), hir::Adt::Union(it) => name_range(it, sema).and_then(syn_ctx_is_root),
hir::Adt::Enum(it) => name_range(it, sema), hir::Adt::Enum(it) => name_range(it, sema).and_then(syn_ctx_is_root),
}, },
Definition::Variant(it) => name_range(it, sema), Definition::Variant(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Const(it) => name_range(it, sema), Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Static(it) => name_range(it, sema), Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Trait(it) => name_range(it, sema), Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::TraitAlias(it) => name_range(it, sema), Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::TypeAlias(it) => name_range(it, sema), Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Local(it) => name_range(it.primary_source(sema.db), sema), Definition::Local(it) => {
name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root)
}
Definition::GenericParam(generic_param) => match generic_param { Definition::GenericParam(generic_param) => match generic_param {
hir::GenericParam::LifetimeParam(lifetime_param) => { hir::GenericParam::LifetimeParam(lifetime_param) => {
let src = lifetime_param.source(sema.db)?; let src = lifetime_param.source(sema.db)?;
src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db) src.with_value(src.value.lifetime()?.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
_ => { _ => {
let x = match generic_param { let x = match generic_param {
@ -156,22 +167,30 @@ impl Definition {
Either::Left(x) => x.name()?, Either::Left(x) => x.name()?,
Either::Right(_) => return None, Either::Right(_) => return None,
}; };
src.with_value(name.syntax()).original_file_range_opt(sema.db) src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
}, },
Definition::Label(label) => { Definition::Label(label) => {
let src = label.source(sema.db); let src = label.source(sema.db);
let lifetime = src.value.lifetime()?; let lifetime = src.value.lifetime()?;
src.with_value(lifetime.syntax()).original_file_range_opt(sema.db) src.with_value(lifetime.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
Definition::ExternCrateDecl(it) => { Definition::ExternCrateDecl(it) => {
let src = it.source(sema.db)?; let src = it.source(sema.db)?;
if let Some(rename) = src.value.rename() { if let Some(rename) = src.value.rename() {
let name = rename.name()?; let name = rename.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db) src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} else { } else {
let name = src.value.name_ref()?; let name = src.value.name_ref()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db) src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} }
} }
Definition::BuiltinType(_) => return None, Definition::BuiltinType(_) => return None,
@ -183,7 +202,10 @@ impl Definition {
}; };
return res; return res;
fn name_range<D>(def: D, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> fn name_range<D>(
def: D,
sema: &Semantics<'_, RootDatabase>,
) -> Option<(FileRange, SyntaxContextId)>
where where
D: HasSource, D: HasSource,
D::Ast: ast::HasName, D::Ast: ast::HasName,
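Every call site above applies the same `syn_ctx_is_root` filter to the new `(FileRange, SyntaxContextId)` return value. A runnable sketch of the pattern with stand-in types: a rename is only offered when the mapped-out range comes from the root syntax context, i.e. the name was written by the user rather than produced by a macro.

#[derive(Clone, Copy, PartialEq)]
struct SyntaxContextId(u32);
impl SyntaxContextId {
    fn is_root(self) -> bool { self.0 == 0 } // root context, by convention here
}
#[derive(Clone, Copy)]
struct FileRange { file_id: u32, start: u32, end: u32 }

fn range_for_rename(
    mapped: Option<(FileRange, SyntaxContextId)>,
) -> Option<FileRange> {
    // Mirrors the `syn_ctx_is_root` closure above.
    mapped.and_then(|(range, ctx)| ctx.is_root().then(|| range))
}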
@ -256,8 +278,10 @@ fn rename_mod(
let file_id = src.file_id.original_file(sema.db); let file_id = src.file_id.original_file(sema.db);
match src.value.name() { match src.value.name() {
Some(name) => { Some(name) => {
if let Some(file_range) = if let Some(file_range) = src
src.with_value(name.syntax()).original_file_range_opt(sema.db) .with_value(name.syntax())
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{ {
source_change.insert_source_edit( source_change.insert_source_edit(
file_id, file_id,
@ -493,7 +517,12 @@ fn source_edit_from_def(
for source in local.sources(sema.db) { for source in local.sources(sema.db) {
let source = match source.source.clone().original_ast_node(sema.db) { let source = match source.source.clone().original_ast_node(sema.db) {
Some(source) => source, Some(source) => source,
None => match source.source.syntax().original_file_range_opt(sema.db) { None => match source
.source
.syntax()
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{
Some(FileRange { file_id: file_id2, range }) => { Some(FileRange { file_id: file_id2, range }) => {
file_id = Some(file_id2); file_id = Some(file_id2);
edit.replace(range, new_name.to_owned()); edit.replace(range, new_name.to_owned());
@ -504,7 +533,7 @@ fn source_edit_from_def(
} }
}, },
}; };
file_id = source.file_id.file_id(); file_id = Some(source.file_id);
if let Either::Left(pat) = source.value { if let Either::Left(pat) = source.value {
let name_range = pat.name().unwrap().syntax().text_range(); let name_range = pat.name().unwrap().syntax().text_range();
// special cases required for renaming fields/locals in Record patterns // special cases required for renaming fields/locals in Record patterns

View File

@ -8,7 +8,8 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt}; use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{ use hir::{
AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility, AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource,
Semantics, Visibility,
}; };
use memchr::memmem::Finder; use memchr::memmem::Finder;
use nohash_hasher::IntMap; use nohash_hasher::IntMap;
@ -132,7 +133,8 @@ impl SearchScope {
let (file_id, range) = { let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db); let InFile { file_id, value } = module.definition_source(db);
if let Some((file_id, call_source)) = file_id.original_call_node(db) { if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db)
{
(file_id, Some(call_source.text_range())) (file_id, Some(call_source.text_range()))
} else { } else {
( (

View File

@ -21,10 +21,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 83..119, range: 83..119,
@ -50,10 +48,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..81, range: 0..81,
@ -79,10 +75,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..81, range: 0..81,
@ -108,10 +102,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..81, range: 0..81,
@ -137,10 +129,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..81, range: 0..81,
@ -166,10 +156,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 83..119, range: 83..119,
@ -195,10 +183,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..81, range: 0..81,

View File

@ -19,10 +19,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: TYPE_ALIAS, kind: TYPE_ALIAS,
range: 397..417, range: 397..417,
@ -46,10 +44,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: CONST, kind: CONST,
range: 340..361, range: 340..361,
@ -73,10 +69,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: CONST, kind: CONST,
range: 520..592, range: 520..592,
@ -102,10 +96,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: ENUM, kind: ENUM,
range: 185..207, range: 185..207,
@ -131,10 +123,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 654..676, range: 654..676,
@ -160,10 +150,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: MACRO_DEF, kind: MACRO_DEF,
range: 153..168, range: 153..168,
@ -187,10 +175,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STATIC, kind: STATIC,
range: 362..396, range: 362..396,
@ -216,10 +202,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 170..184, range: 170..184,
@ -245,12 +229,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: MacroFile( hir_file_id: MacroFile(
MacroFile {
macro_call_id: MacroCallId(
0, 0,
), ),
},
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..22, range: 0..22,
@ -276,10 +256,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 318..336, range: 318..336,
@ -307,10 +285,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 555..581, range: 555..581,
@ -338,10 +314,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 479..507, range: 479..507,
@ -365,10 +339,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: TRAIT, kind: TRAIT,
range: 261..300, range: 261..300,
@ -394,10 +366,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 682..696, range: 682..696,
@ -423,10 +393,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: UNION, kind: UNION,
range: 208..222, range: 208..222,
@ -452,10 +420,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: MODULE, kind: MODULE,
range: 419..457, range: 419..457,
@ -481,10 +447,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: MODULE, kind: MODULE,
range: 594..604, range: 594..604,
@ -510,10 +474,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: MACRO_RULES, kind: MACRO_RULES,
range: 51..131, range: 51..131,
@ -537,10 +499,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: FN, kind: FN,
range: 242..257, range: 242..257,
@ -566,10 +526,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: MACRO_RULES, kind: MACRO_RULES,
range: 1..48, range: 1..48,
@ -593,10 +551,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: FN, kind: FN,
range: 302..338, range: 302..338,
@ -622,10 +578,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 611..648, range: 611..648,
@ -649,10 +603,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: FN, kind: FN,
range: 279..298, range: 279..298,
@ -691,10 +643,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
0, 0,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 435..455, range: 435..455,
@ -731,10 +681,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
1, 1,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 111..143, range: 111..143,
@ -760,10 +708,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
1, 1,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: STRUCT, kind: STRUCT,
range: 0..20, range: 0..20,
@ -789,10 +735,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
1, 1,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 25..59, range: 25..59,
@ -818,10 +762,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
1, 1,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 65..105, range: 65..105,
@ -847,10 +789,8 @@
), ),
loc: DeclarationLocation { loc: DeclarationLocation {
hir_file_id: FileId( hir_file_id: FileId(
FileId(
1, 1,
), ),
),
ptr: SyntaxNodePtr { ptr: SyntaxNodePtr {
kind: USE_TREE, kind: USE_TREE,
range: 65..105, range: 65..105,

View File

@ -60,9 +60,6 @@ macro_rules! compile_error { () => {} }
#[test] #[test]
fn eager_macro_concat() { fn eager_macro_concat() {
// FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
// See: https://github.com/rust-lang/rust-analyzer/issues/10300
check_diagnostics( check_diagnostics(
r#" r#"
//- /lib.rs crate:lib deps:core //- /lib.rs crate:lib deps:core
@ -80,7 +77,6 @@ macro_rules! m {
fn f() { fn f() {
m!(); m!();
//^^^^ error: unresolved macro $crate::private::concat
} }
//- /core.rs crate:core //- /core.rs crate:core

View File

@ -1,7 +1,7 @@
use either::Either; use either::Either;
use hir::{ use hir::{
db::{ExpandDatabase, HirDatabase}, db::{ExpandDatabase, HirDatabase},
known, AssocItem, HirDisplay, InFile, Type, known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type,
}; };
use ide_db::{ use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search, assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,

View File

@ -1,4 +1,5 @@
use hir::db::ExpandDatabase; use hir::db::ExpandDatabase;
use hir::HirFileIdExt;
use ide_db::{assists::Assist, source_change::SourceChange}; use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, SyntaxNode}; use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode}; use syntax::{match_ast, AstNode};

View File

@ -1,5 +1,5 @@
use either::Either; use either::Either;
use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics}; use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase}; use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use syntax::{ use syntax::{
ast::{self, edit::IndentLevel, make}, ast::{self, edit::IndentLevel, make},

View File

@ -1,4 +1,4 @@
use hir::{db::ExpandDatabase, InFile}; use hir::{db::ExpandDatabase, HirFileIdExt, InFile};
use ide_db::source_change::SourceChange; use ide_db::source_change::SourceChange;
use syntax::{ use syntax::{
ast::{self, HasArgList}, ast::{self, HasArgList},

View File

@ -1,4 +1,4 @@
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type}; use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange}; use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{ use syntax::{
ast::{self, BlockExpr, ExprStmt}, ast::{self, BlockExpr, ExprStmt},

View File

@ -33,7 +33,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> { fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
let db = ctx.sema.db; let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id); let root = db.parse_or_expand(d.expr.file_id);
let original_range = let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?; d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?; let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
let mut assists = vec![]; let mut assists = vec![];

View File

@ -1,4 +1,4 @@
use hir::db::ExpandDatabase; use hir::{db::ExpandDatabase, HirFileIdExt};
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit}; use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools; use itertools::Itertools;
use syntax::AstNode; use syntax::AstNode;
@ -150,12 +150,10 @@ mod baz {}
], ],
), ),
main_node: Some( main_node: Some(
InFile { InFileWrapper {
file_id: FileId( file_id: FileId(
FileId(
0, 0,
), ),
),
value: MODULE@0..8 value: MODULE@0..8
MOD_KW@0..3 "mod" MOD_KW@0..3 "mod"
WHITESPACE@3..4 " " WHITESPACE@3..4 " "

View File

@ -1,4 +1,4 @@
use hir::{HasSource, InFile, Semantics}; use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{ use ide_db::{
base_db::{FileId, FilePosition, FileRange}, base_db::{FileId, FilePosition, FileRange},
defs::Definition, defs::Definition,
@ -149,8 +149,8 @@ pub(crate) fn annotations(
node: InFile<T>, node: InFile<T>,
source_file_id: FileId, source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> { ) -> Option<(TextRange, Option<TextRange>)> {
if let Some(InFile { file_id, value }) = node.original_ast_node(db) { if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) {
if file_id == source_file_id.into() { if file_id == source_file_id {
return Some(( return Some((
value.syntax().text_range(), value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()), value.name().map(|name| name.syntax().text_range()),

View File

@ -149,7 +149,7 @@ mod tests {
fn check_hierarchy( fn check_hierarchy(
ra_fixture: &str, ra_fixture: &str,
expected: Expect, expected_nav: Expect,
expected_incoming: Expect, expected_incoming: Expect,
expected_outgoing: Expect, expected_outgoing: Expect,
) { ) {
@ -158,7 +158,7 @@ mod tests {
let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info; let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
assert_eq!(navs.len(), 1); assert_eq!(navs.len(), 1);
let nav = navs.pop().unwrap(); let nav = navs.pop().unwrap();
expected.assert_eq(&nav.debug_render()); expected_nav.assert_eq(&nav.debug_render());
let item_pos = let item_pos =
FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() }; FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };

View File

@ -1,4 +1,4 @@
use hir::Semantics; use hir::{HirFileIdExt, InFile, Semantics};
use ide_db::{ use ide_db::{
base_db::FileId, helpers::pick_best_token, base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase, syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
@ -49,7 +49,9 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string(); let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
// map up and out of the #[derive] expansion // map up and out of the #[derive] expansion
let token = hir::InFile::new(hir_file, descended).upmap(db)?.value; let InFile { file_id, value: tokens } =
hir::InFile::new(hir_file, descended).upmap_once(db)?;
let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?;
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?; let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
let expansions = sema.expand_derive_macro(&attr)?; let expansions = sema.expand_derive_macro(&attr)?;
let idx = attr let idx = attr
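For context on the `upmap` to `upmap_once` switch: the old call mapped the token all the way back to a real file, while the new one takes a single step up the expansion tree (and can yield several candidate ranges, hence `tokens[0]`). A toy model of the assumed semantics:

enum HirFile {
    Real(u32),
    Macro { parent: Box<HirFile> },
}

// One step up; a real file has nowhere further to go.
fn upmap_once(file: &HirFile) -> Option<&HirFile> {
    match file {
        HirFile::Real(_) => None,
        HirFile::Macro { parent } => Some(parent),
    }
}

// Full upmapping: iterate until a real file is reached.
fn upmap(mut file: &HirFile) -> &HirFile {
    while let Some(parent) = upmap_once(file) {
        file = parent;
    }
    file
}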

View File

@ -60,13 +60,13 @@ pub(crate) fn goto_definition(
.into_iter() .into_iter()
.filter_map(|token| { .filter_map(|token| {
let parent = token.parent()?; let parent = token.parent()?;
if let Some(tt) = ast::TokenTree::cast(parent) { if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) { if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
return Some(vec![x]); return Some(vec![x]);
} }
} }
Some( Some(
IdentClass::classify_token(sema, &token)? IdentClass::classify_node(sema, &parent)?
.definitions() .definitions()
.into_iter() .into_iter()
.flat_map(|def| { .flat_map(|def| {
@ -392,6 +392,8 @@ fn bar() {
); );
} }
// FIXME: We should emit two targets here, one for the identifier in the declaration, one for
// the macro call
#[test] #[test]
fn goto_def_for_macro_defined_fn_no_arg() { fn goto_def_for_macro_defined_fn_no_arg() {
check( check(
@ -399,11 +401,11 @@ fn bar() {
//- /lib.rs //- /lib.rs
macro_rules! define_fn { macro_rules! define_fn {
() => (fn foo() {}) () => (fn foo() {})
} }
define_fn!(); define_fn!();
//^^^^^^^^^^^^^ //^^^^^^^^^^^^^
fn bar() { fn bar() {
$0foo(); $0foo();
} }

View File

@ -249,7 +249,7 @@ impl T for &Foo {}
r#" r#"
//- minicore: copy, derive //- minicore: copy, derive
#[derive(Copy)] #[derive(Copy)]
//^^^^^^^^^^^^^^^ //^^^^
struct Foo$0; struct Foo$0;
"#, "#,
); );

View File

@ -2,6 +2,7 @@
//! //!
//! Tests live in [`bind_pat`][super::bind_pat] module. //! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::{base_db::FileId, famous_defs::FamousDefs}; use ide_db::{base_db::FileId, famous_defs::FamousDefs};
use stdx::TupleExt;
use syntax::ast::{self, AstNode}; use syntax::ast::{self, AstNode};
use text_edit::{TextRange, TextSize}; use text_edit::{TextRange, TextSize};
@ -73,7 +74,9 @@ pub(super) fn hints(
capture.display_place(sema.db) capture.display_place(sema.db)
), ),
None, None,
source.name().and_then(|name| name.syntax().original_file_range_opt(sema.db)), source.name().and_then(|name| {
name.syntax().original_file_range_opt(sema.db).map(TupleExt::head)
}),
); );
acc.push(InlayHint { acc.push(InlayHint {
needs_resolve: label.needs_resolve(), needs_resolve: label.needs_resolve(),

View File

@ -4,8 +4,8 @@ use std::fmt;
use either::Either; use either::Either;
use hir::{ use hir::{
symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId, db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource,
InFile, LocalSource, ModuleSource, HirDisplay, HirFileId, InFile, LocalSource, ModuleSource,
}; };
use ide_db::{ use ide_db::{
base_db::{FileId, FileRange}, base_db::{FileId, FileRange},
@ -40,6 +40,8 @@ pub struct NavigationTarget {
/// comments, and `focus_range` is the range of the identifier. /// comments, and `focus_range` is the range of the identifier.
/// ///
/// Clients should place the cursor on this range when navigating to this target. /// Clients should place the cursor on this range when navigating to this target.
///
/// This range must be contained within [`Self::full_range`].
pub focus_range: Option<TextRange>, pub focus_range: Option<TextRange>,
pub name: SmolStr, pub name: SmolStr,
pub kind: Option<SymbolKind>, pub kind: Option<SymbolKind>,
@ -166,13 +168,14 @@ impl NavigationTarget {
impl TryToNav for FileSymbol { impl TryToNav for FileSymbol {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> { fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let full_range = self.loc.original_range(db); let full_range = self.loc.original_range(db);
let focus_range = self.loc.original_name_range(db).and_then(|it| { let focus_range = self.loc.original_name_range(db);
if it.file_id == full_range.file_id { let focus_range = if focus_range.file_id == full_range.file_id
Some(it.range) && full_range.range.contains_range(focus_range.range)
{
Some(focus_range.range)
} else { } else {
None None
} };
});
Some(NavigationTarget { Some(NavigationTarget {
file_id: full_range.file_id, file_id: full_range.file_id,
@ -363,11 +366,11 @@ impl ToNav for hir::Module {
impl TryToNav for hir::Impl { impl TryToNav for hir::Impl {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> { fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let InFile { file_id, value } = self.source(db)?; let InFile { file_id, value } = self.source(db)?;
let derive_attr = self.as_builtin_derive(db); let derive_path = self.as_builtin_derive_path(db);
let (focus, syntax) = match &derive_attr { let (file_id, focus, syntax) = match &derive_path {
Some(attr) => (None, attr.value.syntax()), Some(attr) => (attr.file_id.into(), None, attr.value.syntax()),
None => (value.self_ty(), value.syntax()), None => (file_id, value.self_ty(), value.syntax()),
}; };
let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus); let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
@ -628,19 +631,30 @@ impl TryToNav for hir::ConstParam {
} }
} }
/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions.
/// Additionally verifies that the name span is in bounds of, and in the same file as, the original range.
fn orig_range_with_focus( fn orig_range_with_focus(
db: &RootDatabase, db: &RootDatabase,
hir_file: HirFileId, hir_file: HirFileId,
value: &SyntaxNode, value: &SyntaxNode,
name: Option<impl AstNode>, name: Option<impl AstNode>,
) -> (FileId, TextRange, Option<TextRange>) { ) -> (FileId, TextRange, Option<TextRange>) {
let FileRange { file_id, range: full_range } = let FileRange { file_id, range } =
InFile::new(hir_file, value).original_file_range(db); match InFile::new(hir_file, value).original_file_range_opt(db) {
Some((range, ctxt)) if ctxt.is_root() => range,
_ => db
.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
.kind
.original_call_range(db),
};
let focus_range = name let focus_range = name
.and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db)) .and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db))
.and_then(|range| if range.file_id == file_id { Some(range.range) } else { None }); .filter(|(frange, ctxt)| {
ctxt.is_root() && frange.file_id == file_id && range.contains_range(frange.range)
})
.map(|(frange, _ctxt)| frange.range);
(file_id, full_range, focus_range) (file_id, range, focus_range)
} }
#[cfg(test)] #[cfg(test)]
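A condensed, runnable sketch of the focus-range policy `orig_range_with_focus` now enforces (stand-in types): the name's range is kept only when it maps into the same file as the full range, originates from the root syntax context, and lies inside the full range.

#[derive(Clone, Copy)]
struct TextRange { start: u32, end: u32 }
impl TextRange {
    fn contains_range(self, other: TextRange) -> bool {
        self.start <= other.start && other.end <= self.end
    }
}

fn focus_range(
    file_id: u32,
    full_range: TextRange,
    name: Option<(u32, TextRange, bool /* ctx.is_root() */)>,
) -> Option<TextRange> {
    name.filter(|&(f, r, is_root)| {
        is_root && f == file_id && full_range.contains_range(r)
    })
    .map(|(_, r, _)| r)
}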

View File

@ -4,7 +4,7 @@
//! tests. This module also implements a couple of magic tricks, like renaming //! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and to `self` (to switch between associated function and method). //! `self` and to `self` (to switch between associated function and method).
use hir::{AsAssocItem, InFile, Semantics}; use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{ use ide_db::{
base_db::FileId, base_db::FileId,
defs::{Definition, NameClass, NameRefClass}, defs::{Definition, NameClass, NameRefClass},

View File

@ -2,7 +2,7 @@ use std::fmt;
use ast::HasName; use ast::HasName;
use cfg::CfgExpr; use cfg::CfgExpr;
use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics}; use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics};
use ide_assists::utils::test_related_attribute; use ide_assists::utils::test_related_attribute;
use ide_db::{ use ide_db::{
base_db::{FilePosition, FileRange}, base_db::{FilePosition, FileRange},

View File

@ -3,7 +3,7 @@
use std::collections::HashMap; use std::collections::HashMap;
use hir::{db::HirDatabase, Crate, Module}; use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide_db::helpers::get_definition; use ide_db::helpers::get_definition;
use ide_db::{ use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt}, base_db::{FileId, FileRange, SourceDatabaseExt},
@ -243,6 +243,7 @@ mod tests {
} }
} }
#[track_caller]
fn check_definitions(ra_fixture: &str) { fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture); let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis); let s = StaticIndex::compute(&analysis);

View File

@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData};
use hir::{ use hir::{
db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery}, db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
Attr, Attrs, ExpandResult, MacroFile, Module, Attr, Attrs, ExpandResult, MacroFileId, Module,
}; };
use ide_db::{ use ide_db::{
base_db::{ base_db::{
@ -199,8 +199,12 @@ impl StatCollect<FileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
} }
} }
impl<M> StatCollect<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> { impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
fn collect_entry(&mut self, _: MacroFile, value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>) { fn collect_entry(
&mut self,
_: MacroFileId,
value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
) {
self.total += 1; self.total += 1;
self.retained += value.is_some() as usize; self.retained += value.is_some() as usize;
} }

View File

@ -1,6 +1,6 @@
//! Computes color for a single element. //! Computes color for a single element.
use hir::{AsAssocItem, HasVisibility, Semantics}; use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics};
use ide_db::{ use ide_db::{
defs::{Definition, IdentClass, NameClass, NameRefClass}, defs::{Definition, IdentClass, NameClass, NameRefClass},
FxHashMap, RootDatabase, SymbolKind, FxHashMap, RootDatabase, SymbolKind,

View File

@ -127,7 +127,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">+</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">+</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">27</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">27</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">-</span><span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span>, <span class="numeric_literal macro">-</span><span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="numeric_literal">0</span><span class="numeric_literal">10</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">27</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="numeric_literal">0</span><span class="numeric_literal">10</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">27</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis macro">)</span><span class="semicolon">;</span> <span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>

View File

@ -4,19 +4,19 @@
// to run rust-analyzer as a library. // to run rust-analyzer as a library.
use std::{collections::hash_map::Entry, mem, path::Path, sync}; use std::{collections::hash_map::Entry, mem, path::Path, sync};
use ::tt::token_id as tt;
use crossbeam_channel::{unbounded, Receiver}; use crossbeam_channel::{unbounded, Receiver};
use ide::{AnalysisHost, Change, SourceRoot}; use ide::{AnalysisHost, Change, SourceRoot};
use ide_db::{ use ide_db::{
base_db::{ base_db::{
CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind, span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroLoadResult, ProcMacros, ProcMacroKind, ProcMacroLoadResult, ProcMacros,
}, },
FxHashMap, FxHashMap,
}; };
use itertools::Itertools; use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer}; use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
use tt::DelimSpan;
use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
pub struct LoadCargoConfig { pub struct LoadCargoConfig {
@ -374,12 +374,15 @@ struct Expander(proc_macro_api::ProcMacro);
impl ProcMacroExpander for Expander { impl ProcMacroExpander for Expander {
fn expand( fn expand(
&self, &self,
subtree: &tt::Subtree, subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree>, attrs: Option<&tt::Subtree<SpanData>>,
env: &Env, env: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> { def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
match self.0.expand(subtree, attrs, env) { match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
Ok(Ok(subtree)) => Ok(subtree), Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
@ -394,10 +397,13 @@ struct IdentityExpander;
impl ProcMacroExpander for IdentityExpander { impl ProcMacroExpander for IdentityExpander {
fn expand( fn expand(
&self, &self,
subtree: &tt::Subtree, subtree: &tt::Subtree<SpanData>,
_: Option<&tt::Subtree>, _: Option<&tt::Subtree<SpanData>>,
_: &Env, _: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> { _: SpanData,
_: SpanData,
_: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone()) Ok(subtree.clone())
} }
} }
@ -409,11 +415,14 @@ struct EmptyExpander;
impl ProcMacroExpander for EmptyExpander { impl ProcMacroExpander for EmptyExpander {
fn expand( fn expand(
&self, &self,
_: &tt::Subtree, _: &tt::Subtree<SpanData>,
_: Option<&tt::Subtree>, _: Option<&tt::Subtree<SpanData>>,
_: &Env, _: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> { call_site: SpanData,
Ok(tt::Subtree::empty()) _: SpanData,
_: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }))
} }
} }
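The three extra parameters are where the span rewrite meets the proc-macro boundary. A self-contained sketch of the new contract, with a stand-in span type and a drastically simplified subtree (the real trait also threads `Env` through):

#[derive(Clone, Copy)]
struct SpanData; // stand-in for the real span type

#[derive(Clone)]
struct Subtree<S> { spans: Vec<S> } // drastically simplified

trait ProcMacroExpander {
    fn expand(
        &self,
        subtree: &Subtree<SpanData>,
        attrs: Option<&Subtree<SpanData>>,
        def_site: SpanData,   // span at the macro's definition
        call_site: SpanData,  // span at the invocation
        mixed_site: SpanData, // `macro_rules!`-style mixed hygiene
    ) -> Result<Subtree<SpanData>, String>;
}

struct IdentityExpander;
impl ProcMacroExpander for IdentityExpander {
    fn expand(
        &self,
        subtree: &Subtree<SpanData>,
        _: Option<&Subtree<SpanData>>,
        _: SpanData,
        _: SpanData,
        _: SpanData,
    ) -> Result<Subtree<SpanData>, String> {
        Ok(subtree.clone())
    }
}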

View File

@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{ use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator}, parser::{MetaVarKind, Op, RepeatKind, Separator},
syntax_node_to_token_tree, tt, DeclarativeMacro, syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
}; };
#[test] #[test]
@ -38,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations invocations
.into_iter() .into_iter()
.map(|(id, tt)| { .map(|(id, tt)| {
let res = rules[&id].expand(tt); let res = rules[&id].expand(&tt, |_| ());
assert!(res.err.is_none()); assert!(res.err.is_none());
res.value.token_trees.len() res.value.token_trees.len()
}) })
@ -47,14 +47,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413); assert_eq!(hash, 69413);
} }
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> { fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt() macro_rules_fixtures_tt()
.into_iter() .into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true))) .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect() .collect()
} }
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> { fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
let fixture = bench_fixture::numerous_macro_rules(); let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap(); let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@ -64,14 +64,17 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
.filter_map(ast::MacroRules::cast) .filter_map(ast::MacroRules::cast)
.map(|rule| { .map(|rule| {
let id = rule.name().unwrap().to_string(); let id = rule.name().unwrap().to_string();
let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax()); let def_tt =
syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
(id, def_tt) (id, def_tt)
}) })
.collect() .collect()
} }
/// Generate random invocation fixtures from rules /// Generate random invocation fixtures from rules
fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> { fn invocation_fixtures(
rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
let mut seed = 123456789; let mut seed = 123456789;
let mut res = Vec::new(); let mut res = Vec::new();
@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
loop { loop {
let mut subtree = tt::Subtree { let mut subtree = tt::Subtree {
delimiter: tt::Delimiter { delimiter: tt::Delimiter {
open: tt::TokenId::UNSPECIFIED, open: DUMMY,
close: tt::TokenId::UNSPECIFIED, close: DUMMY,
kind: tt::DelimiterKind::Invisible, kind: tt::DelimiterKind::Invisible,
}, },
token_trees: vec![], token_trees: vec![],
@ -102,7 +105,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() { for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed); collect_from_op(op, &mut subtree, &mut seed);
} }
if it.expand(subtree.clone()).err.is_none() { if it.expand(&subtree, |_| ()).err.is_none() {
res.push((name.clone(), subtree)); res.push((name.clone(), subtree));
break; break;
} }
@ -116,7 +119,11 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
} }
return res; return res;
fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) { fn collect_from_op(
op: &Op<DummyTestSpanData>,
parent: &mut tt::Subtree<DummyTestSpanData>,
seed: &mut usize,
) {
return match op { return match op {
Op::Var { kind, .. } => match kind.as_ref() { Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")), Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@ -202,38 +209,21 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c); *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed *seed
} }
fn make_ident(ident: &str) -> tt::TokenTree { fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
tt::Leaf::Ident(tt::Ident { tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
span: tt::TokenId::unspecified(),
text: SmolStr::new(ident),
})
.into()
} }
fn make_punct(char: char) -> tt::TokenTree { fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
tt::Leaf::Punct(tt::Punct { tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
span: tt::TokenId::unspecified(),
char,
spacing: tt::Spacing::Alone,
})
.into()
} }
fn make_literal(lit: &str) -> tt::TokenTree { fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
tt::Leaf::Literal(tt::Literal { tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
span: tt::TokenId::unspecified(),
text: SmolStr::new(lit),
})
.into()
} }
fn make_subtree( fn make_subtree(
kind: tt::DelimiterKind, kind: tt::DelimiterKind,
token_trees: Option<Vec<tt::TokenTree>>, token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
) -> tt::TokenTree { ) -> tt::TokenTree<DummyTestSpanData> {
tt::Subtree { tt::Subtree {
delimiter: tt::Delimiter { delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
open: tt::TokenId::unspecified(),
close: tt::TokenId::unspecified(),
kind,
},
token_trees: token_trees.unwrap_or_default(), token_trees: token_trees.unwrap_or_default(),
} }
.into() .into()
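The new `|_| ()` argument to `expand` is a span marker. A sketch of its assumed role: transcription hands each produced span to the caller, which can stamp it (for instance with the invocation's syntax context); the benchmark passes a no-op.

fn transcribe<S>(output_spans: &mut [S], marker: impl Fn(&mut S) + Copy) {
    for span in output_spans.iter_mut() {
        marker(span); // e.g. stamp the span with the call site's context
    }
}

fn demo() {
    let mut spans = [0u32; 4];
    transcribe(&mut spans, |_| ());      // no-op marker, as in the benchmark
    transcribe(&mut spans, |s| *s += 1); // a marker that stamps every span
}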

View File

@@ -7,15 +7,17 @@ mod transcriber;

 use rustc_hash::FxHashMap;
 use syntax::SmolStr;
+use tt::Span;

-use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
+use crate::{parser::MetaVarKind, ExpandError, ExpandResult};

-pub(crate) fn expand_rules(
-    rules: &[crate::Rule],
-    input: &tt::Subtree,
+pub(crate) fn expand_rules<S: Span>(
+    rules: &[crate::Rule<S>],
+    input: &tt::Subtree<S>,
+    marker: impl Fn(&mut S) + Copy,
     is_2021: bool,
-) -> ExpandResult<tt::Subtree> {
-    let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
+) -> ExpandResult<tt::Subtree<S>> {
+    let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
     for rule in rules {
         let new_match = matcher::match_(&rule.lhs, input, is_2021);
@@ -24,7 +26,7 @@ pub(crate) fn expand_rules(
             // Unconditionally returning the transcription here makes the
             // `test_repeat_bad_var` test fail.
             let ExpandResult { value, err: transcribe_err } =
-                transcriber::transcribe(&rule.rhs, &new_match.bindings);
+                transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
             if transcribe_err.is_none() {
                 return ExpandResult::ok(value);
             }
@@ -43,11 +45,11 @@ pub(crate) fn expand_rules(
     if let Some((match_, rule)) = match_ {
         // if we got here, there was no match without errors
         let ExpandResult { value, err: transcribe_err } =
-            transcriber::transcribe(&rule.rhs, &match_.bindings);
+            transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
         ExpandResult { value, err: match_.err.or(transcribe_err) }
     } else {
         ExpandResult::new(
-            tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
+            tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
             ExpandError::NoMatchingRule,
         )
    }
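
For orientation, the loop above implements a simple selection strategy: the first rule whose match *and* transcription are both error-free wins immediately; otherwise the match with the fewest errors is expanded best-effort. A reduced sketch with stand-in types (not the crate's API):

```rust
// Stand-in for matcher::Match: `clean` means both match and transcription
// succeeded without errors.
struct Candidate {
    clean: bool,
    err_count: usize,
}

// First clean candidate wins; otherwise keep the least-broken one.
fn select(candidates: Vec<Candidate>) -> Option<Candidate> {
    let mut best: Option<Candidate> = None;
    for c in candidates {
        if c.clean {
            return Some(c); // early return, mirrors `ExpandResult::ok(value)`
        }
        if best.as_ref().map_or(true, |b| c.err_count < b.err_count) {
            best = Some(c);
        }
    }
    best // None corresponds to ExpandError::NoMatchingRule
}

fn main() {
    let picked = select(vec![
        Candidate { clean: false, err_count: 2 },
        Candidate { clean: true, err_count: 0 },
    ]);
    assert!(picked.unwrap().clean);
}
```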
@@ -98,23 +100,29 @@ pub(crate) fn expand_rules(
 /// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
 /// `tt::TokenTree`, where the index to select a particular `TokenTree` among
 /// many is not a plain `usize`, but a `&[usize]`.
-#[derive(Debug, Default, Clone, PartialEq, Eq)]
-struct Bindings {
-    inner: FxHashMap<SmolStr, Binding>,
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct Bindings<S> {
+    inner: FxHashMap<SmolStr, Binding<S>>,
+}
+
+impl<S> Default for Bindings<S> {
+    fn default() -> Self {
+        Self { inner: Default::default() }
+    }
 }
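
The hand-rolled `Default` impl here (and the similar ones further down for `Match` and `BindingsBuilder`) is not noise: `#[derive(Default)]` on a generic type adds an `S: Default` bound that span types don't satisfy. A minimal illustration of the difference:

```rust
use std::collections::HashMap;

struct Bindings<S> {
    inner: HashMap<String, S>,
}

// `#[derive(Default)]` would demand `S: Default` even though an empty map
// needs no default `S`; writing the impl by hand drops that bound.
impl<S> Default for Bindings<S> {
    fn default() -> Self {
        Self { inner: Default::default() }
    }
}

struct SpanWithoutDefault; // deliberately has no Default impl

fn main() {
    let b: Bindings<SpanWithoutDefault> = Bindings::default(); // still compiles
    assert!(b.inner.is_empty());
}
```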
 #[derive(Debug, Clone, PartialEq, Eq)]
-enum Binding {
-    Fragment(Fragment),
-    Nested(Vec<Binding>),
+enum Binding<S> {
+    Fragment(Fragment<S>),
+    Nested(Vec<Binding<S>>),
     Empty,
     Missing(MetaVarKind),
 }

 #[derive(Debug, Clone, PartialEq, Eq)]
-enum Fragment {
+enum Fragment<S> {
     /// token fragments are just copy-pasted into the output
-    Tokens(tt::TokenTree),
+    Tokens(tt::TokenTree<S>),
     /// Expr ast fragments are surrounded with `()` on insertion to preserve
     /// precedence. Note that this impl is different from the one currently in
     /// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
@@ -122,7 +130,7 @@ enum Fragment {
     /// At one point in time, we tried to use "fake" delimiters here à la
     /// proc-macro delimiter=none. As we later discovered, "none" delimiters are
     /// tricky to handle in the parser, and rustc doesn't handle those either.
-    Expr(tt::TokenTree),
+    Expr(tt::Subtree<S>),
     /// There are roughly two types of paths: paths in expression context, where a
     /// separator `::` between an identifier and its following generic argument list
     /// is mandatory, and paths in type context, where `::` can be omitted.
@@ -132,5 +140,5 @@ enum Fragment {
     /// and is trasncribed as an expression-context path, verbatim transcription
     /// would cause a syntax error. We need to fix it up just before transcribing;
     /// see `transcriber::fix_up_and_push_path_tt()`.
-    Path(tt::TokenTree),
+    Path(tt::Subtree<S>),
 }
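
The `()` wrapping that the `Fragment::Expr` docs describe is about operator precedence. A self-contained `macro_rules!` example of the bug it prevents (plain Rust, nothing rust-analyzer-specific):

```rust
macro_rules! double {
    ($e:expr) => {
        $e * 2
    };
}

fn main() {
    // The captured expression is kept as one unit, conceptually `(1 + 1) * 2`,
    // so this prints 4. If `1 + 1` were pasted back as loose tokens, the
    // expansion would read `1 + 1 * 2` and print 3 instead.
    println!("{}", double!(1 + 1));
}
```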


@@ -63,21 +63,21 @@ use std::rc::Rc;

 use smallvec::{smallvec, SmallVec};
 use syntax::SmolStr;
+use tt::Span;

 use crate::{
     expander::{Binding, Bindings, ExpandResult, Fragment},
     parser::{MetaVarKind, Op, RepeatKind, Separator},
-    tt,
     tt_iter::TtIter,
     ExpandError, MetaTemplate, ValueResult,
 };

-impl Bindings {
+impl<S: Span> Bindings<S> {
     fn push_optional(&mut self, name: &SmolStr) {
         // FIXME: Do we have a better way to represent an empty token ?
         // Insert an empty subtree for empty token
         let tt =
-            tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into();
+            tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
         self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
     }
@@ -85,14 +85,14 @@ impl Bindings {
         self.inner.insert(name.clone(), Binding::Empty);
     }

-    fn bindings(&self) -> impl Iterator<Item = &Binding> {
+    fn bindings(&self) -> impl Iterator<Item = &Binding<S>> {
         self.inner.values()
     }
 }

-#[derive(Clone, Debug, Default, PartialEq, Eq)]
-pub(super) struct Match {
-    pub(super) bindings: Bindings,
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub(super) struct Match<S> {
+    pub(super) bindings: Bindings<S>,
     /// We currently just keep the first error and count the rest to compare matches.
     pub(super) err: Option<ExpandError>,
     pub(super) err_count: usize,
@@ -102,7 +102,19 @@ pub(super) struct Match {
     pub(super) bound_count: usize,
 }

-impl Match {
+impl<S> Default for Match<S> {
+    fn default() -> Self {
+        Self {
+            bindings: Default::default(),
+            err: Default::default(),
+            err_count: Default::default(),
+            unmatched_tts: Default::default(),
+            bound_count: Default::default(),
+        }
+    }
+}
+
+impl<S> Match<S> {
     fn add_err(&mut self, err: ExpandError) {
         let prev_err = self.err.take();
         self.err = prev_err.or(Some(err));
@@ -111,12 +123,16 @@ impl Match {
 }

 /// Matching errors are added to the `Match`.
-pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match {
+pub(super) fn match_<S: Span>(
+    pattern: &MetaTemplate<S>,
+    input: &tt::Subtree<S>,
+    is_2021: bool,
+) -> Match<S> {
     let mut res = match_loop(pattern, input, is_2021);
     res.bound_count = count(res.bindings.bindings());
     return res;

-    fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
+    fn count<'a, S: 'a>(bindings: impl Iterator<Item = &'a Binding<S>>) -> usize {
         bindings
             .map(|it| match it {
                 Binding::Fragment(_) => 1,
@@ -129,10 +145,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool)
 }

 #[derive(Debug, Clone)]
-enum BindingKind {
+enum BindingKind<S> {
     Empty(SmolStr),
     Optional(SmolStr),
-    Fragment(SmolStr, Fragment),
+    Fragment(SmolStr, Fragment<S>),
     Missing(SmolStr, MetaVarKind),
     Nested(usize, usize),
 }
@@ -146,13 +162,18 @@ enum LinkNode<T> {
     Parent { idx: usize, len: usize },
 }

-#[derive(Default)]
-struct BindingsBuilder {
-    nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
+struct BindingsBuilder<S> {
+    nodes: Vec<Vec<LinkNode<Rc<BindingKind<S>>>>>,
     nested: Vec<Vec<LinkNode<usize>>>,
 }

-impl BindingsBuilder {
+impl<S> Default for BindingsBuilder<S> {
+    fn default() -> Self {
+        Self { nodes: Default::default(), nested: Default::default() }
+    }
+}
+
+impl<S: Span> BindingsBuilder<S> {
     fn alloc(&mut self) -> BindingsIdx {
         let idx = self.nodes.len();
         self.nodes.push(Vec::new());
@@ -189,7 +210,7 @@ impl BindingsBuilder {
         self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
     }

-    fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
+    fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment<S>) {
         self.nodes[idx.0]
             .push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
     }
@@ -210,11 +231,11 @@ impl BindingsBuilder {
         idx.0 = new_idx;
     }

-    fn build(self, idx: &BindingsIdx) -> Bindings {
+    fn build(self, idx: &BindingsIdx) -> Bindings<S> {
         self.build_inner(&self.nodes[idx.0])
     }

-    fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind>>]) -> Bindings {
+    fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<S>>>]) -> Bindings<S> {
         let mut bindings = Bindings::default();
         let mut nodes = Vec::new();
         self.collect_nodes(link_nodes, &mut nodes);
@@ -264,7 +285,7 @@ impl BindingsBuilder {
         &'a self,
         id: usize,
         len: usize,
-        nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind>>]>,
+        nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind<S>>>]>,
     ) {
         self.nested[id].iter().take(len).for_each(|it| match it {
             LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
@@ -272,7 +293,7 @@ impl BindingsBuilder {
         });
     }

-    fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
+    fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<S>>) {
         let last = &self.nodes[idx];
         let mut nested_refs: Vec<&[_]> = Vec::new();
         self.nested[nested_idx].iter().for_each(|it| match *it {
@@ -283,7 +304,7 @@ impl BindingsBuilder {
         nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
     }

-    fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) {
+    fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind<S>>) {
         self.nodes[id].iter().take(len).for_each(|it| match it {
             LinkNode::Node(it) => nodes.push(it),
             LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
@@ -292,8 +313,8 @@ impl BindingsBuilder {
     fn collect_nodes<'a>(
         &'a self,
-        link_nodes: &'a [LinkNode<Rc<BindingKind>>],
-        nodes: &mut Vec<&'a BindingKind>,
+        link_nodes: &'a [LinkNode<Rc<BindingKind<S>>>],
+        nodes: &mut Vec<&'a BindingKind<S>>,
     ) {
         link_nodes.iter().for_each(|it| match it {
             LinkNode::Node(it) => nodes.push(it),
@@ -303,22 +324,22 @@ impl BindingsBuilder {
 }

 #[derive(Debug, Clone)]
-struct MatchState<'t> {
+struct MatchState<'t, S> {
     /// The position of the "dot" in this matcher
-    dot: OpDelimitedIter<'t>,
+    dot: OpDelimitedIter<'t, S>,

     /// Token subtree stack
     /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
     /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
     /// that where the bottom of the stack is the outermost matcher.
-    stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
+    stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>,

     /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
     /// before we enter the repetition.
-    up: Option<Box<MatchState<'t>>>,
+    up: Option<Box<MatchState<'t, S>>>,

     /// The separator if we are in a repetition.
-    sep: Option<Separator>,
+    sep: Option<Separator<S>>,

     /// The KleeneOp of this sequence if we are in a repetition.
     sep_kind: Option<RepeatKind>,
@@ -330,7 +351,7 @@ struct MatchState<'t> {
     bindings: BindingsIdx,

     /// Cached result of meta variable parsing
-    meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
+    meta_result: Option<(TtIter<'t, S>, ExpandResult<Option<Fragment<S>>>)>,

     /// Is error occurred in this state, will `poised` to "parent"
     is_error: bool,
@@ -355,16 +376,16 @@ struct MatchState<'t> {
 /// - `bb_items`: the set of items that are waiting for the black-box parser.
 /// - `error_items`: the set of items in errors, used for error-resilient parsing
 #[inline]
-fn match_loop_inner<'t>(
-    src: TtIter<'t>,
-    stack: &[TtIter<'t>],
-    res: &mut Match,
-    bindings_builder: &mut BindingsBuilder,
-    cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
-    bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
-    next_items: &mut Vec<MatchState<'t>>,
-    eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
-    error_items: &mut SmallVec<[MatchState<'t>; 1]>,
+fn match_loop_inner<'t, S: Span>(
+    src: TtIter<'t, S>,
+    stack: &[TtIter<'t, S>],
+    res: &mut Match<S>,
+    bindings_builder: &mut BindingsBuilder<S>,
+    cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+    bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+    next_items: &mut Vec<MatchState<'t, S>>,
+    eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
+    error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
     is_2021: bool,
 ) {
     macro_rules! try_push {
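
The signature above is the heart of an NFA-style matcher: every `MatchState` is a dot position in one candidate rule, and each input token moves live states between the `cur`/`next`/`bb`/`eof`/`error` buckets. A heavily reduced sketch of that shape (illustrative names, not the crate's API):

```rust
// Each state is a position ("dot") in some pattern; advancing a token
// moves states between buckets, mirroring cur/next/eof/error items.
#[derive(Clone)]
struct State {
    dot: usize,
    pattern: &'static [&'static str],
}

fn advance(token: &str, cur: Vec<State>, next: &mut Vec<State>, eof: &mut Vec<State>) {
    for mut st in cur {
        match st.pattern.get(st.dot) {
            Some(expected) if *expected == token => {
                st.dot += 1;
                next.push(st); // consumed the token, keep the state alive
            }
            None => eof.push(st), // pattern exhausted: candidate full match
            Some(_) => {}         // mismatch: state dies (error items in the real code)
        }
    }
}

fn main() {
    let pattern: &[&str] = &["fn", "name"];
    let mut cur = vec![State { dot: 0, pattern }];
    for token in ["fn", "name"] {
        let (mut next, mut eof) = (Vec::new(), Vec::new());
        advance(token, std::mem::take(&mut cur), &mut next, &mut eof);
        cur = next;
        let _ = eof;
    }
    assert_eq!(cur[0].dot, 2);
}
```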
@@ -468,7 +489,7 @@ fn match_loop_inner<'t>(
                 if let Ok(subtree) = src.clone().expect_subtree() {
                     if subtree.delimiter.kind == delimiter.kind {
                         item.stack.push(item.dot);
-                        item.dot = tokens.iter_delimited(Some(delimiter));
+                        item.dot = tokens.iter_delimited(Some(*delimiter));
                         cur_items.push(item);
                     }
                 }
@@ -587,9 +608,9 @@ fn match_loop_inner<'t>(
     }
 }

-fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match {
+fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
     let mut src = TtIter::new(src);
-    let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
+    let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
     let mut res = Match::default();
     let mut error_recover_item = None;
@@ -736,16 +757,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match
     }
 }

-fn match_meta_var(
+fn match_meta_var<S: Span>(
     kind: MetaVarKind,
-    input: &mut TtIter<'_>,
+    input: &mut TtIter<'_, S>,
     is_2021: bool,
-) -> ExpandResult<Option<Fragment>> {
+) -> ExpandResult<Option<Fragment<S>>> {
     let fragment = match kind {
         MetaVarKind::Path => {
             return input
                 .expect_fragment(parser::PrefixEntryPoint::Path)
-                .map(|it| it.map(Fragment::Path));
+                .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
         }
         MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
         MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -771,9 +792,21 @@ fn match_meta_var(
             }
             _ => {}
         };
-        return input
-            .expect_fragment(parser::PrefixEntryPoint::Expr)
-            .map(|tt| tt.map(Fragment::Expr));
+        return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
+            tt.map(|tt| match tt {
+                tt::TokenTree::Leaf(leaf) => tt::Subtree {
+                    delimiter: tt::Delimiter::dummy_invisible(),
+                    token_trees: vec![leaf.into()],
+                },
+                tt::TokenTree::Subtree(mut s) => {
+                    if s.delimiter.kind == tt::DelimiterKind::Invisible {
+                        s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+                    }
+                    s
+                }
+            })
+            .map(Fragment::Expr)
+        });
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
             let tt_result = match kind {
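
The new `Expr` arm above normalizes what an expression fragment carries: a lone leaf is wrapped in an *invisible* subtree, while a subtree that already has an invisible delimiter is promoted to real parentheses so precedence survives re-parsing; this is tied to the PR checklist item about the "sudden `()` expression wrapping". A rough stand-in for that normalization:

```rust
// Stand-in mirror of the Expr normalization above: leaves get an
// invisible wrapper, invisible subtrees get promoted to parentheses.
#[derive(Debug, PartialEq)]
enum DelimKind {
    Invisible,
    Parenthesis,
}

#[derive(Debug)]
enum Tt {
    Leaf(String),
    Subtree { delim: DelimKind, children: Vec<Tt> },
}

fn normalize_expr(tt: Tt) -> Tt {
    match tt {
        Tt::Leaf(l) => Tt::Subtree { delim: DelimKind::Invisible, children: vec![Tt::Leaf(l)] },
        Tt::Subtree { delim, children } => {
            let delim =
                if delim == DelimKind::Invisible { DelimKind::Parenthesis } else { delim };
            Tt::Subtree { delim, children }
        }
    }
}

fn main() {
    // `1` stays effectively bare; `1 + 1` ends up as `(1 + 1)`.
    println!("{:?}", normalize_expr(Tt::Leaf("1".into())));
    println!("{:?}", normalize_expr(Tt::Subtree {
        delim: DelimKind::Invisible,
        children: vec![Tt::Leaf("1".into()), Tt::Leaf("+".into()), Tt::Leaf("1".into())],
    }));
}
```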
@@ -796,7 +829,7 @@ fn match_meta_var(
                     match neg {
                         None => lit.into(),
                         Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
-                            delimiter: tt::Delimiter::unspecified(),
+                            delimiter: tt::Delimiter::dummy_invisible(),
                             token_trees: vec![neg, lit.into()],
                         }),
                     }
@@ -811,7 +844,7 @@ fn match_meta_var(
     input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
 }

-fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
+fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate<S>) {
     for op in pattern.iter() {
         match op {
             Op::Var { name, .. } => collector_fun(name.clone()),
@@ -824,38 +857,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
         }
     }
 }

-impl MetaTemplate {
-    fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
+impl<S: Span> MetaTemplate<S> {
+    fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
         OpDelimitedIter {
             inner: &self.0,
             idx: 0,
-            delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED),
+            delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
         }
     }
 }
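
`iter_delimited` folds the surrounding delimiter into the op stream: a visibly delimited matcher yields `Open`, then its ops, then `Close`, while an invisible delimiter contributes nothing, which is what the `is_eof`/`peek` arithmetic below encodes. A compact stand-in:

```rust
#[derive(Debug)]
enum Item<'a> {
    Open,
    Op(&'a str),
    Close,
}

// For a visibly delimited pattern, the iterator surface is Open..ops..Close;
// invisible delimiters contribute nothing.
fn iter_delimited<'a>(ops: &'a [&'a str], visible: bool) -> Vec<Item<'a>> {
    let mut out = Vec::new();
    if visible {
        out.push(Item::Open);
    }
    out.extend(ops.iter().map(|op| Item::Op(*op)));
    if visible {
        out.push(Item::Close);
    }
    out
}

fn main() {
    // `($x:ident)` walks Open, the op, Close; a bare `$x:ident` walks just the op.
    assert_eq!(iter_delimited(&["$x:ident"], true).len(), 3);
    assert_eq!(iter_delimited(&["$x:ident"], false).len(), 1);
}
```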
 #[derive(Debug, Clone, Copy)]
-enum OpDelimited<'a> {
-    Op(&'a Op),
+enum OpDelimited<'a, S> {
+    Op(&'a Op<S>),
     Open,
     Close,
 }

 #[derive(Debug, Clone, Copy)]
-struct OpDelimitedIter<'a> {
-    inner: &'a [Op],
-    delimited: &'a tt::Delimiter,
+struct OpDelimitedIter<'a, S> {
+    inner: &'a [Op<S>],
+    delimited: tt::Delimiter<S>,
     idx: usize,
 }

-impl<'a> OpDelimitedIter<'a> {
+impl<'a, S: Span> OpDelimitedIter<'a, S> {
     fn is_eof(&self) -> bool {
         let len = self.inner.len()
             + if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
         self.idx >= len
     }

-    fn peek(&self) -> Option<OpDelimited<'a>> {
+    fn peek(&self) -> Option<OpDelimited<'a, S>> {
         match self.delimited.kind {
             tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op),
             _ => match self.idx {
@@ -871,8 +904,8 @@ impl<'a> OpDelimitedIter<'a> {
     }
 }

-impl<'a> Iterator for OpDelimitedIter<'a> {
-    type Item = OpDelimited<'a>;
+impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> {
+    type Item = OpDelimited<'a, S>;

     fn next(&mut self) -> Option<Self::Item> {
         let res = self.peek();
@@ -888,8 +921,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
     }
 }

-impl TtIter<'_> {
-    fn expect_separator(&mut self, separator: &Separator) -> bool {
+impl<S: Span> TtIter<'_, S> {
+    fn expect_separator(&mut self, separator: &Separator<S>) -> bool {
         let mut fork = self.clone();
         let ok = match separator {
             Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@@ -919,7 +952,7 @@ impl TtIter<'_> {
         ok
     }

-    fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
+    fn expect_tt(&mut self) -> Result<tt::TokenTree<S>, ()> {
         if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
             if punct.char == '\'' {
                 self.expect_lifetime()
@@ -927,7 +960,7 @@ impl TtIter<'_> {
                 let puncts = self.expect_glued_punct()?;
                 let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
                 Ok(tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: tt::Delimiter::unspecified(),
+                    delimiter: tt::Delimiter::dummy_invisible(),
                     token_trees,
                 }))
             }
@@ -936,7 +969,7 @@ impl TtIter<'_> {
         }
     }

-    fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
+    fn expect_lifetime(&mut self) -> Result<tt::TokenTree<S>, ()> {
         let punct = self.expect_single_punct()?;
         if punct.char != '\'' {
             return Err(());
@@ -944,7 +977,7 @@ impl TtIter<'_> {
         let ident = self.expect_ident_or_underscore()?;

         Ok(tt::Subtree {
-            delimiter: tt::Delimiter::unspecified(),
+            delimiter: tt::Delimiter::dummy_invisible(),
             token_trees: vec![
                 tt::Leaf::Punct(*punct).into(),
                 tt::Leaf::Ident(ident.clone()).into(),
@@ -953,7 +986,7 @@ impl TtIter<'_> {
         .into())
     }

-    fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
+    fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<S>> {
         let mut fork = self.clone();
         match fork.expect_char(c) {
             Ok(_) => {
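
`eat_char` (and `expect_separator` above) use the crate's recurring fork-and-commit idiom: clone the cursor, parse speculatively on the clone, and overwrite the real cursor only on success. Sketched over a plain slice iterator:

```rust
// Speculative parsing over a cloneable cursor: try on a fork, commit on success.
fn expect_char(iter: &mut std::slice::Iter<'_, char>, c: char) -> Result<(), ()> {
    match iter.clone().next() {
        Some(&next) if next == c => {
            iter.next();
            Ok(())
        }
        _ => Err(()),
    }
}

fn eat_char(iter: &mut std::slice::Iter<'_, char>, c: char) -> bool {
    let mut fork = iter.clone();
    if expect_char(&mut fork, c).is_ok() {
        *iter = fork; // commit: the lookahead succeeded
        true
    } else {
        false // the original cursor is untouched
    }
}

fn main() {
    let chars = ['a', 'b'];
    let mut iter = chars.iter();
    assert!(eat_char(&mut iter, 'a'));
    assert!(!eat_char(&mut iter, 'x')); // failed lookahead consumes nothing
    assert!(eat_char(&mut iter, 'b'));
}
```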

Some files were not shown because too many files have changed in this diff.