Auto merge of #15959 - Veykril:macro-shower3, r=lnicola

TokenMap -> SpanMap rewrite

Opening early so I can get an overview of the full diff more easily; still very unfinished, with lots of work to be done.

The gist of what this PR does is move away from assigning IDs to tokens in arguments and expansions, and instead give the subtrees the text ranges they are sourced from (made relative to some enclosing item for incrementality). This means we now have only a single map per expansion, as opposed to one map for the expansion and one for the arguments.
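
Roughly, the new span shape looks like this (a simplified sketch modeled on `crates/base-db/src/span.rs` from this diff; the real `tt::SpanData` is generic over the anchor and context types):

```rust
// Simplified sketch: instead of assigning a TokenId to every token and
// keeping side tables, each token carries a span saying where it came from.
pub struct FileId(pub u32);
pub struct ErasedFileAstId(pub u32);

/// The file and item that the range below is measured relative to.
pub struct SpanAnchor {
    pub file_id: FileId,
    pub ast_id: ErasedFileAstId,
}

pub struct SpanData {
    /// Text range relative to `anchor`, so unrelated edits in the file
    /// do not invalidate it (this is what buys incrementality).
    pub range: std::ops::Range<u32>,
    pub anchor: SpanAnchor,
    /// Hygiene information; stands in for `SyntaxContextId`.
    pub ctx: u32,
}
```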

A few of the things that are not done yet (in arbitrary order):
- [x] generally clean up the current mess
- [x] proc-macros, which have been completely ignored so far
- [x] syntax fixups: commented out for the time being; needs to be rewritten on top of some marker SyntaxContextId
- [x] macro invocation syntax contexts are not properly passed around yet, so $crate hygiene does not work in all cases (though it does in most)
  - [x] builtin macros do not set spans properly; $crate (which we use in them) basically does not work with them right now
~~- [ ] remove all uses of dummy spans (or, if that does not work, change the dummy entries for dummy spans so that tests will not silently pass due to having a file id for the dummy file)~~
  - [x] de-queryfy `macro_expand`; its sole caller is `parse_macro_expansion`, and both are lru-cached with the same limit, so having it be a query is pointless
- [x] docs and more docs
- [x] fix eager macro spans and other stuff
  - [x] simplify include! handling
- [x] Figure out how to undo the sudden `()` expression wrapping in expansions / alternatively prioritize getting invisible delimiters working again
- [x] Simplify InFile stuff and HirFileId extensions
~~- [ ] span crate containing all the file ids, span stuff, ast ids. Then remove the dependency injection generics from tt and mbe~~

Fixes https://github.com/rust-lang/rust-analyzer/issues/10300
Fixes https://github.com/rust-lang/rust-analyzer/issues/15685
bors 2023-12-04 19:59:53 +00:00
commit e91fdf7860
133 changed files with 5403 additions and 4205 deletions

Cargo.lock (generated)

@ -1255,6 +1255,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
name = "proc-macro-api"
version = "0.0.0"
dependencies = [
"base-db",
"indexmap",
"la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"memmap2",
"object 0.32.0",
"paths",
@ -1263,6 +1266,7 @@ dependencies = [
"serde_json",
"snap",
"stdx",
"text-size",
"tracing",
"triomphe",
"tt",
@ -1751,6 +1755,7 @@ dependencies = [
"always-assert",
"backtrace",
"crossbeam-channel",
"itertools",
"jod-thread",
"libc",
"miow",
@ -2010,6 +2015,7 @@ version = "0.0.0"
dependencies = [
"smol_str",
"stdx",
"text-size",
]
[[package]]


@ -12,7 +12,7 @@ authors = ["rust-analyzer team"]
[profile.dev]
# Disabling debug info speeds up builds a bunch,
# and we don't rely on it for debugging that much.
debug = 0
debug = 1
[profile.dev.package]
# These speed up local tests.


@ -8,11 +8,12 @@ use test_utils::{
ESCAPED_CURSOR_MARKER,
};
use triomphe::Arc;
use tt::token_id::{Leaf, Subtree, TokenTree};
use tt::{Leaf, Subtree, TokenTree};
use vfs::{file_set::FileSet, VfsPath};
use crate::{
input::{CrateName, CrateOrigin, LangCrateOrigin},
span::SpanData,
Change, CrateDisplayName, CrateGraph, CrateId, Dependency, DependencyKind, Edition, Env,
FileId, FilePosition, FileRange, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacros, ReleaseChannel, SourceDatabaseExt, SourceRoot, SourceRootId,
@ -539,10 +540,13 @@ struct IdentityProcMacroExpander;
impl ProcMacroExpander for IdentityProcMacroExpander {
fn expand(
&self,
subtree: &Subtree,
_: Option<&Subtree>,
subtree: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@ -553,10 +557,13 @@ struct AttributeInputReplaceProcMacroExpander;
impl ProcMacroExpander for AttributeInputReplaceProcMacroExpander {
fn expand(
&self,
_: &Subtree,
attrs: Option<&Subtree>,
_: &Subtree<SpanData>,
attrs: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
attrs
.cloned()
.ok_or_else(|| ProcMacroExpansionError::Panic("Expected attribute input".into()))
@ -568,11 +575,14 @@ struct MirrorProcMacroExpander;
impl ProcMacroExpander for MirrorProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
fn traverse(input: &Subtree) -> Subtree {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let mut token_trees = vec![];
for tt in input.token_trees.iter().rev() {
let tt = match tt {
@ -595,13 +605,16 @@ struct ShortenProcMacroExpander;
impl ProcMacroExpander for ShortenProcMacroExpander {
fn expand(
&self,
input: &Subtree,
_: Option<&Subtree>,
input: &Subtree<SpanData>,
_: Option<&Subtree<SpanData>>,
_: &Env,
) -> Result<Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree<SpanData>, ProcMacroExpansionError> {
return Ok(traverse(input));
fn traverse(input: &Subtree) -> Subtree {
fn traverse(input: &Subtree<SpanData>) -> Subtree<SpanData> {
let token_trees = input
.token_trees
.iter()
@ -613,7 +626,7 @@ impl ProcMacroExpander for ShortenProcMacroExpander {
Subtree { delimiter: input.delimiter, token_trees }
}
fn modify_leaf(leaf: &Leaf) -> Leaf {
fn modify_leaf(leaf: &Leaf<SpanData>) -> Leaf<SpanData> {
let mut leaf = leaf.clone();
match &mut leaf {
Leaf::Literal(it) => {


@ -13,9 +13,10 @@ use la_arena::{Arena, Idx};
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::SmolStr;
use triomphe::Arc;
use tt::token_id::Subtree;
use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath};
use crate::span::SpanData;
// Map from crate id to the name of the crate and path of the proc-macro. If the value is `None`,
// then the crate for the proc-macro hasn't been built yet as the build data is missing.
pub type ProcMacroPaths = FxHashMap<CrateId, Result<(Option<String>, AbsPathBuf), String>>;
@ -242,6 +243,9 @@ impl CrateDisplayName {
}
}
// FIXME: These should not be defined in here? Why does base db know about proc-macros
// ProcMacroKind is used in [`fixture`], but that module probably shouldn't be in this crate either.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);
@ -255,10 +259,13 @@ pub enum ProcMacroKind {
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
fn expand(
&self,
subtree: &Subtree,
attrs: Option<&Subtree>,
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
) -> Result<Subtree, ProcMacroExpansionError>;
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError>;
}
#[derive(Debug)]
@ -323,7 +330,9 @@ pub struct CrateData {
pub dependencies: Vec<Dependency>,
pub origin: CrateOrigin,
pub is_proc_macro: bool,
// FIXME: These things should not be per crate! These are more per workspace crate graph level things
// FIXME: These things should not be per crate! These are more per workspace crate graph level
// things. This info does need to be somewhat present though, so as to prevent deduplication from
// happening across different workspaces with different layouts.
pub target_layout: TargetLayoutLoadResult,
pub channel: Option<ReleaseChannel>,
}


@ -5,6 +5,7 @@
mod input;
mod change;
pub mod fixture;
pub mod span;
use std::panic;
@ -12,14 +13,13 @@ use rustc_hash::FxHashSet;
use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
use triomphe::Arc;
pub use crate::input::DependencyKind;
pub use crate::{
change::Change,
input::{
CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths, ProcMacros,
ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
DependencyKind, Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander,
ProcMacroExpansionError, ProcMacroId, ProcMacroKind, ProcMacroLoadResult, ProcMacroPaths,
ProcMacros, ReleaseChannel, SourceRoot, SourceRootId, TargetLayoutLoadResult,
},
};
pub use salsa::{self, Cancelled};
@ -68,8 +68,7 @@ pub trait FileLoader {
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
// Parses the file into the syntax tree.
#[salsa::invoke(parse_query)]
/// Parses the file into the syntax tree.
fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
/// The crate graph.
@ -81,7 +80,7 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug {
fn proc_macros(&self) -> Arc<ProcMacros>;
}
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
let _p = profile::span("parse_query").detail(|| format!("{file_id:?}"));
let text = db.file_text(file_id);
SourceFile::parse(&text)

crates/base-db/src/span.rs (new file)

@ -0,0 +1,209 @@
//! File and span related types.
// FIXME: This should probably be moved into its own crate.
use std::fmt;
use salsa::InternId;
use tt::SyntaxContext;
use vfs::FileId;
pub type ErasedFileAstId = la_arena::Idx<syntax::SyntaxNodePtr>;
// The first index is always the root node's AstId
pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
la_arena::Idx::from_raw(la_arena::RawIdx::from_u32(0));
pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContextId(InternId);
impl fmt::Debug for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
if *self == Self::SELF_REF {
f.debug_tuple("SyntaxContextId")
.field(&{
#[derive(Debug)]
#[allow(non_camel_case_types)]
struct SELF_REF;
SELF_REF
})
.finish()
} else {
f.debug_tuple("SyntaxContextId").field(&self.0).finish()
}
}
}
crate::impl_intern_key!(SyntaxContextId);
impl fmt::Display for SyntaxContextId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0.as_u32())
}
}
impl SyntaxContext for SyntaxContextId {
const DUMMY: Self = Self::ROOT;
}
// inherent trait impls please tyvm
impl SyntaxContextId {
pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
// FIXME(veykril, HACK): salsa doesn't allow us to fetch the id of the input currently being allocated, so
// we need a special value that behaves as the current context.
pub const SELF_REF: Self =
SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
pub fn is_root(self) -> bool {
self == Self::ROOT
}
}
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct SpanAnchor {
pub file_id: FileId,
pub ast_id: ErasedFileAstId,
}
impl fmt::Debug for SpanAnchor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("SpanAnchor").field(&self.file_id).field(&self.ast_id.into_raw()).finish()
}
}
impl tt::SpanAnchor for SpanAnchor {
const DUMMY: Self = SpanAnchor { file_id: FileId(0), ast_id: ROOT_ERASED_FILE_AST_ID };
}
/// Input to the analyzer is a set of files, where each file is identified by
/// `FileId` and contains source code. However, another source of source code in
/// Rust are macros: each macro can be thought of as producing a "temporary
/// file". To assign an id to such a file, we use the id of the macro call that
/// produced the file. So, a `HirFileId` is either a `FileId` (source code
/// written by user), or a `MacroCallId` (source code produced by macro).
///
/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
/// containing the call plus the offset of the macro call in the file. Note that
/// this is a recursive definition! However, the size_of of `HirFileId` is
/// finite (because everything bottoms out at the real `FileId`) and small
/// (`MacroCallId` uses the location interning. You can check details here:
/// <https://en.wikipedia.org/wiki/String_interning>).
///
/// The two variants are encoded in a single u32, differentiated by the MSB.
/// If the MSB is 0, the value represents a `FileId`, otherwise the remaining 31 bits represent a
/// `MacroCallId`.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct HirFileId(u32);
impl From<HirFileId> for u32 {
fn from(value: HirFileId) -> Self {
value.0
}
}
impl From<u32> for HirFileId {
fn from(value: u32) -> Self {
HirFileId(value)
}
}
impl From<MacroCallId> for HirFileId {
fn from(value: MacroCallId) -> Self {
value.as_file()
}
}
impl fmt::Debug for HirFileId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.repr().fmt(f)
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct MacroFileId {
pub macro_call_id: MacroCallId,
}
/// `MacroCallId` identifies a particular macro invocation, like
/// `println!("Hello, {}", world)`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct MacroCallId(salsa::InternId);
crate::impl_intern_key!(MacroCallId);
impl MacroCallId {
pub fn as_file(self) -> HirFileId {
MacroFileId { macro_call_id: self }.into()
}
pub fn as_macro_file(self) -> MacroFileId {
MacroFileId { macro_call_id: self }
}
}
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub enum HirFileIdRepr {
FileId(FileId),
MacroFile(MacroFileId),
}
impl fmt::Debug for HirFileIdRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::FileId(arg0) => f.debug_tuple("FileId").field(&arg0.0).finish(),
Self::MacroFile(arg0) => {
f.debug_tuple("MacroFile").field(&arg0.macro_call_id.0).finish()
}
}
}
}
impl From<FileId> for HirFileId {
fn from(FileId(id): FileId) -> Self {
assert!(id < Self::MAX_FILE_ID);
HirFileId(id)
}
}
impl From<MacroFileId> for HirFileId {
fn from(MacroFileId { macro_call_id: MacroCallId(id) }: MacroFileId) -> Self {
let id = id.as_u32();
assert!(id < Self::MAX_FILE_ID);
HirFileId(id | Self::MACRO_FILE_TAG_MASK)
}
}
impl HirFileId {
const MAX_FILE_ID: u32 = u32::MAX ^ Self::MACRO_FILE_TAG_MASK;
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;
#[inline]
pub fn is_macro(self) -> bool {
self.0 & Self::MACRO_FILE_TAG_MASK != 0
}
#[inline]
pub fn macro_file(self) -> Option<MacroFileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => None,
_ => Some(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
#[inline]
pub fn file_id(self) -> Option<FileId> {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => Some(FileId(self.0)),
_ => None,
}
}
#[inline]
pub fn repr(self) -> HirFileIdRepr {
match self.0 & Self::MACRO_FILE_TAG_MASK {
0 => HirFileIdRepr::FileId(FileId(self.0)),
_ => HirFileIdRepr::MacroFile(MacroFileId {
macro_call_id: MacroCallId(InternId::from(self.0 ^ Self::MACRO_FILE_TAG_MASK)),
}),
}
}
}
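
For illustration, the tag-and-untag round trip described in the doc comment above, as a standalone sketch (plain `u32`s rather than the real types):

```rust
// The MSB of the packed u32 distinguishes the two HirFileId variants.
const MACRO_FILE_TAG_MASK: u32 = 1 << 31;

fn main() {
    let file = 5u32; // a plain FileId: MSB is 0
    let macro_file = 5u32 | MACRO_FILE_TAG_MASK; // a MacroCallId: MSB is 1

    assert_eq!(file & MACRO_FILE_TAG_MASK, 0);
    assert_ne!(macro_file & MACRO_FILE_TAG_MASK, 0);
    // XOR-ing the tag back off recovers the 31-bit interned id.
    assert_eq!(macro_file ^ MACRO_FILE_TAG_MASK, 5);
}
```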


@ -1,37 +1,31 @@
use arbitrary::{Arbitrary, Unstructured};
use expect_test::{expect, Expect};
use mbe::syntax_node_to_token_tree;
use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
use syntax::{ast, AstNode};
use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
fn assert_parse_result(input: &str, expected: CfgExpr) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
assert_eq!(cfg, expected);
}
fn check_dnf(input: &str, expect: Expect) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
expect.assert_eq(&actual);
}
fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@ -40,11 +34,9 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
#[track_caller]
fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
let cfg = CfgExpr::parse(&tt);
let dnf = DnfExpr::new(cfg);
let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();


@ -421,6 +421,7 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::TypeParamId(it) => {
@ -428,11 +429,16 @@ impl AttrsWithOwner {
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id()]),
db.span_map(src.file_id).as_ref(),
)
}
GenericParamId::LifetimeParamId(it) => {
let src = it.parent.child_source(db);
RawAttrs::from_attrs_owner(db.upcast(), src.with_value(&src.value[it.local_id]))
RawAttrs::from_attrs_owner(
db.upcast(),
src.with_value(&src.value[it.local_id]),
db.span_map(src.file_id).as_ref(),
)
}
},
AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it),


@ -1,17 +1,20 @@
//! This module contains tests for doc-expression parsing.
//! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.
use base_db::FileId;
use hir_expand::span::{RealSpanMap, SpanMapRef};
use mbe::syntax_node_to_token_tree;
use syntax::{ast, AstNode};
use crate::attr::{DocAtom, DocExpr};
fn assert_parse_result(input: &str, expected: DocExpr) {
let (tt, _) = {
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
syntax_node_to_token_tree(tt.syntax())
};
let source_file = ast::SourceFile::parse(input).ok().unwrap();
let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
let tt = syntax_node_to_token_tree(
tt.syntax(),
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))),
);
let cfg = DocExpr::parse(&tt);
assert_eq!(cfg, expected);
}


@ -1025,7 +1025,7 @@ impl ExprCollector<'_> {
let id = collector(self, Some(expansion.tree()));
self.ast_id_map = prev_ast_id_map;
self.expander.exit(self.db, mark);
self.expander.exit(mark);
id
}
None => collector(self, None),


@ -143,7 +143,6 @@ mod m {
#[test]
fn desugar_builtin_format_args() {
// Regression test for a path resolution bug introduced with inner item handling.
let (db, body, def) = lower(
r#"
//- minicore: fmt
@ -221,3 +220,115 @@ fn main() {
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
#[test]
fn test_macro_hygiene() {
let (db, body, def) = lower(
r##"
//- minicore: fmt, from
//- /main.rs
mod error;
use crate::error::error;
fn main() {
// _ = forces body expansion instead of block def map expansion
_ = error!("Failed to resolve path `{}`", node.text());
}
//- /error.rs
macro_rules! _error {
($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
}
pub(crate) use _error as error;
macro_rules! _intermediate {
($arg:expr) => {$crate::error::SsrError::new($arg)}
}
pub(crate) use _intermediate as intermediate;
pub struct SsrError(pub(crate) core::fmt::Arguments);
impl SsrError {
pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
SsrError(message.into())
}
}
"##,
);
assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
expect![[r#"
fn main() {
_ = $crate::error::SsrError::new(
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"Failed to resolve path `", "`\"",
],
&[
builtin#lang(Argument::new_display)(
&node.text(),
),
],
&[
builtin#lang(Placeholder::new)(
0usize,
' ',
builtin#lang(Alignment::Unknown),
0u32,
builtin#lang(Count::Implied),
builtin#lang(Count::Implied),
),
],
unsafe {
builtin#lang(UnsafeArg::new)()
},
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}
#[test]
fn regression_10300() {
let (db, body, def) = lower(
r#"
//- minicore: concat, panic
mod private {
pub use core::concat;
}
macro_rules! m {
() => {
panic!(concat!($crate::private::concat!("cc")));
};
}
fn f() {
m!();
}
"#,
);
let (_, source_map) = db.body_with_source_map(def.into());
assert_eq!(source_map.diagnostics(), &[]);
for (_, def_map) in body.blocks(&db) {
assert_eq!(def_map.diagnostics(), &[]);
}
expect![[r#"
fn f() {
$crate::panicking::panic_fmt(
builtin#lang(Arguments::new_v1_formatted)(
&[
"\"cc\"",
],
&[],
&[],
unsafe {
builtin#lang(UnsafeArg::new)()
},
),
);
}"#]]
.assert_eq(&body.pretty_print(&db, def))
}


@ -663,7 +663,7 @@ impl<'a> AssocItemCollector<'a> {
self.module_id.local_id,
MacroCallKind::Attr {
ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@ -706,7 +706,7 @@ impl<'a> AssocItemCollector<'a> {
}
AssocItem::MacroCall(call) => {
let file_id = self.expander.current_file_id();
let MacroCall { ast_id, expand_to, ref path } = item_tree[call];
let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
let module = self.expander.module.local_id;
let resolver = |path| {
@ -725,6 +725,7 @@ impl<'a> AssocItemCollector<'a> {
match macro_call_as_call_id(
self.db.upcast(),
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
call_site,
expand_to,
self.expander.module.krate(),
resolver,
@ -793,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
self.collect(&item_tree, tree_id, &iter);
self.expander.exit(self.db, mark);
self.expander.exit(mark);
}
}


@ -4,21 +4,21 @@ use base_db::CrateId;
use cfg::CfgOptions;
use drop_bomb::DropBomb;
use hir_expand::{
attrs::RawAttrs, hygiene::Hygiene, mod_path::ModPath, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId, UnresolvedMacro,
attrs::RawAttrs, mod_path::ModPath, span::SpanMap, ExpandError, ExpandResult, HirFileId,
InFile, MacroCallId,
};
use limit::Limit;
use syntax::{ast, Parse, SyntaxNode};
use crate::{
attr::Attrs, db::DefDatabase, lower::LowerCtx, macro_id_to_def_id, path::Path, AsMacroCall,
MacroId, ModuleId,
MacroId, ModuleId, UnresolvedMacro,
};
#[derive(Debug)]
pub struct Expander {
cfg_options: CfgOptions,
hygiene: Hygiene,
span_map: SpanMap,
krate: CrateId,
pub(crate) current_file_id: HirFileId,
pub(crate) module: ModuleId,
@ -41,7 +41,7 @@ impl Expander {
recursion_depth: 0,
recursion_limit,
cfg_options: db.crate_graph()[module.krate].cfg_options.clone(),
hygiene: Hygiene::new(db.upcast(), current_file_id),
span_map: db.span_map(current_file_id),
krate: module.krate,
}
}
@ -94,8 +94,8 @@ impl Expander {
ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
}
pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
pub fn exit(&mut self, mut mark: Mark) {
self.span_map = mark.span_map;
self.current_file_id = mark.file_id;
if self.recursion_depth == u32::MAX {
// Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@ -110,7 +110,7 @@ impl Expander {
}
pub fn ctx<'a>(&self, db: &'a dyn DefDatabase) -> LowerCtx<'a> {
LowerCtx::new(db, &self.hygiene, self.current_file_id)
LowerCtx::new(db, self.span_map.clone(), self.current_file_id)
}
pub(crate) fn to_source<T>(&self, value: T) -> InFile<T> {
@ -118,7 +118,7 @@ impl Expander {
}
pub(crate) fn parse_attrs(&self, db: &dyn DefDatabase, owner: &dyn ast::HasAttrs) -> Attrs {
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, &self.hygiene))
Attrs::filter(db, self.krate, RawAttrs::new(db.upcast(), owner, self.span_map.as_ref()))
}
pub(crate) fn cfg_options(&self) -> &CfgOptions {
@ -130,8 +130,8 @@ impl Expander {
}
pub(crate) fn parse_path(&mut self, db: &dyn DefDatabase, path: ast::Path) -> Option<Path> {
let ctx = LowerCtx::new(db, &self.hygiene, self.current_file_id);
Path::from_src(path, &ctx)
let ctx = LowerCtx::new(db, self.span_map.clone(), self.current_file_id);
Path::from_src(&ctx, path)
}
fn within_limit<F, T: ast::AstNode>(
@ -174,10 +174,11 @@ impl Expander {
let parse = value.cast::<T>()?;
self.recursion_depth += 1;
self.hygiene = Hygiene::new(db.upcast(), file_id);
let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
let mark = Mark {
file_id: old_file_id,
span_map: old_span_map,
bomb: DropBomb::new("expansion mark dropped"),
};
Some((mark, parse))
@ -190,5 +191,6 @@ impl Expander {
#[derive(Debug)]
pub struct Mark {
file_id: HirFileId,
span_map: SpanMap,
bomb: DropBomb,
}
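
The `Mark` mechanics above follow a simple save/restore pattern: entering an expansion swaps the expander's state for the expansion's and stashes the old state in the `Mark`; `exit` swaps it back (the `DropBomb` just catches a forgotten `exit`). A standalone sketch with hypothetical simplified types:

```rust
use std::mem;

struct SpanMap; // stand-in for the real per-file span map

struct Mark {
    file_id: u32,
    span_map: SpanMap,
}

struct Expander {
    current_file_id: u32,
    span_map: SpanMap,
}

impl Expander {
    /// Enter an expansion: swap in the expansion's state and stash the
    /// old state in the returned `Mark`.
    fn enter(&mut self, file_id: u32, span_map: SpanMap) -> Mark {
        Mark {
            file_id: mem::replace(&mut self.current_file_id, file_id),
            span_map: mem::replace(&mut self.span_map, span_map),
        }
    }

    /// Exit the expansion, restoring the state saved in the `Mark`.
    fn exit(&mut self, mark: Mark) {
        self.current_file_id = mark.file_id;
        self.span_map = mark.span_map;
    }
}
```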


@ -586,7 +586,7 @@ fn find_local_import_locations(
#[cfg(test)]
mod tests {
use base_db::fixture::WithFixture;
use hir_expand::hygiene::Hygiene;
use hir_expand::db::ExpandDatabase;
use syntax::ast::AstNode;
use crate::test_db::TestDB;
@ -608,7 +608,8 @@ mod tests {
let parsed_path_file = syntax::SourceFile::parse(&format!("use {path};"));
let ast_path =
parsed_path_file.syntax_node().descendants().find_map(syntax::ast::Path::cast).unwrap();
let mod_path = ModPath::from_src(&db, ast_path, &Hygiene::new_unhygienic()).unwrap();
let mod_path =
ModPath::from_src(&db, ast_path, db.span_map(pos.file_id.into()).as_ref()).unwrap();
let def_map = module.def_map(&db);
let resolved = def_map


@ -21,7 +21,7 @@ use crate::{
db::DefDatabase,
dyn_map::{keys, DynMap},
expander::Expander,
item_tree::{AttrOwner, ItemTree},
item_tree::ItemTree,
lower::LowerCtx,
nameres::{DefMap, MacroSubNs},
src::{HasChildSource, HasSource},
@ -250,7 +250,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
node: &dyn HasGenericParams,
add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
ast::GenericParam,
),
) {
if let Some(params) = node.generic_param_list() {
self.fill_params(lower_ctx, params, add_param_attrs)
@ -275,7 +278,10 @@ impl GenericParams {
&mut self,
lower_ctx: &LowerCtx<'_>,
params: ast::GenericParamList,
mut add_param_attrs: impl FnMut(AttrOwner, ast::GenericParam),
mut add_param_attrs: impl FnMut(
Either<Idx<TypeOrConstParamData>, Idx<LifetimeParamData>>,
ast::GenericParam,
),
) {
for type_or_const_param in params.type_or_const_params() {
match type_or_const_param {
@ -297,7 +303,7 @@ impl GenericParams {
type_param.type_bound_list(),
Either::Left(type_ref),
);
add_param_attrs(idx.into(), ast::GenericParam::TypeParam(type_param));
add_param_attrs(Either::Left(idx), ast::GenericParam::TypeParam(type_param));
}
ast::TypeOrConstParam::Const(const_param) => {
let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
@ -310,7 +316,7 @@ impl GenericParams {
default: ConstRef::from_const_param(lower_ctx, &const_param),
};
let idx = self.type_or_consts.alloc(param.into());
add_param_attrs(idx.into(), ast::GenericParam::ConstParam(const_param));
add_param_attrs(Either::Left(idx), ast::GenericParam::ConstParam(const_param));
}
}
}
@ -325,7 +331,7 @@ impl GenericParams {
lifetime_param.type_bound_list(),
Either::Right(lifetime_ref),
);
add_param_attrs(idx.into(), ast::GenericParam::LifetimeParam(lifetime_param));
add_param_attrs(Either::Right(idx), ast::GenericParam::LifetimeParam(lifetime_param));
}
}
@ -433,7 +439,7 @@ impl GenericParams {
let ctx = expander.ctx(db);
let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
exp.1.exit(db, mark);
exp.1.exit(mark);
}
}
});


@ -112,6 +112,7 @@ pub struct ItemScope {
#[derive(Debug, PartialEq, Eq)]
struct DeriveMacroInvocation {
attr_id: AttrId,
/// The `#[derive]` call
attr_call_id: MacroCallId,
derive_call_ids: SmallVec<[Option<MacroCallId>; 1]>,
}
@ -401,6 +402,14 @@ impl ItemScope {
})
}
pub fn derive_macro_invoc(
&self,
ast_id: AstId<ast::Adt>,
attr_id: AttrId,
) -> Option<MacroCallId> {
Some(self.derive_macros.get(&ast_id)?.iter().find(|it| it.attr_id == attr_id)?.attr_call_id)
}
// FIXME: This is only used in collection, we should move the relevant parts of it out of ItemScope
pub(crate) fn unnamed_trait_vis(&self, tr: TraitId) -> Option<Visibility> {
self.unnamed_trait_imports.get(&tr).copied().map(|(a, _)| a)


@ -42,12 +42,11 @@ use std::{
};
use ast::{AstNode, HasName, StructKind};
use base_db::CrateId;
use base_db::{span::SyntaxContextId, CrateId};
use either::Either;
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::RawAttrs,
hygiene::Hygiene,
name::{name, AsName, Name},
ExpandTo, HirFileId, InFile,
};
@ -118,7 +117,7 @@ impl ItemTree {
let mut item_tree = match_ast! {
match syntax {
ast::SourceFile(file) => {
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.hygiene()));
top_attrs = Some(RawAttrs::new(db.upcast(), &file, ctx.span_map()));
ctx.lower_module_items(&file)
},
ast::MacroItems(items) => {
@ -749,6 +748,7 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
pub call_site: SyntaxContextId,
}
#[derive(Debug, Clone, Eq, PartialEq)]
@ -778,9 +778,9 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
let (_, source_map) =
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
let span_map = db.span_map(file_id);
let (_, source_map) = lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
.expect("failed to lower use tree");
source_map[index].clone()
}
/// Maps a `UseTree` contained in this import back to its AST node.
@ -793,8 +793,10 @@ impl Use {
// Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
let hygiene = Hygiene::new(db.upcast(), file_id);
lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree").1
let span_map = db.span_map(file_id);
lower::lower_use_tree(db, span_map.as_ref(), ast_use_tree)
.expect("failed to lower use tree")
.1
}
}


@ -2,12 +2,13 @@
use std::collections::hash_map::Entry;
use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
use hir_expand::{ast_id_map::AstIdMap, span::SpanMapRef, HirFileId};
use syntax::ast::{self, HasModuleItem, HasTypeBounds};
use crate::{
generics::{GenericParams, TypeParamData, TypeParamProvenance},
type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
LocalLifetimeParamId, LocalTypeOrConstParamId,
};
use super::*;
@ -33,8 +34,8 @@ impl<'a> Ctx<'a> {
}
}
pub(super) fn hygiene(&self) -> &Hygiene {
self.body_ctx.hygiene()
pub(super) fn span_map(&self) -> SpanMapRef<'_> {
self.body_ctx.span_map()
}
pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
@ -79,7 +80,7 @@ impl<'a> Ctx<'a> {
pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
self.tree
.attrs
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.hygiene()));
.insert(AttrOwner::TopLevel, RawAttrs::new(self.db.upcast(), block, self.span_map()));
self.tree.top_level = block
.statements()
.filter_map(|stmt| match stmt {
@ -109,8 +110,7 @@ impl<'a> Ctx<'a> {
}
fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
let attrs = RawAttrs::new(self.db.upcast(), item, self.hygiene());
let item: ModItem = match item {
let mod_item: ModItem = match item {
ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
ast::Item::Union(ast) => self.lower_union(ast)?.into(),
ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
@ -129,10 +129,10 @@ impl<'a> Ctx<'a> {
ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
};
let attrs = RawAttrs::new(self.db.upcast(), item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
self.add_attrs(item.into(), attrs);
Some(item)
Some(mod_item)
}
fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
@ -146,21 +146,32 @@ impl<'a> Ctx<'a> {
}
}
fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
match item {
fn lower_assoc_item(&mut self, item_node: &ast::AssocItem) -> Option<AssocItem> {
let item: AssocItem = match item_node {
ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
}
}?;
let attrs = RawAttrs::new(self.db.upcast(), item_node, self.span_map());
self.add_attrs(
match item {
AssocItem::Function(it) => AttrOwner::ModItem(ModItem::Function(it)),
AssocItem::TypeAlias(it) => AttrOwner::ModItem(ModItem::TypeAlias(it)),
AssocItem::Const(it) => AttrOwner::ModItem(ModItem::Const(it)),
AssocItem::MacroCall(it) => AttrOwner::ModItem(ModItem::MacroCall(it)),
},
attrs,
);
Some(item)
}
fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
let visibility = self.lower_visibility(strukt);
let name = strukt.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(strukt);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, strukt);
let fields = self.lower_fields(&strukt.kind());
let ast_id = self.source_ast_id_map.ast_id(strukt);
let res = Struct { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().structs.alloc(res)))
}
@ -184,7 +195,10 @@ impl<'a> Ctx<'a> {
for field in fields.fields() {
if let Some(data) = self.lower_record_field(&field) {
let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &field, self.span_map()),
);
}
}
let end = self.next_field_idx();
@ -205,7 +219,7 @@ impl<'a> Ctx<'a> {
for (i, field) in fields.fields().enumerate() {
let data = self.lower_tuple_field(i, &field);
let idx = self.data().fields.alloc(data);
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.hygiene()));
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &field, self.span_map()));
}
let end = self.next_field_idx();
IdxRange::new(start..end)
@ -222,12 +236,12 @@ impl<'a> Ctx<'a> {
fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
let visibility = self.lower_visibility(union);
let name = union.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(union);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, union);
let fields = match union.record_field_list() {
Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
};
let ast_id = self.source_ast_id_map.ast_id(union);
let res = Union { name, visibility, generic_params, fields, ast_id };
Some(id(self.data().unions.alloc(res)))
}
@ -235,12 +249,12 @@ impl<'a> Ctx<'a> {
fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
let visibility = self.lower_visibility(enum_);
let name = enum_.name()?.as_name();
let ast_id = self.source_ast_id_map.ast_id(enum_);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, enum_);
let variants = match &enum_.variant_list() {
Some(variant_list) => self.lower_variants(variant_list),
None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
};
let ast_id = self.source_ast_id_map.ast_id(enum_);
let res = Enum { name, visibility, generic_params, variants, ast_id };
Some(id(self.data().enums.alloc(res)))
}
@ -252,7 +266,7 @@ impl<'a> Ctx<'a> {
let idx = self.data().variants.alloc(data);
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &variant, self.hygiene()),
RawAttrs::new(self.db.upcast(), &variant, self.span_map()),
);
}
}
@ -303,28 +317,29 @@ impl<'a> Ctx<'a> {
});
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &self_param, self.hygiene()),
RawAttrs::new(self.db.upcast(), &self_param, self.span_map()),
);
has_self_param = true;
}
for param in param_list.params() {
let ast_id = self.source_ast_id_map.ast_id(&param);
let idx = match param.dotdotdot_token() {
Some(_) => {
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) })
}
Some(_) => self
.data()
.params
.alloc(Param { type_ref: None, ast_id: ParamAstId::Param(ast_id) }),
None => {
let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
let ty = Interned::new(type_ref);
let ast_id = self.source_ast_id_map.ast_id(&param);
self.data()
.params
.alloc(Param { type_ref: Some(ty), ast_id: ParamAstId::Param(ast_id) })
}
};
self.add_attrs(idx.into(), RawAttrs::new(self.db.upcast(), &param, self.hygiene()));
self.add_attrs(
idx.into(),
RawAttrs::new(self.db.upcast(), &param, self.span_map()),
);
}
}
let end_param = self.next_param_idx();
@ -394,8 +409,8 @@ impl<'a> Ctx<'a> {
let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
let visibility = self.lower_visibility(type_alias);
let bounds = self.lower_type_bounds(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let ast_id = self.source_ast_id_map.ast_id(type_alias);
let generic_params = self.lower_generic_params(HasImplicitSelf::No, type_alias);
let res = TypeAlias { name, visibility, bounds, generic_params, type_ref, ast_id };
Some(id(self.data().type_aliases.alloc(res)))
}
@ -443,23 +458,17 @@ impl<'a> Ctx<'a> {
fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
let name = trait_def.name()?.as_name();
let visibility = self.lower_visibility(trait_def);
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let generic_params =
self.lower_generic_params(HasImplicitSelf::Yes(trait_def.type_bound_list()), trait_def);
let is_auto = trait_def.auto_token().is_some();
let is_unsafe = trait_def.unsafe_token().is_some();
let ast_id = self.source_ast_id_map.ast_id(trait_def);
let items = trait_def
.assoc_item_list()
.into_iter()
.flat_map(|list| list.assoc_items())
.filter_map(|item| {
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.lower_assoc_item(&item).map(|item| {
self.add_attrs(ModItem::from(item).into(), attrs);
item
})
})
.filter_map(|item_node| self.lower_assoc_item(&item_node))
.collect();
let def = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
@ -472,17 +481,18 @@ impl<'a> Ctx<'a> {
) -> Option<FileItemTreeId<TraitAlias>> {
let name = trait_alias_def.name()?.as_name();
let visibility = self.lower_visibility(trait_alias_def);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let generic_params = self.lower_generic_params(
HasImplicitSelf::Yes(trait_alias_def.type_bound_list()),
trait_alias_def,
);
let ast_id = self.source_ast_id_map.ast_id(trait_alias_def);
let alias = TraitAlias { name, visibility, generic_params, ast_id };
Some(id(self.data().trait_aliases.alloc(alias)))
}
fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
let ast_id = self.source_ast_id_map.ast_id(impl_def);
// Note that trait impls don't get implicit `Self` unlike traits, because here they are a
// type alias rather than a type parameter, so this is handled by the resolver.
let generic_params = self.lower_generic_params(HasImplicitSelf::No, impl_def);
@ -499,14 +509,8 @@ impl<'a> Ctx<'a> {
.assoc_item_list()
.into_iter()
.flat_map(|it| it.assoc_items())
.filter_map(|item| {
let assoc = self.lower_assoc_item(&item)?;
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
self.add_attrs(ModItem::from(assoc).into(), attrs);
Some(assoc)
})
.filter_map(|item| self.lower_assoc_item(&item))
.collect();
let ast_id = self.source_ast_id_map.ast_id(impl_def);
let res =
Impl { generic_params, target_trait, self_ty, is_negative, is_unsafe, items, ast_id };
Some(id(self.data().impls.alloc(res)))
@ -515,7 +519,7 @@ impl<'a> Ctx<'a> {
fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Use>> {
let visibility = self.lower_visibility(use_item);
let ast_id = self.source_ast_id_map.ast_id(use_item);
let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
let (use_tree, _) = lower_use_tree(self.db, self.span_map(), use_item.use_tree()?)?;
let res = Use { visibility, ast_id, use_tree };
Some(id(self.data().uses.alloc(res)))
@ -537,10 +541,16 @@ impl<'a> Ctx<'a> {
}
fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
let span_map = self.span_map();
let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, span_map)?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
let res = MacroCall { path, ast_id, expand_to };
let res = MacroCall {
path,
ast_id,
expand_to,
call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
};
Some(id(self.data().macro_calls.alloc(res)))
}
@ -572,15 +582,15 @@ impl<'a> Ctx<'a> {
// (in other words, the knowledge that they're in an extern block must not be used).
// This is because an extern block can contain macros whose ItemTree's top-level items
// should be considered to be in an extern block too.
let attrs = RawAttrs::new(self.db.upcast(), &item, self.hygiene());
let id: ModItem = match item {
ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
let mod_item: ModItem = match &item {
ast::ExternItem::Fn(ast) => self.lower_function(ast)?.into(),
ast::ExternItem::Static(ast) => self.lower_static(ast)?.into(),
ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(ty)?.into(),
ast::ExternItem::MacroCall(call) => self.lower_macro_call(call)?.into(),
};
self.add_attrs(id.into(), attrs);
Some(id)
let attrs = RawAttrs::new(self.db.upcast(), &item, self.span_map());
self.add_attrs(mod_item.into(), attrs);
Some(mod_item)
})
.collect()
});
@ -612,12 +622,16 @@ impl<'a> Ctx<'a> {
generics.fill_bounds(&self.body_ctx, bounds, Either::Left(self_param));
}
let add_param_attrs = |item, param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.hygiene());
let add_param_attrs = |item: Either<LocalTypeOrConstParamId, LocalLifetimeParamId>,
param| {
let attrs = RawAttrs::new(self.db.upcast(), &param, self.body_ctx.span_map());
// This is identical to the body of `Ctx::add_attrs()` but we can't call that here
// because it requires `&mut self` and the call to `generics.fill()` below also
// references `self`.
match self.tree.attrs.entry(item) {
match self.tree.attrs.entry(match item {
Either::Right(id) => id.into(),
Either::Left(id) => id.into(),
}) {
Entry::Occupied(mut entry) => {
*entry.get_mut() = entry.get().merge(attrs);
}
@ -643,7 +657,8 @@ impl<'a> Ctx<'a> {
}
fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
let vis =
RawVisibility::from_ast_with_span_map(self.db, item.visibility(), self.span_map());
self.data().vis.alloc(vis)
}
@ -721,7 +736,7 @@ fn lower_abi(abi: ast::Abi) -> Interned<str> {
struct UseTreeLowering<'a> {
db: &'a dyn DefDatabase,
hygiene: &'a Hygiene,
span_map: SpanMapRef<'a>,
mapping: Arena<ast::UseTree>,
}
@ -734,7 +749,7 @@ impl UseTreeLowering<'_> {
// E.g. `use something::{inner}` (prefix is `None`, path is `something`)
// or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
Some(path) => {
match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
match ModPath::from_src(self.db.upcast(), path, self.span_map) {
Some(it) => Some(it),
None => return None, // FIXME: report errors somewhere
}
@ -753,7 +768,7 @@ impl UseTreeLowering<'_> {
} else {
let is_glob = tree.star_token().is_some();
let path = match tree.path() {
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.span_map)?),
None => None,
};
let alias = tree.rename().map(|a| {
@ -789,10 +804,10 @@ impl UseTreeLowering<'_> {
pub(crate) fn lower_use_tree(
db: &dyn DefDatabase,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
tree: ast::UseTree,
) -> Option<(UseTree, Arena<ast::UseTree>)> {
let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
let mut lowering = UseTreeLowering { db, span_map, mapping: Arena::new() };
let tree = lowering.lower_use_tree(tree)?;
Some((tree, lowering.mapping))
}


@ -457,7 +457,7 @@ impl Printer<'_> {
}
}
ModItem::MacroCall(it) => {
let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
let MacroCall { path, ast_id: _, expand_to: _, call_site: _ } = &self.tree[it];
wln!(self, "{}!(...);", path.display(self.db.upcast()));
}
ModItem::MacroRules(it) => {


@ -63,7 +63,7 @@ use std::{
panic::{RefUnwindSafe, UnwindSafe},
};
use base_db::{impl_intern_key, salsa, CrateId, ProcMacroKind};
use base_db::{impl_intern_key, salsa, span::SyntaxContextId, CrateId, ProcMacroKind};
use hir_expand::{
ast_id_map::{AstIdNode, FileAstId},
attrs::{Attr, AttrId, AttrInput},
@ -72,19 +72,18 @@ use hir_expand::{
builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
db::ExpandDatabase,
eager::expand_eager_macro_input,
hygiene::Hygiene,
name::Name,
proc_macro::ProcMacroExpander,
AstId, ExpandError, ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind,
MacroDefId, MacroDefKind, UnresolvedMacro,
MacroDefId, MacroDefKind,
};
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use stdx::impl_from;
use syntax::ast;
use syntax::{ast, AstNode};
use ::tt::token_id as tt;
pub use hir_expand::tt;
use crate::{
builtin_type::BuiltinType,
@ -1166,16 +1165,20 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
) -> Result<ExpandResult<Option<MacroCallId>>, UnresolvedMacro> {
let expands_to = hir_expand::ExpandTo::from_call_site(self.value);
let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
let h = Hygiene::new(db, self.file_id);
let path = self.value.path().and_then(|path| path::ModPath::from_src(db, path, &h));
let span_map = db.span_map(self.file_id);
let path =
self.value.path().and_then(|path| path::ModPath::from_src(db, path, span_map.as_ref()));
let Some(path) = path else {
return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
};
let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
call_site,
expands_to,
krate,
resolver,
@ -1200,17 +1203,19 @@ impl<T: AstIdNode> AstIdWithPath<T> {
fn macro_call_as_call_id(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
) -> Result<Option<MacroCallId>, UnresolvedMacro> {
macro_call_as_call_id_with_eager(db, call, expand_to, krate, resolver, resolver)
macro_call_as_call_id_with_eager(db, call, call_site, expand_to, krate, resolver, resolver)
.map(|res| res.value)
}
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@ -1222,7 +1227,7 @@ fn macro_call_as_call_id_with_eager(
let res = match def.kind {
MacroDefKind::BuiltInEager(..) => {
let macro_call = InFile::new(call.ast_id.file_id, call.ast_id.to_node(db));
expand_eager_macro_input(db, krate, macro_call, def, &|path| {
expand_eager_macro_input(db, krate, macro_call, def, call_site, &|path| {
eager_resolver(path).filter(MacroDefId::is_fn_like)
})
}
@ -1231,6 +1236,7 @@ fn macro_call_as_call_id_with_eager(
db,
krate,
MacroCallKind::FnLike { ast_id: call.ast_id, expand_to },
call_site,
)),
err: None,
},
@ -1315,6 +1321,7 @@ fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@ -1329,6 +1336,7 @@ fn derive_macro_as_call_id(
derive_index: derive_pos,
derive_attr_index,
},
call_site,
);
Ok((macro_id, def_id, call_id))
}
@ -1341,15 +1349,13 @@ fn attr_macro_as_call_id(
def: MacroDefId,
) -> MacroCallId {
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => (
{
let mut tt = tt.0.clone();
tt.delimiter = tt::Delimiter::UNSPECIFIED;
tt
},
tt.1.clone(),
),
_ => (tt::Subtree::empty(), Default::default()),
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
Some(tt)
}
_ => None,
};
def.as_lazy_macro(
@ -1357,11 +1363,18 @@ fn attr_macro_as_call_id(
krate,
MacroCallKind::Attr {
ast_id: item_attr.ast_id,
attr_args: Arc::new(arg),
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
macro_attr.ctxt,
)
}
#[derive(Debug)]
pub struct UnresolvedMacro {
pub path: hir_expand::mod_path::ModPath,
}
intern::impl_internable!(
crate::type_ref::TypeRef,
crate::type_ref::TraitRef,


@ -3,7 +3,7 @@ use std::cell::OnceCell;
use hir_expand::{
ast_id_map::{AstIdMap, AstIdNode},
hygiene::Hygiene,
span::{SpanMap, SpanMapRef},
AstId, HirFileId, InFile,
};
use syntax::ast;
@ -13,33 +13,34 @@ use crate::{db::DefDatabase, path::Path};
pub struct LowerCtx<'a> {
pub db: &'a dyn DefDatabase,
hygiene: Hygiene,
span_map: SpanMap,
// FIXME: This optimization is probably pointless, ast id map should pretty much always exist anyways.
ast_id_map: Option<(HirFileId, OnceCell<Arc<AstIdMap>>)>,
}
impl<'a> LowerCtx<'a> {
pub fn new(db: &'a dyn DefDatabase, hygiene: &Hygiene, file_id: HirFileId) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: Some((file_id, OnceCell::new())) }
pub fn new(db: &'a dyn DefDatabase, span_map: SpanMap, file_id: HirFileId) -> Self {
LowerCtx { db, span_map, ast_id_map: Some((file_id, OnceCell::new())) }
}
pub fn with_file_id(db: &'a dyn DefDatabase, file_id: HirFileId) -> Self {
LowerCtx {
db,
hygiene: Hygiene::new(db.upcast(), file_id),
span_map: db.span_map(file_id),
ast_id_map: Some((file_id, OnceCell::new())),
}
}
pub fn with_hygiene(db: &'a dyn DefDatabase, hygiene: &Hygiene) -> Self {
LowerCtx { db, hygiene: hygiene.clone(), ast_id_map: None }
pub fn with_span_map(db: &'a dyn DefDatabase, span_map: SpanMap) -> Self {
LowerCtx { db, span_map, ast_id_map: None }
}
pub(crate) fn hygiene(&self) -> &Hygiene {
&self.hygiene
pub(crate) fn span_map(&self) -> SpanMapRef<'_> {
self.span_map.as_ref()
}
pub(crate) fn lower_path(&self, ast: ast::Path) -> Option<Path> {
Path::from_src(ast, self)
Path::from_src(self, ast)
}
pub(crate) fn ast_id<N: AstIdNode>(&self, item: &N) -> Option<AstId<N>> {


@ -15,7 +15,6 @@ use crate::macro_expansion_tests::check;
fn token_mapping_smoke_test() {
check(
r#"
// +tokenids
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
@ -24,26 +23,22 @@ macro_rules! f {
};
}
// +tokenids
// +spans+syntaxctxt
f!(struct MyTraitMap2);
"#,
expect![[r##"
// call ids will be shifted by Shift(30)
// +tokenids
macro_rules! f {#0
(#1 struct#2 $#3ident#4:#5ident#6 )#1 =#7>#8 {#9
struct#10 $#11ident#12 {#13
map#14:#15 :#16:#17std#18:#19:#20collections#21:#22:#23HashSet#24<#25(#26)#26>#27,#28
}#13
}#9;#29
}#0
expect![[r#"
macro_rules! f {
( struct $ident:ident ) => {
struct $ident {
map: ::std::collections::HashSet<()>,
}
};
}
// // +tokenids
// f!(struct#1 MyTraitMap2#2);
struct#10 MyTraitMap2#32 {#13
map#14:#15 ::std#18::collections#21::HashSet#24<#25(#26)#26>#27,#28
}#13
"##]],
struct#FileId(0):1@58..64\2# MyTraitMap2#FileId(0):2@31..42\0# {#FileId(0):1@72..73\2#
map#FileId(0):1@86..89\2#:#FileId(0):1@89..90\2# #FileId(0):1@89..90\2#::#FileId(0):1@91..92\2#std#FileId(0):1@93..96\2#::#FileId(0):1@96..97\2#collections#FileId(0):1@98..109\2#::#FileId(0):1@109..110\2#HashSet#FileId(0):1@111..118\2#<#FileId(0):1@118..119\2#(#FileId(0):1@119..120\2#)#FileId(0):1@120..121\2#>#FileId(0):1@121..122\2#,#FileId(0):1@122..123\2#
}#FileId(0):1@132..133\2#
"#]],
);
}
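
Reading the new expectations: each token is annotated as `token#FileId(file):ast_id@start..end\ctx#`, that is, its span anchor (file plus item AST id), the text range relative to that anchor, and the trailing syntax context id, matching the `SpanData` introduced in `crates/base-db/src/span.rs`. For example, `MyTraitMap2#FileId(0):2@31..42\0#` marks an identifier anchored to item 2 of file 0 at offsets 31..42 in the root syntax context, while the tokens originating in the `macro_rules!` body carry the expansion's context (`\2#`).
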
@ -53,49 +48,42 @@ fn token_mapping_floats() {
// (and related issues)
check(
r#"
// +tokenids
// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
// +tokenids
// +spans+syntaxctxt
f! {
fn main() {
1;
1.0;
((1,),).0.0;
let x = 1;
}
}
"#,
expect![[r##"
// call ids will be shifted by Shift(18)
// +tokenids
macro_rules! f {#0
(#1$#2(#3$#4tt#5:#6tt#7)#3*#8)#1 =#9>#10 {#11
$#12(#13$#14tt#15)#13*#16
}#11;#17
}#0
expect![[r#"
// +spans+syntaxctxt
macro_rules! f {
($($tt:tt)*) => {
$($tt)*
};
}
// // +tokenids
// f! {
// fn#1 main#2() {
// 1#5;#6
// 1.0#7;#8
// let#9 x#10 =#11 1#12;#13
// }
// }
fn#19 main#20(#21)#21 {#22
1#23;#24
1.0#25;#26
let#27 x#28 =#29 1#30;#31
}#22
fn#FileId(0):2@30..32\0# main#FileId(0):2@33..37\0#(#FileId(0):2@37..38\0#)#FileId(0):2@38..39\0# {#FileId(0):2@40..41\0#
1#FileId(0):2@50..51\0#;#FileId(0):2@51..52\0#
1.0#FileId(0):2@61..64\0#;#FileId(0):2@64..65\0#
(#FileId(0):2@74..75\0#(#FileId(0):2@75..76\0#1#FileId(0):2@76..77\0#,#FileId(0):2@77..78\0# )#FileId(0):2@78..79\0#,#FileId(0):2@79..80\0# )#FileId(0):2@80..81\0#.#FileId(0):2@81..82\0#0#FileId(0):2@82..85\0#.#FileId(0):2@82..85\0#0#FileId(0):2@82..85\0#;#FileId(0):2@85..86\0#
let#FileId(0):2@95..98\0# x#FileId(0):2@99..100\0# =#FileId(0):2@101..102\0# 1#FileId(0):2@103..104\0#;#FileId(0):2@104..105\0#
}#FileId(0):2@110..111\0#
"##]],
"#]],
);
}
@ -105,59 +93,115 @@ fn eager_expands_with_unresolved_within() {
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
format_args!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
concat!("hello", identity!("world"), unresolved!(), identity!("!"));
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
builtin #format_args ("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
/* error: unresolved macro unresolved */"helloworld!";
}
"##]],
);
}
#[test]
fn token_mapping_eager() {
fn concat_spans() {
check(
r#"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($expr:expr) => { $expr };
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
format_args/*+tokenids*/!("{} {} {}", format_args!("{}", 0), foo, identity!(10), "bar")
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
concat/*+spans+syntaxctxt*/!("hello", concat!("w", identity!("o")), identity!("rld"), unresolved!(), identity!("!"));
}
}
"#,
expect![[r##"
#[rustc_builtin_macro]
#[macro_export]
macro_rules! format_args {}
macro_rules! concat {}
macro_rules! identity {
($expr:expr) => { $expr };
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
// format_args/*+tokenids*/!("{} {} {}"#1,#2 format_args#3!#4("{}"#6,#7 0#8),#9 foo#10,#11 identity#12!#13(10#15),#16 "bar"#17)
builtin#4294967295 ##4294967295format_args#4294967295 (#0"{} {} {}"#1,#2 format_args#3!#4(#5"{}"#6,#7 0#8)#5,#9 foo#10,#11 identity#12!#13(#1410#15)#14,#16 "bar"#17)#0
#[rustc_builtin_macro]
#[macro_export]
macro_rules! concat {}
macro_rules! identity {
($tt:tt) => {
$tt
}
}
fn main(foo: ()) {
/* error: unresolved macro unresolved */"helloworld!"#FileId(0):3@207..323\6#;
}
}
"##]],
);
}
#[test]
fn token_mapping_across_files() {
check(
r#"
//- /lib.rs
#[macro_use]
mod foo;
mk_struct/*+spans+syntaxctxt*/!(Foo with u32);
//- /foo.rs
macro_rules! mk_struct {
($foo:ident with $ty:ty) => { struct $foo($ty); }
}
"#,
expect![[r#"
#[macro_use]
mod foo;
struct#FileId(1):1@59..65\2# Foo#FileId(0):2@32..35\0#(#FileId(1):1@70..71\2#u32#FileId(0):2@41..44\0#)#FileId(1):1@74..75\2#;#FileId(1):1@75..76\2#
"#]],
);
}
#[test]
fn float_field_access_macro_input() {
check(

View File

@ -16,21 +16,16 @@ mod proc_macros;
use std::{iter, ops::Range, sync};
use ::mbe::TokenMap;
use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase};
use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase};
use expect_test::Expect;
use hir_expand::{
db::{DeclarativeMacroExpander, ExpandDatabase},
AstId, InFile, MacroFile,
};
use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId};
use stdx::format_to;
use syntax::{
ast::{self, edit::IndentLevel},
AstNode, SyntaxElement,
SyntaxKind::{self, COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, TextRange, T,
AstNode,
SyntaxKind::{COMMENT, EOF, IDENT, LIFETIME_IDENT},
SyntaxNode, T,
};
use tt::token_id::{Subtree, TokenId};
use crate::{
db::DefDatabase,
@ -39,6 +34,7 @@ use crate::{
resolver::HasResolver,
src::HasSource,
test_db::TestDB,
tt::Subtree,
AdtId, AsMacroCall, Lookup, ModuleDefId,
};
@ -88,43 +84,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
let mut text_edits = Vec::new();
let mut expansions = Vec::new();
for macro_ in source_file.syntax().descendants().filter_map(ast::Macro::cast) {
let mut show_token_ids = false;
for comment in macro_.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_token_ids |= comment.to_string().contains("+tokenids");
}
if !show_token_ids {
continue;
}
let call_offset = macro_.syntax().text_range().start().into();
let file_ast_id = db.ast_id_map(source.file_id).ast_id(&macro_);
let ast_id = AstId::new(source.file_id, file_ast_id.upcast());
let DeclarativeMacroExpander { mac, def_site_token_map } =
&*db.decl_macro_expander(krate, ast_id);
assert_eq!(mac.err(), None);
let tt = match &macro_ {
ast::Macro::MacroRules(mac) => mac.token_tree().unwrap(),
ast::Macro::MacroDef(_) => unimplemented!(""),
};
let tt_start = tt.syntax().text_range().start();
tt.syntax().descendants_with_tokens().filter_map(SyntaxElement::into_token).for_each(
|token| {
let range = token.text_range().checked_sub(tt_start).unwrap();
if let Some(id) = def_site_token_map.token_by_range(range) {
let offset = (range.end() + tt_start).into();
text_edits.push((offset..offset, format!("#{}", id.0)));
}
},
);
text_edits.push((
call_offset..call_offset,
format!("// call ids will be shifted by {:?}\n", mac.shift()),
));
}
for macro_call in source_file.syntax().descendants().filter_map(ast::MacroCall::cast) {
let macro_call = InFile::new(source.file_id, &macro_call);
let res = macro_call
@ -135,20 +94,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
})
.unwrap();
let macro_call_id = res.value.unwrap();
let macro_file = MacroFile { macro_call_id };
let macro_file = MacroFileId { macro_call_id };
let mut expansion_result = db.parse_macro_expansion(macro_file);
expansion_result.err = expansion_result.err.or(res.err);
expansions.push((macro_call.value.clone(), expansion_result, db.macro_arg(macro_call_id)));
expansions.push((macro_call.value.clone(), expansion_result));
}
for (call, exp, arg) in expansions.into_iter().rev() {
for (call, exp) in expansions.into_iter().rev() {
let mut tree = false;
let mut expect_errors = false;
let mut show_token_ids = false;
let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.syntax().children_with_tokens().filter(|it| it.kind() == COMMENT) {
tree |= comment.to_string().contains("+tree");
expect_errors |= comment.to_string().contains("+errors");
show_token_ids |= comment.to_string().contains("+tokenids");
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let mut expn_text = String::new();
@ -164,13 +125,16 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
} else {
assert!(
parse.errors().is_empty(),
"parse errors in expansion: \n{:#?}",
parse.errors()
"parse errors in expansion: \n{:#?}\n```\n{}\n```",
parse.errors(),
parse.syntax_node(),
);
}
let pp = pretty_print_macro_expansion(
parse.syntax_node(),
show_token_ids.then_some(&*token_map),
SpanMapRef::ExpansionSpanMap(&token_map),
show_spans,
show_ctxt,
);
let indent = IndentLevel::from_node(call.syntax());
let pp = reindent(indent, pp);
@ -185,27 +149,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
let range = call.syntax().text_range();
let range: Range<usize> = range.into();
if show_token_ids {
if let Some((tree, map, _)) = arg.value.as_deref() {
let tt_range = call.token_tree().unwrap().syntax().text_range();
let mut ranges = Vec::new();
extract_id_ranges(&mut ranges, map, tree);
for (range, id) in ranges {
let idx = (tt_range.start() + range.end()).into();
text_edits.push((idx..idx, format!("#{}", id.0)));
}
}
text_edits.push((range.start..range.start, "// ".into()));
call.to_string().match_indices('\n').for_each(|(offset, _)| {
let offset = offset + 1 + range.start;
text_edits.push((offset..offset, "// ".into()));
});
text_edits.push((range.end..range.end, "\n".into()));
text_edits.push((range.end..range.end, expn_text));
} else {
text_edits.push((range, expn_text));
}
text_edits.push((range, expn_text));
}
text_edits.sort_by_key(|(range, _)| range.start);
@ -226,9 +170,22 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
}
_ => None,
};
if let Some(src) = src {
if src.file_id.is_attr_macro(&db) || src.file_id.is_custom_derive(&db) {
let pp = pretty_print_macro_expansion(src.value, None);
let call = src.file_id.call_node(&db).expect("macro file");
let mut show_spans = false;
let mut show_ctxt = false;
for comment in call.value.children_with_tokens().filter(|it| it.kind() == COMMENT) {
show_spans |= comment.to_string().contains("+spans");
show_ctxt |= comment.to_string().contains("+syntaxctxt");
}
let pp = pretty_print_macro_expansion(
src.value,
db.span_map(src.file_id).as_ref(),
show_spans,
show_ctxt,
);
format_to!(expanded_text, "\n{}", pp)
}
}
@ -237,7 +194,12 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
for impl_id in def_map[local_id].scope.impls() {
let src = impl_id.lookup(&db).source(&db);
if src.file_id.is_builtin_derive(&db) {
let pp = pretty_print_macro_expansion(src.value.syntax().clone(), None);
let pp = pretty_print_macro_expansion(
src.value.syntax().clone(),
db.span_map(src.file_id).as_ref(),
false,
false,
);
format_to!(expanded_text, "\n{}", pp)
}
}
@ -246,20 +208,6 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream
expect.assert_eq(&expanded_text);
}
fn extract_id_ranges(ranges: &mut Vec<(TextRange, TokenId)>, map: &TokenMap, tree: &Subtree) {
tree.token_trees.iter().for_each(|tree| match tree {
tt::TokenTree::Leaf(leaf) => {
let id = match leaf {
tt::Leaf::Literal(it) => it.span,
tt::Leaf::Punct(it) => it.span,
tt::Leaf::Ident(it) => it.span,
};
ranges.extend(map.ranges_by_token(id, SyntaxKind::ERROR).map(|range| (range, id)));
}
tt::TokenTree::Subtree(tree) => extract_id_ranges(ranges, map, tree),
});
}
fn reindent(indent: IndentLevel, pp: String) -> String {
if !pp.contains('\n') {
return pp;
@ -276,7 +224,12 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
res
}
fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> String {
fn pretty_print_macro_expansion(
expn: SyntaxNode,
map: SpanMapRef<'_>,
show_spans: bool,
show_ctxt: bool,
) -> String {
let mut res = String::new();
let mut prev_kind = EOF;
let mut indent_level = 0;
@ -322,10 +275,22 @@ fn pretty_print_macro_expansion(expn: SyntaxNode, map: Option<&TokenMap>) -> Str
}
prev_kind = curr_kind;
format_to!(res, "{}", token);
if let Some(map) = map {
if let Some(id) = map.token_by_range(token.text_range()) {
format_to!(res, "#{}", id.0);
if show_spans || show_ctxt {
let span = map.span_for_range(token.text_range());
format_to!(res, "#");
if show_spans {
format_to!(
res,
"{:?}:{:?}@{:?}",
span.anchor.file_id,
span.anchor.ast_id.into_raw(),
span.range,
);
}
if show_ctxt {
format_to!(res, "\\{}", span.ctx);
}
format_to!(res, "#");
}
}
res
@ -342,6 +307,9 @@ impl base_db::ProcMacroExpander for IdentityWhenValidProcMacroExpander {
subtree: &Subtree,
_: Option<&Subtree>,
_: &base_db::Env,
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<Subtree, base_db::ProcMacroExpansionError> {
let (parse, _) =
::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems);

View File

@ -93,6 +93,41 @@ fn foo() {
);
}
#[test]
fn macro_rules_in_attr() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
check(
r#"
//- proc_macros: identity
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
id! {
#[proc_macros::identity]
impl Foo for WrapBj {
async fn foo(&self) {
self.id().await;
}
}
}
"#,
expect![[r#"
macro_rules! id {
($($t:tt)*) => {
$($t)*
};
}
#[proc_macros::identity] impl Foo for WrapBj {
async fn foo(&self ) {
self .id().await ;
}
}
"#]],
);
}
#[test]
fn float_parsing_panic() {
// Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
@ -127,3 +162,27 @@ macro_rules! id {
"#]],
);
}
#[test]
fn float_attribute_mapping() {
check(
r#"
//- proc_macros: identity
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
"#,
expect![[r#"
//+spans+syntaxctxt
#[proc_macros::identity]
fn foo(&self) {
self.0. 1;
}
fn#FileId(0):1@45..47\0# foo#FileId(0):1@48..51\0#(#FileId(0):1@51..52\0#&#FileId(0):1@52..53\0#self#FileId(0):1@53..57\0# )#FileId(0):1@57..58\0# {#FileId(0):1@59..60\0#
self#FileId(0):1@65..69\0# .#FileId(0):1@69..70\0#0#FileId(0):1@70..71\0#.#FileId(0):1@71..72\0#1#FileId(0):1@73..74\0#;#FileId(0):1@74..75\0#
}#FileId(0):1@76..77\0#"#]],
);
}

View File

@ -5,7 +5,7 @@
use std::{cmp::Ordering, iter, mem};
use base_db::{CrateId, Dependency, Edition, FileId};
use base_db::{span::SyntaxContextId, CrateId, Dependency, Edition, FileId};
use cfg::{CfgExpr, CfgOptions};
use either::Either;
use hir_expand::{
@ -14,7 +14,6 @@ use hir_expand::{
builtin_attr_macro::find_builtin_attr,
builtin_derive_macro::find_builtin_derive,
builtin_fn_macro::find_builtin_macro,
hygiene::Hygiene,
name::{name, AsName, Name},
proc_macro::ProcMacroExpander,
ExpandResult, ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc,
@ -85,8 +84,17 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
.enumerate()
.map(|(idx, it)| {
// FIXME: a hacky way to create a Name from string.
let name =
tt::Ident { text: it.name.clone(), span: tt::TokenId::unspecified() };
let name = tt::Ident {
text: it.name.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
};
(name.as_name(), ProcMacroExpander::new(base_db::ProcMacroId(idx as u32)))
})
.collect())
@ -112,7 +120,6 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro,
hygienes: FxHashMap::default(),
};
if tree_id.is_block() {
collector.seed_with_inner(tree_id);
@ -212,9 +219,23 @@ struct MacroDirective {
#[derive(Clone, Debug, Eq, PartialEq)]
enum MacroDirectiveKind {
FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
FnLike {
ast_id: AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
call_site: SyntaxContextId,
},
Derive {
ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
derive_pos: usize,
call_site: SyntaxContextId,
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
attr: Attr,
mod_item: ModItem,
/* is this needed? */ tree: TreeId,
},
}
/// Walks the module tree recursively
@ -242,12 +263,6 @@ struct DefCollector<'a> {
/// This also stores the attributes to skip when we resolve derive helpers and non-macro
/// non-builtin attributes in general.
skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
/// `Hygiene` cache, because `Hygiene` construction is expensive.
///
/// Almost all paths should have been lowered to `ModPath` during `ItemTree` construction.
/// However, `DefCollector` still needs to lower paths in attributes, in particular those in
/// derive meta item list.
hygienes: FxHashMap<HirFileId, Hygiene>,
}
impl DefCollector<'_> {
@ -315,12 +330,11 @@ impl DefCollector<'_> {
}
if *attr_name == hir_expand::name![feature] {
let hygiene = &Hygiene::new_unhygienic();
let features = attr
.parse_path_comma_token_tree(self.db.upcast(), hygiene)
.parse_path_comma_token_tree(self.db.upcast())
.into_iter()
.flatten()
.filter_map(|feat| match feat.segments() {
.filter_map(|(feat, _)| match feat.segments() {
[name] => Some(name.to_smol_str()),
_ => None,
});
@ -471,7 +485,7 @@ impl DefCollector<'_> {
directive.module_id,
MacroCallKind::Attr {
ast_id: ast_id.ast_id,
attr_args: Arc::new((tt::Subtree::empty(), Default::default())),
attr_args: None,
invoc_attr_index: attr.id,
},
attr.path().clone(),
@ -1119,10 +1133,11 @@ impl DefCollector<'_> {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
let call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
*call_site,
*expand_to,
self.def_map.krate,
resolver_def_id,
@ -1134,12 +1149,13 @@ impl DefCollector<'_> {
return false;
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
*derive_attr,
*derive_pos as u32,
*call_site,
self.def_map.krate,
resolver,
);
@ -1212,7 +1228,7 @@ impl DefCollector<'_> {
};
if matches!(
def,
MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. }
if expander.is_derive()
) {
// Resolved to `#[derive]`
@ -1234,22 +1250,10 @@ impl DefCollector<'_> {
};
let ast_id = ast_id.with_value(ast_adt_id);
let extend_unhygenic;
let hygiene = if file_id.is_macro() {
self.hygienes
.entry(file_id)
.or_insert_with(|| Hygiene::new(self.db.upcast(), file_id))
} else {
// Avoid heap allocation (`Hygiene` embraces `Arc`) and hash map entry
// when we're in an ordinary (non-macro) file.
extend_unhygenic = Hygiene::new_unhygienic();
&extend_unhygenic
};
match attr.parse_path_comma_token_tree(self.db.upcast(), hygiene) {
match attr.parse_path_comma_token_tree(self.db.upcast()) {
Some(derive_macros) => {
let mut len = 0;
for (idx, path) in derive_macros.enumerate() {
for (idx, (path, call_site)) in derive_macros.enumerate() {
let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
self.unresolved_macros.push(MacroDirective {
module_id: directive.module_id,
@ -1258,6 +1262,7 @@ impl DefCollector<'_> {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
call_site,
},
container: directive.container,
});
@ -1414,11 +1419,12 @@ impl DefCollector<'_> {
for directive in &self.unresolved_macros {
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
*call_site,
*expand_to,
self.def_map.krate,
|path| {
@ -1444,7 +1450,7 @@ impl DefCollector<'_> {
));
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@ -1823,9 +1829,8 @@ impl ModCollector<'_, '_> {
cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
let mut single_imports = Vec::new();
let hygiene = Hygiene::new_unhygienic();
for attr in macro_use_attrs {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast(), &hygiene) else {
let Some(paths) = attr.parse_path_comma_token_tree(db.upcast()) else {
// `#[macro_use]` (without any paths) found, forget collected names and just import
// all visible macros.
self.def_collector.import_macros_from_extern_crate(
@ -1835,7 +1840,7 @@ impl ModCollector<'_, '_> {
);
return;
};
for path in paths {
for (path, _) in paths {
if let Some(name) = path.as_ident() {
single_imports.push(name.clone());
}
@ -2083,8 +2088,18 @@ impl ModCollector<'_, '_> {
let name = match attrs.by_key("rustc_builtin_macro").string_value() {
Some(it) => {
// FIXME: a hacky way to create a Name from string.
name =
tt::Ident { text: it.clone(), span: tt::TokenId::unspecified() }.as_name();
name = tt::Ident {
text: it.clone(),
span: tt::SpanData {
range: syntax::TextRange::empty(syntax::TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: base_db::span::ROOT_ERASED_FILE_AST_ID,
},
ctx: SyntaxContextId::ROOT,
},
}
.as_name();
&name
}
None => {
@ -2210,8 +2225,12 @@ impl ModCollector<'_, '_> {
}
}
fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
fn collect_macro_call(
&mut self,
&MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
container: ItemContainerId,
) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(&path));
let db = self.def_collector.db;
// FIXME: Immediately expanding in "Case 1" is insufficient since "Case 2" may also define
@ -2222,7 +2241,8 @@ impl ModCollector<'_, '_> {
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
mac.expand_to,
call_site,
expand_to,
self.def_collector.def_map.krate,
|path| {
path.as_ident().and_then(|name| {
@ -2276,7 +2296,7 @@ impl ModCollector<'_, '_> {
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
kind: MacroDirectiveKind::FnLike { ast_id, expand_to: expand_to, call_site },
container,
});
}
@ -2363,7 +2383,6 @@ mod tests {
from_glob_import: Default::default(),
skip_attrs: Default::default(),
is_proc_macro: false,
hygienes: FxHashMap::default(),
};
collector.seed_with_top_level();
collector.collect();

View File

@ -1,7 +1,7 @@
//! This module resolves `mod foo;` declaration to file.
use arrayvec::ArrayVec;
use base_db::{AnchoredPath, FileId};
use hir_expand::name::Name;
use hir_expand::{name::Name, HirFileIdExt};
use limit::Limit;
use syntax::SmolStr;
@ -66,7 +66,7 @@ impl ModDir {
attr_path: Option<&SmolStr>,
) -> Result<(FileId, bool, ModDir), Box<[String]>> {
let name = name.unescaped();
let orig_file_id = file_id.original_file(db.upcast());
let orig_file_id = file_id.original_file_respecting_includes(db.upcast());
let mut candidate_files = ArrayVec::<_, 2>::new();
match attr_path {

View File

@ -8,9 +8,7 @@ use base_db::{fixture::WithFixture, SourceDatabase};
use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::{db::DefDatabase, test_db::TestDB};
use super::DefMap;
use crate::{db::DefDatabase, nameres::DefMap, test_db::TestDB};
fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
let db = TestDB::with_files(ra_fixture);

View File

@ -1,13 +1,19 @@
use base_db::SourceDatabaseExt;
use base_db::{SourceDatabase, SourceDatabaseExt};
use triomphe::Arc;
use crate::{db::DefDatabase, AdtId, ModuleDefId};
use super::*;
use crate::{
db::DefDatabase,
nameres::tests::{TestDB, WithFixture},
AdtId, ModuleDefId,
};
fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change: &str) {
let (mut db, pos) = TestDB::with_position(ra_fixture_initial);
let krate = db.test_crate();
let krate = {
let crate_graph = db.crate_graph();
// Some of these tests use minicore/proc-macros, which will be injected as the first crate
crate_graph.iter().last().unwrap()
};
{
let events = db.log_executed(|| {
db.crate_def_map(krate);
@ -28,84 +34,199 @@ fn check_def_map_is_not_recomputed(ra_fixture_initial: &str, ra_fixture_change:
fn typing_inside_a_function_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- /lib.rs
mod foo;$0
use crate::foo::bar::Baz;
enum E { A, B }
use E::*;
fn foo() -> i32 {
    1 + 1
}
#[cfg(never)]
fn no() {}
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
pub struct Baz;
",
r"
mod foo;
use crate::foo::bar::Baz;
enum E { A, B }
use E::*;
fn foo() -> i32 { 92 }
#[cfg(never)]
fn no() {}
",
);
}
#[test]
fn typing_inside_a_macro_should_not_invalidate_def_map() {
let (mut db, pos) = TestDB::with_position(
check_def_map_is_not_recomputed(
r"
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn f() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
m!(X);
",
//- /foo/bar.rs
$0
m!(X);
pub struct S {}
",
r"
m!(Y);
pub struct S {}
",
);
let krate = db.test_crate();
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(format!("{events:?}").contains("crate_def_map"), "{events:#?}")
}
db.set_file_text(pos.file_id, Arc::from("m!(Y);"));
}
{
let events = db.log_executed(|| {
let crate_def_map = db.crate_def_map(krate);
let (_, module_data) = crate_def_map.modules.iter().last().unwrap();
assert_eq!(module_data.scope.resolutions().count(), 1);
});
assert!(!format!("{events:?}").contains("crate_def_map"), "{events:#?}")
#[test]
fn typing_inside_an_attribute_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity]
fn f() { foo }
",
);
}
#[test]
fn typing_inside_an_attribute_arg_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[proc_macros::identity]
fn f() {}
",
r"
#[proc_macros::identity(foo)]
fn f() {}
",
);
}
#[test]
fn typing_inside_macro_heavy_file_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: identity, derive_identity
//- /lib.rs
macro_rules! m {
($ident:ident) => {
fn fm() {
$ident + $ident;
};
}
}
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
fn f() {}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
r"
fn f() {0}
m!(X);
macro_rules! m2 {
($ident:ident) => {
fn f2() {
$ident + $ident;
};
}
}
m2!(X);
#[proc_macros::identity]
#[derive(proc_macros::DeriveIdentity)]
pub struct S {}
",
);
}
#[test]
fn typing_inside_a_derive_should_not_invalidate_def_map() {
check_def_map_is_not_recomputed(
r"
//- proc_macros: derive_identity
//- minicore:derive
//- /lib.rs
mod foo;
//- /foo/mod.rs
pub mod bar;
//- /foo/bar.rs
$0
#[derive(proc_macros::DeriveIdentity)]
#[allow()]
struct S;
",
r"
#[derive(proc_macros::DeriveIdentity)]
#[allow(dead_code)]
struct S;
",
);
}
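The common thread in these invalidation tests: a `SpanData` (as defined elsewhere in this diff) carries its text range together with a `SpanAnchor` (file id plus erased item AST id). Anchoring ranges to a nearby item means edits elsewhere in the file shift the anchor rather than every stored span, so the def map does not need to be recomputed. A toy illustration of that design, with plain offsets standing in for the real anchors:

struct Anchor { offset: u32 }                               // e.g. the item a span is anchored to
struct AnchoredSpan { anchor: usize, start: u32, end: u32 } // range relative to the anchor

fn absolute(span: &AnchoredSpan, anchors: &[Anchor]) -> (u32, u32) {
    let base = anchors[span.anchor].offset;
    (base + span.start, base + span.end)
}

fn main() {
    let mut anchors = vec![Anchor { offset: 100 }];
    let span = AnchoredSpan { anchor: 0, start: 5, end: 8 };
    assert_eq!(absolute(&span, &anchors), (105, 108));
    // Typing above the item moves the anchor; the stored span is unchanged,
    // so anything keyed on it stays valid.
    anchors[0].offset = 120;
    assert_eq!(absolute(&span, &anchors), (125, 128));
}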
#[test]
fn typing_inside_a_function_should_not_invalidate_item_expansions() {

View File

@ -96,8 +96,8 @@ pub enum GenericArg {
impl Path {
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub fn from_src(path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
lower::lower_path(path, ctx)
pub fn from_src(ctx: &LowerCtx<'_>, path: ast::Path) -> Option<Path> {
lower::lower_path(ctx, path)
}
/// Converts a known mod path to `Path`.

View File

@ -4,8 +4,10 @@ use std::iter;
use crate::{lower::LowerCtx, type_ref::ConstRef};
use either::Either;
use hir_expand::name::{name, AsName};
use hir_expand::{
mod_path::resolve_crate_root,
name::{name, AsName},
};
use intern::Interned;
use syntax::ast::{self, AstNode, HasTypeBounds};
@ -16,12 +18,12 @@ use crate::{
/// Converts an `ast::Path` to `Path`. Works with use trees.
/// It correctly handles `$crate` based path from macro call.
pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path> {
pub(super) fn lower_path(ctx: &LowerCtx<'_>, mut path: ast::Path) -> Option<Path> {
let mut kind = PathKind::Plain;
let mut type_anchor = None;
let mut segments = Vec::new();
let mut generic_args = Vec::new();
let hygiene = ctx.hygiene();
let span_map = ctx.span_map();
loop {
let segment = path.segment()?;
@ -31,31 +33,31 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
// FIXME: this should just return name
match hygiene.name_ref_to_name(ctx.db.upcast(), name_ref) {
Either::Left(name) => {
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
.or_else(|| {
lower_generic_args_from_fn_path(
ctx,
segment.param_list(),
segment.ret_type(),
)
})
.map(Interned::new);
if let Some(_) = args {
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
}
Either::Right(crate_id) => {
kind = PathKind::DollarCrate(crate_id);
break;
}
if name_ref.text() == "$crate" {
break kind = resolve_crate_root(
ctx.db.upcast(),
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate);
}
let name = name_ref.as_name();
let args = segment
.generic_arg_list()
.and_then(|it| lower_generic_args(ctx, it))
.or_else(|| {
lower_generic_args_from_fn_path(
ctx,
segment.param_list(),
segment.ret_type(),
)
})
.map(Interned::new);
if let Some(_) = args {
generic_args.resize(segments.len(), None);
generic_args.push(args);
}
segments.push(name);
}
ast::PathSegmentKind::SelfTypeKw => {
segments.push(name![Self]);
@ -74,7 +76,7 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// <T as Trait<A>>::Foo desugars to Trait<Self=T, A>::Foo
Some(trait_ref) => {
let Path::Normal { mod_path, generic_args: path_generic_args, .. } =
Path::from_src(trait_ref.path()?, ctx)?
Path::from_src(ctx, trait_ref.path()?)?
else {
return None;
};
@ -151,8 +153,14 @@ pub(super) fn lower_path(mut path: ast::Path, ctx: &LowerCtx<'_>) -> Option<Path
// We follow what it did anyway :)
if segments.len() == 1 && kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(ctx.db.upcast(), path) {
kind = PathKind::DollarCrate(crate_id);
let syn_ctxt = span_map.span_for_range(path.segment()?.syntax().text_range()).ctx;
if let Some(macro_call_id) = ctx.db.lookup_intern_syntax_context(syn_ctxt).outer_expn {
if ctx.db.lookup_intern_macro_call(macro_call_id).def.local_inner {
kind = match resolve_crate_root(ctx.db.upcast(), syn_ctxt) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
}
}
}
}
}

View File

@ -34,6 +34,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}

View File

@ -2,7 +2,7 @@
use std::iter;
use hir_expand::{hygiene::Hygiene, InFile};
use hir_expand::{span::SpanMapRef, InFile};
use la_arena::ArenaMap;
use syntax::ast;
use triomphe::Arc;
@ -34,22 +34,22 @@ impl RawVisibility {
db: &dyn DefDatabase,
node: InFile<Option<ast::Visibility>>,
) -> RawVisibility {
Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
Self::from_ast_with_span_map(db, node.value, db.span_map(node.file_id).as_ref())
}
pub(crate) fn from_ast_with_hygiene(
pub(crate) fn from_ast_with_span_map(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> RawVisibility {
Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
Self::from_ast_with_span_map_and_default(db, node, RawVisibility::private(), span_map)
}
pub(crate) fn from_ast_with_hygiene_and_default(
pub(crate) fn from_ast_with_span_map_and_default(
db: &dyn DefDatabase,
node: Option<ast::Visibility>,
default: RawVisibility,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> RawVisibility {
let node = match node {
None => return default,
@ -57,7 +57,7 @@ impl RawVisibility {
};
match node.kind() {
ast::VisibilityKind::In(path) => {
let path = ModPath::from_src(db.upcast(), path, hygiene);
let path = ModPath::from_src(db.upcast(), path, span_map);
let path = match path {
None => return RawVisibility::private(),
Some(path) => path,

View File

@ -12,11 +12,40 @@ use std::{
marker::PhantomData,
};
use la_arena::{Arena, Idx};
use la_arena::{Arena, Idx, RawIdx};
use profile::Count;
use rustc_hash::FxHasher;
use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
use crate::db;
pub use base_db::span::ErasedFileAstId;
/// `AstId` points to an AST node in any file.
///
/// It is stable across reparses, and can be used as salsa key/value.
pub type AstId<N> = crate::InFile<FileAstId<N>>;
impl<N: AstIdNode> AstId<N> {
pub fn to_node(&self, db: &dyn db::ExpandDatabase) -> N {
self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id))
}
pub fn to_in_file_node(&self, db: &dyn db::ExpandDatabase) -> crate::InFile<N> {
crate::InFile::new(self.file_id, self.to_ptr(db).to_node(&db.parse_or_expand(self.file_id)))
}
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> AstPtr<N> {
db.ast_id_map(self.file_id).get(self.value)
}
}
pub type ErasedAstId = crate::InFile<ErasedFileAstId>;
impl ErasedAstId {
pub fn to_ptr(&self, db: &dyn db::ExpandDatabase) -> SyntaxNodePtr {
db.ast_id_map(self.file_id).get_erased(self.value)
}
}
/// `FileAstId` points to an AST node in a specific file.
pub struct FileAstId<N: AstIdNode> {
raw: ErasedFileAstId,
@ -62,8 +91,6 @@ impl<N: AstIdNode> FileAstId<N> {
}
}
pub type ErasedFileAstId = Idx<SyntaxNodePtr>;
pub trait AstIdNode: AstNode {}
macro_rules! register_ast_id_node {
(impl AstIdNode for $($ident:ident),+ ) => {
@ -129,6 +156,11 @@ impl AstIdMap {
pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
assert!(node.parent().is_none());
let mut res = AstIdMap::default();
// make sure to allocate the root node
if !should_alloc_id(node.kind()) {
res.alloc(node);
}
// By walking the tree in breadth-first order we make sure that parents
// get lower ids than children. That is, adding a new child does not
// change the parent's id. This means that, say, adding a new function to a
@ -136,9 +168,9 @@ impl AstIdMap {
bdfs(node, |it| {
if should_alloc_id(it.kind()) {
res.alloc(&it);
true
TreeOrder::BreadthFirst
} else {
false
TreeOrder::DepthFirst
}
});
res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@ -155,6 +187,11 @@ impl AstIdMap {
res
}
/// The [`SyntaxNodePtr`] of the root node
pub fn root(&self) -> SyntaxNodePtr {
self.arena[Idx::from_raw(RawIdx::from_u32(0))].clone()
}
pub fn ast_id<N: AstIdNode>(&self, item: &N) -> FileAstId<N> {
let raw = self.erased_ast_id(item.syntax());
FileAstId { raw, covariant: PhantomData }
@ -164,7 +201,7 @@ impl AstIdMap {
AstPtr::try_from_raw(self.arena[id.raw].clone()).unwrap()
}
pub(crate) fn get_raw(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
pub fn get_erased(&self, id: ErasedFileAstId) -> SyntaxNodePtr {
self.arena[id].clone()
}
@ -192,14 +229,20 @@ fn hash_ptr(ptr: &SyntaxNodePtr) -> u64 {
hasher.finish()
}
#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder {
BreadthFirst,
DepthFirst,
}
/// Walks the subtree in bdfs order, calling `f` for each node. What is bdfs
/// order? It is a mix of breadth-first and depth-first orders. Nodes for which
/// `f` returns true are visited breadth-first, all the other nodes are explored
/// depth-first.
/// `f` returns [`TreeOrder::BreadthFirst`] are visited breadth-first, all the other nodes are explored
/// [`TreeOrder::DepthFirst`].
///
/// In other words, the size of the bfs queue is bounded by the number of
/// [`TreeOrder::BreadthFirst`] nodes.
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> TreeOrder) {
let mut curr_layer = vec![node.clone()];
let mut next_layer = vec![];
while !curr_layer.is_empty() {
@ -208,7 +251,7 @@ fn bdfs(node: &SyntaxNode, mut f: impl FnMut(SyntaxNode) -> bool) {
while let Some(event) = preorder.next() {
match event {
syntax::WalkEvent::Enter(node) => {
if f(node.clone()) {
if f(node.clone()) == TreeOrder::BreadthFirst {
next_layer.extend(node.children());
preorder.skip_subtree();
}
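A self-contained sketch of the traversal contract above, with a toy tree instead of `SyntaxNode` (assumption-level code, not the real implementation): nodes for which `f` returns `BreadthFirst` defer their children to the next layer, everything else is walked depth-first in place, so the queue size stays bounded by the number of `BreadthFirst` nodes.

#[derive(Copy, Clone, PartialEq, Eq)]
enum TreeOrder { BreadthFirst, DepthFirst }

struct Node { children: Vec<Node> }

fn bdfs(root: &Node, f: &mut dyn FnMut(&Node) -> TreeOrder) {
    let mut curr_layer = vec![root];
    while !curr_layer.is_empty() {
        let mut next_layer = Vec::new();
        for node in curr_layer {
            // walk this layer entry's subtree...
            let mut stack = vec![node];
            while let Some(n) = stack.pop() {
                if f(n) == TreeOrder::BreadthFirst {
                    // ...deferring BreadthFirst children to the next layer
                    next_layer.extend(n.children.iter());
                } else {
                    stack.extend(n.children.iter());
                }
            }
        }
        curr_layer = next_layer;
    }
}

fn main() {
    let tree = Node { children: vec![Node { children: vec![] }, Node { children: vec![] }] };
    let mut visited = 0;
    bdfs(&tree, &mut |_| { visited += 1; TreeOrder::BreadthFirst });
    assert_eq!(visited, 3);
}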

View File

@ -1,19 +1,19 @@
//! Higher-level attributes based on TokenTree, with some shortcuts.
use std::{fmt, ops};
use base_db::CrateId;
use base_db::{span::SyntaxContextId, CrateId};
use cfg::CfgExpr;
use either::Either;
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use syntax::{ast, match_ast, AstNode, SmolStr, SyntaxNode};
use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
span::SpanMapRef,
tt::{self, Subtree},
InFile,
};
@ -39,16 +39,21 @@ impl ops::Deref for RawAttrs {
impl RawAttrs {
pub const EMPTY: Self = Self { entries: None };
pub fn new(db: &dyn ExpandDatabase, owner: &dyn ast::HasAttrs, hygiene: &Hygiene) -> Self {
pub fn new(
db: &dyn ExpandDatabase,
owner: &dyn ast::HasAttrs,
span_map: SpanMapRef<'_>,
) -> Self {
let entries = collect_attrs(owner)
.filter_map(|(id, attr)| match attr {
Either::Left(attr) => {
attr.meta().and_then(|meta| Attr::from_src(db, meta, hygiene, id))
attr.meta().and_then(|meta| Attr::from_src(db, meta, span_map, id))
}
Either::Right(comment) => comment.doc_comment().map(|doc| Attr {
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
}),
})
.collect::<Vec<_>>();
@ -58,9 +63,12 @@ impl RawAttrs {
Self { entries: if entries.is_empty() { None } else { Some(entries) } }
}
pub fn from_attrs_owner(db: &dyn ExpandDatabase, owner: InFile<&dyn ast::HasAttrs>) -> Self {
let hygiene = Hygiene::new(db, owner.file_id);
Self::new(db, owner.value, &hygiene)
pub fn from_attrs_owner(
db: &dyn ExpandDatabase,
owner: InFile<&dyn ast::HasAttrs>,
span_map: SpanMapRef<'_>,
) -> Self {
Self::new(db, owner.value, span_map)
}
pub fn merge(&self, other: Self) -> Self {
@ -122,12 +130,10 @@ impl RawAttrs {
let attrs = parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(
|(idx, attr)| {
let tree = Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: attr.to_vec(),
};
// FIXME hygiene
let hygiene = Hygiene::new_unhygienic();
Attr::from_tt(db, &tree, &hygiene, index.with_cfg_attr(idx))
Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
},
);
@ -185,21 +191,23 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum AttrInput {
/// `#[attr = "string"]`
// FIXME: This is losing span
Literal(SmolStr),
/// `#[attr(subtree)]`
TokenTree(Box<(tt::Subtree, mbe::TokenMap)>),
TokenTree(Box<tt::Subtree>),
}
impl fmt::Display for AttrInput {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
AttrInput::Literal(lit) => write!(f, " = \"{}\"", lit.escape_debug()),
AttrInput::TokenTree(tt) => tt.0.fmt(f),
AttrInput::TokenTree(tt) => tt.fmt(f),
}
}
}
@ -208,10 +216,10 @@ impl Attr {
fn from_src(
db: &dyn ExpandDatabase,
ast: ast::Meta,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
id: AttrId,
) -> Option<Attr> {
let path = Interned::new(ModPath::from_src(db, ast.path()?, hygiene)?);
let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
let value = match lit.kind() {
ast::LiteralKind::String(string) => string.value()?.into(),
@ -219,24 +227,20 @@ impl Attr {
};
Some(Interned::new(AttrInput::Literal(value)))
} else if let Some(tt) = ast.token_tree() {
let (tree, map) = syntax_node_to_token_tree(tt.syntax());
Some(Interned::new(AttrInput::TokenTree(Box::new((tree, map)))))
let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
} else {
None
};
Some(Attr { id, path, input })
Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
}
fn from_tt(
db: &dyn ExpandDatabase,
tt: &tt::Subtree,
hygiene: &Hygiene,
id: AttrId,
) -> Option<Attr> {
let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
// FIXME: Unnecessary roundtrip tt -> ast -> tt
let (parse, map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem);
let ast = ast::Meta::cast(parse.syntax_node())?;
Self::from_src(db, ast, hygiene, id)
Self::from_src(db, ast, SpanMapRef::ExpansionSpanMap(&map), id)
}
pub fn path(&self) -> &ModPath {
@ -256,7 +260,7 @@ impl Attr {
/// #[path(ident)]
pub fn single_ident_value(&self) -> Option<&tt::Ident> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => match &*tt.0.token_trees {
AttrInput::TokenTree(tt) => match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] => Some(ident),
_ => None,
},
@ -267,7 +271,7 @@ impl Attr {
/// #[path TokenTree]
pub fn token_tree_value(&self) -> Option<&Subtree> {
match self.input.as_deref()? {
AttrInput::TokenTree(tt) => Some(&tt.0),
AttrInput::TokenTree(tt) => Some(tt),
_ => None,
}
}
@ -276,8 +280,7 @@ impl Attr {
pub fn parse_path_comma_token_tree<'a>(
&'a self,
db: &'a dyn ExpandDatabase,
hygiene: &'a Hygiene,
) -> Option<impl Iterator<Item = ModPath> + 'a> {
) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
let args = self.token_tree_value()?;
if args.delimiter.kind != DelimiterKind::Parenthesis {
@ -290,12 +293,13 @@ impl Attr {
if tts.is_empty() {
return None;
}
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation here.
// FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
// here or maybe just parse a mod path from a token tree directly
let subtree = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
token_trees: tts.into_iter().cloned().collect(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: tts.to_vec(),
};
let (parse, _) =
let (parse, span_map) =
mbe::token_tree_to_syntax_node(&subtree, mbe::TopEntryPoint::MetaItem);
let meta = ast::Meta::cast(parse.syntax_node())?;
// Only simple paths are allowed.
@ -304,7 +308,11 @@ impl Attr {
return None;
}
let path = meta.path()?;
ModPath::from_src(db, path, hygiene)
let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
Some((
ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
call_site,
))
});
Some(paths)
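The splitting above, shown on a flat token list instead of a `tt::Subtree` (a toy stand-in with strings for tokens): each run between top-level commas becomes one candidate path, which the real code then parses as a `ModPath` and pairs with its call-site syntax context.

fn split_on_commas<'a>(tokens: &'a [&'a str]) -> impl Iterator<Item = Vec<&'a str>> {
    tokens.split(|t| *t == ",").map(|chunk| chunk.to_vec())
}

fn main() {
    let tokens = ["Foo", ",", "bar", "::", "Bar"];
    let paths: Vec<_> = split_on_commas(&tokens).collect();
    assert_eq!(paths, [vec!["Foo"], vec!["bar", "::", "Bar"]]);
}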

View File

@ -1,16 +1,22 @@
//! Builtin attributes.
use base_db::{
span::{SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{TextRange, TextSize};
use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallId, MacroCallKind};
macro_rules! register_builtin {
( $(($name:ident, $variant:ident) => $expand:ident),* ) => {
($expand_fn:ident: $(($name:ident, $variant:ident) => $expand:ident),* ) => {
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum BuiltinAttrExpander {
$($variant),*
}
impl BuiltinAttrExpander {
pub fn expand(
pub fn $expand_fn(
&self,
db: &dyn ExpandDatabase,
id: MacroCallId,
@ -45,7 +51,7 @@ impl BuiltinAttrExpander {
}
}
register_builtin! {
register_builtin! { expand:
(bench, Bench) => dummy_attr_expand,
(cfg_accessible, CfgAccessible) => dummy_attr_expand,
(cfg_eval, CfgEval) => dummy_attr_expand,
@ -77,9 +83,8 @@ fn dummy_attr_expand(
///
/// As such, we expand `#[derive(Foo, bar::Bar)]` into
/// ```
/// #[Foo]
/// #[bar::Bar]
/// ();
/// #![Foo]
/// #![bar::Bar]
/// ```
/// which allows fallback path resolution in hir::Semantics to properly identify our derives.
/// Since we do not expand the attribute in nameres though, we keep the original item.
@ -98,21 +103,31 @@ fn derive_attr_expand(
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } if loc.def.is_attribute_derive() => &attr_args.0,
_ => return ExpandResult::ok(tt::Subtree::empty()),
MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
attr_args
}
_ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
};
pseudo_derive_attr_expansion(tt, derives)
pseudo_derive_attr_expansion(tt, derives, loc.call_site)
}
pub fn pseudo_derive_attr_expansion(
tt: &tt::Subtree,
args: &tt::Subtree,
call_site: SyntaxContextId,
) -> ExpandResult<tt::Subtree> {
let mk_leaf = |char| {
tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char,
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span: tt::SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: base_db::span::SpanAnchor {
file_id: FileId::BOGUS,
ast_id: ROOT_ERASED_FILE_AST_ID,
},
ctx: call_site,
},
}))
};
@ -122,12 +137,10 @@ pub fn pseudo_derive_attr_expansion(
.split(|tt| matches!(tt, tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', .. }))))
{
token_trees.push(mk_leaf('#'));
token_trees.push(mk_leaf('!'));
token_trees.push(mk_leaf('['));
token_trees.extend(tt.iter().cloned());
token_trees.push(mk_leaf(']'));
}
token_trees.push(mk_leaf('('));
token_trees.push(mk_leaf(')'));
token_trees.push(mk_leaf(';'));
ExpandResult::ok(tt::Subtree { delimiter: tt.delimiter, token_trees })
}
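What the function above now produces, sketched on strings (the real code emits `tt` puncts carrying the call-site context on every token): `#[derive(Foo, bar::Bar)]` becomes `#![Foo] #![bar::Bar]`, i.e. inner attributes with the old trailing `();` dropped, matching the updated doc comment.

fn pseudo_derive(paths: &[&str]) -> String {
    paths.iter().map(|p| format!("#![{p}]")).collect::<Vec<_>>().join(" ")
}

fn main() {
    assert_eq!(pseudo_derive(&["Foo", "bar::Bar"]), "#![Foo] #![bar::Bar]");
}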

View File

@ -1,16 +1,16 @@
//! Builtin derives.
use ::tt::Ident;
use base_db::{CrateOrigin, LangCrateOrigin};
use base_db::{span::SpanData, CrateOrigin, LangCrateOrigin};
use itertools::izip;
use mbe::TokenMap;
use rustc_hash::FxHashSet;
use stdx::never;
use tracing::debug;
use crate::{
hygiene::span_with_def_site_ctxt,
name::{AsName, Name},
tt::{self, TokenId},
span::SpanMapRef,
tt,
};
use syntax::ast::{self, AstNode, FieldList, HasAttrs, HasGenericParams, HasName, HasTypeBounds};
@ -29,12 +29,15 @@ macro_rules! register_builtin {
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &ast::Adt,
token_map: &TokenMap,
token_map: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( BuiltinDeriveExpander::$trait => $expand, )*
};
expander(db, id, tt, token_map)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, span, tt, token_map)
}
fn find_by_name(name: &name::Name) -> Option<Self> {
@ -70,19 +73,19 @@ enum VariantShape {
Unit,
}
fn tuple_field_iterator(n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(|it| Ident::new(format!("f{it}"), tt::TokenId::unspecified()))
fn tuple_field_iterator(span: SpanData, n: usize) -> impl Iterator<Item = tt::Ident> {
(0..n).map(move |it| tt::Ident::new(format!("f{it}"), span))
}
impl VariantShape {
fn as_pattern(&self, path: tt::Subtree) -> tt::Subtree {
self.as_pattern_map(path, |it| quote!(#it))
fn as_pattern(&self, path: tt::Subtree, span: SpanData) -> tt::Subtree {
self.as_pattern_map(path, span, |it| quote!(span => #it))
}
fn field_names(&self) -> Vec<tt::Ident> {
fn field_names(&self, span: SpanData) -> Vec<tt::Ident> {
match self {
VariantShape::Struct(s) => s.clone(),
VariantShape::Tuple(n) => tuple_field_iterator(*n).collect(),
VariantShape::Tuple(n) => tuple_field_iterator(span, *n).collect(),
VariantShape::Unit => vec![],
}
}
@ -90,26 +93,27 @@ impl VariantShape {
fn as_pattern_map(
&self,
path: tt::Subtree,
span: SpanData,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
) -> tt::Subtree {
match self {
VariantShape::Struct(fields) => {
let fields = fields.iter().map(|it| {
let mapped = field_map(it);
quote! { #it : #mapped , }
quote! {span => #it : #mapped , }
});
quote! {
quote! {span =>
#path { ##fields }
}
}
&VariantShape::Tuple(n) => {
let fields = tuple_field_iterator(n).map(|it| {
let fields = tuple_field_iterator(span, n).map(|it| {
let mapped = field_map(&it);
quote! {
quote! {span =>
#mapped ,
}
});
quote! {
quote! {span =>
#path ( ##fields )
}
}
@ -117,7 +121,7 @@ impl VariantShape {
}
}
fn from(tm: &TokenMap, value: Option<FieldList>) -> Result<Self, ExpandError> {
fn from(tm: SpanMapRef<'_>, value: Option<FieldList>) -> Result<Self, ExpandError> {
let r = match value {
None => VariantShape::Unit,
Some(FieldList::RecordFieldList(it)) => VariantShape::Struct(
@ -139,17 +143,17 @@ enum AdtShape {
}
impl AdtShape {
fn as_pattern(&self, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(#it))
fn as_pattern(&self, span: SpanData, name: &tt::Ident) -> Vec<tt::Subtree> {
self.as_pattern_map(name, |it| quote!(span =>#it), span)
}
fn field_names(&self) -> Vec<Vec<tt::Ident>> {
fn field_names(&self, span: SpanData) -> Vec<Vec<tt::Ident>> {
match self {
AdtShape::Struct(s) => {
vec![s.field_names()]
vec![s.field_names(span)]
}
AdtShape::Enum { variants, .. } => {
variants.iter().map(|(_, fields)| fields.field_names()).collect()
variants.iter().map(|(_, fields)| fields.field_names(span)).collect()
}
AdtShape::Union => {
never!("using fields of union in derive is always wrong");
@ -162,18 +166,21 @@ impl AdtShape {
&self,
name: &tt::Ident,
field_map: impl Fn(&tt::Ident) -> tt::Subtree,
span: SpanData,
) -> Vec<tt::Subtree> {
match self {
AdtShape::Struct(s) => {
vec![s.as_pattern_map(quote! { #name }, field_map)]
vec![s.as_pattern_map(quote! {span => #name }, span, field_map)]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(v, fields)| fields.as_pattern_map(quote! { #name :: #v }, &field_map))
.map(|(v, fields)| {
fields.as_pattern_map(quote! {span => #name :: #v }, span, &field_map)
})
.collect(),
AdtShape::Union => {
never!("pattern matching on union is always wrong");
vec![quote! { un }]
vec![quote! {span => un }]
}
}
}
@ -189,8 +196,12 @@ struct BasicAdtInfo {
associated_types: Vec<tt::Subtree>,
}
fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match &adt {
fn parse_adt(
tm: SpanMapRef<'_>,
adt: &ast::Adt,
call_site: SpanData,
) -> Result<BasicAdtInfo, ExpandError> {
let (name, generic_param_list, shape) = match adt {
ast::Adt::Struct(it) => (
it.name(),
it.generic_param_list(),
@ -234,22 +245,26 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
match this {
Some(it) => {
param_type_set.insert(it.as_name());
mbe::syntax_node_to_token_tree(it.syntax()).0
mbe::syntax_node_to_token_tree(it.syntax(), tm)
}
None => {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
}
None => tt::Subtree::empty(),
}
};
let bounds = match &param {
ast::TypeOrConstParam::Type(it) => {
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
}
ast::TypeOrConstParam::Const(_) => None,
};
let ty = if let ast::TypeOrConstParam::Const(param) = param {
let ty = param
.ty()
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax()).0)
.unwrap_or_else(tt::Subtree::empty);
.map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
.unwrap_or_else(|| {
tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
});
Some(ty)
} else {
None
@ -282,20 +297,22 @@ fn parse_adt(tm: &TokenMap, adt: &ast::Adt) -> Result<BasicAdtInfo, ExpandError>
let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
param_type_set.contains(&name).then_some(p)
})
.map(|it| mbe::syntax_node_to_token_tree(it.syntax()).0)
.map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
.collect();
let name_token = name_to_token(&tm, name)?;
let name_token = name_to_token(tm, name)?;
Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })
}
fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Ident, ExpandError> {
fn name_to_token(
token_map: SpanMapRef<'_>,
name: Option<ast::Name>,
) -> Result<tt::Ident, ExpandError> {
let name = name.ok_or_else(|| {
debug!("parsed item has no name");
ExpandError::other("missing name")
})?;
let name_token_id =
token_map.token_by_range(name.syntax().text_range()).unwrap_or_else(TokenId::unspecified);
let name_token = tt::Ident { span: name_token_id, text: name.text().into() };
let span = token_map.span_for_range(name.syntax().text_range());
let name_token = tt::Ident { span, text: name.text().into() };
Ok(name_token)
}
@ -331,14 +348,21 @@ fn name_to_token(token_map: &TokenMap, name: Option<ast::Name>) -> Result<tt::Id
/// where B1, ..., BN are the bounds given by `bounds_paths`. Z is a phantom type, and
/// therefore does not get bound by the derived trait.
fn expand_simple_derive(
// FIXME: use
invoc_span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
trait_path: tt::Subtree,
make_trait_body: impl FnOnce(&BasicAdtInfo) -> tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let info = match parse_adt(tm, tt) {
let info = match parse_adt(tm, tt, invoc_span) {
Ok(info) => info,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: invoc_span, close: invoc_span }),
e,
)
}
};
let trait_body = make_trait_body(&info);
let mut where_block = vec![];
@ -349,13 +373,13 @@ fn expand_simple_derive(
let ident_ = ident.clone();
if let Some(b) = bound {
let ident = ident.clone();
where_block.push(quote! { #ident : #b , });
where_block.push(quote! {invoc_span => #ident : #b , });
}
if let Some(ty) = param_ty {
(quote! { const #ident : #ty , }, quote! { #ident_ , })
(quote! {invoc_span => const #ident : #ty , }, quote! {invoc_span => #ident_ , })
} else {
let bound = trait_path.clone();
(quote! { #ident : #bound , }, quote! { #ident_ , })
(quote! {invoc_span => #ident : #bound , }, quote! {invoc_span => #ident_ , })
}
})
.unzip();
@ -363,17 +387,17 @@ fn expand_simple_derive(
where_block.extend(info.associated_types.iter().map(|it| {
let it = it.clone();
let bound = trait_path.clone();
quote! { #it : #bound , }
quote! {invoc_span => #it : #bound , }
}));
let name = info.name;
let expanded = quote! {
let expanded = quote! {invoc_span =>
impl < ##params > #trait_path for #name < ##args > where ##where_block { #trait_body }
};
ExpandResult::ok(expanded)
}
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree {
fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId, span: SpanData) -> tt::TokenTree {
// FIXME: make hygiene work for builtin derive macros
// such that $crate can be used here.
let cg = db.crate_graph();
@ -381,9 +405,9 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
let tt = if matches!(cg[krate].origin, CrateOrigin::Lang(LangCrateOrigin::Core)) {
cov_mark::hit!(test_copy_expand_in_core);
quote! { crate }
quote! {span => crate }
} else {
quote! { core }
quote! {span => core }
};
tt.token_trees[0].clone()
@ -392,56 +416,50 @@ fn find_builtin_crate(db: &dyn ExpandDatabase, id: MacroCallId) -> tt::TokenTree
fn copy_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::marker::Copy }, |_| quote! {})
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::marker::Copy }, |_| quote! {span =>})
}
fn clone_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::clone::Clone }, |adt| {
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::clone::Clone }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn clone(&self) -> Self {
#star self
}
};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn clone(&self) -> Self {
match #star self {}
}
};
}
let name = &adt.name;
let patterns = adt.shape.as_pattern(name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! { #it .clone() });
let patterns = adt.shape.as_pattern(span, name);
let exprs = adt.shape.as_pattern_map(name, |it| quote! {span => #it .clone() }, span);
let arms = patterns.into_iter().zip(exprs.into_iter()).map(|(pat, expr)| {
let fat_arrow = fat_arrow();
quote! {
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr,
}
});
quote! {
quote! {span =>
fn clone(&self) -> Self {
match self {
##arms
@ -451,53 +469,56 @@ fn clone_expand(
})
}
/// This function exists since `quote! { => }` doesn't work.
fn fat_arrow() -> ::tt::Subtree<TokenId> {
let eq =
tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
quote! { #eq> }
/// This function exists since `quote! {span => => }` doesn't work.
fn fat_arrow(span: SpanData) -> tt::Subtree {
let eq = tt::Punct { char: '=', spacing: ::tt::Spacing::Joint, span };
quote! {span => #eq> }
}
/// This function exists since `quote! { && }` doesn't work.
fn and_and() -> ::tt::Subtree<TokenId> {
let and =
tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span: tt::TokenId::unspecified() };
quote! { #and& }
/// This function exists since `quote! {span => && }` doesn't work.
fn and_and(span: SpanData) -> tt::Subtree {
let and = tt::Punct { char: '&', spacing: ::tt::Spacing::Joint, span };
quote! {span => #and& }
}
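All the derive expanders in this file now pass the invocation span as a leading `span =>` argument to rust-analyzer's internal `quote!`, which stamps that span on every produced token. A toy `macro_rules!` mimicking just the calling convention (not the actual `quote!` implementation):

macro_rules! quote_like {
    ($span:expr => $($tt:tt)*) => {
        // a real implementation would attach `$span` to each emitted token tree;
        // this stand-in just pairs the span with the stringified tokens
        ($span, stringify!($($tt)*))
    };
}

fn main() {
    let (span, tokens) = quote_like!("FileId(0):1@0..6\\0" => impl Clone for Foo {});
    println!("{span} -> {tokens}");
}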
fn default_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::default::Default }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::default::Default }, |adt| {
let body = match &adt.shape {
AdtShape::Struct(fields) => {
let name = &adt.name;
fields
.as_pattern_map(quote!(#name), |_| quote!(#krate::default::Default::default()))
fields.as_pattern_map(
quote!(span =>#name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
}
AdtShape::Enum { default_variant, variants } => {
if let Some(d) = default_variant {
let (name, fields) = &variants[*d];
let adt_name = &adt.name;
fields.as_pattern_map(
quote!(#adt_name :: #name),
|_| quote!(#krate::default::Default::default()),
quote!(span =>#adt_name :: #name),
span,
|_| quote!(span =>#krate::default::Default::default()),
)
} else {
// FIXME: Return expand error here
quote!()
quote!(span =>)
}
}
AdtShape::Union => {
// FIXME: Return expand error here
quote!()
quote!(span =>)
}
};
quote! {
quote! {span =>
fn default() -> Self {
#body
}
@ -508,44 +529,41 @@ fn default_expand(
fn debug_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::fmt::Debug }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::fmt::Debug }, |adt| {
let for_variant = |name: String, v: &VariantShape| match v {
VariantShape::Struct(fields) => {
let for_fields = fields.iter().map(|it| {
let x_string = it.to_string();
quote! {
quote! {span =>
.field(#x_string, & #it)
}
});
quote! {
quote! {span =>
f.debug_struct(#name) ##for_fields .finish()
}
}
VariantShape::Tuple(n) => {
let for_fields = tuple_field_iterator(*n).map(|it| {
quote! {
let for_fields = tuple_field_iterator(span, *n).map(|it| {
quote! {span =>
.field( & #it)
}
});
quote! {
quote! {span =>
f.debug_tuple(#name) ##for_fields .finish()
}
}
VariantShape::Unit => quote! {
VariantShape::Unit => quote! {span =>
f.write_str(#name)
},
};
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match #star self {}
}
@ -553,20 +571,20 @@ fn debug_expand(
}
let arms = match &adt.shape {
AdtShape::Struct(fields) => {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let name = &adt.name;
let pat = fields.as_pattern(quote!(#name));
let pat = fields.as_pattern(quote!(span =>#name), span);
let expr = for_variant(name.to_string(), fields);
vec![quote! { #pat #fat_arrow #expr }]
vec![quote! {span => #pat #fat_arrow #expr }]
}
AdtShape::Enum { variants, .. } => variants
.iter()
.map(|(name, v)| {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let adt_name = &adt.name;
let pat = v.as_pattern(quote!(#adt_name :: #name));
let pat = v.as_pattern(quote!(span =>#adt_name :: #name), span);
let expr = for_variant(name.to_string(), v);
quote! {
quote! {span =>
#pat #fat_arrow #expr ,
}
})
@ -576,7 +594,7 @@ fn debug_expand(
vec![]
}
};
quote! {
quote! {span =>
fn fmt(&self, f: &mut #krate::fmt::Formatter) -> #krate::fmt::Result {
match self {
##arms
@ -589,47 +607,46 @@ fn debug_expand(
fn hash_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::hash::Hash }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::hash::Hash }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
return quote! {span =>};
}
if matches!(&adt.shape, AdtShape::Enum { variants, .. } if variants.is_empty()) {
let star = tt::Punct {
char: '*',
spacing: ::tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
};
return quote! {
let star = tt::Punct { char: '*', spacing: ::tt::Spacing::Alone, span };
return quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
match #star self {}
}
};
}
let arms = adt.shape.as_pattern(&adt.name).into_iter().zip(adt.shape.field_names()).map(
|(pat, names)| {
let expr = {
let it = names.iter().map(|it| quote! { #it . hash(ra_expand_state); });
quote! { {
##it
} }
};
let fat_arrow = fat_arrow();
quote! {
#pat #fat_arrow #expr ,
}
},
);
let arms =
adt.shape.as_pattern(span, &adt.name).into_iter().zip(adt.shape.field_names(span)).map(
|(pat, names)| {
let expr = {
let it =
names.iter().map(|it| quote! {span => #it . hash(ra_expand_state); });
quote! {span => {
##it
} }
};
let fat_arrow = fat_arrow(span);
quote! {span =>
#pat #fat_arrow #expr ,
}
},
);
let check_discriminant = if matches!(&adt.shape, AdtShape::Enum { .. }) {
quote! { #krate::mem::discriminant(self).hash(ra_expand_state); }
quote! {span => #krate::mem::discriminant(self).hash(ra_expand_state); }
} else {
quote! {}
quote! {span =>}
};
quote! {
quote! {span =>
fn hash<H: #krate::hash::Hasher>(&self, ra_expand_state: &mut H) {
#check_discriminant
match self {
@ -643,56 +660,58 @@ fn hash_expand(
fn eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Eq }, |_| quote! {})
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Eq }, |_| quote! {span =>})
}
fn partial_eq_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialEq }, |adt| {
let krate = find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialEq }, |adt| {
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote! {};
return quote! {span =>};
}
let name = &adt.name;
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, names)| {
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let body = match &*names {
[] => {
quote!(true)
quote!(span =>true)
}
[first, rest @ ..] => {
let rest = rest.iter().map(|it| {
let t1 = Ident::new(format!("{}_self", it.text), it.span);
let t2 = Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and();
quote!(#and_and #t1 .eq( #t2 ))
let t1 = tt::Ident::new(format!("{}_self", it.text), it.span);
let t2 = tt::Ident::new(format!("{}_other", it.text), it.span);
let and_and = and_and(span);
quote!(span =>#and_and #t1 .eq( #t2 ))
});
let first = {
let t1 = Ident::new(format!("{}_self", first.text), first.span);
let t2 = Ident::new(format!("{}_other", first.text), first.span);
quote!(#t1 .eq( #t2 ))
let t1 = tt::Ident::new(format!("{}_self", first.text), first.span);
let t2 = tt::Ident::new(format!("{}_other", first.text), first.span);
quote!(span =>#t1 .eq( #t2 ))
};
quote!(#first ##rest)
quote!(span =>#first ##rest)
}
};
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
quote! {
let fat_arrow = fat_arrow(span);
quote! {span =>
fn eq(&self, other: &Self) -> bool {
match (self, other) {
##arms
@ -706,35 +725,46 @@ fn partial_eq_expand(
fn self_and_other_patterns(
adt: &BasicAdtInfo,
name: &tt::Ident,
span: SpanData,
) -> (Vec<tt::Subtree>, Vec<tt::Subtree>) {
let self_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_self", it.text), it.span);
quote!(#t)
});
let other_patterns = adt.shape.as_pattern_map(name, |it| {
let t = Ident::new(format!("{}_other", it.text), it.span);
quote!(#t)
});
let self_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_self", it.text), it.span);
quote!(span =>#t)
},
span,
);
let other_patterns = adt.shape.as_pattern_map(
name,
|it| {
let t = tt::Ident::new(format!("{}_other", it.text), it.span);
quote!(span =>#t)
},
span,
);
(self_patterns, other_patterns)
}
fn ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::Ord }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::Ord }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree {
let fat_arrow1 = fat_arrow();
let fat_arrow2 = fat_arrow();
quote! {
let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span);
quote! {span =>
match #left.cmp(&#right) {
#krate::cmp::Ordering::Equal #fat_arrow1 {
#rest
@ -745,34 +775,34 @@ fn ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote!();
return quote!(span =>);
}
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::cmp::Ordering::Equal);
let mut body = quote!(span =>#krate::cmp::Ordering::Equal);
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
let fat_arrow = fat_arrow();
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
let fat_arrow = fat_arrow(span);
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
let mut body = quote! {
let fat_arrow = fat_arrow(span);
let mut body = quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::cmp::Ordering::Equal
}
};
if matches!(&adt.shape, AdtShape::Enum { .. }) {
let left = quote!(#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body);
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
body = compare(krate, left, right, body, span);
}
quote! {
quote! {span =>
fn cmp(&self, other: &Self) -> #krate::cmp::Ordering {
#body
}
@ -783,20 +813,22 @@ fn ord_expand(
fn partial_ord_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
span: SpanData,
tt: &ast::Adt,
tm: &TokenMap,
tm: SpanMapRef<'_>,
) -> ExpandResult<tt::Subtree> {
let krate = &find_builtin_crate(db, id);
expand_simple_derive(tt, tm, quote! { #krate::cmp::PartialOrd }, |adt| {
let krate = &find_builtin_crate(db, id, span);
expand_simple_derive(span, tt, tm, quote! {span => #krate::cmp::PartialOrd }, |adt| {
fn compare(
krate: &tt::TokenTree,
left: tt::Subtree,
right: tt::Subtree,
rest: tt::Subtree,
span: SpanData,
) -> tt::Subtree {
let fat_arrow1 = fat_arrow();
let fat_arrow2 = fat_arrow();
quote! {
let fat_arrow1 = fat_arrow(span);
let fat_arrow2 = fat_arrow(span);
quote! {span =>
match #left.partial_cmp(&#right) {
#krate::option::Option::Some(#krate::cmp::Ordering::Equal) #fat_arrow1 {
#rest
@ -807,37 +839,39 @@ fn partial_ord_expand(
}
if matches!(adt.shape, AdtShape::Union) {
// FIXME: Return expand error here
return quote!();
return quote!(span =>);
}
let left = quote!(#krate::intrinsics::discriminant_value(self));
let right = quote!(#krate::intrinsics::discriminant_value(other));
let left = quote!(span =>#krate::intrinsics::discriminant_value(self));
let right = quote!(span =>#krate::intrinsics::discriminant_value(other));
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names()).map(
let (self_patterns, other_patterns) = self_and_other_patterns(adt, &adt.name, span);
let arms = izip!(self_patterns, other_patterns, adt.shape.field_names(span)).map(
|(pat1, pat2, fields)| {
let mut body = quote!(#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
let mut body =
quote!(span =>#krate::option::Option::Some(#krate::cmp::Ordering::Equal));
for f in fields.into_iter().rev() {
let t1 = Ident::new(format!("{}_self", f.text), f.span);
let t2 = Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(#t1), quote!(#t2), body);
let t1 = tt::Ident::new(format!("{}_self", f.text), f.span);
let t2 = tt::Ident::new(format!("{}_other", f.text), f.span);
body = compare(krate, quote!(span =>#t1), quote!(span =>#t2), body, span);
}
let fat_arrow = fat_arrow();
quote! { ( #pat1 , #pat2 ) #fat_arrow #body , }
let fat_arrow = fat_arrow(span);
quote! {span => ( #pat1 , #pat2 ) #fat_arrow #body , }
},
);
let fat_arrow = fat_arrow();
let fat_arrow = fat_arrow(span);
let body = compare(
krate,
left,
right,
quote! {
quote! {span =>
match (self, other) {
##arms
_unused #fat_arrow #krate::option::Option::Some(#krate::cmp::Ordering::Equal)
}
},
span,
);
quote! {
quote! {span =>
fn partial_cmp(&self, other: &Self) -> #krate::option::Option::Option<#krate::cmp::Ordering> {
#body
}

View File

@ -1,17 +1,23 @@
//! Builtin macro
use base_db::{AnchoredPath, Edition, FileId};
use base_db::{
span::{SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
AnchoredPath, Edition, FileId,
};
use cfg::CfgExpr;
use either::Either;
use mbe::{parse_exprs_with_sep, parse_to_token_tree, TokenMap};
use mbe::{parse_exprs_with_sep, parse_to_token_tree};
use syntax::{
ast::{self, AstToken},
SmolStr,
};
use crate::{
db::ExpandDatabase, name, quote, tt, EagerCallInfo, ExpandError, ExpandResult, MacroCallId,
MacroCallLoc,
db::ExpandDatabase,
hygiene::span_with_def_site_ctxt,
name, quote,
tt::{self, DelimSpan},
ExpandError, ExpandResult, HirFileIdExt, MacroCallId, MacroCallLoc,
};
macro_rules! register_builtin {
@ -36,7 +42,10 @@ macro_rules! register_builtin {
let expander = match *self {
$( BuiltinFnLikeExpander::$kind => $expand, )*
};
expander(db, id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
}
}
@ -44,13 +53,16 @@ macro_rules! register_builtin {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
id: MacroCallId,
tt: &tt::Subtree,
) -> ExpandResult<tt::Subtree> {
let expander = match *self {
$( EagerExpander::$e_kind => $e_expand, )*
};
expander(db, arg_id, tt)
let span = db.lookup_intern_macro_call(id).span(db);
let span = span_with_def_site_ctxt(db, span, id);
expander(db, id, tt, span)
}
}
@ -109,29 +121,42 @@ register_builtin! {
(option_env, OptionEnv) => option_env_expand
}
const DOLLAR_CRATE: tt::Ident =
tt::Ident { text: SmolStr::new_inline("$crate"), span: tt::TokenId::unspecified() };
fn mk_pound(span: SpanData) -> tt::Subtree {
crate::quote::IntoTt::to_subtree(
vec![crate::tt::Leaf::Punct(crate::tt::Punct {
char: '#',
spacing: crate::tt::Spacing::Alone,
span: span,
})
.into()],
span,
)
}
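// Hedged usage sketch (editor's example, mirroring format_args_expand_general
// and asm_expand below): `mk_pound` yields a one-token subtree holding `#`,
// which gets spliced in front of the `builtin` pseudo-keyword:
//
//     let pound = mk_pound(span);
//     let expanded = quote! {span => builtin #pound format_args #tt };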
fn module_path_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Just return a dummy result.
ExpandResult::ok(quote! { "module::path" })
ExpandResult::ok(quote! {span =>
"module::path"
})
}
fn line_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// dummy implementation for type-checking purposes
ExpandResult::ok(tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: "0u32".into(),
span: tt::Span::UNSPECIFIED,
span,
}))],
})
}
@ -140,26 +165,29 @@ fn log_syntax_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn trace_macros_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn stringify_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pretty = ::tt::pretty(&tt.token_trees);
let expanded = quote! {
let expanded = quote! {span =>
#pretty
};
@ -170,27 +198,29 @@ fn assert_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let args = parse_exprs_with_sep(tt, ',');
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let expanded = match &*args {
[cond, panic_args @ ..] => {
let comma = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
char: ',',
spacing: tt::Spacing::Alone,
span: tt::TokenId::unspecified(),
span,
}))],
};
let cond = cond.clone();
let panic_args = itertools::Itertools::intersperse(panic_args.iter().cloned(), comma);
quote! {{
quote! {span =>{
if !(#cond) {
#DOLLAR_CRATE::panic!(##panic_args);
#dollar_crate::panic!(##panic_args);
}
}}
}
[] => quote! {{}},
[] => quote! {span =>{}},
};
ExpandResult::ok(expanded)
@ -200,12 +230,13 @@ fn file_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// FIXME: RA purposefully lacks knowledge of absolute file names
// so just return "".
let file_name = "";
let expanded = quote! {
let expanded = quote! {span =>
#file_name
};
@ -216,16 +247,18 @@ fn format_args_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "")
format_args_expand_general(db, id, tt, "", span)
}
fn format_args_nl_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
format_args_expand_general(db, id, tt, "\\n")
format_args_expand_general(db, id, tt, "\\n", span)
}
fn format_args_expand_general(
@ -234,11 +267,12 @@ fn format_args_expand_general(
tt: &tt::Subtree,
// FIXME: Make use of this so that mir interpretation works properly
_end_string: &str,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let pound = quote! {@PUNCT '#'};
let pound = mk_pound(span);
let mut tt = tt.clone();
tt.delimiter.kind = tt::DelimiterKind::Parenthesis;
return ExpandResult::ok(quote! {
return ExpandResult::ok(quote! {span =>
builtin #pound format_args #tt
});
}
@ -247,25 +281,25 @@ fn asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// We expand all assembly snippets to `format_args!` invocations to get format syntax
// highlighting for them.
let mut literals = Vec::new();
for tt in tt.token_trees.chunks(2) {
match tt {
[tt::TokenTree::Leaf(tt::Leaf::Literal(lit))]
| [tt::TokenTree::Leaf(tt::Leaf::Literal(lit)), tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: ',', span: _, spacing: _ }))] =>
{
let krate = DOLLAR_CRATE.clone();
literals.push(quote!(#krate::format_args!(#lit);));
let dollar_krate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
literals.push(quote!(span=>#dollar_krate::format_args!(#lit);));
}
_ => break,
}
}
let pound = quote! {@PUNCT '#'};
let expanded = quote! {
let pound = mk_pound(span);
let expanded = quote! {span =>
builtin #pound asm (
{##literals}
)
@ -277,20 +311,22 @@ fn global_asm_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
// Expand to nothing (at item-level)
ExpandResult::ok(quote! {})
ExpandResult::ok(quote! {span =>})
}
fn cfg_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let expr = CfgExpr::parse(tt);
let enabled = db.crate_graph()[loc.krate].cfg_options.check(&expr) != Some(false);
let expanded = if enabled { quote!(true) } else { quote!(false) };
let expanded = if enabled { quote!(span=>true) } else { quote!(span=>false) };
ExpandResult::ok(expanded)
}
@ -298,13 +334,15 @@ fn panic_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
// Expand to a macro call `$crate::panic::panic_{edition}`
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::panic_2021!)
quote!(span =>#dollar_crate::panic::panic_2021!)
} else {
quote!(#DOLLAR_CRATE::panic::panic_2015!)
quote!(span =>#dollar_crate::panic::panic_2015!)
};
// Pass the original arguments
@ -316,13 +354,15 @@ fn unreachable_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro_call(id);
// Expand to a macro call `$crate::panic::unreachable_{edition}`
let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
let mut call = if db.crate_graph()[loc.krate].edition >= Edition::Edition2021 {
quote!(#DOLLAR_CRATE::panic::unreachable_2021!)
quote!(span =>#dollar_crate::panic::unreachable_2021!)
} else {
quote!(#DOLLAR_CRATE::panic::unreachable_2015!)
quote!(span =>#dollar_crate::panic::unreachable_2015!)
};
// Pass the original arguments
@ -352,6 +392,7 @@ fn compile_error_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let err = match &*tt.token_trees {
[tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) {
@ -361,13 +402,14 @@ fn compile_error_expand(
_ => ExpandError::other("`compile_error!` argument must be a string"),
};
ExpandResult { value: quote! {}, err: Some(err) }
ExpandResult { value: quote! {span =>}, err: Some(err) }
}
fn concat_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut text = String::new();
@ -407,13 +449,14 @@ fn concat_expand(
}
}
}
ExpandResult { value: quote!(#text), err }
ExpandResult { value: quote!(span =>#text), err }
}
fn concat_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut bytes = Vec::new();
let mut err = None;
@ -446,8 +489,8 @@ fn concat_bytes_expand(
}
}
}
let ident = tt::Ident { text: bytes.join(", ").into(), span: tt::TokenId::unspecified() };
ExpandResult { value: quote!([#ident]), err }
let ident = tt::Ident { text: bytes.join(", ").into(), span };
ExpandResult { value: quote!(span =>[#ident]), err }
}
fn concat_bytes_expand_subtree(
@ -480,6 +523,7 @@ fn concat_idents_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let mut err = None;
let mut ident = String::new();
@ -494,8 +538,9 @@ fn concat_idents_expand(
}
}
}
let ident = tt::Ident { text: ident.into(), span: tt::TokenId::unspecified() };
ExpandResult { value: quote!(#ident), err }
// FIXME merge spans
let ident = tt::Ident { text: ident.into(), span };
ExpandResult { value: quote!(span =>#ident), err }
}
fn relative_file(
@ -530,45 +575,48 @@ fn parse_string(tt: &tt::Subtree) -> Result<String, ExpandError> {
fn include_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
_tt: &tt::Subtree,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
match db.include_expand(arg_id) {
Ok((res, _)) => ExpandResult::ok(res.0.clone()),
Err(e) => ExpandResult::new(tt::Subtree::empty(), e),
let file_id = match include_input_to_file_id(db, arg_id, tt) {
Ok(it) => it,
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
match parse_to_token_tree(
SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
SyntaxContextId::ROOT,
&db.file_text(file_id),
) {
Some(it) => ExpandResult::ok(it),
None => ExpandResult::new(
tt::Subtree::empty(DelimSpan { open: span, close: span }),
ExpandError::other("failed to parse included file"),
),
}
}
pub(crate) fn include_arg_to_tt(
pub fn include_input_to_file_id(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
) -> Result<(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, TokenMap)>, FileId), ExpandError> {
let loc = db.lookup_intern_macro_call(arg_id);
let Some(EagerCallInfo { arg, arg_id, .. }) = loc.eager.as_deref() else {
panic!("include_arg_to_tt called on non include macro call: {:?}", &loc.eager);
};
let path = parse_string(&arg.0)?;
let file_id = relative_file(db, *arg_id, &path, false)?;
let (subtree, map) =
parse_to_token_tree(&db.file_text(file_id)).ok_or(mbe::ExpandError::ConversionError)?;
Ok((triomphe::Arc::new((subtree, map)), file_id))
arg: &tt::Subtree,
) -> Result<FileId, ExpandError> {
relative_file(db, arg_id, &parse_string(arg)?, false)
}
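// Editor's sketch of the simplified include! pipeline (names taken from the
// code above): resolve the path literal to a FileId, then lex that file
// directly into a subtree whose spans anchor to the included file itself:
//
//     let file_id = include_input_to_file_id(db, arg_id, tt)?;
//     let tt = parse_to_token_tree(
//         SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
//         SyntaxContextId::ROOT,
//         &db.file_text(file_id),
//     ); // returns Option, see include_expand above
//
// Because spans now point straight into the included file, the old
// `include_expand` query and its TokenMap pair are no longer needed.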
fn include_bytes_expand(
_db: &dyn ExpandDatabase,
_arg_id: MacroCallId,
tt: &tt::Subtree,
_tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
if let Err(e) = parse_string(tt) {
return ExpandResult::new(tt::Subtree::empty(), e);
}
// FIXME: actually read the file here if the user asked for macro expansion
let res = tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
text: r#"b"""#.into(),
span: tt::TokenId::unspecified(),
span,
}))],
};
ExpandResult::ok(res)
@ -578,10 +626,13 @@ fn include_str_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let path = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
// FIXME: we're not able to read excluded files (which is most of them because
@ -591,14 +642,14 @@ fn include_str_expand(
let file_id = match relative_file(db, arg_id, &path, true) {
Ok(file_id) => file_id,
Err(_) => {
return ExpandResult::ok(quote!(""));
return ExpandResult::ok(quote!(span =>""));
}
};
let text = db.file_text(file_id);
let text = &*text;
ExpandResult::ok(quote!(#text))
ExpandResult::ok(quote!(span =>#text))
}
fn get_env_inner(db: &dyn ExpandDatabase, arg_id: MacroCallId, key: &str) -> Option<String> {
@ -610,10 +661,13 @@ fn env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
let mut err = None;
@ -630,7 +684,7 @@ fn env_expand(
// `include!("foo.rs")`, which might cause an infinite loop
"UNRESOLVED_ENV_VAR".to_string()
});
let expanded = quote! { #s };
let expanded = quote! {span => #s };
ExpandResult { value: expanded, err }
}
@ -639,15 +693,18 @@ fn option_env_expand(
db: &dyn ExpandDatabase,
arg_id: MacroCallId,
tt: &tt::Subtree,
span: SpanData,
) -> ExpandResult<tt::Subtree> {
let key = match parse_string(tt) {
Ok(it) => it,
Err(e) => return ExpandResult::new(tt::Subtree::empty(), e),
Err(e) => {
return ExpandResult::new(tt::Subtree::empty(DelimSpan { open: span, close: span }), e)
}
};
// FIXME: Use `DOLLAR_CRATE` when that works in eager macros.
let expanded = match get_env_inner(db, arg_id, &key) {
None => quote! { ::core::option::Option::None::<&str> },
Some(s) => quote! { ::core::option::Option::Some(#s) },
None => quote! {span => ::core::option::Option::None::<&str> },
Some(s) => quote! {span => ::core::option::Option::Some(#s) },
};
ExpandResult::ok(expanded)

View File

@ -1,22 +1,31 @@
//! Defines database & queries for macro expansion.
use base_db::{salsa, CrateId, Edition, SourceDatabase};
use base_db::{
salsa::{self, debug::DebugQueryTable},
span::SyntaxContextId,
CrateId, Edition, FileId, SourceDatabase,
};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use rustc_hash::FxHashSet;
use syntax::{
ast::{self, HasAttrs, HasDocComments},
AstNode, GreenNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
};
use triomphe::Arc;
use crate::{
ast_id_map::AstIdMap, builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander, fixup, hygiene::HygieneFrame, tt, AstId, BuiltinAttrExpander,
BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo, ExpandError, ExpandResult,
ExpandTo, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
MacroDefKind, MacroFile, ProcMacroExpander,
ast_id_map::AstIdMap,
attrs::RawAttrs,
builtin_attr_macro::pseudo_derive_attr_expansion,
builtin_fn_macro::EagerExpander,
fixup::{self, SyntaxFixupUndoInfo},
hygiene::{apply_mark, SyntaxContextData, Transparency},
span::{RealSpanMap, SpanMap, SpanMapRef},
tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFileId, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
@ -30,32 +39,43 @@ static TOKEN_LIMIT: Limit = Limit::new(1_048_576);
#[derive(Debug, Clone, Eq, PartialEq)]
/// Old-style `macro_rules` or the new macros 2.0
pub struct DeclarativeMacroExpander {
pub mac: mbe::DeclarativeMacro,
pub def_site_token_map: mbe::TokenMap,
pub mac: mbe::DeclarativeMacro<base_db::span::SpanData>,
pub transparency: Transparency,
}
impl DeclarativeMacroExpander {
pub fn expand(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
pub fn expand(
&self,
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(tt).map_err(Into::into),
None => self
.mac
.expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
.map_err(Into::into),
}
}
pub fn map_id_down(&self, token_id: tt::TokenId) -> tt::TokenId {
self.mac.map_id_down(token_id)
}
pub fn map_id_up(&self, token_id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
self.mac.map_id_up(token_id)
pub fn expand_unhygienic(&self, tt: tt::Subtree) -> ExpandResult<tt::Subtree> {
match self.mac.err() {
Some(e) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan::DUMMY),
ExpandError::other(format!("invalid macro definition: {e}")),
),
None => self.mac.expand(&tt, |_| ()).map_err(Into::into),
}
}
}
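// Hedged note (editor's): `expand` marks each token's syntax context via
// `apply_mark` with this macro's transparency, while `expand_unhygienic`
// leaves contexts untouched; the latter serves speculative expansion (see
// expand_speculative below), which operates on arguments that were never
// interned as a real macro call.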
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
/// Old-style `macro_rules` or the new macros 2.0
DeclarativeMacro(Arc<DeclarativeMacroExpander>),
/// Stuff like `line!` and `file!`.
BuiltIn(BuiltinFnLikeExpander),
@ -69,31 +89,6 @@ pub enum TokenExpander {
ProcMacro(ProcMacroExpander),
}
// FIXME: Get rid of these methods
impl TokenExpander {
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_down(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::DeclarativeMacro(expander) => expander.map_id_up(id),
TokenExpander::BuiltIn(..)
| TokenExpander::BuiltInEager(..)
| TokenExpander::BuiltInAttr(..)
| TokenExpander::BuiltInDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
#[salsa::query_group(ExpandDatabaseStorage)]
pub trait ExpandDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
@ -108,8 +103,12 @@ pub trait ExpandDatabase: SourceDatabase {
// This query is LRU cached
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>;
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)>;
#[salsa::transparent]
fn span_map(&self, file_id: HirFileId) -> SpanMap;
fn real_span_map(&self, file_id: FileId) -> Arc<RealSpanMap>;
/// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
@ -118,23 +117,21 @@ pub trait ExpandDatabase: SourceDatabase {
/// to be incremental.
#[salsa::interned]
fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
#[salsa::interned]
fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
/// Lowers syntactic macro call to a token tree representation.
#[salsa::transparent]
fn setup_syntax_context_root(&self) -> ();
#[salsa::transparent]
fn dump_syntax_contexts(&self) -> String;
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg(
&self,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
>;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg_node(
&self,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>>;
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>>;
/// Fetches the expander for this macro.
#[salsa::transparent]
fn macro_expander(&self, id: MacroDefId) -> TokenExpander;
@ -144,18 +141,6 @@ pub trait ExpandDatabase: SourceDatabase {
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander>;
/// Expand macro call to a token tree.
// This query is LRU cached
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Arc<tt::Subtree>>;
#[salsa::invoke(crate::builtin_fn_macro::include_arg_to_tt)]
fn include_expand(
&self,
arg_id: MacroCallId,
) -> Result<
(triomphe::Arc<(::tt::Subtree<::tt::TokenId>, mbe::TokenMap)>, base_db::FileId),
ExpandError,
>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way.
@ -166,8 +151,20 @@ pub trait ExpandDatabase: SourceDatabase {
&self,
macro_call: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>>;
}
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
#[inline]
pub fn span_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> SpanMap {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => SpanMap::RealSpanMap(db.real_span_map(file_id)),
HirFileIdRepr::MacroFile(m) => {
SpanMap::ExpansionSpanMap(db.parse_macro_expansion(m).value.1)
}
}
}
pub fn real_span_map(db: &dyn ExpandDatabase, file_id: FileId) -> Arc<RealSpanMap> {
Arc::new(RealSpanMap::from_file(db, file_id))
}
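// Hedged usage sketch (editor's): `span_map` is the one entry point for going
// from text ranges to spans, whether the file is real or an expansion:
//
//     let map = db.span_map(file_id);
//     let span = map.as_ref().span_for_range(token.text_range());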
/// This expands the given macro call, but with different arguments. This is
@ -181,21 +178,36 @@ pub fn expand_speculative(
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let loc = db.lookup_intern_macro_call(actual_macro_call);
let token_range = token_to_map.text_range();
let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map);
// Build the subtree and token mapping for the speculative args
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
speculative_args,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, speculative_args);
let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
speculative_args,
span_map,
fixups.append,
fixups.remove,
),
fixups.undo_info,
)
}
};
let (attr_arg, token_id) = match loc.kind {
let attr_arg = match loc.kind {
MacroCallKind::Attr { invoc_attr_index, .. } => {
let attr = if loc.def.is_attribute_derive() {
// for pseudo-derive expansion we actually pass the attribute itself only
@ -210,59 +222,45 @@ pub fn expand_speculative(
}?;
match attr.token_tree() {
Some(token_tree) => {
let (mut tree, map) = syntax_node_to_token_tree(attr.token_tree()?.syntax());
tree.delimiter = tt::Delimiter::unspecified();
let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let shift = mbe::Shift::new(&tt);
shift.shift_all(&mut tree);
let token_id = if token_tree.syntax().text_range().contains_range(token_range) {
let attr_input_start =
token_tree.left_delimiter_token()?.text_range().start();
let range = token_range.checked_sub(attr_input_start)?;
let token_id = shift.shift(map.token_by_range(range)?);
Some(token_id)
} else {
None
};
(Some(tree), token_id)
Some(tree)
}
_ => (None, None),
}
}
_ => (None, None),
};
let token_id = match token_id {
Some(token_id) => token_id,
// token wasn't inside an attribute input, so it has to be in the general macro input
None => {
let range = token_range.checked_sub(speculative_args.text_range().start())?;
let token_id = spec_args_tmap.token_by_range(range)?;
match loc.def.kind {
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).map_id_down(token_id)
}
_ => token_id,
_ => None,
}
}
_ => None,
};
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => {
tt.delimiter = tt::Delimiter::unspecified();
expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref())
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
let call_site = loc.span(db);
expander.expand(
db,
loc.def.krate,
loc.krate,
&tt,
attr_arg.as_ref(),
call_site,
call_site,
call_site,
)
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?)
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
}
MacroDefKind::BuiltInDerive(expander, ..) => {
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
expander.expand(db, actual_macro_call, &adt, &spec_args_tmap)
expander.expand(db, actual_macro_call, &adt, span_map)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(tt)
}
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand(tt),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@ -270,13 +268,14 @@ pub fn expand_speculative(
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
};
let expand_to = macro_expand_to(db, actual_macro_call);
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let expand_to = loc.expand_to();
fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let syntax_node = node.syntax_node();
let token = rev_tmap
.ranges_by_token(token_id, token_to_map.kind())
.ranges_with_span(span_map.span_for_range(token_to_map.text_range()))
.filter_map(|range| syntax_node.covering_element(range).into_token())
.min_by_key(|t| {
// prefer tokens of the same kind and text
@ -293,7 +292,7 @@ fn ast_id_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
fn parse_or_expand(db: &dyn ExpandDatabase, file_id: HirFileId) -> SyntaxNode {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => db.parse(file_id).tree().syntax().clone(),
HirFileIdRepr::FileId(file_id) => db.parse(file_id).syntax_node(),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.0.syntax_node()
}
@ -312,17 +311,16 @@ fn parse_or_expand_with_err(
}
}
// FIXME: We should verify that the parsed node is one of the many macro node variants we expect
// instead of having it be untyped
fn parse_macro_expansion(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
macro_file: MacroFileId,
) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
let _p = profile::span("parse_macro_expansion");
let mbe::ValueResult { value: tt, err } = db.macro_expand(macro_file.macro_call_id);
let expand_to = macro_expand_to(db, macro_file.macro_call_id);
tracing::debug!("expanded = {}", tt.as_debug_string());
tracing::debug!("kind = {:?}", expand_to);
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let expand_to = loc.expand_to();
let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
@ -333,51 +331,129 @@ fn parse_macro_expansion_error(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
) -> ExpandResult<Box<[SyntaxError]>> {
db.parse_macro_expansion(MacroFile { macro_call_id })
db.parse_macro_expansion(MacroFileId { macro_call_id })
.map(|it| it.0.errors().to_vec().into_boxed_slice())
}
fn parse_with_map(db: &dyn ExpandDatabase, file_id: HirFileId) -> (Parse<SyntaxNode>, SpanMap) {
match file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(db.parse(file_id).to_syntax(), SpanMap::RealSpanMap(db.real_span_map(file_id)))
}
HirFileIdRepr::MacroFile(macro_file) => {
let (parse, map) = db.parse_macro_expansion(macro_file).value;
(parse, SpanMap::ExpansionSpanMap(map))
}
}
}
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<
Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>,
Arc<Box<[SyntaxError]>>,
> {
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, arg_id: _, error: _ }) = loc.eager.as_deref() {
return ValueResult::ok(Some(Arc::new((arg.0.clone(), arg.1.clone(), Default::default()))));
}
let ValueResult { value, err } = db.macro_arg_node(id);
let Some(arg) = value else {
return ValueResult { value: None, err };
// FIXME: consider the following by putting fixup info into eager call info args
// ) -> ValueResult<Option<Arc<(tt::Subtree, SyntaxFixupUndoInfo)>>, Arc<Box<[SyntaxError]>>> {
) -> ValueResult<Option<(Arc<tt::Subtree>, SyntaxFixupUndoInfo)>, Arc<Box<[SyntaxError]>>> {
let mismatched_delimiters = |arg: &SyntaxNode| {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
Some(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)]) as Box<[_]>))
} else {
None
}
};
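// Illustrative inputs (editor's examples): calls like `m!(x` or `m![x}` fail
// the first/last-token check above and are reported as an unbalanced token
// tree instead of being expanded.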
let loc = db.lookup_intern_macro_call(id);
if let Some(EagerCallInfo { arg, .. }) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..))
.then(|| loc.eager.as_deref())
.flatten()
{
ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE)))
} else {
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
let syntax = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
match node.token_tree() {
Some(tt) => {
let tt = tt.syntax();
if let Some(e) = mismatched_delimiters(tt) {
return ValueResult::only_err(e);
}
tt.clone()
}
None => {
return ValueResult::only_err(Arc::new(Box::new([
SyntaxError::new_at_offset("missing token tree".to_owned(), offset),
])));
}
}
}
MacroCallKind::Derive { ast_id, .. } => {
ast_id.to_ptr(db).to_node(&root).syntax().clone()
}
MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
};
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => {
(mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
}
MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
let censor = censor_for_macro_input(&loc, &syntax);
let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Node(it) => !censor.contains(it),
syntax::NodeOrToken::Token(_) => true,
});
fixups.remove.extend(censor);
(
mbe::syntax_node_to_token_tree_modified(
&syntax,
map,
fixups.append,
fixups.remove,
),
fixups.undo_info,
)
}
};
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect them with the parentheses included
tt.delimiter = tt::Delimiter::unspecified();
}
let val = Some(Arc::new((tt, tmap, fixups.undo_info)));
match err {
Some(err) => ValueResult::new(val, err),
None => ValueResult::ok(val),
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect them with the parentheses included
tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
}
if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
[] => ValueResult::ok(Some((Arc::new(tt), undo_info))),
errors => ValueResult::new(
Some((Arc::new(tt), undo_info)),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok(Some((Arc::new(tt), undo_info)))
}
}
}
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Certain macro calls expect some nodes in the input to be preprocessed away, namely:
/// - derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// - attributes expect the invoking attribute to be stripped
@ -417,103 +493,67 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
.unwrap_or_default()
}
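// Editor's illustration of the censoring rules documented above (hedged):
//
//     #[derive(Copy)]   // stripped while the later `Clone` derive expands
//     #[derive(Clone)]  // the invocation currently being expanded
//     struct S;
//
// For attribute macros, only the invoking `#[attr]` itself is removed.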
fn macro_arg_node(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<Option<GreenNode>, Arc<Box<[SyntaxError]>>> {
let err = || -> Arc<Box<[_]>> {
Arc::new(Box::new([SyntaxError::new_at_offset(
"invalid macro call".to_owned(),
syntax::TextSize::from(0),
)]))
};
let loc = db.lookup_intern_macro_call(id);
let arg = if let MacroDefKind::BuiltInEager(..) = loc.def.kind {
let res = if let Some(EagerCallInfo { arg, .. }) = loc.eager.as_deref() {
Some(mbe::token_tree_to_syntax_node(&arg.0, mbe::TopEntryPoint::MacroEagerInput).0)
} else {
loc.kind
.arg(db)
.and_then(|arg| ast::TokenTree::cast(arg.value))
.map(|tt| tt.reparse_as_comma_separated_expr().to_syntax())
};
match res {
Some(res) if res.errors().is_empty() => res.syntax_node(),
Some(res) => {
return ValueResult::new(
Some(res.syntax_node().green().into()),
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(res.errors().to_vec().into_boxed_slice()),
);
}
None => return ValueResult::only_err(err()),
}
} else {
match loc.kind.arg(db) {
Some(res) => res.value,
None => return ValueResult::only_err(err()),
}
};
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return ValueResult::only_err(Arc::new(Box::new([SyntaxError::new(
"unbalanced token tree".to_owned(),
arg.text_range(),
)])));
}
}
ValueResult::ok(Some(arg.green().into()))
}
fn decl_macro_expander(
db: &dyn ExpandDatabase,
def_crate: CrateId,
id: AstId<ast::Macro>,
) -> Arc<DeclarativeMacroExpander> {
let is_2021 = db.crate_graph()[def_crate].edition >= Edition::Edition2021;
let (mac, def_site_token_map) = match id.to_node(db) {
ast::Macro::MacroRules(macro_rules) => match macro_rules.token_tree() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
(mac, def_site_token_map)
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
ast::Macro::MacroDef(macro_def) => match macro_def.body() {
Some(arg) => {
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
(mac, def_site_token_map)
}
None => (
mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
Default::default(),
),
},
let (root, map) = parse_with_map(db, id.file_id);
let root = root.syntax_node();
let transparency = |node| {
// ... would be nice to have the item tree here
let attrs = RawAttrs::new(db, node, map.as_ref()).filter(db, def_crate);
match &*attrs
.iter()
.find(|it| {
it.path.as_ident().and_then(|it| it.as_str()) == Some("rustc_macro_transparency")
})?
.token_tree_value()?
.token_trees
{
[tt::TokenTree::Leaf(tt::Leaf::Ident(i)), ..] => match &*i.text {
"transparent" => Some(Transparency::Transparent),
"semitransparent" => Some(Transparency::SemiTransparent),
"opaque" => Some(Transparency::Opaque),
_ => None,
},
_ => None,
}
};
Arc::new(DeclarativeMacroExpander { mac, def_site_token_map })
let (mac, transparency) = match id.to_ptr(db).to_node(&root) {
ast::Macro::MacroRules(macro_rules) => (
match macro_rules.token_tree() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_rules).unwrap_or(Transparency::SemiTransparent),
),
ast::Macro::MacroDef(macro_def) => (
match macro_def.body() {
Some(arg) => {
let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021);
mac
}
None => mbe::DeclarativeMacro::from_err(
mbe::ParseError::Expected("expected a token tree".into()),
is_2021,
),
},
transparency(&macro_def).unwrap_or(Transparency::Opaque),
),
};
Arc::new(DeclarativeMacroExpander { mac, transparency })
}
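// Hedged note (editor's): the `transparency` helper above reads a
// `rustc_macro_transparency` attribute off the macro definition and maps its
// first ident to Transparent/SemiTransparent/Opaque; absent that attribute,
// `macro_rules!` definitions default to SemiTransparent and `macro` (2.0)
// definitions to Opaque, per the `unwrap_or` fallbacks.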
fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
@ -529,39 +569,31 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
}
}
fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
fn macro_expand(
db: &dyn ExpandDatabase,
macro_call_id: MacroCallId,
loc: MacroCallLoc,
) -> ExpandResult<Arc<tt::Subtree>> {
let _p = profile::span("macro_expand");
let loc = db.lookup_intern_macro_call(id);
let ExpandResult { value: tt, mut err } = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(id),
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
MacroDefKind::BuiltInDerive(expander, ..) => {
let arg = db.macro_arg_node(id).value.unwrap();
let (root, map) = parse_with_map(db, loc.kind.file_id());
let root = root.syntax_node();
let MacroCallKind::Derive { ast_id, .. } = loc.kind else { unreachable!() };
let node = ast_id.to_ptr(db).to_node(&root);
let node = SyntaxNode::new_root(arg);
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new())));
let (tmap, _) = mbe::syntax_node_to_token_map_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
// this cast is a bit sus, can we avoid losing the typedness here?
let adt = ast::Adt::cast(node).unwrap();
let mut res = expander.expand(db, id, &adt, &tmap);
fixup::reverse_fixups(&mut res.value, &tmap, &fixups.undo_info);
res
// FIXME: Use censoring
let _censor = censor_for_macro_input(&loc, node.syntax());
expander.expand(db, macro_call_id, &node, map.as_ref())
}
_ => {
let ValueResult { value, err } = db.macro_arg(id);
let Some(macro_arg) = value else {
let ValueResult { value, err } = db.macro_arg(macro_call_id);
let Some((macro_arg, undo_info)) = value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@ -570,12 +602,14 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
};
};
let (arg, arg_tm, undo_info) = &*macro_arg;
let mut res = match loc.def.kind {
let arg = &*macro_arg;
match loc.def.kind {
MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(arg.clone())
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltIn(it, _) => it.expand(db, id, &arg).map_err(Into::into),
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through
@ -583,11 +617,8 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
// will end up going through here again, whereas here we just want to inspect the raw input.
// As such we just return the input subtree here.
MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => {
let mut arg = arg.clone();
fixup::reverse_fixups(&mut arg, arg_tm, undo_info);
return ExpandResult {
value: Arc::new(arg),
value: macro_arg.clone(),
err: err.map(|err| {
let mut buf = String::new();
for err in &**err {
@ -600,12 +631,16 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
}),
};
}
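// (Editor's illustrative note: an eager input such as the `concat!(..)`
// inside `include!(concat!(..))` was already expanded when the macro calls
// were collected, so this arm deliberately hands back the stored input
// subtree instead of expanding again.)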
MacroDefKind::BuiltInEager(it, _) => it.expand(db, id, &arg).map_err(Into::into),
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, id, &arg),
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, macro_call_id, &arg).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, &arg);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
_ => unreachable!(),
};
fixup::reverse_fixups(&mut res.value, arg_tm, undo_info);
res
}
}
};
@ -627,10 +662,10 @@ fn macro_expand(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
let Some(macro_arg) = db.macro_arg(id).value else {
let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
return ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: Vec::new(),
}),
// FIXME: We should make sure to enforce an invariant that invalid macro
@ -639,47 +674,44 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
};
};
let (arg_tt, arg_tm, undo_info) = &*macro_arg;
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
mbe::Shift::new(arg_tt).shift_all(&mut attr_args);
Some(attr_args)
}
MacroCallKind::Attr { attr_args: Some(attr_args), .. } => Some(&**attr_args),
_ => None,
};
let ExpandResult { value: mut tt, err } =
expander.expand(db, loc.def.krate, loc.krate, arg_tt, attr_arg.as_ref());
let call_site = loc.span(db);
let ExpandResult { value: mut tt, err } = expander.expand(
db,
loc.def.krate,
loc.krate,
&macro_arg,
attr_arg,
// FIXME
call_site,
call_site,
// FIXME
call_site,
);
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value;
}
fixup::reverse_fixups(&mut tt, arg_tm, undo_info);
fixup::reverse_fixups(&mut tt, &undo_info);
ExpandResult { value: Arc::new(tt), err }
}
fn hygiene_frame(db: &dyn ExpandDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
db.lookup_intern_macro_call(id).expand_to()
}
fn token_tree_to_syntax_node(
tt: &tt::Subtree,
expand_to: ExpandTo,
) -> (Parse<SyntaxNode>, mbe::TokenMap) {
) -> (Parse<SyntaxNode>, ExpansionSpanMap) {
let entry_point = match expand_to {
ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
@ -695,7 +727,7 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
if TOKEN_LIMIT.check(count).is_err() {
Err(ExpandResult {
value: Arc::new(tt::Subtree {
delimiter: tt::Delimiter::UNSPECIFIED,
delimiter: tt::Delimiter::DUMMY_INVISIBLE,
token_trees: vec![],
}),
err: Some(ExpandError::other(format!(
@ -708,3 +740,44 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
Ok(())
}
}
fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
db.intern_syntax_context(SyntaxContextData::root());
}
fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
let mut s = String::from("Expansions:");
let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
let id = e.key;
let expn_data = e.value.as_ref().unwrap();
s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
id,
expn_data.kind.file_id(),
expn_data.call_site,
SyntaxContextId::ROOT, // FIXME expn_data.def_site,
expn_data.kind.descr(),
));
}
s.push_str("\n\nSyntaxContexts:\n");
let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
entries.sort_by_key(|e| e.key);
for e in entries {
struct SyntaxContextDebug<'a>(
&'a dyn ExpandDatabase,
SyntaxContextId,
&'a SyntaxContextData,
);
impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.2.fancy_debug(self.1, self.0, f)
}
}
stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
}
s
}
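For orientation, this is the shape of the span every token carries after this rewrite, assembled from the type and field names used elsewhere in this diff (`SpanData`, `SpanAnchor`, `SyntaxContextId`); a minimal sketch, the concrete values are illustrative:

let span = SpanData {
    // token range, relative to `anchor` rather than to the file start,
    // so edits elsewhere in the file keep the span stable
    range: TextRange::new(TextSize::new(0), TextSize::new(5)),
    // the item the range is made relative to, identified by file + ast id
    anchor: SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) },
    // hygiene information; ROOT means "not from any macro expansion"
    ctx: SyntaxContextId::ROOT,
};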

View File

@ -18,18 +18,17 @@
//!
//!
//! See the full discussion: <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use rustc_hash::{FxHashMap, FxHashSet};
use syntax::{ted, Parse, SyntaxNode, TextRange, TextSize, WalkEvent};
use base_db::{span::SyntaxContextId, CrateId};
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
use crate::{
ast::{self, AstNode},
db::ExpandDatabase,
hygiene::Hygiene,
mod_path::ModPath,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, InFile, MacroCallId, MacroCallKind,
MacroCallLoc, MacroDefId, MacroDefKind,
span::SpanMapRef,
EagerCallInfo, ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, InFile, MacroCallId,
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind,
};
pub fn expand_eager_macro_input(
@ -37,6 +36,7 @@ pub fn expand_eager_macro_input(
krate: CrateId,
macro_call: InFile<ast::MacroCall>,
def: MacroDefId,
call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let ast_map = db.ast_id_map(macro_call.file_id);
@ -53,75 +53,44 @@ pub fn expand_eager_macro_input(
krate,
eager: None,
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to: ExpandTo::Expr },
call_site,
});
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
// we need this map here as the expansion of the eager input fake file loses whitespace ...
let mut ws_mapping = FxHashMap::default();
if let Some((_, tm, _)) = db.macro_arg(arg_id).value.as_deref() {
ws_mapping.extend(tm.entries().filter_map(|(id, range)| {
Some((arg_exp_map.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)?, range))
}));
}
let mut arg_map = ExpansionSpanMap::empty();
let ExpandResult { value: expanded_eager_input, err } = {
eager_macro_recur(
db,
&Hygiene::new(db, macro_call.file_id),
&arg_exp_map,
&mut arg_map,
TextSize::new(0),
InFile::new(arg_id.as_file(), arg_exp.syntax_node()),
krate,
call_site,
resolver,
)
};
let err = parse_err.or(err);
if cfg!(debug_assertions) {
arg_map.finish();
}
let Some((expanded_eager_input, mapping)) = expanded_eager_input else {
let Some((expanded_eager_input, _mapping)) = expanded_eager_input else {
return ExpandResult { value: None, err };
};
let (mut subtree, expanded_eager_input_token_map) =
mbe::syntax_node_to_token_tree(&expanded_eager_input);
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
let og_tmap = if let Some(tt) = macro_call.value.token_tree() {
let mut ids_used = FxHashSet::default();
let mut og_tmap = mbe::syntax_node_to_token_map(tt.syntax());
// The tokenmap and ids of subtree point into the expanded syntax node, but that is inaccessible from the outside
// so we need to remap them to the original input of the eager macro.
subtree.visit_ids(&mut |id| {
// Note: we discard all token ids of braces and the like here, but that's not too bad and only a temporary fix
if let Some(range) = expanded_eager_input_token_map
.first_range_by_token(id, syntax::SyntaxKind::TOMBSTONE)
{
// remap from expanded eager input to eager input expansion
if let Some(og_range) = mapping.get(&range) {
// remap from eager input expansion to original eager input
if let Some(&og_range) = ws_mapping.get(og_range) {
if let Some(og_token) = og_tmap.token_by_range(og_range) {
ids_used.insert(og_token);
return og_token;
}
}
}
}
tt::TokenId::UNSPECIFIED
});
og_tmap.filter(|id| ids_used.contains(&id));
og_tmap
} else {
Default::default()
};
subtree.delimiter = crate::tt::Delimiter::unspecified();
subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
let loc = MacroCallLoc {
def,
krate,
eager: Some(Box::new(EagerCallInfo {
arg: Arc::new((subtree, og_tmap)),
arg_id,
error: err.clone(),
})),
eager: Some(Box::new(EagerCallInfo { arg: Arc::new(subtree), arg_id, error: err.clone() })),
kind: MacroCallKind::FnLike { ast_id: call_id, expand_to },
call_site,
};
ExpandResult { value: Some(db.intern_macro_call(loc)), err }
@ -132,12 +101,13 @@ fn lazy_expand(
def: &MacroDefId,
macro_call: InFile<ast::MacroCall>,
krate: CrateId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<mbe::TokenMap>)> {
call_site: SyntaxContextId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
let expand_to = ExpandTo::from_call_site(&macro_call.value);
let ast_id = macro_call.with_value(ast_id);
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to });
let id = def.as_lazy_macro(db, krate, MacroCallKind::FnLike { ast_id, expand_to }, call_site);
let macro_file = id.as_macro_file();
db.parse_macro_expansion(macro_file)
@ -146,57 +116,59 @@ fn lazy_expand(
fn eager_macro_recur(
db: &dyn ExpandDatabase,
hygiene: &Hygiene,
span_map: &ExpansionSpanMap,
expanded_map: &mut ExpansionSpanMap,
mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, FxHashMap<TextRange, TextRange>)>> {
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();
let mut mapping = FxHashMap::default();
let mut replacements = Vec::new();
// FIXME: We only report a single error inside of eager expansions
let mut error = None;
let mut offset = 0i32;
let apply_offset = |it: TextSize, offset: i32| {
TextSize::from(u32::try_from(offset + u32::from(it) as i32).unwrap_or_default())
};
let mut children = original.preorder_with_tokens();
// Collect replacement
while let Some(child) = children.next() {
let WalkEvent::Enter(child) = child else { continue };
let call = match child {
syntax::NodeOrToken::Node(node) => match ast::MacroCall::cast(node) {
WalkEvent::Enter(SyntaxElement::Node(child)) => match ast::MacroCall::cast(child) {
Some(it) => {
children.skip_subtree();
it
}
None => continue,
_ => continue,
},
syntax::NodeOrToken::Token(t) => {
mapping.insert(
TextRange::new(
apply_offset(t.text_range().start(), offset),
apply_offset(t.text_range().end(), offset),
),
t.text_range(),
);
WalkEvent::Enter(_) => continue,
WalkEvent::Leave(child) => {
if let SyntaxElement::Token(t) = child {
let start = t.text_range().start();
offset += t.text_range().len();
expanded_map.push(offset, span_map.span_at(start));
}
continue;
}
};
let def = match call.path().and_then(|path| ModPath::from_src(db, path, hygiene)) {
let def = match call
.path()
.and_then(|path| ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(span_map)))
{
Some(path) => match macro_resolver(path.clone()) {
Some(def) => def,
None => {
error =
Some(ExpandError::other(format!("unresolved macro {}", path.display(db))));
offset += call.syntax().text_range().len();
continue;
}
},
None => {
error = Some(ExpandError::other("malformed macro invocation"));
offset += call.syntax().text_range().len();
continue;
}
};
@ -207,29 +179,22 @@ fn eager_macro_recur(
krate,
curr.with_value(call.clone()),
def,
call_site,
macro_resolver,
);
match value {
Some(call_id) => {
let ExpandResult { value, err: err2 } =
let ExpandResult { value: (parse, map), err: err2 } =
db.parse_macro_expansion(call_id.as_macro_file());
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start =
apply_offset(call.syntax().text_range().start(), offset);
if let Some((_, arg_map, _)) = db.macro_arg(call_id).value.as_deref() {
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
value
.1
.first_range_by_token(tid, syntax::SyntaxKind::TOMBSTONE)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
map.iter().for_each(|(o, span)| expanded_map.push(o + offset, span));
let syntax_node = parse.syntax_node();
ExpandResult {
value: Some(value.0.syntax_node().clone_for_update()),
value: Some((
syntax_node.clone_for_update(),
offset + syntax_node.text_range().len(),
)),
err: err.or(err2),
}
}
@ -242,45 +207,23 @@ fn eager_macro_recur(
| MacroDefKind::BuiltInDerive(..)
| MacroDefKind::ProcMacro(..) => {
let ExpandResult { value: (parse, tm), err } =
lazy_expand(db, &def, curr.with_value(call.clone()), krate);
let decl_mac = if let MacroDefKind::Declarative(ast_id) = def.kind {
Some(db.decl_macro_expander(def.krate, ast_id))
} else {
None
};
lazy_expand(db, &def, curr.with_value(call.clone()), krate, call_site);
// replace macro inside
let hygiene = Hygiene::new(db, parse.file_id);
let ExpandResult { value, err: error } = eager_macro_recur(
db,
&hygiene,
&tm,
expanded_map,
offset,
// FIXME: We discard parse errors here
parse.as_ref().map(|it| it.syntax_node()),
krate,
call_site,
macro_resolver,
);
let err = err.or(error);
if let Some(tt) = call.token_tree() {
let call_tt_start = tt.syntax().text_range().start();
let call_start = apply_offset(call.syntax().text_range().start(), offset);
if let Some((_tt, arg_map, _)) = parse
.file_id
.macro_file()
.and_then(|id| db.macro_arg(id.macro_call_id).value)
.as_deref()
{
mapping.extend(arg_map.entries().filter_map(|(tid, range)| {
tm.first_range_by_token(
decl_mac.as_ref().map(|it| it.map_id_down(tid)).unwrap_or(tid),
syntax::SyntaxKind::TOMBSTONE,
)
.map(|r| (r + call_start, range + call_tt_start))
}));
}
}
// FIXME: Do we need to re-use _m here?
ExpandResult { value: value.map(|(n, _m)| n), err }
ExpandResult { value, err }
}
};
if err.is_some() {
@ -288,16 +231,18 @@ fn eager_macro_recur(
}
// check if the whole original syntax is replaced
if call.syntax() == &original {
return ExpandResult { value: value.zip(Some(mapping)), err: error };
return ExpandResult { value, err: error };
}
if let Some(insert) = value {
offset += u32::from(insert.text_range().len()) as i32
- u32::from(call.syntax().text_range().len()) as i32;
replacements.push((call, insert));
match value {
Some((insert, new_offset)) => {
replacements.push((call, insert));
offset = new_offset;
}
None => offset += call.syntax().text_range().len(),
}
}
replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
ExpandResult { value: Some((original, mapping)), err: error }
ExpandResult { value: Some((original, offset)), err: error }
}
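The offset bookkeeping above is the crux of this function; condensed, the invariant maintained for every plain token that is copied over is (a sketch, names as in the surrounding code):

// advance the running offset past the token and record which original
// span covers everything up to that offset; `expanded_map` is therefore
// keyed by token *end* offsets in the rebuilt tree
let start = t.text_range().start();
offset += t.text_range().len();
expanded_map.push(offset, span_map.span_at(start));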

View File

@ -0,0 +1,340 @@
//! Things to wrap other things in file ids.
use std::iter;
use base_db::{
span::{HirFileId, HirFileIdRepr, MacroFileId, SyntaxContextId},
FileId, FileRange,
};
use either::Either;
use syntax::{AstNode, SyntaxNode, SyntaxToken, TextRange};
use crate::{db, ExpansionInfo, HirFileIdExt as _};
/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
///
/// Typical usages are:
///
/// * `InFile<SyntaxNode>` -- syntax node in a file
/// * `InFile<ast::FnDef>` -- ast node in a file
/// * `InFile<TextSize>` -- offset in a file
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub struct InFileWrapper<FileKind, T> {
pub file_id: FileKind,
pub value: T,
}
pub type InFile<T> = InFileWrapper<HirFileId, T>;
pub type InMacroFile<T> = InFileWrapper<MacroFileId, T>;
pub type InRealFile<T> = InFileWrapper<FileId, T>;
impl<FileKind, T> InFileWrapper<FileKind, T> {
pub fn new(file_id: FileKind, value: T) -> Self {
Self { file_id, value }
}
pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, f(self.value))
}
}
impl<FileKind: Copy, T> InFileWrapper<FileKind, T> {
pub fn with_value<U>(&self, value: U) -> InFileWrapper<FileKind, U> {
InFileWrapper::new(self.file_id, value)
}
pub fn as_ref(&self) -> InFileWrapper<FileKind, &T> {
self.with_value(&self.value)
}
}
impl<FileKind: Copy, T: Clone> InFileWrapper<FileKind, &T> {
pub fn cloned(&self) -> InFileWrapper<FileKind, T> {
self.with_value(self.value.clone())
}
}
impl<T> From<InMacroFile<T>> for InFile<T> {
fn from(InMacroFile { file_id, value }: InMacroFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
impl<T> From<InRealFile<T>> for InFile<T> {
fn from(InRealFile { file_id, value }: InRealFile<T>) -> Self {
InFile { file_id: file_id.into(), value }
}
}
// region:transpose impls
impl<FileKind, T> InFileWrapper<FileKind, Option<T>> {
pub fn transpose(self) -> Option<InFileWrapper<FileKind, T>> {
Some(InFileWrapper::new(self.file_id, self.value?))
}
}
impl<FileKind, L, R> InFileWrapper<FileKind, Either<L, R>> {
pub fn transpose(self) -> Either<InFileWrapper<FileKind, L>, InFileWrapper<FileKind, R>> {
match self.value {
Either::Left(l) => Either::Left(InFileWrapper::new(self.file_id, l)),
Either::Right(r) => Either::Right(InFileWrapper::new(self.file_id, r)),
}
}
}
// endregion:transpose impls
trait FileIdToSyntax: Copy {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode;
}
impl FileIdToSyntax for FileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse(self).syntax_node()
}
}
impl FileIdToSyntax for MacroFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_macro_expansion(self).value.0.syntax_node()
}
}
impl FileIdToSyntax for HirFileId {
fn file_syntax(self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
db.parse_or_expand(self)
}
}
#[allow(private_bounds)]
impl<FileId: FileIdToSyntax, T> InFileWrapper<FileId, T> {
pub fn file_syntax(&self, db: &dyn db::ExpandDatabase) -> SyntaxNode {
FileIdToSyntax::file_syntax(self.file_id, db)
}
}
impl<FileId: Copy, N: AstNode> InFileWrapper<FileId, N> {
pub fn syntax(&self) -> InFileWrapper<FileId, &SyntaxNode> {
self.with_value(self.value.syntax())
}
}
// region:specific impls
impl InFile<&SyntaxNode> {
pub fn ancestors_with_macros(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => node.file_id.call_node(db),
})
}
/// Skips the attributed item that caused the macro invocation we are climbing up
pub fn ancestors_with_macros_skip_attr_item(
self,
db: &dyn db::ExpandDatabase,
) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
Some(parent) => Some(node.with_value(parent)),
None => {
let parent_node = node.file_id.call_node(db)?;
if node.file_id.is_attr_macro(db) {
// macro call was an attributed item, skip it
// FIXME: does this fail if this is a direct expansion of another macro?
parent_node.map(|node| node.parent()).transpose()
} else {
Some(parent_node)
}
}
};
iter::successors(succ(&self.cloned()), succ)
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
///
/// For attributes and derives, this will point back to the attribute only.
/// For the entire item use [`InFile::original_file_range_full`].
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range_full(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
if let Some((res, ctxt)) =
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
{
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return res;
}
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range_with_body(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(
self,
db: &dyn db::ExpandDatabase,
) -> Option<(FileRange, SyntaxContextId)> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some((FileRange { file_id, range: self.value.text_range() }, SyntaxContextId::ROOT))
}
HirFileIdRepr::MacroFile(mac_file) => {
ExpansionInfo::new(db, mac_file).map_node_range_up(db, self.value.text_range())
}
}
}
pub fn original_syntax_node(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InRealFile<SyntaxNode>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value.clone() })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !self.file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) =
ExpansionInfo::new(db, file_id).map_node_range_up(db, self.value.text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
let anc = db.parse(file_id).syntax_node().covering_element(range);
let kind = self.value.kind();
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let value = anc.ancestors().find(|it| it.kind() == kind)?;
Some(InRealFile::new(file_id, value))
}
}
impl InFile<SyntaxToken> {
pub fn upmap_once(
self,
db: &dyn db::ExpandDatabase,
) -> Option<InFile<smallvec::SmallVec<[TextRange; 1]>>> {
Some(self.file_id.expansion_info(db)?.map_range_up_once(db, self.value.text_range()))
}
/// Falls back to the macro call range if the node cannot be mapped up fully.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.map_token_range_up(db, self.value.text_range());
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
return range;
}
// Fall back to whole macro call.
let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
loc.kind.original_call_range(db)
}
}
}
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range_opt(self, db: &dyn db::ExpandDatabase) -> Option<FileRange> {
match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
Some(FileRange { file_id, range: self.value.text_range() })
}
HirFileIdRepr::MacroFile(mac_file) => {
let (range, ctxt) = ExpansionInfo::new(db, mac_file)
.map_token_range_up(db, self.value.text_range());
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if ctxt.is_root() {
Some(range)
} else {
None
}
}
}
}
}
impl InFile<TextRange> {
/// Attempts to map the syntax node back up its macro calls.
pub fn original_file_range(self, db: &dyn db::ExpandDatabase) -> FileRange {
let (range, _ctxt) = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
(FileRange { file_id, range: self.value }, SyntaxContextId::ROOT)
}
HirFileIdRepr::MacroFile(m) => {
ExpansionInfo::new(db, m).map_token_range_up(db, self.value)
}
};
range
}
}
impl<N: AstNode> InFile<N> {
pub fn original_ast_node(self, db: &dyn db::ExpandDatabase) -> Option<InRealFile<N>> {
// This kind of upmapping can only be achieved in attribute expanded files,
// as we don't have node inputs otherwise and therefore can't find an `N` node in the input
let file_id = match self.file_id.repr() {
HirFileIdRepr::FileId(file_id) => {
return Some(InRealFile { file_id, value: self.value })
}
HirFileIdRepr::MacroFile(m) => m,
};
if !self.file_id.is_attr_macro(db) {
return None;
}
let (FileRange { file_id, range }, ctx) = ExpansionInfo::new(db, file_id)
.map_node_range_up(db, self.value.syntax().text_range())?;
// FIXME: Figure out an API that makes proper use of ctx, this only exists to
// keep pre-token map rewrite behaviour.
if !ctx.is_root() {
return None;
}
// FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes?
let anc = db.parse(file_id).syntax_node().covering_element(range);
let value = anc.ancestors().find_map(N::cast)?;
Some(InRealFile::new(file_id, value))
}
}
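A short usage sketch for these wrappers, assuming a `db: &dyn ExpandDatabase`, a `file_id: HirFileId`, and a `node: SyntaxNode` obtained from it; `ctx.is_root()` distinguishes tokens that map cleanly back into a real file:

let wrapped: InFile<&SyntaxNode> = InFile::new(file_id, &node);
match wrapped.original_file_range_opt(db) {
    // the node maps back into a real file with root hygiene
    Some((FileRange { file_id, range }, ctx)) if ctx.is_root() => {
        // use `file_id` and `range` directly
    }
    // otherwise fall back to the range of the whole macro call
    _ => {
        let _fallback = wrapped.original_file_range(db);
    }
}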

View File

@ -1,111 +1,122 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData},
FileId,
};
use la_arena::RawIdx;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use syntax::{
ast::{self, AstNode, HasLoopBody},
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use tt::Spacing;
use crate::{
span::SpanMapRef,
tt::{Ident, Leaf, Punct, Subtree},
};
use tt::token_id::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards.
#[derive(Debug, Default)]
pub(crate) struct SyntaxFixups {
pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>,
pub(crate) remove: FxHashSet<SyntaxNode>,
pub(crate) undo_info: SyntaxFixupUndoInfo,
pub(crate) token_map: TokenMap,
pub(crate) next_id: u32,
}
/// This is the information needed to reverse the fixups.
#[derive(Debug, Default, PartialEq, Eq)]
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct SyntaxFixupUndoInfo {
original: Box<[Subtree]>,
// FIXME: ThinArc<[Subtree]>
original: Option<Arc<Box<[Subtree]>>>,
}
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
impl SyntaxFixupUndoInfo {
pub(crate) const NONE: Self = SyntaxFixupUndoInfo { original: None };
}
pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
// censoring -> just don't convert the node
// replacement -> censor + append
// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how
// to remove later
pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
let mut append = FxHashMap::<SyntaxElement, _>::default();
let mut replace = FxHashMap::<SyntaxElement, _>::default();
let mut remove = FxHashSet::<SyntaxNode>::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
let mut token_map = TokenMap::default();
let mut next_id = 0;
let dummy_range = TextRange::empty(TextSize::new(0));
// We use a file id of `FileId(!0)` to signal a fake node; the text range's start offset serves
// as the index into the replacement vec, but only if the end points to !0
let dummy_anchor =
SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(!0)) };
let fake_span = |range| SpanData {
range: dummy_range,
anchor: dummy_anchor,
ctx: span_map.span_for_range(range).ctx,
};
while let Some(event) = preorder.next() {
let node = match event {
syntax::WalkEvent::Enter(node) => node,
syntax::WalkEvent::Leave(_) => continue,
};
let syntax::WalkEvent::Enter(node) = event else { continue };
let node_range = node.text_range();
if can_handle_error(&node) && has_error_to_handle(&node) {
remove.insert(node.clone().into());
// the node contains an error node, we have to completely replace it by something valid
let (original_tree, new_tmap, new_next_id) =
mbe::syntax_node_to_token_tree_with_modifications(
&node,
mem::take(&mut token_map),
next_id,
Default::default(),
Default::default(),
);
token_map = new_tmap;
next_id = new_next_id;
let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
let idx = original.len() as u32;
original.push(original_tree);
let replacement = SyntheticToken {
kind: SyntaxKind::IDENT,
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: node.text_range(),
id: SyntheticTokenId(idx),
};
replace.insert(node.clone().into(), vec![replacement]);
span: SpanData {
range: TextRange::new(TextSize::new(idx), TextSize::new(!0)),
anchor: dummy_anchor,
ctx: span_map.span_for_range(node_range).ctx,
},
});
append.insert(node.clone().into(), vec![replacement]);
preorder.skip_subtree();
continue;
}
// In some other situations, we can fix things by just appending some tokens.
let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
ast::FieldExpr(it) => {
if it.name_ref().is_none() {
// incomplete field access: some_expr.|
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range),
}),
]);
}
},
ast::ExprStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
range: end_range,
id: EMPTY_ID,
},
Leaf::Punct(Punct {
char: ';',
spacing: Spacing::Alone,
span: fake_span(node_range),
}),
]);
}
},
ast::LetStmt(it) => {
if it.semicolon_token().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::SEMICOLON,
text: ";".into(),
range: end_range,
id: EMPTY_ID,
},
Leaf::Punct(Punct {
char: ';',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -117,28 +128,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(if_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range)
}),
]);
}
if it.then_branch().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -150,46 +158,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue,
};
append.insert(while_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID,
},
span: fake_span(node_range)
}),
]);
}
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
ast::LoopExpr(it) => {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -201,29 +205,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
None => continue
};
append.insert(match_token.into(), vec![
SyntheticToken {
kind: SyntaxKind::IDENT,
Leaf::Ident(Ident {
text: "__ra_fixup".into(),
range: end_range,
id: EMPTY_ID
},
span: fake_span(node_range)
}),
]);
}
if it.match_arm_list().is_none() {
// No match arms
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -234,10 +235,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
};
let [pat, in_token, iter] = [
(SyntaxKind::UNDERSCORE, "_"),
(SyntaxKind::IN_KW, "in"),
(SyntaxKind::IDENT, "__ra_fixup")
].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
"_",
"in",
"__ra_fixup"
].map(|text|
Leaf::Ident(Ident {
text: text.into(),
span: fake_span(node_range)
}),
);
if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
append.insert(for_token.into(), vec![pat, in_token, iter]);
@ -248,18 +254,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
if it.loop_body().is_none() {
append.insert(node.clone().into(), vec![
SyntheticToken {
kind: SyntaxKind::L_CURLY,
text: "{".into(),
range: end_range,
id: EMPTY_ID,
},
SyntheticToken {
kind: SyntaxKind::R_CURLY,
text: "}".into(),
range: end_range,
id: EMPTY_ID,
},
// FIXME: This should be a subtree, no?
Leaf::Punct(Punct {
char: '{',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
Leaf::Punct(Punct {
char: '}',
spacing: Spacing::Alone,
span: fake_span(node_range)
}),
]);
}
},
@ -267,12 +272,13 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
let needs_fixups = !append.is_empty() || !original.is_empty();
SyntaxFixups {
append,
replace,
token_map,
next_id,
undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() },
remove,
undo_info: SyntaxFixupUndoInfo {
original: needs_fixups.then(|| Arc::new(original.into_boxed_slice())),
},
}
}
@ -288,30 +294,32 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
pub(crate) fn reverse_fixups(
tt: &mut Subtree,
token_map: &TokenMap,
undo_info: &SyntaxFixupUndoInfo,
) {
pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) {
let Some(undo_info) = undo_info.original.as_deref() else { return };
let undo_info = &**undo_info;
reverse_fixups_(tt, undo_info);
}
fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
let tts = std::mem::take(&mut tt.token_trees);
tt.token_trees = tts
.into_iter()
// delete all fake nodes
.filter(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID)
}
tt::TokenTree::Subtree(st) => {
token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID)
let span = leaf.span();
span.anchor.file_id != FileId(!0) || span.range.end() == TextSize::new(!0)
}
tt::TokenTree::Subtree(_) => true,
})
.flat_map(|tt| match tt {
tt::TokenTree::Subtree(mut tt) => {
reverse_fixups(&mut tt, token_map, undo_info);
reverse_fixups_(&mut tt, undo_info);
SmallVec::from_const([tt.into()])
}
tt::TokenTree::Leaf(leaf) => {
if let Some(id) = token_map.synthetic_token_id(*leaf.span()) {
let original = undo_info.original[id.0 as usize].clone();
if leaf.span().anchor.file_id == FileId(!0) {
let original = undo_info[u32::from(leaf.span().range.start()) as usize].clone();
if original.delimiter.kind == tt::DelimiterKind::Invisible {
original.token_trees.into()
} else {
@ -327,11 +335,15 @@ pub(crate) fn reverse_fixups(
#[cfg(test)]
mod tests {
use base_db::FileId;
use expect_test::{expect, Expect};
use triomphe::Arc;
use crate::tt;
use super::reverse_fixups;
use crate::{
fixup::reverse_fixups,
span::{RealSpanMap, SpanMap},
tt,
};
// The following three functions are only meant to check partial structural equivalence of
// `TokenTree`s, see the last assertion in `check()`.
@ -361,13 +373,13 @@ mod tests {
#[track_caller]
fn check(ra_fixture: &str, mut expect: Expect) {
let parsed = syntax::SourceFile::parse(ra_fixture);
let fixups = super::fixup_syntax(&parsed.syntax_node());
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId(0))));
let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
let mut tt = mbe::syntax_node_to_token_tree_modified(
&parsed.syntax_node(),
fixups.token_map,
fixups.next_id,
fixups.replace,
span_map.as_ref(),
fixups.append,
fixups.remove,
);
let actual = format!("{tt}\n");
@ -383,14 +395,15 @@ mod tests {
parse.syntax_node()
);
reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
reverse_fixups(&mut tt, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// modulo token IDs and `Punct`s' spacing.
let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
let original_as_tt =
mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
assert!(
check_subtree_eq(&tt, &original_as_tt),
"different token tree: {tt:?},\n{original_as_tt:?}"
"different token tree:\n{tt:?}\n\n{original_as_tt:?}"
);
}
@ -403,7 +416,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {for _ in __ra_fixup {}}
fn foo () {for _ in __ra_fixup { }}
"#]],
)
}
@ -431,7 +444,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {for bar in qux {}}
fn foo () {for bar in qux { }}
"#]],
)
}
@ -462,7 +475,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
)
}
@ -494,7 +507,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {match __ra_fixup {}}
fn foo () {match __ra_fixup { }}
"#]],
)
}
@ -609,7 +622,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if a {}}
fn foo () {if a { }}
"#]],
)
}
@ -623,7 +636,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if __ra_fixup {}}
fn foo () {if __ra_fixup { }}
"#]],
)
}
@ -637,7 +650,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {if __ra_fixup {} {}}
fn foo () {if __ra_fixup {} { }}
"#]],
)
}
@ -651,7 +664,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {while __ra_fixup {}}
fn foo () {while __ra_fixup { }}
"#]],
)
}
@ -665,7 +678,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {while foo {}}
fn foo () {while foo { }}
"#]],
)
}
@ -692,7 +705,7 @@ fn foo() {
}
"#,
expect![[r#"
fn foo () {loop {}}
fn foo () {loop { }}
"#]],
)
}
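For reference, the complete fixup round-trip exercised by `check` above, condensed into one place (a sketch; `node` and `span_map` as constructed in the test):

// compute removals (error nodes) and appends (synthesized tokens)
let fixups = fixup_syntax(span_map.as_ref(), &node);
// lower to a token tree with those modifications applied
let mut tt = mbe::syntax_node_to_token_tree_modified(
    &node,
    span_map.as_ref(),
    fixups.append,
    fixups.remove,
);
// ... run the macro on `tt`, then strip the fake tokens again, splicing
// the original (still erroneous) subtrees back in
reverse_fixups(&mut tt, &fixups.undo_info);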

View File

@ -2,252 +2,241 @@
//!
//! Specifically, `ast` + `Hygiene` allows you to create a `Name`. Note that, at
//! this moment, this is horribly incomplete and handles only `$crate`.
use base_db::CrateId;
use db::TokenExpander;
use either::Either;
use mbe::Origin;
use syntax::{
ast::{self, HasDocComments},
AstNode, SyntaxKind, SyntaxNode, TextRange, TextSize,
};
use triomphe::Arc;
use std::iter;
use crate::{
db::{self, ExpandDatabase},
fixup,
name::{AsName, Name},
HirFileId, InFile, MacroCallKind, MacroCallLoc, MacroDefKind, MacroFile,
};
use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
#[derive(Clone, Debug)]
pub struct Hygiene {
frames: Option<HygieneFrames>,
use crate::db::ExpandDatabase;
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct SyntaxContextData {
pub outer_expn: Option<MacroCallId>,
pub outer_transparency: Transparency,
pub parent: SyntaxContextId,
/// This context, but with all transparent and semi-transparent expansions filtered away.
pub opaque: SyntaxContextId,
/// This context, but with all transparent expansions filtered away.
pub opaque_and_semitransparent: SyntaxContextId,
}
impl Hygiene {
pub fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Hygiene {
Hygiene { frames: Some(HygieneFrames::new(db, file_id)) }
impl std::fmt::Debug for SyntaxContextData {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_struct("SyntaxContextData")
.field("outer_expn", &self.outer_expn)
.field("outer_transparency", &self.outer_transparency)
.field("parent", &self.parent)
.field("opaque", &self.opaque)
.field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
.finish()
}
}
impl SyntaxContextData {
pub fn root() -> Self {
SyntaxContextData {
outer_expn: None,
outer_transparency: Transparency::Opaque,
parent: SyntaxContextId::ROOT,
opaque: SyntaxContextId::ROOT,
opaque_and_semitransparent: SyntaxContextId::ROOT,
}
}
pub fn new_unhygienic() -> Hygiene {
Hygiene { frames: None }
}
// FIXME: this should just return name
pub fn name_ref_to_name(
&self,
pub fn fancy_debug(
self,
self_id: SyntaxContextId,
db: &dyn ExpandDatabase,
name_ref: ast::NameRef,
) -> Either<Name, CrateId> {
if let Some(frames) = &self.frames {
if name_ref.text() == "$crate" {
if let Some(krate) = frames.root_crate(db, name_ref.syntax()) {
return Either::Right(krate);
}
f: &mut std::fmt::Formatter<'_>,
) -> std::fmt::Result {
write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
match self.outer_expn {
Some(id) => {
write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
}
None => write!(f, "root")?,
}
Either::Left(name_ref.as_name())
}
pub fn local_inner_macros(&self, db: &dyn ExpandDatabase, path: ast::Path) -> Option<CrateId> {
let mut token = path.syntax().first_token()?.text_range();
let frames = self.frames.as_ref()?;
let mut current = &frames.0;
loop {
let (mapped, origin) = current.expansion.as_ref()?.map_ident_up(db, token)?;
if origin == Origin::Def {
return if current.local_inner {
frames.root_crate(db, path.syntax())
} else {
None
};
}
current = current.call_site.as_ref()?;
token = mapped.value;
}
write!(f, ", {:?})", self.outer_transparency)
}
}
#[derive(Clone, Debug)]
struct HygieneFrames(Arc<HygieneFrame>);
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct HygieneFrame {
expansion: Option<HygieneInfo>,
// Indicate this is a local inner macro
local_inner: bool,
krate: Option<CrateId>,
call_site: Option<Arc<HygieneFrame>>,
def_site: Option<Arc<HygieneFrame>>,
/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug)]
pub enum Transparency {
/// Identifier produced by a transparent expansion is always resolved at call-site.
/// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
Transparent,
/// Identifier produced by a semi-transparent expansion may be resolved
/// either at call-site or at definition-site.
/// If it's a local variable, label or `$crate` then it's resolved at def-site.
/// Otherwise it's resolved at call-site.
/// `macro_rules` macros behave like this, built-in macros currently behave like this too,
/// but that's an implementation detail.
SemiTransparent,
/// Identifier produced by an opaque expansion is always resolved at definition-site.
/// Def-site spans in procedural macros, identifiers from `macro` by default use this.
Opaque,
}
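A plain-Rust illustration of what `SemiTransparent` means in practice (ordinary `macro_rules!` hygiene, not code from this change): locals introduced by the expansion are resolved at the definition site and stay invisible to the caller, while paths resolve at the call site.

macro_rules! m {
    () => {
        let hidden = 1; // def-site local: hygienic, not visible to the caller
    };
}

fn f() {
    m!();
    // `hidden` is not in scope here; referring to it would fail to resolve
}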
impl HygieneFrames {
fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> Self {
// Note that this intentionally avoids the `hygiene_frame` query to avoid blowing up memory
// usage. The query is only helpful for nested `HygieneFrame`s as it avoids redundant work.
HygieneFrames(Arc::new(HygieneFrame::new(db, file_id)))
}
fn root_crate(&self, db: &dyn ExpandDatabase, node: &SyntaxNode) -> Option<CrateId> {
let mut token = node.first_token()?.text_range();
let mut result = self.0.krate;
let mut current = self.0.clone();
while let Some((mapped, origin)) =
current.expansion.as_ref().and_then(|it| it.map_ident_up(db, token))
{
result = current.krate;
let site = match origin {
Origin::Def => &current.def_site,
Origin::Call => &current.call_site,
};
let site = match site {
None => break,
Some(it) => it,
};
current = site.clone();
token = mapped.value;
}
result
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
struct HygieneInfo {
file: MacroFile,
/// The start offset of the `macro_rules!` arguments or attribute input.
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: TokenExpander,
macro_arg: Arc<(crate::tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
}
impl HygieneInfo {
fn map_ident_up(
&self,
db: &dyn ExpandDatabase,
token: TextRange,
) -> Option<(InFile<TextRange>, Origin)> {
let token_id = self.exp_map.token_by_range(token)?;
let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
let loc = db.lookup_intern_macro_call(self.file.macro_call_id);
let (token_map, tt) = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => match self.macro_arg_shift.unshift(token_id) {
Some(unshifted) => {
token_id = unshifted;
(&attr_args.1, self.attr_input_or_mac_def_start?)
}
None => (&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start())),
},
_ => match origin {
mbe::Origin::Call => {
(&self.macro_arg.1, loc.kind.arg(db)?.map(|it| it.text_range().start()))
}
mbe::Origin::Def => match (&self.macro_def, &self.attr_input_or_mac_def_start) {
(TokenExpander::DeclarativeMacro(expander), Some(tt)) => {
(&expander.def_site_token_map, *tt)
}
_ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
},
},
};
let range = token_map.first_range_by_token(token_id, SyntaxKind::IDENT)?;
Some((tt.with_value(range + tt.value), origin))
}
}
fn make_hygiene_info(
pub fn span_with_def_site_ctxt(
db: &dyn ExpandDatabase,
macro_file: MacroFile,
loc: &MacroCallLoc,
) -> HygieneInfo {
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {
ast::Macro::MacroRules(mac) => mac.token_tree()?,
ast::Macro::MacroDef(mac) => mac.body()?,
};
Some(InFile::new(id.file_id, def_tt))
});
let attr_input_or_mac_def = def.or_else(|| match loc.kind {
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let tt = ast_id
.to_node(db)
.doc_comments_and_attrs()
.nth(invoc_attr_index.ast_index())
.and_then(Either::left)?
.token_tree()?;
Some(InFile::new(ast_id.file_id, tt))
}
_ => None,
});
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Opaque)
}
let macro_def = db.macro_expander(loc.def);
let (_, exp_map) = db.parse_macro_expansion(macro_file).value;
let macro_arg = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
Arc::new((
tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: Vec::new() },
Default::default(),
Default::default(),
))
});
pub fn span_with_call_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::Transparent)
}
HygieneInfo {
file: macro_file,
attr_input_or_mac_def_start: attr_input_or_mac_def
.map(|it| it.map(|tt| tt.syntax().text_range().start())),
macro_arg_shift: mbe::Shift::new(&macro_arg.0),
macro_arg,
macro_def,
exp_map,
pub fn span_with_mixed_site_ctxt(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
) -> SpanData {
span_with_ctxt_from_mark(db, span, expn_id, Transparency::SemiTransparent)
}
fn span_with_ctxt_from_mark(
db: &dyn ExpandDatabase,
span: SpanData,
expn_id: MacroCallId,
transparency: Transparency,
) -> SpanData {
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
}
pub(super) fn apply_mark(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: MacroCallId,
transparency: Transparency,
) -> SyntaxContextId {
if transparency == Transparency::Opaque {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
call_site_ctxt.normalize_to_macro_rules(db)
};
if call_site_ctxt.is_root() {
return apply_mark_internal(db, ctxt, Some(call_id), transparency);
}
// Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
// macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
//
// In this case, the tokens from the macros 1.0 definition inherit the hygiene
// at their invocation. That is, we pretend that the macros 1.0 definition
// was defined at its invocation (i.e., inside the macros 2.0 definition)
// so that the macros 2.0 definition remains hygienic.
//
// See the example at `test/ui/hygiene/legacy_interaction.rs`.
for (call_id, transparency) in ctxt.marks(db) {
call_site_ctxt = apply_mark_internal(db, call_site_ctxt, call_id, transparency);
}
apply_mark_internal(db, call_site_ctxt, Some(call_id), transparency)
}
fn apply_mark_internal(
db: &dyn ExpandDatabase,
ctxt: SyntaxContextId,
call_id: Option<MacroCallId>,
transparency: Transparency,
) -> SyntaxContextId {
let syntax_context_data = db.lookup_intern_syntax_context(ctxt);
let mut opaque = syntax_context_data.opaque;
let mut opaque_and_semitransparent = syntax_context_data.opaque_and_semitransparent;
if transparency >= Transparency::Opaque {
let parent = opaque;
let new_opaque = SyntaxContextId::SELF_REF;
// But we can't just grab the to-be-allocated ID either, as that would not
// deduplicate things! So we need a new salsa store type here ...
opaque = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
});
}
if transparency >= Transparency::SemiTransparent {
let parent = opaque_and_semitransparent;
let new_opaque_and_semitransparent = SyntaxContextId::SELF_REF;
opaque_and_semitransparent = db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
});
}
let parent = ctxt;
db.intern_syntax_context(SyntaxContextData {
outer_expn: call_id,
outer_transparency: transparency,
parent,
opaque,
opaque_and_semitransparent,
})
}
pub trait SyntaxContextExt {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self;
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self;
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self;
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency);
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)>;
}
#[inline(always)]
fn handle_self_ref(p: SyntaxContextId, n: SyntaxContextId) -> SyntaxContextId {
match n {
SyntaxContextId::SELF_REF => p,
_ => n,
}
}
impl HygieneFrame {
pub(crate) fn new(db: &dyn ExpandDatabase, file_id: HirFileId) -> HygieneFrame {
let (info, krate, local_inner) = match file_id.macro_file() {
None => (None, None, false),
Some(macro_file) => {
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let info = Some((make_hygiene_info(db, macro_file, &loc), loc.kind.file_id()));
match loc.def.kind {
MacroDefKind::Declarative(_) => {
(info, Some(loc.def.krate), loc.def.local_inner)
}
MacroDefKind::BuiltIn(..) => (info, Some(loc.def.krate), false),
MacroDefKind::BuiltInAttr(..) => (info, None, false),
MacroDefKind::BuiltInDerive(..) => (info, None, false),
MacroDefKind::BuiltInEager(..) => (info, None, false),
MacroDefKind::ProcMacro(..) => (info, None, false),
}
}
};
let Some((info, calling_file)) = info else {
return HygieneFrame {
expansion: None,
local_inner,
krate,
call_site: None,
def_site: None,
};
};
let def_site = info.attr_input_or_mac_def_start.map(|it| db.hygiene_frame(it.file_id));
let call_site = Some(db.hygiene_frame(calling_file));
HygieneFrame { expansion: Some(info), local_inner, krate, call_site, def_site }
impl SyntaxContextExt for SyntaxContextId {
fn normalize_to_macro_rules(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque_and_semitransparent)
}
fn normalize_to_macros_2_0(self, db: &dyn ExpandDatabase) -> Self {
handle_self_ref(self, db.lookup_intern_syntax_context(self).opaque)
}
fn parent_ctxt(self, db: &dyn ExpandDatabase) -> Self {
db.lookup_intern_syntax_context(self).parent
}
fn outer_mark(self, db: &dyn ExpandDatabase) -> (Option<MacroCallId>, Transparency) {
let data = db.lookup_intern_syntax_context(self);
(data.outer_expn, data.outer_transparency)
}
fn marks(self, db: &dyn ExpandDatabase) -> Vec<(Option<MacroCallId>, Transparency)> {
let mut marks = marks_rev(self, db).collect::<Vec<_>>();
marks.reverse();
marks
}
}
// FIXME: Make this a SyntaxContextExt method once we have RPIT
pub fn marks_rev(
ctxt: SyntaxContextId,
db: &dyn ExpandDatabase,
) -> impl Iterator<Item = (Option<MacroCallId>, Transparency)> + '_ {
iter::successors(Some(ctxt), move |&mark| {
Some(mark.parent_ctxt(db)).filter(|&it| it != SyntaxContextId::ROOT)
})
.map(|ctx| ctx.outer_mark(db))
}
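A small sketch of walking a context's marks with the helpers above, assuming some `ctx: SyntaxContextId`; each step yields the macro call that applied the mark together with its transparency, outermost first:

for (call, transparency) in marks_rev(ctx, db) {
    // `call` is the expansion that introduced the mark (`None` if there
    // was no expansion), `transparency` how it affects resolution
    eprintln!("{call:?} applied as {transparency:?}");
}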

File diff suppressed because it is too large

View File

@ -7,11 +7,11 @@ use std::{
use crate::{
db::ExpandDatabase,
hygiene::Hygiene,
name::{known, Name},
hygiene::{marks_rev, SyntaxContextExt, Transparency},
name::{known, AsName, Name},
span::SpanMapRef,
};
use base_db::CrateId;
use either::Either;
use base_db::{span::SyntaxContextId, CrateId};
use smallvec::SmallVec;
use syntax::{ast, AstNode};
@ -38,6 +38,7 @@ pub enum PathKind {
Crate,
/// Absolute path (::foo)
Abs,
// FIXME: Remove this
/// `$crate` from macro expansion
DollarCrate(CrateId),
}
@ -46,9 +47,9 @@ impl ModPath {
pub fn from_src(
db: &dyn ExpandDatabase,
path: ast::Path,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
convert_path(db, None, path, hygiene)
convert_path(db, None, path, span_map)
}
pub fn from_segments(kind: PathKind, segments: impl IntoIterator<Item = Name>) -> ModPath {
@ -193,33 +194,36 @@ fn convert_path(
db: &dyn ExpandDatabase,
prefix: Option<ModPath>,
path: ast::Path,
hygiene: &Hygiene,
span_map: SpanMapRef<'_>,
) -> Option<ModPath> {
let prefix = match path.qualifier() {
Some(qual) => Some(convert_path(db, prefix, qual, hygiene)?),
Some(qual) => Some(convert_path(db, prefix, qual, span_map)?),
None => prefix,
};
let segment = path.segment()?;
let mut mod_path = match segment.kind()? {
ast::PathSegmentKind::Name(name_ref) => {
match hygiene.name_ref_to_name(db, name_ref) {
Either::Left(name) => {
// no type args in use
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name);
res
}
Either::Right(crate_id) => {
return Some(ModPath::from_segments(
PathKind::DollarCrate(crate_id),
iter::empty(),
))
if name_ref.text() == "$crate" {
if prefix.is_some() {
return None;
}
ModPath::from_kind(
resolve_crate_root(
db,
span_map.span_for_range(name_ref.syntax().text_range()).ctx,
)
.map(PathKind::DollarCrate)
.unwrap_or(PathKind::Crate),
)
} else {
let mut res = prefix.unwrap_or_else(|| {
ModPath::from_kind(
segment.coloncolon_token().map_or(PathKind::Plain, |_| PathKind::Abs),
)
});
res.segments.push(name_ref.as_name());
res
}
}
ast::PathSegmentKind::SelfTypeKw => {
@ -261,8 +265,14 @@ fn convert_path(
// We follow what it did anyway :)
if mod_path.segments.len() == 1 && mod_path.kind == PathKind::Plain {
if let Some(_macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
if let Some(crate_id) = hygiene.local_inner_macros(db, path) {
mod_path.kind = PathKind::DollarCrate(crate_id);
let syn_ctx = span_map.span_for_range(segment.syntax().text_range()).ctx;
if let Some(macro_call_id) = db.lookup_intern_syntax_context(syn_ctx).outer_expn {
if db.lookup_intern_macro_call(macro_call_id).def.local_inner {
mod_path.kind = match resolve_crate_root(db, syn_ctx) {
Some(crate_root) => PathKind::DollarCrate(crate_root),
None => PathKind::Crate,
}
}
}
}
}
@ -270,6 +280,29 @@ fn convert_path(
Some(mod_path)
}
pub fn resolve_crate_root(db: &dyn ExpandDatabase, mut ctxt: SyntaxContextId) -> Option<CrateId> {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
// as described in `SyntaxContext::apply_mark`, so we ignore prepended opaque marks.
// FIXME: This is only a guess and it doesn't work correctly for `macro_rules!`
// definitions actually produced by `macro` and `macro` definitions produced by
// `macro_rules!`, but at least such configurations are not stable yet.
ctxt = ctxt.normalize_to_macro_rules(db);
let mut iter = marks_rev(ctxt, db).peekable();
let mut result_mark = None;
// Find the last opaque mark from the end if it exists.
while let Some(&(mark, Transparency::Opaque)) = iter.peek() {
result_mark = Some(mark);
iter.next();
}
// Then find the last semi-transparent mark from the end if it exists.
while let Some((mark, Transparency::SemiTransparent)) = iter.next() {
result_mark = Some(mark);
}
result_mark.flatten().map(|call| db.lookup_intern_macro_call(call.into()).def.krate)
}
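Tying this together with `convert_path` above: a condensed sketch of how a `$crate` path segment resolves now (`name_ref` and `span_map` as in that function):

if name_ref.text() == "$crate" {
    let ctx = span_map.span_for_range(name_ref.syntax().text_range()).ctx;
    let _kind = resolve_crate_root(db, ctx)
        .map(PathKind::DollarCrate)
        // no opaque or semi-transparent mark found: degrade to plain `crate`
        .unwrap_or(PathKind::Crate);
}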
pub use crate::name as __name;
#[macro_export]

View File

@ -470,6 +470,7 @@ pub mod known {
pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
pub const DOLLAR_CRATE: super::Name = super::Name::new_inline("$crate");
#[macro_export]
macro_rules! name {

View File

@ -1,6 +1,6 @@
//! Proc Macro Expander stub
use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind};
use stdx::never;
use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult};
@ -33,11 +33,15 @@ impl ProcMacroExpander {
calling_crate: CrateId,
tt: &tt::Subtree,
attr_arg: Option<&tt::Subtree>,
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> ExpandResult<tt::Subtree> {
match self.proc_macro_id {
ProcMacroId(DUMMY_ID) => {
ExpandResult::new(tt::Subtree::empty(), ExpandError::UnresolvedProcMacro(def_crate))
}
ProcMacroId(DUMMY_ID) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::UnresolvedProcMacro(def_crate),
),
ProcMacroId(id) => {
let proc_macros = db.proc_macros();
let proc_macros = match proc_macros.get(&def_crate) {
@ -45,7 +49,7 @@ impl ProcMacroExpander {
Some(Err(_)) | None => {
never!("Non-dummy expander even though there are no proc macros");
return ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@ -59,7 +63,7 @@ impl ProcMacroExpander {
id
);
return ExpandResult::new(
tt::Subtree::empty(),
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other("Internal error"),
);
}
@ -68,7 +72,8 @@ impl ProcMacroExpander {
let krate_graph = db.crate_graph();
// Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env;
match proc_macro.expander.expand(tt, attr_arg, env) {
match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site)
{
Ok(t) => ExpandResult::ok(t),
Err(err) => match err {
// Don't discard the item in case something unexpected happened while expanding attributes
@ -78,9 +83,10 @@ impl ProcMacroExpander {
ExpandResult { value: tt.clone(), err: Some(ExpandError::other(text)) }
}
ProcMacroExpansionError::System(text)
| ProcMacroExpansionError::Panic(text) => {
ExpandResult::new(tt::Subtree::empty(), ExpandError::other(text))
}
| ProcMacroExpansionError::Panic(text) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
ExpandError::other(text),
),
},
}
}
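Each of the error paths above now builds the same spanned empty subtree instead of a bare `tt::Subtree::empty()`. A hedged sketch of that shape as a hypothetical local helper (not part of this diff, shown only to name the pattern):

// Hypothetical helper mirroring the error paths above: an empty token
// subtree whose invisible delimiters both carry the call-site span, so
// even a failed expansion keeps usable span information.
fn empty_subtree_at(call_site: SpanData) -> tt::Subtree {
    tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site })
}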

View File

@ -1,5 +1,7 @@
//! A simplified version of the quote crate's quasi-quote macro.
use base_db::span::SpanData;
// A helper quote macro
// FIXME:
// 1. Not all puncts are handled
@ -8,109 +10,109 @@
#[doc(hidden)]
#[macro_export]
macro_rules! __quote {
() => {
($span:ident) => {
Vec::<crate::tt::TokenTree>::new()
};
( @SUBTREE $delim:ident $($tt:tt)* ) => {
( @SUBTREE($span:ident) $delim:ident $($tt:tt)* ) => {
{
let children = $crate::__quote!($($tt)*);
let children = $crate::__quote!($span $($tt)*);
crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::$delim,
open: crate::tt::TokenId::unspecified(),
close: crate::tt::TokenId::unspecified(),
open: $span,
close: $span,
},
token_trees: $crate::quote::IntoTt::to_tokens(children),
}
}
};
( @PUNCT $first:literal ) => {
( @PUNCT($span:ident) $first:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Alone,
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into()
]
}
};
( @PUNCT $first:literal, $sec:literal ) => {
( @PUNCT($span:ident) $first:literal, $sec:literal ) => {
{
vec![
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $first,
spacing: crate::tt::Spacing::Joint,
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into(),
crate::tt::Leaf::Punct(crate::tt::Punct {
char: $sec,
spacing: crate::tt::Spacing::Alone,
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into()
]
}
};
// hash variable
( # $first:ident $($tail:tt)* ) => {
($span:ident # $first:ident $($tail:tt)* ) => {
{
let token = $crate::quote::ToTokenTree::to_token($first);
let token = $crate::quote::ToTokenTree::to_token($first, $span);
let mut tokens = vec![token.into()];
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
( ## $first:ident $($tail:tt)* ) => {
($span:ident ## $first:ident $($tail:tt)* ) => {
{
let mut tokens = $first.into_iter().map($crate::quote::ToTokenTree::to_token).collect::<Vec<crate::tt::TokenTree>>();
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
let mut tokens = $first.into_iter().map(|it| $crate::quote::ToTokenTree::to_token(it, $span)).collect::<Vec<crate::tt::TokenTree>>();
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
}
};
// Brace
( { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE Brace $($tt)*) };
($span:ident { $($tt:tt)* } ) => { $crate::__quote!(@SUBTREE($span) Brace $($tt)*) };
// Bracket
( [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE Bracket $($tt)*) };
($span:ident [ $($tt:tt)* ] ) => { $crate::__quote!(@SUBTREE($span) Bracket $($tt)*) };
// Parenthesis
( ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE Parenthesis $($tt)*) };
($span:ident ( $($tt:tt)* ) ) => { $crate::__quote!(@SUBTREE($span) Parenthesis $($tt)*) };
// Literal
( $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt).into()] };
($span:ident $tt:literal ) => { vec![$crate::quote::ToTokenTree::to_token($tt, $span).into()] };
// Ident
( $tt:ident ) => {
($span:ident $tt:ident ) => {
vec![ {
crate::tt::Leaf::Ident(crate::tt::Ident {
text: stringify!($tt).into(),
span: crate::tt::TokenId::unspecified(),
span: $span,
}).into()
}]
};
// Puncts
// FIXME: Not all puncts are handled
( -> ) => {$crate::__quote!(@PUNCT '-', '>')};
( & ) => {$crate::__quote!(@PUNCT '&')};
( , ) => {$crate::__quote!(@PUNCT ',')};
( : ) => {$crate::__quote!(@PUNCT ':')};
( ; ) => {$crate::__quote!(@PUNCT ';')};
( :: ) => {$crate::__quote!(@PUNCT ':', ':')};
( . ) => {$crate::__quote!(@PUNCT '.')};
( < ) => {$crate::__quote!(@PUNCT '<')};
( > ) => {$crate::__quote!(@PUNCT '>')};
( ! ) => {$crate::__quote!(@PUNCT '!')};
($span:ident -> ) => {$crate::__quote!(@PUNCT($span) '-', '>')};
($span:ident & ) => {$crate::__quote!(@PUNCT($span) '&')};
($span:ident , ) => {$crate::__quote!(@PUNCT($span) ',')};
($span:ident : ) => {$crate::__quote!(@PUNCT($span) ':')};
($span:ident ; ) => {$crate::__quote!(@PUNCT($span) ';')};
($span:ident :: ) => {$crate::__quote!(@PUNCT($span) ':', ':')};
($span:ident . ) => {$crate::__quote!(@PUNCT($span) '.')};
($span:ident < ) => {$crate::__quote!(@PUNCT($span) '<')};
($span:ident > ) => {$crate::__quote!(@PUNCT($span) '>')};
($span:ident ! ) => {$crate::__quote!(@PUNCT($span) '!')};
( $first:tt $($tail:tt)+ ) => {
($span:ident $first:tt $($tail:tt)+ ) => {
{
let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($first));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($($tail)*));
let mut tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $first ));
let mut tail_tokens = $crate::quote::IntoTt::to_tokens($crate::__quote!($span $($tail)*));
tokens.append(&mut tail_tokens);
tokens
@ -122,19 +124,22 @@ macro_rules! __quote {
/// It should probably be implemented in proc-macro.
#[macro_export]
macro_rules! quote {
( $($tt:tt)* ) => {
$crate::quote::IntoTt::to_subtree($crate::__quote!($($tt)*))
($span:ident=> $($tt:tt)* ) => {
$crate::quote::IntoTt::to_subtree($crate::__quote!($span $($tt)*), $span)
}
}
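With the span threaded through, every `quote!` invocation now names the span to stamp on all produced tokens up front. A usage sketch, assuming some `span: SpanData` is in scope (for example the call-site span of the macro being expanded):

// Every token in the resulting subtree carries `span`.
let subtree = quote! {span =>
    fn answer() -> i32 { 42 }
};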
pub(crate) trait IntoTt {
fn to_subtree(self) -> crate::tt::Subtree;
fn to_subtree(self, span: SpanData) -> crate::tt::Subtree;
fn to_tokens(self) -> Vec<crate::tt::TokenTree>;
}
impl IntoTt for Vec<crate::tt::TokenTree> {
fn to_subtree(self) -> crate::tt::Subtree {
crate::tt::Subtree { delimiter: crate::tt::Delimiter::unspecified(), token_trees: self }
fn to_subtree(self, span: SpanData) -> crate::tt::Subtree {
crate::tt::Subtree {
delimiter: crate::tt::Delimiter::invisible_spanned(span),
token_trees: self,
}
}
fn to_tokens(self) -> Vec<crate::tt::TokenTree> {
@ -143,7 +148,7 @@ impl IntoTt for Vec<crate::tt::TokenTree> {
}
impl IntoTt for crate::tt::Subtree {
fn to_subtree(self) -> crate::tt::Subtree {
fn to_subtree(self, _: SpanData) -> crate::tt::Subtree {
self
}
@ -153,39 +158,39 @@ impl IntoTt for crate::tt::Subtree {
}
pub(crate) trait ToTokenTree {
fn to_token(self) -> crate::tt::TokenTree;
fn to_token(self, span: SpanData) -> crate::tt::TokenTree;
}
impl ToTokenTree for crate::tt::TokenTree {
fn to_token(self) -> crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self
}
}
impl ToTokenTree for &crate::tt::TokenTree {
fn to_token(self) -> crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.clone()
}
}
impl ToTokenTree for crate::tt::Subtree {
fn to_token(self) -> crate::tt::TokenTree {
fn to_token(self, _: SpanData) -> crate::tt::TokenTree {
self.into()
}
}
macro_rules! impl_to_to_tokentrees {
($($ty:ty => $this:ident $im:block);*) => {
($($span:ident: $ty:ty => $this:ident $im:block);*) => {
$(
impl ToTokenTree for $ty {
fn to_token($this) -> crate::tt::TokenTree {
fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.into();
leaf.into()
}
}
impl ToTokenTree for &$ty {
fn to_token($this) -> crate::tt::TokenTree {
fn to_token($this, $span: SpanData) -> crate::tt::TokenTree {
let leaf: crate::tt::Leaf = $im.clone().into();
leaf.into()
}
@ -195,60 +200,76 @@ macro_rules! impl_to_to_tokentrees {
}
impl_to_to_tokentrees! {
u32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
usize => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
i32 => self { crate::tt::Literal{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
bool => self { crate::tt::Ident{text: self.to_string().into(), span: crate::tt::TokenId::unspecified()} };
crate::tt::Leaf => self { self };
crate::tt::Literal => self { self };
crate::tt::Ident => self { self };
crate::tt::Punct => self { self };
&str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}};
String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span: crate::tt::TokenId::unspecified()}}
span: u32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
span: usize => self { crate::tt::Literal{text: self.to_string().into(), span} };
span: i32 => self { crate::tt::Literal{text: self.to_string().into(), span} };
span: bool => self { crate::tt::Ident{text: self.to_string().into(), span} };
_span: crate::tt::Leaf => self { self };
_span: crate::tt::Literal => self { self };
_span: crate::tt::Ident => self { self };
_span: crate::tt::Punct => self { self };
span: &str => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}};
span: String => self { crate::tt::Literal{text: format!("\"{}\"", self.escape_default()).into(), span}}
}
#[cfg(test)]
mod tests {
use crate::tt;
use base_db::{
span::{SpanAnchor, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use expect_test::expect;
use syntax::{TextRange, TextSize};
const DUMMY: tt::SpanData = tt::SpanData {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,
};
#[test]
fn test_quote_delimiters() {
assert_eq!(quote!({}).to_string(), "{}");
assert_eq!(quote!(()).to_string(), "()");
assert_eq!(quote!([]).to_string(), "[]");
assert_eq!(quote!(DUMMY =>{}).to_string(), "{}");
assert_eq!(quote!(DUMMY =>()).to_string(), "()");
assert_eq!(quote!(DUMMY =>[]).to_string(), "[]");
}
#[test]
fn test_quote_idents() {
assert_eq!(quote!(32).to_string(), "32");
assert_eq!(quote!(struct).to_string(), "struct");
assert_eq!(quote!(DUMMY =>32).to_string(), "32");
assert_eq!(quote!(DUMMY =>struct).to_string(), "struct");
}
#[test]
fn test_quote_hash_simple_literal() {
let a = 20;
assert_eq!(quote!(#a).to_string(), "20");
assert_eq!(quote!(DUMMY =>#a).to_string(), "20");
let s: String = "hello".into();
assert_eq!(quote!(#s).to_string(), "\"hello\"");
assert_eq!(quote!(DUMMY =>#s).to_string(), "\"hello\"");
}
fn mk_ident(name: &str) -> crate::tt::Ident {
crate::tt::Ident { text: name.into(), span: crate::tt::TokenId::unspecified() }
crate::tt::Ident { text: name.into(), span: DUMMY }
}
#[test]
fn test_quote_hash_token_tree() {
let a = mk_ident("hello");
let quoted = quote!(#a);
let quoted = quote!(DUMMY =>#a);
assert_eq!(quoted.to_string(), "hello");
let t = format!("{quoted:?}");
assert_eq!(t, "SUBTREE $$ 4294967295 4294967295\n IDENT hello 4294967295");
expect![[r#"
SUBTREE $$ SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) } SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }
IDENT hello SpanData { range: 0..0, anchor: SpanAnchor(FileId(4294967295), 0), ctx: SyntaxContextId(0) }"#]].assert_eq(&t);
}
#[test]
fn test_quote_simple_derive_copy() {
let name = mk_ident("Foo");
let quoted = quote! {
let quoted = quote! {DUMMY =>
impl Clone for #name {
fn clone(&self) -> Self {
Self {}
@ -268,18 +289,19 @@ mod tests {
// }
let struct_name = mk_ident("Foo");
let fields = [mk_ident("name"), mk_ident("id")];
let fields = fields.iter().flat_map(|it| quote!(#it: self.#it.clone(), ).token_trees);
let fields =
fields.iter().flat_map(|it| quote!(DUMMY =>#it: self.#it.clone(), ).token_trees);
let list = crate::tt::Subtree {
delimiter: crate::tt::Delimiter {
kind: crate::tt::DelimiterKind::Brace,
open: crate::tt::TokenId::unspecified(),
close: crate::tt::TokenId::unspecified(),
open: DUMMY,
close: DUMMY,
},
token_trees: fields.collect(),
};
let quoted = quote! {
let quoted = quote! {DUMMY =>
impl Clone for #struct_name {
fn clone(&self) -> Self {
Self #list

View File

@ -0,0 +1,111 @@
//! Spanmaps allow turning absolute ranges into relative ranges for incrementality purposes as well
//! as associating spans with text ranges in a particular file.
use base_db::{
span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId, ROOT_ERASED_FILE_AST_ID},
FileId,
};
use syntax::{ast::HasModuleItem, AstNode, TextRange, TextSize};
use triomphe::Arc;
use crate::db::ExpandDatabase;
pub type ExpansionSpanMap = mbe::SpanMap<SpanData>;
/// Spanmap for a macro file or a real file
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum SpanMap {
/// Spanmap for a macro file
ExpansionSpanMap(Arc<ExpansionSpanMap>),
/// Spanmap for a real file
RealSpanMap(Arc<RealSpanMap>),
}
#[derive(Copy, Clone)]
pub enum SpanMapRef<'a> {
/// Spanmap for a macro file
ExpansionSpanMap(&'a ExpansionSpanMap),
/// Spanmap for a real file
RealSpanMap(&'a RealSpanMap),
}
impl mbe::SpanMapper<SpanData> for SpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for SpanMapRef<'_> {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl mbe::SpanMapper<SpanData> for RealSpanMap {
fn span_for(&self, range: TextRange) -> SpanData {
self.span_for_range(range)
}
}
impl SpanMap {
pub fn span_for_range(&self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
pub fn as_ref(&self) -> SpanMapRef<'_> {
match self {
Self::ExpansionSpanMap(span_map) => SpanMapRef::ExpansionSpanMap(span_map),
Self::RealSpanMap(span_map) => SpanMapRef::RealSpanMap(span_map),
}
}
}
impl SpanMapRef<'_> {
pub fn span_for_range(self, range: TextRange) -> SpanData {
match self {
Self::ExpansionSpanMap(span_map) => span_map.span_at(range.start()),
Self::RealSpanMap(span_map) => span_map.span_for_range(range),
}
}
}
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct RealSpanMap {
file_id: FileId,
/// Invariant: Sorted vec over TextSize
// FIXME: SortedVec<(TextSize, ErasedFileAstId)>?
pairs: Box<[(TextSize, ErasedFileAstId)]>,
}
impl RealSpanMap {
/// Creates a real file span map that returns absolute ranges (i.e., ranges relative to the root ast id).
pub fn absolute(file_id: FileId) -> Self {
RealSpanMap { file_id, pairs: Box::from([(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)]) }
}
pub fn from_file(db: &dyn ExpandDatabase, file_id: FileId) -> Self {
let mut pairs = vec![(TextSize::new(0), ROOT_ERASED_FILE_AST_ID)];
let ast_id_map = db.ast_id_map(file_id.into());
pairs.extend(
db.parse(file_id)
.tree()
.items()
.map(|item| (item.syntax().text_range().start(), ast_id_map.ast_id(&item).erase())),
);
RealSpanMap { file_id, pairs: pairs.into_boxed_slice() }
}
pub fn span_for_range(&self, range: TextRange) -> SpanData {
let start = range.start();
let idx = self
.pairs
.binary_search_by(|&(it, _)| it.cmp(&start).then(std::cmp::Ordering::Less))
.unwrap_err();
let (offset, ast_id) = self.pairs[idx - 1];
SpanData {
range: range - offset,
anchor: SpanAnchor { file_id: self.file_id, ast_id },
ctx: SyntaxContextId::ROOT,
}
}
}
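A self-contained sketch of the anchor lookup above, with toy offsets standing in for real `TextSize` and `ErasedFileAstId` values: the comparator never returns `Equal`, so `binary_search_by` always fails with the insertion point, and the pair just before it is the innermost anchor at or before the range start.

fn anchor_for(pairs: &[(u32, &'static str)], start: u32) -> (u32, &'static str) {
    // Map Equal to Less so the search always hands back the insertion
    // index via Err; the entry before it is the anchor.
    let idx = pairs
        .binary_search_by(|&(offset, _)| offset.cmp(&start).then(std::cmp::Ordering::Less))
        .unwrap_err();
    pairs[idx - 1]
}

fn main() {
    let pairs = [(0, "root"), (10, "item_a"), (50, "item_b")];
    let (offset, ast_id) = anchor_for(&pairs, 55);
    assert_eq!((offset, ast_id), (50, "item_b"));
    // A span covering 55..60 is then stored as 5..10 relative to item_b.
    assert_eq!(55 - offset, 5);
}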

View File

@ -24,7 +24,7 @@ use hir_def::{
};
use hir_expand::{
name::{AsName, Name},
HirFileId,
HirFileId, HirFileIdExt,
};
use stdx::{always, never};
use syntax::{

View File

@ -23,7 +23,7 @@ use hir_def::{
EnumVariantId, HasModule, ItemContainerId, LocalFieldId, Lookup, ModuleDefId, ModuleId,
TraitId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use hir_expand::name::Name;
use intern::{Internable, Interned};
use itertools::Itertools;
use la_arena::ArenaMap;
@ -1732,13 +1732,13 @@ impl HirDisplay for TypeRef {
f.write_joined(bounds, " + ")?;
}
TypeRef::Macro(macro_call) => {
let macro_call = macro_call.to_node(f.db.upcast());
let ctx = hir_def::lower::LowerCtx::with_hygiene(
let ctx = hir_def::lower::LowerCtx::with_span_map(
f.db.upcast(),
&Hygiene::new_unhygienic(),
f.db.span_map(macro_call.file_id),
);
let macro_call = macro_call.to_node(f.db.upcast());
match macro_call.path() {
Some(path) => match Path::from_src(path, &ctx) {
Some(path) => match Path::from_src(&ctx, path) {
Some(path) => path.hir_fmt(f)?,
None => write!(f, "{{macro}}")?,
},

View File

@ -390,6 +390,7 @@ impl InferenceContext<'_> {
}
}
#[derive(Debug)]
enum ValuePathResolution {
// It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
// conversion between them + `unwrap()`.

View File

@ -407,11 +407,7 @@ impl<'a> TyLoweringContext<'a> {
drop(expander);
let ty = self.lower_ty(&type_ref);
self.expander
.borrow_mut()
.as_mut()
.unwrap()
.exit(self.db.upcast(), mark);
self.expander.borrow_mut().as_mut().unwrap().exit(mark);
Some(ty)
}
_ => {

View File

@ -21,7 +21,7 @@ use hir_def::{
AdtId, ConstId, DefWithBodyId, EnumVariantId, FunctionId, HasModule, ItemContainerId, Lookup,
StaticId, VariantId,
};
use hir_expand::{mod_path::ModPath, InFile};
use hir_expand::{mod_path::ModPath, HirFileIdExt, InFile};
use intern::Interned;
use la_arena::ArenaMap;
use rustc_hash::{FxHashMap, FxHashSet};

View File

@ -30,6 +30,7 @@ pub(crate) struct TestDB {
impl Default for TestDB {
fn default() -> Self {
let mut this = Self { storage: Default::default(), events: Default::default() };
this.setup_syntax_context_root();
this.set_expand_proc_attr_macros_with_durability(true, Durability::HIGH);
this
}

View File

@ -1,5 +1,6 @@
//! Attributes & documentation for hir types.
use base_db::FileId;
use hir_def::{
attr::AttrsWithOwner,
item_scope::ItemInNs,
@ -8,7 +9,10 @@ use hir_def::{
resolver::{HasResolver, Resolver, TypeNs},
AssocItemId, AttrDefId, ModuleDefId,
};
use hir_expand::{hygiene::Hygiene, name::Name};
use hir_expand::{
name::Name,
span::{RealSpanMap, SpanMapRef},
};
use hir_ty::db::HirDatabase;
use syntax::{ast, AstNode};
@ -234,7 +238,11 @@ fn modpath_from_str(db: &dyn HirDatabase, link: &str) -> Option<ModPath> {
if ast_path.syntax().text() != link {
return None;
}
ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())
ModPath::from_src(
db.upcast(),
ast_path,
SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId(0))),
)
};
let full = try_get_modpath(link);

View File

@ -3,10 +3,27 @@
//! we didn't do that.
//!
//! But we need this for at least LRU caching at the query level.
pub use hir_def::db::*;
pub use hir_def::db::{
AttrsQuery, BlockDefMapQuery, BlockItemTreeQueryQuery, BodyQuery, BodyWithSourceMapQuery,
ConstDataQuery, ConstVisibilityQuery, CrateDefMapQueryQuery, CrateLangItemsQuery,
CrateSupportsNoStdQuery, DefDatabase, DefDatabaseStorage, EnumDataQuery,
EnumDataWithDiagnosticsQuery, ExprScopesQuery, ExternCrateDeclDataQuery,
FieldVisibilitiesQuery, FieldsAttrsQuery, FieldsAttrsSourceMapQuery, FileItemTreeQuery,
FunctionDataQuery, FunctionVisibilityQuery, GenericParamsQuery, ImplDataQuery,
ImplDataWithDiagnosticsQuery, ImportMapQuery, InternAnonymousConstQuery, InternBlockQuery,
InternConstQuery, InternDatabase, InternDatabaseStorage, InternEnumQuery,
InternExternBlockQuery, InternExternCrateQuery, InternFunctionQuery, InternImplQuery,
InternInTypeConstQuery, InternMacro2Query, InternMacroRulesQuery, InternProcMacroQuery,
InternStaticQuery, InternStructQuery, InternTraitAliasQuery, InternTraitQuery,
InternTypeAliasQuery, InternUnionQuery, InternUseQuery, LangAttrQuery, LangItemQuery,
Macro2DataQuery, MacroRulesDataQuery, ProcMacroDataQuery, StaticDataQuery, StructDataQuery,
StructDataWithDiagnosticsQuery, TraitAliasDataQuery, TraitDataQuery,
TraitDataWithDiagnosticsQuery, TypeAliasDataQuery, UnionDataQuery,
UnionDataWithDiagnosticsQuery, VariantsAttrsQuery, VariantsAttrsSourceMapQuery,
};
pub use hir_expand::db::{
AstIdMapQuery, DeclMacroExpanderQuery, ExpandDatabase, ExpandDatabaseStorage,
ExpandProcMacroQuery, HygieneFrameQuery, InternMacroCallQuery, MacroArgNodeQuery,
MacroExpandQuery, ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery,
ExpandProcMacroQuery, InternMacroCallQuery, InternSyntaxContextQuery, MacroArgQuery,
ParseMacroExpansionErrorQuery, ParseMacroExpansionQuery, RealSpanMapQuery,
};
pub use hir_ty::db::*;

View File

@ -124,7 +124,7 @@ pub use {
hir_expand::{
attrs::{Attr, AttrId},
name::{known, Name},
ExpandResult, HirFileId, InFile, MacroFile, Origin,
tt, ExpandResult, HirFileId, HirFileIdExt, InFile, InMacroFile, InRealFile, MacroFileId,
},
hir_ty::{
display::{ClosureStyle, HirDisplay, HirDisplayError, HirWrite},
@ -140,7 +140,10 @@ pub use {
#[allow(unused)]
use {
hir_def::path::Path,
hir_expand::{hygiene::Hygiene, name::AsName},
hir_expand::{
name::AsName,
span::{ExpansionSpanMap, RealSpanMap, SpanMap, SpanMapRef},
},
};
/// hir::Crate describes a single crate. It's the main interface with which
@ -3490,9 +3493,34 @@ impl Impl {
self.id.lookup(db.upcast()).container.into()
}
pub fn as_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
pub fn as_builtin_derive_path(self, db: &dyn HirDatabase) -> Option<InMacroFile<ast::Path>> {
let src = self.source(db)?;
src.file_id.as_builtin_derive_attr_node(db.upcast())
let macro_file = src.file_id.macro_file()?;
let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
let (derive_attr, derive_index) = match loc.kind {
MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
let module_id = self.id.lookup(db.upcast()).container;
(
db.crate_def_map(module_id.krate())[module_id.local_id]
.scope
.derive_macro_invoc(ast_id, derive_attr_index)?,
derive_index,
)
}
_ => return None,
};
let file_id = MacroFileId { macro_call_id: derive_attr };
let path = db
.parse_macro_expansion(file_id)
.value
.0
.syntax_node()
.children()
.nth(derive_index as usize)
.and_then(<ast::Attr as AstNode>::cast)
.and_then(|it| it.path())?;
Some(InMacroFile { file_id, value: path })
}
pub fn check_orphan_rules(self, db: &dyn HirDatabase) -> bool {

View File

@ -15,10 +15,14 @@ use hir_def::{
type_ref::Mutability,
AsMacroCall, DefWithBodyId, FieldId, FunctionId, MacroId, TraitId, VariantId,
};
use hir_expand::{db::ExpandDatabase, name::AsName, ExpansionInfo, MacroCallId};
use hir_expand::{
db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, HirFileIdExt, MacroCallId,
MacroFileId, MacroFileIdExt,
};
use itertools::Itertools;
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::{smallvec, SmallVec};
use stdx::TupleExt;
use syntax::{
algo::skip_trivia_token,
ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
@ -114,11 +118,11 @@ pub struct Semantics<'db, DB> {
pub struct SemanticsImpl<'db> {
pub db: &'db dyn HirDatabase,
s2d_cache: RefCell<SourceToDefCache>,
expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
// Rootnode to HirFileId cache
expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
/// Root node to HirFileId cache
cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
// MacroCall to its expansion's HirFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
/// MacroCall to its expansion's MacroFileId cache
macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}
impl<DB> fmt::Debug for Semantics<'_, DB> {
@ -255,7 +259,7 @@ impl<'db> SemanticsImpl<'db> {
pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
let sa = self.analyze_no_infer(macro_call.syntax())?;
let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
let node = self.parse_or_expand(file_id);
let node = self.parse_or_expand(file_id.into());
Some(node)
}
@ -524,52 +528,54 @@ impl<'db> SemanticsImpl<'db> {
res
}
// FIXME: should only take real file inputs for simplicity
fn descend_into_macros_impl(
&self,
token: SyntaxToken,
// FIXME: We might want this to be Option<TextSize> to be able to opt out of subrange
// mapping, specifically for node downmapping
offset: TextSize,
_offset: TextSize,
f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
) {
// FIXME: Clean this up
let _p = profile::span("descend_into_macros");
let relative_token_offset = token.text_range().start().checked_sub(offset);
let parent = match token.parent() {
let sa = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) {
Some(it) => it,
None => return,
};
let sa = match self.analyze_no_infer(&parent) {
Some(it) => it,
None => return,
};
let def_map = sa.resolver.def_map();
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut cache = self.expansion_info_cache.borrow_mut();
let mut mcache = self.macro_call_cache.borrow_mut();
let span = match sa.file_id.repr() {
base_db::span::HirFileIdRepr::FileId(file_id) => {
self.db.real_span_map(file_id).span_for_range(token.text_range())
}
base_db::span::HirFileIdRepr::MacroFile(macro_file) => cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
.exp_map
.span_at(token.text_range().start()),
};
let def_map = sa.resolver.def_map();
let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
let mut process_expansion_for_token =
|stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
|stack: &mut SmallVec<_>, macro_file, _token: InFile<&_>| {
let expansion_info = cache
.entry(macro_file)
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
.as_ref()?;
.or_insert_with(|| macro_file.expansion_info(self.db.upcast()));
{
let InFile { file_id, value } = expansion_info.expanded();
self.cache(value, file_id);
}
let mapped_tokens = expansion_info.map_token_down(
self.db.upcast(),
item,
token,
relative_token_offset,
)?;
let mapped_tokens = expansion_info.map_range_down(span, None)?;
let len = stack.len();
// requeue the tokens we got from mapping our current token down
stack.extend(mapped_tokens);
stack.extend(mapped_tokens.map(Into::into));
// if the length changed we have found a mapping for the token
(stack.len() != len).then_some(())
};
@ -578,26 +584,21 @@ impl<'db> SemanticsImpl<'db> {
// either due to not being in a macro-call or because it's unused, push it into the result vec,
// otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
while let Some(token) = stack.pop() {
self.db.unwind_if_cancelled();
let was_not_remapped = (|| {
// First expand into attribute invocations
let containing_attribute_macro_call = self.with_ctx(|ctx| {
token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
if item.attrs().next().is_none() {
// Don't force populate the dyn cache for items that don't have an attribute anyways
return None;
}
Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
Some(ctx.item_to_macro_call(token.with_value(item.clone()))?)
})
});
if let Some((call_id, item)) = containing_attribute_macro_call {
let file_id = call_id.as_file();
return process_expansion_for_token(
&mut stack,
file_id,
Some(item),
token.as_ref(),
);
if let Some(call_id) = containing_attribute_macro_call {
let file_id = call_id.as_macro_file();
return process_expansion_for_token(&mut stack, file_id, token.as_ref());
}
// Then check for token trees, that means we are either in a function-like macro or
@ -613,7 +614,8 @@ impl<'db> SemanticsImpl<'db> {
}
if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
let mcall = token.with_value(macro_call);
let mcall: hir_expand::files::InFileWrapper<HirFileId, ast::MacroCall> =
token.with_value(macro_call);
let file_id = match mcache.get(&mcall) {
Some(&it) => it,
None => {
@ -622,7 +624,7 @@ impl<'db> SemanticsImpl<'db> {
it
}
};
process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
process_expansion_for_token(&mut stack, file_id, token.as_ref())
} else if let Some(meta) = ast::Meta::cast(parent) {
// attribute we failed to expand earlier; this might be a derive invocation
// or derive helper attribute
@ -643,11 +645,10 @@ impl<'db> SemanticsImpl<'db> {
match derive_call {
Some(call_id) => {
// resolved to a derive
let file_id = call_id.as_file();
let file_id = call_id.as_macro_file();
return process_expansion_for_token(
&mut stack,
file_id,
Some(adt.into()),
token.as_ref(),
);
}
@ -679,13 +680,11 @@ impl<'db> SemanticsImpl<'db> {
let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
let helpers =
def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
let item = Some(adt.into());
let mut res = None;
for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
res = res.or(process_expansion_for_token(
&mut stack,
derive.as_file(),
item.clone(),
derive.as_macro_file(),
token.as_ref(),
));
}
@ -737,14 +736,16 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
let node = self.find_file(node);
node.original_file_range_opt(self.db.upcast())
.filter(|(_, ctx)| ctx.is_root())
.map(TupleExt::head)
}
/// Attempts to map the node out of macro expanded files.
/// This only works for attribute expansions, as other ones do not have nodes as input.
pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
|InFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id);
|InRealFile { file_id, value }| {
self.cache(find_root(value.syntax()), file_id.into());
value
},
)
@ -755,8 +756,8 @@ impl<'db> SemanticsImpl<'db> {
pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
let InFile { file_id, .. } = self.find_file(node);
InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
|InFile { file_id, value }| {
self.cache(find_root(&value), file_id);
|InRealFile { file_id, value }| {
self.cache(find_root(&value), file_id.into());
value
},
)
@ -851,9 +852,9 @@ impl<'db> SemanticsImpl<'db> {
pub fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
let analyze = self.analyze(path.syntax())?;
let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
let ctx = LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
let hir_path = Path::from_src(path.clone(), &ctx)?;
let span_map = self.db.span_map(analyze.file_id);
let ctx = LowerCtx::with_span_map(self.db.upcast(), span_map);
let hir_path = Path::from_src(&ctx, path.clone())?;
match analyze.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), &hir_path)? {
TypeNs::TraitId(id) => Some(Trait { id }),
_ => None,
@ -1037,7 +1038,7 @@ impl<'db> SemanticsImpl<'db> {
fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
let mut cache = self.s2d_cache.borrow_mut();
let mut ctx = SourceToDefCtx { db: self.db, cache: &mut cache };
let mut ctx = SourceToDefCtx { db: self.db, dynmap_cache: &mut cache };
f(&mut ctx)
}
@ -1451,7 +1452,7 @@ impl SemanticsScope<'_> {
/// necessarily a heuristic, as it doesn't take hygiene into account.
pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
let ctx = LowerCtx::with_file_id(self.db.upcast(), self.file_id);
let path = Path::from_src(path.clone(), &ctx)?;
let path = Path::from_src(&ctx, path.clone())?;
resolve_hir_path(self.db, &self.resolver, &path)
}
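The token-descent rework earlier in this file drops per-token ID tracking in favor of span lookup: compute the token's span via its file's span map, then ask each expansion for all output tokens produced from that span. A toy, self-contained model of that contract (all types are simplified stand-ins):

struct Expansion {
    // index = output token position, value = span it was produced from
    output_spans: Vec<u32>,
}

impl Expansion {
    // Descending a token means collecting every output token carrying its span.
    fn map_span_down(&self, span: u32) -> impl Iterator<Item = usize> + '_ {
        self.output_spans
            .iter()
            .enumerate()
            .filter(move |&(_, &s)| s == span)
            .map(|(i, _)| i)
    }
}

fn main() {
    let exp = Expansion { output_spans: vec![7, 3, 7, 9] };
    // The input token with span 7 maps to output tokens 0 and 2.
    assert_eq!(exp.map_span_down(7).collect::<Vec<_>>(), vec![0, 2]);
}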

View File

@ -97,7 +97,7 @@ use hir_def::{
FunctionId, GenericDefId, GenericParamId, ImplId, LifetimeParamId, MacroId, ModuleId, StaticId,
StructId, TraitAliasId, TraitId, TypeAliasId, TypeParamId, UnionId, UseId, VariantId,
};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, MacroCallId};
use hir_expand::{attrs::AttrId, name::AsName, HirFileId, HirFileIdExt, MacroCallId};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use stdx::{impl_from, never};
@ -112,7 +112,7 @@ pub(super) type SourceToDefCache = FxHashMap<(ChildContainer, HirFileId), DynMap
pub(super) struct SourceToDefCtx<'a, 'b> {
pub(super) db: &'b dyn HirDatabase,
pub(super) cache: &'a mut SourceToDefCache,
pub(super) dynmap_cache: &'a mut SourceToDefCache,
}
impl SourceToDefCtx<'_, '_> {
@ -300,7 +300,7 @@ impl SourceToDefCtx<'_, '_> {
fn cache_for(&mut self, container: ChildContainer, file_id: HirFileId) -> &DynMap {
let db = self.db;
self.cache
self.dynmap_cache
.entry((container, file_id))
.or_insert_with(|| container.child_by_source(db, file_id))
}

View File

@ -26,11 +26,10 @@ use hir_def::{
};
use hir_expand::{
builtin_fn_macro::BuiltinFnLikeExpander,
hygiene::Hygiene,
mod_path::path,
name,
name::{AsName, Name},
HirFileId, InFile,
HirFileId, HirFileIdExt, InFile, MacroFileId, MacroFileIdExt,
};
use hir_ty::{
diagnostics::{
@ -484,7 +483,7 @@ impl SourceAnalyzer {
macro_call: InFile<&ast::MacroCall>,
) -> Option<Macro> {
let ctx = LowerCtx::with_file_id(db.upcast(), macro_call.file_id);
let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
let path = macro_call.value.path().and_then(|ast| Path::from_src(&ctx, ast))?;
self.resolver
.resolve_path_as_macro(db.upcast(), path.mod_path()?, Some(MacroSubNs::Bang))
.map(|(it, _)| it.into())
@ -596,9 +595,8 @@ impl SourceAnalyzer {
}
// This must be a normal source file rather than macro file.
let hygiene = Hygiene::new(db.upcast(), self.file_id);
let ctx = LowerCtx::with_hygiene(db.upcast(), &hygiene);
let hir_path = Path::from_src(path.clone(), &ctx)?;
let ctx = LowerCtx::with_span_map(db.upcast(), db.span_map(self.file_id));
let hir_path = Path::from_src(&ctx, path.clone())?;
// Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
// trying to resolve foo::bar.
@ -755,14 +753,15 @@ impl SourceAnalyzer {
&self,
db: &dyn HirDatabase,
macro_call: InFile<&ast::MacroCall>,
) -> Option<HirFileId> {
) -> Option<MacroFileId> {
let krate = self.resolver.krate();
let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
self.resolver
.resolve_path_as_macro(db.upcast(), &path, Some(MacroSubNs::Bang))
.map(|(it, _)| macro_id_to_def_id(db.upcast(), it))
})?;
Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
// why the 64?
Some(macro_call_id.as_macro_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
}
pub(crate) fn resolve_variant(

View File

@ -50,13 +50,8 @@ impl DeclarationLocation {
node.as_ref().original_file_range(db.upcast())
}
pub fn original_name_range(&self, db: &dyn HirDatabase) -> Option<FileRange> {
if let Some(file_id) = self.hir_file_id.file_id() {
// fast path to prevent parsing
return Some(FileRange { file_id, range: self.name_ptr.text_range() });
}
let node = resolve_node(db, self.hir_file_id, &self.name_ptr);
node.as_ref().original_file_range_opt(db.upcast())
pub fn original_name_range(&self, db: &dyn HirDatabase) -> FileRange {
InFile::new(self.hir_file_id, self.name_ptr.text_range()).original_file_range(db.upcast())
}
}

View File

@ -3,7 +3,7 @@ use std::{
iter,
};
use hir::{HasSource, ModuleSource};
use hir::{HasSource, HirFileIdExt, ModuleSource};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::FileId,

View File

@ -1,4 +1,6 @@
use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
use hir::{
db::HirDatabase, HasSource, HasVisibility, HirFileIdExt, ModuleDef, PathResolution, ScopeDef,
};
use ide_db::base_db::FileId;
use syntax::{
ast::{self, edit_in_place::HasVisibilityEdit, make, HasVisibility as _},

View File

@ -1,5 +1,5 @@
use crate::assist_context::{AssistContext, Assists};
use hir::{HasVisibility, HirDisplay, Module};
use hir::{HasVisibility, HirDisplay, HirFileIdExt, Module};
use ide_db::{
assists::{AssistId, AssistKind},
base_db::{FileId, Upcast},

View File

@ -1,4 +1,4 @@
use hir::{HasSource, HirDisplay, InFile};
use hir::{HasSource, HirDisplay, InRealFile};
use ide_db::assists::{AssistId, AssistKind};
use syntax::{
ast::{self, make, HasArgList},
@ -114,14 +114,14 @@ fn add_variant_to_accumulator(
parent: PathParent,
) -> Option<()> {
let db = ctx.db();
let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
let InRealFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
acc.add(
AssistId("generate_enum_variant", AssistKind::Generate),
"Generate variant",
target,
|builder| {
builder.edit_file(file_id.original_file(db));
builder.edit_file(file_id);
let node = builder.make_mut(enum_node);
let variant = make_variant(ctx, name_ref, parent);
node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));

View File

@ -1,5 +1,6 @@
use hir::{
Adt, AsAssocItem, HasSource, HirDisplay, Module, PathResolution, Semantics, Type, TypeInfo,
Adt, AsAssocItem, HasSource, HirDisplay, HirFileIdExt, Module, PathResolution, Semantics, Type,
TypeInfo,
};
use ide_db::{
base_db::FileId,
@ -510,7 +511,7 @@ fn assoc_fn_target_info(
}
fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
match &target {
match target {
GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
}

View File

@ -1,6 +1,6 @@
use std::collections::{hash_map::Entry, HashMap};
use hir::{InFile, Module, ModuleSource};
use hir::{HirFileIdExt, InFile, InRealFile, Module, ModuleSource};
use ide_db::{
base_db::FileRange,
defs::Definition,
@ -167,7 +167,7 @@ fn used_once_in_scope(ctx: &AssistContext<'_>, def: Definition, scopes: &Vec<Sea
fn module_search_scope(db: &RootDatabase, module: hir::Module) -> Vec<SearchScope> {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
if let Some((file_id, call_source)) = file_id.original_call_node(db) {
if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db) {
(file_id, Some(call_source.text_range()))
} else {
(

View File

@ -1,4 +1,4 @@
use hir::{InFile, ModuleDef};
use hir::{HirFileIdExt, InFile, ModuleDef};
use ide_db::{helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator};
use itertools::Itertools;
use syntax::{

View File

@ -2,7 +2,7 @@
use std::iter;
use hir::{Module, ModuleSource};
use hir::{HirFileIdExt, Module, ModuleSource};
use ide_db::{
base_db::{SourceDatabaseExt, VfsPath},
FxHashSet, RootDatabase, SymbolKind,

View File

@ -97,13 +97,13 @@ impl RootDatabase {
// ExpandDatabase
hir::db::AstIdMapQuery
hir::db::ParseMacroExpansionQuery
hir::db::InternMacroCallQuery
hir::db::MacroArgNodeQuery
hir::db::DeclMacroExpanderQuery
hir::db::MacroExpandQuery
hir::db::ExpandProcMacroQuery
hir::db::HygieneFrameQuery
hir::db::InternMacroCallQuery
hir::db::InternSyntaxContextQuery
hir::db::MacroArgQuery
hir::db::ParseMacroExpansionQuery
hir::db::RealSpanMapQuery
// DefDatabase
hir::db::FileItemTreeQuery
@ -143,6 +143,13 @@ impl RootDatabase {
hir::db::FunctionVisibilityQuery
hir::db::ConstVisibilityQuery
hir::db::CrateSupportsNoStdQuery
hir::db::BlockItemTreeQueryQuery
hir::db::ExternCrateDeclDataQuery
hir::db::LangAttrQuery
hir::db::InternAnonymousConstQuery
hir::db::InternExternCrateQuery
hir::db::InternInTypeConstQuery
hir::db::InternUseQuery
// HirDatabase
hir::db::InferQueryQuery

View File

@ -144,6 +144,7 @@ impl RootDatabase {
db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
db.set_expand_proc_attr_macros_with_durability(false, Durability::HIGH);
db.update_parse_query_lru_capacity(lru_capacity);
db.setup_syntax_context_root();
db
}
@ -156,7 +157,6 @@ impl RootDatabase {
base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
// macro expansions are usually rather small, so we can afford to keep more of them alive
hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(4 * lru_capacity);
}
pub fn update_lru_capacities(&mut self, lru_capacities: &FxHashMap<Box<str>, usize>) {
@ -174,12 +174,6 @@ impl RootDatabase {
.copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
);
hir_db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(
lru_capacities
.get(stringify!(MacroExpandQuery))
.copied()
.unwrap_or(4 * base_db::DEFAULT_PARSE_LRU_CAP),
);
macro_rules! update_lru_capacity_per_query {
($( $module:ident :: $query:ident )*) => {$(
@ -204,11 +198,10 @@ impl RootDatabase {
hir_db::AstIdMapQuery
// hir_db::ParseMacroExpansionQuery
// hir_db::InternMacroCallQuery
hir_db::MacroArgNodeQuery
hir_db::MacroArgQuery
hir_db::DeclMacroExpanderQuery
// hir_db::MacroExpandQuery
hir_db::ExpandProcMacroQuery
hir_db::HygieneFrameQuery
hir_db::ParseMacroExpansionErrorQuery
// DefDatabase

View File

@ -22,10 +22,10 @@
//! Our current behavior is ¯\_(ツ)_/¯.
use std::fmt;
use base_db::{AnchoredPathBuf, FileId, FileRange};
use base_db::{span::SyntaxContextId, AnchoredPathBuf, FileId, FileRange};
use either::Either;
use hir::{FieldSource, HasSource, InFile, ModuleSource, Semantics};
use stdx::never;
use hir::{FieldSource, HasSource, HirFileIdExt, InFile, ModuleSource, Semantics};
use stdx::{never, TupleExt};
use syntax::{
ast::{self, HasName},
AstNode, SyntaxKind, TextRange, T,
@ -103,6 +103,7 @@ impl Definition {
/// renamed and extern crate names will report its range, though a rename will introduce
/// an alias instead.
pub fn range_for_rename(self, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange> {
let syn_ctx_is_root = |(range, ctx): (_, SyntaxContextId)| ctx.is_root().then(|| range);
let res = match self {
Definition::Macro(mac) => {
let src = mac.source(sema.db)?;
@ -110,14 +111,18 @@ impl Definition {
Either::Left(it) => it.name()?,
Either::Right(it) => it.name()?,
};
src.with_value(name.syntax()).original_file_range_opt(sema.db)
src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
Definition::Field(field) => {
let src = field.source(sema.db)?;
match &src.value {
FieldSource::Named(record_field) => {
let name = record_field.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db)
src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
FieldSource::Pos(_) => None,
}
@ -125,25 +130,31 @@ impl Definition {
Definition::Module(module) => {
let src = module.declaration_source(sema.db)?;
let name = src.value.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db)
src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
Definition::Function(it) => name_range(it, sema),
Definition::Function(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Adt(adt) => match adt {
hir::Adt::Struct(it) => name_range(it, sema),
hir::Adt::Union(it) => name_range(it, sema),
hir::Adt::Enum(it) => name_range(it, sema),
hir::Adt::Struct(it) => name_range(it, sema).and_then(syn_ctx_is_root),
hir::Adt::Union(it) => name_range(it, sema).and_then(syn_ctx_is_root),
hir::Adt::Enum(it) => name_range(it, sema).and_then(syn_ctx_is_root),
},
Definition::Variant(it) => name_range(it, sema),
Definition::Const(it) => name_range(it, sema),
Definition::Static(it) => name_range(it, sema),
Definition::Trait(it) => name_range(it, sema),
Definition::TraitAlias(it) => name_range(it, sema),
Definition::TypeAlias(it) => name_range(it, sema),
Definition::Local(it) => name_range(it.primary_source(sema.db), sema),
Definition::Variant(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Const(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Static(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Trait(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::TraitAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::TypeAlias(it) => name_range(it, sema).and_then(syn_ctx_is_root),
Definition::Local(it) => {
name_range(it.primary_source(sema.db), sema).and_then(syn_ctx_is_root)
}
Definition::GenericParam(generic_param) => match generic_param {
hir::GenericParam::LifetimeParam(lifetime_param) => {
let src = lifetime_param.source(sema.db)?;
src.with_value(src.value.lifetime()?.syntax()).original_file_range_opt(sema.db)
src.with_value(src.value.lifetime()?.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
_ => {
let x = match generic_param {
@ -156,22 +167,30 @@ impl Definition {
Either::Left(x) => x.name()?,
Either::Right(_) => return None,
};
src.with_value(name.syntax()).original_file_range_opt(sema.db)
src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
},
Definition::Label(label) => {
let src = label.source(sema.db);
let lifetime = src.value.lifetime()?;
src.with_value(lifetime.syntax()).original_file_range_opt(sema.db)
src.with_value(lifetime.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
Definition::ExternCrateDecl(it) => {
let src = it.source(sema.db)?;
if let Some(rename) = src.value.rename() {
let name = rename.name()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db)
src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
} else {
let name = src.value.name_ref()?;
src.with_value(name.syntax()).original_file_range_opt(sema.db)
src.with_value(name.syntax())
.original_file_range_opt(sema.db)
.and_then(syn_ctx_is_root)
}
}
Definition::BuiltinType(_) => return None,
@ -183,7 +202,10 @@ impl Definition {
};
return res;
fn name_range<D>(def: D, sema: &Semantics<'_, RootDatabase>) -> Option<FileRange>
fn name_range<D>(
def: D,
sema: &Semantics<'_, RootDatabase>,
) -> Option<(FileRange, SyntaxContextId)>
where
D: HasSource,
D::Ast: ast::HasName,
@ -256,8 +278,10 @@ fn rename_mod(
let file_id = src.file_id.original_file(sema.db);
match src.value.name() {
Some(name) => {
if let Some(file_range) =
src.with_value(name.syntax()).original_file_range_opt(sema.db)
if let Some(file_range) = src
.with_value(name.syntax())
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{
source_change.insert_source_edit(
file_id,
@ -493,7 +517,12 @@ fn source_edit_from_def(
for source in local.sources(sema.db) {
let source = match source.source.clone().original_ast_node(sema.db) {
Some(source) => source,
None => match source.source.syntax().original_file_range_opt(sema.db) {
None => match source
.source
.syntax()
.original_file_range_opt(sema.db)
.map(TupleExt::head)
{
Some(FileRange { file_id: file_id2, range }) => {
file_id = Some(file_id2);
edit.replace(range, new_name.to_owned());
@ -504,7 +533,7 @@ fn source_edit_from_def(
}
},
};
file_id = source.file_id.file_id();
file_id = Some(source.file_id);
if let Either::Left(pat) = source.value {
let name_range = pat.name().unwrap().syntax().text_range();
// special cases required for renaming fields/locals in Record patterns
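The `syn_ctx_is_root` filter above encodes this file's rename rule: a name is only renamable when it originates from real source, i.e. when its syntax context is the root one. A toy sketch of the filter's shape (simplified stand-in types, with context 0 as the root):

fn syn_ctx_is_root(range_and_ctx: (u32, u32)) -> Option<u32> {
    let (range, ctx) = range_and_ctx;
    (ctx == 0).then_some(range)
}

fn main() {
    assert_eq!(syn_ctx_is_root((42, 0)), Some(42)); // real-source name: keep
    assert_eq!(syn_ctx_is_root((42, 5)), None);     // macro-generated: reject
}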

View File

@ -8,7 +8,8 @@ use std::mem;
use base_db::{salsa::Database, FileId, FileRange, SourceDatabase, SourceDatabaseExt};
use hir::{
AsAssocItem, DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility,
AsAssocItem, DefWithBody, HasAttrs, HasSource, HirFileIdExt, InFile, InRealFile, ModuleSource,
Semantics, Visibility,
};
use memchr::memmem::Finder;
use nohash_hasher::IntMap;
@ -132,7 +133,8 @@ impl SearchScope {
let (file_id, range) = {
let InFile { file_id, value } = module.definition_source(db);
if let Some((file_id, call_source)) = file_id.original_call_node(db) {
if let Some(InRealFile { file_id, value: call_source }) = file_id.original_call_node(db)
{
(file_id, Some(call_source.text_range()))
} else {
(

View File

@ -21,9 +21,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -50,9 +48,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -79,9 +75,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -108,9 +102,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -137,9 +129,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -166,9 +156,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -195,9 +183,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,

View File

@ -19,9 +19,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: TYPE_ALIAS,
@ -46,9 +44,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: CONST,
@ -73,9 +69,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: CONST,
@ -102,9 +96,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: ENUM,
@ -131,9 +123,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@ -160,9 +150,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: MACRO_DEF,
@ -187,9 +175,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STATIC,
@ -216,9 +202,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -245,11 +229,7 @@
),
loc: DeclarationLocation {
hir_file_id: MacroFile(
MacroFile {
macro_call_id: MacroCallId(
0,
),
},
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -276,9 +256,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -307,9 +285,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -338,9 +314,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -365,9 +339,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: TRAIT,
@ -394,9 +366,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@ -423,9 +393,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: UNION,
@ -452,9 +420,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: MODULE,
@ -481,9 +447,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: MODULE,
@ -510,9 +474,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
@ -537,9 +499,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: FN,
@ -566,9 +526,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: MACRO_RULES,
@ -593,9 +551,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: FN,
@ -622,9 +578,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@ -649,9 +603,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: FN,
@ -691,9 +643,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
0,
),
0,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -731,9 +681,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@ -760,9 +708,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
1,
),
ptr: SyntaxNodePtr {
kind: STRUCT,
@ -789,9 +735,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@ -818,9 +762,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,
@ -847,9 +789,7 @@
),
loc: DeclarationLocation {
hir_file_id: FileId(
FileId(
1,
),
1,
),
ptr: SyntaxNodePtr {
kind: USE_TREE,

View File

@ -60,9 +60,6 @@ macro_rules! compile_error { () => {} }
#[test]
fn eager_macro_concat() {
// FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
// See: https://github.com/rust-lang/rust-analyzer/issues/10300
check_diagnostics(
r#"
//- /lib.rs crate:lib deps:core
@ -80,7 +77,6 @@ macro_rules! m {
fn f() {
m!();
//^^^^ error: unresolved macro $crate::private::concat
}
//- /core.rs crate:core

View File

@ -1,7 +1,7 @@
use either::Either;
use hir::{
db::{ExpandDatabase, HirDatabase},
known, AssocItem, HirDisplay, InFile, Type,
known, AssocItem, HirDisplay, HirFileIdExt, InFile, Type,
};
use ide_db::{
assists::Assist, famous_defs::FamousDefs, imports::import_assets::item_for_path_search,

View File

@ -1,4 +1,5 @@
use hir::db::ExpandDatabase;
use hir::HirFileIdExt;
use ide_db::{assists::Assist, source_change::SourceChange};
use syntax::{ast, SyntaxNode};
use syntax::{match_ast, AstNode};

View File

@ -1,5 +1,5 @@
use either::Either;
use hir::{db::ExpandDatabase, HasSource, HirDisplay, Semantics};
use hir::{db::ExpandDatabase, HasSource, HirDisplay, HirFileIdExt, Semantics};
use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
use syntax::{
ast::{self, edit::IndentLevel, make},

View File

@ -1,4 +1,4 @@
use hir::{db::ExpandDatabase, InFile};
use hir::{db::ExpandDatabase, HirFileIdExt, InFile};
use ide_db::source_change::SourceChange;
use syntax::{
ast::{self, HasArgList},

View File

@ -1,4 +1,4 @@
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, InFile, Type};
use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, HirFileIdExt, InFile, Type};
use ide_db::{famous_defs::FamousDefs, source_change::SourceChange};
use syntax::{
ast::{self, BlockExpr, ExprStmt},

View File

@ -33,7 +33,7 @@ pub(crate) fn typed_hole(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Di
fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option<Vec<Assist>> {
let db = ctx.sema.db;
let root = db.parse_or_expand(d.expr.file_id);
let original_range =
let (original_range, _) =
d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?;
let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?;
let mut assists = vec![];

View File

@ -1,4 +1,4 @@
use hir::db::ExpandDatabase;
use hir::{db::ExpandDatabase, HirFileIdExt};
use ide_db::{assists::Assist, base_db::AnchoredPathBuf, source_change::FileSystemEdit};
use itertools::Itertools;
use syntax::AstNode;
@ -150,11 +150,9 @@ mod baz {}
],
),
main_node: Some(
InFile {
InFileWrapper {
file_id: FileId(
FileId(
0,
),
0,
),
value: MODULE@0..8
MOD_KW@0..3 "mod"

View File

@ -1,4 +1,4 @@
use hir::{HasSource, InFile, Semantics};
use hir::{HasSource, InFile, InRealFile, Semantics};
use ide_db::{
base_db::{FileId, FilePosition, FileRange},
defs::Definition,
@ -149,8 +149,8 @@ pub(crate) fn annotations(
node: InFile<T>,
source_file_id: FileId,
) -> Option<(TextRange, Option<TextRange>)> {
if let Some(InFile { file_id, value }) = node.original_ast_node(db) {
if file_id == source_file_id.into() {
if let Some(InRealFile { file_id, value }) = node.original_ast_node(db) {
if file_id == source_file_id {
return Some((
value.syntax().text_range(),
value.name().map(|name| name.syntax().text_range()),

View File

@ -149,7 +149,7 @@ mod tests {
fn check_hierarchy(
ra_fixture: &str,
expected: Expect,
expected_nav: Expect,
expected_incoming: Expect,
expected_outgoing: Expect,
) {
@ -158,7 +158,7 @@ mod tests {
let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
assert_eq!(navs.len(), 1);
let nav = navs.pop().unwrap();
expected.assert_eq(&nav.debug_render());
expected_nav.assert_eq(&nav.debug_render());
let item_pos =
FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };

View File

@ -1,4 +1,4 @@
use hir::Semantics;
use hir::{HirFileIdExt, InFile, Semantics};
use ide_db::{
base_db::FileId, helpers::pick_best_token,
syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
@ -49,7 +49,9 @@ pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<
let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
// map up out of the #[derive] expansion
let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
let InFile { file_id, value: tokens } =
hir::InFile::new(hir_file, descended).upmap_once(db)?;
let token = sema.parse_or_expand(file_id).covering_element(tokens[0]).into_token()?;
let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
let expansions = sema.expand_derive_macro(&attr)?;
let idx = attr

View File

@ -60,13 +60,13 @@ pub(crate) fn goto_definition(
.into_iter()
.filter_map(|token| {
let parent = token.parent()?;
if let Some(tt) = ast::TokenTree::cast(parent) {
if let Some(tt) = ast::TokenTree::cast(parent.clone()) {
if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), file_id) {
return Some(vec![x]);
}
}
Some(
IdentClass::classify_token(sema, &token)?
IdentClass::classify_node(sema, &parent)?
.definitions()
.into_iter()
.flat_map(|def| {
@ -392,6 +392,8 @@ fn bar() {
);
}
// FIXME: We should emit two targets here, one for the identifier in the declaration, one for
// the macro call
#[test]
fn goto_def_for_macro_defined_fn_no_arg() {
check(
@ -399,11 +401,11 @@ fn bar() {
//- /lib.rs
macro_rules! define_fn {
() => (fn foo() {})
}
define_fn!();
//^^^^^^^^^^^^^
fn bar() {
$0foo();
}

View File

@ -249,7 +249,7 @@ impl T for &Foo {}
r#"
//- minicore: copy, derive
#[derive(Copy)]
//^^^^^^^^^^^^^^^
//^^^^
struct Foo$0;
"#,
);

View File

@ -2,6 +2,7 @@
//!
//! Tests live in [`bind_pat`][super::bind_pat] module.
use ide_db::{base_db::FileId, famous_defs::FamousDefs};
use stdx::TupleExt;
use syntax::ast::{self, AstNode};
use text_edit::{TextRange, TextSize};
@ -73,7 +74,9 @@ pub(super) fn hints(
capture.display_place(sema.db)
),
None,
source.name().and_then(|name| name.syntax().original_file_range_opt(sema.db)),
source.name().and_then(|name| {
name.syntax().original_file_range_opt(sema.db).map(TupleExt::head)
}),
);
acc.push(InlayHint {
needs_resolve: label.needs_resolve(),

View File

@ -4,8 +4,8 @@ use std::fmt;
use either::Either;
use hir::{
symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource, HirDisplay, HirFileId,
InFile, LocalSource, ModuleSource,
db::ExpandDatabase, symbols::FileSymbol, AssocItem, FieldSource, HasContainer, HasSource,
HirDisplay, HirFileId, InFile, LocalSource, ModuleSource,
};
use ide_db::{
base_db::{FileId, FileRange},
@ -40,6 +40,8 @@ pub struct NavigationTarget {
/// comments, and `focus_range` is the range of the identifier.
///
/// Clients should place the cursor on this range when navigating to this target.
///
/// This range must be contained within [`Self::full_range`].
pub focus_range: Option<TextRange>,
pub name: SmolStr,
pub kind: Option<SymbolKind>,
@ -166,13 +168,14 @@ impl NavigationTarget {
impl TryToNav for FileSymbol {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let full_range = self.loc.original_range(db);
let focus_range = self.loc.original_name_range(db).and_then(|it| {
if it.file_id == full_range.file_id {
Some(it.range)
} else {
None
}
});
let focus_range = self.loc.original_name_range(db);
let focus_range = if focus_range.file_id == full_range.file_id
&& full_range.range.contains_range(focus_range.range)
{
Some(focus_range.range)
} else {
None
};
Some(NavigationTarget {
file_id: full_range.file_id,
@ -363,11 +366,11 @@ impl ToNav for hir::Module {
impl TryToNav for hir::Impl {
fn try_to_nav(&self, db: &RootDatabase) -> Option<NavigationTarget> {
let InFile { file_id, value } = self.source(db)?;
let derive_attr = self.as_builtin_derive(db);
let derive_path = self.as_builtin_derive_path(db);
let (focus, syntax) = match &derive_attr {
Some(attr) => (None, attr.value.syntax()),
None => (value.self_ty(), value.syntax()),
let (file_id, focus, syntax) = match &derive_path {
Some(attr) => (attr.file_id.into(), None, attr.value.syntax()),
None => (file_id, value.self_ty(), value.syntax()),
};
let (file_id, full_range, focus_range) = orig_range_with_focus(db, file_id, syntax, focus);
@ -628,19 +631,30 @@ impl TryToNav for hir::ConstParam {
}
}
/// Returns the original range of the syntax node, and the range of the name mapped out of macro expansions.
/// Additionally verifies that the name range is in the same file as, and contained within, the original range.
fn orig_range_with_focus(
db: &RootDatabase,
hir_file: HirFileId,
value: &SyntaxNode,
name: Option<impl AstNode>,
) -> (FileId, TextRange, Option<TextRange>) {
let FileRange { file_id, range: full_range } =
InFile::new(hir_file, value).original_file_range(db);
let FileRange { file_id, range } =
match InFile::new(hir_file, value).original_file_range_opt(db) {
Some((range, ctxt)) if ctxt.is_root() => range,
_ => db
.lookup_intern_macro_call(hir_file.macro_file().unwrap().macro_call_id)
.kind
.original_call_range(db),
};
let focus_range = name
.and_then(|it| InFile::new(hir_file, it.syntax()).original_file_range_opt(db))
.and_then(|range| if range.file_id == file_id { Some(range.range) } else { None });
.filter(|(frange, ctxt)| {
ctxt.is_root() && frange.file_id == file_id && range.contains_range(frange.range)
})
.map(|(frange, _ctxt)| frange.range);
(file_id, full_range, focus_range)
(file_id, range, focus_range)
}
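Both rewritten call sites above enforce the new `NavigationTarget` invariant that a focus range must lie within the full range and in the same file. The core of the check, as a hedged standalone sketch over plain `text-size` ranges:

    use text_size::TextRange;

    // A focus (name) range is only usable for navigation when it sits inside
    // the full range; callers additionally require matching file ids.
    fn checked_focus_range(full: TextRange, focus: TextRange) -> Option<TextRange> {
        full.contains_range(focus).then_some(focus)
    }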
#[cfg(test)]

View File

@ -4,7 +4,7 @@
//! tests. This module also implements a couple of magic tricks, like renaming
//! `self` and renaming to `self` (to switch between associated function and method).
use hir::{AsAssocItem, InFile, Semantics};
use hir::{AsAssocItem, HirFileIdExt, InFile, Semantics};
use ide_db::{
base_db::FileId,
defs::{Definition, NameClass, NameRefClass},

View File

@ -2,7 +2,7 @@ use std::fmt;
use ast::HasName;
use cfg::CfgExpr;
use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, Semantics};
use hir::{db::HirDatabase, AsAssocItem, HasAttrs, HasSource, HirFileIdExt, Semantics};
use ide_assists::utils::test_related_attribute;
use ide_db::{
base_db::{FilePosition, FileRange},

View File

@ -3,7 +3,7 @@
use std::collections::HashMap;
use hir::{db::HirDatabase, Crate, Module};
use hir::{db::HirDatabase, Crate, HirFileIdExt, Module};
use ide_db::helpers::get_definition;
use ide_db::{
base_db::{FileId, FileRange, SourceDatabaseExt},
@ -243,6 +243,7 @@ mod tests {
}
}
#[track_caller]
fn check_definitions(ra_fixture: &str) {
let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
let s = StaticIndex::compute(&analysis);

View File

@ -2,7 +2,7 @@ use std::{fmt, marker::PhantomData};
use hir::{
db::{AstIdMapQuery, AttrsQuery, BlockDefMapQuery, ParseMacroExpansionQuery},
Attr, Attrs, ExpandResult, MacroFile, Module,
Attr, Attrs, ExpandResult, MacroFileId, Module,
};
use ide_db::{
base_db::{
@ -199,8 +199,12 @@ impl StatCollect<FileId, Parse<ast::SourceFile>> for SyntaxTreeStats<false> {
}
}
impl<M> StatCollect<MacroFile, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
fn collect_entry(&mut self, _: MacroFile, value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>) {
impl<M> StatCollect<MacroFileId, ExpandResult<(Parse<SyntaxNode>, M)>> for SyntaxTreeStats<true> {
fn collect_entry(
&mut self,
_: MacroFileId,
value: Option<ExpandResult<(Parse<SyntaxNode>, M)>>,
) {
self.total += 1;
self.retained += value.is_some() as usize;
}

View File

@ -1,6 +1,6 @@
//! Computes color for a single element.
use hir::{AsAssocItem, HasVisibility, Semantics};
use hir::{AsAssocItem, HasVisibility, HirFileIdExt, Semantics};
use ide_db::{
defs::{Definition, IdentClass, NameClass, NameRefClass},
FxHashMap, RootDatabase, SymbolKind,

View File

@ -127,7 +127,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">+</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">27</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">-</span><span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="numeric_literal">0</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span>, <span class="numeric_literal macro">-</span><span class="numeric_literal macro">5</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"</span><span class="format_specifier">{</span><span class="format_specifier">:</span><span class="format_specifier">#</span><span class="numeric_literal">0</span><span class="numeric_literal">10</span><span class="variable">x</span><span class="format_specifier">}</span><span class="string_literal macro">!"</span><span class="comma macro">,</span> <span class="numeric_literal macro">27</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">0</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">5</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>
<span class="macro">println</span><span class="macro_bang">!</span><span class="parenthesis macro">(</span><span class="string_literal macro">"Hello </span><span class="format_specifier">{</span><span class="numeric_literal">1</span><span class="format_specifier">}</span><span class="string_literal macro"> is </span><span class="format_specifier">{</span><span class="numeric_literal">2</span><span class="format_specifier">:</span><span class="format_specifier">.</span><span class="numeric_literal">0</span><span class="format_specifier">$</span><span class="format_specifier">}</span><span class="string_literal macro">"</span><span class="comma macro">,</span> <span class="numeric_literal macro">5</span><span class="comma macro">,</span> <span class="string_literal macro">"x"</span><span class="comma macro">,</span> <span class="numeric_literal macro">0.01</span><span class="parenthesis macro">)</span><span class="semicolon">;</span>

View File

@ -4,19 +4,19 @@
// to run rust-analyzer as a library.
use std::{collections::hash_map::Entry, mem, path::Path, sync};
use ::tt::token_id as tt;
use crossbeam_channel::{unbounded, Receiver};
use ide::{AnalysisHost, Change, SourceRoot};
use ide_db::{
base_db::{
CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
ProcMacroLoadResult, ProcMacros,
span::SpanData, CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
ProcMacroKind, ProcMacroLoadResult, ProcMacros,
},
FxHashMap,
};
use itertools::Itertools;
use proc_macro_api::{MacroDylib, ProcMacroServer};
use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace};
use tt::DelimSpan;
use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath};
pub struct LoadCargoConfig {
@ -374,12 +374,15 @@ struct Expander(proc_macro_api::ProcMacro);
impl ProcMacroExpander for Expander {
fn expand(
&self,
subtree: &tt::Subtree,
attrs: Option<&tt::Subtree>,
subtree: &tt::Subtree<SpanData>,
attrs: Option<&tt::Subtree<SpanData>>,
env: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> {
def_site: SpanData,
call_site: SpanData,
mixed_site: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
match self.0.expand(subtree, attrs, env) {
match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
@ -394,10 +397,13 @@ struct IdentityExpander;
impl ProcMacroExpander for IdentityExpander {
fn expand(
&self,
subtree: &tt::Subtree,
_: Option<&tt::Subtree>,
subtree: &tt::Subtree<SpanData>,
_: Option<&tt::Subtree<SpanData>>,
_: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> {
_: SpanData,
_: SpanData,
_: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(subtree.clone())
}
}
@ -409,11 +415,14 @@ struct EmptyExpander;
impl ProcMacroExpander for EmptyExpander {
fn expand(
&self,
_: &tt::Subtree,
_: Option<&tt::Subtree>,
_: &tt::Subtree<SpanData>,
_: Option<&tt::Subtree<SpanData>>,
_: &Env,
) -> Result<tt::Subtree, ProcMacroExpansionError> {
Ok(tt::Subtree::empty())
call_site: SpanData,
_: SpanData,
_: SpanData,
) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }))
}
}
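With spans carrying real source ranges, expansion now passes the expander three spans up front, mirroring `proc_macro`'s def-site/call-site/mixed-site hygiene, and even an empty result needs spans to hang its delimiters on. A hedged sketch combining the two expanders above into one no-op implementation (same imports as this file: `span::SpanData`, `tt::DelimSpan`):

    struct NoopExpander;

    impl ProcMacroExpander for NoopExpander {
        fn expand(
            &self,
            subtree: &tt::Subtree<SpanData>,
            _attrs: Option<&tt::Subtree<SpanData>>,
            _env: &Env,
            _def_site: SpanData,
            call_site: SpanData,
            _mixed_site: SpanData,
        ) -> Result<tt::Subtree<SpanData>, ProcMacroExpansionError> {
            if subtree.token_trees.is_empty() {
                // Even "nothing" has spans: the empty subtree's invisible
                // delimiters are anchored at the call site.
                return Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site }));
            }
            Ok(subtree.clone())
        }
    }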

View File

@ -9,7 +9,7 @@ use test_utils::{bench, bench_fixture, skip_slow_tests};
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
syntax_node_to_token_tree, tt, DeclarativeMacro,
syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
};
#[test]
@ -38,7 +38,7 @@ fn benchmark_expand_macro_rules() {
invocations
.into_iter()
.map(|(id, tt)| {
let res = rules[&id].expand(tt);
let res = rules[&id].expand(&tt, |_| ());
assert!(res.err.is_none());
res.value.token_trees.len()
})
@ -47,14 +47,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true)))
.collect()
}
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@ -64,14 +64,17 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
.filter_map(ast::MacroRules::cast)
.map(|rule| {
let id = rule.name().unwrap().to_string();
let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
let def_tt =
syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
(id, def_tt)
})
.collect()
}
/// Generate random invocation fixtures from rules
fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
fn invocation_fixtures(
rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@ -93,8 +96,8 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
loop {
let mut subtree = tt::Subtree {
delimiter: tt::Delimiter {
open: tt::TokenId::UNSPECIFIED,
close: tt::TokenId::UNSPECIFIED,
open: DUMMY,
close: DUMMY,
kind: tt::DelimiterKind::Invisible,
},
token_trees: vec![],
@ -102,7 +105,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
for op in rule.lhs.iter() {
collect_from_op(op, &mut subtree, &mut seed);
}
if it.expand(subtree.clone()).err.is_none() {
if it.expand(&subtree, |_| ()).err.is_none() {
res.push((name.clone(), subtree));
break;
}
@ -116,7 +119,11 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
}
return res;
fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
fn collect_from_op(
op: &Op<DummyTestSpanData>,
parent: &mut tt::Subtree<DummyTestSpanData>,
seed: &mut usize,
) {
return match op {
Op::Var { kind, .. } => match kind.as_ref() {
Some(MetaVarKind::Ident) => parent.token_trees.push(make_ident("foo")),
@ -202,38 +209,21 @@ fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(Stri
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
fn make_ident(ident: &str) -> tt::TokenTree {
tt::Leaf::Ident(tt::Ident {
span: tt::TokenId::unspecified(),
text: SmolStr::new(ident),
})
.into()
fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
}
fn make_punct(char: char) -> tt::TokenTree {
tt::Leaf::Punct(tt::Punct {
span: tt::TokenId::unspecified(),
char,
spacing: tt::Spacing::Alone,
})
.into()
fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
}
fn make_literal(lit: &str) -> tt::TokenTree {
tt::Leaf::Literal(tt::Literal {
span: tt::TokenId::unspecified(),
text: SmolStr::new(lit),
})
.into()
fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
}
fn make_subtree(
kind: tt::DelimiterKind,
token_trees: Option<Vec<tt::TokenTree>>,
) -> tt::TokenTree {
token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
) -> tt::TokenTree<DummyTestSpanData> {
tt::Subtree {
delimiter: tt::Delimiter {
open: tt::TokenId::unspecified(),
close: tt::TokenId::unspecified(),
kind,
},
delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
token_trees: token_trees.unwrap_or_default(),
}
.into()
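With `tt` now generic over the span type, the benchmark threads the test-only `DUMMY` span through everything, and `expand` borrows the invocation plus a span-marker closure. A hedged end-to-end sketch built from the helpers named in this hunk (`DummyTestSpanMap`, `DUMMY`):

    // Lower a macro_rules! definition, using a span map that assigns DUMMY
    // spans to every token.
    let file = ast::SourceFile::parse("macro_rules! m { () => { 0 } }").ok().unwrap();
    let rule = file.syntax().descendants().find_map(ast::MacroRules::cast).unwrap();
    let def = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
    let mac = DeclarativeMacro::parse_macro_rules(&def, true);
    // An empty invocation (`m!()`), spelled with dummy spans on its
    // invisible delimiters.
    let call = tt::Subtree {
        delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind: tt::DelimiterKind::Invisible },
        token_trees: vec![],
    };
    // The closure is the span marker; a no-op here, as in the benchmark.
    assert!(mac.expand(&call, |_| ()).err.is_none());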

View File

@ -7,15 +7,17 @@ mod transcriber;
use rustc_hash::FxHashMap;
use syntax::SmolStr;
use tt::Span;
use crate::{parser::MetaVarKind, tt, ExpandError, ExpandResult};
use crate::{parser::MetaVarKind, ExpandError, ExpandResult};
pub(crate) fn expand_rules(
rules: &[crate::Rule],
input: &tt::Subtree,
pub(crate) fn expand_rules<S: Span>(
rules: &[crate::Rule<S>],
input: &tt::Subtree<S>,
marker: impl Fn(&mut S) + Copy,
is_2021: bool,
) -> ExpandResult<tt::Subtree> {
let mut match_: Option<(matcher::Match, &crate::Rule)> = None;
) -> ExpandResult<tt::Subtree<S>> {
let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
for rule in rules {
let new_match = matcher::match_(&rule.lhs, input, is_2021);
@ -24,7 +26,7 @@ pub(crate) fn expand_rules(
// Unconditionally returning the transcription here makes the
// `test_repeat_bad_var` test fail.
let ExpandResult { value, err: transcribe_err } =
transcriber::transcribe(&rule.rhs, &new_match.bindings);
transcriber::transcribe(&rule.rhs, &new_match.bindings, marker);
if transcribe_err.is_none() {
return ExpandResult::ok(value);
}
@ -43,11 +45,11 @@ pub(crate) fn expand_rules(
if let Some((match_, rule)) = match_ {
// if we got here, there was no match without errors
let ExpandResult { value, err: transcribe_err } =
transcriber::transcribe(&rule.rhs, &match_.bindings);
transcriber::transcribe(&rule.rhs, &match_.bindings, marker);
ExpandResult { value, err: match_.err.or(transcribe_err) }
} else {
ExpandResult::new(
tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] },
tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
ExpandError::NoMatchingRule,
)
}
@ -98,23 +100,29 @@ pub(crate) fn expand_rules(
/// In other words, `Bindings` is a *multi* mapping from `SmolStr` to
/// `tt::TokenTree`, where the index to select a particular `TokenTree` among
/// many is not a plain `usize`, but a `&[usize]`.
#[derive(Debug, Default, Clone, PartialEq, Eq)]
struct Bindings {
inner: FxHashMap<SmolStr, Binding>,
#[derive(Debug, Clone, PartialEq, Eq)]
struct Bindings<S> {
inner: FxHashMap<SmolStr, Binding<S>>,
}
impl<S> Default for Bindings<S> {
fn default() -> Self {
Self { inner: Default::default() }
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
enum Binding {
Fragment(Fragment),
Nested(Vec<Binding>),
enum Binding<S> {
Fragment(Fragment<S>),
Nested(Vec<Binding<S>>),
Empty,
Missing(MetaVarKind),
}
#[derive(Debug, Clone, PartialEq, Eq)]
enum Fragment {
enum Fragment<S> {
/// token fragments are just copy-pasted into the output
Tokens(tt::TokenTree),
Tokens(tt::TokenTree<S>),
/// Expr ast fragments are surrounded with `()` on insertion to preserve
/// precedence. Note that this impl is different from the one currently in
/// `rustc` -- `rustc` doesn't translate fragments into token trees at all.
@ -122,7 +130,7 @@ enum Fragment {
/// At one point in time, we tried to use "fake" delimiters here à la
/// proc-macro delimiter=none. As we later discovered, "none" delimiters are
/// tricky to handle in the parser, and rustc doesn't handle those either.
Expr(tt::TokenTree),
Expr(tt::Subtree<S>),
/// There are roughly two types of paths: paths in expression context, where a
/// separator `::` between an identifier and its following generic argument list
/// is mandatory, and paths in type context, where `::` can be omitted.
@ -132,5 +140,5 @@ enum Fragment {
/// and is transcribed as an expression-context path, verbatim transcription
/// would cause a syntax error. We need to fix it up just before transcribing;
/// see `transcriber::fix_up_and_push_path_tt()`.
Path(tt::TokenTree),
Path(tt::Subtree<S>),
}
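The `Fragment::Expr` doc comment is easiest to see with a concrete macro; this bit is plain runnable Rust, independent of rust-analyzer:

    macro_rules! double {
        ($e:expr) => { $e * 2 };
    }

    fn main() {
        // Naive token pasting would transcribe `1 + 1 * 2` (= 3); wrapping the
        // captured expression as `(1 + 1)` preserves precedence, giving 4.
        assert_eq!(double!(1 + 1), 4);
    }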

View File

@ -63,21 +63,21 @@ use std::rc::Rc;
use smallvec::{smallvec, SmallVec};
use syntax::SmolStr;
use tt::Span;
use crate::{
expander::{Binding, Bindings, ExpandResult, Fragment},
parser::{MetaVarKind, Op, RepeatKind, Separator},
tt,
tt_iter::TtIter,
ExpandError, MetaTemplate, ValueResult,
};
impl Bindings {
impl<S: Span> Bindings<S> {
fn push_optional(&mut self, name: &SmolStr) {
// FIXME: Do we have a better way to represent an empty token?
// Insert an empty subtree for an empty token
let tt =
tt::Subtree { delimiter: tt::Delimiter::unspecified(), token_trees: vec![] }.into();
tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
}
@ -85,14 +85,14 @@ impl Bindings {
self.inner.insert(name.clone(), Binding::Empty);
}
fn bindings(&self) -> impl Iterator<Item = &Binding> {
fn bindings(&self) -> impl Iterator<Item = &Binding<S>> {
self.inner.values()
}
}
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub(super) struct Match {
pub(super) bindings: Bindings,
#[derive(Clone, Debug, PartialEq, Eq)]
pub(super) struct Match<S> {
pub(super) bindings: Bindings<S>,
/// We currently just keep the first error and count the rest to compare matches.
pub(super) err: Option<ExpandError>,
pub(super) err_count: usize,
@ -102,7 +102,19 @@ pub(super) struct Match {
pub(super) bound_count: usize,
}
impl Match {
impl<S> Default for Match<S> {
fn default() -> Self {
Self {
bindings: Default::default(),
err: Default::default(),
err_count: Default::default(),
unmatched_tts: Default::default(),
bound_count: Default::default(),
}
}
}
impl<S> Match<S> {
fn add_err(&mut self, err: ExpandError) {
let prev_err = self.err.take();
self.err = prev_err.or(Some(err));
@ -111,12 +123,16 @@ impl Match {
}
/// Matching errors are added to the `Match`.
pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool) -> Match {
pub(super) fn match_<S: Span>(
pattern: &MetaTemplate<S>,
input: &tt::Subtree<S>,
is_2021: bool,
) -> Match<S> {
let mut res = match_loop(pattern, input, is_2021);
res.bound_count = count(res.bindings.bindings());
return res;
fn count<'a>(bindings: impl Iterator<Item = &'a Binding>) -> usize {
fn count<'a, S: 'a>(bindings: impl Iterator<Item = &'a Binding<S>>) -> usize {
bindings
.map(|it| match it {
Binding::Fragment(_) => 1,
@ -129,10 +145,10 @@ pub(super) fn match_(pattern: &MetaTemplate, input: &tt::Subtree, is_2021: bool)
}
#[derive(Debug, Clone)]
enum BindingKind {
enum BindingKind<S> {
Empty(SmolStr),
Optional(SmolStr),
Fragment(SmolStr, Fragment),
Fragment(SmolStr, Fragment<S>),
Missing(SmolStr, MetaVarKind),
Nested(usize, usize),
}
@ -146,13 +162,18 @@ enum LinkNode<T> {
Parent { idx: usize, len: usize },
}
#[derive(Default)]
struct BindingsBuilder {
nodes: Vec<Vec<LinkNode<Rc<BindingKind>>>>,
struct BindingsBuilder<S> {
nodes: Vec<Vec<LinkNode<Rc<BindingKind<S>>>>>,
nested: Vec<Vec<LinkNode<usize>>>,
}
impl BindingsBuilder {
impl<S> Default for BindingsBuilder<S> {
fn default() -> Self {
Self { nodes: Default::default(), nested: Default::default() }
}
}
impl<S: Span> BindingsBuilder<S> {
fn alloc(&mut self) -> BindingsIdx {
let idx = self.nodes.len();
self.nodes.push(Vec::new());
@ -189,7 +210,7 @@ impl BindingsBuilder {
self.nodes[idx.0].push(LinkNode::Node(Rc::new(BindingKind::Optional(var.clone()))));
}
fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment) {
fn push_fragment(&mut self, idx: &mut BindingsIdx, var: &SmolStr, fragment: Fragment<S>) {
self.nodes[idx.0]
.push(LinkNode::Node(Rc::new(BindingKind::Fragment(var.clone(), fragment))));
}
@ -210,11 +231,11 @@ impl BindingsBuilder {
idx.0 = new_idx;
}
fn build(self, idx: &BindingsIdx) -> Bindings {
fn build(self, idx: &BindingsIdx) -> Bindings<S> {
self.build_inner(&self.nodes[idx.0])
}
fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind>>]) -> Bindings {
fn build_inner(&self, link_nodes: &[LinkNode<Rc<BindingKind<S>>>]) -> Bindings<S> {
let mut bindings = Bindings::default();
let mut nodes = Vec::new();
self.collect_nodes(link_nodes, &mut nodes);
@ -264,7 +285,7 @@ impl BindingsBuilder {
&'a self,
id: usize,
len: usize,
nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind>>]>,
nested_refs: &mut Vec<&'a [LinkNode<Rc<BindingKind<S>>>]>,
) {
self.nested[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(id) => nested_refs.push(&self.nodes[*id]),
@ -272,7 +293,7 @@ impl BindingsBuilder {
});
}
fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings>) {
fn collect_nested(&self, idx: usize, nested_idx: usize, nested: &mut Vec<Bindings<S>>) {
let last = &self.nodes[idx];
let mut nested_refs: Vec<&[_]> = Vec::new();
self.nested[nested_idx].iter().for_each(|it| match *it {
@ -283,7 +304,7 @@ impl BindingsBuilder {
nested.extend(nested_refs.into_iter().map(|iter| self.build_inner(iter)));
}
fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind>) {
fn collect_nodes_ref<'a>(&'a self, id: usize, len: usize, nodes: &mut Vec<&'a BindingKind<S>>) {
self.nodes[id].iter().take(len).for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
LinkNode::Parent { idx, len } => self.collect_nodes_ref(*idx, *len, nodes),
@ -292,8 +313,8 @@ impl BindingsBuilder {
fn collect_nodes<'a>(
&'a self,
link_nodes: &'a [LinkNode<Rc<BindingKind>>],
nodes: &mut Vec<&'a BindingKind>,
link_nodes: &'a [LinkNode<Rc<BindingKind<S>>>],
nodes: &mut Vec<&'a BindingKind<S>>,
) {
link_nodes.iter().for_each(|it| match it {
LinkNode::Node(it) => nodes.push(it),
@ -303,22 +324,22 @@ impl BindingsBuilder {
}
#[derive(Debug, Clone)]
struct MatchState<'t> {
struct MatchState<'t, S> {
/// The position of the "dot" in this matcher
dot: OpDelimitedIter<'t>,
dot: OpDelimitedIter<'t, S>,
/// Token subtree stack
/// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
/// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
/// that where the bottom of the stack is the outermost matcher.
stack: SmallVec<[OpDelimitedIter<'t>; 4]>,
stack: SmallVec<[OpDelimitedIter<'t, S>; 4]>,
/// The "parent" matcher position if we are in a repetition. That is, the matcher position just
/// before we enter the repetition.
up: Option<Box<MatchState<'t>>>,
up: Option<Box<MatchState<'t, S>>>,
/// The separator if we are in a repetition.
sep: Option<Separator>,
sep: Option<Separator<S>>,
/// The KleeneOp of this sequence if we are in a repetition.
sep_kind: Option<RepeatKind>,
@ -330,7 +351,7 @@ struct MatchState<'t> {
bindings: BindingsIdx,
/// Cached result of meta variable parsing
meta_result: Option<(TtIter<'t>, ExpandResult<Option<Fragment>>)>,
meta_result: Option<(TtIter<'t, S>, ExpandResult<Option<Fragment<S>>>)>,
/// Whether an error occurred in this state; errors are propagated ("poisoned") up to the parent.
is_error: bool,
@ -355,16 +376,16 @@ struct MatchState<'t> {
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `error_items`: the set of items in errors, used for error-resilient parsing
#[inline]
fn match_loop_inner<'t>(
src: TtIter<'t>,
stack: &[TtIter<'t>],
res: &mut Match,
bindings_builder: &mut BindingsBuilder,
cur_items: &mut SmallVec<[MatchState<'t>; 1]>,
bb_items: &mut SmallVec<[MatchState<'t>; 1]>,
next_items: &mut Vec<MatchState<'t>>,
eof_items: &mut SmallVec<[MatchState<'t>; 1]>,
error_items: &mut SmallVec<[MatchState<'t>; 1]>,
fn match_loop_inner<'t, S: Span>(
src: TtIter<'t, S>,
stack: &[TtIter<'t, S>],
res: &mut Match<S>,
bindings_builder: &mut BindingsBuilder<S>,
cur_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
bb_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
next_items: &mut Vec<MatchState<'t, S>>,
eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
is_2021: bool,
) {
macro_rules! try_push {
@ -468,7 +489,7 @@ fn match_loop_inner<'t>(
if let Ok(subtree) = src.clone().expect_subtree() {
if subtree.delimiter.kind == delimiter.kind {
item.stack.push(item.dot);
item.dot = tokens.iter_delimited(Some(delimiter));
item.dot = tokens.iter_delimited(Some(*delimiter));
cur_items.push(item);
}
}
@ -587,9 +608,9 @@ fn match_loop_inner<'t>(
}
}
fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match {
fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
let mut src = TtIter::new(src);
let mut stack: SmallVec<[TtIter<'_>; 1]> = SmallVec::new();
let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
let mut res = Match::default();
let mut error_recover_item = None;
@ -736,16 +757,16 @@ fn match_loop(pattern: &MetaTemplate, src: &tt::Subtree, is_2021: bool) -> Match
}
}
fn match_meta_var(
fn match_meta_var<S: Span>(
kind: MetaVarKind,
input: &mut TtIter<'_>,
input: &mut TtIter<'_, S>,
is_2021: bool,
) -> ExpandResult<Option<Fragment>> {
) -> ExpandResult<Option<Fragment<S>>> {
let fragment = match kind {
MetaVarKind::Path => {
return input
.expect_fragment(parser::PrefixEntryPoint::Path)
.map(|it| it.map(Fragment::Path));
.map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
}
MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@ -771,9 +792,21 @@ fn match_meta_var(
}
_ => {}
};
return input
.expect_fragment(parser::PrefixEntryPoint::Expr)
.map(|tt| tt.map(Fragment::Expr));
return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
tt.map(|tt| match tt {
tt::TokenTree::Leaf(leaf) => tt::Subtree {
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![leaf.into()],
},
tt::TokenTree::Subtree(mut s) => {
if s.delimiter.kind == tt::DelimiterKind::Invisible {
s.delimiter.kind = tt::DelimiterKind::Parenthesis;
}
s
}
})
.map(Fragment::Expr)
});
}
MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
let tt_result = match kind {
@ -796,7 +829,7 @@ fn match_meta_var(
match neg {
None => lit.into(),
Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![neg, lit.into()],
}),
}
@ -811,7 +844,7 @@ fn match_meta_var(
input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
}
fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate) {
fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate<S>) {
for op in pattern.iter() {
match op {
Op::Var { name, .. } => collector_fun(name.clone()),
@ -824,38 +857,38 @@ fn collect_vars(collector_fun: &mut impl FnMut(SmolStr), pattern: &MetaTemplate)
}
}
}
impl MetaTemplate {
fn iter_delimited<'a>(&'a self, delimited: Option<&'a tt::Delimiter>) -> OpDelimitedIter<'a> {
impl<S: Span> MetaTemplate<S> {
fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
OpDelimitedIter {
inner: &self.0,
idx: 0,
delimited: delimited.unwrap_or(&tt::Delimiter::UNSPECIFIED),
delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
}
}
}
#[derive(Debug, Clone, Copy)]
enum OpDelimited<'a> {
Op(&'a Op),
enum OpDelimited<'a, S> {
Op(&'a Op<S>),
Open,
Close,
}
#[derive(Debug, Clone, Copy)]
struct OpDelimitedIter<'a> {
inner: &'a [Op],
delimited: &'a tt::Delimiter,
struct OpDelimitedIter<'a, S> {
inner: &'a [Op<S>],
delimited: tt::Delimiter<S>,
idx: usize,
}
impl<'a> OpDelimitedIter<'a> {
impl<'a, S: Span> OpDelimitedIter<'a, S> {
fn is_eof(&self) -> bool {
let len = self.inner.len()
+ if self.delimited.kind != tt::DelimiterKind::Invisible { 2 } else { 0 };
self.idx >= len
}
fn peek(&self) -> Option<OpDelimited<'a>> {
fn peek(&self) -> Option<OpDelimited<'a, S>> {
match self.delimited.kind {
tt::DelimiterKind::Invisible => self.inner.get(self.idx).map(OpDelimited::Op),
_ => match self.idx {
@ -871,8 +904,8 @@ impl<'a> OpDelimitedIter<'a> {
}
}
impl<'a> Iterator for OpDelimitedIter<'a> {
type Item = OpDelimited<'a>;
impl<'a, S: Span> Iterator for OpDelimitedIter<'a, S> {
type Item = OpDelimited<'a, S>;
fn next(&mut self) -> Option<Self::Item> {
let res = self.peek();
@ -888,8 +921,8 @@ impl<'a> Iterator for OpDelimitedIter<'a> {
}
}
impl TtIter<'_> {
fn expect_separator(&mut self, separator: &Separator) -> bool {
impl<S: Span> TtIter<'_, S> {
fn expect_separator(&mut self, separator: &Separator<S>) -> bool {
let mut fork = self.clone();
let ok = match separator {
Separator::Ident(lhs) => match fork.expect_ident_or_underscore() {
@ -919,7 +952,7 @@ impl TtIter<'_> {
ok
}
fn expect_tt(&mut self) -> Result<tt::TokenTree, ()> {
fn expect_tt(&mut self) -> Result<tt::TokenTree<S>, ()> {
if let Some(tt::TokenTree::Leaf(tt::Leaf::Punct(punct))) = self.peek_n(0) {
if punct.char == '\'' {
self.expect_lifetime()
@ -927,7 +960,7 @@ impl TtIter<'_> {
let puncts = self.expect_glued_punct()?;
let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
Ok(tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees,
}))
}
@ -936,7 +969,7 @@ impl TtIter<'_> {
}
}
fn expect_lifetime(&mut self) -> Result<tt::TokenTree, ()> {
fn expect_lifetime(&mut self) -> Result<tt::TokenTree<S>, ()> {
let punct = self.expect_single_punct()?;
if punct.char != '\'' {
return Err(());
@ -944,7 +977,7 @@ impl TtIter<'_> {
let ident = self.expect_ident_or_underscore()?;
Ok(tt::Subtree {
delimiter: tt::Delimiter::unspecified(),
delimiter: tt::Delimiter::dummy_invisible(),
token_trees: vec![
tt::Leaf::Punct(*punct).into(),
tt::Leaf::Ident(ident.clone()).into(),
@ -953,7 +986,7 @@ impl TtIter<'_> {
.into())
}
fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
fn eat_char(&mut self, c: char) -> Option<tt::TokenTree<S>> {
let mut fork = self.clone();
match fork.expect_char(c) {
Ok(_) => {
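The `match_meta_var` hunk above now normalizes every matched `expr` fragment into a subtree: a bare leaf gets invisible delimiters, and invisible delimiters are promoted to real parentheses so the fragment keeps its precedence through transcription. Isolated as a hedged sketch (the function name is invented):

    use tt::Span;

    // Mirrors the closure in match_meta_var for expr fragments.
    fn normalize_expr_fragment<S: Span>(tt: tt::TokenTree<S>) -> tt::Subtree<S> {
        match tt {
            // A lone token becomes a one-element subtree with invisible delimiters.
            tt::TokenTree::Leaf(leaf) => tt::Subtree {
                delimiter: tt::Delimiter::dummy_invisible(),
                token_trees: vec![leaf.into()],
            },
            // Invisible delimiters are hard for the parser; upgrade them to `()`.
            tt::TokenTree::Subtree(mut s) => {
                if s.delimiter.kind == tt::DelimiterKind::Invisible {
                    s.delimiter.kind = tt::DelimiterKind::Parenthesis;
                }
                s
            }
        }
    }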

Some files were not shown because too many files have changed in this diff