Various fixes

Florian Diebold 2022-02-09 16:30:10 +01:00
parent bdb7ae5dd0
commit 63fd643d72
5 changed files with 75 additions and 25 deletions

@@ -111,7 +111,7 @@ pub trait AstDatabase: SourceDatabase {
fn macro_arg(
&self,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>>;
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>>;
/// Extracts syntax node, corresponding to a macro call. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
@@ -151,8 +151,10 @@ pub fn expand_speculative(
let censor = censor_for_macro_input(&loc, &speculative_args);
let mut fixups = fixup::fixup_syntax(&speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
let (mut tt, spec_args_tmap) = mbe::syntax_node_to_token_tree_with_modifications(
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&speculative_args,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
@@ -202,7 +204,7 @@ pub fn expand_speculative(
// Do the actual expansion, we need to directly expand the proc macro due to the attribute args
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
let mut speculative_expansion = if let MacroDefKind::ProcMacro(expander, ..) = loc.def.kind {
tt.delimiter = None;
expander.expand(db, loc.krate, &tt, attr_arg.as_ref())
} else {
@@ -210,6 +212,7 @@ pub fn expand_speculative(
};
let expand_to = macro_expand_to(db, actual_macro_call);
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
let range = rev_tmap.first_range_by_token(token_id, token_to_map.kind())?;
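
Taken together, the `expand_speculative` hunks above amount to the following pipeline (a condensed sketch, not a literal excerpt; the censoring setup, the non-proc-macro branch, and the mapping back to a syntax node are elided):

let mut fixups = fixup::fixup_syntax(&speculative_args);
fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));

// The conversion continues the token map and id counter that fixup_syntax
// started, and additionally returns the next free id (unused here).
let (mut tt, spec_args_tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
    &speculative_args,
    fixups.token_map,
    fixups.next_id,
    fixups.replace,
    fixups.append,
);

// The expansion is now held mutably so the fixups can be undone on its output
// before it is converted back into a syntax node.
let mut speculative_expansion = expander.expand(db, loc.krate, &tt, attr_arg.as_ref());
fixup::reverse_fixups(&mut speculative_expansion.value, &spec_args_tmap, &fixups.undo_info);
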
@@ -300,7 +303,7 @@ fn parse_macro_expansion(
fn macro_arg(
db: &dyn AstDatabase,
id: MacroCallId,
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>> {
) -> Option<Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>> {
let arg = db.macro_arg_text(id)?;
let loc = db.lookup_intern_macro_call(id);
@@ -308,15 +311,20 @@ fn macro_arg(
let censor = censor_for_macro_input(&loc, &node);
let mut fixups = fixup::fixup_syntax(&node);
fixups.replace.extend(censor.into_iter().map(|node| (node, Vec::new())));
let (mut tt, tmap) =
mbe::syntax_node_to_token_tree_with_modifications(&node, fixups.replace, fixups.append);
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&node,
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter = None;
}
Some(Arc::new((tt, tmap, fixups.map)))
Some(Arc::new((tt, tmap, fixups.undo_info)))
}
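
For code that consumes the `macro_arg` query, the visible change is the third tuple element. A hypothetical consumer (the query and `reverse_fixups` are real; the surrounding names are assumptions) might look roughly like:

// Hypothetical consumer sketch: the undo info travels with the macro argument
// and is applied to whatever gets expanded from it.
let (arg_tt, arg_tmap, undo_info) = &*db.macro_arg(macro_call_id)?;
let mut expansion = expander.expand(db, loc.krate, arg_tt, None);
fixup::reverse_fixups(&mut expansion.value, arg_tmap, undo_info);
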
fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<SyntaxNode> {

@@ -1,3 +1,7 @@
//! To make attribute macros work reliably when typing, we need to take care to
//! fix up syntax errors in the code we're passing to them.
use std::mem;
use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
use rustc_hash::FxHashMap;
use syntax::{
@@ -6,16 +10,22 @@ use syntax::{
};
use tt::Subtree;
/// The result of calculating fixes for a syntax node -- a bunch of changes
/// (appending to and replacing nodes), the information that is needed to
/// reverse those changes afterwards, and a token map.
#[derive(Debug)]
pub struct SyntaxFixups {
pub append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
pub replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
pub map: SyntaxFixupMap,
pub undo_info: SyntaxFixupUndoInfo,
pub token_map: TokenMap,
pub next_id: u32,
}
/// This is the information needed to reverse the fixups.
#[derive(Debug, PartialEq, Eq)]
pub struct SyntaxFixupMap {
original: Vec<(Subtree, TokenMap)>,
pub struct SyntaxFixupUndoInfo {
original: Vec<Subtree>,
}
const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
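
Because the replaced error subtrees are now converted with the same token map and id counter as the rest of the input (see the `syntax_bridge` changes below), the undo info no longer needs a token map per subtree. As far as this diff shows, a replaced node's synthetic token id is the index of its original subtree, and EMPTY_ID (`!0`, never a valid index) presumably marks synthetic tokens with nothing to restore. An illustrative sketch only, with names assumed:

// Illustrative only (names assumed): restoring an original subtree by its
// synthetic token id; EMPTY_ID can never be a valid index into `original`.
if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
    if id != EMPTY_ID {
        let original: &Subtree = &undo_info.original[id.0 as usize];
        // reverse_fixups (below) replaces the whole leaf with this subtree.
    }
}
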
@@ -25,15 +35,26 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
let mut replace = FxHashMap::default();
let mut preorder = node.preorder();
let mut original = Vec::new();
let mut token_map = TokenMap::default();
let mut next_id = 0;
while let Some(event) = preorder.next() {
let node = match event {
syntax::WalkEvent::Enter(node) => node,
syntax::WalkEvent::Leave(_) => continue,
};
if can_handle_error(&node) && has_error_to_handle(&node) {
// the node contains an error node, we have to completely replace it by something valid
let original_tree = mbe::syntax_node_to_token_tree(&node);
// TODO handle token ids / token map
let (original_tree, new_tmap, new_next_id) =
mbe::syntax_node_to_token_tree_with_modifications(
&node,
mem::take(&mut token_map),
next_id,
Default::default(),
Default::default(),
);
token_map = new_tmap;
next_id = new_next_id;
let idx = original.len() as u32;
original.push(original_tree);
let replacement = SyntheticToken {
@@ -46,6 +67,8 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
preorder.skip_subtree();
continue;
}
// In some other situations, we can fix things by just appending some tokens.
let end_range = TextRange::empty(node.text_range().end());
match_ast! {
match node {
@@ -78,7 +101,13 @@ pub fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
}
}
}
SyntaxFixups { append, replace, map: SyntaxFixupMap { original } }
SyntaxFixups {
append,
replace,
token_map,
next_id,
undo_info: SyntaxFixupUndoInfo { original },
}
}
fn has_error(node: &SyntaxNode) -> bool {
@@ -93,7 +122,7 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool {
has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
}
pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap, fixup_map: &SyntaxFixupMap) {
pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap, undo_info: &SyntaxFixupUndoInfo) {
tt.token_trees.retain(|tt| match tt {
tt::TokenTree::Leaf(leaf) => {
token_map.synthetic_token_id(leaf.id()).is_none()
@@ -102,10 +131,10 @@ pub fn reverse_fixups(tt: &mut Subtree, token_map: &TokenMap, fixup_map: &Syntax
_ => true,
});
tt.token_trees.iter_mut().for_each(|tt| match tt {
tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, fixup_map),
tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
tt::TokenTree::Leaf(leaf) => {
if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
let (original, _original_tmap) = &fixup_map.original[id.0 as usize];
let original = &undo_info.original[id.0 as usize];
*tt = tt::TokenTree::Subtree(original.clone());
}
}
@@ -123,8 +152,10 @@ mod tests {
let parsed = syntax::SourceFile::parse(ra_fixture);
eprintln!("parse: {:#?}", parsed.syntax_node());
let fixups = super::fixup_syntax(&parsed.syntax_node());
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_with_modifications(
let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
&parsed.syntax_node(),
fixups.token_map,
fixups.next_id,
fixups.replace,
fixups.append,
);
@@ -144,7 +175,7 @@ mod tests {
parse.syntax_node()
);
reverse_fixups(&mut tt, &tmap, &fixups.map);
reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
// the fixed-up + reversed version should be equivalent to the original input
// (but token IDs don't matter)
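
One way such an equivalence check could be written (a sketch only, not necessarily the assertion this test actually uses) is to compare textual renderings, since those ignore token ids:

// Sketch of a possible equivalence check: render both subtrees to text so
// that differing token ids do not affect the comparison.
let (original_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
assert_eq!(tt.to_string(), original_tt.to_string());
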

@@ -128,7 +128,7 @@ struct HygieneInfo {
attr_input_or_mac_def_start: Option<InFile<TextSize>>,
macro_def: Arc<TokenExpander>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
macro_arg_shift: mbe::Shift,
exp_map: Arc<mbe::TokenMap>,
}

@@ -427,7 +427,7 @@ pub struct ExpansionInfo {
attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
macro_def: Arc<TokenExpander>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupMap)>,
macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
/// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
/// and as such we need to shift tokens if they are part of an attributes input instead of their item.
macro_arg_shift: mbe::Shift,

@@ -15,23 +15,32 @@ use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
/// Convert the syntax node to a `TokenTree` (what macro
/// will consume).
pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
syntax_node_to_token_tree_with_modifications(node, Default::default(), Default::default())
let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
node,
Default::default(),
0,
Default::default(),
Default::default(),
);
(subtree, token_map)
}
/// Convert the syntax node to a `TokenTree` (what macro will consume)
/// with the censored range excluded.
pub fn syntax_node_to_token_tree_with_modifications(
node: &SyntaxNode,
existing_token_map: TokenMap,
next_id: u32,
replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
) -> (tt::Subtree, TokenMap) {
) -> (tt::Subtree, TokenMap, u32) {
let global_offset = node.text_range().start();
let mut c = Convertor::new(node, global_offset, replace, append);
let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
let subtree = convert_tokens(&mut c);
c.id_alloc.map.shrink_to_fit();
always!(c.replace.is_empty(), "replace: {:?}", c.replace);
always!(c.append.is_empty(), "append: {:?}", c.append);
(subtree, c.id_alloc.map)
(subtree, c.id_alloc.map, c.id_alloc.next_id)
}
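
The extra `u32` in the return value is the next free token id, which is what allows several conversions to share one id space. A minimal sketch of that usage, with the input nodes assumed (this is the mechanism `fixup_syntax` relies on when converting replaced error subtrees):

// Minimal sketch: two conversions sharing one token map and id counter.
let (first_tt, map_after_first, next_id) = syntax_node_to_token_tree_with_modifications(
    &first_node,
    TokenMap::default(),
    0,
    Default::default(),
    Default::default(),
);
let (second_tt, map_after_both, _next_id) = syntax_node_to_token_tree_with_modifications(
    &second_node,
    map_after_first, // continue the same map...
    next_id,         // ...and keep allocating ids where the first call stopped
    Default::default(),
    Default::default(),
);
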
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -510,6 +519,8 @@ impl Convertor {
fn new(
node: &SyntaxNode,
global_offset: TextSize,
existing_token_map: TokenMap,
next_id: u32,
mut replace: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
mut append: FxHashMap<SyntaxNode, Vec<SyntheticToken>>,
) -> Convertor {
@@ -517,7 +528,7 @@ impl Convertor {
let mut preorder = node.preorder_with_tokens();
let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
Convertor {
id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
current: first,
current_synthetic: synthetic,
preorder,