Combine all token sources into one and refactor

This commit is contained in:
Edwin Cheng 2019-04-08 00:12:07 +08:00
parent 7f1e93a3c9
commit a7254201df
5 changed files with 94 additions and 86 deletions

View File

@@ -21,6 +21,7 @@ mod mbe_expander;
mod syntax_bridge;
mod tt_cursor;
mod subtree_source;
mod subtree_parser;
use ra_syntax::SmolStr;

View File

@@ -0,0 +1,59 @@
use crate::subtree_source::SubtreeTokenSource;
use ra_parser::{TokenSource, TreeSink};
use ra_syntax::{SyntaxKind};
struct OffsetTokenSink {
token_pos: usize,
}
impl TreeSink for OffsetTokenSink {
fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
self.token_pos += n_tokens as usize;
}
fn start_node(&mut self, _kind: SyntaxKind) {}
fn finish_node(&mut self) {}
fn error(&mut self, _error: ra_parser::ParseError) {}
}
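`OffsetTokenSink` implements `TreeSink` only to count how many flattened tokens the parser consumed; node boundaries and errors are ignored. A minimal illustrative sketch within this module, assuming the imports above (the `SyntaxKind` values are arbitrary):
    // Only `token` calls move the counter; the structure callbacks are no-ops.
    let mut sink = OffsetTokenSink { token_pos: 0 };
    sink.start_node(SyntaxKind::PATH);
    sink.token(SyntaxKind::IDENT, 1);
    sink.token(SyntaxKind::WHITESPACE, 1);
    sink.token(SyntaxKind::IDENT, 1);
    sink.finish_node();
    assert_eq!(sink.token_pos, 3);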
pub(crate) struct Parser<'a> {
subtree: &'a tt::Subtree,
pos: &'a mut usize,
}
impl<'a> Parser<'a> {
pub fn new(pos: &'a mut usize, subtree: &'a tt::Subtree) -> Parser<'a> {
Parser { pos, subtree }
}
pub fn parse_path(self) -> Option<tt::TokenTree> {
self.parse(ra_parser::parse_path)
}
fn parse<F>(self, f: F) -> Option<tt::TokenTree>
where
F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
{
let mut src = SubtreeTokenSource::new(self.subtree);
src.advance(*self.pos, true);
let mut sink = OffsetTokenSink { token_pos: 0 };
f(&src, &mut sink);
self.finish(sink.token_pos, &mut src)
}
fn finish(self, parsed_token: usize, src: &mut SubtreeTokenSource) -> Option<tt::TokenTree> {
let res = src.bump_n(parsed_token, self.pos);
let res: Vec<_> = res.into_iter().cloned().collect();
match res.len() {
0 => None,
1 => Some(res[0].clone()),
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter::None,
token_trees: res,
})),
}
}
}
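`Parser` borrows the caller's cursor position mutably, so a successful parse leaves `*pos` pointing just past the consumed token trees. A minimal usage sketch, assuming a `tt::Subtree` named `subtree` built elsewhere (this mirrors `TtCursor::eat_path` further down):
    let mut pos = 0usize;
    let parser = Parser::new(&mut pos, &subtree);
    if let Some(path) = parser.parse_path() {
        // `path` is one token tree (or a delimiter-less subtree when the
        // parser consumed several), and `pos` now indexes the next
        // unconsumed entry of `subtree.token_trees`.
        let _ = path;
    }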

View File

@@ -9,12 +9,12 @@ struct TtToken {
pub n_tokens: usize,
}
/// SubtreeSourceQuerier lets outside code query internal tokens as strings
pub(crate) struct SubtreeSourceQuerier<'a> {
/// Querier lets outside code query internal tokens as strings
pub(crate) struct Querier<'a> {
src: &'a SubtreeTokenSource<'a>,
}
impl<'a> SubtreeSourceQuerier<'a> {
impl<'a> Querier<'a> {
pub(crate) fn token(&self, uidx: usize) -> (SyntaxKind, &SmolStr) {
let tkn = &self.src.tokens[uidx];
(tkn.kind, &tkn.text)
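A minimal sketch of how the renamed `Querier` is driven (the real consumer is `TtTreeSink` in syntax_bridge.rs below); a `subtree: &tt::Subtree` is assumed to exist:
    let src = SubtreeTokenSource::new(subtree);
    let querier = src.querier();
    // Kind and text of the first flattened token of the subtree.
    let (kind, text) = querier.token(0);
    let _ = (kind, text);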
@@ -32,7 +32,8 @@ impl<'a> SubtreeTokenSource<'a> {
SubtreeTokenSource { tokens: TtTokenBuilder::build(subtree), tt_pos: 0, subtree }
}
pub fn advance(&mut self, curr: usize, skip_first_delimiter: bool) {
// Advance the token source by `n_token` tokens, skipping the first delimiter if requested
pub fn advance(&mut self, n_token: usize, skip_first_delimiter: bool) {
if skip_first_delimiter {
self.tt_pos += 1;
}
@@ -47,32 +48,20 @@ impl<'a> SubtreeTokenSource<'a> {
// Such that we cannot simply advance the cursor
// We have to bump it one by one
let mut pos = 0;
while pos < curr {
while pos < n_token {
pos += self.bump(&self.subtree.token_trees[pos]);
}
}
pub fn querier(&self) -> SubtreeSourceQuerier {
SubtreeSourceQuerier { src: self }
}
fn count(&self, tt: &tt::TokenTree) -> usize {
assert!(!self.tokens.is_empty());
TtTokenBuilder::count_tt_tokens(tt, None)
}
pub(crate) fn bump(&mut self, tt: &tt::TokenTree) -> usize {
let cur = &self.tokens[self.tt_pos];
let n_tokens = cur.n_tokens;
self.tt_pos += self.count(tt);
n_tokens
pub fn querier(&self) -> Querier {
Querier { src: self }
}
pub(crate) fn bump_n(
&mut self,
n_tokens: usize,
mut token_pos: usize,
) -> (usize, Vec<&tt::TokenTree>) {
n_tt_tokens: usize,
token_pos: &mut usize,
) -> Vec<&tt::TokenTree> {
let mut res = vec![];
// Match the `TtToken` cursor to the `tt::TokenTree` cursor,
// because `TtToken` is not a one-to-one mapping to `tt::TokenTree`
@@ -83,17 +72,28 @@ impl<'a> SubtreeTokenSource<'a> {
//
// Such that we cannot simply advance the cursor
// We have to bump it one by one
let next_pos = self.tt_pos + n_tokens;
let old_token_pos = token_pos;
let next_pos = self.tt_pos + n_tt_tokens;
while self.tt_pos < next_pos {
let current = &self.subtree.token_trees[token_pos];
let current = &self.subtree.token_trees[*token_pos];
let n = self.bump(current);
res.extend((0..n).map(|i| &self.subtree.token_trees[token_pos + i]));
token_pos += n;
res.extend((0..n).map(|i| &self.subtree.token_trees[*token_pos + i]));
*token_pos += n;
}
(token_pos - old_token_pos, res)
res
}
fn count(&self, tt: &tt::TokenTree) -> usize {
assert!(!self.tokens.is_empty());
TtTokenBuilder::count_tt_tokens(tt, None)
}
fn bump(&mut self, tt: &tt::TokenTree) -> usize {
let cur = &self.tokens[self.tt_pos];
let n_tokens = cur.n_tokens;
self.tt_pos += self.count(tt);
n_tokens
}
}
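Under the new contract, `bump_n` advances the source's internal `TtToken` cursor and the caller-supplied `tt::TokenTree` position together, and returns the consumed token trees directly instead of an `(advance, result)` pair. A minimal sketch of the intended call pattern, with hypothetical variable names (`subtree: &tt::Subtree` and the parsed count come from the caller, as in `Parser::finish` above):
    let mut src = SubtreeTokenSource::new(subtree);
    let mut token_pos = 0usize;
    let n_parsed = 3; // hypothetical: the TtToken count reported by the parser's sink
    let consumed: Vec<&tt::TokenTree> = src.bump_n(n_parsed, &mut token_pos);
    // `consumed` holds the underlying tt::TokenTrees; `token_pos` now
    // indexes the first unconsumed entry of `subtree.token_trees`.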

View File

@@ -4,7 +4,7 @@ use ra_syntax::{
ast, SyntaxKind::*, TextUnit
};
use crate::subtree_source::{SubtreeTokenSource, SubtreeSourceQuerier};
use crate::subtree_source::{SubtreeTokenSource, Querier};
/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Default)]
@@ -107,14 +107,14 @@ fn convert_tt(
struct TtTreeSink<'a> {
buf: String,
src_querier: SubtreeSourceQuerier<'a>,
src_querier: Querier<'a>,
text_pos: TextUnit,
token_pos: usize,
inner: SyntaxTreeBuilder,
}
impl<'a> TtTreeSink<'a> {
fn new(src_querier: SubtreeSourceQuerier<'a>) -> TtTreeSink {
fn new(src_querier: Querier<'a>) -> TtTreeSink {
TtTreeSink {
buf: String::new(),
src_querier,

View File

@@ -1,25 +1,5 @@
use crate::ParseError;
use crate::subtree_source::SubtreeTokenSource;
use ra_parser::{TokenSource, TreeSink};
use ra_syntax::{
SyntaxKind
};
struct SubtreeTokenSink {
token_pos: usize,
}
impl TreeSink for SubtreeTokenSink {
fn token(&mut self, _kind: SyntaxKind, n_tokens: u8) {
self.token_pos += n_tokens as usize;
}
fn start_node(&mut self, _kind: SyntaxKind) {}
fn finish_node(&mut self) {}
fn error(&mut self, _error: ra_parser::ParseError) {}
}
use crate::subtree_parser::Parser;
#[derive(Clone)]
pub(crate) struct TtCursor<'a> {
@@ -99,41 +79,9 @@ impl<'a> TtCursor<'a> {
})
}
fn eat_parse_result(
&mut self,
parsed_token: usize,
src: &mut SubtreeTokenSource,
) -> Option<tt::TokenTree> {
let (adv, res) = src.bump_n(parsed_token, self.pos);
self.pos += adv;
let res: Vec<_> = res.into_iter().cloned().collect();
match res.len() {
0 => None,
1 => Some(res[0].clone()),
_ => Some(tt::TokenTree::Subtree(tt::Subtree {
delimiter: tt::Delimiter::None,
token_trees: res,
})),
}
}
fn eat_parse<F>(&mut self, f: F) -> Option<tt::TokenTree>
where
F: FnOnce(&dyn TokenSource, &mut dyn TreeSink),
{
let mut src = SubtreeTokenSource::new(self.subtree);
src.advance(self.pos, true);
let mut sink = SubtreeTokenSink { token_pos: 0 };
f(&src, &mut sink);
self.eat_parse_result(sink.token_pos, &mut src)
}
pub(crate) fn eat_path(&mut self) -> Option<tt::TokenTree> {
self.eat_parse(ra_parser::parse_path)
let parser = Parser::new(&mut self.pos, self.subtree);
parser.parse_path()
}
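With this change, the counting-sink and `bump_n` bookkeeping that used to live in `TtCursor` is owned by `subtree_parser::Parser`; `eat_path` only lends out `&mut self.pos`, so the cursor ends up positioned just past whatever the parser consumed. A small usage sketch, assuming the existing `TtCursor::new` constructor and a `tt::Subtree` named `subtree`:
    let mut cursor = TtCursor::new(&subtree);
    // Returns Some(token tree) when the cursor currently sits on a path.
    let path = cursor.eat_path();
    let _ = path;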
pub(crate) fn expect_char(&mut self, char: char) -> Result<(), ParseError> {