From d833ded3b4a12280f3e63e182ca7268a17cff90d Mon Sep 17 00:00:00 2001 From: Edwin Cheng Date: Mon, 27 May 2019 22:56:21 +0800 Subject: Remove Queier and SubtreeWalk --- crates/ra_mbe/src/subtree_source.rs | 86 +++++++++++----------------- crates/ra_mbe/src/syntax_bridge.rs | 110 +++++++++++++++++++++++------------- 2 files changed, 104 insertions(+), 92 deletions(-) (limited to 'crates') diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index c4f79f38a..2ba0b1601 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs @@ -1,13 +1,8 @@ use ra_parser::{TokenSource, Token}; use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; use std::cell::{RefCell, Cell}; -use std::sync::Arc; use tt::buffer::{TokenBuffer, Cursor}; -pub(crate) trait Querier { - fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool); - } - #[derive(Debug, Clone, Eq, PartialEq)] struct TtToken { pub kind: SyntaxKind, @@ -15,20 +10,47 @@ struct TtToken { pub text: SmolStr, } -// A wrapper class for ref cell -#[derive(Debug)] -pub(crate) struct SubtreeWalk<'a> { +pub(crate) struct SubtreeTokenSource<'a> { start: Cursor<'a>, cursor: Cell<Cursor<'a>>, cached: RefCell<Vec<Option<TtToken>>>, + curr: (Token, usize), +} + +impl<'a> SubtreeTokenSource<'a> { + // Helper function used in test + #[allow(unused)] + pub fn text(&self) -> SmolStr { + match self.get(self.curr.1) { + Some(tt) => tt.text, + _ => SmolStr::new(""), + } + } } -impl<'a> SubtreeWalk<'a> { - fn new(cursor: Cursor<'a>) -> Self { - SubtreeWalk { +impl<'a> SubtreeTokenSource<'a> { + pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { + let cursor = buffer.begin(); + + let mut res = SubtreeTokenSource { + curr: (Token { kind: EOF, is_jointed_to_next: false }, 0), start: cursor, cursor: Cell::new(cursor), cached: RefCell::new(Vec::with_capacity(10)), + }; + res.curr = (res.mk_token(0), 0); + res + } + + pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> { + 
let res = self.collect_token_trees(parsed_tokens); + res + } + + fn mk_token(&self, pos: usize) -> Token { + match self.get(pos) { + Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next }, + None => Token { kind: EOF, is_jointed_to_next: false }, } } @@ -109,46 +131,6 @@ impl<'a> SubtreeWalk<'a> { } } -impl<'a> Querier for SubtreeWalk<'a> { - fn token(&self, uidx: usize) -> (SyntaxKind, SmolStr, bool) { - self.get(uidx) - .map(|tkn| (tkn.kind, tkn.text, tkn.is_joint_to_next)) - .unwrap_or_else(|| (SyntaxKind::EOF, "".into(), false)) - } -} - -pub(crate) struct SubtreeTokenSource<'a> { - walker: Arc<SubtreeWalk<'a>>, - curr: (Token, usize), -} - -impl<'a> SubtreeTokenSource<'a> { - pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { - let mut res = SubtreeTokenSource { - walker: Arc::new(SubtreeWalk::new(buffer.begin())), - curr: (Token { kind: EOF, is_jointed_to_next: false }, 0), - }; - res.curr = (res.mk_token(0), 0); - res - } - - pub fn querier(&self) -> Arc<SubtreeWalk<'a>> { - self.walker.clone() - } - - pub(crate) fn bump_n(&mut self, parsed_tokens: usize) -> Vec<tt::TokenTree> { - let res = self.walker.collect_token_trees(parsed_tokens); - res - } - - fn mk_token(&self, pos: usize) -> Token { - match self.walker.get(pos) { - Some(tt) => Token { kind: tt.kind, is_jointed_to_next: tt.is_joint_to_next }, - None => Token { kind: EOF, is_jointed_to_next: false }, - } - } -} - impl<'a> TokenSource for SubtreeTokenSource<'a> { fn current(&self) -> Token { self.curr.0 @@ -170,7 +152,7 @@ impl<'a> TokenSource for SubtreeTokenSource<'a> { /// Is the current token a specified keyword? 
fn is_keyword(&self, kw: &str) -> bool { - match self.walker.get(self.curr.1) { + match self.get(self.curr.1) { Some(t) => t.text == *kw, _ => false, } diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 0a7e50c4e..058f65ffd 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs @@ -3,8 +3,9 @@ use ra_syntax::{ AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, ast, SyntaxKind::*, TextUnit, T }; +use tt::buffer::Cursor; -use crate::subtree_source::{SubtreeTokenSource, Querier}; +use crate::subtree_source::{SubtreeTokenSource}; use crate::ExpandError; /// Maps `tt::TokenId` to the relative range of the original token. @@ -51,8 +52,7 @@ where { let buffer = tt::buffer::TokenBuffer::new(&[tt.clone().into()]); let mut token_source = SubtreeTokenSource::new(&buffer); - let querier = token_source.querier(); - let mut tree_sink = TtTreeSink::new(querier.as_ref()); + let mut tree_sink = TtTreeSink::new(buffer.begin()); f(&mut token_source, &mut tree_sink); if tree_sink.roots.len() != 1 { return Err(ExpandError::ConversionError); @@ -259,11 +259,10 @@ fn convert_tt( Some(res) } -struct TtTreeSink<'a, Q: Querier> { +struct TtTreeSink<'a> { buf: String, - src_querier: &'a Q, + cursor: Cursor<'a>, text_pos: TextUnit, - token_pos: usize, inner: SyntaxTreeBuilder, // Number of roots @@ -271,52 +270,79 @@ struct TtTreeSink<'a, Q: Querier> { roots: smallvec::SmallVec<[usize; 1]>, } -impl<'a, Q: Querier> TtTreeSink<'a, Q> { - fn new(src_querier: &'a Q) -> Self { +impl<'a> TtTreeSink<'a> { + fn new(cursor: Cursor<'a>) -> Self { TtTreeSink { buf: String::new(), - src_querier, + cursor, text_pos: 0.into(), - token_pos: 0, inner: SyntaxTreeBuilder::default(), roots: smallvec::SmallVec::new(), } } } -fn is_delimiter(kind: SyntaxKind) -> bool { - match kind { - T!['('] | T!['['] | T!['{'] | T![')'] | T![']'] | T!['}'] => true, - _ => false, - } +fn delim_to_str(d: 
tt::Delimiter, closing: bool) -> SmolStr { + let texts = match d { + tt::Delimiter::Parenthesis => "()", + tt::Delimiter::Brace => "{}", + tt::Delimiter::Bracket => "[]", + tt::Delimiter::None => "", + }; + + let idx = closing as usize; + let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" }; + text.into() } -impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { +impl<'a> TreeSink for TtTreeSink<'a> { fn token(&mut self, kind: SyntaxKind, n_tokens: u8) { if kind == L_DOLLAR || kind == R_DOLLAR { - self.token_pos += n_tokens as usize; + if let Some(_) = self.cursor.end() { + self.cursor = self.cursor.bump(); + } else { + self.cursor = self.cursor.subtree().unwrap(); + } return; } for _ in 0..n_tokens { - self.buf += &self.src_querier.token(self.token_pos).1; - self.token_pos += 1; + if self.cursor.eof() { + break; + } + + match self.cursor.token_tree() { + Some(tt::TokenTree::Leaf(leaf)) => { + self.cursor = self.cursor.bump(); + self.buf += &format!("{}", leaf); + } + Some(tt::TokenTree::Subtree(subtree)) => { + self.cursor = self.cursor.subtree().unwrap(); + self.buf += &delim_to_str(subtree.delimiter, false); + } + None => { + if let Some(parent) = self.cursor.end() { + self.cursor = self.cursor.bump(); + self.buf += &delim_to_str(parent.delimiter, true); + } + } + }; } + self.text_pos += TextUnit::of_str(&self.buf); let text = SmolStr::new(self.buf.as_str()); self.buf.clear(); self.inner.token(kind, text); - // Add a white space between tokens, only if both are not delimiters - if !is_delimiter(kind) { - let (last_kind, _, last_joint_to_next) = self.src_querier.token(self.token_pos - 1); - if !last_joint_to_next && last_kind.is_punct() { - let (cur_kind, _, _) = self.src_querier.token(self.token_pos); - if !is_delimiter(cur_kind) { - if cur_kind.is_punct() { - self.inner.token(WHITESPACE, " ".into()); - } - } + // Add whitespace between adjoint puncts + let next = self.cursor.bump(); + if let ( + 
Some(tt::TokenTree::Leaf(tt::Leaf::Punct(curr))), + Some(tt::TokenTree::Leaf(tt::Leaf::Punct(_))), + ) = (self.cursor.token_tree(), next.token_tree()) + { + if curr.spacing == tt::Spacing::Alone { + self.inner.token(WHITESPACE, " ".into()); } } } @@ -344,6 +370,7 @@ impl<'a, Q: Querier> TreeSink for TtTreeSink<'a, Q> { mod tests { use super::*; use crate::tests::{expand, create_rules}; + use ra_parser::TokenSource; #[test] fn convert_tt_token_source() { @@ -363,24 +390,27 @@ mod tests { ); let expansion = expand(&rules, "literals!(foo)"); let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]); - let tt_src = SubtreeTokenSource::new(&buffer); - - let query = tt_src.querier(); + let mut tt_src = SubtreeTokenSource::new(&buffer); + let mut tokens = vec![]; + while tt_src.current().kind != EOF { + tokens.push((tt_src.current().kind, tt_src.text())); + tt_src.bump(); + } // [${] // [let] [a] [=] ['c'] [;] - assert_eq!(query.token(2 + 3).1, "'c'"); - assert_eq!(query.token(2 + 3).0, CHAR); + assert_eq!(tokens[2 + 3].1, "'c'"); + assert_eq!(tokens[2 + 3].0, CHAR); // [let] [c] [=] [1000] [;] - assert_eq!(query.token(2 + 5 + 3).1, "1000"); - assert_eq!(query.token(2 + 5 + 3).0, INT_NUMBER); + assert_eq!(tokens[2 + 5 + 3].1, "1000"); + assert_eq!(tokens[2 + 5 + 3].0, INT_NUMBER); // [let] [f] [=] [12E+99_f64] [;] - assert_eq!(query.token(2 + 10 + 3).1, "12E+99_f64"); - assert_eq!(query.token(2 + 10 + 3).0, FLOAT_NUMBER); + assert_eq!(tokens[2 + 10 + 3].1, "12E+99_f64"); + assert_eq!(tokens[2 + 10 + 3].0, FLOAT_NUMBER); // [let] [s] [=] ["rust1"] [;] - assert_eq!(query.token(2 + 15 + 3).1, "\"rust1\""); - assert_eq!(query.token(2 + 15 + 3).0, STRING); + assert_eq!(tokens[2 + 15 + 3].1, "\"rust1\""); + assert_eq!(tokens[2 + 15 + 3].0, STRING); } #[test] -- cgit v1.2.3