From 27bf62b70eeb6f4cb620be5630c4c4506be3539f Mon Sep 17 00:00:00 2001
From: Jonas Schievink
Date: Mon, 24 May 2021 18:43:42 +0200
Subject: Move `TokenMap` to its own file

---
 crates/mbe/src/lib.rs           |  4 +-
 crates/mbe/src/syntax_bridge.rs | 83 ++---------------------------------------
 crates/mbe/src/token_map.rs     | 83 +++++++++++++++++++++++++++++++++++++++++
 3 files changed, 89 insertions(+), 81 deletions(-)
 create mode 100644 crates/mbe/src/token_map.rs

(limited to 'crates/mbe/src')

diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3af5bc18b..b7aa64713 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -14,6 +14,7 @@ mod tests;
 
 #[cfg(test)]
 mod benchmark;
+mod token_map;
 
 use std::fmt;
 
@@ -65,8 +66,9 @@ impl fmt::Display for ExpandError {
 
 pub use crate::syntax_bridge::{
     ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree,
-    token_tree_to_syntax_node, TokenMap,
+    token_tree_to_syntax_node,
 };
+pub use crate::token_map::TokenMap;
 
 /// This struct contains AST for a single `macro_rules` definition. What might
 /// be very confusing is that AST has almost exactly the same shape as
diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs
index b13168bd3..b11172caf 100644
--- a/crates/mbe/src/syntax_bridge.rs
+++ b/crates/mbe/src/syntax_bridge.rs
@@ -10,36 +10,8 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::ExpandError;
 use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter};
-
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum TokenTextRange {
-    Token(TextRange),
-    Delimiter(TextRange),
-}
-
-impl TokenTextRange {
-    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
-        match self {
-            TokenTextRange::Token(it) => Some(it),
-            TokenTextRange::Delimiter(it) => match kind {
-                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
-                T!['}'] | T![')'] | T![']'] => {
-                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
-                }
-                _ => None,
-            },
-        }
-    }
-}
-
-/// Maps `tt::TokenId` to the relative range of the original token.
-#[derive(Debug, PartialEq, Eq, Clone, Default)]
-pub struct TokenMap {
-    /// Maps `tt::TokenId` to the *relative* source range.
-    entries: Vec<(tt::TokenId, TokenTextRange)>,
-}
+use crate::{ExpandError, TokenMap};
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
 /// will consume).
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
     let global_offset = node.text_range().start();
     let mut c = Convertor::new(node, global_offset);
     let subtree = c.go();
-    c.id_alloc.map.entries.shrink_to_fit();
+    c.id_alloc.map.shrink_to_fit();
     (subtree, c.id_alloc.map)
 }
 
@@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     res
 }
 
-impl TokenMap {
-    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
-        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
-            TokenTextRange::Token(it) => *it == relative_range,
-            TokenTextRange::Delimiter(it) => {
-                let open = TextRange::at(it.start(), 1.into());
-                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
-                open == relative_range || close == relative_range
-            }
-        })?;
-        Some(token_id)
-    }
-
-    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
-        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
-        Some(range)
-    }
-
-    fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
-        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
-    }
-
-    fn insert_delim(
-        &mut self,
-        token_id: tt::TokenId,
-        open_relative_range: TextRange,
-        close_relative_range: TextRange,
-    ) -> usize {
-        let res = self.entries.len();
-        let cover = open_relative_range.cover(close_relative_range);
-
-        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
-        res
-    }
-
-    fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
-        let (_, token_text_range) = &mut self.entries[idx];
-        if let TokenTextRange::Delimiter(dim) = token_text_range {
-            let cover = dim.cover(close_relative_range);
-            *token_text_range = TokenTextRange::Delimiter(cover);
-        }
-    }
-
-    fn remove_delim(&mut self, idx: usize) {
-        // FIXME: This could be accidentally quadratic
-        self.entries.remove(idx);
-    }
-}
-
 /// Returns the textual content of a doc comment block as a quoted string
 /// That is, strips leading `///` (or `/**`, etc)
 /// and strips the ending `*/`
@@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> {
     }
 
     fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
-        self.token_map.entries.shrink_to_fit();
+        self.token_map.shrink_to_fit();
         (self.inner.finish(), self.token_map)
     }
 }
diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs
new file mode 100644
index 000000000..58c9f5aa5
--- /dev/null
+++ b/crates/mbe/src/token_map.rs
@@ -0,0 +1,83 @@
+use parser::{SyntaxKind, T};
+use syntax::{TextRange, TextSize};
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum TokenTextRange {
+    Token(TextRange),
+    Delimiter(TextRange),
+}
+
+impl TokenTextRange {
+    pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> {
+        match self {
+            TokenTextRange::Token(it) => Some(it),
+            TokenTextRange::Delimiter(it) => match kind {
+                T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())),
+                T!['}'] | T![')'] | T![']'] => {
+                    Some(TextRange::at(it.end() - TextSize::of('}'), 1.into()))
+                }
+                _ => None,
+            },
+        }
+    }
+}
+
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Debug, PartialEq, Eq, Clone, Default)]
+pub struct TokenMap {
+    /// Maps `tt::TokenId` to the *relative* source range.
+    entries: Vec<(tt::TokenId, TokenTextRange)>,
+}
+
+impl TokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
+            TokenTextRange::Token(it) => *it == relative_range,
+            TokenTextRange::Delimiter(it) => {
+                let open = TextRange::at(it.start(), 1.into());
+                let close = TextRange::at(it.end() - TextSize::of('}'), 1.into());
+                open == relative_range || close == relative_range
+            }
+        })?;
+        Some(token_id)
+    }
+
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> {
+        let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?;
+        Some(range)
+    }
+
+    pub(crate) fn shrink_to_fit(&mut self) {
+        self.entries.shrink_to_fit();
+    }
+
+    pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) {
+        self.entries.push((token_id, TokenTextRange::Token(relative_range)));
+    }
+
+    pub(crate) fn insert_delim(
+        &mut self,
+        token_id: tt::TokenId,
+        open_relative_range: TextRange,
+        close_relative_range: TextRange,
+    ) -> usize {
+        let res = self.entries.len();
+        let cover = open_relative_range.cover(close_relative_range);
+
+        self.entries.push((token_id, TokenTextRange::Delimiter(cover)));
+        res
+    }
+
+    pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) {
+        let (_, token_text_range) = &mut self.entries[idx];
+        if let TokenTextRange::Delimiter(dim) = token_text_range {
+            let cover = dim.cover(close_relative_range);
+            *token_text_range = TokenTextRange::Delimiter(cover);
+        }
+    }
+
+    pub(crate) fn remove_delim(&mut self, idx: usize) {
+        // FIXME: This could be accidentally quadratic
+        self.entries.remove(idx);
+    }
+}
--
cgit v1.2.3
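For readers skimming the patch, the following standalone sketch illustrates the bidirectional id-to-range lookup that the moved `TokenMap` exposes through `token_by_range` and `range_by_token`. It is not part of the patch or of the `mbe` crate: `SimpleTokenMap`, `TokenId`, and `Range` are hypothetical stand-ins for `TokenMap`, `tt::TokenId`, and `syntax::TextRange`, kept dependency-free so the example compiles on its own.

// Standalone sketch (hypothetical names, not rust-analyzer code): a simplified
// token map that mimics what `TokenMap` does — associate a numeric token id
// with the relative text range it came from, so results of macro expansion can
// be mapped back to the original source.

/// Hypothetical stand-in for `tt::TokenId`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TokenId(u32);

/// Hypothetical stand-in for `syntax::TextRange` (start..end byte offsets).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Range {
    start: u32,
    end: u32,
}

/// Simplified analogue of `TokenMap`: a flat list of (id, range) pairs,
/// searched linearly in both directions, like the real implementation.
#[derive(Default)]
struct SimpleTokenMap {
    entries: Vec<(TokenId, Range)>,
}

impl SimpleTokenMap {
    /// Record that `token_id` originated from `relative_range`.
    fn insert(&mut self, token_id: TokenId, relative_range: Range) {
        self.entries.push((token_id, relative_range));
    }

    /// Analogue of `TokenMap::token_by_range`: source range -> token id.
    fn token_by_range(&self, relative_range: Range) -> Option<TokenId> {
        self.entries.iter().find(|&&(_, r)| r == relative_range).map(|&(id, _)| id)
    }

    /// Analogue of `TokenMap::range_by_token`: token id -> source range.
    fn range_by_token(&self, token_id: TokenId) -> Option<Range> {
        self.entries.iter().find(|&&(id, _)| id == token_id).map(|&(_, r)| r)
    }
}

fn main() {
    let mut map = SimpleTokenMap::default();
    // Pretend token id 0 covers bytes 0..3 and id 1 covers bytes 4..7.
    map.insert(TokenId(0), Range { start: 0, end: 3 });
    map.insert(TokenId(1), Range { start: 4, end: 7 });

    assert_eq!(map.token_by_range(Range { start: 4, end: 7 }), Some(TokenId(1)));
    assert_eq!(map.range_by_token(TokenId(0)), Some(Range { start: 0, end: 3 }));
    println!("round-trip lookups work");
}

The real type additionally records delimiter pairs (`TokenTextRange::Delimiter` covers both braces of a subtree), which is why `TokenTextRange::by_kind` narrows a delimiter range to just its opening or closing punctuation depending on the requested `SyntaxKind`.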