From 27bf62b70eeb6f4cb620be5630c4c4506be3539f Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 24 May 2021 18:43:42 +0200 Subject: Move `TokenMap` to its own file --- crates/mbe/src/lib.rs | 4 +- crates/mbe/src/syntax_bridge.rs | 83 ++--------------------------------------- crates/mbe/src/token_map.rs | 83 +++++++++++++++++++++++++++++++++++++++++ 3 files changed, 89 insertions(+), 81 deletions(-) create mode 100644 crates/mbe/src/token_map.rs (limited to 'crates') diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 3af5bc18b..b7aa64713 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -14,6 +14,7 @@ mod tests; #[cfg(test)] mod benchmark; +mod token_map; use std::fmt; @@ -65,8 +66,9 @@ impl fmt::Display for ExpandError { pub use crate::syntax_bridge::{ ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, - token_tree_to_syntax_node, TokenMap, + token_tree_to_syntax_node, }; +pub use crate::token_map::TokenMap; /// This struct contains AST for a single `macro_rules` definition. 
What might /// be very confusing is that AST has almost exactly the same shape as diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index b13168bd3..b11172caf 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -10,36 +10,8 @@ use syntax::{ }; use tt::buffer::{Cursor, TokenBuffer}; -use crate::ExpandError; use crate::{subtree_source::SubtreeTokenSource, tt_iter::TtIter}; - -#[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum TokenTextRange { - Token(TextRange), - Delimiter(TextRange), -} - -impl TokenTextRange { - pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { - match self { - TokenTextRange::Token(it) => Some(it), - TokenTextRange::Delimiter(it) => match kind { - T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), - T!['}'] | T![')'] | T![']'] => { - Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) - } - _ => None, - }, - } - } -} - -/// Maps `tt::TokenId` to the relative range of the original token. -#[derive(Debug, PartialEq, Eq, Clone, Default)] -pub struct TokenMap { - /// Maps `tt::TokenId` to the *relative* source range. - entries: Vec<(tt::TokenId, TokenTextRange)>, -} +use crate::{ExpandError, TokenMap}; /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro /// will consume).
@@ -53,7 +25,7 @@ pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) { let global_offset = node.text_range().start(); let mut c = Convertor::new(node, global_offset); let subtree = c.go(); - c.id_alloc.map.entries.shrink_to_fit(); + c.id_alloc.map.shrink_to_fit(); (subtree, c.id_alloc.map) } @@ -149,55 +121,6 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> { res } -impl TokenMap { - pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { - let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { - TokenTextRange::Token(it) => *it == relative_range, - TokenTextRange::Delimiter(it) => { - let open = TextRange::at(it.start(), 1.into()); - let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); - open == relative_range || close == relative_range - } - })?; - Some(token_id) - } - - pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> { - let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; - Some(range) - } - - fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { - self.entries.push((token_id, TokenTextRange::Token(relative_range))); - } - - fn insert_delim( - &mut self, - token_id: tt::TokenId, - open_relative_range: TextRange, - close_relative_range: TextRange, - ) -> usize { - let res = self.entries.len(); - let cover = open_relative_range.cover(close_relative_range); - - self.entries.push((token_id, TokenTextRange::Delimiter(cover))); - res - } - - fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { - let (_, token_text_range) = &mut self.entries[idx]; - if let TokenTextRange::Delimiter(dim) = token_text_range { - let cover = dim.cover(close_relative_range); - *token_text_range = TokenTextRange::Delimiter(cover); - } - } - - fn remove_delim(&mut self, idx: usize) { - // FIXME: This could be accidentally quadratic - self.entries.remove(idx); - } -} - /// Returns the textual content of a doc comment block
as a quoted string /// That is, strips leading `///` (or `/**`, etc) /// and strips the ending `*/` @@ -634,7 +557,7 @@ impl<'a> TtTreeSink<'a> { } fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) { - self.token_map.entries.shrink_to_fit(); + self.token_map.shrink_to_fit(); (self.inner.finish(), self.token_map) } } diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs new file mode 100644 index 000000000..58c9f5aa5 --- /dev/null +++ b/crates/mbe/src/token_map.rs @@ -0,0 +1,83 @@ +use parser::{SyntaxKind, T}; +use syntax::{TextRange, TextSize}; + +#[derive(Debug, PartialEq, Eq, Clone, Copy)] +pub enum TokenTextRange { + Token(TextRange), + Delimiter(TextRange), +} + +impl TokenTextRange { + pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { + match self { + TokenTextRange::Token(it) => Some(it), + TokenTextRange::Delimiter(it) => match kind { + T!['{'] | T!['('] | T!['['] => Some(TextRange::at(it.start(), 1.into())), + T!['}'] | T![')'] | T![']'] => { + Some(TextRange::at(it.end() - TextSize::of('}'), 1.into())) + } + _ => None, + }, + } + } +} + +/// Maps `tt::TokenId` to the relative range of the original token. +#[derive(Debug, PartialEq, Eq, Clone, Default)] +pub struct TokenMap { + /// Maps `tt::TokenId` to the *relative* source range. 
+ entries: Vec<(tt::TokenId, TokenTextRange)>, +} + +impl TokenMap { + pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { + let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { + TokenTextRange::Token(it) => *it == relative_range, + TokenTextRange::Delimiter(it) => { + let open = TextRange::at(it.start(), 1.into()); + let close = TextRange::at(it.end() - TextSize::of('}'), 1.into()); + open == relative_range || close == relative_range + } + })?; + Some(token_id) + } + + pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> { + let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; + Some(range) + } + + pub(crate) fn shrink_to_fit(&mut self) { + self.entries.shrink_to_fit(); + } + + pub(crate) fn insert(&mut self, token_id: tt::TokenId, relative_range: TextRange) { + self.entries.push((token_id, TokenTextRange::Token(relative_range))); + } + + pub(crate) fn insert_delim( + &mut self, + token_id: tt::TokenId, + open_relative_range: TextRange, + close_relative_range: TextRange, + ) -> usize { + let res = self.entries.len(); + let cover = open_relative_range.cover(close_relative_range); + + self.entries.push((token_id, TokenTextRange::Delimiter(cover))); + res + } + + pub(crate) fn update_close_delim(&mut self, idx: usize, close_relative_range: TextRange) { + let (_, token_text_range) = &mut self.entries[idx]; + if let TokenTextRange::Delimiter(dim) = token_text_range { + let cover = dim.cover(close_relative_range); + *token_text_range = TokenTextRange::Delimiter(cover); + } + } + + pub(crate) fn remove_delim(&mut self, idx: usize) { + // FIXME: This could be accidentally quadratic + self.entries.remove(idx); + } +} -- cgit v1.2.3 From 489ae7a800b02dbdacc1f949c067fa557333e7a1 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 24 May 2021 20:29:48 +0200 Subject: Make `TokenTextRange` private --- crates/hir_expand/src/db.rs | 2 +- crates/hir_expand/src/hygiene.rs | 2 +- crates/hir_expand/src/lib.rs | 
4 ++-- crates/mbe/src/tests/expand.rs | 5 ++--- crates/mbe/src/token_map.rs | 8 ++++---- 5 files changed, 10 insertions(+), 11 deletions(-) (limited to 'crates') diff --git a/crates/hir_expand/src/db.rs b/crates/hir_expand/src/db.rs index 5c769c1bf..03637878b 100644 --- a/crates/hir_expand/src/db.rs +++ b/crates/hir_expand/src/db.rs @@ -155,7 +155,7 @@ pub fn expand_hypothetical( mbe::token_tree_to_syntax_node(&hypothetical_expansion.value, fragment_kind).ok()?; let token_id = macro_def.map_id_down(token_id); - let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; + let range = tmap_2.range_by_token(token_id, token_to_map.kind())?; let token = node.syntax_node().covering_element(range).into_token()?; Some((node.syntax_node(), token)) } diff --git a/crates/hir_expand/src/hygiene.rs b/crates/hir_expand/src/hygiene.rs index 38e09fdd4..d98913907 100644 --- a/crates/hir_expand/src/hygiene.rs +++ b/crates/hir_expand/src/hygiene.rs @@ -154,7 +154,7 @@ impl HygieneInfo { }, }; - let range = token_map.range_by_token(token_id)?.by_kind(SyntaxKind::IDENT)?; + let range = token_map.range_by_token(token_id, SyntaxKind::IDENT)?; Some((tt.with_value(range + tt.value), origin)) } } diff --git a/crates/hir_expand/src/lib.rs b/crates/hir_expand/src/lib.rs index 92c679dd2..6be4516a3 100644 --- a/crates/hir_expand/src/lib.rs +++ b/crates/hir_expand/src/lib.rs @@ -329,7 +329,7 @@ impl ExpansionInfo { let token_id = self.macro_arg.1.token_by_range(range)?; let token_id = self.macro_def.map_id_down(token_id); - let range = self.exp_map.range_by_token(token_id)?.by_kind(token.value.kind())?; + let range = self.exp_map.range_by_token(token_id, token.value.kind())?; let token = self.expanded.value.covering_element(range).into_token()?; @@ -354,7 +354,7 @@ impl ExpansionInfo { }, }; - let range = token_map.range_by_token(token_id)?.by_kind(token.value.kind())?; + let range = token_map.range_by_token(token_id, token.value.kind())?; let token = 
tt.value.covering_element(range + tt.value.text_range().start()).into_token()?; Some((tt.with_value(token), origin)) diff --git a/crates/mbe/src/tests/expand.rs b/crates/mbe/src/tests/expand.rs index 3a1d840ea..5f173f513 100644 --- a/crates/mbe/src/tests/expand.rs +++ b/crates/mbe/src/tests/expand.rs @@ -58,9 +58,8 @@ macro_rules! foobar { let (node, token_map) = token_tree_to_syntax_node(&expanded, FragmentKind::Items).unwrap(); let content = node.syntax_node().to_string(); - let get_text = |id, kind| -> String { - content[token_map.range_by_token(id).unwrap().by_kind(kind).unwrap()].to_string() - }; + let get_text = + |id, kind| -> String { content[token_map.range_by_token(id, kind).unwrap()].to_string() }; assert_eq!(expanded.token_trees.len(), 4); // {($e:ident) => { fn $e() {} }} diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 58c9f5aa5..0567475be 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -2,13 +2,13 @@ use parser::{SyntaxKind, T}; use syntax::{TextRange, TextSize}; #[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum TokenTextRange { +enum TokenTextRange { Token(TextRange), Delimiter(TextRange), } impl TokenTextRange { - pub fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { + fn by_kind(self, kind: SyntaxKind) -> Option<TextRange> { match self { TokenTextRange::Token(it) => Some(it), TokenTextRange::Delimiter(it) => match kind { @@ -42,9 +42,9 @@ impl TokenMap { Some(token_id) } - pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TokenTextRange> { + pub fn range_by_token(&self, token_id: tt::TokenId, kind: SyntaxKind) -> Option<TextRange> { let &(_, range) = self.entries.iter().find(|(tid, _)| *tid == token_id)?; - Some(range) + range.by_kind(kind) } pub(crate) fn shrink_to_fit(&mut self) { -- cgit v1.2.3 From c8f40b1503cb461b935f5fb0a44fa8e26976c363 Mon Sep 17 00:00:00 2001 From: Jonas Schievink Date: Mon, 24 May 2021 21:47:01 +0200 Subject: Fixup --- crates/mbe/src/lib.rs | 10 ++++++---- crates/mbe/src/token_map.rs | 2 
++ 2 files changed, 8 insertions(+), 4 deletions(-) (limited to 'crates') diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index b7aa64713..b95374b76 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -64,11 +64,13 @@ impl fmt::Display for ExpandError { } } -pub use crate::syntax_bridge::{ - ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, - token_tree_to_syntax_node, +pub use crate::{ + syntax_bridge::{ + ast_to_token_tree, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, + token_tree_to_syntax_node, + }, + token_map::TokenMap, }; -pub use crate::token_map::TokenMap; /// This struct contains AST for a single `macro_rules` definition. What might /// be very confusing is that AST has almost exactly the same shape as diff --git a/crates/mbe/src/token_map.rs b/crates/mbe/src/token_map.rs index 0567475be..6df3de3b3 100644 --- a/crates/mbe/src/token_map.rs +++ b/crates/mbe/src/token_map.rs @@ -1,3 +1,5 @@ +//! Mapping between `TokenId`s and the token's position in macro definitions or inputs. + use parser::{SyntaxKind, T}; use syntax::{TextRange, TextSize}; -- cgit v1.2.3