From 2eaa8c94a8a6b5cd86139c5e010ae95268b28658 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sat, 16 Nov 2019 16:49:26 +0300
Subject: Goto definition works inside macros

---
 crates/ra_mbe/src/syntax_bridge.rs | 6 ++++++
 1 file changed, 6 insertions(+)
(limited to 'crates/ra_mbe')

diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 3f57ce3b5..37382d2df 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -77,6 +77,12 @@ pub fn token_tree_to_syntax_node(
 }
 
 impl TokenMap {
+    pub fn token_by_offset(&self, relative_offset: TextUnit) -> Option<tt::TokenId> {
+        let (idx, _) =
+            self.tokens.iter().enumerate().find(|(_, range)| range.contains(relative_offset))?;
+        Some(tt::TokenId(idx as u32))
+    }
+
     pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
         let idx = tt.0 as usize;
         self.tokens.get(idx).copied()
-- cgit v1.2.3

From 7e2f4b30db8a0d734b3a1fc9f6ad77b2adc9aa2a Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sun, 17 Nov 2019 18:35:05 +0300
Subject: Disable doctests

---
 crates/ra_mbe/Cargo.toml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)
(limited to 'crates/ra_mbe')

diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml
index b02e45ee3..a3fc01f63 100644
--- a/crates/ra_mbe/Cargo.toml
+++ b/crates/ra_mbe/Cargo.toml
@@ -4,6 +4,9 @@ name = "ra_mbe"
 version = "0.1.0"
 authors = ["rust-analyzer developers"]
 
+[lib]
+doctest = false
+
 [dependencies]
 ra_syntax = { path = "../ra_syntax" }
 ra_parser = { path = "../ra_parser" }
@@ -14,4 +17,3 @@ log = "0.4.5"
 
 [dev-dependencies]
 test_utils = { path = "../test_utils" }
-
-- cgit v1.2.3

From c8f858d04323f93a4bacb143d92c976b2bc1e179 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sun, 17 Nov 2019 19:11:43 +0300
Subject: Make shift an implementation detail of mbe

---
 crates/ra_mbe/src/lib.rs | 106 ++++++++++++++++++++++++++++++-----------------
 1 file changed, 68 insertions(+), 38 deletions(-)
(limited to 'crates/ra_mbe')

diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 8a31d1c36..58ca95368 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -40,49 +40,75 @@ pub use crate::syntax_bridge::{
 /// and `$()*` have special meaning (see `Var` and `Repeat` data structures)
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct MacroRules {
-    pub(crate) rules: Vec<Rule>,
+    rules: Vec<Rule>,
     /// Highest id of the token we have in TokenMap
-    pub(crate) shift: u32,
+    shift: Shift,
 }
 
 #[derive(Clone, Debug, PartialEq, Eq)]
-pub(crate) struct Rule {
-    pub(crate) lhs: tt::Subtree,
-    pub(crate) rhs: tt::Subtree,
+struct Rule {
+    lhs: tt::Subtree,
+    rhs: tt::Subtree,
 }
 
-// Find the max token id inside a subtree
-fn max_id(subtree: &tt::Subtree) -> Option<u32> {
-    subtree
-        .token_trees
-        .iter()
-        .filter_map(|tt| match tt {
-            tt::TokenTree::Subtree(subtree) => max_id(subtree),
-            tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
-                if ident.id != tt::TokenId::unspecified() =>
-            {
-                Some(ident.id.0)
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+struct Shift(u32);
+
+impl Shift {
+    fn new(tt: &tt::Subtree) -> Shift {
+        // Note that TokenId is started from zero,
+        // We have to add 1 to prevent duplication.
+        let value = max_id(tt).map_or(0, |it| it + 1);
+        return Shift(value);
+
+        // Find the max token id inside a subtree
+        fn max_id(subtree: &tt::Subtree) -> Option<u32> {
+            subtree
+                .token_trees
+                .iter()
+                .filter_map(|tt| match tt {
+                    tt::TokenTree::Subtree(subtree) => max_id(subtree),
+                    tt::TokenTree::Leaf(tt::Leaf::Ident(ident))
+                        if ident.id != tt::TokenId::unspecified() =>
+                    {
+                        Some(ident.id.0)
+                    }
+                    _ => None,
+                })
+                .max()
+        }
+    }
+
+    /// Shift given TokenTree token id
+    fn shift_all(self, tt: &mut tt::Subtree) {
+        for t in tt.token_trees.iter_mut() {
+            match t {
+                tt::TokenTree::Leaf(leaf) => match leaf {
+                    tt::Leaf::Ident(ident) => ident.id = self.shift(ident.id),
+                    _ => (),
+                },
+                tt::TokenTree::Subtree(tt) => self.shift_all(tt),
             }
-            _ => None,
-        })
-        .max()
-}
+        }
+    }
 
-/// Shift given TokenTree token id
-fn shift_subtree(tt: &mut tt::Subtree, shift: u32) {
-    for t in tt.token_trees.iter_mut() {
-        match t {
-            tt::TokenTree::Leaf(leaf) => match leaf {
-                tt::Leaf::Ident(ident) if ident.id != tt::TokenId::unspecified() => {
-                    ident.id.0 += shift;
-                }
-                _ => (),
-            },
-            tt::TokenTree::Subtree(tt) => shift_subtree(tt, shift),
+    fn shift(self, id: tt::TokenId) -> tt::TokenId {
+        if id == tt::TokenId::unspecified() {
+            return id;
         }
+        tt::TokenId(id.0 + self.0)
+    }
+
+    fn unshift(self, id: tt::TokenId) -> Option<tt::TokenId> {
+        id.0.checked_sub(self.0).map(tt::TokenId)
     }
 }
 
+pub enum Origin {
+    Def,
+    Call,
+}
+
 impl MacroRules {
     pub fn parse(tt: &tt::Subtree) -> Result<MacroRules, ParseError> {
         // Note: this parsing can be implemented using mbe machinery itself, by
@@ -105,21 +131,25 @@ impl MacroRules {
             validate(&rule.lhs)?;
         }
 
-        // Note that TokenId is started from zero,
-        // We have to add 1 to prevent duplication.
-        let shift = max_id(tt).map_or(0, |it| it + 1);
-        Ok(MacroRules { rules, shift })
+        Ok(MacroRules { rules, shift: Shift::new(tt) })
     }
 
     pub fn expand(&self, tt: &tt::Subtree) -> Result<tt::Subtree, ExpandError> {
         // apply shift
         let mut tt = tt.clone();
-        shift_subtree(&mut tt, self.shift);
+        self.shift.shift_all(&mut tt);
         mbe_expander::expand(self, &tt)
     }
 
-    pub fn shift(&self) -> u32 {
-        self.shift
+    pub fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
+        self.shift.shift(id)
+    }
+
+    pub fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, Origin) {
+        match self.shift.unshift(id) {
+            Some(id) => (id, Origin::Call),
+            None => (id, Origin::Def),
+        }
     }
 }
-- cgit v1.2.3

From fd52d721e1ed9794048d63e546f43805d24d7ab8 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sun, 17 Nov 2019 20:15:55 +0300
Subject: More correct expansion mapping

We can't really map arbitrary ranges, we only can map tokens
---
 crates/ra_mbe/src/syntax_bridge.rs | 13 +++++++++----
 1 file changed, 9 insertions(+), 4 deletions(-)
(limited to 'crates/ra_mbe')

diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 37382d2df..8398c9ac7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -77,14 +77,14 @@ pub fn token_tree_to_syntax_node(
 }
 
 impl TokenMap {
-    pub fn token_by_offset(&self, relative_offset: TextUnit) -> Option<tt::TokenId> {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let (idx, _) =
-            self.tokens.iter().enumerate().find(|(_, range)| range.contains(relative_offset))?;
+            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
         Some(tt::TokenId(idx as u32))
     }
 
-    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
-        let idx = tt.0 as usize;
+    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let idx = token_id.0 as usize;
         self.tokens.get(idx).copied()
     }
 
@@ -96,6 +96,11 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
+        Some(r)
+    }
+
     fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
         self.ranges.push((relative_range, token_id.clone()))
     }
-- cgit v1.2.3
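
Taken together, these patches make token-id shifting an internal detail of mbe: ids below the shift value belong to the macro definition, call-site ids are moved above it (`map_id_down`), and `map_id_up` recovers both the original id and which side it came from. The sketch below is not rust-analyzer code; `TokenId`, `Shift`, and `Origin` are minimal local stand-ins (the `TokenId::unspecified()` special case is omitted) so the shift/unshift arithmetic can be compiled and run on its own.

// Standalone illustration of the shift/unshift idea from the patches above.
// These types only mirror the real `tt::TokenId` / `mbe::Shift` / `mbe::Origin`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct TokenId(u32);

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Shift(u32);

#[derive(Debug, PartialEq, Eq)]
enum Origin {
    Def,  // the id belongs to the macro definition
    Call, // the id belongs to the macro call site
}

impl Shift {
    // In the real crate the shift is `max_id(definition) + 1`; here it is taken as given.
    fn new(max_def_id: u32) -> Shift {
        Shift(max_def_id + 1)
    }

    // Move a call-site id above every definition-side id (cf. `map_id_down`).
    fn shift(self, id: TokenId) -> TokenId {
        TokenId(id.0 + self.0)
    }

    // Undo the shift; `None` means the id was never shifted,
    // i.e. it comes from the macro definition itself.
    fn unshift(self, id: TokenId) -> Option<TokenId> {
        id.0.checked_sub(self.0).map(TokenId)
    }

    // cf. `map_id_up`: recover the original id and its origin.
    fn origin(self, id: TokenId) -> (TokenId, Origin) {
        match self.unshift(id) {
            Some(id) => (id, Origin::Call),
            None => (id, Origin::Def),
        }
    }
}

fn main() {
    // Suppose the macro definition uses token ids 0..=4.
    let shift = Shift::new(4);

    // A token coming from the call site is shifted before expansion...
    let call_token = shift.shift(TokenId(2));
    assert_eq!(call_token, TokenId(7));

    // ...and can be mapped back to the call site afterwards,
    assert_eq!(shift.origin(call_token), (TokenId(2), Origin::Call));
    // while an unshifted id is recognised as a definition-side token.
    assert_eq!(shift.origin(TokenId(3)), (TokenId(3), Origin::Def));

    println!("shift/unshift round-trip ok");
}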