From b79d6789236bb53c5818949cc2960b5c4991cbeb Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Mon, 18 Nov 2019 15:08:39 +0300
Subject: Token-based reverse-mapping

---
 crates/ra_hir_expand/src/lib.rs    | 51 ++++++++++++++++----------------------
 crates/ra_ide_api/src/expand.rs    | 33 ++++++++++++++++++------
 crates/ra_mbe/src/syntax_bridge.rs |  6 ++++-
 3 files changed, 52 insertions(+), 38 deletions(-)

diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 57e2e6cb1..73ec1688c 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -20,7 +20,7 @@ use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
     algo,
     ast::{self, AstNode},
-    SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextUnit,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -79,22 +79,17 @@ impl HirFileId {
             HirFileIdRepr::MacroFile(macro_file) => {
                 let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
 
-                let arg_start = loc.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
-                let def_start =
-                    loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
+                let arg_tt = loc.ast_id.to_node(db).token_tree()?;
+                let def_tt = loc.def.ast_id.to_node(db).token_tree()?;
 
                 let macro_def = db.macro_def(loc.def)?;
                 let (parse, exp_map) = db.parse_macro(macro_file)?;
-                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
-                let arg_start = (loc.ast_id.file_id, arg_start);
-                let def_start = (loc.def.ast_id.file_id, def_start);
-
                 Some(ExpansionInfo {
-                    expanded,
-                    arg_start,
-                    def_start,
+                    expanded: Source::new(self, parse.syntax_node()),
+                    arg: Source::new(loc.ast_id.file_id, arg_tt),
+                    def: Source::new(loc.ast_id.file_id, def_tt),
                     macro_arg,
                     macro_def,
                     exp_map,
@@ -159,8 +154,8 @@ impl MacroCallId {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
     expanded: Source<SyntaxNode>,
-    arg_start: (HirFileId, TextUnit),
-    def_start: (HirFileId, TextUnit),
+    arg: Source<ast::TokenTree>,
+    def: Source<ast::TokenTree>,
 
     macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
     macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
@@ -169,8 +164,9 @@ pub struct ExpansionInfo {
 
 impl ExpansionInfo {
     pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
-        assert_eq!(token.file_id, self.arg_start.0);
-        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        assert_eq!(token.file_id, self.arg.file_id);
+        let range =
+            token.ast.text_range().checked_sub(self.arg.ast.syntax().text_range().start())?;
         let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
@@ -181,25 +177,22 @@ impl ExpansionInfo {
         Some(self.expanded.with_ast(token))
     }
 
-    // FIXME: a more correct signature would be
-    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
-    pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
-        let token_id = look_in_rev_map(&self.exp_map, from)?;
+    pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        let token_id = self.exp_map.token_by_range(token.ast.text_range())?;
 
         let (token_id, origin) = self.macro_def.0.map_id_up(token_id);
-
-        let (token_map, (file_id, start_offset)) = match origin {
-            mbe::Origin::Call => (&self.macro_arg.1, self.arg_start),
-            mbe::Origin::Def => (&self.macro_def.1, self.def_start),
+        let (token_map, tt) = match origin {
+            mbe::Origin::Call => (&self.macro_arg.1, &self.arg),
+            mbe::Origin::Def => (&self.macro_def.1, &self.def),
         };
 
         let range = token_map.relative_range_of(token_id)?;
-
-        return Some((file_id, range + start_offset));
-
-        fn look_in_rev_map(exp_map: &mbe::RevTokenMap, from: TextRange) -> Option<tt::TokenId> {
-            exp_map.ranges.iter().find(|&it| it.0.is_subrange(&from)).map(|it| it.1)
-        }
+        let token = algo::find_covering_element(
+            tt.ast.syntax(),
+            range + tt.ast.syntax().text_range().start(),
+        )
+        .into_token()?;
+        Some(tt.with_ast(token))
     }
 }
 
diff --git a/crates/ra_ide_api/src/expand.rs b/crates/ra_ide_api/src/expand.rs
index 5f1fb9a12..7f59e46d2 100644
--- a/crates/ra_ide_api/src/expand.rs
+++ b/crates/ra_ide_api/src/expand.rs
@@ -8,15 +8,32 @@ use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxToken};
 use crate::{db::RootDatabase, FileRange};
 
 pub(crate) fn original_range(db: &RootDatabase, node: Source<&SyntaxNode>) -> FileRange {
-    let text_range = node.ast.text_range();
-    let (file_id, range) = node
-        .file_id
-        .expansion_info(db)
-        .and_then(|expansion_info| expansion_info.find_range(text_range))
-        .unwrap_or((node.file_id, text_range));
+    let expansion = match node.file_id.expansion_info(db) {
+        None => {
+            return FileRange {
+                file_id: node.file_id.original_file(db),
+                range: node.ast.text_range(),
+            }
+        }
+        Some(it) => it,
+    };
+    // FIXME: the following is completely wrong.
+    //
+    // *First*, we should try to map the first and last tokens of node, and, if that
+    // fails, return the range of the overall macro expansion.
+    //
+    // *Second*, we should handle recursive macro expansions.
+
+    let token = node
+        .ast
+        .descendants_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find_map(|it| expansion.map_token_up(node.with_ast(&it)));
 
-    // FIXME: handle recursive macro generated macro
-    FileRange { file_id: file_id.original_file(db), range }
+    match token {
+        Some(it) => FileRange { file_id: it.file_id.original_file(db), range: it.ast.text_range() },
+        None => FileRange { file_id: node.file_id.original_file(db), range: node.ast.text_range() },
+    }
 }
 
 pub(crate) fn descend_into_macros(
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 8398c9ac7..fe3b70b8d 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -20,7 +20,7 @@ pub struct TokenMap {
 /// Maps relative range of the expanded syntax node to `tt::TokenId`
 #[derive(Debug, PartialEq, Eq, Default)]
 pub struct RevTokenMap {
-    pub ranges: Vec<(TextRange, tt::TokenId)>,
+    ranges: Vec<(TextRange, tt::TokenId)>,
 }
 
 /// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
@@ -96,6 +96,10 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
+        self.ranges.iter().find(|&it| it.0 == relative_range).map(|it| it.1)
+    }
+
     pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
         let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
         Some(r)
-- 
cgit v1.2.3
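
Taken together, the three files implement a round trip between text ranges and token ids: a TokenMap (built from the macro call or macro definition) resolves a range in the original file to a tt::TokenId, and the new RevTokenMap::token_by_range resolves a range in the expanded code back to a token id, which map_token_up then turns into a token in the call-site or definition-site file. The sketch below is a self-contained toy model of that round trip; Range, TokenId, TokenMap, and RevTokenMap here are simplified stand-ins (plain u32 offsets, exact-range lookups, no map_id_up/map_id_down shift across the macro definition), not the real ra_syntax/ra_mbe API.

// Toy model of the token-based reverse mapping sketched in the patch above.
// All names are illustrative; they are not the real rust-analyzer types.

type TokenId = u32;

/// A text range in some file, in bytes.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Range {
    start: u32,
    end: u32,
}

/// Forward map: ranges in the macro call (or definition) -> token ids.
struct TokenMap {
    ranges: Vec<(Range, TokenId)>,
}

/// Reverse map: ranges in the expanded code -> the same token ids.
struct RevTokenMap {
    ranges: Vec<(Range, TokenId)>,
}

impl TokenMap {
    /// Used on the way down: which token id covers this range of the call site?
    fn token_by_range(&self, range: Range) -> Option<TokenId> {
        self.ranges.iter().find(|(r, _)| *r == range).map(|&(_, id)| id)
    }

    /// Used on the way up: where in the call site does this token id live?
    fn relative_range_of(&self, id: TokenId) -> Option<Range> {
        self.ranges.iter().find(|&&(_, i)| i == id).map(|&(r, _)| r)
    }
}

impl RevTokenMap {
    /// Exact-match lookup, mirroring the new RevTokenMap::token_by_range.
    fn token_by_range(&self, range: Range) -> Option<TokenId> {
        self.ranges.iter().find(|(r, _)| *r == range).map(|&(_, id)| id)
    }
}

fn main() {
    // One token in the macro argument at 5..8, copied to 12..15 in the expansion.
    let arg_map = TokenMap { ranges: vec![(Range { start: 5, end: 8 }, 1)] };
    let rev_map = RevTokenMap { ranges: vec![(Range { start: 12, end: 15 }, 1)] };

    // "Down": a range in the macro argument resolves to a token id.
    let id_down = arg_map.token_by_range(Range { start: 5, end: 8 }).unwrap();

    // "Up": a range in the expansion resolves to the same id, and the forward
    // map turns the id back into the original range.
    let id_up = rev_map.token_by_range(Range { start: 12, end: 15 }).unwrap();
    let original = arg_map.relative_range_of(id_up).unwrap();

    assert_eq!(id_down, id_up);
    assert_eq!(original, Range { start: 5, end: 8 });
    println!("token {} originates at {:?}", id_up, original);
}

As in the patch itself, the reverse lookup in the toy is an exact range match rather than the old is_subrange search: once the mapping is done token by token, the query is always a single token's range, so equality suffices.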