From fd52d721e1ed9794048d63e546f43805d24d7ab8 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Sun, 17 Nov 2019 20:15:55 +0300
Subject: More correct expansion mapping

We can't really map arbitrary ranges; we can only map tokens
---
 crates/ra_hir/src/source_binder.rs       | 13 ++++++---
 crates/ra_hir_expand/src/lib.rs          | 44 +++++++++++++++++++----------
 crates/ra_ide_api/src/goto_definition.rs | 48 +++++++++++++++++---------------
 crates/ra_mbe/src/syntax_bridge.rs       | 13 ++++++---
 4 files changed, 74 insertions(+), 44 deletions(-)

(limited to 'crates')

diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 75a467fb3..f0ed8e2b2 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -16,7 +16,7 @@ use ra_syntax::{
     ast::{self, AstNode},
     match_ast, AstPtr,
     SyntaxKind::*,
-    SyntaxNode, SyntaxNodePtr, TextRange, TextUnit,
+    SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
 };
 
 use crate::{
@@ -131,11 +131,16 @@ pub struct Expansion {
 }
 
 impl Expansion {
-    pub fn translate_offset(&self, db: &impl HirDatabase, offset: TextUnit) -> Option<TextUnit> {
+    pub fn map_token_down(
+        &self,
+        db: &impl HirDatabase,
+        token: Source<&SyntaxToken>,
+    ) -> Option<Source<SyntaxToken>> {
         let exp_info = self.file_id().expansion_info(db)?;
-        exp_info.translate_offset(offset)
+        exp_info.map_token_down(token)
     }
-    pub fn file_id(&self) -> HirFileId {
+
+    fn file_id(&self) -> HirFileId {
         self.macro_call_id.as_file(MacroFileKind::Items)
     }
 }
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 5927a03ba..57e2e6cb1 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -18,8 +18,9 @@ use std::sync::Arc;
 
 use ra_db::{salsa, CrateId, FileId};
 use ra_syntax::{
+    algo,
     ast::{self, AstNode},
-    SyntaxNode, TextRange, TextUnit,
+    SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
 
 use crate::ast_id_map::FileAstId;
@@ -83,13 +84,21 @@ impl HirFileId {
                     loc.def.ast_id.to_node(db).token_tree()?.syntax().text_range().start();
 
                 let macro_def = db.macro_def(loc.def)?;
-                let exp_map = db.parse_macro(macro_file)?.1;
+                let (parse, exp_map) = db.parse_macro(macro_file)?;
+                let expanded = Source::new(self, parse.syntax_node());
                 let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
 
                 let arg_start = (loc.ast_id.file_id, arg_start);
                 let def_start = (loc.def.ast_id.file_id, def_start);
 
-                Some(ExpansionInfo { arg_start, def_start, macro_arg, macro_def, exp_map })
+                Some(ExpansionInfo {
+                    expanded,
+                    arg_start,
+                    def_start,
+                    macro_arg,
+                    macro_def,
+                    exp_map,
+                })
             }
         }
     }
@@ -146,27 +155,34 @@ impl MacroCallId {
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
 /// ExpansionInfo mainly describes how to map text range between src and expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct ExpansionInfo {
-    pub(crate) arg_start: (HirFileId, TextUnit),
-    pub(crate) def_start: (HirFileId, TextUnit),
+    expanded: Source<SyntaxNode>,
+    arg_start: (HirFileId, TextUnit),
+    def_start: (HirFileId, TextUnit),
 
-    pub(crate) macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
-    pub(crate) macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
-    pub(crate) exp_map: Arc<mbe::RevTokenMap>,
+    macro_def: Arc<(db::TokenExpander, mbe::TokenMap)>,
+    macro_arg: Arc<(tt::Subtree, mbe::TokenMap)>,
+    exp_map: Arc<mbe::RevTokenMap>,
 }
 
 impl ExpansionInfo {
-    pub fn translate_offset(&self, offset: TextUnit) -> Option<TextUnit> {
-        let offset = offset.checked_sub(self.arg_start.1)?;
-        let token_id = self.macro_arg.1.token_by_offset(offset)?;
+    pub fn map_token_down(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>> {
+        assert_eq!(token.file_id, self.arg_start.0);
+        let range = token.ast.text_range().checked_sub(self.arg_start.1)?;
+        let token_id = self.macro_arg.1.token_by_range(range)?;
         let token_id = self.macro_def.0.map_id_down(token_id);
 
-        let (r, _) = self.exp_map.ranges.iter().find(|(_, tid)| *tid == token_id)?;
-        Some(r.start())
+        let range = self.exp_map.range_by_token(token_id)?;
+
+        let token = algo::find_covering_element(&self.expanded.ast, range).into_token()?;
+
+        Some(self.expanded.with_ast(token))
     }
 
+    // FIXME: a more correct signature would be
+    // `pub fn map_token_up(&self, token: Source<&SyntaxToken>) -> Option<Source<SyntaxToken>>`
     pub fn find_range(&self, from: TextRange) -> Option<(HirFileId, TextRange)> {
         let token_id = look_in_rev_map(&self.exp_map, from)?;
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs
index 70baa294f..b693a4c31 100644
--- a/crates/ra_ide_api/src/goto_definition.rs
+++ b/crates/ra_ide_api/src/goto_definition.rs
@@ -4,9 +4,8 @@ use std::iter::successors;
 
 use hir::{db::AstDatabase, Source};
 use ra_syntax::{
-    algo::find_node_at_offset,
     ast::{self, DocCommentsOwner},
-    match_ast, AstNode, SyntaxNode, TextUnit,
+    match_ast, AstNode, SyntaxNode, SyntaxToken,
 };
 
 use crate::{
@@ -20,37 +19,42 @@ pub(crate) fn goto_definition(
     db: &RootDatabase,
     position: FilePosition,
 ) -> Option<RangeInfo<Vec<NavigationTarget>>> {
-    let offset = descend_into_macros(db, position);
+    let token = descend_into_macros(db, position)?;
 
-    let syntax = db.parse_or_expand(offset.file_id)?;
+    let res = match_ast! {
+        match (token.ast.parent()) {
+            ast::NameRef(name_ref) => {
+                let navs = reference_definition(db, token.with_ast(&name_ref)).to_vec();
+                RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec())
+            },
+            ast::Name(name) => {
+                let navs = name_definition(db, token.with_ast(&name))?;
+                RangeInfo::new(name.syntax().text_range(), navs)
 
-    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&syntax, offset.ast) {
-        let navs = reference_definition(db, offset.with_ast(&name_ref)).to_vec();
-        return Some(RangeInfo::new(name_ref.syntax().text_range(), navs.to_vec()));
-    }
-    if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, offset.ast) {
-        let navs = name_definition(db, offset.with_ast(&name))?;
-        return Some(RangeInfo::new(name.syntax().text_range(), navs));
-    }
-    None
+            },
+            _ => return None,
+        }
+    };
+
+    Some(res)
 }
 
-fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Source<TextUnit> {
-    successors(Some(Source::new(position.file_id.into(), position.offset)), |offset| {
-        let syntax = db.parse_or_expand(offset.file_id)?;
-        let macro_call = find_node_at_offset::<ast::MacroCall>(&syntax, offset.ast)?;
+fn descend_into_macros(db: &RootDatabase, position: FilePosition) -> Option<Source<SyntaxToken>> {
+    let file = db.parse_or_expand(position.file_id.into())?;
+    let token = file.token_at_offset(position.offset).filter(|it| !it.kind().is_trivia()).next()?;
+
+    successors(Some(Source::new(position.file_id.into(), token)), |token| {
+        let macro_call = token.ast.ancestors().find_map(ast::MacroCall::cast)?;
         let tt = macro_call.token_tree()?;
-        if !tt.syntax().text_range().contains(offset.ast) {
+        if !token.ast.text_range().is_subrange(&tt.syntax().text_range()) {
             return None;
         }
         let source_analyzer =
-            hir::SourceAnalyzer::new(db, offset.with_ast(macro_call.syntax()), None);
+            hir::SourceAnalyzer::new(db, token.with_ast(token.ast.parent()).as_ref(), None);
         let exp = source_analyzer.expand(db, &macro_call)?;
-        let next_offset = exp.translate_offset(db, offset.ast)?;
-        Some(Source::new(exp.file_id(), next_offset))
+        exp.map_token_down(db, token.as_ref())
     })
     .last()
-    .unwrap()
 }
 
 #[derive(Debug)]
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 37382d2df..8398c9ac7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -77,14 +77,14 @@ pub fn token_tree_to_syntax_node(
 }
 
 impl TokenMap {
-    pub fn token_by_offset(&self, relative_offset: TextUnit) -> Option<tt::TokenId> {
+    pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
         let (idx, _) =
-            self.tokens.iter().enumerate().find(|(_, range)| range.contains(relative_offset))?;
+            self.tokens.iter().enumerate().find(|(_, range)| **range == relative_range)?;
         Some(tt::TokenId(idx as u32))
     }
 
-    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
-        let idx = tt.0 as usize;
+    pub fn relative_range_of(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let idx = token_id.0 as usize;
         self.tokens.get(idx).copied()
     }
 
@@ -96,6 +96,11 @@ impl TokenMap {
 }
 
 impl RevTokenMap {
+    pub fn range_by_token(&self, token_id: tt::TokenId) -> Option<TextRange> {
+        let &(r, _) = self.ranges.iter().find(|(_, tid)| *tid == token_id)?;
+        Some(r)
+    }
+
     fn add(&mut self, relative_range: TextRange, token_id: tt::TokenId) {
         self.ranges.push((relative_range, token_id.clone()))
    }
--
cgit v1.2.3
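
For readers unfamiliar with the two maps involved, the sketch below models the idea behind the
change with simplified, standalone types. The type names, field layouts, and concrete ranges are
illustrative only and are not the real ra_mbe types: the argument-side map answers an exact-range
lookup (token_by_range), the expansion-side map answers range_by_token, and an offset or range
that does not coincide with a token has no mapping at all, which is why the patch maps tokens
rather than arbitrary ranges.

    use std::ops::Range;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct TokenId(u32);

    // Argument-side map: token id -> range in the macro call's token tree
    // (a simplified stand-in, not the real TokenMap).
    struct TokenMap {
        tokens: Vec<Range<u32>>,
    }

    impl TokenMap {
        // Exact-range lookup, mirroring the `token_by_range` introduced by the patch.
        fn token_by_range(&self, range: Range<u32>) -> Option<TokenId> {
            let idx = self.tokens.iter().position(|r| *r == range)?;
            Some(TokenId(idx as u32))
        }
    }

    // Expansion-side map: token id -> range in the expanded text
    // (a simplified stand-in, not the real RevTokenMap).
    struct RevTokenMap {
        ranges: Vec<(Range<u32>, TokenId)>,
    }

    impl RevTokenMap {
        // Mirrors the `range_by_token` helper added in the patch.
        fn range_by_token(&self, id: TokenId) -> Option<Range<u32>> {
            let (r, _) = self.ranges.iter().find(|(_, tid)| *tid == id)?;
            Some(r.clone())
        }
    }

    fn main() {
        // Hypothetical call site `foo!(bar)`: token 0 is `foo` at 0..3,
        // token 1 is `bar` at 5..8 (relative to the argument start).
        let arg_map = TokenMap { tokens: vec![0..3, 5..8] };
        // Hypothetical expansion `fn bar() {}`: the `bar` token lands at 3..6.
        let exp_map = RevTokenMap { ranges: vec![(3..6, TokenId(1))] };

        // Map a token "down" into the expansion: exact range -> id -> expanded range.
        let id = arg_map.token_by_range(5..8).expect("range matches a token exactly");
        let expanded = exp_map.range_by_token(id).expect("token occurs in the expansion");
        assert_eq!(expanded, 3..6);

        // An arbitrary range that is not exactly one token has no mapping.
        assert!(arg_map.token_by_range(4..8).is_none());
        println!("`bar` maps to {:?} in the expansion", expanded);
    }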