diff options
author | Florian Diebold <[email protected]> | 2020-03-07 14:27:03 +0000 |
---|---|---|
committer | Florian Diebold <[email protected]> | 2020-03-07 14:48:06 +0000 |
commit | 24e98121d81b75bafcd9c6005548776c00de8401 (patch) | |
tree | 976841b2501ab4501613a5f66b1004cd67f7e369 /crates/ra_hir/src | |
parent | aff82cf7ac172f213cb5dcca637cb2c5332294c1 (diff) |
Try to complete within macros
Diffstat (limited to 'crates/ra_hir/src')
-rw-r--r-- | crates/ra_hir/src/semantics.rs | 55 |
1 file changed, 53 insertions, 2 deletions
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 965d185a4..56bd763c7 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -6,13 +6,14 @@ use std::{cell::RefCell, fmt, iter::successors}; | |||
6 | 6 | ||
7 | use hir_def::{ | 7 | use hir_def::{ |
8 | resolver::{self, HasResolver, Resolver}, | 8 | resolver::{self, HasResolver, Resolver}, |
9 | TraitId, | 9 | AsMacroCall, TraitId, |
10 | }; | 10 | }; |
11 | use hir_expand::ExpansionInfo; | 11 | use hir_expand::ExpansionInfo; |
12 | use ra_db::{FileId, FileRange}; | 12 | use ra_db::{FileId, FileRange}; |
13 | use ra_prof::profile; | 13 | use ra_prof::profile; |
14 | use ra_syntax::{ | 14 | use ra_syntax::{ |
15 | algo::skip_trivia_token, ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, | 15 | algo::{self, skip_trivia_token}, |
16 | ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit, | ||
16 | }; | 17 | }; |
17 | use rustc_hash::{FxHashMap, FxHashSet}; | 18 | use rustc_hash::{FxHashMap, FxHashSet}; |
18 | 19 | ||
@@ -70,6 +71,37 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
70 | Some(node) | 71 | Some(node) |
71 | } | 72 | } |
72 | 73 | ||
74 | pub fn expand_hypothetical( | ||
75 | &self, | ||
76 | actual_macro_call: &ast::MacroCall, | ||
77 | hypothetical_call: &ast::MacroCall, | ||
78 | token_to_map: SyntaxToken, | ||
79 | ) -> Option<(SyntaxNode, SyntaxToken)> { | ||
80 | let macro_call = | ||
81 | self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); | ||
82 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); | ||
83 | let macro_call_id = macro_call | ||
84 | .as_call_id(self.db, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?; | ||
85 | let macro_file = macro_call_id.as_file().macro_file().unwrap(); | ||
86 | let (tt, tmap_1) = | ||
87 | hir_expand::syntax_node_to_token_tree(hypothetical_call.token_tree().unwrap().syntax()) | ||
88 | .unwrap(); | ||
89 | let range = token_to_map | ||
90 | .text_range() | ||
91 | .checked_sub(hypothetical_call.token_tree().unwrap().syntax().text_range().start())?; | ||
92 | let token_id = tmap_1.token_by_range(range)?; | ||
93 | let macro_def = hir_expand::db::expander(self.db, macro_call_id)?; | ||
94 | let (node, tmap_2) = hir_expand::db::parse_macro_with_arg( | ||
95 | self.db, | ||
96 | macro_file, | ||
97 | Some(std::sync::Arc::new((tt, tmap_1))), | ||
98 | )?; | ||
99 | let token_id = macro_def.0.map_id_down(token_id); | ||
100 | let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?; | ||
101 | let token = algo::find_covering_element(&node.syntax_node(), range).into_token()?; | ||
102 | Some((node.syntax_node(), token)) | ||
103 | } | ||
104 | |||
73 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | 105 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { |
74 | let parent = token.parent(); | 106 | let parent = token.parent(); |
75 | let parent = self.find_file(parent); | 107 | let parent = self.find_file(parent); |
@@ -104,6 +136,25 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
104 | node.ancestors_with_macros(self.db).map(|it| it.value) | 136 | node.ancestors_with_macros(self.db).map(|it| it.value) |
105 | } | 137 | } |
106 | 138 | ||
139 | pub fn ancestors_at_offset_with_macros( | ||
140 | &self, | ||
141 | node: &SyntaxNode, | ||
142 | offset: TextUnit, | ||
143 | ) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
144 | use itertools::Itertools; | ||
145 | node.token_at_offset(offset) | ||
146 | .map(|token| self.ancestors_with_macros(token.parent())) | ||
147 | .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) | ||
148 | } | ||
149 | |||
150 | pub fn find_node_at_offset_with_macros<N: AstNode>( | ||
151 | &self, | ||
152 | node: &SyntaxNode, | ||
153 | offset: TextUnit, | ||
154 | ) -> Option<N> { | ||
155 | self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) | ||
156 | } | ||
157 | |||
107 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | 158 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { |
108 | self.analyze(expr.syntax()).type_of(self.db, &expr) | 159 | self.analyze(expr.syntax()).type_of(self.db, &expr) |
109 | } | 160 | } |