Diffstat (limited to 'crates')
-rw-r--r--  crates/ra_assists/src/assist_ctx.rs                        | 10
-rw-r--r--  crates/ra_assists/src/assists/add_explicit_type.rs         |  2
-rw-r--r--  crates/ra_assists/src/assists/add_missing_impl_members.rs  |  3
-rw-r--r--  crates/ra_assists/src/assists/fill_match_arms.rs           |  3
-rw-r--r--  crates/ra_assists/src/assists/inline_local_variable.rs     |  2
-rw-r--r--  crates/ra_hir/src/source_binder.rs                         | 43
-rw-r--r--  crates/ra_hir_def/src/nameres.rs                           |  4
-rw-r--r--  crates/ra_hir_expand/src/lib.rs                            | 12
8 files changed, 49 insertions, 30 deletions
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 1908bdec9..71f7ce1b1 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -1,6 +1,5 @@
 //! This module defines `AssistCtx` -- the API surface that is exposed to assists.
-
-use hir::db::HirDatabase;
+use hir::{db::HirDatabase, SourceAnalyzer};
 use ra_db::FileRange;
 use ra_fmt::{leading_indent, reindent};
 use ra_syntax::{
@@ -113,6 +112,13 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
     pub(crate) fn covering_element(&self) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), self.frange.range)
     }
+    pub(crate) fn source_analyzer(
+        &self,
+        node: &SyntaxNode,
+        offset: Option<TextUnit>,
+    ) -> SourceAnalyzer {
+        SourceAnalyzer::new(self.db, self.frange.file_id, node, offset)
+    }
 
     pub(crate) fn covering_node_for_range(&self, range: TextRange) -> SyntaxElement {
         find_covering_element(self.source_file.syntax(), range)
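
The `source_analyzer` helper added above simply forwards to `hir::SourceAnalyzer::new(self.db, self.frange.file_id, node, offset)`, which is what lets each assist below drop its own `file_id` plumbing. A minimal before/after sketch of the call-site change, mirroring the `add_explicit_type` hunk further down; `stmt` and `expr` stand for nodes the assist already holds:

    // Before: every assist named the database, the file id and the node itself.
    let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, stmt.syntax(), None);

    // After: the context builds the analyzer for its own file.
    let analyzer = ctx.source_analyzer(stmt.syntax(), None);
    let ty = analyzer.type_of(ctx.db, &expr)?;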
diff --git a/crates/ra_assists/src/assists/add_explicit_type.rs b/crates/ra_assists/src/assists/add_explicit_type.rs
index ddda1a0f2..562a09685 100644
--- a/crates/ra_assists/src/assists/add_explicit_type.rs
+++ b/crates/ra_assists/src/assists/add_explicit_type.rs
@@ -40,7 +40,7 @@ pub(crate) fn add_explicit_type(ctx: AssistCtx<impl HirDatabase>) -> Option<Assi
     }
     // Infer type
     let db = ctx.db;
-    let analyzer = hir::SourceAnalyzer::new(db, ctx.frange.file_id, stmt.syntax(), None);
+    let analyzer = ctx.source_analyzer(stmt.syntax(), None);
     let ty = analyzer.type_of(db, &expr)?;
     // Assist not applicable if the type is unknown
     if is_unknown(&ty) {
diff --git a/crates/ra_assists/src/assists/add_missing_impl_members.rs b/crates/ra_assists/src/assists/add_missing_impl_members.rs
index 41de23921..91af161ee 100644
--- a/crates/ra_assists/src/assists/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/assists/add_missing_impl_members.rs
@@ -100,8 +100,7 @@ fn add_missing_impl_members_inner(
     let impl_item_list = impl_node.item_list()?;
 
     let trait_def = {
-        let file_id = ctx.frange.file_id;
-        let analyzer = hir::SourceAnalyzer::new(ctx.db, file_id, impl_node.syntax(), None);
+        let analyzer = ctx.source_analyzer(impl_node.syntax(), None);
 
         resolve_target_trait_def(ctx.db, &analyzer, &impl_node)?
     };
diff --git a/crates/ra_assists/src/assists/fill_match_arms.rs b/crates/ra_assists/src/assists/fill_match_arms.rs
index 2b74f355c..b851c2082 100644
--- a/crates/ra_assists/src/assists/fill_match_arms.rs
+++ b/crates/ra_assists/src/assists/fill_match_arms.rs
@@ -47,8 +47,7 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx<impl HirDatabase>) -> Option<Assist
 
     let expr = match_expr.expr()?;
     let enum_def = {
-        let file_id = ctx.frange.file_id;
-        let analyzer = hir::SourceAnalyzer::new(ctx.db, file_id, expr.syntax(), None);
+        let analyzer = ctx.source_analyzer(expr.syntax(), None);
         resolve_enum_def(ctx.db, &analyzer, &expr)?
     };
     let variant_list = enum_def.variant_list()?;
diff --git a/crates/ra_assists/src/assists/inline_local_variable.rs b/crates/ra_assists/src/assists/inline_local_variable.rs
index a7fd9b6d2..18a34502c 100644
--- a/crates/ra_assists/src/assists/inline_local_variable.rs
+++ b/crates/ra_assists/src/assists/inline_local_variable.rs
@@ -45,7 +45,7 @@ pub(crate) fn inline_local_varialbe(ctx: AssistCtx<impl HirDatabase>) -> Option<
     } else {
         let_stmt.syntax().text_range()
     };
-    let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, bind_pat.syntax(), None);
+    let analyzer = ctx.source_analyzer(bind_pat.syntax(), None);
     let refs = analyzer.find_all_refs(&bind_pat);
 
     let mut wrap_in_parens = vec![true; refs.len()];
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index f08827ed3..540ddd0b5 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -91,7 +91,7 @@ fn def_with_body_from_child_node(
 #[derive(Debug)]
 pub struct SourceAnalyzer {
     // FIXME: this doesn't handle macros at all
-    file_id: FileId,
+    file_id: HirFileId,
     resolver: Resolver,
     body_owner: Option<DefWithBody>,
     body_source_map: Option<Arc<BodySourceMap>>,
@@ -141,13 +141,16 @@ impl SourceAnalyzer {
         node: &SyntaxNode,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let def_with_body = def_with_body_from_child_node(db, Source::new(file_id.into(), node));
+        let node_source = Source::new(file_id.into(), node);
+        let def_with_body = def_with_body_from_child_node(db, node_source);
         if let Some(def) = def_with_body {
             let source_map = def.body_source_map(db);
             let scopes = def.expr_scopes(db);
             let scope = match offset {
-                None => scope_for(&scopes, &source_map, file_id.into(), &node),
-                Some(offset) => scope_for_offset(&scopes, &source_map, file_id.into(), offset),
+                None => scope_for(&scopes, &source_map, node_source),
+                Some(offset) => {
+                    scope_for_offset(&scopes, &source_map, Source::new(file_id.into(), offset))
+                }
             };
             let resolver = expr::resolver_for_scope(db, def, scope);
             SourceAnalyzer {
@@ -156,7 +159,7 @@ impl SourceAnalyzer {
                 body_source_map: Some(source_map),
                 infer: Some(def.infer(db)),
                 scopes: Some(scopes),
-                file_id,
+                file_id: file_id.into(),
             }
         } else {
             SourceAnalyzer {
@@ -168,18 +171,18 @@ impl SourceAnalyzer {
                 body_source_map: None,
                 infer: None,
                 scopes: None,
-                file_id,
+                file_id: file_id.into(),
             }
         }
     }
 
     fn expr_id(&self, expr: &ast::Expr) -> Option<ExprId> {
-        let src = Source { file_id: self.file_id.into(), ast: expr };
+        let src = Source { file_id: self.file_id, ast: expr };
         self.body_source_map.as_ref()?.node_expr(src)
     }
 
     fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
-        let src = Source { file_id: self.file_id.into(), ast: pat };
+        let src = Source { file_id: self.file_id, ast: pat };
         self.body_source_map.as_ref()?.node_pat(src)
     }
 
@@ -287,7 +290,7 @@ impl SourceAnalyzer {
         let name = name_ref.as_name();
         let source_map = self.body_source_map.as_ref()?;
         let scopes = self.scopes.as_ref()?;
-        let scope = scope_for(scopes, source_map, self.file_id.into(), name_ref.syntax())?;
+        let scope = scope_for(scopes, source_map, Source::new(self.file_id, name_ref.syntax()))?;
         let entry = scopes.resolve_name_in_scope(scope, &name)?;
         Some(ScopeEntryWithSyntax {
             name: entry.name().clone(),
@@ -408,20 +411,19 @@ impl SourceAnalyzer {
 fn scope_for(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
-    file_id: HirFileId,
-    node: &SyntaxNode,
+    node: Source<&SyntaxNode>,
 ) -> Option<ScopeId> {
-    node.ancestors()
+    node.ast
+        .ancestors()
         .filter_map(ast::Expr::cast)
-        .filter_map(|it| source_map.node_expr(Source { file_id, ast: &it }))
+        .filter_map(|it| source_map.node_expr(Source::new(node.file_id, &it)))
         .find_map(|it| scopes.scope_for(it))
 }
 
 fn scope_for_offset(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
-    file_id: HirFileId,
-    offset: TextUnit,
+    offset: Source<TextUnit>,
 ) -> Option<ScopeId> {
     scopes
         .scope_by_expr()
@@ -429,7 +431,7 @@ fn scope_for_offset(
         .filter_map(|(id, scope)| {
             let source = source_map.expr_syntax(*id)?;
             // FIXME: correctly handle macro expansion
-            if source.file_id != file_id {
+            if source.file_id != offset.file_id {
                 return None;
             }
             let syntax_node_ptr =
@@ -438,9 +440,14 @@ fn scope_for_offset(
         })
         // find containing scope
        .min_by_key(|(ptr, _scope)| {
-            (!(ptr.range().start() <= offset && offset <= ptr.range().end()), ptr.range().len())
+            (
+                !(ptr.range().start() <= offset.ast && offset.ast <= ptr.range().end()),
+                ptr.range().len(),
+            )
+        })
+        .map(|(ptr, scope)| {
+            adjust(scopes, source_map, ptr, offset.file_id, offset.ast).unwrap_or(*scope)
         })
-        .map(|(ptr, scope)| adjust(scopes, source_map, ptr, file_id, offset).unwrap_or(*scope))
 }
 
 // XXX: during completion, cursor might be outside of any particular
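
The `source_binder.rs` hunks above replace the loose `(file_id, node)` and `(file_id, offset)` parameter pairs of `scope_for` and `scope_for_offset` with single `Source<&SyntaxNode>` and `Source<TextUnit>` arguments, so a node or offset always carries the `HirFileId` it belongs to. A sketch of the resulting call sites, taken from the hunks above and assuming the surrounding bindings (`scopes`, `source_map`, `file_id`, `node`, `offset`, `name_ref`) are in scope:

    // In SourceAnalyzer::new: wrap the node once, reuse it for both the body lookup
    // and the scope query; pair a cursor offset with the same file id.
    let node_source = Source::new(file_id.into(), node);
    let scope = match offset {
        None => scope_for(&scopes, &source_map, node_source),
        Some(offset) => scope_for_offset(&scopes, &source_map, Source::new(file_id.into(), offset)),
    };

    // In the name-resolution hunk (around `name_ref.as_name()`): the analyzer's stored
    // HirFileId is paired with the reference node in the same way.
    let scope = scope_for(scopes, source_map, Source::new(self.file_id, name_ref.syntax()))?;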
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs
index 49e33ccc4..e5b073a0f 100644
--- a/crates/ra_hir_def/src/nameres.rs
+++ b/crates/ra_hir_def/src/nameres.rs
@@ -125,12 +125,12 @@ pub struct ModuleData {
     pub impls: Vec<ImplId>,
 }
 
-#[derive(Default, Debug, PartialEq, Eq, Clone)]
+#[derive(Default, Debug, PartialEq, Eq)]
 pub(crate) struct Declarations {
     fns: FxHashMap<FileAstId<ast::FnDef>, FunctionId>,
 }
 
-#[derive(Debug, Default, PartialEq, Eq, Clone)]
+#[derive(Debug, Default, PartialEq, Eq)]
 pub struct ModuleScope {
     items: FxHashMap<Name, Resolution>,
     /// Macros visable in current module in legacy textual scope
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 437d73e94..26531cb05 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -223,9 +223,12 @@ impl<N: AstNode> AstId<N> {
     }
 }
 
+/// FIXME: https://github.com/matklad/with ?
 #[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
 pub struct Source<T> {
     pub file_id: HirFileId,
+    // FIXME: this stores all kind of things, not only `ast`.
+    // There should be a better name...
     pub ast: T,
 }
 
@@ -234,11 +237,16 @@ impl<T> Source<T> {
         Source { file_id, ast }
     }
 
+    // Similarly, naming here is stupid...
+    pub fn with_ast<U>(&self, ast: U) -> Source<U> {
+        Source::new(self.file_id, ast)
+    }
+
     pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
-        Source { file_id: self.file_id, ast: f(self.ast) }
+        Source::new(self.file_id, f(self.ast))
     }
     pub fn as_ref(&self) -> Source<&T> {
-        Source { file_id: self.file_id, ast: &self.ast }
+        self.with_ast(&self.ast)
     }
     pub fn file_syntax(&self, db: &impl db::AstDatabase) -> SyntaxNode {
         db.parse_or_expand(self.file_id).expect("source created from invalid file")
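
The `with_ast` helper added at the end is just `Source::new` with the receiver's `file_id`, and `map`/`as_ref` are rewritten in terms of it. A self-contained sketch of the same shape, with `HirFileId` replaced by a plain newtype so the example stands alone outside the crate (the `main` demo is illustrative only):

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct HirFileId(u32);

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct Source<T> {
        file_id: HirFileId,
        ast: T,
    }

    impl<T> Source<T> {
        fn new(file_id: HirFileId, ast: T) -> Source<T> {
            Source { file_id, ast }
        }
        // Keep the file id, swap the payload (the helper added in the diff).
        fn with_ast<U>(&self, ast: U) -> Source<U> {
            Source::new(self.file_id, ast)
        }
        // Transform the payload while carrying the file id along.
        fn map<F: FnOnce(T) -> U, U>(self, f: F) -> Source<U> {
            Source::new(self.file_id, f(self.ast))
        }
        // Borrow the payload; now expressed through `with_ast`.
        fn as_ref(&self) -> Source<&T> {
            self.with_ast(&self.ast)
        }
    }

    fn main() {
        let src = Source::new(HirFileId(0), "fn main() {}".to_string());
        let len = src.as_ref().map(|text| text.len());
        assert_eq!(len.file_id, HirFileId(0));
        assert_eq!(len.ast, 12);
    }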