path: root/crates/ra_hir/src
author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2020-01-15 19:38:10 +0000
committer GitHub <[email protected]>    2020-01-15 19:38:10 +0000
commit    c78d269b66dd7e02321bf447eef1375c81f66a1e (patch)
tree      8ec28f0ecd713783aa4d7032bdf324ace7bc8911 /crates/ra_hir/src
parent    aa2e13b37f4508168fb064a79d0190fa705d8a47 (diff)
parent    aaef88db0e2602e010f78e26a80d974be12c1f71 (diff)
Merge #2837
2837: Accidentally quadratic r=matklad a=matklad

Our syntax highlighting is accidentally quadratic. The current state of the PR fixes it in a pretty crude way; it looks like the proper fix requires redoing how source-analyzer works.

**NB:** don't be scared by the diff stats, that's mostly a test-data file.

Co-authored-by: Aleksey Kladov <[email protected]>
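As far as the diff below shows, the "crude fix" boils down to one idea: the new `SourceBinder` caches the expensive per-container `child_by_source` map, so the many lookups syntax highlighting makes against the same container pay for the tree walk once instead of once per node. A minimal, self-contained sketch of that shape, with hypothetical names and a plain `HashMap` standing in for the real `DynMap`/`ChildBySource` machinery:

```rust
use std::collections::HashMap;

// Hypothetical stand-ins: a container ID and the map of its children.
type ContainerId = u32;
type ChildMap = HashMap<&'static str, u32>;

// Imagine this walks the container's whole syntax tree: expensive.
fn compute_children(container: ContainerId) -> ChildMap {
    let mut map = HashMap::new();
    map.insert("child", container * 10);
    map
}

struct Binder {
    cache: HashMap<ContainerId, ChildMap>,
}

impl Binder {
    fn new() -> Binder {
        Binder { cache: HashMap::new() }
    }

    // Accidentally quadratic: every lookup re-walks the container.
    fn lookup_uncached(&self, container: ContainerId, name: &str) -> Option<u32> {
        compute_children(container).get(name).copied()
    }

    // The cached version: walk each container once, then answer from the
    // cache, analogous to what `SourceBinder::to_id` does with
    // `entry().or_insert_with()`.
    fn lookup(&mut self, container: ContainerId, name: &str) -> Option<u32> {
        let children =
            self.cache.entry(container).or_insert_with(|| compute_children(container));
        children.get(name).copied()
    }
}

fn main() {
    let mut binder = Binder::new();
    // Highlighting asks about many nodes of the same container; with the
    // cache the expensive walk runs once rather than once per node.
    for _token in 0..1_000 {
        assert_eq!(binder.lookup(1, "child"), Some(10));
    }
    assert_eq!(binder.lookup_uncached(1, "child"), Some(10));
}
```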
Diffstat (limited to 'crates/ra_hir/src')
-rw-r--r--  crates/ra_hir/src/lib.rs              |   4
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs  | 138
-rw-r--r--  crates/ra_hir/src/source_binder.rs    | 173
3 files changed, 217 insertions, 98 deletions
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index a1cf89010..a2350573c 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -8,7 +8,7 @@
 #![recursion_limit = "512"]
 
 macro_rules! impl_froms {
-    ($e:ident: $($v:ident $(($($sv:ident),*))?),*) => {
+    ($e:ident: $($v:ident $(($($sv:ident),*))?),*$(,)?) => {
         $(
             impl From<$v> for $e {
                 fn from(it: $v) -> $e {
@@ -28,6 +28,7 @@ macro_rules! impl_froms
 
 pub mod db;
 pub mod source_analyzer;
+pub mod source_binder;
 
 pub mod diagnostics;
 
@@ -47,6 +48,7 @@ pub use crate::{
     from_source::FromSource,
     has_source::HasSource,
     source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
+    source_binder::SourceBinder,
 };
 
 pub use hir_def::{
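The only functional change to `impl_froms!` above is the `$(,)?` added at the end of the matcher, which lets invocations end with a trailing comma, presumably so the new `impl_froms!` call in `source_binder.rs` (which ends its list with `VariantId,`) parses. A tiny self-contained illustration of that matcher, unrelated to the real macro:

```rust
// Minimal demo of the optional trailing-comma matcher `$(,)?`.
macro_rules! names {
    ($($v:ident),* $(,)?) => {
        [$(stringify!($v)),*]
    };
}

fn main() {
    // Without `$(,)?` in the matcher, the second call would fail to parse.
    assert_eq!(names![Foo, Bar], names![Foo, Bar,]);
}
```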
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 76e0bff34..4f8fc9602 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -14,26 +14,22 @@ use hir_def::{
         BodySourceMap,
     },
     expr::{ExprId, PatId},
-    nameres::ModuleSource,
-    resolver::{self, resolver_for_scope, HasResolver, Resolver, TypeNs, ValueNs},
+    resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
     DefWithBodyId, TraitId,
 };
 use hir_expand::{
     hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
 };
 use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
-use ra_prof::profile;
 use ra_syntax::{
     ast::{self, AstNode},
-    match_ast, AstPtr,
-    SyntaxKind::*,
-    SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
+    AstPtr, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextUnit,
 };
 use rustc_hash::FxHashSet;
 
 use crate::{
-    db::HirDatabase, Adt, Const, DefWithBody, Enum, EnumVariant, FromSource, Function, ImplBlock,
-    Local, MacroDef, Name, Path, ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
+    db::HirDatabase, Adt, Const, DefWithBody, EnumVariant, Function, Local, MacroDef, Name, Path,
+    ScopeDef, Static, Struct, Trait, Type, TypeAlias, TypeParam,
 };
 
 /// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
@@ -109,37 +105,43 @@ impl SourceAnalyzer {
         node: InFile<&SyntaxNode>,
         offset: Option<TextUnit>,
     ) -> SourceAnalyzer {
-        let _p = profile("SourceAnalyzer::new");
-        let def_with_body = def_with_body_from_child_node(db, node);
-        if let Some(def) = def_with_body {
-            let (_body, source_map) = db.body_with_source_map(def.into());
-            let scopes = db.expr_scopes(def.into());
-            let scope = match offset {
-                None => scope_for(&scopes, &source_map, node),
-                Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
-            };
-            let resolver = resolver_for_scope(db, def.into(), scope);
-            SourceAnalyzer {
-                resolver,
-                body_owner: Some(def),
-                body_source_map: Some(source_map),
-                infer: Some(db.infer(def.into())),
-                scopes: Some(scopes),
-                file_id: node.file_id,
-            }
-        } else {
-            SourceAnalyzer {
-                resolver: node
-                    .value
-                    .ancestors()
-                    .find_map(|it| try_get_resolver_for_node(db, node.with_value(&it)))
-                    .unwrap_or_default(),
-                body_owner: None,
-                body_source_map: None,
-                infer: None,
-                scopes: None,
-                file_id: node.file_id,
-            }
+        crate::source_binder::SourceBinder::new(db).analyze(node, offset)
+    }
+
+    pub(crate) fn new_for_body(
+        db: &impl HirDatabase,
+        def: DefWithBodyId,
+        node: InFile<&SyntaxNode>,
+        offset: Option<TextUnit>,
+    ) -> SourceAnalyzer {
+        let (_body, source_map) = db.body_with_source_map(def);
+        let scopes = db.expr_scopes(def);
+        let scope = match offset {
+            None => scope_for(&scopes, &source_map, node),
+            Some(offset) => scope_for_offset(&scopes, &source_map, node.with_value(offset)),
+        };
+        let resolver = resolver_for_scope(db, def, scope);
+        SourceAnalyzer {
+            resolver,
+            body_owner: Some(def.into()),
+            body_source_map: Some(source_map),
+            infer: Some(db.infer(def)),
+            scopes: Some(scopes),
+            file_id: node.file_id,
+        }
+    }
+
+    pub(crate) fn new_for_resolver(
+        resolver: Resolver,
+        node: InFile<&SyntaxNode>,
+    ) -> SourceAnalyzer {
+        SourceAnalyzer {
+            resolver,
+            body_owner: None,
+            body_source_map: None,
+            infer: None,
+            scopes: None,
+            file_id: node.file_id,
         }
     }
 
@@ -366,64 +368,6 @@ impl SourceAnalyzer {
     }
 }
 
-fn try_get_resolver_for_node(db: &impl HirDatabase, node: InFile<&SyntaxNode>) -> Option<Resolver> {
-    match_ast! {
-        match (node.value) {
-            ast::Module(it) => {
-                let src = node.with_value(it);
-                Some(crate::Module::from_declaration(db, src)?.id.resolver(db))
-            },
-            ast::SourceFile(it) => {
-                let src = node.with_value(ModuleSource::SourceFile(it));
-                Some(crate::Module::from_definition(db, src)?.id.resolver(db))
-            },
-            ast::StructDef(it) => {
-                let src = node.with_value(it);
-                Some(Struct::from_source(db, src)?.id.resolver(db))
-            },
-            ast::EnumDef(it) => {
-                let src = node.with_value(it);
-                Some(Enum::from_source(db, src)?.id.resolver(db))
-            },
-            ast::ImplBlock(it) => {
-                let src = node.with_value(it);
-                Some(ImplBlock::from_source(db, src)?.id.resolver(db))
-            },
-            ast::TraitDef(it) => {
-                let src = node.with_value(it);
-                Some(Trait::from_source(db, src)?.id.resolver(db))
-            },
-            _ => match node.value.kind() {
-                FN_DEF | CONST_DEF | STATIC_DEF => {
-                    let def = def_with_body_from_child_node(db, node)?;
-                    let def = DefWithBodyId::from(def);
-                    Some(def.resolver(db))
-                }
-                // FIXME add missing cases
-                _ => None
-            }
-        }
-    }
-}
-
-fn def_with_body_from_child_node(
-    db: &impl HirDatabase,
-    child: InFile<&SyntaxNode>,
-) -> Option<DefWithBody> {
-    let _p = profile("def_with_body_from_child_node");
-    child.cloned().ancestors_with_macros(db).find_map(|node| {
-        let n = &node.value;
-        match_ast! {
-            match n {
-                ast::FnDef(def) => { return Function::from_source(db, node.with_value(def)).map(DefWithBody::from); },
-                ast::ConstDef(def) => { return Const::from_source(db, node.with_value(def)).map(DefWithBody::from); },
-                ast::StaticDef(def) => { return Static::from_source(db, node.with_value(def)).map(DefWithBody::from); },
-                _ => { None },
-            }
-        }
-    })
-}
-
 fn scope_for(
     scopes: &ExprScopes,
     source_map: &BodySourceMap,
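To restate the restructuring above in isolation: `SourceAnalyzer::new` is now a thin wrapper over `SourceBinder::analyze`, which builds a body-aware analyzer (scopes plus inference) only when the node lives inside a body, and a resolver-only analyzer otherwise. A rough, self-contained model of that dispatch, with hypothetical types rather than the real signatures:

```rust
// Stand-ins for the two kinds of analyzer the new constructors produce.
#[derive(Debug, PartialEq)]
enum Analyzer {
    ForBody { has_inference: bool }, // like `new_for_body`: scopes + inference
    ForResolver,                     // like `new_for_resolver`: resolver only
}

// Stand-in for `ChildContainer`: either a body owner or something else.
enum Container {
    Body,
    Other,
}

fn analyze(container: Option<Container>) -> Analyzer {
    match container {
        Some(Container::Body) => Analyzer::ForBody { has_inference: true },
        Some(Container::Other) | None => Analyzer::ForResolver,
    }
}

fn main() {
    assert_eq!(analyze(Some(Container::Body)), Analyzer::ForBody { has_inference: true });
    assert_eq!(analyze(None), Analyzer::ForResolver);
}
```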
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
new file mode 100644
index 000000000..00541dbe1
--- /dev/null
+++ b/crates/ra_hir/src/source_binder.rs
@@ -0,0 +1,173 @@
+//! `SourceBinder` should be the main entry point for getting info about source code.
+//! Its main task is to map source syntax trees to hir-level IDs.
+//!
+//! It is intended to subsume `FromSource` and `SourceAnalyzer`.
+
+use hir_def::{
+    child_by_source::ChildBySource,
+    dyn_map::DynMap,
+    keys::{self, Key},
+    resolver::{HasResolver, Resolver},
+    ConstId, DefWithBodyId, EnumId, EnumVariantId, FunctionId, ImplId, ModuleId, StaticId,
+    StructFieldId, StructId, TraitId, TypeAliasId, UnionId, VariantId,
+};
+use hir_expand::InFile;
+use ra_prof::profile;
+use ra_syntax::{ast, match_ast, AstNode, SyntaxNode, TextUnit};
+use rustc_hash::FxHashMap;
+
+use crate::{db::HirDatabase, ModuleSource, SourceAnalyzer};
+
+pub struct SourceBinder<'a, DB> {
+    pub db: &'a DB,
+    child_by_source_cache: FxHashMap<ChildContainer, DynMap>,
+}
+
+impl<DB: HirDatabase> SourceBinder<'_, DB> {
+    pub fn new(db: &DB) -> SourceBinder<DB> {
+        SourceBinder { db, child_by_source_cache: FxHashMap::default() }
+    }
+
+    pub fn analyze(
+        &mut self,
+        src: InFile<&SyntaxNode>,
+        offset: Option<TextUnit>,
+    ) -> SourceAnalyzer {
+        let _p = profile("SourceBinder::analyzer");
+        let container = match self.find_container(src) {
+            Some(it) => it,
+            None => return SourceAnalyzer::new_for_resolver(Resolver::default(), src),
+        };
+
+        let resolver = match container {
+            ChildContainer::DefWithBodyId(def) => {
+                return SourceAnalyzer::new_for_body(self.db, def, src, offset)
+            }
+            ChildContainer::TraitId(it) => it.resolver(self.db),
+            ChildContainer::ImplId(it) => it.resolver(self.db),
+            ChildContainer::ModuleId(it) => it.resolver(self.db),
+            ChildContainer::EnumId(it) => it.resolver(self.db),
+            ChildContainer::VariantId(it) => it.resolver(self.db),
+        };
+        SourceAnalyzer::new_for_resolver(resolver, src)
+    }
+
+    pub fn to_def<D, T>(&mut self, src: InFile<T>) -> Option<D>
+    where
+        D: From<T::ID>,
+        T: ToId,
+    {
+        let id: T::ID = self.to_id(src)?;
+        Some(id.into())
+    }
+
+    fn to_id<T: ToId>(&mut self, src: InFile<T>) -> Option<T::ID> {
+        let container = self.find_container(src.as_ref().map(|it| it.syntax()))?;
+        let db = self.db;
+        let dyn_map =
+            &*self.child_by_source_cache.entry(container).or_insert_with(|| match container {
+                ChildContainer::DefWithBodyId(it) => it.child_by_source(db),
+                ChildContainer::ModuleId(it) => it.child_by_source(db),
+                ChildContainer::TraitId(it) => it.child_by_source(db),
+                ChildContainer::ImplId(it) => it.child_by_source(db),
+                ChildContainer::EnumId(it) => it.child_by_source(db),
+                ChildContainer::VariantId(it) => it.child_by_source(db),
+            });
+        dyn_map[T::KEY].get(&src).copied()
+    }
+
+    fn find_container(&mut self, src: InFile<&SyntaxNode>) -> Option<ChildContainer> {
+        for container in src.cloned().ancestors_with_macros(self.db).skip(1) {
+            let res: ChildContainer = match_ast! {
+                match (container.value) {
+                    ast::TraitDef(it) => {
+                        let def: TraitId = self.to_id(container.with_value(it))?;
+                        def.into()
+                    },
+                    ast::ImplBlock(it) => {
+                        let def: ImplId = self.to_id(container.with_value(it))?;
+                        def.into()
+                    },
+                    ast::FnDef(it) => {
+                        let def: FunctionId = self.to_id(container.with_value(it))?;
+                        DefWithBodyId::from(def).into()
+                    },
+                    ast::StaticDef(it) => {
+                        let def: StaticId = self.to_id(container.with_value(it))?;
+                        DefWithBodyId::from(def).into()
+                    },
+                    ast::ConstDef(it) => {
+                        let def: ConstId = self.to_id(container.with_value(it))?;
+                        DefWithBodyId::from(def).into()
+                    },
+                    ast::EnumDef(it) => {
+                        let def: EnumId = self.to_id(container.with_value(it))?;
+                        def.into()
+                    },
+                    ast::StructDef(it) => {
+                        let def: StructId = self.to_id(container.with_value(it))?;
+                        VariantId::from(def).into()
+                    },
+                    ast::UnionDef(it) => {
+                        let def: UnionId = self.to_id(container.with_value(it))?;
+                        VariantId::from(def).into()
+                    },
+                    // FIXME: handle out-of-line modules here
+                    _ => { continue },
+                }
+            };
+            return Some(res);
+        }
+
+        let module_source = ModuleSource::from_child_node(self.db, src);
+        let c = crate::Module::from_definition(self.db, src.with_value(module_source))?;
+        Some(c.id.into())
+    }
+}
+
+#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
+enum ChildContainer {
+    DefWithBodyId(DefWithBodyId),
+    ModuleId(ModuleId),
+    TraitId(TraitId),
+    ImplId(ImplId),
+    EnumId(EnumId),
+    VariantId(VariantId),
+}
+impl_froms! {
+    ChildContainer:
+    DefWithBodyId,
+    ModuleId,
+    TraitId,
+    ImplId,
+    EnumId,
+    VariantId,
+}
+
+pub trait ToId: Sized + AstNode + 'static {
+    type ID: Sized + Copy + 'static;
+    const KEY: Key<Self, Self::ID>;
+}
+
+macro_rules! to_id_impls {
+    ($(($id:ident, $ast:path, $key:path)),* ,) => {$(
+        impl ToId for $ast {
+            type ID = $id;
+            const KEY: Key<Self, Self::ID> = $key;
+        }
+    )*}
+}
+
+to_id_impls![
+    (StructId, ast::StructDef, keys::STRUCT),
+    (UnionId, ast::UnionDef, keys::UNION),
+    (EnumId, ast::EnumDef, keys::ENUM),
+    (TraitId, ast::TraitDef, keys::TRAIT),
+    (FunctionId, ast::FnDef, keys::FUNCTION),
+    (StaticId, ast::StaticDef, keys::STATIC),
+    (ConstId, ast::ConstDef, keys::CONST),
+    (TypeAliasId, ast::TypeAliasDef, keys::TYPE_ALIAS),
+    (ImplId, ast::ImplBlock, keys::IMPL),
+    (StructFieldId, ast::RecordFieldDef, keys::RECORD_FIELD),
+    (EnumVariantId, ast::EnumVariant, keys::ENUM_VARIANT),
+];
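The `ToId` trait plus `to_id_impls!` at the end of the new file is a small table-driven pattern: each AST node type declares its ID type and the key under which that ID is stored, and a macro stamps out the impls so new cases are one line each. A simplified, self-contained re-creation of the idea, using a hypothetical `Key` and made-up ID types rather than the real `hir_def` ones:

```rust
#![allow(dead_code)]

// Stand-in for hir_def's typed `Key<AST, ID>`.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct Key(&'static str);

// Each AST node type knows its ID type and which table its IDs live in.
trait ToId {
    type Id: Copy + 'static;
    const KEY: Key;
}

struct FnDef;
struct StructDef;

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct FunctionId(u32);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct StructId(u32);

// Like `to_id_impls!`: adding a case is a single line in the invocation.
macro_rules! to_id_impls {
    ($(($id:ident, $ast:path, $key:expr)),* $(,)?) => {$(
        impl ToId for $ast {
            type Id = $id;
            const KEY: Key = $key;
        }
    )*}
}

to_id_impls![
    (FunctionId, FnDef, Key("FUNCTION")),
    (StructId, StructDef, Key("STRUCT")),
];

fn main() {
    // A generic lookup can pick the right table from the type alone,
    // the way `SourceBinder::to_id` indexes `dyn_map[T::KEY]`.
    assert_eq!(<FnDef as ToId>::KEY, Key("FUNCTION"));
    assert_eq!(<StructDef as ToId>::KEY, Key("STRUCT"));
}
```

The real version additionally threads `Self` and `Self::ID` through the key type (`Key<Self, Self::ID>`), so the `DynMap` lookup stays type-safe.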