author     David Lattimore <[email protected]>    2020-07-26 05:49:18 +0100
committer  David Lattimore <[email protected]>    2020-07-26 12:58:35 +0100
commit     bb587fae1dc169aca2e587bc7876611852186dd5 (patch)
tree       249231d1b7f95192d9eccf3ef39b6986ad01249b /crates/ra_ssr
parent     8ff40af7286b66294d8b64f0c8fdb3179a84be76 (diff)
SSR: Move more resolution-related code into the resolving module
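
The change bundles the `hir::SemanticsScope` and `hir::Hygiene` that `MatchFinder` previously carried as two separate fields into a single `resolving::ResolutionScope`, so `ResolvedRule::new` now takes one resolution argument instead of two. A condensed before/after sketch of the affected signatures, taken from the diff below (bodies elided, unrelated items omitted):

    // Before: scope and hygiene lived directly on MatchFinder.
    pub struct MatchFinder<'db> {
        sema: Semantics<'db, ra_ide_db::RootDatabase>,
        rules: Vec<ResolvedRule>,
        scope: hir::SemanticsScope<'db>,
        hygiene: hir::Hygiene,
    }

    // After: both are owned by a new type in the resolving module ...
    pub struct MatchFinder<'db> {
        sema: Semantics<'db, ra_ide_db::RootDatabase>,
        rules: Vec<ResolvedRule>,
        resolution_scope: resolving::ResolutionScope<'db>,
    }

    pub(crate) struct ResolutionScope<'db> {
        scope: hir::SemanticsScope<'db>,
        hygiene: hir::Hygiene,
    }

    // ... and ResolvedRule::new receives it as a single parameter.
    impl ResolvedRule {
        pub(crate) fn new(
            rule: parsing::ParsedRule,
            resolution_scope: &ResolutionScope,
            index: usize,
        ) -> Result<ResolvedRule, SsrError> { /* ... */ }
    }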
Diffstat (limited to 'crates/ra_ssr')
 -rw-r--r--  crates/ra_ssr/src/lib.rs       | 26
 -rw-r--r--  crates/ra_ssr/src/resolving.rs | 39
 2 files changed, 38 insertions(+), 27 deletions(-)
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs
index 2fb326b45..7014a6ac6 100644
--- a/crates/ra_ssr/src/lib.rs
+++ b/crates/ra_ssr/src/lib.rs
@@ -51,8 +51,7 @@ pub struct MatchFinder<'db> {
     /// Our source of information about the user's code.
     sema: Semantics<'db, ra_ide_db::RootDatabase>,
     rules: Vec<ResolvedRule>,
-    scope: hir::SemanticsScope<'db>,
-    hygiene: hir::Hygiene,
+    resolution_scope: resolving::ResolutionScope<'db>,
 }
 
 impl<'db> MatchFinder<'db> {
@@ -63,21 +62,8 @@ impl<'db> MatchFinder<'db> {
         lookup_context: FilePosition,
     ) -> MatchFinder<'db> {
         let sema = Semantics::new(db);
-        let file = sema.parse(lookup_context.file_id);
-        // Find a node at the requested position, falling back to the whole file.
-        let node = file
-            .syntax()
-            .token_at_offset(lookup_context.offset)
-            .left_biased()
-            .map(|token| token.parent())
-            .unwrap_or_else(|| file.syntax().clone());
-        let scope = sema.scope(&node);
-        MatchFinder {
-            sema: Semantics::new(db),
-            rules: Vec::new(),
-            scope,
-            hygiene: hir::Hygiene::new(db, lookup_context.file_id.into()),
-        }
+        let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context);
+        MatchFinder { sema: Semantics::new(db), rules: Vec::new(), resolution_scope }
     }
 
     /// Constructs an instance using the start of the first file in `db` as the lookup context.
@@ -106,8 +92,7 @@ impl<'db> MatchFinder<'db> {
         for parsed_rule in rule.parsed_rules {
             self.rules.push(ResolvedRule::new(
                 parsed_rule,
-                &self.scope,
-                &self.hygiene,
+                &self.resolution_scope,
                 self.rules.len(),
             )?);
         }
@@ -140,8 +125,7 @@ impl<'db> MatchFinder<'db> {
         for parsed_rule in pattern.parsed_rules {
             self.rules.push(ResolvedRule::new(
                 parsed_rule,
-                &self.scope,
-                &self.hygiene,
+                &self.resolution_scope,
                 self.rules.len(),
             )?);
         }
diff --git a/crates/ra_ssr/src/resolving.rs b/crates/ra_ssr/src/resolving.rs
index 75f556785..d53981737 100644
--- a/crates/ra_ssr/src/resolving.rs
+++ b/crates/ra_ssr/src/resolving.rs
@@ -3,10 +3,16 @@
 use crate::errors::error;
 use crate::{parsing, SsrError};
 use parsing::Placeholder;
+use ra_db::FilePosition;
 use ra_syntax::{ast, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken};
 use rustc_hash::{FxHashMap, FxHashSet};
 use test_utils::mark;
 
+pub(crate) struct ResolutionScope<'db> {
+    scope: hir::SemanticsScope<'db>,
+    hygiene: hir::Hygiene,
+}
+
 pub(crate) struct ResolvedRule {
     pub(crate) pattern: ResolvedPattern,
     pub(crate) template: Option<ResolvedPattern>,
@@ -30,12 +36,11 @@ pub(crate) struct ResolvedPath {
 impl ResolvedRule {
     pub(crate) fn new(
         rule: parsing::ParsedRule,
-        scope: &hir::SemanticsScope,
-        hygiene: &hir::Hygiene,
+        resolution_scope: &ResolutionScope,
         index: usize,
     ) -> Result<ResolvedRule, SsrError> {
         let resolver =
-            Resolver { scope, hygiene, placeholders_by_stand_in: rule.placeholders_by_stand_in };
+            Resolver { resolution_scope, placeholders_by_stand_in: rule.placeholders_by_stand_in };
         let resolved_template = if let Some(template) = rule.template {
             Some(resolver.resolve_pattern_tree(template)?)
         } else {
@@ -57,8 +62,7 @@ impl ResolvedRule {
 }
 
 struct Resolver<'a, 'db> {
-    scope: &'a hir::SemanticsScope<'db>,
-    hygiene: &'a hir::Hygiene,
+    resolution_scope: &'a ResolutionScope<'db>,
     placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>,
 }
 
@@ -104,6 +108,7 @@ impl Resolver<'_, '_> {
                 && !self.path_contains_placeholder(&path)
             {
                 let resolution = self
+                    .resolution_scope
                     .resolve_path(&path)
                     .ok_or_else(|| error!("Failed to resolve path `{}`", node.text()))?;
                 resolved_paths.insert(node, ResolvedPath { resolution, depth });
@@ -131,9 +136,31 @@ impl Resolver<'_, '_> {
         }
         false
     }
+}
+
+impl<'db> ResolutionScope<'db> {
+    pub(crate) fn new(
+        sema: &hir::Semantics<'db, ra_ide_db::RootDatabase>,
+        lookup_context: FilePosition,
+    ) -> ResolutionScope<'db> {
+        use ra_syntax::ast::AstNode;
+        let file = sema.parse(lookup_context.file_id);
+        // Find a node at the requested position, falling back to the whole file.
+        let node = file
+            .syntax()
+            .token_at_offset(lookup_context.offset)
+            .left_biased()
+            .map(|token| token.parent())
+            .unwrap_or_else(|| file.syntax().clone());
+        let scope = sema.scope(&node);
+        ResolutionScope {
+            scope,
+            hygiene: hir::Hygiene::new(sema.db, lookup_context.file_id.into()),
+        }
+    }
 
     fn resolve_path(&self, path: &ast::Path) -> Option<hir::PathResolution> {
-        let hir_path = hir::Path::from_src(path.clone(), self.hygiene)?;
+        let hir_path = hir::Path::from_src(path.clone(), &self.hygiene)?;
         // First try resolving the whole path. This will work for things like
         // `std::collections::HashMap`, but will fail for things like
         // `std::collections::HashMap::new`.
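
The comment at the end of the last hunk describes a two-stage strategy: first try to resolve the full path, and only when that fails fall back to resolving the qualifier and then looking the final segment up on whatever the qualifier resolved to. The rest of `resolve_path` is not shown in this hunk; the following is a minimal sketch of that control flow, with hypothetical helpers `resolve_whole_path` and `find_assoc_item` standing in for the real calls.

    // Sketch only: `resolve_whole_path` and `find_assoc_item` are hypothetical
    // stand-ins; the truncated hunk does not show the real calls.
    fn resolve_path_sketch(&self, path: &ast::Path) -> Option<hir::PathResolution> {
        // Stage 1: try the whole path, e.g. `std::collections::HashMap`.
        if let Some(resolution) = self.resolve_whole_path(path) {
            return Some(resolution);
        }
        // Stage 2: for paths like `std::collections::HashMap::new`, resolve the
        // qualifier (`std::collections::HashMap`), then look up the trailing
        // segment (`new`) on whatever the qualifier resolved to.
        let qualifier = path.qualifier()?;
        let resolved_qualifier = self.resolve_whole_path(&qualifier)?;
        self.find_assoc_item(resolved_qualifier, path.segment()?)
    }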