112 files changed, 4407 insertions, 3938 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index ed9191c49..7f4e75341 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -116,9 +116,9 @@ jobs:
     - run: npm ci
       working-directory: ./editors/code
 
-    - run: npm audit || { sleep 10 && npm audit; } || { sleep 30 && npm audit; }
-      if: runner.os == 'Linux'
-      working-directory: ./editors/code
+    # - run: npm audit || { sleep 10 && npm audit; } || { sleep 30 && npm audit; }
+    #   if: runner.os == 'Linux'
+    #   working-directory: ./editors/code
 
     - run: npm run lint
       working-directory: ./editors/code
diff --git a/Cargo.lock b/Cargo.lock
index 9e0d0714a..61ae8157a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -52,17 +52,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cff77d8686867eceff3105329d4698d96c2391c176d5d03adc90c7389162b5b8"
 
 [[package]]
-name = "atty"
-version = "0.2.14"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
-dependencies = [
- "hermit-abi",
- "libc",
- "winapi 0.3.9",
-]
-
-[[package]]
 name = "autocfg"
 version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -196,18 +185,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "clicolors-control"
-version = "1.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90082ee5dcdd64dc4e9e0d37fbf3ee325419e39c0092191e0393df65518f741e"
-dependencies = [
- "atty",
- "lazy_static",
- "libc",
- "winapi 0.3.9",
-]
-
-[[package]]
 name = "cloudabi"
 version = "0.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -218,11 +195,10 @@ dependencies = [
 
 [[package]]
 name = "console"
-version = "0.10.3"
+version = "0.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2586208b33573b7f76ccfbe5adb076394c88deaf81b84d7213969805b0a952a7"
+checksum = "8c0994e656bba7b922d8dd1245db90672ffb701e684e45be58f20719d69abc5a"
 dependencies = [
- "clicolors-control",
  "encode_unicode",
  "lazy_static",
  "libc",
@@ -352,6 +328,15 @@ dependencies = [
 ]
 
 [[package]]
+name = "expect"
+version = "0.1.0"
+dependencies = [
+ "difference",
+ "once_cell",
+ "stdx",
+]
+
+[[package]]
 name = "filetime"
 version = "0.2.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -544,9 +529,9 @@ dependencies = [
 
 [[package]]
 name = "insta"
-version = "0.16.0"
+version = "0.16.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8386e795fb3927131ea4cede203c529a333652eb6dc4ff29616b832b27e9b096"
+checksum = "617e921abc813f96a3b00958c079e7bf1e2db998f8a04f1546dd967373a418ee"
 dependencies = [
  "console",
  "difference",
@@ -1134,6 +1119,7 @@ name = "ra_ide"
 version = "0.1.0"
 dependencies = [
  "either",
+ "expect",
  "indexmap",
  "insta",
  "itertools",
@@ -1262,6 +1248,7 @@ dependencies = [
  "ra_syntax",
  "ra_text_edit",
  "rustc-hash",
+ "test_utils",
 ]
 
 [[package]]
@@ -1464,15 +1451,18 @@ dependencies = [
  "ra_hir_def",
  "ra_hir_ty",
  "ra_ide",
+ "ra_ide_db",
  "ra_mbe",
  "ra_proc_macro_srv",
  "ra_prof",
  "ra_project_model",
+ "ra_ssr",
  "ra_syntax",
  "ra_text_edit",
  "ra_toolchain",
  "ra_tt",
  "rand",
+ "rayon",
  "rustc-hash",
  "serde",
  "serde_json",
diff --git a/Cargo.toml b/Cargo.toml
index 5278b5a16..189f4ea2b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -24,6 +24,8 @@ opt-level = 0
 opt-level = 0
 [profile.release.package.salsa-macros]
 opt-level = 0
+[profile.release.package.tracing-attributes]
+opt-level = 0
 [profile.release.package.xtask]
 opt-level = 0
 
diff --git a/crates/expect/Cargo.toml b/crates/expect/Cargo.toml
new file mode 100644
index 000000000..caee43106
--- /dev/null
+++ b/crates/expect/Cargo.toml
@@ -0,0 +1,10 @@
+[package]
+name = "expect"
+version = "0.1.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[dependencies]
+once_cell = "1"
+difference = "2"
+stdx = { path = "../stdx" }
diff --git a/crates/expect/src/lib.rs b/crates/expect/src/lib.rs
new file mode 100644
index 000000000..a5e26fade
--- /dev/null
+++ b/crates/expect/src/lib.rs
@@ -0,0 +1,348 @@
+//! Snapshot testing library, see
+//! https://github.com/rust-analyzer/rust-analyzer/pull/5101
+use std::{
+    collections::HashMap,
+    env, fmt, fs, mem,
+    ops::Range,
+    panic,
+    path::{Path, PathBuf},
+    sync::Mutex,
+};
+
+use difference::Changeset;
+use once_cell::sync::Lazy;
+use stdx::{lines_with_ends, trim_indent};
+
+const HELP: &str = "
+You can update all `expect![[]]` tests by running:
+
+    env UPDATE_EXPECT=1 cargo test
+
+To update a single test, place the cursor on `expect` token and use `run` feature of rust-analyzer.
+";
+
+fn update_expect() -> bool {
+    env::var("UPDATE_EXPECT").is_ok()
+}
+
+/// expect![[r#"inline snapshot"#]]
+#[macro_export]
+macro_rules! expect {
+    [[$data:literal]] => {$crate::Expect {
+        position: $crate::Position {
+            file: file!(),
+            line: line!(),
+            column: column!(),
+        },
+        data: $data,
+    }};
+    [[]] => { $crate::expect![[""]] };
+}
+
+/// expect_file!["/crates/foo/test_data/bar.html"]
+#[macro_export]
+macro_rules! expect_file {
+    [$path:literal] => {$crate::ExpectFile { path: $path }};
+}
+
+#[derive(Debug)]
+pub struct Expect {
+    pub position: Position,
+    pub data: &'static str,
+}
+
+#[derive(Debug)]
+pub struct ExpectFile {
+    pub path: &'static str,
+}
+
+#[derive(Debug)]
+pub struct Position {
+    pub file: &'static str,
+    pub line: u32,
+    pub column: u32,
+}
+
+impl fmt::Display for Position {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "{}:{}:{}", self.file, self.line, self.column)
+    }
+}
+
+impl Expect {
+    pub fn assert_eq(&self, actual: &str) {
+        let trimmed = self.trimmed();
+        if &trimmed == actual {
+            return;
+        }
+        Runtime::fail_expect(self, &trimmed, actual);
+    }
+    pub fn assert_debug_eq(&self, actual: &impl fmt::Debug) {
+        let actual = format!("{:#?}\n", actual);
+        self.assert_eq(&actual)
+    }
+
+    fn trimmed(&self) -> String {
+        if !self.data.contains('\n') {
+            return self.data.to_string();
+        }
+        trim_indent(self.data)
+    }
+
+    fn locate(&self, file: &str) -> Location {
+        let mut target_line = None;
+        let mut line_start = 0;
+        for (i, line) in lines_with_ends(file).enumerate() {
+            if i == self.position.line as usize - 1 {
+                let pat = "expect![[";
+                let offset = line.find(pat).unwrap();
+                let literal_start = line_start + offset + pat.len();
+                let indent = line.chars().take_while(|&it| it == ' ').count();
+                target_line = Some((literal_start, indent));
+                break;
+            }
+            line_start += line.len();
+        }
+        let (literal_start, line_indent) = target_line.unwrap();
+        let literal_length =
+            file[literal_start..].find("]]").expect("Couldn't find matching `]]` for `expect![[`.");
+        let literal_range = literal_start..literal_start + literal_length;
+        Location { line_indent, literal_range }
+    }
+}
+
+impl ExpectFile {
+    pub fn assert_eq(&self, actual: &str) {
+        let expected = self.read();
+        if actual == expected {
+            return;
+        }
+        Runtime::fail_file(self, &expected, actual);
+    }
+    fn read(&self) -> String {
+        fs::read_to_string(self.abs_path()).unwrap_or_default().replace("\r\n", "\n")
+    }
+    fn write(&self, contents: &str) {
+        fs::write(self.abs_path(), contents).unwrap()
+    }
+    fn abs_path(&self) -> PathBuf {
+        workspace_root().join(self.path)
+    }
+}
+
+#[derive(Default)]
+struct Runtime {
+    help_printed: bool,
+    per_file: HashMap<&'static str, FileRuntime>,
+}
+static RT: Lazy<Mutex<Runtime>> = Lazy::new(Default::default);
+
+impl Runtime {
+    fn fail_expect(expect: &Expect, expected: &str, actual: &str) {
+        let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
+        if update_expect() {
+            println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", expect.position);
+            rt.per_file
+                .entry(expect.position.file)
+                .or_insert_with(|| FileRuntime::new(expect))
+                .update(expect, actual);
+            return;
+        }
+        rt.panic(expect.position.to_string(), expected, actual);
+    }
+
+    fn fail_file(expect: &ExpectFile, expected: &str, actual: &str) {
+        let mut rt = RT.lock().unwrap_or_else(|poisoned| poisoned.into_inner());
+        if update_expect() {
+            println!("\x1b[1m\x1b[92mupdating\x1b[0m: {}", expect.path);
+            expect.write(actual);
+            return;
+        }
+        rt.panic(expect.path.to_string(), expected, actual);
+    }
+
+    fn panic(&mut self, position: String, expected: &str, actual: &str) {
+        let print_help = !mem::replace(&mut self.help_printed, true);
+        let help = if print_help { HELP } else { "" };
+
+        let diff = Changeset::new(actual, expected, "\n");
+
+        println!(
+            "\n
+\x1b[1m\x1b[91merror\x1b[97m: expect test failed\x1b[0m
+   \x1b[1m\x1b[34m-->\x1b[0m {}
+{}
+\x1b[1mExpect\x1b[0m:
+----
+{}
+----
+
+\x1b[1mActual\x1b[0m:
+----
+{}
+----
+
+\x1b[1mDiff\x1b[0m:
+----
+{}
+----
+",
+            position, help, expected, actual, diff
+        );
+        // Use resume_unwind instead of panic!() to prevent a backtrace, which is unnecessary noise.
+        panic::resume_unwind(Box::new(()));
+    }
+}
+
+struct FileRuntime {
+    path: PathBuf,
+    original_text: String,
+    patchwork: Patchwork,
+}
+
+impl FileRuntime {
+    fn new(expect: &Expect) -> FileRuntime {
+        let path = workspace_root().join(expect.position.file);
+        let original_text = fs::read_to_string(&path).unwrap();
+        let patchwork = Patchwork::new(original_text.clone());
+        FileRuntime { path, original_text, patchwork }
+    }
+    fn update(&mut self, expect: &Expect, actual: &str) {
+        let loc = expect.locate(&self.original_text);
+        let patch = format_patch(loc.line_indent.clone(), actual);
+        self.patchwork.patch(loc.literal_range, &patch);
+        fs::write(&self.path, &self.patchwork.text).unwrap()
+    }
+}
+
+#[derive(Debug)]
+struct Location {
+    line_indent: usize,
+    literal_range: Range<usize>,
+}
+
+#[derive(Debug)]
+struct Patchwork {
+    text: String,
+    indels: Vec<(Range<usize>, usize)>,
+}
+
+impl Patchwork {
+    fn new(text: String) -> Patchwork {
+        Patchwork { text, indels: Vec::new() }
+    }
+    fn patch(&mut self, mut range: Range<usize>, patch: &str) {
+        self.indels.push((range.clone(), patch.len()));
+        self.indels.sort_by_key(|(delete, _insert)| delete.start);
+
+        let (delete, insert) = self
+            .indels
+            .iter()
+            .take_while(|(delete, _)| delete.start < range.start)
+            .map(|(delete, insert)| (delete.end - delete.start, insert))
+            .fold((0usize, 0usize), |(x1, y1), (x2, y2)| (x1 + x2, y1 + y2));
+
+        for pos in &mut [&mut range.start, &mut range.end] {
+            **pos -= delete;
+            **pos += insert;
+        }
+
+        self.text.replace_range(range, &patch);
+    }
+}
+
+fn format_patch(line_indent: usize, patch: &str) -> String {
+    let mut max_hashes = 0;
+    let mut cur_hashes = 0;
+    for byte in patch.bytes() {
+        if byte != b'#' {
+            cur_hashes = 0;
+            continue;
+        }
+        cur_hashes += 1;
+        max_hashes = max_hashes.max(cur_hashes);
+    }
+    let hashes = &"#".repeat(max_hashes + 1);
+    let indent = &" ".repeat(line_indent);
+    let is_multiline = patch.contains('\n');
+
+    let mut buf = String::new();
+    buf.push('r');
+    buf.push_str(hashes);
+    buf.push('"');
+    if is_multiline {
+        buf.push('\n');
+    }
+    let mut final_newline = false;
+    for line in lines_with_ends(patch) {
+        if is_multiline {
+            buf.push_str(indent);
+            buf.push_str("    ");
+        }
+        buf.push_str(line);
+        final_newline = line.ends_with('\n');
+    }
+    if final_newline {
+        buf.push_str(indent);
+    }
+    buf.push('"');
+    buf.push_str(hashes);
+    buf
+}
+
+fn workspace_root() -> PathBuf {
+    Path::new(
+        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
+    )
+    .ancestors()
+    .nth(2)
+    .unwrap()
+    .to_path_buf()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_format_patch() {
+        let patch = format_patch(0, "hello\nworld\n");
+        expect![[r##"
+            r#"
+            hello
+            world
+            "#"##]]
+        .assert_eq(&patch);
+
+        let patch = format_patch(4, "single line");
+        expect![[r##"r#"single line"#"##]].assert_eq(&patch);
+    }
+
+    #[test]
+    fn test_patchwork() {
+        let mut patchwork = Patchwork::new("one two three".to_string());
+        patchwork.patch(4..7, "zwei");
+        patchwork.patch(0..3, "один");
+        patchwork.patch(8..13, "3");
+        expect![[r#"
+            Patchwork {
+                text: "один zwei 3",
+                indels: [
+                    (
+                        0..3,
+                        8,
+                    ),
+                    (
+                        4..7,
+                        4,
+                    ),
+                    (
+                        8..13,
+                        1,
+                    ),
+                ],
+            }
+        "#]]
+        .assert_debug_eq(&patchwork);
+    }
+}
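
For orientation, the intended workflow for the new `expect` crate looks roughly like this (an illustrative sketch based only on the API added above; the `check` helper, test name, and strings are made up for the example):

    use expect::{expect, Expect};

    // A small helper in the style many rust-analyzer tests use: run the code
    // under test and compare its output against an inline snapshot.
    fn check(input: &str, expect: Expect) {
        let actual = input.to_uppercase(); // stand-in for the real computation
        expect.assert_eq(&actual);
    }

    #[test]
    fn uppercases_input() {
        // On mismatch this panics with a colored diff; running
        // `env UPDATE_EXPECT=1 cargo test` rewrites the literal in place.
        check("hello", expect![["HELLO"]]);
    }

`assert_debug_eq` and `expect_file!` follow the same pattern for `Debug` output and for snapshots stored in separate files.
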
diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs
index 1023d3040..844b093d4 100644
--- a/crates/flycheck/src/lib.rs
+++ b/crates/flycheck/src/lib.rs
@@ -132,6 +132,7 @@ impl FlycheckActor {
         self.cancel_check_process();
 
         let mut command = self.check_command();
+        log::info!("restart flycheck {:?}", command);
         command.stdout(Stdio::piped()).stderr(Stdio::null()).stdin(Stdio::null());
         if let Ok(child) = command.spawn().map(JodChild) {
             self.cargo_handle = Some(CargoHandle::spawn(child));
diff --git a/crates/ra_assists/src/assist_context.rs b/crates/ra_assists/src/assist_context.rs
index ee614de72..3640bb4d2 100644
--- a/crates/ra_assists/src/assist_context.rs
+++ b/crates/ra_assists/src/assist_context.rs
@@ -55,7 +55,6 @@ use crate::{
 pub(crate) struct AssistContext<'a> {
     pub(crate) config: &'a AssistConfig,
     pub(crate) sema: Semantics<'a, RootDatabase>,
-    pub(crate) db: &'a RootDatabase,
     pub(crate) frange: FileRange,
     source_file: SourceFile,
 }
@@ -67,8 +66,11 @@ impl<'a> AssistContext<'a> {
         frange: FileRange,
     ) -> AssistContext<'a> {
         let source_file = sema.parse(frange.file_id);
-        let db = sema.db;
-        AssistContext { config, sema, db, frange, source_file }
+        AssistContext { config, sema, frange, source_file }
+    }
+
+    pub(crate) fn db(&self) -> &RootDatabase {
+        self.sema.db
     }
 
     // NB, this ignores active selection.
diff --git a/crates/ra_assists/src/ast_transform.rs b/crates/ra_assists/src/ast_transform.rs
index 00fa95b6c..01adb834c 100644
--- a/crates/ra_assists/src/ast_transform.rs
+++ b/crates/ra_assists/src/ast_transform.rs
@@ -2,7 +2,6 @@
 use rustc_hash::FxHashMap;
 
 use hir::{HirDisplay, PathResolution, SemanticsScope};
-use ra_ide_db::RootDatabase;
 use ra_syntax::{
     algo::SyntaxRewriter,
     ast::{self, AstNode},
@@ -32,14 +31,14 @@ impl<'a> AstTransform<'a> for NullTransformer {
 }
 
 pub struct SubstituteTypeParams<'a> {
-    source_scope: &'a SemanticsScope<'a, RootDatabase>,
+    source_scope: &'a SemanticsScope<'a>,
     substs: FxHashMap<hir::TypeParam, ast::TypeRef>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }
 
 impl<'a> SubstituteTypeParams<'a> {
     pub fn for_trait_impl(
-        source_scope: &'a SemanticsScope<'a, RootDatabase>,
+        source_scope: &'a SemanticsScope<'a>,
         // FIXME: there's implicit invariant that `trait_` and `source_scope` match...
         trait_: hir::Trait,
         impl_def: ast::ImplDef,
@@ -126,16 +125,13 @@ impl<'a> AstTransform<'a> for SubstituteTypeParams<'a> {
 }
 
 pub struct QualifyPaths<'a> {
-    target_scope: &'a SemanticsScope<'a, RootDatabase>,
-    source_scope: &'a SemanticsScope<'a, RootDatabase>,
+    target_scope: &'a SemanticsScope<'a>,
+    source_scope: &'a SemanticsScope<'a>,
     previous: Box<dyn AstTransform<'a> + 'a>,
 }
 
 impl<'a> QualifyPaths<'a> {
-    pub fn new(
-        target_scope: &'a SemanticsScope<'a, RootDatabase>,
-        source_scope: &'a SemanticsScope<'a, RootDatabase>,
-    ) -> Self {
+    pub fn new(target_scope: &'a SemanticsScope<'a>, source_scope: &'a SemanticsScope<'a>) -> Self {
         Self { target_scope, source_scope, previous: Box::new(NullTransformer) }
     }
 
@@ -156,7 +152,7 @@ impl<'a> QualifyPaths<'a> {
         let resolution = self.source_scope.resolve_hir_path(&hir_path?)?;
         match resolution {
             PathResolution::Def(def) => {
-                let found_path = from.find_use_path(self.source_scope.db, def)?;
+                let found_path = from.find_use_path(self.source_scope.db.upcast(), def)?;
                 let mut path = path_to_ast(found_path);
 
                 let type_args = p
diff --git a/crates/ra_assists/src/handlers/add_explicit_type.rs b/crates/ra_assists/src/handlers/add_explicit_type.rs
index 90b06a625..11df922a2 100644
--- a/crates/ra_assists/src/handlers/add_explicit_type.rs
+++ b/crates/ra_assists/src/handlers/add_explicit_type.rs
@@ -57,7 +57,7 @@ pub(crate) fn add_explicit_type(acc: &mut Assists, ctx: &AssistContext) -> Optio
         return None;
     }
 
-    let inferred_type = ty.display_source_code(ctx.db, module.into()).ok()?;
+    let inferred_type = ty.display_source_code(ctx.db(), module.into()).ok()?;
     acc.add(
         AssistId("add_explicit_type"),
         format!("Insert explicit type `{}`", inferred_type),
diff --git a/crates/ra_assists/src/handlers/add_function.rs b/crates/ra_assists/src/handlers/add_function.rs
index 1cfbd75aa..fc4e82309 100644
--- a/crates/ra_assists/src/handlers/add_function.rs
+++ b/crates/ra_assists/src/handlers/add_function.rs
@@ -117,7 +117,7 @@ impl FunctionBuilder {
         let mut file = ctx.frange.file_id;
         let target = match &target_module {
             Some(target_module) => {
-                let module_source = target_module.definition_source(ctx.db);
+                let module_source = target_module.definition_source(ctx.db());
                 let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
                 file = in_file;
                 target
@@ -269,7 +269,7 @@ fn fn_arg_type(
         return None;
     }
 
-    if let Ok(rendered) = ty.display_source_code(ctx.db, target_module.into()) {
+    if let Ok(rendered) = ty.display_source_code(ctx.db(), target_module.into()) {
         Some(rendered)
     } else {
         None
diff --git a/crates/ra_assists/src/handlers/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
index abacd4065..77e092f62 100644
--- a/crates/ra_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
@@ -128,9 +128,9 @@ fn add_missing_impl_members_inner(
     let missing_items = get_missing_assoc_items(&ctx.sema, &impl_def)
         .iter()
         .map(|i| match i {
-            hir::AssocItem::Function(i) => ast::AssocItem::FnDef(i.source(ctx.db).value),
-            hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAliasDef(i.source(ctx.db).value),
-            hir::AssocItem::Const(i) => ast::AssocItem::ConstDef(i.source(ctx.db).value),
+            hir::AssocItem::Function(i) => ast::AssocItem::FnDef(i.source(ctx.db()).value),
+            hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAliasDef(i.source(ctx.db()).value),
+            hir::AssocItem::Const(i) => ast::AssocItem::ConstDef(i.source(ctx.db()).value),
         })
         .filter(|t| def_name(&t).is_some())
         .filter(|t| match t {
diff --git a/crates/ra_assists/src/handlers/add_new.rs b/crates/ra_assists/src/handlers/add_new.rs
index 837aa8377..e41b2aa06 100644
--- a/crates/ra_assists/src/handlers/add_new.rs
+++ b/crates/ra_assists/src/handlers/add_new.rs
@@ -122,7 +122,7 @@ fn generate_impl_text(strukt: &ast::StructDef, code: &str) -> String {
 // FIXME: change the new fn checking to a more semantic approach when that's more
 // viable (e.g. we process proc macros, etc)
 fn find_struct_impl(ctx: &AssistContext, strukt: &ast::StructDef) -> Option<Option<ast::ImplDef>> {
-    let db = ctx.db;
+    let db = ctx.db();
     let module = strukt.syntax().ancestors().find(|node| {
         ast::Module::can_cast(node.kind()) || ast::SourceFile::can_cast(node.kind())
     })?;
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs
index d1cafa7d9..7b6499a08 100644
--- a/crates/ra_assists/src/handlers/auto_import.rs
+++ b/crates/ra_assists/src/handlers/auto_import.rs
@@ -5,7 +5,7 @@ use hir::{
     AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution, Semantics, Trait,
     Type,
 };
-use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
+use ra_ide_db::{imports_locator, RootDatabase};
 use ra_prof::profile;
 use ra_syntax::{
     ast::{self, AstNode},
@@ -35,8 +35,8 @@ use crate::{utils::insert_use_statement, AssistContext, AssistId, Assists, Group
 // # pub mod std { pub mod collections { pub struct HashMap { } } }
 // ```
 pub(crate) fn auto_import(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
-    let auto_import_assets = AutoImportAssets::new(&ctx)?;
-    let proposed_imports = auto_import_assets.search_for_imports(ctx.db);
+    let auto_import_assets = AutoImportAssets::new(ctx)?;
+    let proposed_imports = auto_import_assets.search_for_imports(ctx);
     if proposed_imports.is_empty() {
         return None;
     }
@@ -127,11 +127,11 @@ impl AutoImportAssets {
         GroupLabel(name)
     }
 
-    fn search_for_imports(&self, db: &RootDatabase) -> BTreeSet<ModPath> {
+    fn search_for_imports(&self, ctx: &AssistContext) -> BTreeSet<ModPath> {
         let _p = profile("auto_import::search_for_imports");
+        let db = ctx.db();
         let current_crate = self.module_with_name_to_import.krate();
-        ImportsLocator::new(db, current_crate)
-            .find_imports(&self.get_search_query())
+        imports_locator::find_imports(&ctx.sema, current_crate, &self.get_search_query())
             .into_iter()
             .filter_map(|candidate| match &self.import_candidate {
                 ImportCandidate::TraitAssocItem(assoc_item_type, _) => {
diff --git a/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs b/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs
index 43b4584b4..ca19cf198 100644
--- a/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs
+++ b/crates/ra_assists/src/handlers/extract_struct_from_enum_variant.rs
@@ -37,15 +37,15 @@ pub(crate) fn extract_struct_from_enum_variant(
     };
     let variant_name = variant.name()?.to_string();
     let variant_hir = ctx.sema.to_def(&variant)?;
-    if existing_struct_def(ctx.db, &variant_name, &variant_hir) {
+    if existing_struct_def(ctx.db(), &variant_name, &variant_hir) {
         return None;
     }
     let enum_ast = variant.parent_enum();
     let visibility = enum_ast.visibility();
     let enum_hir = ctx.sema.to_def(&enum_ast)?;
-    let variant_hir_name = variant_hir.name(ctx.db);
+    let variant_hir_name = variant_hir.name(ctx.db());
     let enum_module_def = ModuleDef::from(enum_hir);
-    let current_module = enum_hir.module(ctx.db);
+    let current_module = enum_hir.module(ctx.db());
     let target = variant.syntax().text_range();
     acc.add(
         AssistId("extract_struct_from_enum_variant"),
@@ -53,7 +53,7 @@ pub(crate) fn extract_struct_from_enum_variant(
         target,
         |builder| {
             let definition = Definition::ModuleDef(ModuleDef::EnumVariant(variant_hir));
-            let res = definition.find_usages(&ctx.db, None);
+            let res = definition.find_usages(&ctx.sema, None);
             let start_offset = variant.parent_enum().syntax().text_range().start();
             let mut visited_modules_set = FxHashSet::default();
             visited_modules_set.insert(current_module);
@@ -101,7 +101,7 @@ fn insert_import(
     enum_module_def: &ModuleDef,
     variant_hir_name: &Name,
 ) -> Option<()> {
-    let db = ctx.db;
+    let db = ctx.db();
     let mod_path = module.find_use_path(db, enum_module_def.clone());
     if let Some(mut mod_path) = mod_path {
         mod_path.segments.pop();
diff --git a/crates/ra_assists/src/handlers/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs
index 64270c86f..5b1235682 100644
--- a/crates/ra_assists/src/handlers/fill_match_arms.rs
+++ b/crates/ra_assists/src/handlers/fill_match_arms.rs
@@ -51,11 +51,11 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
     let module = ctx.sema.scope(expr.syntax()).module()?;
 
     let missing_arms: Vec<MatchArm> = if let Some(enum_def) = resolve_enum_def(&ctx.sema, &expr) {
-        let variants = enum_def.variants(ctx.db);
+        let variants = enum_def.variants(ctx.db());
 
         let mut variants = variants
             .into_iter()
-            .filter_map(|variant| build_pat(ctx.db, module, variant))
+            .filter_map(|variant| build_pat(ctx.db(), module, variant))
             .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat))
             .map(|pat| make::match_arm(iter::once(pat), make::expr_empty_block()))
             .collect::<Vec<_>>();
@@ -84,11 +84,11 @@ pub(crate) fn fill_match_arms(acc: &mut Assists, ctx: &AssistContext) -> Option<
         // where each tuple represents a proposed match arm.
         enum_defs
             .into_iter()
-            .map(|enum_def| enum_def.variants(ctx.db))
+            .map(|enum_def| enum_def.variants(ctx.db()))
             .multi_cartesian_product()
             .map(|variants| {
                 let patterns =
-                    variants.into_iter().filter_map(|variant| build_pat(ctx.db, module, variant));
+                    variants.into_iter().filter_map(|variant| build_pat(ctx.db(), module, variant));
                 ast::Pat::from(make::tuple_pat(patterns))
             })
             .filter(|variant_pat| is_variant_missing(&mut arms, variant_pat))
diff --git a/crates/ra_assists/src/handlers/fix_visibility.rs b/crates/ra_assists/src/handlers/fix_visibility.rs
index 19d4dac5e..c0f57c329 100644
--- a/crates/ra_assists/src/handlers/fix_visibility.rs
+++ b/crates/ra_assists/src/handlers/fix_visibility.rs
@@ -41,14 +41,14 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext) -> O
     };
 
     let current_module = ctx.sema.scope(&path.syntax()).module()?;
-    let target_module = def.module(ctx.db)?;
+    let target_module = def.module(ctx.db())?;
 
-    let vis = target_module.visibility_of(ctx.db, &def)?;
-    if vis.is_visible_from(ctx.db, current_module.into()) {
+    let vis = target_module.visibility_of(ctx.db(), &def)?;
+    if vis.is_visible_from(ctx.db(), current_module.into()) {
         return None;
     };
 
-    let (offset, target, target_file, target_name) = target_data_for_def(ctx.db, def)?;
+    let (offset, target, target_file, target_name) = target_data_for_def(ctx.db(), def)?;
 
     let missing_visibility =
         if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
@@ -72,16 +72,16 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) ->
     let (record_field_def, _) = ctx.sema.resolve_record_field(&record_field)?;
 
     let current_module = ctx.sema.scope(record_field.syntax()).module()?;
-    let visibility = record_field_def.visibility(ctx.db);
-    if visibility.is_visible_from(ctx.db, current_module.into()) {
+    let visibility = record_field_def.visibility(ctx.db());
+    if visibility.is_visible_from(ctx.db(), current_module.into()) {
         return None;
     }
 
-    let parent = record_field_def.parent_def(ctx.db);
-    let parent_name = parent.name(ctx.db);
-    let target_module = parent.module(ctx.db);
+    let parent = record_field_def.parent_def(ctx.db());
+    let parent_name = parent.name(ctx.db());
+    let target_module = parent.module(ctx.db());
 
-    let in_file_source = record_field_def.source(ctx.db);
+    let in_file_source = record_field_def.source(ctx.db());
     let (offset, target) = match in_file_source.value {
         hir::FieldSource::Named(it) => {
             let s = it.syntax();
@@ -95,9 +95,9 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext) ->
 
     let missing_visibility =
         if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
-    let target_file = in_file_source.file_id.original_file(ctx.db);
+    let target_file = in_file_source.file_id.original_file(ctx.db());
 
-    let target_name = record_field_def.name(ctx.db);
+    let target_name = record_field_def.name(ctx.db());
     let assist_label =
         format!("Change visibility of {}.{} to {}", parent_name, target_name, missing_visibility);
 
diff --git a/crates/ra_assists/src/handlers/inline_local_variable.rs b/crates/ra_assists/src/handlers/inline_local_variable.rs
index d26e68847..259839535 100644
--- a/crates/ra_assists/src/handlers/inline_local_variable.rs
+++ b/crates/ra_assists/src/handlers/inline_local_variable.rs
@@ -44,7 +44,7 @@ pub(crate) fn inline_local_variable(acc: &mut Assists, ctx: &AssistContext) -> O
 
     let def = ctx.sema.to_def(&bind_pat)?;
     let def = Definition::Local(def);
-    let refs = def.find_usages(ctx.db, None);
+    let refs = def.find_usages(&ctx.sema, None);
     if refs.is_empty() {
         mark::hit!(test_not_applicable_if_variable_unused);
         return None;
diff --git a/crates/ra_assists/src/handlers/reorder_fields.rs b/crates/ra_assists/src/handlers/reorder_fields.rs
index bc58ce5fe..b8cf30e7f 100644
--- a/crates/ra_assists/src/handlers/reorder_fields.rs
+++ b/crates/ra_assists/src/handlers/reorder_fields.rs
@@ -90,10 +90,10 @@ fn struct_definition(path: &ast::Path, sema: &Semantics<RootDatabase>) -> Option
 fn compute_fields_ranks(path: &ast::Path, ctx: &AssistContext) -> Option<FxHashMap<String, usize>> {
     Some(
         struct_definition(path, &ctx.sema)?
-            .fields(ctx.db)
+            .fields(ctx.db())
             .iter()
             .enumerate()
-            .map(|(idx, field)| (field.name(ctx.db).to_string(), idx))
+            .map(|(idx, field)| (field.name(ctx.db()).to_string(), idx))
             .collect(),
     )
 }
diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs
index 4f4fb4494..209713987 100644
--- a/crates/ra_db/src/fixture.rs
+++ b/crates/ra_db/src/fixture.rs
@@ -149,15 +149,17 @@ fn with_files(
             let crate_id = crate_graph.add_crate_root(
                 file_id,
                 meta.edition,
-                Some(CrateName::new(&krate).unwrap()),
+                Some(krate.clone()),
                 meta.cfg,
                 meta.env,
                 Default::default(),
             );
-            let prev = crates.insert(krate.clone(), crate_id);
+            let crate_name = CrateName::new(&krate).unwrap();
+            let prev = crates.insert(crate_name.clone(), crate_id);
             assert!(prev.is_none());
             for dep in meta.deps {
-                crate_deps.push((krate.clone(), dep))
+                let dep = CrateName::new(&dep).unwrap();
+                crate_deps.push((crate_name.clone(), dep))
             }
         } else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
             assert!(default_crate_root.is_none());
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index 7f3660118..aaa492759 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -67,7 +67,7 @@ pub struct CrateGraph {
 #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct CrateId(pub u32);
 
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct CrateName(SmolStr);
 
 impl CrateName {
@@ -94,6 +94,13 @@ impl fmt::Display for CrateName {
     }
 }
 
+impl ops::Deref for CrateName {
+    type Target = str;
+    fn deref(&self) -> &Self::Target {
+        &*self.0
+    }
+}
+
 #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
 pub struct ProcMacroId(pub u32);
 
@@ -117,7 +124,7 @@ pub struct CrateData {
     /// The name to display to the end user.
     /// This actual crate name can be different in a particular dependent crate
     /// or may even be missing for some cases, such as a dummy crate for the code snippet.
-    pub display_name: Option<CrateName>,
+    pub display_name: Option<String>,
     pub cfg_options: CfgOptions,
     pub env: Env,
     pub dependencies: Vec<Dependency>,
@@ -138,7 +145,7 @@ pub struct Env {
 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct Dependency {
     pub crate_id: CrateId,
-    pub name: SmolStr,
+    pub name: CrateName,
 }
 
 impl CrateGraph {
@@ -146,7 +153,7 @@ impl CrateGraph {
         &mut self,
         file_id: FileId,
         edition: Edition,
-        display_name: Option<CrateName>,
+        display_name: Option<String>,
         cfg_options: CfgOptions,
         env: Env,
         proc_macro: Vec<(SmolStr, Arc<dyn ra_tt::TokenExpander>)>,
@@ -178,7 +185,7 @@ impl CrateGraph {
         if self.dfs_find(from, to, &mut FxHashSet::default()) {
             return Err(CyclicDependenciesError);
         }
-        self.arena.get_mut(&from).unwrap().add_dep(name.0, to);
+        self.arena.get_mut(&from).unwrap().add_dep(name, to);
         Ok(())
     }
 
@@ -190,6 +197,23 @@ impl CrateGraph {
         self.arena.keys().copied()
     }
 
+    /// Returns an iterator over all transitive dependencies of the given crate.
+    pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> + '_ {
+        let mut worklist = vec![of];
+        let mut deps = FxHashSet::default();
+
+        while let Some(krate) = worklist.pop() {
+            if !deps.insert(krate) {
+                continue;
+            }
+
+            worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
+        }
+
+        deps.remove(&of);
+        deps.into_iter()
+    }
+
     // FIXME: this only finds one crate with the given root; we could have multiple
     pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
         let (&crate_id, _) =
@@ -247,7 +271,7 @@ impl CrateId {
 }
 
 impl CrateData {
-    fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) {
+    fn add_dep(&mut self, name: CrateName, crate_id: CrateId) {
         self.dependencies.push(Dependency { name, crate_id })
     }
 }
@@ -429,7 +453,10 @@ mod tests {
         .is_ok());
         assert_eq!(
             graph[crate1].dependencies,
-            vec![Dependency { crate_id: crate2, name: "crate_name_with_dashes".into() }]
+            vec![Dependency {
+                crate_id: crate2,
+                name: CrateName::new("crate_name_with_dashes").unwrap()
+            }]
         );
     }
 }
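
The new `CrateGraph::transitive_deps` above avoids recursion by pairing an explicit worklist with a visited set. A standalone sketch of the same pattern over a plain adjacency map (the types here are illustrative stand-ins, not the real `CrateGraph` API):

    use std::collections::{HashMap, HashSet};

    type CrateId = u32; // stand-in for ra_db's CrateId

    fn transitive_deps(graph: &HashMap<CrateId, Vec<CrateId>>, of: CrateId) -> HashSet<CrateId> {
        let mut worklist = vec![of];
        let mut deps = HashSet::new();
        while let Some(krate) = worklist.pop() {
            // `insert` returns false when the crate was already visited.
            if !deps.insert(krate) {
                continue;
            }
            worklist.extend(graph.get(&krate).into_iter().flatten().copied());
        }
        // A crate is not counted among its own dependencies.
        deps.remove(&of);
        deps
    }
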
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 4a3ba57da..1ddacc1f6 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -80,7 +80,7 @@ pub struct FilePosition {
     pub offset: TextSize,
 }
 
-#[derive(Clone, Copy, Debug)]
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
 pub struct FileRange {
     pub file_id: FileId,
     pub range: TextRange,
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index e86077dd6..1b3525011 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -31,7 +31,7 @@ use hir_ty::{
     ApplicationTy, Canonical, GenericPredicate, InEnvironment, Substs, TraitEnvironment, Ty,
     TyDefId, TypeCtor,
 };
-use ra_db::{CrateId, CrateName, Edition, FileId};
+use ra_db::{CrateId, Edition, FileId};
 use ra_prof::profile;
 use ra_syntax::ast::{self, AttrsOwner, NameOwner};
 use rustc_hash::FxHashSet;
@@ -94,8 +94,8 @@ impl Crate {
         db.crate_graph()[self.id].edition
     }
 
-    pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateName> {
-        db.crate_graph()[self.id].display_name.as_ref().cloned()
+    pub fn display_name(self, db: &dyn HirDatabase) -> Option<String> {
+        db.crate_graph()[self.id].display_name.clone()
     }
 
     pub fn query_external_importables(
@@ -1053,12 +1053,14 @@ pub struct ImplDef {
 
 impl ImplDef {
     pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<ImplDef> {
-        let impls = db.impls_in_crate(krate.id);
-        impls.all_impls().map(Self::from).collect()
+        let inherent = db.inherent_impls_in_crate(krate.id);
+        let trait_ = db.trait_impls_in_crate(krate.id);
+
+        inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
     }
     pub fn for_trait(db: &dyn HirDatabase, krate: Crate, trait_: Trait) -> Vec<ImplDef> {
-        let impls = db.impls_in_crate(krate.id);
-        impls.lookup_impl_defs_for_trait(trait_.id).map(Self::from).collect()
+        let impls = db.trait_impls_in_crate(krate.id);
+        impls.for_trait(trait_.id).map(Self::from).collect()
     }
 
     pub fn target_trait(self, db: &dyn HirDatabase) -> Option<TypeRef> {
@@ -1187,7 +1189,7 @@ impl Type {
             None => return false,
         };
 
-        let canonical_ty = Canonical { value: self.ty.value.clone(), num_vars: 0 };
+        let canonical_ty = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
         method_resolution::implements_trait(
             &canonical_ty,
             db,
@@ -1211,7 +1213,7 @@ impl Type {
                 self.ty.environment.clone(),
                 hir_ty::Obligation::Trait(trait_ref),
             ),
-            num_vars: 0,
+            kinds: Arc::new([]),
         };
 
         db.trait_solve(self.krate, goal).is_some()
@@ -1286,7 +1288,7 @@ impl Type {
     pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
         // There should be no inference vars in types passed here
         // FIXME check that?
-        let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
+        let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
         let environment = self.ty.environment.clone();
         let ty = InEnvironment { value: canonical, environment };
         autoderef(db, Some(self.krate), ty)
@@ -1303,10 +1305,10 @@ impl Type {
         mut callback: impl FnMut(AssocItem) -> Option<T>,
     ) -> Option<T> {
         for krate in self.ty.value.def_crates(db, krate.id)? {
-            let impls = db.impls_in_crate(krate);
+            let impls = db.inherent_impls_in_crate(krate);
 
-            for impl_def in impls.lookup_impl_defs(&self.ty.value) {
-                for &item in db.impl_data(impl_def).items.iter() {
+            for impl_def in impls.for_self_ty(&self.ty.value) {
+                for &item in db.impl_data(*impl_def).items.iter() {
                     if let Some(result) = callback(item.into()) {
                         return Some(result);
                     }
@@ -1327,7 +1329,7 @@ impl Type {
         // There should be no inference vars in types passed here
         // FIXME check that?
         // FIXME replace Unknown by bound vars here
-        let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
+        let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
 
         let env = self.ty.environment.clone();
         let krate = krate.id;
@@ -1358,7 +1360,7 @@ impl Type {
         // There should be no inference vars in types passed here
        // FIXME check that?
         // FIXME replace Unknown by bound vars here
-        let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 };
+        let canonical = Canonical { value: self.ty.value.clone(), kinds: Arc::new([]) };
 
         let env = self.ty.environment.clone();
         let krate = krate.id;
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index bb67952de..cb48ca065 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -16,10 +16,10 @@ pub use hir_expand::db::{
 pub use hir_ty::db::{
     AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery,
     GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase,
-    HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsFromDepsQuery,
-    ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery,
-    InternTypeCtorQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery,
-    TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
+    HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, InferQueryQuery,
+    InherentImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery, InternTypeCtorQuery,
+    InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery, TraitDatumQuery,
+    TraitImplsInCrateQuery, TraitImplsInDepsQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
 };
 
 #[test]
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs index 6a49c424a..3d78f71c1 100644 --- a/crates/ra_hir/src/semantics.rs +++ b/crates/ra_hir/src/semantics.rs | |||
@@ -83,6 +83,11 @@ impl PathResolution { | |||
83 | /// Primary API to get semantic information, like types, from syntax trees. | 83 | /// Primary API to get semantic information, like types, from syntax trees. |
84 | pub struct Semantics<'db, DB> { | 84 | pub struct Semantics<'db, DB> { |
85 | pub db: &'db DB, | 85 | pub db: &'db DB, |
86 | imp: SemanticsImpl<'db>, | ||
87 | } | ||
88 | |||
89 | pub struct SemanticsImpl<'db> { | ||
90 | pub db: &'db dyn HirDatabase, | ||
86 | s2d_cache: RefCell<SourceToDefCache>, | 91 | s2d_cache: RefCell<SourceToDefCache>, |
87 | cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, | 92 | cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>, |
88 | } | 93 | } |
@@ -95,20 +100,180 @@ impl<DB> fmt::Debug for Semantics<'_, DB> { | |||
95 | 100 | ||
96 | impl<'db, DB: HirDatabase> Semantics<'db, DB> { | 101 | impl<'db, DB: HirDatabase> Semantics<'db, DB> { |
97 | pub fn new(db: &DB) -> Semantics<DB> { | 102 | pub fn new(db: &DB) -> Semantics<DB> { |
98 | Semantics { db, s2d_cache: Default::default(), cache: Default::default() } | 103 | let impl_ = SemanticsImpl::new(db); |
104 | Semantics { db, imp: impl_ } | ||
99 | } | 105 | } |
100 | 106 | ||
101 | pub fn parse(&self, file_id: FileId) -> ast::SourceFile { | 107 | pub fn parse(&self, file_id: FileId) -> ast::SourceFile { |
102 | let tree = self.db.parse(file_id).tree(); | 108 | self.imp.parse(file_id) |
103 | self.cache(tree.syntax().clone(), file_id.into()); | ||
104 | tree | ||
105 | } | 109 | } |
106 | 110 | ||
107 | pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST { | 111 | pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST { |
108 | let file_id = d.source().file_id; | 112 | let file_id = d.source().file_id; |
109 | let root = self.db.parse_or_expand(file_id).unwrap(); | 113 | let root = self.db.parse_or_expand(file_id).unwrap(); |
110 | self.cache(root, file_id); | 114 | self.imp.cache(root, file_id); |
111 | d.ast(self.db) | 115 | d.ast(self.db.upcast()) |
116 | } | ||
117 | |||
118 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | ||
119 | self.imp.expand(macro_call) | ||
120 | } | ||
121 | |||
122 | pub fn expand_hypothetical( | ||
123 | &self, | ||
124 | actual_macro_call: &ast::MacroCall, | ||
125 | hypothetical_args: &ast::TokenTree, | ||
126 | token_to_map: SyntaxToken, | ||
127 | ) -> Option<(SyntaxNode, SyntaxToken)> { | ||
128 | self.imp.expand_hypothetical(actual_macro_call, hypothetical_args, token_to_map) | ||
129 | } | ||
130 | |||
131 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | ||
132 | self.imp.descend_into_macros(token) | ||
133 | } | ||
134 | |||
135 | pub fn descend_node_at_offset<N: ast::AstNode>( | ||
136 | &self, | ||
137 | node: &SyntaxNode, | ||
138 | offset: TextSize, | ||
139 | ) -> Option<N> { | ||
140 | self.imp.descend_node_at_offset(node, offset).find_map(N::cast) | ||
141 | } | ||
142 | |||
143 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { | ||
144 | self.imp.original_range(node) | ||
145 | } | ||
146 | |||
147 | pub fn diagnostics_range(&self, diagnostics: &dyn Diagnostic) -> FileRange { | ||
148 | self.imp.diagnostics_range(diagnostics) | ||
149 | } | ||
150 | |||
151 | pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
152 | self.imp.ancestors_with_macros(node) | ||
153 | } | ||
154 | |||
155 | pub fn ancestors_at_offset_with_macros( | ||
156 | &self, | ||
157 | node: &SyntaxNode, | ||
158 | offset: TextSize, | ||
159 | ) -> impl Iterator<Item = SyntaxNode> + '_ { | ||
160 | self.imp.ancestors_at_offset_with_macros(node, offset) | ||
161 | } | ||
162 | |||
163 | /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, | ||
164 | /// search up until it is of the target AstNode type | ||
165 | pub fn find_node_at_offset_with_macros<N: AstNode>( | ||
166 | &self, | ||
167 | node: &SyntaxNode, | ||
168 | offset: TextSize, | ||
169 | ) -> Option<N> { | ||
170 | self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) | ||
171 | } | ||
172 | |||
173 | /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, | ||
174 | /// descend it and find again | ||
175 | pub fn find_node_at_offset_with_descend<N: AstNode>( | ||
176 | &self, | ||
177 | node: &SyntaxNode, | ||
178 | offset: TextSize, | ||
179 | ) -> Option<N> { | ||
180 | if let Some(it) = find_node_at_offset(&node, offset) { | ||
181 | return Some(it); | ||
182 | } | ||
183 | |||
184 | self.imp.descend_node_at_offset(node, offset).find_map(N::cast) | ||
185 | } | ||
186 | |||
187 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | ||
188 | self.imp.type_of_expr(expr) | ||
189 | } | ||
190 | |||
191 | pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<Type> { | ||
192 | self.imp.type_of_pat(pat) | ||
193 | } | ||
194 | |||
195 | pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> { | ||
196 | self.imp.resolve_method_call(call) | ||
197 | } | ||
198 | |||
199 | pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> { | ||
200 | self.imp.resolve_field(field) | ||
201 | } | ||
202 | |||
203 | pub fn resolve_record_field(&self, field: &ast::RecordField) -> Option<(Field, Option<Local>)> { | ||
204 | self.imp.resolve_record_field(field) | ||
205 | } | ||
206 | |||
207 | pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<Field> { | ||
208 | self.imp.resolve_record_field_pat(field) | ||
209 | } | ||
210 | |||
211 | pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { | ||
212 | self.imp.resolve_macro_call(macro_call) | ||
213 | } | ||
214 | |||
215 | pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> { | ||
216 | self.imp.resolve_path(path) | ||
217 | } | ||
218 | |||
219 | pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> { | ||
220 | self.imp.resolve_variant(record_lit) | ||
221 | } | ||
222 | |||
223 | pub fn lower_path(&self, path: &ast::Path) -> Option<Path> { | ||
224 | self.imp.lower_path(path) | ||
225 | } | ||
226 | |||
227 | pub fn resolve_bind_pat_to_const(&self, pat: &ast::BindPat) -> Option<ModuleDef> { | ||
228 | self.imp.resolve_bind_pat_to_const(pat) | ||
229 | } | ||
230 | |||
231 | // FIXME: use this instead? | ||
232 | // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; | ||
233 | |||
234 | pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> { | ||
235 | self.imp.record_literal_missing_fields(literal) | ||
236 | } | ||
237 | |||
238 | pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> { | ||
239 | self.imp.record_pattern_missing_fields(pattern) | ||
240 | } | ||
241 | |||
242 | pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { | ||
243 | let src = self.imp.find_file(src.syntax().clone()).with_value(src).cloned(); | ||
244 | T::to_def(&self.imp, src) | ||
245 | } | ||
246 | |||
247 | pub fn to_module_def(&self, file: FileId) -> Option<Module> { | ||
248 | self.imp.to_module_def(file) | ||
249 | } | ||
250 | |||
251 | pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { | ||
252 | self.imp.scope(node) | ||
253 | } | ||
254 | |||
255 | pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { | ||
256 | self.imp.scope_at_offset(node, offset) | ||
257 | } | ||
258 | |||
259 | pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { | ||
260 | self.imp.scope_for_def(def) | ||
261 | } | ||
262 | |||
263 | pub fn assert_contains_node(&self, node: &SyntaxNode) { | ||
264 | self.imp.assert_contains_node(node) | ||
265 | } | ||
266 | } | ||
267 | |||
268 | impl<'db> SemanticsImpl<'db> { | ||
269 | pub fn new(db: &'db dyn HirDatabase) -> Self { | ||
270 | Self { db, s2d_cache: Default::default(), cache: Default::default() } | ||
271 | } | ||
272 | |||
273 | pub fn parse(&self, file_id: FileId) -> ast::SourceFile { | ||
274 | let tree = self.db.parse(file_id).tree(); | ||
275 | self.cache(tree.syntax().clone(), file_id.into()); | ||
276 | tree | ||
112 | } | 277 | } |
113 | 278 | ||
114 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | 279 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { |
@@ -130,9 +295,15 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
130 | self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); | 295 | self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call); |
131 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); | 296 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); |
132 | let krate = sa.resolver.krate()?; | 297 | let krate = sa.resolver.krate()?; |
133 | let macro_call_id = macro_call | 298 | let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { |
134 | .as_call_id(self.db, krate, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?; | 299 | sa.resolver.resolve_path_as_macro(self.db.upcast(), &path) |
135 | hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map) | 300 | })?; |
301 | hir_expand::db::expand_hypothetical( | ||
302 | self.db.upcast(), | ||
303 | macro_call_id, | ||
304 | hypothetical_args, | ||
305 | token_to_map, | ||
306 | ) | ||
136 | } | 307 | } |
137 | 308 | ||
138 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { | 309 | pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { |
@@ -147,7 +318,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
147 | return None; | 318 | return None; |
148 | } | 319 | } |
149 | let file_id = sa.expand(self.db, token.with_value(&macro_call))?; | 320 | let file_id = sa.expand(self.db, token.with_value(&macro_call))?; |
150 | let token = file_id.expansion_info(self.db)?.map_token_down(token.as_ref())?; | 321 | let token = file_id.expansion_info(self.db.upcast())?.map_token_down(token.as_ref())?; |
151 | 322 | ||
152 | self.cache(find_root(&token.value.parent()), token.file_id); | 323 | self.cache(find_root(&token.value.parent()), token.file_id); |
153 | 324 | ||
@@ -159,15 +330,16 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
159 | token.value | 330 | token.value |
160 | } | 331 | } |
161 | 332 | ||
162 | pub fn descend_node_at_offset<N: ast::AstNode>( | 333 | pub fn descend_node_at_offset( |
163 | &self, | 334 | &self, |
164 | node: &SyntaxNode, | 335 | node: &SyntaxNode, |
165 | offset: TextSize, | 336 | offset: TextSize, |
166 | ) -> Option<N> { | 337 | ) -> impl Iterator<Item = SyntaxNode> + '_ { |
167 | // Handle macro token cases | 338 | // Handle macro token cases |
168 | node.token_at_offset(offset) | 339 | node.token_at_offset(offset) |
169 | .map(|token| self.descend_into_macros(token)) | 340 | .map(|token| self.descend_into_macros(token)) |
170 | .find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast)) | 341 | .map(|it| self.ancestors_with_macros(it.parent())) |
342 | .flatten() | ||
171 | } | 343 | } |
172 | 344 | ||
173 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { | 345 | pub fn original_range(&self, node: &SyntaxNode) -> FileRange { |
@@ -184,7 +356,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
184 | 356 | ||
185 | pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { | 357 | pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ { |
186 | let node = self.find_file(node); | 358 | let node = self.find_file(node); |
187 | node.ancestors_with_macros(self.db).map(|it| it.value) | 359 | node.ancestors_with_macros(self.db.upcast()).map(|it| it.value) |
188 | } | 360 | } |
189 | 361 | ||
190 | pub fn ancestors_at_offset_with_macros( | 362 | pub fn ancestors_at_offset_with_macros( |
@@ -197,29 +369,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
197 | .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) | 369 | .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) |
198 | } | 370 | } |
199 | 371 | ||
200 | /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*, | ||
201 | /// search up until it is of the target AstNode type | ||
202 | pub fn find_node_at_offset_with_macros<N: AstNode>( | ||
203 | &self, | ||
204 | node: &SyntaxNode, | ||
205 | offset: TextSize, | ||
206 | ) -> Option<N> { | ||
207 | self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast) | ||
208 | } | ||
209 | |||
210 | /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*, | ||
211 | /// descend it and find again | ||
212 | pub fn find_node_at_offset_with_descend<N: AstNode>( | ||
213 | &self, | ||
214 | node: &SyntaxNode, | ||
215 | offset: TextSize, | ||
216 | ) -> Option<N> { | ||
217 | if let Some(it) = find_node_at_offset(&node, offset) { | ||
218 | return Some(it); | ||
219 | } | ||
220 | self.descend_node_at_offset(&node, offset) | ||
221 | } | ||
222 | |||
223 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { | 372 | pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { |
224 | self.analyze(expr.syntax()).type_of(self.db, &expr) | 373 | self.analyze(expr.syntax()).type_of(self.db, &expr) |
225 | } | 374 | } |
@@ -267,9 +416,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
267 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) | 416 | self.analyze(pat.syntax()).resolve_bind_pat_to_const(self.db, pat) |
268 | } | 417 | } |
269 | 418 | ||
270 | // FIXME: use this instead? | ||
271 | // pub fn resolve_name_ref(&self, name_ref: &ast::NameRef) -> Option<???>; | ||
272 | |||
273 | pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> { | 419 | pub fn record_literal_missing_fields(&self, literal: &ast::RecordLit) -> Vec<(Field, Type)> { |
274 | self.analyze(literal.syntax()) | 420 | self.analyze(literal.syntax()) |
275 | .record_literal_missing_fields(self.db, literal) | 421 | .record_literal_missing_fields(self.db, literal) |
@@ -282,11 +428,6 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
282 | .unwrap_or_default() | 428 | .unwrap_or_default() |
283 | } | 429 | } |
284 | 430 | ||
285 | pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> { | ||
286 | let src = self.find_file(src.syntax().clone()).with_value(src).cloned(); | ||
287 | T::to_def(self, src) | ||
288 | } | ||
289 | |||
290 | fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T { | 431 | fn with_ctx<F: FnOnce(&mut SourceToDefCtx) -> T, T>(&self, f: F) -> T { |
291 | let mut cache = self.s2d_cache.borrow_mut(); | 432 | let mut cache = self.s2d_cache.borrow_mut(); |
292 | let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; | 433 | let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache }; |
@@ -297,20 +438,20 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
297 | self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from) | 438 | self.with_ctx(|ctx| ctx.file_to_def(file)).map(Module::from) |
298 | } | 439 | } |
299 | 440 | ||
300 | pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db, DB> { | 441 | pub fn scope(&self, node: &SyntaxNode) -> SemanticsScope<'db> { |
301 | let node = self.find_file(node.clone()); | 442 | let node = self.find_file(node.clone()); |
302 | let resolver = self.analyze2(node.as_ref(), None).resolver; | 443 | let resolver = self.analyze2(node.as_ref(), None).resolver; |
303 | SemanticsScope { db: self.db, resolver } | 444 | SemanticsScope { db: self.db, resolver } |
304 | } | 445 | } |
305 | 446 | ||
306 | pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db, DB> { | 447 | pub fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> SemanticsScope<'db> { |
307 | let node = self.find_file(node.clone()); | 448 | let node = self.find_file(node.clone()); |
308 | let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; | 449 | let resolver = self.analyze2(node.as_ref(), Some(offset)).resolver; |
309 | SemanticsScope { db: self.db, resolver } | 450 | SemanticsScope { db: self.db, resolver } |
310 | } | 451 | } |
311 | 452 | ||
312 | pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db, DB> { | 453 | pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> { |
313 | let resolver = def.id.resolver(self.db); | 454 | let resolver = def.id.resolver(self.db.upcast()); |
314 | SemanticsScope { db: self.db, resolver } | 455 | SemanticsScope { db: self.db, resolver } |
315 | } | 456 | } |
316 | 457 | ||
@@ -331,17 +472,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
331 | ChildContainer::DefWithBodyId(def) => { | 472 | ChildContainer::DefWithBodyId(def) => { |
332 | return SourceAnalyzer::new_for_body(self.db, def, src, offset) | 473 | return SourceAnalyzer::new_for_body(self.db, def, src, offset) |
333 | } | 474 | } |
334 | ChildContainer::TraitId(it) => it.resolver(self.db), | 475 | ChildContainer::TraitId(it) => it.resolver(self.db.upcast()), |
335 | ChildContainer::ImplId(it) => it.resolver(self.db), | 476 | ChildContainer::ImplId(it) => it.resolver(self.db.upcast()), |
336 | ChildContainer::ModuleId(it) => it.resolver(self.db), | 477 | ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()), |
337 | ChildContainer::EnumId(it) => it.resolver(self.db), | 478 | ChildContainer::EnumId(it) => it.resolver(self.db.upcast()), |
338 | ChildContainer::VariantId(it) => it.resolver(self.db), | 479 | ChildContainer::VariantId(it) => it.resolver(self.db.upcast()), |
339 | ChildContainer::GenericDefId(it) => it.resolver(self.db), | 480 | ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()), |
340 | }; | 481 | }; |
341 | SourceAnalyzer::new_for_resolver(resolver, src) | 482 | SourceAnalyzer::new_for_resolver(resolver, src) |
342 | } | 483 | } |
343 | 484 | ||
344 | fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { | 485 | pub fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) { |
345 | assert!(root_node.parent().is_none()); | 486 | assert!(root_node.parent().is_none()); |
346 | let mut cache = self.cache.borrow_mut(); | 487 | let mut cache = self.cache.borrow_mut(); |
347 | let prev = cache.insert(root_node, file_id); | 488 | let prev = cache.insert(root_node, file_id); |
@@ -357,7 +498,7 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
357 | cache.get(root_node).copied() | 498 | cache.get(root_node).copied() |
358 | } | 499 | } |
359 | 500 | ||
360 | fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> { | 501 | pub fn find_file(&self, node: SyntaxNode) -> InFile<SyntaxNode> { |
361 | let root_node = find_root(&node); | 502 | let root_node = find_root(&node); |
362 | let file_id = self.lookup(&root_node).unwrap_or_else(|| { | 503 | let file_id = self.lookup(&root_node).unwrap_or_else(|| { |
363 | panic!( | 504 | panic!( |
@@ -382,14 +523,14 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
382 | pub trait ToDef: AstNode + Clone { | 523 | pub trait ToDef: AstNode + Clone { |
383 | type Def; | 524 | type Def; |
384 | 525 | ||
385 | fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def>; | 526 | fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def>; |
386 | } | 527 | } |
387 | 528 | ||
388 | macro_rules! to_def_impls { | 529 | macro_rules! to_def_impls { |
389 | ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( | 530 | ($(($def:path, $ast:path, $meth:ident)),* ,) => {$( |
390 | impl ToDef for $ast { | 531 | impl ToDef for $ast { |
391 | type Def = $def; | 532 | type Def = $def; |
392 | fn to_def<DB: HirDatabase>(sema: &Semantics<DB>, src: InFile<Self>) -> Option<Self::Def> { | 533 | fn to_def(sema: &SemanticsImpl, src: InFile<Self>) -> Option<Self::Def> { |
393 | sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) | 534 | sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from) |
394 | } | 535 | } |
395 | } | 536 | } |
@@ -419,12 +560,12 @@ fn find_root(node: &SyntaxNode) -> SyntaxNode { | |||
419 | node.ancestors().last().unwrap() | 560 | node.ancestors().last().unwrap() |
420 | } | 561 | } |
421 | 562 | ||
422 | pub struct SemanticsScope<'a, DB> { | 563 | pub struct SemanticsScope<'a> { |
423 | pub db: &'a DB, | 564 | pub db: &'a dyn HirDatabase, |
424 | resolver: Resolver, | 565 | resolver: Resolver, |
425 | } | 566 | } |
426 | 567 | ||
427 | impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> { | 568 | impl<'a> SemanticsScope<'a> { |
428 | pub fn module(&self) -> Option<Module> { | 569 | pub fn module(&self) -> Option<Module> { |
429 | Some(Module { id: self.resolver.module()? }) | 570 | Some(Module { id: self.resolver.module()? }) |
430 | } | 571 | } |
@@ -433,13 +574,13 @@ impl<'a, DB: HirDatabase> SemanticsScope<'a, DB> { | |||
433 | // FIXME: rename to visible_traits to not repeat scope? | 574 | // FIXME: rename to visible_traits to not repeat scope? |
434 | pub fn traits_in_scope(&self) -> FxHashSet<TraitId> { | 575 | pub fn traits_in_scope(&self) -> FxHashSet<TraitId> { |
435 | let resolver = &self.resolver; | 576 | let resolver = &self.resolver; |
436 | resolver.traits_in_scope(self.db) | 577 | resolver.traits_in_scope(self.db.upcast()) |
437 | } | 578 | } |
438 | 579 | ||
439 | pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { | 580 | pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) { |
440 | let resolver = &self.resolver; | 581 | let resolver = &self.resolver; |
441 | 582 | ||
442 | resolver.process_all_names(self.db, &mut |name, def| { | 583 | resolver.process_all_names(self.db.upcast(), &mut |name, def| { |
443 | let def = match def { | 584 | let def = match def { |
444 | resolver::ScopeDef::PerNs(it) => { | 585 | resolver::ScopeDef::PerNs(it) => { |
445 | let items = ScopeDef::all_items(it); | 586 | let items = ScopeDef::all_items(it); |
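A standalone sketch of the pattern introduced in this file (illustrative names, not the real rust-analyzer types): the generic `Semantics<DB>` keeps its ergonomic surface but forwards all work to a non-generic `SemanticsImpl` holding `&dyn HirDatabase`, so the method bodies are compiled only once instead of per concrete database type.

```rust
trait Database {
    fn file_text(&self, file: u32) -> String;
}

struct Facade<'db, DB> {
    pub db: &'db DB,
    imp: FacadeImpl<'db>,
}

struct FacadeImpl<'db> {
    db: &'db dyn Database,
}

impl<'db, DB: Database> Facade<'db, DB> {
    fn new(db: &'db DB) -> Facade<'db, DB> {
        // `&DB` coerces to `&dyn Database` here.
        Facade { db, imp: FacadeImpl { db } }
    }

    // Thin wrapper: delegate to the dyn-based implementation.
    fn parse(&self, file: u32) -> String {
        self.imp.parse(file)
    }
}

impl<'db> FacadeImpl<'db> {
    fn parse(&self, file: u32) -> String {
        self.db.file_text(file)
    }
}

struct MockDb;
impl Database for MockDb {
    fn file_text(&self, file: u32) -> String {
        format!("contents of file {}", file)
    }
}

fn main() {
    let db = MockDb;
    let sema = Facade::new(&db);
    println!("{}", sema.parse(1));
    // The concrete database stays reachable for callers that need it.
    println!("{}", sema.db.file_text(2));
}
```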
diff --git a/crates/ra_hir_def/src/item_scope.rs b/crates/ra_hir_def/src/item_scope.rs index 4d446c707..beeb98559 100644 --- a/crates/ra_hir_def/src/item_scope.rs +++ b/crates/ra_hir_def/src/item_scope.rs | |||
@@ -1,6 +1,8 @@ | |||
1 | //! Describes items defined or visible (ie, imported) in a certain scope. | 1 | //! Describes items defined or visible (ie, imported) in a certain scope. |
2 | //! This is shared between modules and blocks. | 2 | //! This is shared between modules and blocks. |
3 | 3 | ||
4 | use std::collections::hash_map::Entry; | ||
5 | |||
4 | use hir_expand::name::Name; | 6 | use hir_expand::name::Name; |
5 | use once_cell::sync::Lazy; | 7 | use once_cell::sync::Lazy; |
6 | use ra_db::CrateId; | 8 | use ra_db::CrateId; |
@@ -27,7 +29,11 @@ pub struct PerNsGlobImports { | |||
27 | 29 | ||
28 | #[derive(Debug, Default, PartialEq, Eq)] | 30 | #[derive(Debug, Default, PartialEq, Eq)] |
29 | pub struct ItemScope { | 31 | pub struct ItemScope { |
30 | visible: FxHashMap<Name, PerNs>, | 32 | types: FxHashMap<Name, (ModuleDefId, Visibility)>, |
33 | values: FxHashMap<Name, (ModuleDefId, Visibility)>, | ||
34 | macros: FxHashMap<Name, (MacroDefId, Visibility)>, | ||
35 | unresolved: FxHashSet<Name>, | ||
36 | |||
31 | defs: Vec<ModuleDefId>, | 37 | defs: Vec<ModuleDefId>, |
32 | impls: Vec<ImplId>, | 38 | impls: Vec<ImplId>, |
33 | /// Macros visible in current module in legacy textual scope | 39 | /// Macros visible in current module in legacy textual scope |
@@ -65,14 +71,16 @@ pub(crate) enum BuiltinShadowMode { | |||
65 | /// Other methods will only resolve values, types and module scoped macros only. | 71 | /// Other methods will only resolve values, types and module scoped macros only. |
66 | impl ItemScope { | 72 | impl ItemScope { |
67 | pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a { | 73 | pub fn entries<'a>(&'a self) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a { |
68 | //FIXME: shadowing | 74 | // FIXME: shadowing |
69 | self.visible.iter().map(|(n, def)| (n, *def)) | 75 | let keys: FxHashSet<_> = self |
70 | } | 76 | .types |
71 | 77 | .keys() | |
72 | pub fn entries_without_primitives<'a>( | 78 | .chain(self.values.keys()) |
73 | &'a self, | 79 | .chain(self.macros.keys()) |
74 | ) -> impl Iterator<Item = (&'a Name, PerNs)> + 'a { | 80 | .chain(self.unresolved.iter()) |
75 | self.visible.iter().map(|(n, def)| (n, *def)) | 81 | .collect(); |
82 | |||
83 | keys.into_iter().map(move |name| (name, self.get(name))) | ||
76 | } | 84 | } |
77 | 85 | ||
78 | pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ { | 86 | pub fn declarations(&self) -> impl Iterator<Item = ModuleDefId> + '_ { |
@@ -91,7 +99,7 @@ impl ItemScope { | |||
91 | 99 | ||
92 | /// Iterate over all module scoped macros | 100 | /// Iterate over all module scoped macros |
93 | pub(crate) fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a { | 101 | pub(crate) fn macros<'a>(&'a self) -> impl Iterator<Item = (&'a Name, MacroDefId)> + 'a { |
94 | self.visible.iter().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_))) | 102 | self.entries().filter_map(|(name, def)| def.take_macros().map(|macro_| (name, macro_))) |
95 | } | 103 | } |
96 | 104 | ||
97 | /// Iterate over all legacy textual scoped macros visible at the end of the module | 105 | /// Iterate over all legacy textual scoped macros visible at the end of the module |
@@ -101,12 +109,16 @@ impl ItemScope { | |||
101 | 109 | ||
102 | /// Get a name from current module scope, legacy macros are not included | 110 | /// Get a name from current module scope, legacy macros are not included |
103 | pub(crate) fn get(&self, name: &Name) -> PerNs { | 111 | pub(crate) fn get(&self, name: &Name) -> PerNs { |
104 | self.visible.get(name).copied().unwrap_or_else(PerNs::none) | 112 | PerNs { |
113 | types: self.types.get(name).copied(), | ||
114 | values: self.values.get(name).copied(), | ||
115 | macros: self.macros.get(name).copied(), | ||
116 | } | ||
105 | } | 117 | } |
106 | 118 | ||
107 | pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> { | 119 | pub(crate) fn name_of(&self, item: ItemInNs) -> Option<(&Name, Visibility)> { |
108 | for (name, per_ns) in &self.visible { | 120 | for (name, per_ns) in self.entries() { |
109 | if let Some(vis) = item.match_with(*per_ns) { | 121 | if let Some(vis) = item.match_with(per_ns) { |
110 | return Some((name, vis)); | 122 | return Some((name, vis)); |
111 | } | 123 | } |
112 | } | 124 | } |
@@ -114,8 +126,8 @@ impl ItemScope { | |||
114 | } | 126 | } |
115 | 127 | ||
116 | pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a { | 128 | pub(crate) fn traits<'a>(&'a self) -> impl Iterator<Item = TraitId> + 'a { |
117 | self.visible.values().filter_map(|def| match def.take_types() { | 129 | self.types.values().filter_map(|(def, _)| match def { |
118 | Some(ModuleDefId::TraitId(t)) => Some(t), | 130 | ModuleDefId::TraitId(t) => Some(*t), |
119 | _ => None, | 131 | _ => None, |
120 | }) | 132 | }) |
121 | } | 133 | } |
@@ -138,21 +150,30 @@ impl ItemScope { | |||
138 | 150 | ||
139 | pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool { | 151 | pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool { |
140 | let mut changed = false; | 152 | let mut changed = false; |
141 | let existing = self.visible.entry(name).or_default(); | ||
142 | 153 | ||
143 | if existing.types.is_none() && def.types.is_some() { | 154 | if let Some(types) = def.types { |
144 | existing.types = def.types; | 155 | self.types.entry(name.clone()).or_insert_with(|| { |
145 | changed = true; | 156 | changed = true; |
157 | types | ||
158 | }); | ||
146 | } | 159 | } |
147 | 160 | if let Some(values) = def.values { | |
148 | if existing.values.is_none() && def.values.is_some() { | 161 | self.values.entry(name.clone()).or_insert_with(|| { |
149 | existing.values = def.values; | 162 | changed = true; |
150 | changed = true; | 163 | values |
164 | }); | ||
165 | } | ||
166 | if let Some(macros) = def.macros { | ||
167 | self.macros.entry(name.clone()).or_insert_with(|| { | ||
168 | changed = true; | ||
169 | macros | ||
170 | }); | ||
151 | } | 171 | } |
152 | 172 | ||
153 | if existing.macros.is_none() && def.macros.is_some() { | 173 | if def.is_none() { |
154 | existing.macros = def.macros; | 174 | if self.unresolved.insert(name) { |
155 | changed = true; | 175 | changed = true; |
176 | } | ||
156 | } | 177 | } |
157 | 178 | ||
158 | changed | 179 | changed |
@@ -166,17 +187,17 @@ impl ItemScope { | |||
166 | def_import_type: ImportType, | 187 | def_import_type: ImportType, |
167 | ) -> bool { | 188 | ) -> bool { |
168 | let mut changed = false; | 189 | let mut changed = false; |
169 | let existing = self.visible.entry(lookup.1.clone()).or_default(); | ||
170 | 190 | ||
171 | macro_rules! check_changed { | 191 | macro_rules! check_changed { |
172 | ( | 192 | ( |
173 | $changed:ident, | 193 | $changed:ident, |
174 | ( $existing:ident / $def:ident ) . $field:ident, | 194 | ( $this:ident / $def:ident ) . $field:ident, |
175 | $glob_imports:ident [ $lookup:ident ], | 195 | $glob_imports:ident [ $lookup:ident ], |
176 | $def_import_type:ident | 196 | $def_import_type:ident |
177 | ) => { | 197 | ) => {{ |
178 | match ($existing.$field, $def.$field) { | 198 | let existing = $this.$field.entry($lookup.1.clone()); |
179 | (None, Some(_)) => { | 199 | match (existing, $def.$field) { |
200 | (Entry::Vacant(entry), Some(_)) => { | ||
180 | match $def_import_type { | 201 | match $def_import_type { |
181 | ImportType::Glob => { | 202 | ImportType::Glob => { |
182 | $glob_imports.$field.insert($lookup.clone()); | 203 | $glob_imports.$field.insert($lookup.clone()); |
@@ -186,32 +207,42 @@ impl ItemScope { | |||
186 | } | 207 | } |
187 | } | 208 | } |
188 | 209 | ||
189 | $existing.$field = $def.$field; | 210 | if let Some(fld) = $def.$field { |
211 | entry.insert(fld); | ||
212 | } | ||
190 | $changed = true; | 213 | $changed = true; |
191 | } | 214 | } |
192 | (Some(_), Some(_)) | 215 | (Entry::Occupied(mut entry), Some(_)) |
193 | if $glob_imports.$field.contains(&$lookup) | 216 | if $glob_imports.$field.contains(&$lookup) |
194 | && matches!($def_import_type, ImportType::Named) => | 217 | && matches!($def_import_type, ImportType::Named) => |
195 | { | 218 | { |
196 | mark::hit!(import_shadowed); | 219 | mark::hit!(import_shadowed); |
197 | $glob_imports.$field.remove(&$lookup); | 220 | $glob_imports.$field.remove(&$lookup); |
198 | $existing.$field = $def.$field; | 221 | if let Some(fld) = $def.$field { |
222 | entry.insert(fld); | ||
223 | } | ||
199 | $changed = true; | 224 | $changed = true; |
200 | } | 225 | } |
201 | _ => {} | 226 | _ => {} |
202 | } | 227 | } |
203 | }; | 228 | }}; |
204 | } | 229 | } |
205 | 230 | ||
206 | check_changed!(changed, (existing / def).types, glob_imports[lookup], def_import_type); | 231 | check_changed!(changed, (self / def).types, glob_imports[lookup], def_import_type); |
207 | check_changed!(changed, (existing / def).values, glob_imports[lookup], def_import_type); | 232 | check_changed!(changed, (self / def).values, glob_imports[lookup], def_import_type); |
208 | check_changed!(changed, (existing / def).macros, glob_imports[lookup], def_import_type); | 233 | check_changed!(changed, (self / def).macros, glob_imports[lookup], def_import_type); |
234 | |||
235 | if def.is_none() { | ||
236 | if self.unresolved.insert(lookup.1) { | ||
237 | changed = true; | ||
238 | } | ||
239 | } | ||
209 | 240 | ||
210 | changed | 241 | changed |
211 | } | 242 | } |
212 | 243 | ||
213 | pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Name, PerNs)> + 'a { | 244 | pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Name, PerNs)> + 'a { |
214 | self.visible.iter().map(|(name, res)| (name.clone(), *res)) | 245 | self.entries().map(|(name, res)| (name.clone(), res)) |
215 | } | 246 | } |
216 | 247 | ||
217 | pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> { | 248 | pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> { |
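The rewritten `push_res` above replaces the `PerNs`-merging logic with per-namespace maps and the `Entry` API, so the first resolution of a name wins and `changed` is set only when something was actually inserted. A minimal self-contained illustration of that `entry(..).or_insert_with(..)` idiom with a plain `std::collections::HashMap` (not the real `ItemScope`):

```rust
use std::collections::HashMap;

fn insert_if_absent(map: &mut HashMap<String, u32>, name: &str, value: u32) -> bool {
    let mut changed = false;
    map.entry(name.to_string()).or_insert_with(|| {
        // The closure runs only for a vacant entry, so `changed` flags real insertions.
        changed = true;
        value
    });
    changed
}

fn main() {
    let mut map = HashMap::new();
    assert!(insert_if_absent(&mut map, "Foo", 1)); // first resolution wins
    assert!(!insert_if_absent(&mut map, "Foo", 2)); // existing entry is kept
    assert_eq!(map["Foo"], 1);
}
```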
diff --git a/crates/ra_hir_def/src/item_tree/lower.rs b/crates/ra_hir_def/src/item_tree/lower.rs index 5149dd141..06743d7fc 100644 --- a/crates/ra_hir_def/src/item_tree/lower.rs +++ b/crates/ra_hir_def/src/item_tree/lower.rs | |||
@@ -211,7 +211,7 @@ impl Ctx { | |||
211 | fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> { | 211 | fn lower_record_field(&mut self, field: &ast::RecordFieldDef) -> Option<Field> { |
212 | let name = field.name()?.as_name(); | 212 | let name = field.name()?.as_name(); |
213 | let visibility = self.lower_visibility(field); | 213 | let visibility = self.lower_visibility(field); |
214 | let type_ref = self.lower_type_ref(&field.ascribed_type()?); | 214 | let type_ref = self.lower_type_ref_opt(field.ascribed_type()); |
215 | let res = Field { name, type_ref, visibility }; | 215 | let res = Field { name, type_ref, visibility }; |
216 | Some(res) | 216 | Some(res) |
217 | } | 217 | } |
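The one-line change above stops dropping a record field whose type annotation is missing: instead of bailing out via `?`, the optional AST node is handed to an `_opt` lowering helper. A hedged sketch of what such a helper plausibly does; the real `lower_type_ref_opt` is not shown in this diff, so the error-placeholder behaviour is an assumption:

```rust
// Hypothetical sketch: a field without an ascribed type is lowered to an error
// placeholder instead of being skipped (names here are illustrative).
#[derive(Debug)]
enum TypeRef {
    Path(String),
    Error,
}

fn lower_type_ref(text: &str) -> TypeRef {
    TypeRef::Path(text.to_string())
}

fn lower_type_ref_opt(text: Option<&str>) -> TypeRef {
    text.map(lower_type_ref).unwrap_or(TypeRef::Error)
}

fn main() {
    println!("{:?}", lower_type_ref_opt(Some("u32"))); // Path("u32")
    println!("{:?}", lower_type_ref_opt(None));        // Error
}
```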
diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs index 15fdd9019..0bf51eb7b 100644 --- a/crates/ra_hir_def/src/resolver.rs +++ b/crates/ra_hir_def/src/resolver.rs | |||
@@ -511,11 +511,9 @@ impl Scope { | |||
511 | }); | 511 | }); |
512 | } | 512 | } |
513 | } | 513 | } |
514 | Scope::LocalItemsScope(body) => { | 514 | Scope::LocalItemsScope(body) => body.item_scope.entries().for_each(|(name, def)| { |
515 | body.item_scope.entries_without_primitives().for_each(|(name, def)| { | 515 | f(name.clone(), ScopeDef::PerNs(def)); |
516 | f(name.clone(), ScopeDef::PerNs(def)); | 516 | }), |
517 | }) | ||
518 | } | ||
519 | Scope::GenericParams { params, def } => { | 517 | Scope::GenericParams { params, def } => { |
520 | for (local_id, param) in params.types.iter() { | 518 | for (local_id, param) in params.types.iter() { |
521 | if let Some(name) = &param.name { | 519 | if let Some(name) = &param.name { |
diff --git a/crates/ra_hir_expand/src/builtin_derive.rs b/crates/ra_hir_expand/src/builtin_derive.rs index 26b667b55..f2d664863 100644 --- a/crates/ra_hir_expand/src/builtin_derive.rs +++ b/crates/ra_hir_expand/src/builtin_derive.rs | |||
@@ -161,7 +161,7 @@ fn find_builtin_crate(db: &dyn AstDatabase, id: LazyMacroId) -> tt::TokenTree { | |||
161 | // XXX | 161 | // XXX |
162 | // All crates except core itself should have a dependency on core, | 162 | // All crates except core itself should have a dependency on core, |
163 | // We detect `core` by seeing whether it doesn't have such a dependency. | 163 | // We detect `core` by seeing whether it doesn't have such a dependency. |
164 | let tt = if cg[krate].dependencies.iter().any(|dep| dep.name == "core") { | 164 | let tt = if cg[krate].dependencies.iter().any(|dep| &*dep.name == "core") { |
165 | quote! { core } | 165 | quote! { core } |
166 | } else { | 166 | } else { |
167 | quote! { crate } | 167 | quote! { crate } |
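The comparison becomes `&*dep.name == "core"` because `dep.name` is apparently no longer a plain `SmolStr` (the `name.rs` hunk below adapts to the same field change). Assuming the new name type merely derefs to `str` — the actual `ra_db` type is not shown in this diff — the idiom looks like:

```rust
use std::ops::Deref;

// Hypothetical newtype standing in for the dependency-name type.
struct CrateName(String);

impl Deref for CrateName {
    type Target = str;
    fn deref(&self) -> &str {
        &self.0
    }
}

fn main() {
    let name = CrateName("core".to_string());
    // `*name` derefs to `str`, so `&*name` yields a plain `&str` to compare.
    assert!(&*name == "core");
}
```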
diff --git a/crates/ra_hir_expand/src/diagnostics.rs b/crates/ra_hir_expand/src/diagnostics.rs index 99209c6e8..545cff9bd 100644 --- a/crates/ra_hir_expand/src/diagnostics.rs +++ b/crates/ra_hir_expand/src/diagnostics.rs | |||
@@ -28,7 +28,7 @@ pub trait Diagnostic: Any + Send + Sync + fmt::Debug + 'static { | |||
28 | 28 | ||
29 | pub trait AstDiagnostic { | 29 | pub trait AstDiagnostic { |
30 | type AST; | 30 | type AST; |
31 | fn ast(&self, db: &impl AstDatabase) -> Self::AST; | 31 | fn ast(&self, db: &dyn AstDatabase) -> Self::AST; |
32 | } | 32 | } |
33 | 33 | ||
34 | impl dyn Diagnostic { | 34 | impl dyn Diagnostic { |
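Switching `ast` from `&impl AstDatabase` to `&dyn AstDatabase` removes the implicit generic parameter from the trait method: implementations are compiled once, and the trait stays usable as a trait object. A miniature, made-up example of the same shape (these `Db`/`AstDiag` types are illustrative, not the real hir_expand API):

```rust
trait Db {
    fn text(&self) -> String;
}

// With `fn ast(&self, db: &impl Db)` this trait would have a generic method and
// could not be used as `dyn AstDiag`; taking `&dyn Db` keeps it object-safe.
trait AstDiag {
    fn ast(&self, db: &dyn Db) -> String;
}

struct MissingField;
impl AstDiag for MissingField {
    fn ast(&self, db: &dyn Db) -> String {
        db.text()
    }
}

struct MockDb;
impl Db for MockDb {
    fn text(&self) -> String {
        "struct S { f: u32 }".into()
    }
}

fn main() {
    let diags: Vec<Box<dyn AstDiag>> = vec![Box::new(MissingField)];
    for d in &diags {
        println!("{}", d.ast(&MockDb));
    }
}
```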
diff --git a/crates/ra_hir_expand/src/name.rs b/crates/ra_hir_expand/src/name.rs index 1b0303685..969a2e5b8 100644 --- a/crates/ra_hir_expand/src/name.rs +++ b/crates/ra_hir_expand/src/name.rs | |||
@@ -117,7 +117,7 @@ impl AsName for ast::FieldKind { | |||
117 | 117 | ||
118 | impl AsName for ra_db::Dependency { | 118 | impl AsName for ra_db::Dependency { |
119 | fn as_name(&self) -> Name { | 119 | fn as_name(&self) -> Name { |
120 | Name::new_text(self.name.clone()) | 120 | Name::new_text(SmolStr::new(&*self.name)) |
121 | } | 121 | } |
122 | } | 122 | } |
123 | 123 | ||
diff --git a/crates/ra_hir_ty/src/autoderef.rs b/crates/ra_hir_ty/src/autoderef.rs index 1b0f84c5c..c727012c6 100644 --- a/crates/ra_hir_ty/src/autoderef.rs +++ b/crates/ra_hir_ty/src/autoderef.rs | |||
@@ -37,7 +37,7 @@ pub(crate) fn deref( | |||
37 | ty: InEnvironment<&Canonical<Ty>>, | 37 | ty: InEnvironment<&Canonical<Ty>>, |
38 | ) -> Option<Canonical<Ty>> { | 38 | ) -> Option<Canonical<Ty>> { |
39 | if let Some(derefed) = ty.value.value.builtin_deref() { | 39 | if let Some(derefed) = ty.value.value.builtin_deref() { |
40 | Some(Canonical { value: derefed, num_vars: ty.value.num_vars }) | 40 | Some(Canonical { value: derefed, kinds: ty.value.kinds.clone() }) |
41 | } else { | 41 | } else { |
42 | deref_by_trait(db, krate, ty) | 42 | deref_by_trait(db, krate, ty) |
43 | } | 43 | } |
@@ -68,8 +68,8 @@ fn deref_by_trait( | |||
68 | 68 | ||
69 | // Check that the type implements Deref at all | 69 | // Check that the type implements Deref at all |
70 | let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; | 70 | let trait_ref = TraitRef { trait_: deref_trait, substs: parameters.clone() }; |
71 | let implements_goal = super::Canonical { | 71 | let implements_goal = Canonical { |
72 | num_vars: ty.value.num_vars, | 72 | kinds: ty.value.kinds.clone(), |
73 | value: InEnvironment { | 73 | value: InEnvironment { |
74 | value: Obligation::Trait(trait_ref), | 74 | value: Obligation::Trait(trait_ref), |
75 | environment: ty.environment.clone(), | 75 | environment: ty.environment.clone(), |
@@ -81,7 +81,7 @@ fn deref_by_trait( | |||
81 | 81 | ||
82 | // Now do the assoc type projection | 82 | // Now do the assoc type projection |
83 | let projection = super::traits::ProjectionPredicate { | 83 | let projection = super::traits::ProjectionPredicate { |
84 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.num_vars)), | 84 | ty: Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, ty.value.kinds.len())), |
85 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, | 85 | projection_ty: super::ProjectionTy { associated_ty: target, parameters }, |
86 | }; | 86 | }; |
87 | 87 | ||
@@ -89,7 +89,8 @@ fn deref_by_trait( | |||
89 | 89 | ||
90 | let in_env = InEnvironment { value: obligation, environment: ty.environment }; | 90 | let in_env = InEnvironment { value: obligation, environment: ty.environment }; |
91 | 91 | ||
92 | let canonical = super::Canonical { num_vars: 1 + ty.value.num_vars, value: in_env }; | 92 | let canonical = |
93 | Canonical::new(in_env, ty.value.kinds.iter().copied().chain(Some(super::TyKind::General))); | ||
93 | 94 | ||
94 | let solution = db.trait_solve(krate, canonical)?; | 95 | let solution = db.trait_solve(krate, canonical)?; |
95 | 96 | ||
@@ -110,7 +111,7 @@ fn deref_by_trait( | |||
110 | // assumptions will be broken. We would need to properly introduce | 111 | // assumptions will be broken. We would need to properly introduce |
111 | // new variables in that case | 112 | // new variables in that case |
112 | 113 | ||
113 | for i in 1..vars.0.num_vars { | 114 | for i in 1..vars.0.kinds.len() { |
114 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) | 115 | if vars.0.value[i - 1] != Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, i - 1)) |
115 | { | 116 | { |
116 | warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); | 117 | warn!("complex solution for derefing {:?}: {:?}, ignoring", ty.value, solution); |
@@ -119,7 +120,7 @@ fn deref_by_trait( | |||
119 | } | 120 | } |
120 | Some(Canonical { | 121 | Some(Canonical { |
121 | value: vars.0.value[vars.0.value.len() - 1].clone(), | 122 | value: vars.0.value[vars.0.value.len() - 1].clone(), |
122 | num_vars: vars.0.num_vars, | 123 | kinds: vars.0.kinds.clone(), |
123 | }) | 124 | }) |
124 | } | 125 | } |
125 | Solution::Ambig(_) => { | 126 | Solution::Ambig(_) => { |
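In `deref_by_trait` the bare `num_vars` count is replaced by the variable-kind list: cloning `kinds` is a cheap `Arc` refcount bump, and the fresh variable for the `Target` projection is appended with an iterator `chain`. A small standalone sketch of just that bookkeeping (the full `Canonical::new` appears in the `lib.rs` hunk further down):

```rust
use std::sync::Arc;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum TyKind { General, Integer, Float }

fn main() {
    // Kinds of the bound variables of some canonicalized type.
    let kinds: Arc<[TyKind]> = Arc::new([TyKind::Integer, TyKind::Float]);

    // `kinds.clone()` is just a refcount bump; both handles share one allocation.
    let shared = kinds.clone();
    assert!(Arc::ptr_eq(&kinds, &shared));

    // Add one fresh general variable for the `Deref::Target` projection, as in
    // `kinds.iter().copied().chain(Some(TyKind::General))`.
    let extended: Arc<[TyKind]> = kinds.iter().copied().chain(Some(TyKind::General)).collect();
    assert_eq!(extended.len(), 3);
    assert_eq!(extended[2], TyKind::General);
}
```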
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs index cad553273..dc06c0ee7 100644 --- a/crates/ra_hir_ty/src/db.rs +++ b/crates/ra_hir_ty/src/db.rs | |||
@@ -11,7 +11,7 @@ use ra_db::{impl_intern_key, salsa, CrateId, Upcast}; | |||
11 | use ra_prof::profile; | 11 | use ra_prof::profile; |
12 | 12 | ||
13 | use crate::{ | 13 | use crate::{ |
14 | method_resolution::CrateImplDefs, | 14 | method_resolution::{InherentImpls, TraitImpls}, |
15 | traits::{chalk, AssocTyValue, Impl}, | 15 | traits::{chalk, AssocTyValue, Impl}, |
16 | Binders, CallableDef, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, | 16 | Binders, CallableDef, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, |
17 | ReturnTypeImplTraits, TraitRef, Ty, TyDefId, TypeCtor, ValueTyDefId, | 17 | ReturnTypeImplTraits, TraitRef, Ty, TyDefId, TypeCtor, ValueTyDefId, |
@@ -67,11 +67,14 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { | |||
67 | #[salsa::invoke(crate::lower::generic_defaults_query)] | 67 | #[salsa::invoke(crate::lower::generic_defaults_query)] |
68 | fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; | 68 | fn generic_defaults(&self, def: GenericDefId) -> Arc<[Binders<Ty>]>; |
69 | 69 | ||
70 | #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_in_crate_query)] | 70 | #[salsa::invoke(InherentImpls::inherent_impls_in_crate_query)] |
71 | fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplDefs>; | 71 | fn inherent_impls_in_crate(&self, krate: CrateId) -> Arc<InherentImpls>; |
72 | 72 | ||
73 | #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_from_deps_query)] | 73 | #[salsa::invoke(TraitImpls::trait_impls_in_crate_query)] |
74 | fn impls_from_deps(&self, krate: CrateId) -> Arc<CrateImplDefs>; | 74 | fn trait_impls_in_crate(&self, krate: CrateId) -> Arc<TraitImpls>; |
75 | |||
76 | #[salsa::invoke(TraitImpls::trait_impls_in_deps_query)] | ||
77 | fn trait_impls_in_deps(&self, krate: CrateId) -> Arc<TraitImpls>; | ||
75 | 78 | ||
76 | // Interned IDs for Chalk integration | 79 | // Interned IDs for Chalk integration |
77 | #[salsa::interned] | 80 | #[salsa::interned] |
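The single `impls_in_crate`/`impls_from_deps` pair is split into inherent-impl and trait-impl queries, with trait impls bucketed first by trait and then by an optional self-type fingerprint. A hedged, simplified sketch of that nested-map shape and a possible `merge` (the real `merge` body is not part of this hunk, so it is an assumption; stand-in types throughout):

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TraitId(u32);
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TyFingerprint(u32);
#[derive(Clone, Copy, Debug)]
struct ImplId(u32);

#[derive(Default)]
struct TraitImpls {
    // `None` means the impl may apply to any self type.
    map: HashMap<TraitId, HashMap<Option<TyFingerprint>, Vec<ImplId>>>,
}

impl TraitImpls {
    fn add(&mut self, trait_: TraitId, self_ty: Option<TyFingerprint>, impl_: ImplId) {
        self.map.entry(trait_).or_default().entry(self_ty).or_default().push(impl_);
    }

    // Fold another crate's impl map into this one, bucket by bucket.
    fn merge(&mut self, other: &TraitImpls) {
        for (trait_, by_self_ty) in &other.map {
            let dst = self.map.entry(*trait_).or_default();
            for (fp, impls) in by_self_ty {
                dst.entry(*fp).or_default().extend_from_slice(impls);
            }
        }
    }
}

fn main() {
    let mut core = TraitImpls::default();
    core.add(TraitId(0), Some(TyFingerprint(1)), ImplId(100));

    // `trait_impls_in_deps` merges the per-crate maps of all transitive deps.
    let mut deps = TraitImpls::default();
    deps.merge(&core);
    assert_eq!(deps.map[&TraitId(0)][&Some(TyFingerprint(1))].len(), 1);
}
```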
diff --git a/crates/ra_hir_ty/src/diagnostics.rs b/crates/ra_hir_ty/src/diagnostics.rs index a59efb347..0289911de 100644 --- a/crates/ra_hir_ty/src/diagnostics.rs +++ b/crates/ra_hir_ty/src/diagnostics.rs | |||
@@ -32,7 +32,7 @@ impl Diagnostic for NoSuchField { | |||
32 | impl AstDiagnostic for NoSuchField { | 32 | impl AstDiagnostic for NoSuchField { |
33 | type AST = ast::RecordField; | 33 | type AST = ast::RecordField; |
34 | 34 | ||
35 | fn ast(&self, db: &impl AstDatabase) -> Self::AST { | 35 | fn ast(&self, db: &dyn AstDatabase) -> Self::AST { |
36 | let root = db.parse_or_expand(self.source().file_id).unwrap(); | 36 | let root = db.parse_or_expand(self.source().file_id).unwrap(); |
37 | let node = self.source().value.to_node(&root); | 37 | let node = self.source().value.to_node(&root); |
38 | ast::RecordField::cast(node).unwrap() | 38 | ast::RecordField::cast(node).unwrap() |
@@ -65,7 +65,7 @@ impl Diagnostic for MissingFields { | |||
65 | impl AstDiagnostic for MissingFields { | 65 | impl AstDiagnostic for MissingFields { |
66 | type AST = ast::RecordFieldList; | 66 | type AST = ast::RecordFieldList; |
67 | 67 | ||
68 | fn ast(&self, db: &impl AstDatabase) -> Self::AST { | 68 | fn ast(&self, db: &dyn AstDatabase) -> Self::AST { |
69 | let root = db.parse_or_expand(self.source().file_id).unwrap(); | 69 | let root = db.parse_or_expand(self.source().file_id).unwrap(); |
70 | let node = self.source().value.to_node(&root); | 70 | let node = self.source().value.to_node(&root); |
71 | ast::RecordFieldList::cast(node).unwrap() | 71 | ast::RecordFieldList::cast(node).unwrap() |
@@ -135,7 +135,7 @@ impl Diagnostic for MissingOkInTailExpr { | |||
135 | impl AstDiagnostic for MissingOkInTailExpr { | 135 | impl AstDiagnostic for MissingOkInTailExpr { |
136 | type AST = ast::Expr; | 136 | type AST = ast::Expr; |
137 | 137 | ||
138 | fn ast(&self, db: &impl AstDatabase) -> Self::AST { | 138 | fn ast(&self, db: &dyn AstDatabase) -> Self::AST { |
139 | let root = db.parse_or_expand(self.file).unwrap(); | 139 | let root = db.parse_or_expand(self.file).unwrap(); |
140 | let node = self.source().value.to_node(&root); | 140 | let node = self.source().value.to_node(&root); |
141 | ast::Expr::cast(node).unwrap() | 141 | ast::Expr::cast(node).unwrap() |
@@ -163,7 +163,7 @@ impl Diagnostic for BreakOutsideOfLoop { | |||
163 | impl AstDiagnostic for BreakOutsideOfLoop { | 163 | impl AstDiagnostic for BreakOutsideOfLoop { |
164 | type AST = ast::Expr; | 164 | type AST = ast::Expr; |
165 | 165 | ||
166 | fn ast(&self, db: &impl AstDatabase) -> Self::AST { | 166 | fn ast(&self, db: &dyn AstDatabase) -> Self::AST { |
167 | let root = db.parse_or_expand(self.file).unwrap(); | 167 | let root = db.parse_or_expand(self.file).unwrap(); |
168 | let node = self.source().value.to_node(&root); | 168 | let node = self.source().value.to_node(&root); |
169 | ast::Expr::cast(node).unwrap() | 169 | ast::Expr::cast(node).unwrap() |
@@ -191,7 +191,7 @@ impl Diagnostic for MissingUnsafe { | |||
191 | impl AstDiagnostic for MissingUnsafe { | 191 | impl AstDiagnostic for MissingUnsafe { |
192 | type AST = ast::Expr; | 192 | type AST = ast::Expr; |
193 | 193 | ||
194 | fn ast(&self, db: &impl AstDatabase) -> Self::AST { | 194 | fn ast(&self, db: &dyn AstDatabase) -> Self::AST { |
195 | let root = db.parse_or_expand(self.source().file_id).unwrap(); | 195 | let root = db.parse_or_expand(self.source().file_id).unwrap(); |
196 | let node = self.source().value.to_node(&root); | 196 | let node = self.source().value.to_node(&root); |
197 | ast::Expr::cast(node).unwrap() | 197 | ast::Expr::cast(node).unwrap() |
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs index 269495ca0..2e895d911 100644 --- a/crates/ra_hir_ty/src/infer/unify.rs +++ b/crates/ra_hir_ty/src/infer/unify.rs | |||
@@ -9,7 +9,7 @@ use test_utils::mark; | |||
9 | use super::{InferenceContext, Obligation}; | 9 | use super::{InferenceContext, Obligation}; |
10 | use crate::{ | 10 | use crate::{ |
11 | BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty, | 11 | BoundVar, Canonical, DebruijnIndex, GenericPredicate, InEnvironment, InferTy, Substs, Ty, |
12 | TypeCtor, TypeWalk, | 12 | TyKind, TypeCtor, TypeWalk, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | impl<'a> InferenceContext<'a> { | 15 | impl<'a> InferenceContext<'a> { |
@@ -86,10 +86,20 @@ where | |||
86 | } | 86 | } |
87 | 87 | ||
88 | fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { | 88 | fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { |
89 | Canonicalized { | 89 | let kinds = self |
90 | value: Canonical { value: result, num_vars: self.free_vars.len() }, | 90 | .free_vars |
91 | free_vars: self.free_vars, | 91 | .iter() |
92 | } | 92 | .map(|v| match v { |
93 | // mapping MaybeNeverTypeVar to the same kind as general ones | ||
94 | // should be fine, because as opposed to int or float type vars, | ||
95 | // they don't restrict what kind of type can go into them, they | ||
96 | // just affect fallback. | ||
97 | InferTy::TypeVar(_) | InferTy::MaybeNeverTypeVar(_) => TyKind::General, | ||
98 | InferTy::IntVar(_) => TyKind::Integer, | ||
99 | InferTy::FloatVar(_) => TyKind::Float, | ||
100 | }) | ||
101 | .collect(); | ||
102 | Canonicalized { value: Canonical { value: result, kinds }, free_vars: self.free_vars } | ||
93 | } | 103 | } |
94 | 104 | ||
95 | pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> { | 105 | pub(crate) fn canonicalize_ty(mut self, ty: Ty) -> Canonicalized<Ty> { |
@@ -131,26 +141,41 @@ impl<T> Canonicalized<T> { | |||
131 | ty | 141 | ty |
132 | } | 142 | } |
133 | 143 | ||
134 | pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Vec<Ty>>) { | 144 | pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical<Substs>) { |
135 | // the solution may contain new variables, which we need to convert to new inference vars | 145 | // the solution may contain new variables, which we need to convert to new inference vars |
136 | let new_vars = Substs((0..solution.num_vars).map(|_| ctx.table.new_type_var()).collect()); | 146 | let new_vars = Substs( |
147 | solution | ||
148 | .kinds | ||
149 | .iter() | ||
150 | .map(|k| match k { | ||
151 | TyKind::General => ctx.table.new_type_var(), | ||
152 | TyKind::Integer => ctx.table.new_integer_var(), | ||
153 | TyKind::Float => ctx.table.new_float_var(), | ||
154 | }) | ||
155 | .collect(), | ||
156 | ); | ||
137 | for (i, ty) in solution.value.into_iter().enumerate() { | 157 | for (i, ty) in solution.value.into_iter().enumerate() { |
138 | let var = self.free_vars[i]; | 158 | let var = self.free_vars[i]; |
139 | // eagerly replace projections in the type; we may be getting types | 159 | // eagerly replace projections in the type; we may be getting types |
140 | // e.g. from where clauses where this hasn't happened yet | 160 | // e.g. from where clauses where this hasn't happened yet |
141 | let ty = ctx.normalize_associated_types_in(ty.subst_bound_vars(&new_vars)); | 161 | let ty = ctx.normalize_associated_types_in(ty.clone().subst_bound_vars(&new_vars)); |
142 | ctx.table.unify(&Ty::Infer(var), &ty); | 162 | ctx.table.unify(&Ty::Infer(var), &ty); |
143 | } | 163 | } |
144 | } | 164 | } |
145 | } | 165 | } |
146 | 166 | ||
147 | pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> { | 167 | pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option<Substs> { |
148 | let mut table = InferenceTable::new(); | 168 | let mut table = InferenceTable::new(); |
149 | let num_vars = ty1.num_vars.max(ty2.num_vars); | 169 | let vars = Substs( |
150 | let vars = | 170 | tys.kinds |
151 | Substs::builder(num_vars).fill(std::iter::repeat_with(|| table.new_type_var())).build(); | 171 | .iter() |
152 | let ty1_with_vars = ty1.value.clone().subst_bound_vars(&vars); | 172 | // we always use type vars here because we want everything to |
153 | let ty2_with_vars = ty2.value.clone().subst_bound_vars(&vars); | 173 | // fallback to Unknown in the end (kind of hacky, as below) |
174 | .map(|_| table.new_type_var()) | ||
175 | .collect(), | ||
176 | ); | ||
177 | let ty1_with_vars = tys.value.0.clone().subst_bound_vars(&vars); | ||
178 | let ty2_with_vars = tys.value.1.clone().subst_bound_vars(&vars); | ||
154 | if !table.unify(&ty1_with_vars, &ty2_with_vars) { | 179 | if !table.unify(&ty1_with_vars, &ty2_with_vars) { |
155 | return None; | 180 | return None; |
156 | } | 181 | } |
@@ -162,7 +187,7 @@ pub fn unify(ty1: &Canonical<Ty>, ty2: &Canonical<Ty>) -> Option<Substs> { | |||
162 | } | 187 | } |
163 | } | 188 | } |
164 | Some( | 189 | Some( |
165 | Substs::builder(ty1.num_vars) | 190 | Substs::builder(tys.kinds.len()) |
166 | .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone()))) | 191 | .fill(vars.iter().map(|v| table.resolve_ty_completely(v.clone()))) |
167 | .build(), | 192 | .build(), |
168 | ) | 193 | ) |
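The canonicalizer now records one `TyKind` per free variable (with `MaybeNeverTypeVar` folded into `General`, since it only affects fallback), and `apply_solution` turns each recorded kind back into a fresh inference variable of the matching flavour. A hedged, standalone sketch of that per-kind re-instantiation (hypothetical helper, simplified types, not the real `InferenceTable`):

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum TyKind { General, Integer, Float }

#[derive(Debug, PartialEq, Eq)]
enum InferVar { Type(u32), Int(u32), Float(u32) }

// Mirrors what `apply_solution` does with `new_type_var` / `new_integer_var` /
// `new_float_var` for each entry in `solution.kinds`.
fn fresh_vars(kinds: &[TyKind]) -> Vec<InferVar> {
    kinds
        .iter()
        .enumerate()
        .map(|(i, kind)| match kind {
            TyKind::General => InferVar::Type(i as u32),
            TyKind::Integer => InferVar::Int(i as u32),
            TyKind::Float => InferVar::Float(i as u32),
        })
        .collect()
}

fn main() {
    let vars = fresh_vars(&[TyKind::General, TyKind::Integer, TyKind::Float]);
    assert_eq!(vars, vec![InferVar::Type(0), InferVar::Int(1), InferVar::Float(2)]);
}
```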
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs index c9513b752..7f3f5e771 100644 --- a/crates/ra_hir_ty/src/lib.rs +++ b/crates/ra_hir_ty/src/lib.rs | |||
@@ -662,13 +662,27 @@ impl TypeWalk for GenericPredicate { | |||
662 | 662 | ||
663 | /// Basically a claim (currently not validated / checked) that the contained | 663 | /// Basically a claim (currently not validated / checked) that the contained |
664 | /// type / trait ref contains no inference variables; any inference variables it | 664 | /// type / trait ref contains no inference variables; any inference variables it |
665 | /// contained have been replaced by bound variables, and `num_vars` tells us how | 665 | /// contained have been replaced by bound variables, and `kinds` tells us how |
666 | /// many there are. This is used to erase irrelevant differences between types | 666 | /// many there are and whether they were normal or float/int variables. This is |
667 | /// before using them in queries. | 667 | /// used to erase irrelevant differences between types before using them in |
668 | /// queries. | ||
668 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 669 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
669 | pub struct Canonical<T> { | 670 | pub struct Canonical<T> { |
670 | pub value: T, | 671 | pub value: T, |
671 | pub num_vars: usize, | 672 | pub kinds: Arc<[TyKind]>, |
673 | } | ||
674 | |||
675 | impl<T> Canonical<T> { | ||
676 | pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self { | ||
677 | Self { value, kinds: kinds.into_iter().collect() } | ||
678 | } | ||
679 | } | ||
680 | |||
681 | #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] | ||
682 | pub enum TyKind { | ||
683 | General, | ||
684 | Integer, | ||
685 | Float, | ||
672 | } | 686 | } |
673 | 687 | ||
674 | /// A function signature as seen by type inference: Several parameter types and | 688 | /// A function signature as seen by type inference: Several parameter types and |
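The replacement of `num_vars: usize` by `kinds: Arc<[TyKind]>` above also gains a convenience constructor. As a tiny self-contained program mirroring the new shape (the struct and constructor follow the diff; the `main` usage is illustrative): an empty `kinds` list plays the role of the old `num_vars: 0`, and collecting an iterator into `Arc<[TyKind]>` works because `Arc<[T]>` implements `FromIterator`.

```rust
use std::sync::Arc;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum TyKind { General, Integer, Float }

#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Canonical<T> {
    pub value: T,
    pub kinds: Arc<[TyKind]>,
}

impl<T> Canonical<T> {
    pub fn new(value: T, kinds: impl IntoIterator<Item = TyKind>) -> Self {
        Self { value, kinds: kinds.into_iter().collect() }
    }
}

fn main() {
    // The old `Canonical { value, num_vars: 0 }` becomes an empty kind list.
    let no_vars = Canonical { value: "u32", kinds: Arc::new([]) };
    assert!(no_vars.kinds.is_empty());

    // One general and one integer variable, e.g. for `(T, {integer})`.
    let two_vars = Canonical::new("(T, {integer})", vec![TyKind::General, TyKind::Integer]);
    assert_eq!(two_vars.kinds.len(), 2);
}
```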
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs index c19519cf1..a45febbf7 100644 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ b/crates/ra_hir_ty/src/method_resolution.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | //! For details about how this works in rustc, see the method lookup page in the | 2 | //! For details about how this works in rustc, see the method lookup page in the |
3 | //! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) | 3 | //! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html) |
4 | //! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. | 4 | //! and the corresponding code mostly in librustc_typeck/check/method/probe.rs. |
5 | use std::sync::Arc; | 5 | use std::{iter, sync::Arc}; |
6 | 6 | ||
7 | use arrayvec::ArrayVec; | 7 | use arrayvec::ArrayVec; |
8 | use hir_def::{ | 8 | use hir_def::{ |
@@ -17,7 +17,8 @@ use rustc_hash::{FxHashMap, FxHashSet}; | |||
17 | use super::Substs; | 17 | use super::Substs; |
18 | use crate::{ | 18 | use crate::{ |
19 | autoderef, db::HirDatabase, primitive::FloatBitness, utils::all_super_traits, ApplicationTy, | 19 | autoderef, db::HirDatabase, primitive::FloatBitness, utils::all_super_traits, ApplicationTy, |
20 | Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, | 20 | Canonical, DebruijnIndex, InEnvironment, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor, |
21 | TypeWalk, | ||
21 | }; | 22 | }; |
22 | 23 | ||
23 | /// This is used as a key for indexing impls. | 24 | /// This is used as a key for indexing impls. |
@@ -38,136 +39,131 @@ impl TyFingerprint { | |||
38 | } | 39 | } |
39 | } | 40 | } |
40 | 41 | ||
41 | /// A queryable and mergeable collection of impls. | 42 | /// Trait impls defined or available in some crate. |
42 | #[derive(Debug, PartialEq, Eq)] | 43 | #[derive(Debug, Eq, PartialEq)] |
43 | pub struct CrateImplDefs { | 44 | pub struct TraitImpls { |
44 | inherent_impls: FxHashMap<TyFingerprint, Vec<ImplId>>, | 45 | // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type. |
45 | impls_by_trait: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>, | 46 | map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>, |
46 | } | 47 | } |
47 | 48 | ||
48 | impl CrateImplDefs { | 49 | impl TraitImpls { |
49 | pub(crate) fn impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<CrateImplDefs> { | 50 | pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { |
50 | let _p = profile("impls_in_crate_query"); | 51 | let _p = profile("trait_impls_in_crate_query"); |
51 | let mut res = CrateImplDefs { | 52 | let mut impls = Self { map: FxHashMap::default() }; |
52 | inherent_impls: FxHashMap::default(), | ||
53 | impls_by_trait: FxHashMap::default(), | ||
54 | }; | ||
55 | res.fill(db, krate); | ||
56 | 53 | ||
57 | Arc::new(res) | 54 | let crate_def_map = db.crate_def_map(krate); |
55 | for (_module_id, module_data) in crate_def_map.modules.iter() { | ||
56 | for impl_id in module_data.scope.impls() { | ||
57 | let target_trait = match db.impl_trait(impl_id) { | ||
58 | Some(tr) => tr.value.trait_, | ||
59 | None => continue, | ||
60 | }; | ||
61 | let self_ty = db.impl_self_ty(impl_id); | ||
62 | let self_ty_fp = TyFingerprint::for_impl(&self_ty.value); | ||
63 | impls | ||
64 | .map | ||
65 | .entry(target_trait) | ||
66 | .or_default() | ||
67 | .entry(self_ty_fp) | ||
68 | .or_default() | ||
69 | .push(impl_id); | ||
70 | } | ||
71 | } | ||
72 | |||
73 | Arc::new(impls) | ||
58 | } | 74 | } |
59 | 75 | ||
60 | /// Collects all impls from transitive dependencies of `krate` that may be used by `krate`. | 76 | pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { |
61 | /// | 77 | let _p = profile("trait_impls_in_deps_query"); |
62 | /// The full set of impls that can be used by `krate` is the returned map plus all the impls | ||
63 | /// from `krate` itself. | ||
64 | pub(crate) fn impls_from_deps_query( | ||
65 | db: &dyn HirDatabase, | ||
66 | krate: CrateId, | ||
67 | ) -> Arc<CrateImplDefs> { | ||
68 | let _p = profile("impls_from_deps_query"); | ||
69 | let crate_graph = db.crate_graph(); | 78 | let crate_graph = db.crate_graph(); |
70 | let mut res = CrateImplDefs { | 79 | let mut res = Self { map: FxHashMap::default() }; |
71 | inherent_impls: FxHashMap::default(), | ||
72 | impls_by_trait: FxHashMap::default(), | ||
73 | }; | ||
74 | 80 | ||
75 | // For each dependency, calculate `impls_from_deps` recursively, then add its own | 81 | for krate in crate_graph.transitive_deps(krate) { |
76 | // `impls_in_crate`. | 82 | res.merge(&db.trait_impls_in_crate(krate)); |
77 | // As we might visit crates multiple times, `merge` has to deduplicate impls to avoid | ||
78 | // wasting memory. | ||
79 | for dep in &crate_graph[krate].dependencies { | ||
80 | res.merge(&db.impls_from_deps(dep.crate_id)); | ||
81 | res.merge(&db.impls_in_crate(dep.crate_id)); | ||
82 | } | 83 | } |
83 | 84 | ||
84 | Arc::new(res) | 85 | Arc::new(res) |
85 | } | 86 | } |
86 | 87 | ||
87 | fn fill(&mut self, db: &dyn HirDatabase, krate: CrateId) { | ||
88 | let crate_def_map = db.crate_def_map(krate); | ||
89 | for (_module_id, module_data) in crate_def_map.modules.iter() { | ||
90 | for impl_id in module_data.scope.impls() { | ||
91 | match db.impl_trait(impl_id) { | ||
92 | Some(tr) => { | ||
93 | let self_ty = db.impl_self_ty(impl_id); | ||
94 | let self_ty_fp = TyFingerprint::for_impl(&self_ty.value); | ||
95 | self.impls_by_trait | ||
96 | .entry(tr.value.trait_) | ||
97 | .or_default() | ||
98 | .entry(self_ty_fp) | ||
99 | .or_default() | ||
100 | .push(impl_id); | ||
101 | } | ||
102 | None => { | ||
103 | let self_ty = db.impl_self_ty(impl_id); | ||
104 | if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty.value) { | ||
105 | self.inherent_impls.entry(self_ty_fp).or_default().push(impl_id); | ||
106 | } | ||
107 | } | ||
108 | } | ||
109 | } | ||
110 | } | ||
111 | } | ||
112 | |||
113 | fn merge(&mut self, other: &Self) { | 88 | fn merge(&mut self, other: &Self) { |
114 | for (fp, impls) in &other.inherent_impls { | 89 | for (trait_, other_map) in &other.map { |
115 | let vec = self.inherent_impls.entry(*fp).or_default(); | 90 | let map = self.map.entry(*trait_).or_default(); |
116 | vec.extend(impls); | ||
117 | vec.sort(); | ||
118 | vec.dedup(); | ||
119 | } | ||
120 | |||
121 | for (trait_, other_map) in &other.impls_by_trait { | ||
122 | let map = self.impls_by_trait.entry(*trait_).or_default(); | ||
123 | for (fp, impls) in other_map { | 91 | for (fp, impls) in other_map { |
124 | let vec = map.entry(*fp).or_default(); | 92 | let vec = map.entry(*fp).or_default(); |
125 | vec.extend(impls); | 93 | vec.extend(impls); |
126 | vec.sort(); | ||
127 | vec.dedup(); | ||
128 | } | 94 | } |
129 | } | 95 | } |
130 | } | 96 | } |
131 | 97 | ||
132 | pub fn lookup_impl_defs(&self, ty: &Ty) -> impl Iterator<Item = ImplId> + '_ { | 98 | /// Queries all impls of the given trait. |
133 | let fingerprint = TyFingerprint::for_impl(ty); | 99 | pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ { |
134 | fingerprint.and_then(|f| self.inherent_impls.get(&f)).into_iter().flatten().copied() | 100 | self.map |
135 | } | 101 | .get(&trait_) |
136 | |||
137 | pub fn lookup_impl_defs_for_trait(&self, tr: TraitId) -> impl Iterator<Item = ImplId> + '_ { | ||
138 | self.impls_by_trait | ||
139 | .get(&tr) | ||
140 | .into_iter() | 102 | .into_iter() |
141 | .flat_map(|m| m.values().flat_map(|v| v.iter().copied())) | 103 | .flat_map(|map| map.values().flat_map(|v| v.iter().copied())) |
142 | } | 104 | } |
143 | 105 | ||
144 | pub fn lookup_impl_defs_for_trait_and_ty( | 106 | /// Queries all impls of `trait_` that may apply to `self_ty`. |
107 | pub fn for_trait_and_self_ty( | ||
145 | &self, | 108 | &self, |
146 | tr: TraitId, | 109 | trait_: TraitId, |
147 | fp: TyFingerprint, | 110 | self_ty: TyFingerprint, |
148 | ) -> impl Iterator<Item = ImplId> + '_ { | 111 | ) -> impl Iterator<Item = ImplId> + '_ { |
149 | self.impls_by_trait | 112 | self.map |
150 | .get(&tr) | 113 | .get(&trait_) |
151 | .and_then(|m| m.get(&Some(fp))) | ||
152 | .into_iter() | 114 | .into_iter() |
153 | .flatten() | 115 | .flat_map(move |map| map.get(&None).into_iter().chain(map.get(&Some(self_ty)))) |
154 | .copied() | 116 | .flat_map(|v| v.iter().copied()) |
155 | .chain( | 117 | } |
156 | self.impls_by_trait | 118 | |
157 | .get(&tr) | 119 | pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ { |
158 | .and_then(|m| m.get(&None)) | 120 | self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied())) |
159 | .into_iter() | 121 | } |
160 | .flatten() | 122 | } |
161 | .copied(), | 123 | |
162 | ) | 124 | /// Inherent impls defined in some crate. |
125 | /// | ||
126 | /// Inherent impls can only be defined in the crate that also defines the self type of the impl | ||
127 | /// (note that some primitives are considered to be defined by both libcore and liballoc). | ||
128 | /// | ||
129 | /// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a | ||
130 | /// single crate. | ||
131 | #[derive(Debug, Eq, PartialEq)] | ||
132 | pub struct InherentImpls { | ||
133 | map: FxHashMap<TyFingerprint, Vec<ImplId>>, | ||
134 | } | ||
135 | |||
136 | impl InherentImpls { | ||
137 | pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> { | ||
138 | let mut map: FxHashMap<_, Vec<_>> = FxHashMap::default(); | ||
139 | |||
140 | let crate_def_map = db.crate_def_map(krate); | ||
141 | for (_module_id, module_data) in crate_def_map.modules.iter() { | ||
142 | for impl_id in module_data.scope.impls() { | ||
143 | let data = db.impl_data(impl_id); | ||
144 | if data.target_trait.is_some() { | ||
145 | continue; | ||
146 | } | ||
147 | |||
148 | let self_ty = db.impl_self_ty(impl_id); | ||
149 | if let Some(fp) = TyFingerprint::for_impl(&self_ty.value) { | ||
150 | map.entry(fp).or_default().push(impl_id); | ||
151 | } | ||
152 | } | ||
153 | } | ||
154 | |||
155 | Arc::new(Self { map }) | ||
156 | } | ||
157 | |||
158 | pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] { | ||
159 | match TyFingerprint::for_impl(self_ty) { | ||
160 | Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]), | ||
161 | None => &[], | ||
162 | } | ||
163 | } | 163 | } |
164 | 164 | ||
165 | pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplId> + 'a { | 165 | pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ { |
166 | self.inherent_impls | 166 | self.map.values().flat_map(|v| v.iter().copied()) |
167 | .values() | ||
168 | .chain(self.impls_by_trait.values().flat_map(|m| m.values())) | ||
169 | .flatten() | ||
170 | .copied() | ||
171 | } | 167 | } |
172 | } | 168 | } |
173 | 169 | ||
@@ -377,7 +373,7 @@ fn iterate_method_candidates_with_autoref( | |||
377 | return true; | 373 | return true; |
378 | } | 374 | } |
379 | let refed = Canonical { | 375 | let refed = Canonical { |
380 | num_vars: deref_chain[0].num_vars, | 376 | kinds: deref_chain[0].kinds.clone(), |
381 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), | 377 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Shared), deref_chain[0].value.clone()), |
382 | }; | 378 | }; |
383 | if iterate_method_candidates_by_receiver( | 379 | if iterate_method_candidates_by_receiver( |
@@ -393,7 +389,7 @@ fn iterate_method_candidates_with_autoref( | |||
393 | return true; | 389 | return true; |
394 | } | 390 | } |
395 | let ref_muted = Canonical { | 391 | let ref_muted = Canonical { |
396 | num_vars: deref_chain[0].num_vars, | 392 | kinds: deref_chain[0].kinds.clone(), |
397 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), | 393 | value: Ty::apply_one(TypeCtor::Ref(Mutability::Mut), deref_chain[0].value.clone()), |
398 | }; | 394 | }; |
399 | if iterate_method_candidates_by_receiver( | 395 | if iterate_method_candidates_by_receiver( |
@@ -524,9 +520,9 @@ fn iterate_inherent_methods( | |||
524 | None => return false, | 520 | None => return false, |
525 | }; | 521 | }; |
526 | for krate in def_crates { | 522 | for krate in def_crates { |
527 | let impls = db.impls_in_crate(krate); | 523 | let impls = db.inherent_impls_in_crate(krate); |
528 | 524 | ||
529 | for impl_def in impls.lookup_impl_defs(&self_ty.value) { | 525 | for &impl_def in impls.for_self_ty(&self_ty.value) { |
530 | for &item in db.impl_data(impl_def).items.iter() { | 526 | for &item in db.impl_data(impl_def).items.iter() { |
531 | if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { | 527 | if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { |
532 | continue; | 528 | continue; |
@@ -612,18 +608,19 @@ pub(crate) fn inherent_impl_substs( | |||
612 | // we create a var for each type parameter of the impl; we need to keep in | 608 | // we create a var for each type parameter of the impl; we need to keep in |
613 | // mind here that `self_ty` might have vars of its own | 609 | // mind here that `self_ty` might have vars of its own |
614 | let vars = Substs::build_for_def(db, impl_id) | 610 | let vars = Substs::build_for_def(db, impl_id) |
615 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.num_vars) | 611 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, self_ty.kinds.len()) |
616 | .build(); | 612 | .build(); |
617 | let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); | 613 | let self_ty_with_vars = db.impl_self_ty(impl_id).subst(&vars); |
618 | let self_ty_with_vars = | 614 | let mut kinds = self_ty.kinds.to_vec(); |
619 | Canonical { num_vars: vars.len() + self_ty.num_vars, value: self_ty_with_vars }; | 615 | kinds.extend(iter::repeat(TyKind::General).take(vars.len())); |
620 | let substs = super::infer::unify(&self_ty_with_vars, self_ty); | 616 | let tys = Canonical { kinds: kinds.into(), value: (self_ty_with_vars, self_ty.value.clone()) }; |
617 | let substs = super::infer::unify(&tys); | ||
621 | // We only want the substs for the vars we added, not the ones from self_ty. | 618 | // We only want the substs for the vars we added, not the ones from self_ty. |
622 | // Also, if any of the vars we added are still in there, we replace them by | 619 | // Also, if any of the vars we added are still in there, we replace them by |
623 | // Unknown. I think this can only really happen if self_ty contained | 620 | // Unknown. I think this can only really happen if self_ty contained |
624 | // Unknown, and in that case we want the result to contain Unknown in those | 621 | // Unknown, and in that case we want the result to contain Unknown in those |
625 | // places again. | 622 | // places again. |
626 | substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.num_vars)) | 623 | substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.kinds.len())) |
627 | } | 624 | } |
628 | 625 | ||
629 | /// This replaces any 'free' Bound vars in `s` (i.e. those with indices past | 626 | /// This replaces any 'free' Bound vars in `s` (i.e. those with indices past |
@@ -683,15 +680,15 @@ fn generic_implements_goal( | |||
683 | trait_: TraitId, | 680 | trait_: TraitId, |
684 | self_ty: Canonical<Ty>, | 681 | self_ty: Canonical<Ty>, |
685 | ) -> Canonical<InEnvironment<super::Obligation>> { | 682 | ) -> Canonical<InEnvironment<super::Obligation>> { |
686 | let num_vars = self_ty.num_vars; | 683 | let mut kinds = self_ty.kinds.to_vec(); |
687 | let substs = super::Substs::build_for_def(db, trait_) | 684 | let substs = super::Substs::build_for_def(db, trait_) |
688 | .push(self_ty.value) | 685 | .push(self_ty.value) |
689 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, num_vars) | 686 | .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len()) |
690 | .build(); | 687 | .build(); |
691 | let num_vars = substs.len() - 1 + self_ty.num_vars; | 688 | kinds.extend(iter::repeat(TyKind::General).take(substs.len() - 1)); |
692 | let trait_ref = TraitRef { trait_, substs }; | 689 | let trait_ref = TraitRef { trait_, substs }; |
693 | let obligation = super::Obligation::Trait(trait_ref); | 690 | let obligation = super::Obligation::Trait(trait_ref); |
694 | Canonical { num_vars, value: InEnvironment::new(env, obligation) } | 691 | Canonical { kinds: kinds.into(), value: InEnvironment::new(env, obligation) } |
695 | } | 692 | } |
696 | 693 | ||
697 | fn autoderef_method_receiver( | 694 | fn autoderef_method_receiver( |
@@ -704,9 +701,9 @@ fn autoderef_method_receiver( | |||
704 | if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = | 701 | if let Some(Ty::Apply(ApplicationTy { ctor: TypeCtor::Array, parameters })) = |
705 | deref_chain.last().map(|ty| &ty.value) | 702 | deref_chain.last().map(|ty| &ty.value) |
706 | { | 703 | { |
707 | let num_vars = deref_chain.last().unwrap().num_vars; | 704 | let kinds = deref_chain.last().unwrap().kinds.clone(); |
708 | let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); | 705 | let unsized_ty = Ty::apply(TypeCtor::Slice, parameters.clone()); |
709 | deref_chain.push(Canonical { value: unsized_ty, num_vars }) | 706 | deref_chain.push(Canonical { value: unsized_ty, kinds }) |
710 | } | 707 | } |
711 | deref_chain | 708 | deref_chain |
712 | } | 709 | } |
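Both `inherent_impl_substs` and `generic_implements_goal` now follow the same bookkeeping pattern: start from the `kinds` of the incoming canonical value and append `TyKind::General` for every freshly introduced bound variable. A self-contained sketch of just that pattern; the `extend_kinds` helper and the placeholder value are illustrative, not part of the crate:

```rust
use std::{iter, sync::Arc};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum TyKind {
    General,
    Integer,
    Float,
}

#[derive(Debug)]
struct Canonical<T> {
    value: T,
    kinds: Arc<[TyKind]>,
}

// Shared bookkeeping: keep the kinds of the incoming canonical value and append
// `General` for every freshly introduced bound variable (the new variables
// stand for impl/trait type parameters, i.e. ordinary type variables).
fn extend_kinds<T>(existing: &Canonical<T>, fresh_vars: usize) -> Arc<[TyKind]> {
    let mut kinds = existing.kinds.to_vec();
    kinds.extend(iter::repeat(TyKind::General).take(fresh_vars));
    kinds.into()
}

fn main() {
    // A canonical self type that already contains an integer and a float variable.
    let self_ty = Canonical {
        value: "self type with two variables",
        kinds: vec![TyKind::Integer, TyKind::Float].into(),
    };
    // Suppose the trait has one more type parameter besides `Self`.
    let kinds = extend_kinds(&self_ty, 1);
    assert_eq!(&kinds[..], &[TyKind::Integer, TyKind::Float, TyKind::General][..]);
    println!("{:?} -> {:?}", self_ty, kinds);
}
```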
diff --git a/crates/ra_hir_ty/src/test_db.rs b/crates/ra_hir_ty/src/test_db.rs index 0481a7b12..fddf0604d 100644 --- a/crates/ra_hir_ty/src/test_db.rs +++ b/crates/ra_hir_ty/src/test_db.rs | |||
@@ -8,8 +8,10 @@ use std::{ | |||
8 | use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId, ModuleId}; | 8 | use hir_def::{db::DefDatabase, AssocItemId, ModuleDefId, ModuleId}; |
9 | use hir_expand::{db::AstDatabase, diagnostics::DiagnosticSink}; | 9 | use hir_expand::{db::AstDatabase, diagnostics::DiagnosticSink}; |
10 | use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast}; | 10 | use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast}; |
11 | use rustc_hash::FxHashSet; | 11 | use ra_syntax::TextRange; |
12 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
12 | use stdx::format_to; | 13 | use stdx::format_to; |
14 | use test_utils::extract_annotations; | ||
13 | 15 | ||
14 | use crate::{ | 16 | use crate::{ |
15 | db::HirDatabase, diagnostics::Diagnostic, expr::ExprValidator, | 17 | db::HirDatabase, diagnostics::Diagnostic, expr::ExprValidator, |
@@ -155,17 +157,27 @@ impl TestDB { | |||
155 | (buf, count) | 157 | (buf, count) |
156 | } | 158 | } |
157 | 159 | ||
158 | pub fn all_files(&self) -> Vec<FileId> { | 160 | pub fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> { |
159 | let mut res = Vec::new(); | 161 | let mut files = Vec::new(); |
160 | let crate_graph = self.crate_graph(); | 162 | let crate_graph = self.crate_graph(); |
161 | for krate in crate_graph.iter() { | 163 | for krate in crate_graph.iter() { |
162 | let crate_def_map = self.crate_def_map(krate); | 164 | let crate_def_map = self.crate_def_map(krate); |
163 | for (module_id, _) in crate_def_map.modules.iter() { | 165 | for (module_id, _) in crate_def_map.modules.iter() { |
164 | let file_id = crate_def_map[module_id].origin.file_id(); | 166 | let file_id = crate_def_map[module_id].origin.file_id(); |
165 | res.extend(file_id) | 167 | files.extend(file_id) |
166 | } | 168 | } |
167 | } | 169 | } |
168 | res | 170 | files |
171 | .into_iter() | ||
172 | .filter_map(|file_id| { | ||
173 | let text = self.file_text(file_id); | ||
174 | let annotations = extract_annotations(&text); | ||
175 | if annotations.is_empty() { | ||
176 | return None; | ||
177 | } | ||
178 | Some((file_id, annotations)) | ||
179 | }) | ||
180 | .collect() | ||
169 | } | 181 | } |
170 | } | 182 | } |
171 | 183 | ||
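`TestDB::extract_annotations` now gathers the `//^^^ expected` markers from every fixture file up front, instead of each test re-parsing the files itself. The sketch below is a simplified approximation of that annotation convention, not the real `test_utils::extract_annotations` (which handles more cases); it only shows how a caret comment maps to a byte range in the previous line plus the expected text:

```rust
/// Simplified approximation of the `//^^^ expected` convention used in the
/// fixtures. Returns `(byte_range_in_previous_line, expected_text)` pairs.
fn extract_annotations(text: &str) -> Vec<((usize, usize), String)> {
    let mut res = Vec::new();
    let mut prev_line_start = 0;
    let mut offset = 0;
    for line in text.split_inclusive('\n') {
        let trimmed = line.trim_start();
        let indent = line.len() - trimmed.len();
        if let Some(rest) = trimmed.strip_prefix("//") {
            if rest.starts_with('^') {
                // The carets start right after `//` and point at the same
                // columns of the previous line.
                let carets = rest.chars().take_while(|&c| c == '^').count();
                let start = prev_line_start + indent + 2;
                let expected = rest[carets..].trim().to_string();
                res.push(((start, start + carets), expected));
                offset += line.len();
                continue;
            }
        }
        prev_line_start = offset;
        offset += line.len();
    }
    res
}

fn main() {
    let fixture = "\
fn test() {
    S.get(1);
  //^^^^^^^^ u128
}
";
    for ((start, end), expected) in extract_annotations(fixture) {
        // Prints `16..24: u128 ("S.get(1)")` for this input.
        println!("{}..{}: {} ({:?})", start, end, expected, &fixture[start..end]);
    }
}
```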
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs index 5424e6bb1..eeac34d14 100644 --- a/crates/ra_hir_ty/src/tests.rs +++ b/crates/ra_hir_ty/src/tests.rs | |||
@@ -28,7 +28,6 @@ use ra_syntax::{ | |||
28 | SyntaxNode, | 28 | SyntaxNode, |
29 | }; | 29 | }; |
30 | use stdx::format_to; | 30 | use stdx::format_to; |
31 | use test_utils::extract_annotations; | ||
32 | 31 | ||
33 | use crate::{ | 32 | use crate::{ |
34 | db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty, | 33 | db::HirDatabase, display::HirDisplay, infer::TypeMismatch, test_db::TestDB, InferenceResult, Ty, |
@@ -49,9 +48,7 @@ fn check_types_source_code(ra_fixture: &str) { | |||
49 | fn check_types_impl(ra_fixture: &str, display_source: bool) { | 48 | fn check_types_impl(ra_fixture: &str, display_source: bool) { |
50 | let db = TestDB::with_files(ra_fixture); | 49 | let db = TestDB::with_files(ra_fixture); |
51 | let mut checked_one = false; | 50 | let mut checked_one = false; |
52 | for file_id in db.all_files() { | 51 | for (file_id, annotations) in db.extract_annotations() { |
53 | let text = db.parse(file_id).syntax_node().to_string(); | ||
54 | let annotations = extract_annotations(&text); | ||
55 | for (range, expected) in annotations { | 52 | for (range, expected) in annotations { |
56 | let ty = type_at_range(&db, FileRange { file_id, range }); | 53 | let ty = type_at_range(&db, FileRange { file_id, range }); |
57 | let actual = if display_source { | 54 | let actual = if display_source { |
@@ -511,6 +508,30 @@ fn no_such_field_with_feature_flag_diagnostics_on_struct_fields() { | |||
511 | } | 508 | } |
512 | 509 | ||
513 | #[test] | 510 | #[test] |
511 | fn no_such_field_with_type_macro() { | ||
512 | let diagnostics = TestDB::with_files( | ||
513 | r" | ||
514 | macro_rules! Type { | ||
515 | () => { u32 }; | ||
516 | } | ||
517 | |||
518 | struct Foo { | ||
519 | bar: Type![], | ||
520 | } | ||
521 | impl Foo { | ||
522 | fn new() -> Self { | ||
523 | Foo { bar: 0 } | ||
524 | } | ||
525 | } | ||
526 | ", | ||
527 | ) | ||
528 | .diagnostics() | ||
529 | .0; | ||
530 | |||
531 | assert_snapshot!(diagnostics, @r###""###); | ||
532 | } | ||
533 | |||
534 | #[test] | ||
514 | fn missing_record_pat_field_diagnostic() { | 535 | fn missing_record_pat_field_diagnostic() { |
515 | let diagnostics = TestDB::with_files( | 536 | let diagnostics = TestDB::with_files( |
516 | r" | 537 | r" |
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs index 01c919a7e..766790576 100644 --- a/crates/ra_hir_ty/src/tests/traits.rs +++ b/crates/ra_hir_ty/src/tests/traits.rs | |||
@@ -3029,3 +3029,21 @@ fn infer_dyn_fn_output() { | |||
3029 | "### | 3029 | "### |
3030 | ); | 3030 | ); |
3031 | } | 3031 | } |
3032 | |||
3033 | #[test] | ||
3034 | fn variable_kinds() { | ||
3035 | check_types( | ||
3036 | r#" | ||
3037 | trait Trait<T> { fn get(self, t: T) -> T; } | ||
3038 | struct S; | ||
3039 | impl Trait<u128> for S {} | ||
3040 | impl Trait<f32> for S {} | ||
3041 | fn test() { | ||
3042 | S.get(1); | ||
3043 | //^^^^^^^^ u128 | ||
3044 | S.get(1.); | ||
3045 | //^^^^^^^^^ f32 | ||
3046 | } | ||
3047 | "#, | ||
3048 | ); | ||
3049 | } | ||
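The new `variable_kinds` test exercises the point of the whole change: the literal `1` introduces an integer-kind inference variable and `1.` a float-kind one, so only the matching impl (`Trait<u128>` or `Trait<f32>`) remains a viable candidate. A toy illustration of that restriction; `kind_allows` and `SimpleTy` are invented for this sketch, and the real check happens during unification and trait solving:

```rust
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum TyKind { General, Integer, Float }

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum SimpleTy { U128, F32, Bool }

// Toy model of the restriction: an integer-kind variable may only be
// instantiated with an integer type, a float-kind variable only with a float
// type, and a general variable with anything.
fn kind_allows(kind: TyKind, ty: SimpleTy) -> bool {
    match kind {
        TyKind::General => true,
        TyKind::Integer => matches!(ty, SimpleTy::U128),
        TyKind::Float => matches!(ty, SimpleTy::F32),
    }
}

fn main() {
    // `S.get(1)`: the literal's variable is integer-kind, so only the
    // `impl Trait<u128> for S` candidate survives.
    assert!(kind_allows(TyKind::Integer, SimpleTy::U128));
    assert!(!kind_allows(TyKind::Integer, SimpleTy::F32));
    // `S.get(1.)`: the float-kind variable rules out the u128 impl instead.
    assert!(kind_allows(TyKind::Float, SimpleTy::F32));
    assert!(!kind_allows(TyKind::Float, SimpleTy::U128));
    // A general variable would accept either type; tracking kinds is what lets
    // the solver pick the matching impl in this test.
    assert!(kind_allows(TyKind::General, SimpleTy::Bool));
}
```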
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs index 6f43c3a22..2a6d7faef 100644 --- a/crates/ra_hir_ty/src/traits.rs +++ b/crates/ra_hir_ty/src/traits.rs | |||
@@ -1,5 +1,5 @@ | |||
1 | //! Trait solving using Chalk. | 1 | //! Trait solving using Chalk. |
2 | use std::{panic, sync::Arc}; | 2 | use std::sync::Arc; |
3 | 3 | ||
4 | use chalk_ir::cast::Cast; | 4 | use chalk_ir::cast::Cast; |
5 | use hir_def::{ | 5 | use hir_def::{ |
@@ -8,7 +8,7 @@ use hir_def::{ | |||
8 | use ra_db::{impl_intern_key, salsa, CrateId}; | 8 | use ra_db::{impl_intern_key, salsa, CrateId}; |
9 | use ra_prof::profile; | 9 | use ra_prof::profile; |
10 | 10 | ||
11 | use crate::{db::HirDatabase, DebruijnIndex}; | 11 | use crate::{db::HirDatabase, DebruijnIndex, Substs}; |
12 | 12 | ||
13 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; | 13 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; |
14 | 14 | ||
@@ -190,15 +190,7 @@ fn solution_from_chalk( | |||
190 | solution: chalk_solve::Solution<Interner>, | 190 | solution: chalk_solve::Solution<Interner>, |
191 | ) -> Solution { | 191 | ) -> Solution { |
192 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| { | 192 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution<Interner>>| { |
193 | let value = subst | 193 | let result = from_chalk(db, subst); |
194 | .value | ||
195 | .iter(&Interner) | ||
196 | .map(|p| match p.ty(&Interner) { | ||
197 | Some(ty) => from_chalk(db, ty.clone()), | ||
198 | None => unimplemented!(), | ||
199 | }) | ||
200 | .collect(); | ||
201 | let result = Canonical { value, num_vars: subst.binders.len(&Interner) }; | ||
202 | SolutionVariables(result) | 194 | SolutionVariables(result) |
203 | }; | 195 | }; |
204 | match solution { | 196 | match solution { |
@@ -222,7 +214,7 @@ fn solution_from_chalk( | |||
222 | } | 214 | } |
223 | 215 | ||
224 | #[derive(Clone, Debug, PartialEq, Eq)] | 216 | #[derive(Clone, Debug, PartialEq, Eq)] |
225 | pub struct SolutionVariables(pub Canonical<Vec<Ty>>); | 217 | pub struct SolutionVariables(pub Canonical<Substs>); |
226 | 218 | ||
227 | #[derive(Clone, Debug, PartialEq, Eq)] | 219 | #[derive(Clone, Debug, PartialEq, Eq)] |
228 | /// A (possible) solution for a proposed goal. | 220 | /// A (possible) solution for a proposed goal. |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index 8ef4941c0..c97b81d57 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -77,8 +77,8 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
77 | // Note: Since we're using impls_for_trait, only impls where the trait | 77 | // Note: Since we're using impls_for_trait, only impls where the trait |
78 | // can be resolved should ever reach Chalk. `impl_datum` relies on that | 78 | // can be resolved should ever reach Chalk. `impl_datum` relies on that |
79 | // and will panic if the trait can't be resolved. | 79 | // and will panic if the trait can't be resolved. |
80 | let in_deps = self.db.impls_from_deps(self.krate); | 80 | let in_deps = self.db.trait_impls_in_deps(self.krate); |
81 | let in_self = self.db.impls_in_crate(self.krate); | 81 | let in_self = self.db.trait_impls_in_crate(self.krate); |
82 | let impl_maps = [in_deps, in_self]; | 82 | let impl_maps = [in_deps, in_self]; |
83 | 83 | ||
84 | let id_to_chalk = |id: hir_def::ImplId| Impl::ImplDef(id).to_chalk(self.db); | 84 | let id_to_chalk = |id: hir_def::ImplId| Impl::ImplDef(id).to_chalk(self.db); |
@@ -87,14 +87,12 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
87 | Some(fp) => impl_maps | 87 | Some(fp) => impl_maps |
88 | .iter() | 88 | .iter() |
89 | .flat_map(|crate_impl_defs| { | 89 | .flat_map(|crate_impl_defs| { |
90 | crate_impl_defs.lookup_impl_defs_for_trait_and_ty(trait_, fp).map(id_to_chalk) | 90 | crate_impl_defs.for_trait_and_self_ty(trait_, fp).map(id_to_chalk) |
91 | }) | 91 | }) |
92 | .collect(), | 92 | .collect(), |
93 | None => impl_maps | 93 | None => impl_maps |
94 | .iter() | 94 | .iter() |
95 | .flat_map(|crate_impl_defs| { | 95 | .flat_map(|crate_impl_defs| crate_impl_defs.for_trait(trait_).map(id_to_chalk)) |
96 | crate_impl_defs.lookup_impl_defs_for_trait(trait_).map(id_to_chalk) | ||
97 | }) | ||
98 | .collect(), | 96 | .collect(), |
99 | }; | 97 | }; |
100 | 98 | ||
diff --git a/crates/ra_hir_ty/src/traits/chalk/mapping.rs b/crates/ra_hir_ty/src/traits/chalk/mapping.rs index ac82ea831..433d6aa03 100644 --- a/crates/ra_hir_ty/src/traits/chalk/mapping.rs +++ b/crates/ra_hir_ty/src/traits/chalk/mapping.rs | |||
@@ -17,7 +17,7 @@ use crate::{ | |||
17 | primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, | 17 | primitive::{FloatBitness, FloatTy, IntBitness, IntTy, Signedness}, |
18 | traits::{builtin, AssocTyValue, Canonical, Impl, Obligation}, | 18 | traits::{builtin, AssocTyValue, Canonical, Impl, Obligation}, |
19 | ApplicationTy, CallableDef, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, | 19 | ApplicationTy, CallableDef, GenericPredicate, InEnvironment, OpaqueTy, OpaqueTyId, |
20 | ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, | 20 | ProjectionPredicate, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TyKind, TypeCtor, |
21 | }; | 21 | }; |
22 | 22 | ||
23 | use super::interner::*; | 23 | use super::interner::*; |
@@ -555,22 +555,39 @@ where |