diff options
Diffstat (limited to 'crates')
117 files changed, 6137 insertions, 1919 deletions
diff --git a/crates/paths/src/lib.rs b/crates/paths/src/lib.rs index c7ce0c42f..190c50913 100644 --- a/crates/paths/src/lib.rs +++ b/crates/paths/src/lib.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | //! relative paths. | 2 | //! relative paths. |
3 | use std::{ | 3 | use std::{ |
4 | convert::{TryFrom, TryInto}, | 4 | convert::{TryFrom, TryInto}, |
5 | ops, | 5 | io, ops, |
6 | path::{Component, Path, PathBuf}, | 6 | path::{Component, Path, PathBuf}, |
7 | }; | 7 | }; |
8 | 8 | ||
@@ -46,6 +46,9 @@ impl TryFrom<&str> for AbsPathBuf { | |||
46 | } | 46 | } |
47 | 47 | ||
48 | impl AbsPathBuf { | 48 | impl AbsPathBuf { |
49 | pub fn canonicalized(path: &Path) -> io::Result<AbsPathBuf> { | ||
50 | path.canonicalize().map(|it| AbsPathBuf::try_from(it).unwrap()) | ||
51 | } | ||
49 | pub fn as_path(&self) -> &AbsPath { | 52 | pub fn as_path(&self) -> &AbsPath { |
50 | AbsPath::new_unchecked(self.0.as_path()) | 53 | AbsPath::new_unchecked(self.0.as_path()) |
51 | } | 54 | } |
diff --git a/crates/ra_assists/src/assist_context.rs b/crates/ra_assists/src/assist_context.rs index edd8255f4..ee614de72 100644 --- a/crates/ra_assists/src/assist_context.rs +++ b/crates/ra_assists/src/assist_context.rs | |||
@@ -252,7 +252,7 @@ impl AssistBuilder { | |||
252 | pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) { | 252 | pub(crate) fn rewrite(&mut self, rewriter: SyntaxRewriter) { |
253 | let node = rewriter.rewrite_root().unwrap(); | 253 | let node = rewriter.rewrite_root().unwrap(); |
254 | let new = rewriter.rewrite(&node); | 254 | let new = rewriter.rewrite(&node); |
255 | algo::diff(&node, &new).into_text_edit(&mut self.edit) | 255 | algo::diff(&node, &new).into_text_edit(&mut self.edit); |
256 | } | 256 | } |
257 | 257 | ||
258 | // FIXME: kill this API | 258 | // FIXME: kill this API |
diff --git a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs index 0197a8cf0..b4784c333 100644 --- a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs +++ b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs | |||
@@ -1,7 +1,10 @@ | |||
1 | use hir; | 1 | use hir; |
2 | use ra_syntax::{ast, AstNode, SmolStr, TextRange}; | 2 | use ra_syntax::{algo::SyntaxRewriter, ast, match_ast, AstNode, SmolStr, SyntaxNode}; |
3 | 3 | ||
4 | use crate::{utils::insert_use_statement, AssistContext, AssistId, Assists}; | 4 | use crate::{ |
5 | utils::{find_insert_use_container, insert_use_statement}, | ||
6 | AssistContext, AssistId, Assists, | ||
7 | }; | ||
5 | 8 | ||
6 | // Assist: replace_qualified_name_with_use | 9 | // Assist: replace_qualified_name_with_use |
7 | // | 10 | // |
@@ -39,16 +42,18 @@ pub(crate) fn replace_qualified_name_with_use( | |||
39 | target, | 42 | target, |
40 | |builder| { | 43 | |builder| { |
41 | let path_to_import = hir_path.mod_path().clone(); | 44 | let path_to_import = hir_path.mod_path().clone(); |
45 | let container = match find_insert_use_container(path.syntax(), ctx) { | ||
46 | Some(c) => c, | ||
47 | None => return, | ||
48 | }; | ||
42 | insert_use_statement(path.syntax(), &path_to_import, ctx, builder.text_edit_builder()); | 49 | insert_use_statement(path.syntax(), &path_to_import, ctx, builder.text_edit_builder()); |
43 | 50 | ||
44 | if let Some(last) = path.segment() { | 51 | // Now that we've brought the name into scope, re-qualify all paths that could be |
45 | // Here we are assuming the assist will provide a correct use statement | 52 | // affected (that is, all paths inside the node we added the `use` to). |
46 | // so we can delete the path qualifier | 53 | let mut rewriter = SyntaxRewriter::default(); |
47 | builder.delete(TextRange::new( | 54 | let syntax = container.either(|l| l.syntax().clone(), |r| r.syntax().clone()); |
48 | path.syntax().text_range().start(), | 55 | shorten_paths(&mut rewriter, syntax, path); |
49 | last.syntax().text_range().start(), | 56 | builder.rewrite(rewriter); |
50 | )); | ||
51 | } | ||
52 | }, | 57 | }, |
53 | ) | 58 | ) |
54 | } | 59 | } |
@@ -73,6 +78,69 @@ fn collect_hir_path_segments(path: &hir::Path) -> Option<Vec<SmolStr>> { | |||
73 | Some(ps) | 78 | Some(ps) |
74 | } | 79 | } |
75 | 80 | ||
81 | /// Adds replacements to `re` that shorten `path` in all descendants of `node`. | ||
82 | fn shorten_paths(rewriter: &mut SyntaxRewriter<'static>, node: SyntaxNode, path: ast::Path) { | ||
83 | for child in node.children() { | ||
84 | match_ast! { | ||
85 | match child { | ||
86 | // Don't modify `use` items, as this can break the `use` item when injecting a new | ||
87 | // import into the use tree. | ||
88 | ast::UseItem(_it) => continue, | ||
89 | // Don't descend into submodules, they don't have the same `use` items in scope. | ||
90 | ast::Module(_it) => continue, | ||
91 | |||
92 | ast::Path(p) => { | ||
93 | match maybe_replace_path(rewriter, p.clone(), path.clone()) { | ||
94 | Some(()) => {}, | ||
95 | None => shorten_paths(rewriter, p.syntax().clone(), path.clone()), | ||
96 | } | ||
97 | }, | ||
98 | _ => shorten_paths(rewriter, child, path.clone()), | ||
99 | } | ||
100 | } | ||
101 | } | ||
102 | } | ||
103 | |||
104 | fn maybe_replace_path( | ||
105 | rewriter: &mut SyntaxRewriter<'static>, | ||
106 | path: ast::Path, | ||
107 | target: ast::Path, | ||
108 | ) -> Option<()> { | ||
109 | if !path_eq(path.clone(), target.clone()) { | ||
110 | return None; | ||
111 | } | ||
112 | |||
113 | // Shorten `path`, leaving only its last segment. | ||
114 | if let Some(parent) = path.qualifier() { | ||
115 | rewriter.delete(parent.syntax()); | ||
116 | } | ||
117 | if let Some(double_colon) = path.coloncolon_token() { | ||
118 | rewriter.delete(&double_colon); | ||
119 | } | ||
120 | |||
121 | Some(()) | ||
122 | } | ||
123 | |||
124 | fn path_eq(lhs: ast::Path, rhs: ast::Path) -> bool { | ||
125 | let mut lhs_curr = lhs; | ||
126 | let mut rhs_curr = rhs; | ||
127 | loop { | ||
128 | match (lhs_curr.segment(), rhs_curr.segment()) { | ||
129 | (Some(lhs), Some(rhs)) if lhs.syntax().text() == rhs.syntax().text() => (), | ||
130 | _ => return false, | ||
131 | } | ||
132 | |||
133 | match (lhs_curr.qualifier(), rhs_curr.qualifier()) { | ||
134 | (Some(lhs), Some(rhs)) => { | ||
135 | lhs_curr = lhs; | ||
136 | rhs_curr = rhs; | ||
137 | } | ||
138 | (None, None) => return true, | ||
139 | _ => return false, | ||
140 | } | ||
141 | } | ||
142 | } | ||
143 | |||
76 | #[cfg(test)] | 144 | #[cfg(test)] |
77 | mod tests { | 145 | mod tests { |
78 | use crate::tests::{check_assist, check_assist_not_applicable}; | 146 | use crate::tests::{check_assist, check_assist_not_applicable}; |
@@ -83,10 +151,10 @@ mod tests { | |||
83 | fn test_replace_add_use_no_anchor() { | 151 | fn test_replace_add_use_no_anchor() { |
84 | check_assist( | 152 | check_assist( |
85 | replace_qualified_name_with_use, | 153 | replace_qualified_name_with_use, |
86 | " | 154 | r" |
87 | std::fmt::Debug<|> | 155 | std::fmt::Debug<|> |
88 | ", | 156 | ", |
89 | " | 157 | r" |
90 | use std::fmt::Debug; | 158 | use std::fmt::Debug; |
91 | 159 | ||
92 | Debug | 160 | Debug |
@@ -97,13 +165,13 @@ Debug | |||
97 | fn test_replace_add_use_no_anchor_with_item_below() { | 165 | fn test_replace_add_use_no_anchor_with_item_below() { |
98 | check_assist( | 166 | check_assist( |
99 | replace_qualified_name_with_use, | 167 | replace_qualified_name_with_use, |
100 | " | 168 | r" |
101 | std::fmt::Debug<|> | 169 | std::fmt::Debug<|> |
102 | 170 | ||
103 | fn main() { | 171 | fn main() { |
104 | } | 172 | } |
105 | ", | 173 | ", |
106 | " | 174 | r" |
107 | use std::fmt::Debug; | 175 | use std::fmt::Debug; |
108 | 176 | ||
109 | Debug | 177 | Debug |
@@ -118,13 +186,13 @@ fn main() { | |||
118 | fn test_replace_add_use_no_anchor_with_item_above() { | 186 | fn test_replace_add_use_no_anchor_with_item_above() { |
119 | check_assist( | 187 | check_assist( |
120 | replace_qualified_name_with_use, | 188 | replace_qualified_name_with_use, |
121 | " | 189 | r" |
122 | fn main() { | 190 | fn main() { |
123 | } | 191 | } |
124 | 192 | ||
125 | std::fmt::Debug<|> | 193 | std::fmt::Debug<|> |
126 | ", | 194 | ", |
127 | " | 195 | r" |
128 | use std::fmt::Debug; | 196 | use std::fmt::Debug; |
129 | 197 | ||
130 | fn main() { | 198 | fn main() { |
@@ -139,10 +207,10 @@ Debug | |||
139 | fn test_replace_add_use_no_anchor_2seg() { | 207 | fn test_replace_add_use_no_anchor_2seg() { |
140 | check_assist( | 208 | check_assist( |
141 | replace_qualified_name_with_use, | 209 | replace_qualified_name_with_use, |
142 | " | 210 | r" |
143 | std::fmt<|>::Debug | 211 | std::fmt<|>::Debug |
144 | ", | 212 | ", |
145 | " | 213 | r" |
146 | use std::fmt; | 214 | use std::fmt; |
147 | 215 | ||
148 | fmt::Debug | 216 | fmt::Debug |
@@ -154,13 +222,13 @@ fmt::Debug | |||
154 | fn test_replace_add_use() { | 222 | fn test_replace_add_use() { |
155 | check_assist( | 223 | check_assist( |
156 | replace_qualified_name_with_use, | 224 | replace_qualified_name_with_use, |
157 | " | 225 | r" |
158 | use stdx; | 226 | use stdx; |
159 | 227 | ||
160 | impl std::fmt::Debug<|> for Foo { | 228 | impl std::fmt::Debug<|> for Foo { |
161 | } | 229 | } |
162 | ", | 230 | ", |
163 | " | 231 | r" |
164 | use stdx; | 232 | use stdx; |
165 | use std::fmt::Debug; | 233 | use std::fmt::Debug; |
166 | 234 | ||
@@ -174,11 +242,11 @@ impl Debug for Foo { | |||
174 | fn test_replace_file_use_other_anchor() { | 242 | fn test_replace_file_use_other_anchor() { |
175 | check_assist( | 243 | check_assist( |
176 | replace_qualified_name_with_use, | 244 | replace_qualified_name_with_use, |
177 | " | 245 | r" |
178 | impl std::fmt::Debug<|> for Foo { | 246 | impl std::fmt::Debug<|> for Foo { |
179 | } | 247 | } |
180 | ", | 248 | ", |
181 | " | 249 | r" |
182 | use std::fmt::Debug; | 250 | use std::fmt::Debug; |
183 | 251 | ||
184 | impl Debug for Foo { | 252 | impl Debug for Foo { |
@@ -191,11 +259,11 @@ impl Debug for Foo { | |||
191 | fn test_replace_add_use_other_anchor_indent() { | 259 | fn test_replace_add_use_other_anchor_indent() { |
192 | check_assist( | 260 | check_assist( |
193 | replace_qualified_name_with_use, | 261 | replace_qualified_name_with_use, |
194 | " | 262 | r" |
195 | impl std::fmt::Debug<|> for Foo { | 263 | impl std::fmt::Debug<|> for Foo { |
196 | } | 264 | } |
197 | ", | 265 | ", |
198 | " | 266 | r" |
199 | use std::fmt::Debug; | 267 | use std::fmt::Debug; |
200 | 268 | ||
201 | impl Debug for Foo { | 269 | impl Debug for Foo { |
@@ -208,13 +276,13 @@ impl Debug for Foo { | |||
208 | fn test_replace_split_different() { | 276 | fn test_replace_split_different() { |
209 | check_assist( | 277 | check_assist( |
210 | replace_qualified_name_with_use, | 278 | replace_qualified_name_with_use, |
211 | " | 279 | r" |
212 | use std::fmt; | 280 | use std::fmt; |
213 | 281 | ||
214 | impl std::io<|> for Foo { | 282 | impl std::io<|> for Foo { |
215 | } | 283 | } |
216 | ", | 284 | ", |
217 | " | 285 | r" |
218 | use std::{io, fmt}; | 286 | use std::{io, fmt}; |
219 | 287 | ||
220 | impl io for Foo { | 288 | impl io for Foo { |
@@ -227,13 +295,13 @@ impl io for Foo { | |||
227 | fn test_replace_split_self_for_use() { | 295 | fn test_replace_split_self_for_use() { |
228 | check_assist( | 296 | check_assist( |
229 | replace_qualified_name_with_use, | 297 | replace_qualified_name_with_use, |
230 | " | 298 | r" |
231 | use std::fmt; | 299 | use std::fmt; |
232 | 300 | ||
233 | impl std::fmt::Debug<|> for Foo { | 301 | impl std::fmt::Debug<|> for Foo { |
234 | } | 302 | } |
235 | ", | 303 | ", |
236 | " | 304 | r" |
237 | use std::fmt::{self, Debug, }; | 305 | use std::fmt::{self, Debug, }; |
238 | 306 | ||
239 | impl Debug for Foo { | 307 | impl Debug for Foo { |
@@ -246,13 +314,13 @@ impl Debug for Foo { | |||
246 | fn test_replace_split_self_for_target() { | 314 | fn test_replace_split_self_for_target() { |
247 | check_assist( | 315 | check_assist( |
248 | replace_qualified_name_with_use, | 316 | replace_qualified_name_with_use, |
249 | " | 317 | r" |
250 | use std::fmt::Debug; | 318 | use std::fmt::Debug; |
251 | 319 | ||
252 | impl std::fmt<|> for Foo { | 320 | impl std::fmt<|> for Foo { |
253 | } | 321 | } |
254 | ", | 322 | ", |
255 | " | 323 | r" |
256 | use std::fmt::{self, Debug}; | 324 | use std::fmt::{self, Debug}; |
257 | 325 | ||
258 | impl fmt for Foo { | 326 | impl fmt for Foo { |
@@ -265,13 +333,13 @@ impl fmt for Foo { | |||
265 | fn test_replace_add_to_nested_self_nested() { | 333 | fn test_replace_add_to_nested_self_nested() { |
266 | check_assist( | 334 | check_assist( |
267 | replace_qualified_name_with_use, | 335 | replace_qualified_name_with_use, |
268 | " | 336 | r" |
269 | use std::fmt::{Debug, nested::{Display}}; | 337 | use std::fmt::{Debug, nested::{Display}}; |
270 | 338 | ||
271 | impl std::fmt::nested<|> for Foo { | 339 | impl std::fmt::nested<|> for Foo { |
272 | } | 340 | } |
273 | ", | 341 | ", |
274 | " | 342 | r" |
275 | use std::fmt::{Debug, nested::{Display, self}}; | 343 | use std::fmt::{Debug, nested::{Display, self}}; |
276 | 344 | ||
277 | impl nested for Foo { | 345 | impl nested for Foo { |
@@ -284,13 +352,13 @@ impl nested for Foo { | |||
284 | fn test_replace_add_to_nested_self_already_included() { | 352 | fn test_replace_add_to_nested_self_already_included() { |
285 | check_assist( | 353 | check_assist( |
286 | replace_qualified_name_with_use, | 354 | replace_qualified_name_with_use, |
287 | " | 355 | r" |
288 | use std::fmt::{Debug, nested::{self, Display}}; | 356 | use std::fmt::{Debug, nested::{self, Display}}; |
289 | 357 | ||
290 | impl std::fmt::nested<|> for Foo { | 358 | impl std::fmt::nested<|> for Foo { |
291 | } | 359 | } |
292 | ", | 360 | ", |
293 | " | 361 | r" |
294 | use std::fmt::{Debug, nested::{self, Display}}; | 362 | use std::fmt::{Debug, nested::{self, Display}}; |
295 | 363 | ||
296 | impl nested for Foo { | 364 | impl nested for Foo { |
@@ -303,13 +371,13 @@ impl nested for Foo { | |||
303 | fn test_replace_add_to_nested_nested() { | 371 | fn test_replace_add_to_nested_nested() { |
304 | check_assist( | 372 | check_assist( |
305 | replace_qualified_name_with_use, | 373 | replace_qualified_name_with_use, |
306 | " | 374 | r" |
307 | use std::fmt::{Debug, nested::{Display}}; | 375 | use std::fmt::{Debug, nested::{Display}}; |
308 | 376 | ||
309 | impl std::fmt::nested::Debug<|> for Foo { | 377 | impl std::fmt::nested::Debug<|> for Foo { |
310 | } | 378 | } |
311 | ", | 379 | ", |
312 | " | 380 | r" |
313 | use std::fmt::{Debug, nested::{Display, Debug}}; | 381 | use std::fmt::{Debug, nested::{Display, Debug}}; |
314 | 382 | ||
315 | impl Debug for Foo { | 383 | impl Debug for Foo { |
@@ -322,13 +390,13 @@ impl Debug for Foo { | |||
322 | fn test_replace_split_common_target_longer() { | 390 | fn test_replace_split_common_target_longer() { |
323 | check_assist( | 391 | check_assist( |
324 | replace_qualified_name_with_use, | 392 | replace_qualified_name_with_use, |
325 | " | 393 | r" |
326 | use std::fmt::Debug; | 394 | use std::fmt::Debug; |
327 | 395 | ||
328 | impl std::fmt::nested::Display<|> for Foo { | 396 | impl std::fmt::nested::Display<|> for Foo { |
329 | } | 397 | } |
330 | ", | 398 | ", |
331 | " | 399 | r" |
332 | use std::fmt::{nested::Display, Debug}; | 400 | use std::fmt::{nested::Display, Debug}; |
333 | 401 | ||
334 | impl Display for Foo { | 402 | impl Display for Foo { |
@@ -341,13 +409,13 @@ impl Display for Foo { | |||
341 | fn test_replace_split_common_use_longer() { | 409 | fn test_replace_split_common_use_longer() { |
342 | check_assist( | 410 | check_assist( |
343 | replace_qualified_name_with_use, | 411 | replace_qualified_name_with_use, |
344 | " | 412 | r" |
345 | use std::fmt::nested::Debug; | 413 | use std::fmt::nested::Debug; |
346 | 414 | ||
347 | impl std::fmt::Display<|> for Foo { | 415 | impl std::fmt::Display<|> for Foo { |
348 | } | 416 | } |
349 | ", | 417 | ", |
350 | " | 418 | r" |
351 | use std::fmt::{Display, nested::Debug}; | 419 | use std::fmt::{Display, nested::Debug}; |
352 | 420 | ||
353 | impl Display for Foo { | 421 | impl Display for Foo { |
@@ -360,7 +428,7 @@ impl Display for Foo { | |||
360 | fn test_replace_use_nested_import() { | 428 | fn test_replace_use_nested_import() { |
361 | check_assist( | 429 | check_assist( |
362 | replace_qualified_name_with_use, | 430 | replace_qualified_name_with_use, |
363 | " | 431 | r" |
364 | use crate::{ | 432 | use crate::{ |
365 | ty::{Substs, Ty}, | 433 | ty::{Substs, Ty}, |
366 | AssocItem, | 434 | AssocItem, |
@@ -368,7 +436,7 @@ use crate::{ | |||
368 | 436 | ||
369 | fn foo() { crate::ty::lower<|>::trait_env() } | 437 | fn foo() { crate::ty::lower<|>::trait_env() } |
370 | ", | 438 | ", |
371 | " | 439 | r" |
372 | use crate::{ | 440 | use crate::{ |
373 | ty::{Substs, Ty, lower}, | 441 | ty::{Substs, Ty, lower}, |
374 | AssocItem, | 442 | AssocItem, |
@@ -383,13 +451,13 @@ fn foo() { lower::trait_env() } | |||
383 | fn test_replace_alias() { | 451 | fn test_replace_alias() { |
384 | check_assist( | 452 | check_assist( |
385 | replace_qualified_name_with_use, | 453 | replace_qualified_name_with_use, |
386 | " | 454 | r" |
387 | use std::fmt as foo; | 455 | use std::fmt as foo; |
388 | 456 | ||
389 | impl foo::Debug<|> for Foo { | 457 | impl foo::Debug<|> for Foo { |
390 | } | 458 | } |
391 | ", | 459 | ", |
392 | " | 460 | r" |
393 | use std::fmt as foo; | 461 | use std::fmt as foo; |
394 | 462 | ||
395 | impl Debug for Foo { | 463 | impl Debug for Foo { |
@@ -402,7 +470,7 @@ impl Debug for Foo { | |||
402 | fn test_replace_not_applicable_one_segment() { | 470 | fn test_replace_not_applicable_one_segment() { |
403 | check_assist_not_applicable( | 471 | check_assist_not_applicable( |
404 | replace_qualified_name_with_use, | 472 | replace_qualified_name_with_use, |
405 | " | 473 | r" |
406 | impl foo<|> for Foo { | 474 | impl foo<|> for Foo { |
407 | } | 475 | } |
408 | ", | 476 | ", |
@@ -413,7 +481,7 @@ impl foo<|> for Foo { | |||
413 | fn test_replace_not_applicable_in_use() { | 481 | fn test_replace_not_applicable_in_use() { |
414 | check_assist_not_applicable( | 482 | check_assist_not_applicable( |
415 | replace_qualified_name_with_use, | 483 | replace_qualified_name_with_use, |
416 | " | 484 | r" |
417 | use std::fmt<|>; | 485 | use std::fmt<|>; |
418 | ", | 486 | ", |
419 | ); | 487 | ); |
@@ -423,14 +491,14 @@ use std::fmt<|>; | |||
423 | fn test_replace_add_use_no_anchor_in_mod_mod() { | 491 | fn test_replace_add_use_no_anchor_in_mod_mod() { |
424 | check_assist( | 492 | check_assist( |
425 | replace_qualified_name_with_use, | 493 | replace_qualified_name_with_use, |
426 | " | 494 | r" |
427 | mod foo { | 495 | mod foo { |
428 | mod bar { | 496 | mod bar { |
429 | std::fmt::Debug<|> | 497 | std::fmt::Debug<|> |
430 | } | 498 | } |
431 | } | 499 | } |
432 | ", | 500 | ", |
433 | " | 501 | r" |
434 | mod foo { | 502 | mod foo { |
435 | mod bar { | 503 | mod bar { |
436 | use std::fmt::Debug; | 504 | use std::fmt::Debug; |
@@ -446,14 +514,14 @@ mod foo { | |||
446 | fn inserts_imports_after_inner_attributes() { | 514 | fn inserts_imports_after_inner_attributes() { |
447 | check_assist( | 515 | check_assist( |
448 | replace_qualified_name_with_use, | 516 | replace_qualified_name_with_use, |
449 | " | 517 | r" |
450 | #![allow(dead_code)] | 518 | #![allow(dead_code)] |
451 | 519 | ||
452 | fn main() { | 520 | fn main() { |
453 | std::fmt::Debug<|> | 521 | std::fmt::Debug<|> |
454 | } | 522 | } |
455 | ", | 523 | ", |
456 | " | 524 | r" |
457 | #![allow(dead_code)] | 525 | #![allow(dead_code)] |
458 | use std::fmt::Debug; | 526 | use std::fmt::Debug; |
459 | 527 | ||
@@ -463,4 +531,116 @@ fn main() { | |||
463 | ", | 531 | ", |
464 | ); | 532 | ); |
465 | } | 533 | } |
534 | |||
535 | #[test] | ||
536 | fn replaces_all_affected_paths() { | ||
537 | check_assist( | ||
538 | replace_qualified_name_with_use, | ||
539 | r" | ||
540 | fn main() { | ||
541 | std::fmt::Debug<|>; | ||
542 | let x: std::fmt::Debug = std::fmt::Debug; | ||
543 | } | ||
544 | ", | ||
545 | r" | ||
546 | use std::fmt::Debug; | ||
547 | |||
548 | fn main() { | ||
549 | Debug; | ||
550 | let x: Debug = Debug; | ||
551 | } | ||
552 | ", | ||
553 | ); | ||
554 | } | ||
555 | |||
556 | #[test] | ||
557 | fn replaces_all_affected_paths_mod() { | ||
558 | check_assist( | ||
559 | replace_qualified_name_with_use, | ||
560 | r" | ||
561 | mod m { | ||
562 | fn f() { | ||
563 | std::fmt::Debug<|>; | ||
564 | let x: std::fmt::Debug = std::fmt::Debug; | ||
565 | } | ||
566 | fn g() { | ||
567 | std::fmt::Debug; | ||
568 | } | ||
569 | } | ||
570 | |||
571 | fn f() { | ||
572 | std::fmt::Debug; | ||
573 | } | ||
574 | ", | ||
575 | r" | ||
576 | mod m { | ||
577 | use std::fmt::Debug; | ||
578 | |||
579 | fn f() { | ||
580 | Debug; | ||
581 | let x: Debug = Debug; | ||
582 | } | ||
583 | fn g() { | ||
584 | Debug; | ||
585 | } | ||
586 | } | ||
587 | |||
588 | fn f() { | ||
589 | std::fmt::Debug; | ||
590 | } | ||
591 | ", | ||
592 | ); | ||
593 | } | ||
594 | |||
595 | #[test] | ||
596 | fn does_not_replace_in_submodules() { | ||
597 | check_assist( | ||
598 | replace_qualified_name_with_use, | ||
599 | r" | ||
600 | fn main() { | ||
601 | std::fmt::Debug<|>; | ||
602 | } | ||
603 | |||
604 | mod sub { | ||
605 | fn f() { | ||
606 | std::fmt::Debug; | ||
607 | } | ||
608 | } | ||
609 | ", | ||
610 | r" | ||
611 | use std::fmt::Debug; | ||
612 | |||
613 | fn main() { | ||
614 | Debug; | ||
615 | } | ||
616 | |||
617 | mod sub { | ||
618 | fn f() { | ||
619 | std::fmt::Debug; | ||
620 | } | ||
621 | } | ||
622 | ", | ||
623 | ); | ||
624 | } | ||
625 | |||
626 | #[test] | ||
627 | fn does_not_replace_in_use() { | ||
628 | check_assist( | ||
629 | replace_qualified_name_with_use, | ||
630 | r" | ||
631 | use std::fmt::Display; | ||
632 | |||
633 | fn main() { | ||
634 | std::fmt<|>; | ||
635 | } | ||
636 | ", | ||
637 | r" | ||
638 | use std::fmt::{self, Display}; | ||
639 | |||
640 | fn main() { | ||
641 | fmt; | ||
642 | } | ||
643 | ", | ||
644 | ); | ||
645 | } | ||
466 | } | 646 | } |
diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs index 0038a9764..c1ff0de7b 100644 --- a/crates/ra_assists/src/utils.rs +++ b/crates/ra_assists/src/utils.rs | |||
@@ -13,7 +13,7 @@ use rustc_hash::FxHashSet; | |||
13 | 13 | ||
14 | use crate::assist_config::SnippetCap; | 14 | use crate::assist_config::SnippetCap; |
15 | 15 | ||
16 | pub(crate) use insert_use::insert_use_statement; | 16 | pub(crate) use insert_use::{find_insert_use_container, insert_use_statement}; |
17 | 17 | ||
18 | #[derive(Clone, Copy, Debug)] | 18 | #[derive(Clone, Copy, Debug)] |
19 | pub(crate) enum Cursor<'a> { | 19 | pub(crate) enum Cursor<'a> { |
diff --git a/crates/ra_assists/src/utils/insert_use.rs b/crates/ra_assists/src/utils/insert_use.rs index 0ee43482f..8c4f33e59 100644 --- a/crates/ra_assists/src/utils/insert_use.rs +++ b/crates/ra_assists/src/utils/insert_use.rs | |||
@@ -12,6 +12,20 @@ use ra_syntax::{ | |||
12 | use ra_text_edit::TextEditBuilder; | 12 | use ra_text_edit::TextEditBuilder; |
13 | 13 | ||
14 | use crate::assist_context::AssistContext; | 14 | use crate::assist_context::AssistContext; |
15 | use either::Either; | ||
16 | |||
17 | /// Determines the containing syntax node in which to insert a `use` statement affecting `position`. | ||
18 | pub(crate) fn find_insert_use_container( | ||
19 | position: &SyntaxNode, | ||
20 | ctx: &AssistContext, | ||
21 | ) -> Option<Either<ast::ItemList, ast::SourceFile>> { | ||
22 | ctx.sema.ancestors_with_macros(position.clone()).find_map(|n| { | ||
23 | if let Some(module) = ast::Module::cast(n.clone()) { | ||
24 | return module.item_list().map(|it| Either::Left(it)); | ||
25 | } | ||
26 | Some(Either::Right(ast::SourceFile::cast(n)?)) | ||
27 | }) | ||
28 | } | ||
15 | 29 | ||
16 | /// Creates and inserts a use statement for the given path to import. | 30 | /// Creates and inserts a use statement for the given path to import. |
17 | /// The use statement is inserted in the scope most appropriate to the | 31 | /// The use statement is inserted in the scope most appropriate to the |
@@ -24,15 +38,11 @@ pub(crate) fn insert_use_statement( | |||
24 | builder: &mut TextEditBuilder, | 38 | builder: &mut TextEditBuilder, |
25 | ) { | 39 | ) { |
26 | let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>(); | 40 | let target = path_to_import.to_string().split("::").map(SmolStr::new).collect::<Vec<_>>(); |
27 | let container = ctx.sema.ancestors_with_macros(position.clone()).find_map(|n| { | 41 | let container = find_insert_use_container(position, ctx); |
28 | if let Some(module) = ast::Module::cast(n.clone()) { | ||
29 | return module.item_list().map(|it| it.syntax().clone()); | ||
30 | } | ||
31 | ast::SourceFile::cast(n).map(|it| it.syntax().clone()) | ||
32 | }); | ||
33 | 42 | ||
34 | if let Some(container) = container { | 43 | if let Some(container) = container { |
35 | let action = best_action_for_target(container, position.clone(), &target); | 44 | let syntax = container.either(|l| l.syntax().clone(), |r| r.syntax().clone()); |
45 | let action = best_action_for_target(syntax, position.clone(), &target); | ||
36 | make_assist(&action, &target, builder); | 46 | make_assist(&action, &target, builder); |
37 | } | 47 | } |
38 | } | 48 | } |
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs index bf26048f2..e6af99035 100644 --- a/crates/ra_db/src/input.rs +++ b/crates/ra_db/src/input.rs | |||
@@ -254,12 +254,12 @@ impl CrateGraph { | |||
254 | return false; | 254 | return false; |
255 | } | 255 | } |
256 | 256 | ||
257 | if target == from { | ||
258 | return true; | ||
259 | } | ||
260 | |||
257 | for dep in &self[from].dependencies { | 261 | for dep in &self[from].dependencies { |
258 | let crate_id = dep.crate_id; | 262 | let crate_id = dep.crate_id; |
259 | if crate_id == target { | ||
260 | return true; | ||
261 | } | ||
262 | |||
263 | if self.dfs_find(target, crate_id, visited) { | 263 | if self.dfs_find(target, crate_id, visited) { |
264 | return true; | 264 | return true; |
265 | } | 265 | } |
@@ -369,7 +369,7 @@ mod tests { | |||
369 | use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; | 369 | use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; |
370 | 370 | ||
371 | #[test] | 371 | #[test] |
372 | fn it_should_panic_because_of_cycle_dependencies() { | 372 | fn detect_cyclic_dependency_indirect() { |
373 | let mut graph = CrateGraph::default(); | 373 | let mut graph = CrateGraph::default(); |
374 | let crate1 = graph.add_crate_root( | 374 | let crate1 = graph.add_crate_root( |
375 | FileId(1u32), | 375 | FileId(1u32), |
@@ -404,6 +404,31 @@ mod tests { | |||
404 | } | 404 | } |
405 | 405 | ||
406 | #[test] | 406 | #[test] |
407 | fn detect_cyclic_dependency_direct() { | ||
408 | let mut graph = CrateGraph::default(); | ||
409 | let crate1 = graph.add_crate_root( | ||
410 | FileId(1u32), | ||
411 | Edition2018, | ||
412 | None, | ||
413 | CfgOptions::default(), | ||
414 | Env::default(), | ||
415 | Default::default(), | ||
416 | Default::default(), | ||
417 | ); | ||
418 | let crate2 = graph.add_crate_root( | ||
419 | FileId(2u32), | ||
420 | Edition2018, | ||
421 | None, | ||
422 | CfgOptions::default(), | ||
423 | Env::default(), | ||
424 | Default::default(), | ||
425 | Default::default(), | ||
426 | ); | ||
427 | assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); | ||
428 | assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err()); | ||
429 | } | ||
430 | |||
431 | #[test] | ||
407 | fn it_works() { | 432 | fn it_works() { |
408 | let mut graph = CrateGraph::default(); | 433 | let mut graph = CrateGraph::default(); |
409 | let crate1 = graph.add_crate_root( | 434 | let crate1 = graph.add_crate_root( |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index 1a9f6cc76..ffd5278ec 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -26,8 +26,8 @@ use hir_ty::{ | |||
26 | autoderef, | 26 | autoderef, |
27 | display::{HirDisplayError, HirFormatter}, | 27 | display::{HirDisplayError, HirFormatter}, |
28 | expr::ExprValidator, | 28 | expr::ExprValidator, |
29 | method_resolution, ApplicationTy, Canonical, InEnvironment, Substs, TraitEnvironment, Ty, | 29 | method_resolution, ApplicationTy, Canonical, GenericPredicate, InEnvironment, Substs, |
30 | TyDefId, TypeCtor, | 30 | TraitEnvironment, Ty, TyDefId, TypeCtor, |
31 | }; | 31 | }; |
32 | use ra_db::{CrateId, CrateName, Edition, FileId}; | 32 | use ra_db::{CrateId, CrateName, Edition, FileId}; |
33 | use ra_prof::profile; | 33 | use ra_prof::profile; |
@@ -186,6 +186,22 @@ impl ModuleDef { | |||
186 | 186 | ||
187 | module.visibility_of(db, self) | 187 | module.visibility_of(db, self) |
188 | } | 188 | } |
189 | |||
190 | pub fn name(self, db: &dyn HirDatabase) -> Option<Name> { | ||
191 | match self { | ||
192 | ModuleDef::Adt(it) => Some(it.name(db)), | ||
193 | ModuleDef::Trait(it) => Some(it.name(db)), | ||
194 | ModuleDef::Function(it) => Some(it.name(db)), | ||
195 | ModuleDef::EnumVariant(it) => Some(it.name(db)), | ||
196 | ModuleDef::TypeAlias(it) => Some(it.name(db)), | ||
197 | |||
198 | ModuleDef::Module(it) => it.name(db), | ||
199 | ModuleDef::Const(it) => it.name(db), | ||
200 | ModuleDef::Static(it) => it.name(db), | ||
201 | |||
202 | ModuleDef::BuiltinType(it) => Some(it.as_name()), | ||
203 | } | ||
204 | } | ||
189 | } | 205 | } |
190 | 206 | ||
191 | pub use hir_def::{ | 207 | pub use hir_def::{ |
@@ -1359,6 +1375,27 @@ impl Type { | |||
1359 | Some(adt.into()) | 1375 | Some(adt.into()) |
1360 | } | 1376 | } |
1361 | 1377 | ||
1378 | pub fn as_dyn_trait(&self) -> Option<Trait> { | ||
1379 | self.ty.value.dyn_trait().map(Into::into) | ||
1380 | } | ||
1381 | |||
1382 | pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<Vec<Trait>> { | ||
1383 | self.ty.value.impl_trait_bounds(db).map(|it| { | ||
1384 | it.into_iter() | ||
1385 | .filter_map(|pred| match pred { | ||
1386 | hir_ty::GenericPredicate::Implemented(trait_ref) => { | ||
1387 | Some(Trait::from(trait_ref.trait_)) | ||
1388 | } | ||
1389 | _ => None, | ||
1390 | }) | ||
1391 | .collect() | ||
1392 | }) | ||
1393 | } | ||
1394 | |||
1395 | pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> { | ||
1396 | self.ty.value.associated_type_parent_trait(db).map(Into::into) | ||
1397 | } | ||
1398 | |||
1362 | // FIXME: provide required accessors such that it becomes implementable from outside. | 1399 | // FIXME: provide required accessors such that it becomes implementable from outside. |
1363 | pub fn is_equal_for_find_impls(&self, other: &Type) -> bool { | 1400 | pub fn is_equal_for_find_impls(&self, other: &Type) -> bool { |
1364 | match (&self.ty.value, &other.ty.value) { | 1401 | match (&self.ty.value, &other.ty.value) { |
@@ -1380,6 +1417,80 @@ impl Type { | |||
1380 | ty: InEnvironment { value: ty, environment: self.ty.environment.clone() }, | 1417 | ty: InEnvironment { value: ty, environment: self.ty.environment.clone() }, |
1381 | } | 1418 | } |
1382 | } | 1419 | } |
1420 | |||
1421 | pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) { | ||
1422 | // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself. | ||
1423 | // We need a different order here. | ||
1424 | |||
1425 | fn walk_substs( | ||
1426 | db: &dyn HirDatabase, | ||
1427 | type_: &Type, | ||
1428 | substs: &Substs, | ||
1429 | cb: &mut impl FnMut(Type), | ||
1430 | ) { | ||
1431 | for ty in substs.iter() { | ||
1432 | walk_type(db, &type_.derived(ty.clone()), cb); | ||
1433 | } | ||
1434 | } | ||
1435 | |||
1436 | fn walk_bounds( | ||
1437 | db: &dyn HirDatabase, | ||
1438 | type_: &Type, | ||
1439 | bounds: &[GenericPredicate], | ||
1440 | cb: &mut impl FnMut(Type), | ||
1441 | ) { | ||
1442 | for pred in bounds { | ||
1443 | match pred { | ||
1444 | GenericPredicate::Implemented(trait_ref) => { | ||
1445 | cb(type_.clone()); | ||
1446 | walk_substs(db, type_, &trait_ref.substs, cb); | ||
1447 | } | ||
1448 | _ => (), | ||
1449 | } | ||
1450 | } | ||
1451 | } | ||
1452 | |||
1453 | fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) { | ||
1454 | let ty = type_.ty.value.strip_references(); | ||
1455 | match ty { | ||
1456 | Ty::Apply(ApplicationTy { ctor, parameters }) => { | ||
1457 | match ctor { | ||
1458 | TypeCtor::Adt(_) => { | ||
1459 | cb(type_.derived(ty.clone())); | ||
1460 | } | ||
1461 | TypeCtor::AssociatedType(_) => { | ||
1462 | if let Some(_) = ty.associated_type_parent_trait(db) { | ||
1463 | cb(type_.derived(ty.clone())); | ||
1464 | } | ||
1465 | } | ||
1466 | _ => (), | ||
1467 | } | ||
1468 | |||
1469 | // adt params, tuples, etc... | ||
1470 | walk_substs(db, type_, parameters, cb); | ||
1471 | } | ||
1472 | Ty::Opaque(opaque_ty) => { | ||
1473 | if let Some(bounds) = ty.impl_trait_bounds(db) { | ||
1474 | walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); | ||
1475 | } | ||
1476 | |||
1477 | walk_substs(db, type_, &opaque_ty.parameters, cb); | ||
1478 | } | ||
1479 | Ty::Placeholder(_) => { | ||
1480 | if let Some(bounds) = ty.impl_trait_bounds(db) { | ||
1481 | walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb); | ||
1482 | } | ||
1483 | } | ||
1484 | Ty::Dyn(bounds) => { | ||
1485 | walk_bounds(db, &type_.derived(ty.clone()), bounds.as_ref(), cb); | ||
1486 | } | ||
1487 | |||
1488 | _ => (), | ||
1489 | } | ||
1490 | } | ||
1491 | |||
1492 | walk_type(db, self, &mut cb); | ||
1493 | } | ||
1383 | } | 1494 | } |
1384 | 1495 | ||
1385 | impl HirDisplay for Type { | 1496 | impl HirDisplay for Type { |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs index b6b665de1..b25dac28e 100644 --- a/crates/ra_hir/src/db.rs +++ b/crates/ra_hir/src/db.rs | |||
@@ -16,7 +16,7 @@ pub use hir_expand::db::{ | |||
16 | pub use hir_ty::db::{ | 16 | pub use hir_ty::db::{ |
17 | AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery, | 17 | AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery, |
18 | GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase, | 18 | GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase, |
19 | HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsForTraitQuery, | 19 | HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsFromDepsQuery, |
20 | ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery, | 20 | ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery, |
21 | InternTypeCtorQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery, | 21 | InternTypeCtorQuery, InternTypeParamIdQuery, ReturnTypeImplTraitsQuery, StructDatumQuery, |
22 | TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery, | 22 | TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery, |
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs index a232a5856..6a49c424a 100644 --- a/crates/ra_hir/src/semantics.rs +++ b/crates/ra_hir/src/semantics.rs | |||
@@ -6,9 +6,9 @@ use std::{cell::RefCell, fmt, iter::successors}; | |||
6 | 6 | ||
7 | use hir_def::{ | 7 | use hir_def::{ |
8 | resolver::{self, HasResolver, Resolver}, | 8 | resolver::{self, HasResolver, Resolver}, |
9 | AsMacroCall, TraitId, | 9 | AsMacroCall, TraitId, VariantId, |
10 | }; | 10 | }; |
11 | use hir_expand::{hygiene::Hygiene, ExpansionInfo}; | 11 | use hir_expand::{diagnostics::AstDiagnostic, hygiene::Hygiene, ExpansionInfo}; |
12 | use hir_ty::associated_type_shorthand_candidates; | 12 | use hir_ty::associated_type_shorthand_candidates; |
13 | use itertools::Itertools; | 13 | use itertools::Itertools; |
14 | use ra_db::{FileId, FileRange}; | 14 | use ra_db::{FileId, FileRange}; |
@@ -104,6 +104,13 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
104 | tree | 104 | tree |
105 | } | 105 | } |
106 | 106 | ||
107 | pub fn ast<T: AstDiagnostic + Diagnostic>(&self, d: &T) -> <T as AstDiagnostic>::AST { | ||
108 | let file_id = d.source().file_id; | ||
109 | let root = self.db.parse_or_expand(file_id).unwrap(); | ||
110 | self.cache(root, file_id); | ||
111 | d.ast(self.db) | ||
112 | } | ||
113 | |||
107 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { | 114 | pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> { |
108 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); | 115 | let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); |
109 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); | 116 | let sa = self.analyze2(macro_call.map(|it| it.syntax()), None); |
@@ -247,6 +254,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> { | |||
247 | self.analyze(path.syntax()).resolve_path(self.db, path) | 254 | self.analyze(path.syntax()).resolve_path(self.db, path) |
248 | } | 255 | } |
249 | 256 | ||
257 | pub fn resolve_variant(&self, record_lit: ast::RecordLit) -> Option<VariantId> { | ||
258 | self.analyze(record_lit.syntax()).resolve_variant(self.db, record_lit) | ||
259 | } | ||
260 | |||
250 | pub fn lower_path(&self, path: &ast::Path) -> Option<Path> { | 261 | pub fn lower_path(&self, path: &ast::Path) -> Option<Path> { |
251 | let src = self.find_file(path.syntax().clone()); | 262 | let src = self.find_file(path.syntax().clone()); |
252 | Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) | 263 | Path::from_src(path.clone(), &Hygiene::new(self.db.upcast(), src.file_id.into())) |
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs index 7c6bbea13..757d1e397 100644 --- a/crates/ra_hir/src/source_analyzer.rs +++ b/crates/ra_hir/src/source_analyzer.rs | |||
@@ -313,6 +313,16 @@ impl SourceAnalyzer { | |||
313 | })?; | 313 | })?; |
314 | Some(macro_call_id.as_file()) | 314 | Some(macro_call_id.as_file()) |
315 | } | 315 | } |
316 | |||
317 | pub(crate) fn resolve_variant( | ||
318 | &self, | ||
319 | db: &dyn HirDatabase, | ||
320 | record_lit: ast::RecordLit, | ||
321 | ) -> Option<VariantId> { | ||
322 | let infer = self.infer.as_ref()?; | ||
323 | let expr_id = self.expr_id(db, &record_lit.into())?; | ||
324 | infer.variant_resolution_for_expr(expr_id) | ||
325 | } | ||
316 | } | 326 | } |
317 | 327 | ||
318 | fn scope_for( | 328 | fn scope_for( |
diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs index e48ff38f9..661f00407 100644 --- a/crates/ra_hir_def/src/body/scope.rs +++ b/crates/ra_hir_def/src/body/scope.rs | |||
@@ -87,15 +87,13 @@ impl ExprScopes { | |||
87 | } | 87 | } |
88 | 88 | ||
89 | fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { | 89 | fn add_bindings(&mut self, body: &Body, scope: ScopeId, pat: PatId) { |
90 | match &body[pat] { | 90 | let pattern = &body[pat]; |
91 | Pat::Bind { name, .. } => { | 91 | if let Pat::Bind { name, .. } = pattern { |
92 | // bind can have a sub pattern, but it's actually not allowed | 92 | let entry = ScopeEntry { name: name.clone(), pat }; |
93 | // to bind to things in there | 93 | self.scopes[scope].entries.push(entry); |
94 | let entry = ScopeEntry { name: name.clone(), pat }; | ||
95 | self.scopes[scope].entries.push(entry) | ||
96 | } | ||
97 | p => p.walk_child_pats(|pat| self.add_bindings(body, scope, pat)), | ||
98 | } | 94 | } |
95 | |||
96 | pattern.walk_child_pats(|pat| self.add_bindings(body, scope, pat)); | ||
99 | } | 97 | } |
100 | 98 | ||
101 | fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) { | 99 | fn add_params_bindings(&mut self, body: &Body, scope: ScopeId, params: &[PatId]) { |
@@ -190,8 +188,8 @@ mod tests { | |||
190 | } | 188 | } |
191 | } | 189 | } |
192 | 190 | ||
193 | fn do_check(code: &str, expected: &[&str]) { | 191 | fn do_check(ra_fixture: &str, expected: &[&str]) { |
194 | let (off, code) = extract_offset(code); | 192 | let (off, code) = extract_offset(ra_fixture); |
195 | let code = { | 193 | let code = { |
196 | let mut buf = String::new(); | 194 | let mut buf = String::new(); |
197 | let off: usize = off.into(); | 195 | let off: usize = off.into(); |
@@ -300,6 +298,22 @@ mod tests { | |||
300 | ); | 298 | ); |
301 | } | 299 | } |
302 | 300 | ||
301 | #[test] | ||
302 | fn test_bindings_after_at() { | ||
303 | do_check( | ||
304 | r" | ||
305 | fn foo() { | ||
306 | match Some(()) { | ||
307 | opt @ Some(unit) => { | ||
308 | <|> | ||
309 | } | ||
310 | _ => {} | ||
311 | } | ||
312 | }", | ||
313 | &["opt", "unit"], | ||
314 | ); | ||
315 | } | ||
316 | |||
303 | fn do_check_local_name(code: &str, expected_offset: u32) { | 317 | fn do_check_local_name(code: &str, expected_offset: u32) { |
304 | let (off, code) = extract_offset(code); | 318 | let (off, code) = extract_offset(code); |
305 | 319 | ||
diff --git a/crates/ra_hir_def/src/diagnostics.rs b/crates/ra_hir_def/src/diagnostics.rs index 510c5e064..30db48f86 100644 --- a/crates/ra_hir_def/src/diagnostics.rs +++ b/crates/ra_hir_def/src/diagnostics.rs | |||
@@ -3,7 +3,6 @@ | |||
3 | use std::any::Any; | 3 | use std::any::Any; |
4 | 4 | ||
5 | use hir_expand::diagnostics::Diagnostic; | 5 | use hir_expand::diagnostics::Diagnostic; |
6 | use ra_db::RelativePathBuf; | ||
7 | use ra_syntax::{ast, AstPtr, SyntaxNodePtr}; | 6 | use ra_syntax::{ast, AstPtr, SyntaxNodePtr}; |
8 | 7 | ||
9 | use hir_expand::{HirFileId, InFile}; | 8 | use hir_expand::{HirFileId, InFile}; |
@@ -12,7 +11,7 @@ use hir_expand::{HirFileId, InFile}; | |||
12 | pub struct UnresolvedModule { | 11 | pub struct UnresolvedModule { |
13 | pub file: HirFileId, | 12 | pub file: HirFileId, |
14 | pub decl: AstPtr<ast::Module>, | 13 | pub decl: AstPtr<ast::Module>, |
15 | pub candidate: RelativePathBuf, | 14 | pub candidate: String, |
16 | } | 15 | } |
17 | 16 | ||
18 | impl Diagnostic for UnresolvedModule { | 17 | impl Diagnostic for UnresolvedModule { |
diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs index edc59e5a8..af2a717c9 100644 --- a/crates/ra_hir_def/src/lib.rs +++ b/crates/ra_hir_def/src/lib.rs | |||
@@ -159,7 +159,7 @@ pub struct TypeAliasId(salsa::InternId); | |||
159 | type TypeAliasLoc = AssocItemLoc<ast::TypeAliasDef>; | 159 | type TypeAliasLoc = AssocItemLoc<ast::TypeAliasDef>; |
160 | impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); | 160 | impl_intern!(TypeAliasId, TypeAliasLoc, intern_type_alias, lookup_intern_type_alias); |
161 | 161 | ||
162 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 162 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)] |
163 | pub struct ImplId(salsa::InternId); | 163 | pub struct ImplId(salsa::InternId); |
164 | type ImplLoc = ItemLoc<ast::ImplDef>; | 164 | type ImplLoc = ItemLoc<ast::ImplDef>; |
165 | impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); | 165 | impl_intern!(ImplId, ImplLoc, intern_impl, lookup_intern_impl); |
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs index f279c2ad4..b8560fdc9 100644 --- a/crates/ra_hir_def/src/nameres.rs +++ b/crates/ra_hir_def/src/nameres.rs | |||
@@ -119,13 +119,6 @@ impl Default for ModuleOrigin { | |||
119 | } | 119 | } |
120 | 120 | ||
121 | impl ModuleOrigin { | 121 | impl ModuleOrigin { |
122 | pub(crate) fn not_sure_file(file: Option<FileId>, declaration: AstId<ast::Module>) -> Self { | ||
123 | match file { | ||
124 | None => ModuleOrigin::Inline { definition: declaration }, | ||
125 | Some(definition) => ModuleOrigin::File { declaration, definition }, | ||
126 | } | ||
127 | } | ||
128 | |||
129 | fn declaration(&self) -> Option<AstId<ast::Module>> { | 122 | fn declaration(&self) -> Option<AstId<ast::Module>> { |
130 | match self { | 123 | match self { |
131 | ModuleOrigin::File { declaration: module, .. } | 124 | ModuleOrigin::File { declaration: module, .. } |
@@ -296,7 +289,6 @@ pub enum ModuleSource { | |||
296 | 289 | ||
297 | mod diagnostics { | 290 | mod diagnostics { |
298 | use hir_expand::diagnostics::DiagnosticSink; | 291 | use hir_expand::diagnostics::DiagnosticSink; |
299 | use ra_db::RelativePathBuf; | ||
300 | use ra_syntax::{ast, AstPtr}; | 292 | use ra_syntax::{ast, AstPtr}; |
301 | 293 | ||
302 | use crate::{db::DefDatabase, diagnostics::UnresolvedModule, nameres::LocalModuleId, AstId}; | 294 | use crate::{db::DefDatabase, diagnostics::UnresolvedModule, nameres::LocalModuleId, AstId}; |
@@ -306,7 +298,7 @@ mod diagnostics { | |||
306 | UnresolvedModule { | 298 | UnresolvedModule { |
307 | module: LocalModuleId, | 299 | module: LocalModuleId, |
308 | declaration: AstId<ast::Module>, | 300 | declaration: AstId<ast::Module>, |
309 | candidate: RelativePathBuf, | 301 | candidate: String, |
310 | }, | 302 | }, |
311 | } | 303 | } |
312 | 304 | ||
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs index 976e5e585..b8f6aac8f 100644 --- a/crates/ra_hir_def/src/nameres/collector.rs +++ b/crates/ra_hir_def/src/nameres/collector.rs | |||
@@ -36,8 +36,8 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: CrateDefMap) -> Cr | |||
36 | 36 | ||
37 | // populate external prelude | 37 | // populate external prelude |
38 | for dep in &crate_graph[def_map.krate].dependencies { | 38 | for dep in &crate_graph[def_map.krate].dependencies { |
39 | let dep_def_map = db.crate_def_map(dep.crate_id); | ||
40 | log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id); | 39 | log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id); |
40 | let dep_def_map = db.crate_def_map(dep.crate_id); | ||
41 | def_map.extern_prelude.insert( | 41 | def_map.extern_prelude.insert( |
42 | dep.as_name(), | 42 | dep.as_name(), |
43 | ModuleId { krate: dep.crate_id, local_id: dep_def_map.root }.into(), | 43 | ModuleId { krate: dep.crate_id, local_id: dep_def_map.root }.into(), |
@@ -825,7 +825,10 @@ impl ModCollector<'_, '_> { | |||
825 | let modules = &mut self.def_collector.def_map.modules; | 825 | let modules = &mut self.def_collector.def_map.modules; |
826 | let res = modules.alloc(ModuleData::default()); | 826 | let res = modules.alloc(ModuleData::default()); |
827 | modules[res].parent = Some(self.module_id); | 827 | modules[res].parent = Some(self.module_id); |
828 | modules[res].origin = ModuleOrigin::not_sure_file(definition, declaration); | 828 | modules[res].origin = match definition { |
829 | None => ModuleOrigin::Inline { definition: declaration }, | ||
830 | Some(definition) => ModuleOrigin::File { declaration, definition }, | ||
831 | }; | ||
829 | for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() { | 832 | for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() { |
830 | modules[res].scope.define_legacy_macro(name, mac) | 833 | modules[res].scope.define_legacy_macro(name, mac) |
831 | } | 834 | } |
diff --git a/crates/ra_hir_def/src/nameres/mod_resolution.rs b/crates/ra_hir_def/src/nameres/mod_resolution.rs index cede4a6fc..19fe0615a 100644 --- a/crates/ra_hir_def/src/nameres/mod_resolution.rs +++ b/crates/ra_hir_def/src/nameres/mod_resolution.rs | |||
@@ -44,7 +44,7 @@ impl ModDir { | |||
44 | file_id: HirFileId, | 44 | file_id: HirFileId, |
45 | name: &Name, | 45 | name: &Name, |
46 | attr_path: Option<&SmolStr>, | 46 | attr_path: Option<&SmolStr>, |
47 | ) -> Result<(FileId, ModDir), RelativePathBuf> { | 47 | ) -> Result<(FileId, ModDir), String> { |
48 | let file_id = file_id.original_file(db.upcast()); | 48 | let file_id = file_id.original_file(db.upcast()); |
49 | 49 | ||
50 | let mut candidate_files = Vec::new(); | 50 | let mut candidate_files = Vec::new(); |
@@ -52,11 +52,11 @@ impl ModDir { | |||
52 | Some(attr_path) => { | 52 | Some(attr_path) => { |
53 | let base = | 53 | let base = |
54 | if self.root_non_dir_owner { self.path.parent().unwrap() } else { &self.path }; | 54 | if self.root_non_dir_owner { self.path.parent().unwrap() } else { &self.path }; |
55 | candidate_files.push(base.join(attr_path)) | 55 | candidate_files.push(base.join(attr_path).to_string()) |
56 | } | 56 | } |
57 | None => { | 57 | None => { |
58 | candidate_files.push(self.path.join(&format!("{}.rs", name))); | 58 | candidate_files.push(self.path.join(&format!("{}.rs", name)).to_string()); |
59 | candidate_files.push(self.path.join(&format!("{}/mod.rs", name))); | 59 | candidate_files.push(self.path.join(&format!("{}/mod.rs", name)).to_string()); |
60 | } | 60 | } |
61 | }; | 61 | }; |
62 | 62 | ||
diff --git a/crates/ra_hir_ty/src/_match.rs b/crates/ra_hir_ty/src/_match.rs index fff257193..5495ce284 100644 --- a/crates/ra_hir_ty/src/_match.rs +++ b/crates/ra_hir_ty/src/_match.rs | |||
@@ -312,20 +312,16 @@ impl PatStack { | |||
312 | Self(v) | 312 | Self(v) |
313 | } | 313 | } |
314 | 314 | ||
315 | fn is_empty(&self) -> bool { | ||
316 | self.0.is_empty() | ||
317 | } | ||
318 | |||
319 | fn head(&self) -> PatIdOrWild { | ||
320 | self.0[0] | ||
321 | } | ||
322 | |||
323 | fn get_head(&self) -> Option<PatIdOrWild> { | 315 | fn get_head(&self) -> Option<PatIdOrWild> { |
324 | self.0.first().copied() | 316 | self.0.first().copied() |
325 | } | 317 | } |
326 | 318 | ||
319 | fn tail(&self) -> &[PatIdOrWild] { | ||
320 | self.0.get(1..).unwrap_or(&[]) | ||
321 | } | ||
322 | |||
327 | fn to_tail(&self) -> PatStack { | 323 | fn to_tail(&self) -> PatStack { |
328 | Self::from_slice(&self.0[1..]) | 324 | Self::from_slice(self.tail()) |
329 | } | 325 | } |
330 | 326 | ||
331 | fn replace_head_with<I, T>(&self, pats: I) -> PatStack | 327 | fn replace_head_with<I, T>(&self, pats: I) -> PatStack |
@@ -347,7 +343,7 @@ impl PatStack { | |||
347 | /// | 343 | /// |
348 | /// See the module docs and the associated documentation in rustc for details. | 344 | /// See the module docs and the associated documentation in rustc for details. |
349 | fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option<PatStack> { | 345 | fn specialize_wildcard(&self, cx: &MatchCheckCtx) -> Option<PatStack> { |
350 | if matches!(self.head().as_pat(cx), Pat::Wild) { | 346 | if matches!(self.get_head()?.as_pat(cx), Pat::Wild) { |
351 | Some(self.to_tail()) | 347 | Some(self.to_tail()) |
352 | } else { | 348 | } else { |
353 | None | 349 | None |
@@ -362,7 +358,13 @@ impl PatStack { | |||
362 | cx: &MatchCheckCtx, | 358 | cx: &MatchCheckCtx, |
363 | constructor: &Constructor, | 359 | constructor: &Constructor, |
364 | ) -> MatchCheckResult<Option<PatStack>> { | 360 | ) -> MatchCheckResult<Option<PatStack>> { |
365 | let result = match (self.head().as_pat(cx), constructor) { | 361 | let head = match self.get_head() { |
362 | Some(head) => head, | ||
363 | None => return Ok(None), | ||
364 | }; | ||
365 | |||
366 | let head_pat = head.as_pat(cx); | ||
367 | let result = match (head_pat, constructor) { | ||
366 | (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { | 368 | (Pat::Tuple { args: ref pat_ids, ellipsis }, Constructor::Tuple { arity: _ }) => { |
367 | if ellipsis.is_some() { | 369 | if ellipsis.is_some() { |
368 | // If there are ellipsis here, we should add the correct number of | 370 | // If there are ellipsis here, we should add the correct number of |
@@ -389,7 +391,7 @@ impl PatStack { | |||
389 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), | 391 | (Pat::Wild, constructor) => Some(self.expand_wildcard(cx, constructor)?), |
390 | (Pat::Path(_), Constructor::Enum(constructor)) => { | 392 | (Pat::Path(_), Constructor::Enum(constructor)) => { |
391 | // unit enum variants become `Pat::Path` | 393 | // unit enum variants become `Pat::Path` |
392 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 394 | let pat_id = head.as_id().expect("we know this isn't a wild"); |
393 | if !enum_variant_matches(cx, pat_id, *constructor) { | 395 | if !enum_variant_matches(cx, pat_id, *constructor) { |
394 | None | 396 | None |
395 | } else { | 397 | } else { |
@@ -400,7 +402,7 @@ impl PatStack { | |||
400 | Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, | 402 | Pat::TupleStruct { args: ref pat_ids, ellipsis, .. }, |
401 | Constructor::Enum(enum_constructor), | 403 | Constructor::Enum(enum_constructor), |
402 | ) => { | 404 | ) => { |
403 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 405 | let pat_id = head.as_id().expect("we know this isn't a wild"); |
404 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { | 406 | if !enum_variant_matches(cx, pat_id, *enum_constructor) { |
405 | None | 407 | None |
406 | } else { | 408 | } else { |
@@ -440,7 +442,7 @@ impl PatStack { | |||
440 | } | 442 | } |
441 | } | 443 | } |
442 | (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { | 444 | (Pat::Record { args: ref arg_patterns, .. }, Constructor::Enum(e)) => { |
443 | let pat_id = self.head().as_id().expect("we know this isn't a wild"); | 445 | let pat_id = head.as_id().expect("we know this isn't a wild"); |
444 | if !enum_variant_matches(cx, pat_id, *e) { | 446 | if !enum_variant_matches(cx, pat_id, *e) { |
445 | None | 447 | None |
446 | } else { | 448 | } else { |
@@ -486,7 +488,7 @@ impl PatStack { | |||
486 | ) -> MatchCheckResult<PatStack> { | 488 | ) -> MatchCheckResult<PatStack> { |
487 | assert_eq!( | 489 | assert_eq!( |
488 | Pat::Wild, | 490 | Pat::Wild, |
489 | self.head().as_pat(cx), | 491 | self.get_head().expect("expand_wildcard called on empty PatStack").as_pat(cx), |
490 | "expand_wildcard must only be called on PatStack with wild at head", | 492 | "expand_wildcard must only be called on PatStack with wild at head", |
491 | ); | 493 | ); |
492 | 494 | ||
@@ -504,7 +506,6 @@ impl PatStack { | |||
504 | } | 506 | } |
505 | } | 507 | } |
506 | 508 | ||
507 | #[derive(Debug)] | ||
508 | /// A collection of PatStack. | 509 | /// A collection of PatStack. |
509 | /// | 510 | /// |
510 | /// This type is modeled from the struct of the same name in `rustc`. | 511 | /// This type is modeled from the struct of the same name in `rustc`. |
@@ -531,7 +532,7 @@ impl Matrix { | |||
531 | } | 532 | } |
532 | 533 | ||
533 | fn heads(&self) -> Vec<PatIdOrWild> { | 534 | fn heads(&self) -> Vec<PatIdOrWild> { |
534 | self.0.iter().map(|p| p.head()).collect() | 535 | self.0.iter().flat_map(|p| p.get_head()).collect() |
535 | } | 536 | } |
536 | 537 | ||
537 | /// Computes `D(self)` for each contained PatStack. | 538 | /// Computes `D(self)` for each contained PatStack. |
@@ -618,13 +619,16 @@ pub(crate) fn is_useful( | |||
618 | _ => (), | 619 | _ => (), |
619 | } | 620 | } |
620 | 621 | ||
621 | if v.is_empty() { | 622 | let head = match v.get_head() { |
622 | let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; | 623 | Some(head) => head, |
624 | None => { | ||
625 | let result = if matrix.is_empty() { Usefulness::Useful } else { Usefulness::NotUseful }; | ||
623 | 626 | ||
624 | return Ok(result); | 627 | return Ok(result); |
625 | } | 628 | } |
629 | }; | ||
626 | 630 | ||
627 | if let Pat::Or(pat_ids) = v.head().as_pat(cx) { | 631 | if let Pat::Or(pat_ids) = head.as_pat(cx) { |
628 | let mut found_unimplemented = false; | 632 | let mut found_unimplemented = false; |
629 | let any_useful = pat_ids.iter().any(|&pat_id| { | 633 | let any_useful = pat_ids.iter().any(|&pat_id| { |
630 | let v = PatStack::from_pattern(pat_id); | 634 | let v = PatStack::from_pattern(pat_id); |
@@ -648,7 +652,7 @@ pub(crate) fn is_useful( | |||
648 | }; | 652 | }; |
649 | } | 653 | } |
650 | 654 | ||
651 | if let Some(constructor) = pat_constructor(cx, v.head())? { | 655 | if let Some(constructor) = pat_constructor(cx, head)? { |
652 | let matrix = matrix.specialize_constructor(&cx, &constructor)?; | 656 | let matrix = matrix.specialize_constructor(&cx, &constructor)?; |
653 | let v = v | 657 | let v = v |
654 | .specialize_constructor(&cx, &constructor)? | 658 | .specialize_constructor(&cx, &constructor)? |
@@ -837,194 +841,193 @@ mod tests { | |||
837 | 841 | ||
838 | pub(super) use crate::{diagnostics::MissingMatchArms, test_db::TestDB}; | 842 | pub(super) use crate::{diagnostics::MissingMatchArms, test_db::TestDB}; |
839 | 843 | ||
840 | pub(super) fn check_diagnostic_message(content: &str) -> String { | 844 | pub(super) fn check_diagnostic_message(ra_fixture: &str) -> String { |
841 | TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().0 | 845 | TestDB::with_single_file(ra_fixture).0.diagnostic::<MissingMatchArms>().0 |
842 | } | 846 | } |
843 | 847 | ||
844 | pub(super) fn check_diagnostic(content: &str) { | 848 | pub(super) fn check_diagnostic(ra_fixture: &str) { |
845 | let diagnostic_count = | 849 | let diagnostic_count = |
846 | TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1; | 850 | TestDB::with_single_file(ra_fixture).0.diagnostic::<MissingMatchArms>().1; |
847 | 851 | ||
848 | assert_eq!(1, diagnostic_count, "no diagnostic reported"); | 852 | assert_eq!(1, diagnostic_count, "no diagnostic reported"); |
849 | } | 853 | } |
850 | 854 | ||
851 | pub(super) fn check_no_diagnostic(content: &str) { | 855 | pub(super) fn check_no_diagnostic(ra_fixture: &str) { |
852 | let diagnostic_count = | 856 | let (s, diagnostic_count) = |
853 | TestDB::with_single_file(content).0.diagnostic::<MissingMatchArms>().1; | 857 | TestDB::with_single_file(ra_fixture).0.diagnostic::<MissingMatchArms>(); |
854 | 858 | ||
855 | assert_eq!(0, diagnostic_count, "expected no diagnostic, found one"); | 859 | assert_eq!(0, diagnostic_count, "expected no diagnostic, found one: {}", s); |
856 | } | 860 | } |
857 | 861 | ||
858 | #[test] | 862 | #[test] |
859 | fn empty_tuple_no_arms_diagnostic_message() { | 863 | fn empty_tuple_no_arms_diagnostic_message() { |
860 | let content = r" | ||
861 | fn test_fn() { | ||
862 | match () { | ||
863 | } | ||
864 | } | ||
865 | "; | ||
866 | |||
867 | assert_snapshot!( | 864 | assert_snapshot!( |
868 | check_diagnostic_message(content), | 865 | check_diagnostic_message(r" |
866 | fn test_fn() { | ||
867 | match () { | ||
868 | } | ||
869 | } | ||
870 | "), | ||
869 | @"\"()\": Missing match arm\n" | 871 | @"\"()\": Missing match arm\n" |
870 | ); | 872 | ); |
871 | } | 873 | } |
872 | 874 | ||
873 | #[test] | 875 | #[test] |
874 | fn empty_tuple_no_arms() { | 876 | fn empty_tuple_no_arms() { |
875 | let content = r" | 877 | check_diagnostic( |
878 | r" | ||
876 | fn test_fn() { | 879 | fn test_fn() { |
877 | match () { | 880 | match () { |
878 | } | 881 | } |
879 | } | 882 | } |
880 | "; | 883 | ", |
881 | 884 | ); | |
882 | check_diagnostic(content); | ||
883 | } | 885 | } |
884 | 886 | ||
885 | #[test] | 887 | #[test] |
886 | fn empty_tuple_wild() { | 888 | fn empty_tuple_wild() { |
887 | let content = r" | 889 | check_no_diagnostic( |
890 | r" | ||
888 | fn test_fn() { | 891 | fn test_fn() { |
889 | match () { | 892 | match () { |
890 | _ => {} | 893 | _ => {} |
891 | } | 894 | } |
892 | } | 895 | } |
893 | "; | 896 | ", |
894 | 897 | ); | |
895 | check_no_diagnostic(content); | ||
896 | } | 898 | } |
897 | 899 | ||
898 | #[test] | 900 | #[test] |
899 | fn empty_tuple_no_diagnostic() { | 901 | fn empty_tuple_no_diagnostic() { |
900 | let content = r" | 902 | check_no_diagnostic( |
903 | r" | ||
901 | fn test_fn() { | 904 | fn test_fn() { |
902 | match () { | 905 | match () { |
903 | () => {} | 906 | () => {} |
904 | } | 907 | } |
905 | } | 908 | } |
906 | "; | 909 | ", |
907 | 910 | ); | |
908 | check_no_diagnostic(content); | ||
909 | } | 911 | } |
910 | 912 | ||
911 | #[test] | 913 | #[test] |
912 | fn tuple_of_empty_tuple_no_arms() { | 914 | fn tuple_of_empty_tuple_no_arms() { |
913 | let content = r" | 915 | check_diagnostic( |
916 | r" | ||
914 | fn test_fn() { | 917 | fn test_fn() { |
915 | match (()) { | 918 | match (()) { |
916 | } | 919 | } |
917 | } | 920 | } |
918 | "; | 921 | ", |
919 | 922 | ); | |
920 | check_diagnostic(content); | ||
921 | } | 923 | } |
922 | 924 | ||
923 | #[test] | 925 | #[test] |
924 | fn tuple_of_empty_tuple_no_diagnostic() { | 926 | fn tuple_of_empty_tuple_no_diagnostic() { |
925 | let content = r" | 927 | check_no_diagnostic( |
928 | r" | ||
926 | fn test_fn() { | 929 | fn test_fn() { |
927 | match (()) { | 930 | match (()) { |
928 | (()) => {} | 931 | (()) => {} |
929 | } | 932 | } |
930 | } | 933 | } |
931 | "; | 934 | ", |
932 | 935 | ); | |
933 | check_no_diagnostic(content); | ||
934 | } | 936 | } |
935 | 937 | ||
936 | #[test] | 938 | #[test] |
937 | fn tuple_of_two_empty_tuple_no_arms() { | 939 | fn tuple_of_two_empty_tuple_no_arms() { |
938 | let content = r" | 940 | check_diagnostic( |
941 | r" | ||
939 | fn test_fn() { | 942 | fn test_fn() { |
940 | match ((), ()) { | 943 | match ((), ()) { |
941 | } | 944 | } |
942 | } | 945 | } |
943 | "; | 946 | ", |
944 | 947 | ); | |
945 | check_diagnostic(content); | ||
946 | } | 948 | } |
947 | 949 | ||
948 | #[test] | 950 | #[test] |
949 | fn tuple_of_two_empty_tuple_no_diagnostic() { | 951 | fn tuple_of_two_empty_tuple_no_diagnostic() { |
950 | let content = r" | 952 | check_no_diagnostic( |
953 | r" | ||
951 | fn test_fn() { | 954 | fn test_fn() { |
952 | match ((), ()) { | 955 | match ((), ()) { |
953 | ((), ()) => {} | 956 | ((), ()) => {} |
954 | } | 957 | } |
955 | } | 958 | } |
956 | "; | 959 | ", |
957 | 960 | ); | |
958 | check_no_diagnostic(content); | ||
959 | } | 961 | } |
960 | 962 | ||
961 | #[test] | 963 | #[test] |
962 | fn bool_no_arms() { | 964 | fn bool_no_arms() { |
963 | let content = r" | 965 | check_diagnostic( |
966 | r" | ||
964 | fn test_fn() { | 967 | fn test_fn() { |
965 | match false { | 968 | match false { |
966 | } | 969 | } |
967 | } | 970 | } |
968 | "; | 971 | ", |
969 | 972 | ); | |
970 | check_diagnostic(content); | ||
971 | } | 973 | } |
972 | 974 | ||
973 | #[test] | 975 | #[test] |
974 | fn bool_missing_arm() { | 976 | fn bool_missing_arm() { |
975 | let content = r" | 977 | check_diagnostic( |
978 | r" | ||
976 | fn test_fn() { | 979 | fn test_fn() { |
977 | match false { | 980 | match false { |
978 | true => {} | 981 | true => {} |
979 | } | 982 | } |
980 | } | 983 | } |
981 | "; | 984 | ", |
982 | 985 | ); | |
983 | check_diagnostic(content); | ||
984 | } | 986 | } |
985 | 987 | ||
986 | #[test] | 988 | #[test] |
987 | fn bool_no_diagnostic() { | 989 | fn bool_no_diagnostic() { |
988 | let content = r" | 990 | check_no_diagnostic( |
991 | r" | ||
989 | fn test_fn() { | 992 | fn test_fn() { |
990 | match false { | 993 | match false { |
991 | true => {} | 994 | true => {} |
992 | false => {} | 995 | false => {} |
993 | } | 996 | } |
994 | } | 997 | } |
995 | "; | 998 | ", |
996 | 999 | ); | |
997 | check_no_diagnostic(content); | ||
998 | } | 1000 | } |
999 | 1001 | ||
1000 | #[test] | 1002 | #[test] |
1001 | fn tuple_of_bools_no_arms() { | 1003 | fn tuple_of_bools_no_arms() { |
1002 | let content = r" | 1004 | check_diagnostic( |
1005 | r" | ||
1003 | fn test_fn() { | 1006 | fn test_fn() { |
1004 | match (false, true) { | 1007 | match (false, true) { |
1005 | } | 1008 | } |
1006 | } | 1009 | } |
1007 | "; | 1010 | ", |
1008 | 1011 | ); | |
1009 | check_diagnostic(content); | ||
1010 | } | 1012 | } |
1011 | 1013 | ||
1012 | #[test] | 1014 | #[test] |
1013 | fn tuple_of_bools_missing_arms() { | 1015 | fn tuple_of_bools_missing_arms() { |
1014 | let content = r" | 1016 | check_diagnostic( |
1017 | r" | ||
1015 | fn test_fn() { | 1018 | fn test_fn() { |
1016 | match (false, true) { | 1019 | match (false, true) { |
1017 | (true, true) => {}, | 1020 | (true, true) => {}, |
1018 | } | 1021 | } |
1019 | } | 1022 | } |
1020 | "; | 1023 | ", |
1021 | 1024 | ); | |
1022 | check_diagnostic(content); | ||
1023 | } | 1025 | } |
1024 | 1026 | ||
1025 | #[test] | 1027 | #[test] |
1026 | fn tuple_of_bools_missing_arm() { | 1028 | fn tuple_of_bools_missing_arm() { |
1027 | let content = r" | 1029 | check_diagnostic( |
1030 | r" | ||
1028 | fn test_fn() { | 1031 | fn test_fn() { |
1029 | match (false, true) { | 1032 | match (false, true) { |
1030 | (false, true) => {}, | 1033 | (false, true) => {}, |
@@ -1032,14 +1035,14 @@ mod tests { | |||
1032 | (true, false) => {}, | 1035 | (true, false) => {}, |
1033 | } | 1036 | } |
1034 | } | 1037 | } |
1035 | "; | 1038 | ", |
1036 | 1039 | ); | |
1037 | check_diagnostic(content); | ||
1038 | } | 1040 | } |
1039 | 1041 | ||
1040 | #[test] | 1042 | #[test] |
1041 | fn tuple_of_bools_with_wilds() { | 1043 | fn tuple_of_bools_with_wilds() { |
1042 | let content = r" | 1044 | check_no_diagnostic( |
1045 | r" | ||
1043 | fn test_fn() { | 1046 | fn test_fn() { |
1044 | match (false, true) { | 1047 | match (false, true) { |
1045 | (false, _) => {}, | 1048 | (false, _) => {}, |
@@ -1047,14 +1050,14 @@ mod tests { | |||
1047 | (_, true) => {}, | 1050 | (_, true) => {}, |
1048 | } | 1051 | } |
1049 | } | 1052 | } |
1050 | "; | 1053 | ", |
1051 | 1054 | ); | |
1052 | check_no_diagnostic(content); | ||
1053 | } | 1055 | } |
1054 | 1056 | ||
1055 | #[test] | 1057 | #[test] |
1056 | fn tuple_of_bools_no_diagnostic() { | 1058 | fn tuple_of_bools_no_diagnostic() { |
1057 | let content = r" | 1059 | check_no_diagnostic( |
1060 | r" | ||
1058 | fn test_fn() { | 1061 | fn test_fn() { |
1059 | match (false, true) { | 1062 | match (false, true) { |
1060 | (true, true) => {}, | 1063 | (true, true) => {}, |
@@ -1063,27 +1066,27 @@ mod tests { | |||
1063 | (false, false) => {}, | 1066 | (false, false) => {}, |
1064 | } | 1067 | } |
1065 | } | 1068 | } |
1066 | "; | 1069 | ", |
1067 | 1070 | ); | |
1068 | check_no_diagnostic(content); | ||
1069 | } | 1071 | } |
1070 | 1072 | ||
1071 | #[test] | 1073 | #[test] |
1072 | fn tuple_of_bools_binding_missing_arms() { | 1074 | fn tuple_of_bools_binding_missing_arms() { |
1073 | let content = r" | 1075 | check_diagnostic( |
1076 | r" | ||
1074 | fn test_fn() { | 1077 | fn test_fn() { |
1075 | match (false, true) { | 1078 | match (false, true) { |
1076 | (true, _x) => {}, | 1079 | (true, _x) => {}, |
1077 | } | 1080 | } |
1078 | } | 1081 | } |
1079 | "; | 1082 | ", |
1080 | 1083 | ); | |
1081 | check_diagnostic(content); | ||
1082 | } | 1084 | } |
1083 | 1085 | ||
1084 | #[test] | 1086 | #[test] |
1085 | fn tuple_of_bools_binding_no_diagnostic() { | 1087 | fn tuple_of_bools_binding_no_diagnostic() { |
1086 | let content = r" | 1088 | check_no_diagnostic( |
1089 | r" | ||
1087 | fn test_fn() { | 1090 | fn test_fn() { |
1088 | match (false, true) { | 1091 | match (false, true) { |
1089 | (true, _x) => {}, | 1092 | (true, _x) => {}, |
@@ -1091,80 +1094,80 @@ mod tests { | |||
1091 | (false, false) => {}, | 1094 | (false, false) => {}, |
1092 | } | 1095 | } |
1093 | } | 1096 | } |
1094 | "; | 1097 | ", |
1095 | 1098 | ); | |
1096 | check_no_diagnostic(content); | ||
1097 | } | 1099 | } |
1098 | 1100 | ||
1099 | #[test] | 1101 | #[test] |
1100 | fn tuple_of_bools_with_ellipsis_at_end_no_diagnostic() { | 1102 | fn tuple_of_bools_with_ellipsis_at_end_no_diagnostic() { |
1101 | let content = r" | 1103 | check_no_diagnostic( |
1104 | r" | ||
1102 | fn test_fn() { | 1105 | fn test_fn() { |
1103 | match (false, true, false) { | 1106 | match (false, true, false) { |
1104 | (false, ..) => {}, | 1107 | (false, ..) => {}, |
1105 | (true, ..) => {}, | 1108 | (true, ..) => {}, |
1106 | } | 1109 | } |
1107 | } | 1110 | } |
1108 | "; | 1111 | ", |
1109 | 1112 | ); | |
1110 | check_no_diagnostic(content); | ||
1111 | } | 1113 | } |
1112 | 1114 | ||
1113 | #[test] | 1115 | #[test] |
1114 | fn tuple_of_bools_with_ellipsis_at_beginning_no_diagnostic() { | 1116 | fn tuple_of_bools_with_ellipsis_at_beginning_no_diagnostic() { |
1115 | let content = r" | 1117 | check_no_diagnostic( |
1118 | r" | ||
1116 | fn test_fn() { | 1119 | fn test_fn() { |
1117 | match (false, true, false) { | 1120 | match (false, true, false) { |
1118 | (.., false) => {}, | 1121 | (.., false) => {}, |
1119 | (.., true) => {}, | 1122 | (.., true) => {}, |
1120 | } | 1123 | } |
1121 | } | 1124 | } |
1122 | "; | 1125 | ", |
1123 | 1126 | ); | |
1124 | check_no_diagnostic(content); | ||
1125 | } | 1127 | } |
1126 | 1128 | ||
1127 | #[test] | 1129 | #[test] |
1128 | fn tuple_of_bools_with_ellipsis_no_diagnostic() { | 1130 | fn tuple_of_bools_with_ellipsis_no_diagnostic() { |
1129 | let content = r" | 1131 | check_no_diagnostic( |
1132 | r" | ||
1130 | fn test_fn() { | 1133 | fn test_fn() { |
1131 | match (false, true, false) { | 1134 | match (false, true, false) { |
1132 | (..) => {}, | 1135 | (..) => {}, |
1133 | } | 1136 | } |
1134 | } | 1137 | } |
1135 | "; | 1138 | ", |
1136 | 1139 | ); | |
1137 | check_no_diagnostic(content); | ||
1138 | } | 1140 | } |
1139 | 1141 | ||
1140 | #[test] | 1142 | #[test] |
1141 | fn tuple_of_tuple_and_bools_no_arms() { | 1143 | fn tuple_of_tuple_and_bools_no_arms() { |
1142 | let content = r" | 1144 | check_diagnostic( |
1145 | r" | ||
1143 | fn test_fn() { | 1146 | fn test_fn() { |
1144 | match (false, ((), false)) { | 1147 | match (false, ((), false)) { |
1145 | } | 1148 | } |
1146 | } | 1149 | } |
1147 | "; | 1150 | ", |
1148 | 1151 | ); | |
1149 | check_diagnostic(content); | ||
1150 | } | 1152 | } |
1151 | 1153 | ||
1152 | #[test] | 1154 | #[test] |
1153 | fn tuple_of_tuple_and_bools_missing_arms() { | 1155 | fn tuple_of_tuple_and_bools_missing_arms() { |
1154 | let content = r" | 1156 | check_diagnostic( |
1157 | r" | ||
1155 | fn test_fn() { | 1158 | fn test_fn() { |
1156 | match (false, ((), false)) { | 1159 | match (false, ((), false)) { |
1157 | (true, ((), true)) => {}, | 1160 | (true, ((), true)) => {}, |
1158 | } | 1161 | } |
1159 | } | 1162 | } |
1160 | "; | 1163 | ", |
1161 | 1164 | ); | |
1162 | check_diagnostic(content); | ||
1163 | } | 1165 | } |
1164 | 1166 | ||
1165 | #[test] | 1167 | #[test] |
1166 | fn tuple_of_tuple_and_bools_no_diagnostic() { | 1168 | fn tuple_of_tuple_and_bools_no_diagnostic() { |
1167 | let content = r" | 1169 | check_no_diagnostic( |
1170 | r" | ||
1168 | fn test_fn() { | 1171 | fn test_fn() { |
1169 | match (false, ((), false)) { | 1172 | match (false, ((), false)) { |
1170 | (true, ((), true)) => {}, | 1173 | (true, ((), true)) => {}, |
@@ -1173,27 +1176,27 @@ mod tests { | |||
1173 | (false, ((), false)) => {}, | 1176 | (false, ((), false)) => {}, |
1174 | } | 1177 | } |
1175 | } | 1178 | } |
1176 | "; | 1179 | ", |
1177 | 1180 | ); | |
1178 | check_no_diagnostic(content); | ||
1179 | } | 1181 | } |
1180 | 1182 | ||
1181 | #[test] | 1183 | #[test] |
1182 | fn tuple_of_tuple_and_bools_wildcard_missing_arms() { | 1184 | fn tuple_of_tuple_and_bools_wildcard_missing_arms() { |
1183 | let content = r" | 1185 | check_diagnostic( |
1186 | r" | ||
1184 | fn test_fn() { | 1187 | fn test_fn() { |
1185 | match (false, ((), false)) { | 1188 | match (false, ((), false)) { |
1186 | (true, _) => {}, | 1189 | (true, _) => {}, |
1187 | } | 1190 | } |
1188 | } | 1191 | } |
1189 | "; | 1192 | ", |
1190 | 1193 | ); | |
1191 | check_diagnostic(content); | ||
1192 | } | 1194 | } |
1193 | 1195 | ||
1194 | #[test] | 1196 | #[test] |
1195 | fn tuple_of_tuple_and_bools_wildcard_no_diagnostic() { | 1197 | fn tuple_of_tuple_and_bools_wildcard_no_diagnostic() { |
1196 | let content = r" | 1198 | check_no_diagnostic( |
1199 | r" | ||
1197 | fn test_fn() { | 1200 | fn test_fn() { |
1198 | match (false, ((), false)) { | 1201 | match (false, ((), false)) { |
1199 | (true, ((), true)) => {}, | 1202 | (true, ((), true)) => {}, |
@@ -1201,14 +1204,14 @@ mod tests { | |||
1201 | (false, _) => {}, | 1204 | (false, _) => {}, |
1202 | } | 1205 | } |
1203 | } | 1206 | } |
1204 | "; | 1207 | ", |
1205 | 1208 | ); | |
1206 | check_no_diagnostic(content); | ||
1207 | } | 1209 | } |
1208 | 1210 | ||
1209 | #[test] | 1211 | #[test] |
1210 | fn enum_no_arms() { | 1212 | fn enum_no_arms() { |
1211 | let content = r" | 1213 | check_diagnostic( |
1214 | r" | ||
1212 | enum Either { | 1215 | enum Either { |
1213 | A, | 1216 | A, |
1214 | B, | 1217 | B, |
@@ -1217,14 +1220,14 @@ mod tests { | |||
1217 | match Either::A { | 1220 | match Either::A { |
1218 | } | 1221 | } |
1219 | } | 1222 | } |
1220 | "; | 1223 | ", |
1221 | 1224 | ); | |
1222 | check_diagnostic(content); | ||
1223 | } | 1225 | } |
1224 | 1226 | ||
1225 | #[test] | 1227 | #[test] |
1226 | fn enum_missing_arms() { | 1228 | fn enum_missing_arms() { |
1227 | let content = r" | 1229 | check_diagnostic( |
1230 | r" | ||
1228 | enum Either { | 1231 | enum Either { |
1229 | A, | 1232 | A, |
1230 | B, | 1233 | B, |
@@ -1234,14 +1237,14 @@ mod tests { | |||
1234 | Either::A => {}, | 1237 | Either::A => {}, |
1235 | } | 1238 | } |
1236 | } | 1239 | } |
1237 | "; | 1240 | ", |
1238 | 1241 | ); | |
1239 | check_diagnostic(content); | ||
1240 | } | 1242 | } |
1241 | 1243 | ||
1242 | #[test] | 1244 | #[test] |
1243 | fn enum_no_diagnostic() { | 1245 | fn enum_no_diagnostic() { |
1244 | let content = r" | 1246 | check_no_diagnostic( |
1247 | r" | ||
1245 | enum Either { | 1248 | enum Either { |
1246 | A, | 1249 | A, |
1247 | B, | 1250 | B, |
@@ -1252,14 +1255,14 @@ mod tests { | |||
1252 | Either::B => {}, | 1255 | Either::B => {}, |
1253 | } | 1256 | } |
1254 | } | 1257 | } |
1255 | "; | 1258 | ", |
1256 | 1259 | ); | |
1257 | check_no_diagnostic(content); | ||
1258 | } | 1260 | } |
1259 | 1261 | ||
1260 | #[test] | 1262 | #[test] |
1261 | fn enum_ref_missing_arms() { | 1263 | fn enum_ref_missing_arms() { |
1262 | let content = r" | 1264 | check_diagnostic( |
1265 | r" | ||
1263 | enum Either { | 1266 | enum Either { |
1264 | A, | 1267 | A, |
1265 | B, | 1268 | B, |
@@ -1269,14 +1272,14 @@ mod tests { | |||
1269 | Either::A => {}, | 1272 | Either::A => {}, |
1270 | } | 1273 | } |
1271 | } | 1274 | } |
1272 | "; | 1275 | ", |
1273 | 1276 | ); | |
1274 | check_diagnostic(content); | ||
1275 | } | 1277 | } |
1276 | 1278 | ||
1277 | #[test] | 1279 | #[test] |
1278 | fn enum_ref_no_diagnostic() { | 1280 | fn enum_ref_no_diagnostic() { |
1279 | let content = r" | 1281 | check_no_diagnostic( |
1282 | r" | ||
1280 | enum Either { | 1283 | enum Either { |
1281 | A, | 1284 | A, |
1282 | B, | 1285 | B, |
@@ -1287,14 +1290,14 @@ mod tests { | |||
1287 | Either::B => {}, | 1290 | Either::B => {}, |
1288 | } | 1291 | } |
1289 | } | 1292 | } |
1290 | "; | 1293 | ", |
1291 | 1294 | ); | |
1292 | check_no_diagnostic(content); | ||
1293 | } | 1295 | } |
1294 | 1296 | ||
1295 | #[test] | 1297 | #[test] |
1296 | fn enum_containing_bool_no_arms() { | 1298 | fn enum_containing_bool_no_arms() { |
1297 | let content = r" | 1299 | check_diagnostic( |
1300 | r" | ||
1298 | enum Either { | 1301 | enum Either { |
1299 | A(bool), | 1302 | A(bool), |
1300 | B, | 1303 | B, |
@@ -1303,14 +1306,14 @@ mod tests { | |||
1303 | match Either::B { | 1306 | match Either::B { |
1304 | } | 1307 | } |
1305 | } | 1308 | } |
1306 | "; | 1309 | ", |
1307 | 1310 | ); | |
1308 | check_diagnostic(content); | ||
1309 | } | 1311 | } |
1310 | 1312 | ||
1311 | #[test] | 1313 | #[test] |
1312 | fn enum_containing_bool_missing_arms() { | 1314 | fn enum_containing_bool_missing_arms() { |
1313 | let content = r" | 1315 | check_diagnostic( |
1316 | r" | ||
1314 | enum Either { | 1317 | enum Either { |
1315 | A(bool), | 1318 | A(bool), |
1316 | B, | 1319 | B, |
@@ -1321,14 +1324,14 @@ mod tests { | |||
1321 | Either::B => (), | 1324 | Either::B => (), |
1322 | } | 1325 | } |
1323 | } | 1326 | } |
1324 | "; | 1327 | ", |
1325 | 1328 | ); | |
1326 | check_diagnostic(content); | ||
1327 | } | 1329 | } |
1328 | 1330 | ||
1329 | #[test] | 1331 | #[test] |
1330 | fn enum_containing_bool_no_diagnostic() { | 1332 | fn enum_containing_bool_no_diagnostic() { |
1331 | let content = r" | 1333 | check_no_diagnostic( |
1334 | r" | ||
1332 | enum Either { | 1335 | enum Either { |
1333 | A(bool), | 1336 | A(bool), |
1334 | B, | 1337 | B, |
@@ -1340,14 +1343,14 @@ mod tests { | |||
1340 | Either::B => (), | 1343 | Either::B => (), |
1341 | } | 1344 | } |
1342 | } | 1345 | } |
1343 | "; | 1346 | ", |
1344 | 1347 | ); | |
1345 | check_no_diagnostic(content); | ||
1346 | } | 1348 | } |
1347 | 1349 | ||
1348 | #[test] | 1350 | #[test] |
1349 | fn enum_containing_bool_with_wild_no_diagnostic() { | 1351 | fn enum_containing_bool_with_wild_no_diagnostic() { |
1350 | let content = r" | 1352 | check_no_diagnostic( |
1353 | r" | ||
1351 | enum Either { | 1354 | enum Either { |
1352 | A(bool), | 1355 | A(bool), |
1353 | B, | 1356 | B, |
@@ -1358,14 +1361,14 @@ mod tests { | |||
1358 | _ => (), | 1361 | _ => (), |
1359 | } | 1362 | } |
1360 | } | 1363 | } |
1361 | "; | 1364 | ", |
1362 | 1365 | ); | |
1363 | check_no_diagnostic(content); | ||
1364 | } | 1366 | } |
1365 | 1367 | ||
1366 | #[test] | 1368 | #[test] |
1367 | fn enum_containing_bool_with_wild_2_no_diagnostic() { | 1369 | fn enum_containing_bool_with_wild_2_no_diagnostic() { |
1368 | let content = r" | 1370 | check_no_diagnostic( |
1371 | r" | ||
1369 | enum Either { | 1372 | enum Either { |
1370 | A(bool), | 1373 | A(bool), |
1371 | B, | 1374 | B, |
@@ -1376,14 +1379,14 @@ mod tests { | |||
1376 | Either::B => (), | 1379 | Either::B => (), |
1377 | } | 1380 | } |
1378 | } | 1381 | } |
1379 | "; | 1382 | ", |
1380 | 1383 | ); | |
1381 | check_no_diagnostic(content); | ||
1382 | } | 1384 | } |
1383 | 1385 | ||
1384 | #[test] | 1386 | #[test] |
1385 | fn enum_different_sizes_missing_arms() { | 1387 | fn enum_different_sizes_missing_arms() { |
1386 | let content = r" | 1388 | check_diagnostic( |
1389 | r" | ||
1387 | enum Either { | 1390 | enum Either { |
1388 | A(bool), | 1391 | A(bool), |
1389 | B(bool, bool), | 1392 | B(bool, bool), |
@@ -1394,14 +1397,14 @@ mod tests { | |||
1394 | Either::B(false, _) => (), | 1397 | Either::B(false, _) => (), |
1395 | } | 1398 | } |
1396 | } | 1399 | } |
1397 | "; | 1400 | ", |
1398 | 1401 | ); | |
1399 | check_diagnostic(content); | ||
1400 | } | 1402 | } |
1401 | 1403 | ||
1402 | #[test] | 1404 | #[test] |
1403 | fn enum_different_sizes_no_diagnostic() { | 1405 | fn enum_different_sizes_no_diagnostic() { |
1404 | let content = r" | 1406 | check_no_diagnostic( |
1407 | r" | ||
1405 | enum Either { | 1408 | enum Either { |
1406 | A(bool), | 1409 | A(bool), |
1407 | B(bool, bool), | 1410 | B(bool, bool), |
@@ -1413,14 +1416,14 @@ mod tests { | |||
1413 | Either::B(false, _) => (), | 1416 | Either::B(false, _) => (), |
1414 | } | 1417 | } |
1415 | } | 1418 | } |
1416 | "; | 1419 | ", |
1417 | 1420 | ); | |
1418 | check_no_diagnostic(content); | ||
1419 | } | 1421 | } |
1420 | 1422 | ||
1421 | #[test] | 1423 | #[test] |
1422 | fn or_no_diagnostic() { | 1424 | fn or_no_diagnostic() { |
1423 | let content = r" | 1425 | check_no_diagnostic( |
1426 | r" | ||
1424 | enum Either { | 1427 | enum Either { |
1425 | A(bool), | 1428 | A(bool), |
1426 | B(bool, bool), | 1429 | B(bool, bool), |
@@ -1432,14 +1435,14 @@ mod tests { | |||
1432 | Either::B(false, _) => (), | 1435 | Either::B(false, _) => (), |
1433 | } | 1436 | } |
1434 | } | 1437 | } |
1435 | "; | 1438 | ", |
1436 | 1439 | ); | |
1437 | check_no_diagnostic(content); | ||
1438 | } | 1440 | } |
1439 | 1441 | ||
1440 | #[test] | 1442 | #[test] |
1441 | fn tuple_of_enum_no_diagnostic() { | 1443 | fn tuple_of_enum_no_diagnostic() { |
1442 | let content = r" | 1444 | check_no_diagnostic( |
1445 | r" | ||
1443 | enum Either { | 1446 | enum Either { |
1444 | A(bool), | 1447 | A(bool), |
1445 | B(bool, bool), | 1448 | B(bool, bool), |
@@ -1456,14 +1459,16 @@ mod tests { | |||
1456 | (Either::B(_, _), Either2::D) => (), | 1459 | (Either::B(_, _), Either2::D) => (), |
1457 | } | 1460 | } |
1458 | } | 1461 | } |
1459 | "; | 1462 | ", |
1460 | 1463 | ); | |
1461 | check_no_diagnostic(content); | ||
1462 | } | 1464 | } |
1463 | 1465 | ||
1464 | #[test] | 1466 | #[test] |
1465 | fn mismatched_types() { | 1467 | fn mismatched_types() { |
1466 | let content = r" | 1468 | // Match statements with arms that don't match the |
1469 | // expression pattern do not fire this diagnostic. | ||
1470 | check_no_diagnostic( | ||
1471 | r" | ||
1467 | enum Either { | 1472 | enum Either { |
1468 | A, | 1473 | A, |
1469 | B, | 1474 | B, |
@@ -1478,47 +1483,47 @@ mod tests { | |||
1478 | Either2::D => (), | 1483 | Either2::D => (), |
1479 | } | 1484 | } |
1480 | } | 1485 | } |
1481 | "; | 1486 | ", |
1482 | 1487 | ); | |
1483 | // Match statements with arms that don't match the | ||
1484 | // expression pattern do not fire this diagnostic. | ||
1485 | check_no_diagnostic(content); | ||
1486 | } | 1488 | } |
1487 | 1489 | ||
1488 | #[test] | 1490 | #[test] |
1489 | fn mismatched_types_with_different_arity() { | 1491 | fn mismatched_types_with_different_arity() { |
1490 | let content = r" | 1492 | // Match statements with arms that don't match the |
1493 | // expression pattern do not fire this diagnostic. | ||
1494 | check_no_diagnostic( | ||
1495 | r" | ||
1491 | fn test_fn() { | 1496 | fn test_fn() { |
1492 | match (true, false) { | 1497 | match (true, false) { |
1493 | (true, false, true) => (), | 1498 | (true, false, true) => (), |
1494 | (true) => (), | 1499 | (true) => (), |
1495 | } | 1500 | } |
1496 | } | 1501 | } |
1497 | "; | 1502 | ", |
1498 | 1503 | ); | |
1499 | // Match statements with arms that don't match the | ||
1500 | // expression pattern do not fire this diagnostic. | ||
1501 | check_no_diagnostic(content); | ||
1502 | } | 1504 | } |
1503 | 1505 | ||
1504 | #[test] | 1506 | #[test] |
1505 | fn malformed_match_arm_tuple_missing_pattern() { | 1507 | fn malformed_match_arm_tuple_missing_pattern() { |
1506 | let content = r" | 1508 | // Match statements with arms that don't match the |
1509 | // expression pattern do not fire this diagnostic. | ||
1510 | check_no_diagnostic( | ||
1511 | r" | ||
1507 | fn test_fn() { | 1512 | fn test_fn() { |
1508 | match (0) { | 1513 | match (0) { |
1509 | () => (), | 1514 | () => (), |
1510 | } | 1515 | } |
1511 | } | 1516 | } |
1512 | "; | 1517 | ", |
1513 | 1518 | ); | |
1514 | // Match statements with arms that don't match the | ||
1515 | // expression pattern do not fire this diagnostic. | ||
1516 | check_no_diagnostic(content); | ||
1517 | } | 1519 | } |
1518 | 1520 | ||
1519 | #[test] | 1521 | #[test] |
1520 | fn malformed_match_arm_tuple_enum_missing_pattern() { | 1522 | fn malformed_match_arm_tuple_enum_missing_pattern() { |
1521 | let content = r" | 1523 | // We are testing to be sure we don't panic here when the match |
1524 | // arm `Either::B` is missing its pattern. | ||
1525 | check_no_diagnostic( | ||
1526 | r" | ||
1522 | enum Either { | 1527 | enum Either { |
1523 | A, | 1528 | A, |
1524 | B(u32), | 1529 | B(u32), |
@@ -1529,32 +1534,30 @@ mod tests { | |||
1529 | Either::B() => (), | 1534 | Either::B() => (), |
1530 | } | 1535 | } |
1531 | } | 1536 | } |
1532 | "; | 1537 | ", |
1533 | 1538 | ); | |
1534 | // We are testing to be sure we don't panic here when the match | ||
1535 | // arm `Either::B` is missing its pattern. | ||
1536 | check_no_diagnostic(content); | ||
1537 | } | 1539 | } |
1538 | 1540 | ||
1539 | #[test] | 1541 | #[test] |
1540 | fn enum_not_in_scope() { | 1542 | fn enum_not_in_scope() { |
1541 | let content = r" | 1543 | // The enum is not in scope so we don't perform exhaustiveness |
1544 | // checking, but we want to be sure we don't panic here (and | ||
1545 | // we don't create a diagnostic). | ||
1546 | check_no_diagnostic( | ||
1547 | r" | ||
1542 | fn test_fn() { | 1548 | fn test_fn() { |
1543 | match Foo::Bar { | 1549 | match Foo::Bar { |
1544 | Foo::Baz => (), | 1550 | Foo::Baz => (), |
1545 | } | 1551 | } |
1546 | } | 1552 | } |
1547 | "; | 1553 | ", |
1548 | 1554 | ); | |
1549 | // The enum is not in scope so we don't perform exhaustiveness | ||
1550 | // checking, but we want to be sure we don't panic here (and | ||
1551 | // we don't create a diagnostic). | ||
1552 | check_no_diagnostic(content); | ||
1553 | } | 1555 | } |
1554 | 1556 | ||
1555 | #[test] | 1557 | #[test] |
1556 | fn expr_diverges() { | 1558 | fn expr_diverges() { |
1557 | let content = r" | 1559 | check_no_diagnostic( |
1560 | r" | ||
1558 | enum Either { | 1561 | enum Either { |
1559 | A, | 1562 | A, |
1560 | B, | 1563 | B, |
@@ -1565,14 +1568,14 @@ mod tests { | |||
1565 | Either::B => (), | 1568 | Either::B => (), |
1566 | } | 1569 | } |
1567 | } | 1570 | } |
1568 | "; | 1571 | ", |
1569 | 1572 | ); | |
1570 | check_no_diagnostic(content); | ||
1571 | } | 1573 | } |
1572 | 1574 | ||
1573 | #[test] | 1575 | #[test] |
1574 | fn expr_loop_with_break() { | 1576 | fn expr_loop_with_break() { |
1575 | let content = r" | 1577 | check_no_diagnostic( |
1578 | r" | ||
1576 | enum Either { | 1579 | enum Either { |
1577 | A, | 1580 | A, |
1578 | B, | 1581 | B, |
@@ -1583,14 +1586,14 @@ mod tests { | |||
1583 | Either::B => (), | 1586 | Either::B => (), |
1584 | } | 1587 | } |
1585 | } | 1588 | } |
1586 | "; | 1589 | ", |
1587 | 1590 | ); | |
1588 | check_no_diagnostic(content); | ||
1589 | } | 1591 | } |
1590 | 1592 | ||
1591 | #[test] | 1593 | #[test] |
1592 | fn expr_partially_diverges() { | 1594 | fn expr_partially_diverges() { |
1593 | let content = r" | 1595 | check_no_diagnostic( |
1596 | r" | ||
1594 | enum Either<T> { | 1597 | enum Either<T> { |
1595 | A(T), | 1598 | A(T), |
1596 | B, | 1599 | B, |
@@ -1604,14 +1607,14 @@ mod tests { | |||
1604 | Either::B => 0, | 1607 | Either::B => 0, |
1605 | } | 1608 | } |
1606 | } | 1609 | } |
1607 | "; | 1610 | ", |
1608 | 1611 | ); | |
1609 | check_no_diagnostic(content); | ||
1610 | } | 1612 | } |
1611 | 1613 | ||
1612 | #[test] | 1614 | #[test] |
1613 | fn enum_record_no_arms() { | 1615 | fn enum_record_no_arms() { |
1614 | let content = r" | 1616 | check_diagnostic( |
1617 | r" | ||
1615 | enum Either { | 1618 | enum Either { |
1616 | A { foo: bool }, | 1619 | A { foo: bool }, |
1617 | B, | 1620 | B, |
@@ -1621,14 +1624,14 @@ mod tests { | |||
1621 | match a { | 1624 | match a { |
1622 | } | 1625 | } |
1623 | } | 1626 | } |
1624 | "; | 1627 | ", |
1625 | 1628 | ); | |
1626 | check_diagnostic(content); | ||
1627 | } | 1629 | } |
1628 | 1630 | ||
1629 | #[test] | 1631 | #[test] |
1630 | fn enum_record_missing_arms() { | 1632 | fn enum_record_missing_arms() { |
1631 | let content = r" | 1633 | check_diagnostic( |
1634 | r" | ||
1632 | enum Either { | 1635 | enum Either { |
1633 | A { foo: bool }, | 1636 | A { foo: bool }, |
1634 | B, | 1637 | B, |
@@ -1639,14 +1642,14 @@ mod tests { | |||
1639 | Either::A { foo: true } => (), | 1642 | Either::A { foo: true } => (), |
1640 | } | 1643 | } |
1641 | } | 1644 | } |
1642 | "; | 1645 | ", |
1643 | 1646 | ); | |
1644 | check_diagnostic(content); | ||
1645 | } | 1647 | } |
1646 | 1648 | ||
1647 | #[test] | 1649 | #[test] |
1648 | fn enum_record_no_diagnostic() { | 1650 | fn enum_record_no_diagnostic() { |
1649 | let content = r" | 1651 | check_no_diagnostic( |
1652 | r" | ||
1650 | enum Either { | 1653 | enum Either { |
1651 | A { foo: bool }, | 1654 | A { foo: bool }, |
1652 | B, | 1655 | B, |
@@ -1659,14 +1662,17 @@ mod tests { | |||
1659 | Either::B => (), | 1662 | Either::B => (), |
1660 | } | 1663 | } |
1661 | } | 1664 | } |
1662 | "; | 1665 | ", |
1663 | 1666 | ); | |
1664 | check_no_diagnostic(content); | ||
1665 | } | 1667 | } |
1666 | 1668 | ||
1667 | #[test] | 1669 | #[test] |
1668 | fn enum_record_missing_field_no_diagnostic() { | 1670 | fn enum_record_missing_field_no_diagnostic() { |
1669 | let content = r" | 1671 | // When `Either::A` is missing a struct member, we don't want |
1672 | // to fire the missing match arm diagnostic. This should fire | ||
1673 | // some other diagnostic. | ||
1674 | check_no_diagnostic( | ||
1675 | r" | ||
1670 | enum Either { | 1676 | enum Either { |
1671 | A { foo: bool }, | 1677 | A { foo: bool }, |
1672 | B, | 1678 | B, |
@@ -1678,17 +1684,16 @@ mod tests { | |||
1678 | Either::B => (), | 1684 | Either::B => (), |
1679 | } | 1685 | } |
1680 | } | 1686 | } |
1681 | "; | 1687 | ", |
1682 | 1688 | ); | |
1683 | // When `Either::A` is missing a struct member, we don't want | ||
1684 | // to fire the missing match arm diagnostic. This should fire | ||
1685 | // some other diagnostic. | ||
1686 | check_no_diagnostic(content); | ||
1687 | } | 1689 | } |
1688 | 1690 | ||
1689 | #[test] | 1691 | #[test] |
1690 | fn enum_record_missing_field_missing_match_arm() { | 1692 | fn enum_record_missing_field_missing_match_arm() { |
1691 | let content = r" | 1693 | // Even though `Either::A` is missing fields, we still want to fire |
1694 | // the missing arm diagnostic here, since we know `Either::B` is missing. | ||
1695 | check_diagnostic( | ||
1696 | r" | ||
1692 | enum Either { | 1697 | enum Either { |
1693 | A { foo: bool }, | 1698 | A { foo: bool }, |
1694 | B, | 1699 | B, |
@@ -1699,16 +1704,14 @@ mod tests { | |||
1699 | Either::A { } => (), | 1704 | Either::A { } => (), |
1700 | } | 1705 | } |
1701 | } | 1706 | } |
1702 | "; | 1707 | ", |
1703 | 1708 | ); | |
1704 | // Even though `Either::A` is missing fields, we still want to fire | ||
1705 | // the missing arm diagnostic here, since we know `Either::B` is missing. | ||
1706 | check_diagnostic(content); | ||
1707 | } | 1709 | } |
1708 | 1710 | ||
1709 | #[test] | 1711 | #[test] |
1710 | fn enum_record_no_diagnostic_wild() { | 1712 | fn enum_record_no_diagnostic_wild() { |
1711 | let content = r" | 1713 | check_no_diagnostic( |
1714 | r" | ||
1712 | enum Either { | 1715 | enum Either { |
1713 | A { foo: bool }, | 1716 | A { foo: bool }, |
1714 | B, | 1717 | B, |
@@ -1720,14 +1723,14 @@ mod tests { | |||
1720 | Either::B => (), | 1723 | Either::B => (), |
1721 | } | 1724 | } |
1722 | } | 1725 | } |
1723 | "; | 1726 | ", |
1724 | 1727 | ); | |
1725 | check_no_diagnostic(content); | ||
1726 | } | 1728 | } |
1727 | 1729 | ||
1728 | #[test] | 1730 | #[test] |
1729 | fn enum_record_fields_out_of_order_missing_arm() { | 1731 | fn enum_record_fields_out_of_order_missing_arm() { |
1730 | let content = r" | 1732 | check_diagnostic( |
1733 | r" | ||
1731 | enum Either { | 1734 | enum Either { |
1732 | A { foo: bool, bar: () }, | 1735 | A { foo: bool, bar: () }, |
1733 | B, | 1736 | B, |
@@ -1739,14 +1742,14 @@ mod tests { | |||
1739 | Either::A { foo: true, bar: () } => (), | 1742 | Either::A { foo: true, bar: () } => (), |
1740 | } | 1743 | } |
1741 | } | 1744 | } |
1742 | "; | 1745 | ", |
1743 | 1746 | ); | |
1744 | check_diagnostic(content); | ||
1745 | } | 1747 | } |
1746 | 1748 | ||
1747 | #[test] | 1749 | #[test] |
1748 | fn enum_record_fields_out_of_order_no_diagnostic() { | 1750 | fn enum_record_fields_out_of_order_no_diagnostic() { |
1749 | let content = r" | 1751 | check_no_diagnostic( |
1752 | r" | ||
1750 | enum Either { | 1753 | enum Either { |
1751 | A { foo: bool, bar: () }, | 1754 | A { foo: bool, bar: () }, |
1752 | B, | 1755 | B, |
@@ -1759,89 +1762,89 @@ mod tests { | |||
1759 | Either::B => (), | 1762 | Either::B => (), |
1760 | } | 1763 | } |
1761 | } | 1764 | } |
1762 | "; | 1765 | ", |
1763 | 1766 | ); | |
1764 | check_no_diagnostic(content); | ||
1765 | } | 1767 | } |
1766 | 1768 | ||
1767 | #[test] | 1769 | #[test] |
1768 | fn enum_record_ellipsis_missing_arm() { | 1770 | fn enum_record_ellipsis_missing_arm() { |
1769 | let content = r" | 1771 | check_diagnostic( |
1770 | enum Either { | 1772 | r" |
1771 | A { foo: bool, bar: bool }, | 1773 | enum Either { |
1772 | B, | 1774 | A { foo: bool, bar: bool }, |
1773 | } | 1775 | B, |
1774 | fn test_fn() { | 1776 | } |
1775 | match Either::B { | 1777 | fn test_fn() { |
1776 | Either::A { foo: true, .. } => (), | 1778 | match Either::B { |
1777 | Either::B => (), | 1779 | Either::A { foo: true, .. } => (), |
1778 | } | 1780 | Either::B => (), |
1779 | } | 1781 | } |
1780 | "; | 1782 | } |
1781 | 1783 | ", | |
1782 | check_diagnostic(content); | 1784 | ); |
1783 | } | 1785 | } |
1784 | 1786 | ||
1785 | #[test] | 1787 | #[test] |
1786 | fn enum_record_ellipsis_no_diagnostic() { | 1788 | fn enum_record_ellipsis_no_diagnostic() { |
1787 | let content = r" | 1789 | check_no_diagnostic( |
1788 | enum Either { | 1790 | r" |
1789 | A { foo: bool, bar: bool }, | 1791 | enum Either { |
1790 | B, | 1792 | A { foo: bool, bar: bool }, |
1791 | } | 1793 | B, |
1792 | fn test_fn() { | 1794 | } |
1793 | let a = Either::A { foo: true }; | 1795 | fn test_fn() { |
1794 | match a { | 1796 | let a = Either::A { foo: true }; |
1795 | Either::A { foo: true, .. } => (), | 1797 | match a { |
1796 | Either::A { foo: false, .. } => (), | 1798 | Either::A { foo: true, .. } => (), |
1797 | Either::B => (), | 1799 | Either::A { foo: false, .. } => (), |
1798 | } | 1800 | Either::B => (), |
1799 | } | 1801 | } |
1800 | "; | 1802 | } |
1801 | 1803 | ", | |
1802 | check_no_diagnostic(content); | 1804 | ); |
1803 | } | 1805 | } |
1804 | 1806 | ||
1805 | #[test] | 1807 | #[test] |
1806 | fn enum_record_ellipsis_all_fields_missing_arm() { | 1808 | fn enum_record_ellipsis_all_fields_missing_arm() { |
1807 | let content = r" | 1809 | check_diagnostic( |
1808 | enum Either { | 1810 | r" |
1809 | A { foo: bool, bar: bool }, | 1811 | enum Either { |
1810 | B, | 1812 | A { foo: bool, bar: bool }, |
1811 | } | 1813 | B, |
1812 | fn test_fn() { | 1814 | } |
1813 | let a = Either::B; | 1815 | fn test_fn() { |
1814 | match a { | 1816 | let a = Either::B; |
1815 | Either::A { .. } => (), | 1817 | match a { |
1816 | } | 1818 | Either::A { .. } => (), |
1817 | } | 1819 | } |
1818 | "; | 1820 | } |
1819 | 1821 | ", | |
1820 | check_diagnostic(content); | 1822 | ); |
1821 | } | 1823 | } |
1822 | 1824 | ||
1823 | #[test] | 1825 | #[test] |
1824 | fn enum_record_ellipsis_all_fields_no_diagnostic() { | 1826 | fn enum_record_ellipsis_all_fields_no_diagnostic() { |
1825 | let content = r" | 1827 | check_no_diagnostic( |
1826 | enum Either { | 1828 | r" |
1827 | A { foo: bool, bar: bool }, | 1829 | enum Either { |
1828 | B, | 1830 | A { foo: bool, bar: bool }, |
1829 | } | 1831 | B, |
1830 | fn test_fn() { | 1832 | } |
1831 | let a = Either::B; | 1833 | fn test_fn() { |
1832 | match a { | 1834 | let a = Either::B; |
1833 | Either::A { .. } => (), | 1835 | match a { |
1834 | Either::B => (), | 1836 | Either::A { .. } => (), |
1835 | } | 1837 | Either::B => (), |
1836 | } | 1838 | } |
1837 | "; | 1839 | } |
1838 | 1840 | ", | |
1839 | check_no_diagnostic(content); | 1841 | ); |
1840 | } | 1842 | } |
1841 | 1843 | ||
1842 | #[test] | 1844 | #[test] |
1843 | fn enum_tuple_partial_ellipsis_no_diagnostic() { | 1845 | fn enum_tuple_partial_ellipsis_no_diagnostic() { |
1844 | let content = r" | 1846 | check_no_diagnostic( |
1847 | r" | ||
1845 | enum Either { | 1848 | enum Either { |
1846 | A(bool, bool, bool, bool), | 1849 | A(bool, bool, bool, bool), |
1847 | B, | 1850 | B, |
@@ -1855,14 +1858,14 @@ mod tests { | |||
1855 | Either::B => {}, | 1858 | Either::B => {}, |
1856 | } | 1859 | } |
1857 | } | 1860 | } |
1858 | "; | 1861 | ", |
1859 | 1862 | ); | |
1860 | check_no_diagnostic(content); | ||
1861 | } | 1863 | } |
1862 | 1864 | ||
1863 | #[test] | 1865 | #[test] |
1864 | fn enum_tuple_partial_ellipsis_2_no_diagnostic() { | 1866 | fn enum_tuple_partial_ellipsis_2_no_diagnostic() { |
1865 | let content = r" | 1867 | check_no_diagnostic( |
1868 | r" | ||
1866 | enum Either { | 1869 | enum Either { |
1867 | A(bool, bool, bool, bool), | 1870 | A(bool, bool, bool, bool), |
1868 | B, | 1871 | B, |
@@ -1876,14 +1879,14 @@ mod tests { | |||
1876 | Either::B => {}, | 1879 | Either::B => {}, |
1877 | } | 1880 | } |
1878 | } | 1881 | } |
1879 | "; | 1882 | ", |
1880 | 1883 | ); | |
1881 | check_no_diagnostic(content); | ||
1882 | } | 1884 | } |
1883 | 1885 | ||
1884 | #[test] | 1886 | #[test] |
1885 | fn enum_tuple_partial_ellipsis_missing_arm() { | 1887 | fn enum_tuple_partial_ellipsis_missing_arm() { |
1886 | let content = r" | 1888 | check_diagnostic( |
1889 | r" | ||
1887 | enum Either { | 1890 | enum Either { |
1888 | A(bool, bool, bool, bool), | 1891 | A(bool, bool, bool, bool), |
1889 | B, | 1892 | B, |
@@ -1896,14 +1899,14 @@ mod tests { | |||
1896 | Either::B => {}, | 1899 | Either::B => {}, |
1897 | } | 1900 | } |
1898 | } | 1901 | } |
1899 | "; | 1902 | ", |
1900 | 1903 | ); | |
1901 | check_diagnostic(content); | ||
1902 | } | 1904 | } |
1903 | 1905 | ||
1904 | #[test] | 1906 | #[test] |
1905 | fn enum_tuple_partial_ellipsis_2_missing_arm() { | 1907 | fn enum_tuple_partial_ellipsis_2_missing_arm() { |
1906 | let content = r" | 1908 | check_diagnostic( |
1909 | r" | ||
1907 | enum Either { | 1910 | enum Either { |
1908 | A(bool, bool, bool, bool), | 1911 | A(bool, bool, bool, bool), |
1909 | B, | 1912 | B, |
@@ -1916,14 +1919,14 @@ mod tests { | |||
1916 | Either::B => {}, | 1919 | Either::B => {}, |
1917 | } | 1920 | } |
1918 | } | 1921 | } |
1919 | "; | 1922 | ", |
1920 | 1923 | ); | |
1921 | check_diagnostic(content); | ||
1922 | } | 1924 | } |
1923 | 1925 | ||
1924 | #[test] | 1926 | #[test] |
1925 | fn enum_tuple_ellipsis_no_diagnostic() { | 1927 | fn enum_tuple_ellipsis_no_diagnostic() { |
1926 | let content = r" | 1928 | check_no_diagnostic( |
1929 | r" | ||
1927 | enum Either { | 1930 | enum Either { |
1928 | A(bool, bool, bool, bool), | 1931 | A(bool, bool, bool, bool), |
1929 | B, | 1932 | B, |
@@ -1934,51 +1937,51 @@ mod tests { | |||
1934 | Either::B => {}, | 1937 | Either::B => {}, |
1935 | } | 1938 | } |
1936 | } | 1939 | } |
1937 | "; | 1940 | ", |
1938 | 1941 | ); | |
1939 | check_no_diagnostic(content); | ||
1940 | } | 1942 | } |
1941 | 1943 | ||
1942 | #[test] | 1944 | #[test] |
1943 | fn enum_never() { | 1945 | fn enum_never() { |
1944 | let content = r" | 1946 | check_no_diagnostic( |
1947 | r" | ||
1945 | enum Never {} | 1948 | enum Never {} |
1946 | 1949 | ||
1947 | fn test_fn(never: Never) { | 1950 | fn test_fn(never: Never) { |
1948 | match never {} | 1951 | match never {} |
1949 | } | 1952 | } |
1950 | "; | 1953 | ", |
1951 | 1954 | ); | |
1952 | check_no_diagnostic(content); | ||
1953 | } | 1955 | } |
1954 | 1956 | ||
1955 | #[test] | 1957 | #[test] |
1956 | fn type_never() { | 1958 | fn type_never() { |
1957 | let content = r" | 1959 | check_no_diagnostic( |
1960 | r" | ||
1958 | fn test_fn(never: !) { | 1961 | fn test_fn(never: !) { |
1959 | match never {} | 1962 | match never {} |
1960 | } | 1963 | } |
1961 | "; | 1964 | ", |
1962 | 1965 | ); | |
1963 | check_no_diagnostic(content); | ||
1964 | } | 1966 | } |
1965 | 1967 | ||
1966 | #[test] | 1968 | #[test] |
1967 | fn enum_never_ref() { | 1969 | fn enum_never_ref() { |
1968 | let content = r" | 1970 | check_no_diagnostic( |
1971 | r" | ||
1969 | enum Never {} | 1972 | enum Never {} |
1970 | 1973 | ||
1971 | fn test_fn(never: &Never) { | 1974 | fn test_fn(never: &Never) { |
1972 | match never {} | 1975 | match never {} |
1973 | } | 1976 | } |
1974 | "; | 1977 | ", |
1975 | 1978 | ); | |
1976 | check_no_diagnostic(content); | ||
1977 | } | 1979 | } |
1978 | 1980 | ||
1979 | #[test] | 1981 | #[test] |
1980 | fn expr_diverges_missing_arm() { | 1982 | fn expr_diverges_missing_arm() { |
1981 | let content = r" | 1983 | check_no_diagnostic( |
1984 | r" | ||
1982 | enum Either { | 1985 | enum Either { |
1983 | A, | 1986 | A, |
1984 | B, | 1987 | B, |
@@ -1988,9 +1991,49 @@ mod tests { | |||
1988 | Either::A => (), | 1991 | Either::A => (), |
1989 | } | 1992 | } |
1990 | } | 1993 | } |
1991 | "; | 1994 | ", |
1995 | ); | ||
1996 | } | ||
1992 | 1997 | ||
1993 | check_no_diagnostic(content); | 1998 | #[test] |
1999 | fn or_pattern_panic() { | ||
2000 | check_no_diagnostic( | ||
2001 | r" | ||
2002 | pub enum Category { | ||
2003 | Infinity, | ||
2004 | Zero, | ||
2005 | } | ||
2006 | |||
2007 | fn panic(a: Category, b: Category) { | ||
2008 | match (a, b) { | ||
2009 | (Category::Zero | Category::Infinity, _) => {} | ||
2010 | (_, Category::Zero | Category::Infinity) => {} | ||
2011 | } | ||
2012 | } | ||
2013 | ", | ||
2014 | ); | ||
2015 | } | ||
2016 | |||
2017 | #[test] | ||
2018 | fn or_pattern_panic_2() { | ||
2019 | // FIXME: This is a false positive, but the code used to cause a panic in the match checker, | ||
2020 | // so this acts as a regression test for that. | ||
2021 | check_diagnostic( | ||
2022 | r" | ||
2023 | pub enum Category { | ||
2024 | Infinity, | ||
2025 | Zero, | ||
2026 | } | ||
2027 | |||
2028 | fn panic(a: Category, b: Category) { | ||
2029 | match (a, b) { | ||
2030 | (Category::Infinity, Category::Infinity) | (Category::Zero, Category::Zero) => {} | ||
2031 | |||
2032 | (Category::Infinity | Category::Zero, _) => {} | ||
2033 | } | ||
2034 | } | ||
2035 | ", | ||
2036 | ); | ||
1994 | } | 2037 | } |
1995 | } | 2038 | } |
1996 | 2039 | ||
@@ -2010,23 +2053,26 @@ mod false_negatives { | |||
2010 | 2053 | ||
2011 | #[test] | 2054 | #[test] |
2012 | fn integers() { | 2055 | fn integers() { |
2013 | let content = r" | 2056 | // This is a false negative. |
2057 | // We don't currently check integer exhaustiveness. | ||
2058 | check_no_diagnostic( | ||
2059 | r" | ||
2014 | fn test_fn() { | 2060 | fn test_fn() { |
2015 | match 5 { | 2061 | match 5 { |
2016 | 10 => (), | 2062 | 10 => (), |
2017 | 11..20 => (), | 2063 | 11..20 => (), |
2018 | } | 2064 | } |
2019 | } | 2065 | } |
2020 | "; | 2066 | ", |
2021 | 2067 | ); | |
2022 | // This is a false negative. | ||
2023 | // We don't currently check integer exhaustiveness. | ||
2024 | check_no_diagnostic(content); | ||
2025 | } | 2068 | } |
2026 | 2069 | ||
2027 | #[test] | 2070 | #[test] |
2028 | fn internal_or() { | 2071 | fn internal_or() { |
2029 | let content = r" | 2072 | // This is a false negative. |
2073 | // We do not currently handle patterns with internal `or`s. | ||
2074 | check_no_diagnostic( | ||
2075 | r" | ||
2030 | fn test_fn() { | 2076 | fn test_fn() { |
2031 | enum Either { | 2077 | enum Either { |
2032 | A(bool), | 2078 | A(bool), |
@@ -2036,16 +2082,18 @@ mod false_negatives { | |||
2036 | Either::A(true | false) => (), | 2082 | Either::A(true | false) => (), |
2037 | } | 2083 | } |
2038 | } | 2084 | } |
2039 | "; | 2085 | ", |
2040 | 2086 | ); | |
2041 | // This is a false negative. | ||
2042 | // We do not currently handle patterns with internal `or`s. | ||
2043 | check_no_diagnostic(content); | ||
2044 | } | 2087 | } |
2045 | 2088 | ||
2046 | #[test] | 2089 | #[test] |
2047 | fn expr_loop_missing_arm() { | 2090 | fn expr_loop_missing_arm() { |
2048 | let content = r" | 2091 | // This is a false negative. |
2092 | // We currently infer the type of `loop { break Foo::A }` to `!`, which | ||
2093 | // causes us to skip the diagnostic since `Either::A` doesn't type check | ||
2094 | // with `!`. | ||
2095 | check_diagnostic( | ||
2096 | r" | ||
2049 | enum Either { | 2097 | enum Either { |
2050 | A, | 2098 | A, |
2051 | B, | 2099 | B, |
@@ -2055,48 +2103,46 @@ mod false_negatives { | |||
2055 | Either::A => (), | 2103 | Either::A => (), |
2056 | } | 2104 | } |
2057 | } | 2105 | } |
2058 | "; | 2106 | ", |
2059 | 2107 | ); | |
2060 | // This is a false negative. | ||
2061 | // We currently infer the type of `loop { break Foo::A }` to `!`, which | ||
2062 | // causes us to skip the diagnostic since `Either::A` doesn't type check | ||
2063 | // with `!`. | ||
2064 | check_diagnostic(content); | ||
2065 | } | 2108 | } |
2066 | 2109 | ||
2067 | #[test] | 2110 | #[test] |
2068 | fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { | 2111 | fn tuple_of_bools_with_ellipsis_at_end_missing_arm() { |
2069 | let content = r" | 2112 | // This is a false negative. |
2113 | // We don't currently handle tuple patterns with ellipsis. | ||
2114 | check_no_diagnostic( | ||
2115 | r" | ||
2070 | fn test_fn() { | 2116 | fn test_fn() { |
2071 | match (false, true, false) { | 2117 | match (false, true, false) { |
2072 | (false, ..) => {}, | 2118 | (false, ..) => {}, |
2073 | } | 2119 | } |
2074 | } | 2120 | } |
2075 | "; | 2121 | ", |
2076 | 2122 | ); | |
2077 | // This is a false negative. | ||
2078 | // We don't currently handle tuple patterns with ellipsis. | ||
2079 | check_no_diagnostic(content); | ||
2080 | } | 2123 | } |
2081 | 2124 | ||
2082 | #[test] | 2125 | #[test] |
2083 | fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { | 2126 | fn tuple_of_bools_with_ellipsis_at_beginning_missing_arm() { |
2084 | let content = r" | 2127 | // This is a false negative. |
2128 | // We don't currently handle tuple patterns with ellipsis. | ||
2129 | check_no_diagnostic( | ||
2130 | r" | ||
2085 | fn test_fn() { | 2131 | fn test_fn() { |
2086 | match (false, true, false) { | 2132 | match (false, true, false) { |
2087 | (.., false) => {}, | 2133 | (.., false) => {}, |
2088 | } | 2134 | } |
2089 | } | 2135 | } |
2090 | "; | 2136 | ", |
2091 | 2137 | ); | |
2092 | // This is a false negative. | ||
2093 | // We don't currently handle tuple patterns with ellipsis. | ||
2094 | check_no_diagnostic(content); | ||
2095 | } | 2138 | } |
2096 | 2139 | ||
2097 | #[test] | 2140 | #[test] |
2098 | fn struct_missing_arm() { | 2141 | fn struct_missing_arm() { |
2099 | let content = r" | 2142 | // This is a false negative. |
2143 | // We don't currently handle structs. | ||
2144 | check_no_diagnostic( | ||
2145 | r" | ||
2100 | struct Foo { | 2146 | struct Foo { |
2101 | a: bool, | 2147 | a: bool, |
2102 | } | 2148 | } |
@@ -2105,10 +2151,7 @@ mod false_negatives { | |||
2105 | Foo { a: true } => {}, | 2151 | Foo { a: true } => {}, |
2106 | } | 2152 | } |
2107 | } | 2153 | } |
2108 | "; | 2154 | ", |
2109 | 2155 | ); | |
2110 | // This is a false negative. | ||
2111 | // We don't currently handle structs. | ||
2112 | check_no_diagnostic(content); | ||
2113 | } | 2156 | } |
2114 | } | 2157 | } |
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs index bf71d38d6..7889b8d2c 100644 --- a/crates/ra_hir_ty/src/db.rs +++ b/crates/ra_hir_ty/src/db.rs | |||
@@ -3,15 +3,15 @@ | |||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use hir_def::{ | 5 | use hir_def::{ |
6 | db::DefDatabase, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId, TraitId, | 6 | db::DefDatabase, DefWithBodyId, FunctionId, GenericDefId, ImplId, LocalFieldId, TypeParamId, |
7 | TypeParamId, VariantId, | 7 | VariantId, |
8 | }; | 8 | }; |
9 | use ra_arena::map::ArenaMap; | 9 | use ra_arena::map::ArenaMap; |
10 | use ra_db::{impl_intern_key, salsa, CrateId, Upcast}; | 10 | use ra_db::{impl_intern_key, salsa, CrateId, Upcast}; |
11 | use ra_prof::profile; | 11 | use ra_prof::profile; |
12 | 12 | ||
13 | use crate::{ | 13 | use crate::{ |
14 | method_resolution::{CrateImplDefs, TyFingerprint}, | 14 | method_resolution::CrateImplDefs, |
15 | traits::{chalk, AssocTyValue, Impl}, | 15 | traits::{chalk, AssocTyValue, Impl}, |
16 | Binders, CallableDef, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, | 16 | Binders, CallableDef, GenericPredicate, InferenceResult, OpaqueTyId, PolyFnSig, |
17 | ReturnTypeImplTraits, Substs, TraitRef, Ty, TyDefId, TypeCtor, ValueTyDefId, | 17 | ReturnTypeImplTraits, Substs, TraitRef, Ty, TyDefId, TypeCtor, ValueTyDefId, |
@@ -70,13 +70,8 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> { | |||
70 | #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_in_crate_query)] | 70 | #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_in_crate_query)] |
71 | fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplDefs>; | 71 | fn impls_in_crate(&self, krate: CrateId) -> Arc<CrateImplDefs>; |
72 | 72 | ||
73 | #[salsa::invoke(crate::traits::impls_for_trait_query)] | 73 | #[salsa::invoke(crate::method_resolution::CrateImplDefs::impls_from_deps_query)] |
74 | fn impls_for_trait( | 74 | fn impls_from_deps(&self, krate: CrateId) -> Arc<CrateImplDefs>; |
75 | &self, | ||
76 | krate: CrateId, | ||
77 | trait_: TraitId, | ||
78 | self_ty_fp: Option<TyFingerprint>, | ||
79 | ) -> Arc<[ImplId]>; | ||
80 | 75 | ||
81 | // Interned IDs for Chalk integration | 76 | // Interned IDs for Chalk integration |
82 | #[salsa::interned] | 77 | #[salsa::interned] |
diff --git a/crates/ra_hir_ty/src/diagnostics.rs b/crates/ra_hir_ty/src/diagnostics.rs index 2c7298714..ebd9cb08f 100644 --- a/crates/ra_hir_ty/src/diagnostics.rs +++ b/crates/ra_hir_ty/src/diagnostics.rs | |||
@@ -6,7 +6,7 @@ use hir_expand::{db::AstDatabase, name::Name, HirFileId, InFile}; | |||
6 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; | 6 | use ra_syntax::{ast, AstNode, AstPtr, SyntaxNodePtr}; |
7 | use stdx::format_to; | 7 | use stdx::format_to; |
8 | 8 | ||
9 | pub use hir_def::{diagnostics::UnresolvedModule, expr::MatchArm}; | 9 | pub use hir_def::{diagnostics::UnresolvedModule, expr::MatchArm, path::Path}; |
10 | pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink}; | 10 | pub use hir_expand::diagnostics::{AstDiagnostic, Diagnostic, DiagnosticSink}; |
11 | 11 | ||
12 | #[derive(Debug)] | 12 | #[derive(Debug)] |
@@ -29,6 +29,16 @@ impl Diagnostic for NoSuchField { | |||
29 | } | 29 | } |
30 | } | 30 | } |
31 | 31 | ||
32 | impl AstDiagnostic for NoSuchField { | ||
33 | type AST = ast::RecordField; | ||
34 | |||
35 | fn ast(&self, db: &impl AstDatabase) -> Self::AST { | ||
36 | let root = db.parse_or_expand(self.source().file_id).unwrap(); | ||
37 | let node = self.source().value.to_node(&root); | ||
38 | ast::RecordField::cast(node).unwrap() | ||
39 | } | ||
40 | } | ||
41 | |||
32 | #[derive(Debug)] | 42 | #[derive(Debug)] |
33 | pub struct MissingFields { | 43 | pub struct MissingFields { |
34 | pub file: HirFileId, | 44 | pub file: HirFileId, |
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs index 9fd310f69..a9565a58d 100644 --- a/crates/ra_hir_ty/src/infer/expr.rs +++ b/crates/ra_hir_ty/src/infer/expr.rs | |||
@@ -10,12 +10,12 @@ use hir_def::{ | |||
10 | resolver::resolver_for_expr, | 10 | resolver::resolver_for_expr, |
11 | AdtId, AssocContainerId, FieldId, Lookup, | 11 | AdtId, AssocContainerId, FieldId, Lookup, |
12 | }; | 12 | }; |
13 | use hir_expand::name::Name; | 13 | use hir_expand::name::{name, Name}; |
14 | use ra_syntax::ast::RangeOp; | 14 | use ra_syntax::ast::RangeOp; |
15 | 15 | ||
16 | use crate::{ | 16 | use crate::{ |
17 | autoderef, method_resolution, op, | 17 | autoderef, method_resolution, op, |
18 | traits::InEnvironment, | 18 | traits::{FnTrait, InEnvironment}, |
19 | utils::{generics, variant_data, Generics}, | 19 | utils::{generics, variant_data, Generics}, |
20 | ApplicationTy, Binders, CallableDef, InferTy, IntTy, Mutability, Obligation, Rawness, Substs, | 20 | ApplicationTy, Binders, CallableDef, InferTy, IntTy, Mutability, Obligation, Rawness, Substs, |
21 | TraitRef, Ty, TypeCtor, | 21 | TraitRef, Ty, TypeCtor, |
@@ -63,6 +63,58 @@ impl<'a> InferenceContext<'a> { | |||
63 | self.resolve_ty_as_possible(ty) | 63 | self.resolve_ty_as_possible(ty) |
64 | } | 64 | } |
65 | 65 | ||
66 | fn callable_sig_from_fn_trait(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> { | ||
67 | let krate = self.resolver.krate()?; | ||
68 | let fn_once_trait = FnTrait::FnOnce.get_id(self.db, krate)?; | ||
69 | let output_assoc_type = | ||
70 | self.db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?; | ||
71 | let generic_params = generics(self.db.upcast(), fn_once_trait.into()); | ||
72 | if generic_params.len() != 2 { | ||
73 | return None; | ||
74 | } | ||
75 | |||
76 | let mut param_builder = Substs::builder(num_args); | ||
77 | let mut arg_tys = vec![]; | ||
78 | for _ in 0..num_args { | ||
79 | let arg = self.table.new_type_var(); | ||
80 | param_builder = param_builder.push(arg.clone()); | ||
81 | arg_tys.push(arg); | ||
82 | } | ||
83 | let parameters = param_builder.build(); | ||
84 | let arg_ty = Ty::Apply(ApplicationTy { | ||
85 | ctor: TypeCtor::Tuple { cardinality: num_args as u16 }, | ||
86 | parameters, | ||
87 | }); | ||
88 | let substs = Substs::build_for_generics(&generic_params) | ||
89 | .push(ty.clone()) | ||
90 | .push(arg_ty.clone()) | ||
91 | .build(); | ||
92 | |||
93 | let trait_env = Arc::clone(&self.trait_env); | ||
94 | let implements_fn_trait = | ||
95 | Obligation::Trait(TraitRef { trait_: fn_once_trait, substs: substs.clone() }); | ||
96 | let goal = self.canonicalizer().canonicalize_obligation(InEnvironment { | ||
97 | value: implements_fn_trait.clone(), | ||
98 | environment: trait_env, | ||
99 | }); | ||
100 | if self.db.trait_solve(krate, goal.value).is_some() { | ||
101 | self.obligations.push(implements_fn_trait); | ||
102 | let output_proj_ty = | ||
103 | crate::ProjectionTy { associated_ty: output_assoc_type, parameters: substs }; | ||
104 | let return_ty = self.normalize_projection_ty(output_proj_ty); | ||
105 | Some((arg_tys, return_ty)) | ||
106 | } else { | ||
107 | None | ||
108 | } | ||
109 | } | ||
110 | |||
111 | pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec<Ty>, Ty)> { | ||
112 | match ty.callable_sig(self.db) { | ||
113 | Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), | ||
114 | None => self.callable_sig_from_fn_trait(ty, num_args), | ||
115 | } | ||
116 | } | ||
117 | |||
66 | fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { | 118 | fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { |
67 | let body = Arc::clone(&self.body); // avoid borrow checker problem | 119 | let body = Arc::clone(&self.body); // avoid borrow checker problem |
68 | let ty = match &body[tgt_expr] { | 120 | let ty = match &body[tgt_expr] { |
@@ -198,14 +250,23 @@ impl<'a> InferenceContext<'a> { | |||
198 | } | 250 | } |
199 | Expr::Call { callee, args } => { | 251 | Expr::Call { callee, args } => { |
200 | let callee_ty = self.infer_expr(*callee, &Expectation::none()); | 252 | let callee_ty = self.infer_expr(*callee, &Expectation::none()); |
201 | let (param_tys, ret_ty) = match callee_ty.callable_sig(self.db) { | 253 | let canonicalized = self.canonicalizer().canonicalize_ty(callee_ty.clone()); |
202 | Some(sig) => (sig.params().to_vec(), sig.ret().clone()), | 254 | let mut derefs = autoderef( |
203 | None => { | 255 | self.db, |
204 | // Not callable | 256 | self.resolver.krate(), |
205 | // FIXME: report an error | 257 | InEnvironment { |
206 | (Vec::new(), Ty::Unknown) | 258 | value: canonicalized.value.clone(), |
207 | } | 259 | environment: self.trait_env.clone(), |
208 | }; | 260 | }, |
261 | ); | ||
262 | let (param_tys, ret_ty): (Vec<Ty>, Ty) = derefs | ||
263 | .find_map(|callee_deref_ty| { | ||
264 | self.callable_sig( | ||
265 | &canonicalized.decanonicalize_ty(callee_deref_ty.value), | ||
266 | args.len(), | ||
267 | ) | ||
268 | }) | ||
269 | .unwrap_or((Vec::new(), Ty::Unknown)); | ||
209 | self.register_obligations_for_call(&callee_ty); | 270 | self.register_obligations_for_call(&callee_ty); |
210 | self.check_call_arguments(args, ¶m_tys); | 271 | self.check_call_arguments(args, ¶m_tys); |
211 | self.normalize_associated_types_in(ret_ty) | 272 | self.normalize_associated_types_in(ret_ty) |
diff --git a/crates/ra_hir_ty/src/infer/path.rs b/crates/ra_hir_ty/src/infer/path.rs index 1ad0d8397..80d7ed10e 100644 --- a/crates/ra_hir_ty/src/infer/path.rs +++ b/crates/ra_hir_ty/src/infer/path.rs | |||
@@ -81,7 +81,7 @@ impl<'a> InferenceContext<'a> { | |||
81 | let generics = crate::utils::generics(self.db.upcast(), impl_id.into()); | 81 | let generics = crate::utils::generics(self.db.upcast(), impl_id.into()); |
82 | let substs = Substs::type_params_for_generics(&generics); | 82 | let substs = Substs::type_params_for_generics(&generics); |
83 | let ty = self.db.impl_self_ty(impl_id).subst(&substs); | 83 | let ty = self.db.impl_self_ty(impl_id).subst(&substs); |
84 | if let Some((AdtId::StructId(struct_id), _)) = ty.as_adt() { | 84 | if let Some((AdtId::StructId(struct_id), substs)) = ty.as_adt() { |
85 | let ty = self.db.value_ty(struct_id.into()).subst(&substs); | 85 | let ty = self.db.value_ty(struct_id.into()).subst(&substs); |
86 | return Some(ty); | 86 | return Some(ty); |
87 | } else { | 87 | } else { |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs index 2b9372b4b..f22232324 100644 --- a/crates/ra_hir_ty/src/lib.rs +++ b/crates/ra_hir_ty/src/lib.rs | |||
@@ -73,6 +73,7 @@ pub use lower::{ | |||
73 | pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; | 73 | pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; |
74 | 74 | ||
75 | pub use chalk_ir::{BoundVar, DebruijnIndex}; | 75 | pub use chalk_ir::{BoundVar, DebruijnIndex}; |
76 | use itertools::Itertools; | ||
76 | 77 | ||
77 | /// A type constructor or type name: this might be something like the primitive | 78 | /// A type constructor or type name: this might be something like the primitive |
78 | /// type `bool`, a struct like `Vec`, or things like function pointers or | 79 | /// type `bool`, a struct like `Vec`, or things like function pointers or |
@@ -815,6 +816,11 @@ impl Ty { | |||
815 | } | 816 | } |
816 | } | 817 | } |
817 | 818 | ||
819 | /// If this is a `dyn Trait`, returns that trait. | ||
820 | pub fn dyn_trait(&self) -> Option<TraitId> { | ||
821 | self.dyn_trait_ref().map(|it| it.trait_) | ||
822 | } | ||
823 | |||
818 | fn builtin_deref(&self) -> Option<Ty> { | 824 | fn builtin_deref(&self) -> Option<Ty> { |
819 | match self { | 825 | match self { |
820 | Ty::Apply(a_ty) => match a_ty.ctor { | 826 | Ty::Apply(a_ty) => match a_ty.ctor { |
@@ -867,13 +873,56 @@ impl Ty { | |||
867 | } | 873 | } |
868 | } | 874 | } |
869 | 875 | ||
870 | /// If this is a `dyn Trait`, returns that trait. | 876 | pub fn impl_trait_bounds(&self, db: &dyn HirDatabase) -> Option<Vec<GenericPredicate>> { |
871 | pub fn dyn_trait(&self) -> Option<TraitId> { | ||
872 | match self { | 877 | match self { |
873 | Ty::Dyn(predicates) => predicates.iter().find_map(|pred| match pred { | 878 | Ty::Opaque(opaque_ty) => { |
874 | GenericPredicate::Implemented(tr) => Some(tr.trait_), | 879 | let predicates = match opaque_ty.opaque_ty_id { |
875 | _ => None, | 880 | OpaqueTyId::ReturnTypeImplTrait(func, idx) => { |
876 | }), | 881 | db.return_type_impl_traits(func).map(|it| { |
882 | let data = (*it) | ||
883 | .as_ref() | ||
884 | .map(|rpit| rpit.impl_traits[idx as usize].bounds.clone()); | ||
885 | data.clone().subst(&opaque_ty.parameters) | ||
886 | }) | ||
887 | } | ||
888 | }; | ||
889 | |||
890 | predicates.map(|it| it.value) | ||
891 | } | ||
892 | Ty::Placeholder(id) => { | ||
893 | let generic_params = db.generic_params(id.parent); | ||
894 | let param_data = &generic_params.types[id.local_id]; | ||
895 | match param_data.provenance { | ||
896 | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => { | ||
897 | let predicates = db | ||
898 | .generic_predicates_for_param(*id) | ||
899 | .into_iter() | ||
900 | .map(|pred| pred.value.clone()) | ||
901 | .collect_vec(); | ||
902 | |||
903 | Some(predicates) | ||
904 | } | ||
905 | _ => None, | ||
906 | } | ||
907 | } | ||
908 | _ => None, | ||
909 | } | ||
910 | } | ||
911 | |||
912 | pub fn associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<TraitId> { | ||
913 | match self { | ||
914 | Ty::Apply(ApplicationTy { ctor: TypeCtor::AssociatedType(type_alias_id), .. }) => { | ||
915 | match type_alias_id.lookup(db.upcast()).container { | ||
916 | AssocContainerId::TraitId(trait_id) => Some(trait_id), | ||
917 | _ => None, | ||
918 | } | ||
919 | } | ||
920 | Ty::Projection(projection_ty) => { | ||
921 | match projection_ty.associated_ty.lookup(db.upcast()).container { | ||
922 | AssocContainerId::TraitId(trait_id) => Some(trait_id), | ||
923 | _ => None, | ||
924 | } | ||
925 | } | ||
877 | _ => None, | 926 | _ => None, |
878 | } | 927 | } |
879 | } | 928 | } |
@@ -1057,5 +1106,5 @@ pub struct ReturnTypeImplTraits { | |||
1057 | 1106 | ||
1058 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | 1107 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] |
1059 | pub(crate) struct ReturnTypeImplTrait { | 1108 | pub(crate) struct ReturnTypeImplTrait { |
1060 | pub(crate) bounds: Binders<Vec<GenericPredicate>>, | 1109 | pub bounds: Binders<Vec<GenericPredicate>>, |
1061 | } | 1110 | } |
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs index 42713928f..d5154f436 100644 --- a/crates/ra_hir_ty/src/lower.rs +++ b/crates/ra_hir_ty/src/lower.rs | |||
@@ -337,17 +337,17 @@ impl Ty { | |||
337 | TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty); | 337 | TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty); |
338 | let ty = if remaining_segments.len() == 1 { | 338 | let ty = if remaining_segments.len() == 1 { |
339 | let segment = remaining_segments.first().unwrap(); | 339 | let segment = remaining_segments.first().unwrap(); |
340 | let associated_ty = associated_type_by_name_including_super_traits( | 340 | let found = associated_type_by_name_including_super_traits( |
341 | ctx.db.upcast(), | 341 | ctx.db, |
342 | trait_ref.trait_, | 342 | trait_ref.clone(), |
343 | &segment.name, | 343 | &segment.name, |
344 | ); | 344 | ); |
345 | match associated_ty { | 345 | match found { |
346 | Some(associated_ty) => { | 346 | Some((super_trait_ref, associated_ty)) => { |
347 | // FIXME handle type parameters on the segment | 347 | // FIXME handle type parameters on the segment |
348 | Ty::Projection(ProjectionTy { | 348 | Ty::Projection(ProjectionTy { |
349 | associated_ty, | 349 | associated_ty, |
350 | parameters: trait_ref.substs, | 350 | parameters: super_trait_ref.substs, |
351 | }) | 351 | }) |
352 | } | 352 | } |
353 | None => { | 353 | None => { |
@@ -467,6 +467,9 @@ impl Ty { | |||
467 | } | 467 | } |
468 | TypeParamLoweringMode::Variable => t.substs.clone(), | 468 | TypeParamLoweringMode::Variable => t.substs.clone(), |
469 | }; | 469 | }; |
470 | // We need to shift in the bound vars, since | ||
471 | // associated_type_shorthand_candidates does not do that | ||
472 | let substs = substs.shift_bound_vars(ctx.in_binders); | ||
470 | // FIXME handle type parameters on the segment | 473 | // FIXME handle type parameters on the segment |
471 | return Some(Ty::Projection(ProjectionTy { | 474 | return Some(Ty::Projection(ProjectionTy { |
472 | associated_ty, | 475 | associated_ty, |
@@ -706,17 +709,17 @@ fn assoc_type_bindings_from_type_bound<'a>( | |||
706 | .flat_map(|segment| segment.args_and_bindings.into_iter()) | 709 | .flat_map(|segment| segment.args_and_bindings.into_iter()) |
707 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) | 710 | .flat_map(|args_and_bindings| args_and_bindings.bindings.iter()) |
708 | .flat_map(move |binding| { | 711 | .flat_map(move |binding| { |
709 | let associated_ty = associated_type_by_name_including_super_traits( | 712 | let found = associated_type_by_name_including_super_traits( |
710 | ctx.db.upcast(), | 713 | ctx.db, |
711 | trait_ref.trait_, | 714 | trait_ref.clone(), |
712 | &binding.name, | 715 | &binding.name, |
713 | ); | 716 | ); |
714 | let associated_ty = match associated_ty { | 717 | let (super_trait_ref, associated_ty) = match found { |
715 | None => return SmallVec::<[GenericPredicate; 1]>::new(), | 718 | None => return SmallVec::<[GenericPredicate; 1]>::new(), |
716 | Some(t) => t, | 719 | Some(t) => t, |
717 | }; | 720 | }; |
718 | let projection_ty = | 721 | let projection_ty = |
719 | ProjectionTy { associated_ty, parameters: trait_ref.substs.clone() }; | 722 | ProjectionTy { associated_ty, parameters: super_trait_ref.substs.clone() }; |
720 | let mut preds = SmallVec::with_capacity( | 723 | let mut preds = SmallVec::with_capacity( |
721 | binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), | 724 | binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(), |
722 | ); | 725 | ); |
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs index e83b39456..ed638c195 100644 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ b/crates/ra_hir_ty/src/method_resolution.rs | |||
@@ -38,18 +38,53 @@ impl TyFingerprint { | |||
38 | } | 38 | } |
39 | } | 39 | } |
40 | 40 | ||
41 | /// A queryable and mergeable collection of impls. | ||
41 | #[derive(Debug, PartialEq, Eq)] | 42 | #[derive(Debug, PartialEq, Eq)] |
42 | pub struct CrateImplDefs { | 43 | pub struct CrateImplDefs { |
43 | impls: FxHashMap<TyFingerprint, Vec<ImplId>>, | 44 | inherent_impls: FxHashMap<TyFingerprint, Vec<ImplId>>, |
44 | impls_by_trait: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>, | 45 | impls_by_trait: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>, |
45 | } | 46 | } |
46 | 47 | ||
47 | impl CrateImplDefs { | 48 | impl CrateImplDefs { |
48 | pub(crate) fn impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<CrateImplDefs> { | 49 | pub(crate) fn impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<CrateImplDefs> { |
49 | let _p = profile("impls_in_crate_query"); | 50 | let _p = profile("impls_in_crate_query"); |
50 | let mut res = | 51 | let mut res = CrateImplDefs { |
51 | CrateImplDefs { impls: FxHashMap::default(), impls_by_trait: FxHashMap::default() }; | 52 | inherent_impls: FxHashMap::default(), |
53 | impls_by_trait: FxHashMap::default(), | ||
54 | }; | ||
55 | res.fill(db, krate); | ||
56 | |||
57 | Arc::new(res) | ||
58 | } | ||
59 | |||
60 | /// Collects all impls from transitive dependencies of `krate` that may be used by `krate`. | ||
61 | /// | ||
62 | /// The full set of impls that can be used by `krate` is the returned map plus all the impls | ||
63 | /// from `krate` itself. | ||
64 | pub(crate) fn impls_from_deps_query( | ||
65 | db: &dyn HirDatabase, | ||
66 | krate: CrateId, | ||
67 | ) -> Arc<CrateImplDefs> { | ||
68 | let _p = profile("impls_from_deps_query"); | ||
69 | let crate_graph = db.crate_graph(); | ||
70 | let mut res = CrateImplDefs { | ||
71 | inherent_impls: FxHashMap::default(), | ||
72 | impls_by_trait: FxHashMap::default(), | ||
73 | }; | ||
52 | 74 | ||
75 | // For each dependency, calculate `impls_from_deps` recursively, then add its own | ||
76 | // `impls_in_crate`. | ||
77 | // As we might visit crates multiple times, `merge` has to deduplicate impls to avoid | ||
78 | // wasting memory. | ||
79 | for dep in &crate_graph[krate].dependencies { | ||
80 | res.merge(&db.impls_from_deps(dep.crate_id)); | ||
81 | res.merge(&db.impls_in_crate(dep.crate_id)); | ||
82 | } | ||
83 | |||
84 | Arc::new(res) | ||
85 | } | ||
86 | |||
87 | fn fill(&mut self, db: &dyn HirDatabase, krate: CrateId) { | ||
53 | let crate_def_map = db.crate_def_map(krate); | 88 | let crate_def_map = db.crate_def_map(krate); |
54 | for (_module_id, module_data) in crate_def_map.modules.iter() { | 89 | for (_module_id, module_data) in crate_def_map.modules.iter() { |
55 | for impl_id in module_data.scope.impls() { | 90 | for impl_id in module_data.scope.impls() { |
@@ -57,7 +92,7 @@ impl CrateImplDefs { | |||
57 | Some(tr) => { | 92 | Some(tr) => { |
58 | let self_ty = db.impl_self_ty(impl_id); | 93 | let self_ty = db.impl_self_ty(impl_id); |
59 | let self_ty_fp = TyFingerprint::for_impl(&self_ty.value); | 94 | let self_ty_fp = TyFingerprint::for_impl(&self_ty.value); |
60 | res.impls_by_trait | 95 | self.impls_by_trait |
61 | .entry(tr.value.trait_) | 96 | .entry(tr.value.trait_) |
62 | .or_default() | 97 | .or_default() |
63 | .entry(self_ty_fp) | 98 | .entry(self_ty_fp) |
@@ -67,18 +102,36 @@ impl CrateImplDefs { | |||
67 | None => { | 102 | None => { |
68 | let self_ty = db.impl_self_ty(impl_id); | 103 | let self_ty = db.impl_self_ty(impl_id); |
69 | if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty.value) { | 104 | if let Some(self_ty_fp) = TyFingerprint::for_impl(&self_ty.value) { |
70 | res.impls.entry(self_ty_fp).or_default().push(impl_id); | 105 | self.inherent_impls.entry(self_ty_fp).or_default().push(impl_id); |
71 | } | 106 | } |
72 | } | 107 | } |
73 | } | 108 | } |
74 | } | 109 | } |
75 | } | 110 | } |
111 | } | ||
76 | 112 | ||
77 | Arc::new(res) | 113 | fn merge(&mut self, other: &Self) { |
114 | for (fp, impls) in &other.inherent_impls { | ||
115 | let vec = self.inherent_impls.entry(*fp).or_default(); | ||
116 | vec.extend(impls); | ||
117 | vec.sort(); | ||
118 | vec.dedup(); | ||
119 | } | ||
120 | |||
121 | for (trait_, other_map) in &other.impls_by_trait { | ||
122 | let map = self.impls_by_trait.entry(*trait_).or_default(); | ||
123 | for (fp, impls) in other_map { | ||
124 | let vec = map.entry(*fp).or_default(); | ||
125 | vec.extend(impls); | ||
126 | vec.sort(); | ||
127 | vec.dedup(); | ||
128 | } | ||
129 | } | ||
78 | } | 130 | } |
131 | |||
79 | pub fn lookup_impl_defs(&self, ty: &Ty) -> impl Iterator<Item = ImplId> + '_ { | 132 | pub fn lookup_impl_defs(&self, ty: &Ty) -> impl Iterator<Item = ImplId> + '_ { |
80 | let fingerprint = TyFingerprint::for_impl(ty); | 133 | let fingerprint = TyFingerprint::for_impl(ty); |
81 | fingerprint.and_then(|f| self.impls.get(&f)).into_iter().flatten().copied() | 134 | fingerprint.and_then(|f| self.inherent_impls.get(&f)).into_iter().flatten().copied() |
82 | } | 135 | } |
83 | 136 | ||
84 | pub fn lookup_impl_defs_for_trait(&self, tr: TraitId) -> impl Iterator<Item = ImplId> + '_ { | 137 | pub fn lookup_impl_defs_for_trait(&self, tr: TraitId) -> impl Iterator<Item = ImplId> + '_ { |
@@ -110,7 +163,7 @@ impl CrateImplDefs { | |||
110 | } | 163 | } |
111 | 164 | ||
112 | pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplId> + 'a { | 165 | pub fn all_impls<'a>(&'a self) -> impl Iterator<Item = ImplId> + 'a { |
113 | self.impls | 166 | self.inherent_impls |
114 | .values() | 167 | .values() |
115 | .chain(self.impls_by_trait.values().flat_map(|m| m.values())) | 168 | .chain(self.impls_by_trait.values().flat_map(|m| m.values())) |
116 | .flatten() | 169 | .flatten() |
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs index 1f004bd63..4da2e972b 100644 --- a/crates/ra_hir_ty/src/tests/regression.rs +++ b/crates/ra_hir_ty/src/tests/regression.rs | |||
@@ -633,3 +633,154 @@ where | |||
633 | "### | 633 | "### |
634 | ); | 634 | ); |
635 | } | 635 | } |
636 | |||
637 | #[test] | ||
638 | fn issue_4953() { | ||
639 | assert_snapshot!( | ||
640 | infer(r#" | ||
641 | pub struct Foo(pub i64); | ||
642 | impl Foo { | ||
643 | fn test() -> Self { Self(0i64) } | ||
644 | } | ||
645 | "#), | ||
646 | @r###" | ||
647 | 59..73 '{ Self(0i64) }': Foo | ||
648 | 61..65 'Self': Foo(i64) -> Foo | ||
649 | 61..71 'Self(0i64)': Foo | ||
650 | 66..70 '0i64': i64 | ||
651 | "### | ||
652 | ); | ||
653 | assert_snapshot!( | ||
654 | infer(r#" | ||
655 | pub struct Foo<T>(pub T); | ||
656 | impl Foo<i64> { | ||
657 | fn test() -> Self { Self(0i64) } | ||
658 | } | ||
659 | "#), | ||
660 | @r###" | ||
661 | 65..79 '{ Self(0i64) }': Foo<i64> | ||
662 | 67..71 'Self': Foo<i64>(i64) -> Foo<i64> | ||
663 | 67..77 'Self(0i64)': Foo<i64> | ||
664 | 72..76 '0i64': i64 | ||
665 | "### | ||
666 | ); | ||
667 | } | ||
668 | |||
669 | #[test] | ||
670 | fn issue_4931() { | ||
671 | assert_snapshot!( | ||
672 | infer(r#" | ||
673 | trait Div<T> { | ||
674 | type Output; | ||
675 | } | ||
676 | |||
677 | trait CheckedDiv: Div<()> {} | ||
678 | |||
679 | trait PrimInt: CheckedDiv<Output = ()> { | ||
680 | fn pow(self); | ||
681 | } | ||
682 | |||
683 | fn check<T: PrimInt>(i: T) { | ||
684 | i.pow(); | ||
685 | } | ||
686 | "#), | ||
687 | @r###" | ||
688 | 118..122 'self': Self | ||
689 | 149..150 'i': T | ||
690 | 155..171 '{ ...w(); }': () | ||
691 | 161..162 'i': T | ||
692 | 161..168 'i.pow()': () | ||
693 | "### | ||
694 | ); | ||
695 | } | ||
696 | |||
697 | #[test] | ||
698 | fn issue_4885() { | ||
699 | assert_snapshot!( | ||
700 | infer(r#" | ||
701 | #[lang = "coerce_unsized"] | ||
702 | pub trait CoerceUnsized<T> {} | ||
703 | |||
704 | trait Future { | ||
705 | type Output; | ||
706 | } | ||
707 | trait Foo<R> { | ||
708 | type Bar; | ||
709 | } | ||
710 | fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar> | ||
711 | where | ||
712 | K: Foo<R>, | ||
713 | { | ||
714 | bar(key) | ||
715 | } | ||
716 | fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar> | ||
717 | where | ||
718 | K: Foo<R>, | ||
719 | { | ||
720 | } | ||
721 | "#), | ||
722 | @r###" | ||
723 | 137..140 'key': &K | ||
724 | 199..215 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar> | ||
725 | 205..208 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar> | ||
726 | 205..213 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar> | ||
727 | 209..212 'key': &K | ||
728 | 229..232 'key': &K | ||
729 | 291..294 '{ }': () | ||
730 | "### | ||
731 | ); | ||
732 | } | ||
733 | |||
734 | #[test] | ||
735 | fn issue_4800() { | ||
736 | assert_snapshot!( | ||
737 | infer(r#" | ||
738 | trait Debug {} | ||
739 | |||
740 | struct Foo<T>; | ||
741 | |||
742 | type E1<T> = (T, T, T); | ||
743 | type E2<T> = E1<E1<E1<(T, T, T)>>>; | ||
744 | |||
745 | impl Debug for Foo<E2<()>> {} | ||
746 | |||
747 | struct Request; | ||
748 | |||
749 | pub trait Future { | ||
750 | type Output; | ||
751 | } | ||
752 | |||
753 | pub struct PeerSet<D>; | ||
754 | |||
755 | impl<D> Service<Request> for PeerSet<D> | ||
756 | where | ||
757 | D: Discover, | ||
758 | D::Key: Debug, | ||
759 | { | ||
760 | type Error = (); | ||
761 | type Future = dyn Future<Output = Self::Error>; | ||
762 | |||
763 | fn call(&mut self) -> Self::Future { | ||
764 | loop {} | ||
765 | } | ||
766 | } | ||
767 | |||
768 | pub trait Discover { | ||
769 | type Key; | ||
770 | } | ||
771 | |||
772 | pub trait Service<Request> { | ||
773 | type Error; | ||
774 | type Future: Future<Output = Self::Error>; | ||
775 | fn call(&mut self) -> Self::Future; | ||
776 | } | ||
777 | "#), | ||
778 | @r###" | ||
779 | 380..384 'self': &mut PeerSet<D> | ||
780 | 402..425 '{ ... }': dyn Future<Output = ()> | ||
781 | 412..419 'loop {}': ! | ||
782 | 417..419 '{}': () | ||
783 | 576..580 'self': &mut Self | ||
784 | "### | ||
785 | ); | ||
786 | } | ||
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs index e81193a3c..961be4abd 100644 --- a/crates/ra_hir_ty/src/tests/traits.rs +++ b/crates/ra_hir_ty/src/tests/traits.rs | |||
@@ -2888,3 +2888,226 @@ impl<A: Step> iter::Iterator for ops::Range<A> { | |||
2888 | ); | 2888 | ); |
2889 | assert_eq!(t, "i32"); | 2889 | assert_eq!(t, "i32"); |
2890 | } | 2890 | } |
2891 | |||
2892 | #[test] | ||
2893 | fn infer_closure_arg() { | ||
2894 | assert_snapshot!( | ||
2895 | infer( | ||
2896 | r#" | ||
2897 | //- /lib.rs | ||
2898 | |||
2899 | enum Option<T> { | ||
2900 | None, | ||
2901 | Some(T) | ||
2902 | } | ||
2903 | |||
2904 | fn foo() { | ||
2905 | let s = Option::None; | ||
2906 | let f = |x: Option<i32>| {}; | ||
2907 | (&f)(s) | ||
2908 | } | ||
2909 | "# | ||
2910 | ), | ||
2911 | @r###" | ||
2912 | 137..259 '{ ... }': () | ||
2913 | 159..160 's': Option<i32> | ||
2914 | 163..175 'Option::None': Option<i32> | ||
2915 | 197..198 'f': |Option<i32>| -> () | ||
2916 | 201..220 '|x: Op...2>| {}': |Option<i32>| -> () | ||
2917 | 202..203 'x': Option<i32> | ||
2918 | 218..220 '{}': () | ||
2919 | 238..245 '(&f)(s)': () | ||
2920 | 239..241 '&f': &|Option<i32>| -> () | ||
2921 | 240..241 'f': |Option<i32>| -> () | ||
2922 | 243..244 's': Option<i32> | ||
2923 | "### | ||
2924 | ); | ||
2925 | } | ||
2926 | |||
2927 | #[test] | ||
2928 | fn infer_fn_trait_arg() { | ||
2929 | assert_snapshot!( | ||
2930 | infer( | ||
2931 | r#" | ||
2932 | //- /lib.rs deps:std | ||
2933 | |||
2934 | #[lang = "fn_once"] | ||
2935 | pub trait FnOnce<Args> { | ||
2936 | type Output; | ||
2937 | |||
2938 | extern "rust-call" fn call_once(&self, args: Args) -> Self::Output; | ||
2939 | } | ||
2940 | |||
2941 | #[lang = "fn"] | ||
2942 | pub trait Fn<Args>:FnOnce<Args> { | ||
2943 | extern "rust-call" fn call(&self, args: Args) -> Self::Output; | ||
2944 | } | ||
2945 | |||
2946 | enum Option<T> { | ||
2947 | None, | ||
2948 | Some(T) | ||
2949 | } | ||
2950 | |||
2951 | fn foo<F, T>(f: F) -> T | ||
2952 | where | ||
2953 | F: Fn(Option<i32>) -> T, | ||
2954 | { | ||
2955 | let s = None; | ||
2956 | f(s) | ||
2957 | } | ||
2958 | "# | ||
2959 | ), | ||
2960 | @r###" | ||
2961 | 183..187 'self': &Self | ||
2962 | 189..193 'args': Args | ||
2963 | 350..354 'self': &Self | ||
2964 | 356..360 'args': Args | ||
2965 | 515..516 'f': F | ||
2966 | 597..663 '{ ... }': T | ||
2967 | 619..620 's': Option<i32> | ||
2968 | 623..627 'None': Option<i32> | ||
2969 | 645..646 'f': F | ||
2970 | 645..649 'f(s)': T | ||
2971 | 647..648 's': Option<i32> | ||
2972 | "### | ||
2973 | ); | ||
2974 | } | ||
2975 | |||
2976 | #[test] | ||
2977 | fn infer_box_fn_arg() { | ||
2978 | assert_snapshot!( | ||
2979 | infer( | ||
2980 | r#" | ||
2981 | //- /lib.rs deps:std | ||
2982 | |||
2983 | #[lang = "fn_once"] | ||
2984 | pub trait FnOnce<Args> { | ||
2985 | type Output; | ||
2986 | |||
2987 | extern "rust-call" fn call_once(self, args: Args) -> Self::Output; | ||
2988 | } | ||
2989 | |||
2990 | #[lang = "deref"] | ||
2991 | pub trait Deref { | ||
2992 | type Target: ?Sized; | ||
2993 | |||
2994 | fn deref(&self) -> &Self::Target; | ||
2995 | } | ||
2996 | |||
2997 | #[lang = "owned_box"] | ||
2998 | pub struct Box<T: ?Sized> { | ||
2999 | inner: *mut T, | ||
3000 | } | ||
3001 | |||
3002 | impl<T: ?Sized> Deref for Box<T> { | ||
3003 | type Target = T; | ||
3004 | |||
3005 | fn deref(&self) -> &T { | ||
3006 | &self.inner | ||
3007 | } | ||
3008 | } | ||
3009 | |||
3010 | enum Option<T> { | ||
3011 | None, | ||
3012 | Some(T) | ||
3013 | } | ||
3014 | |||
3015 | fn foo() { | ||
3016 | let s = Option::None; | ||
3017 | let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {}); | ||
3018 | f(&s) | ||
3019 | } | ||
3020 | "# | ||
3021 | ), | ||
3022 | @r###" | ||
3023 | 182..186 'self': Self | ||
3024 | 188..192 'args': Args | ||
3025 | 356..360 'self': &Self | ||
3026 | 622..626 'self': &Box<T> | ||
3027 | 634..685 '{ ... }': &T | ||
3028 | 656..667 '&self.inner': &*mut T | ||
3029 | 657..661 'self': &Box<T> | ||
3030 | 657..667 'self.inner': *mut T | ||
3031 | 812..957 '{ ... }': FnOnce::Output<dyn FnOnce<(&Option<i32>,)>, (&Option<i32>,)> | ||
3032 | 834..835 's': Option<i32> | ||
3033 | 838..850 'Option::None': Option<i32> | ||
3034 | 872..873 'f': Box<dyn FnOnce<(&Option<i32>,)>> | ||
3035 | 907..920 'box (|ps| {})': Box<|{unknown}| -> ()> | ||
3036 | 912..919 '|ps| {}': |{unknown}| -> () | ||
3037 | 913..915 'ps': {unknown} | ||
3038 | 917..919 '{}': () | ||
3039 | 938..939 'f': Box<dyn FnOnce<(&Option<i32>,)>> | ||
3040 | 938..943 'f(&s)': FnOnce::Output<dyn FnOnce<(&Option<i32>,)>, (&Option<i32>,)> | ||
3041 | 940..942 '&s': &Option<i32> | ||
3042 | 941..942 's': Option<i32> | ||
3043 | "### | ||
3044 | ); | ||
3045 | } | ||
3046 | |||
3047 | #[test] | ||
3048 | fn infer_dyn_fn_output() { | ||
3049 | assert_snapshot!( | ||
3050 | infer( | ||
3051 | r#" | ||
3052 | //- /lib.rs deps:std | ||
3053 | |||
3054 | #[lang = "fn_once"] | ||
3055 | pub trait FnOnce<Args> { | ||
3056 | type Output; | ||
3057 | |||
3058 | extern "rust-call" fn call_once(self, args: Args) -> Self::Output; | ||
3059 | } | ||
3060 | |||
3061 | #[lang = "fn"] | ||
3062 | pub trait Fn<Args>:FnOnce<Args> { | ||
3063 | extern "rust-call" fn call(&self, args: Args) -> Self::Output; | ||
3064 | } | ||
3065 | |||
3066 | #[lang = "deref"] | ||
3067 | pub trait Deref { | ||
3068 | type Target: ?Sized; | ||
3069 | |||
3070 | fn deref(&self) -> &Self::Target; | ||
3071 | } | ||
3072 | |||
3073 | #[lang = "owned_box"] | ||
3074 | pub struct Box<T: ?Sized> { | ||
3075 | inner: *mut T, | ||
3076 | } | ||
3077 | |||
3078 | impl<T: ?Sized> Deref for Box<T> { | ||
3079 | type Target = T; | ||
3080 | |||
3081 | fn deref(&self) -> &T { | ||
3082 | &self.inner | ||
3083 | } | ||
3084 | } | ||
3085 | |||
3086 | fn foo() { | ||
3087 | let f: Box<dyn Fn() -> i32> = box(|| 5); | ||
3088 | let x = f(); | ||
3089 | } | ||
3090 | "# | ||
3091 | ), | ||
3092 | @r###" | ||
3093 | 182..186 'self': Self | ||
3094 | 188..192 'args': Args | ||
3095 | 349..353 'self': &Self | ||
3096 | 355..359 'args': Args | ||
3097 | 523..527 'self': &Self | ||
3098 | 789..793 'self': &Box<T> | ||
3099 | 801..852 '{ ... }': &T | ||
3100 | 823..834 '&self.inner': &*mut T | ||
3101 | 824..828 'self': &Box<T> | ||
3102 | 824..834 'self.inner': *mut T | ||
3103 | 889..990 '{ ... }': () | ||
3104 | 911..912 'f': Box<dyn Fn<(), Output = i32>> | ||
3105 | 937..946 'box(|| 5)': Box<|| -> i32> | ||
3106 | 941..945 '|| 5': || -> i32 | ||
3107 | 944..945 '5': i32 | ||
3108 | 968..969 'x': FnOnce::Output<dyn Fn<(), Output = i32>, ()> | ||
3109 | 972..973 'f': Box<dyn Fn<(), Output = i32>> | ||
3110 | 972..975 'f()': FnOnce::Output<dyn Fn<(), Output = i32>, ()> | ||
3111 | "### | ||
3112 | ); | ||
3113 | } | ||
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs index 6bc6d474c..6f43c3a22 100644 --- a/crates/ra_hir_ty/src/traits.rs +++ b/crates/ra_hir_ty/src/traits.rs | |||
@@ -2,12 +2,13 @@ | |||
2 | use std::{panic, sync::Arc}; | 2 | use std::{panic, sync::Arc}; |
3 | 3 | ||
4 | use chalk_ir::cast::Cast; | 4 | use chalk_ir::cast::Cast; |
5 | use hir_def::{expr::ExprId, DefWithBodyId, ImplId, TraitId, TypeAliasId}; | 5 | use hir_def::{ |
6 | expr::ExprId, lang_item::LangItemTarget, DefWithBodyId, ImplId, TraitId, TypeAliasId, | ||
7 | }; | ||
6 | use ra_db::{impl_intern_key, salsa, CrateId}; | 8 | use ra_db::{impl_intern_key, salsa, CrateId}; |
7 | use ra_prof::profile; | 9 | use ra_prof::profile; |
8 | use rustc_hash::FxHashSet; | ||
9 | 10 | ||
10 | use crate::{db::HirDatabase, method_resolution::TyFingerprint, DebruijnIndex}; | 11 | use crate::{db::HirDatabase, DebruijnIndex}; |
11 | 12 | ||
12 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; | 13 | use super::{Canonical, GenericPredicate, HirDisplay, ProjectionTy, TraitRef, Ty, TypeWalk}; |
13 | 14 | ||
@@ -36,34 +37,6 @@ fn create_chalk_solver() -> chalk_solve::Solver<Interner> { | |||
36 | solver_choice.into_solver() | 37 | solver_choice.into_solver() |
37 | } | 38 | } |
38 | 39 | ||
39 | /// Collects impls for the given trait in the whole dependency tree of `krate`. | ||
40 | pub(crate) fn impls_for_trait_query( | ||
41 | db: &dyn HirDatabase, | ||
42 | krate: CrateId, | ||
43 | trait_: TraitId, | ||
44 | self_ty_fp: Option<TyFingerprint>, | ||
45 | ) -> Arc<[ImplId]> { | ||
46 | // FIXME: We could be a lot smarter here - because of the orphan rules and | ||
47 | // the fact that the trait and the self type need to be in the dependency | ||
48 | // tree of a crate somewhere for an impl to exist, we could skip looking in | ||
49 | // a lot of crates completely | ||
50 | let mut impls = FxHashSet::default(); | ||
51 | // We call the query recursively here. On the one hand, this means we can | ||
52 | // reuse results from queries for different crates; on the other hand, this | ||
53 | // will only ever get called for a few crates near the root of the tree (the | ||
54 | // ones the user is editing), so this may actually be a waste of memory. I'm | ||
55 | // doing it like this mainly for simplicity for now. | ||
56 | for dep in &db.crate_graph()[krate].dependencies { | ||
57 | impls.extend(db.impls_for_trait(dep.crate_id, trait_, self_ty_fp).iter()); | ||
58 | } | ||
59 | let crate_impl_defs = db.impls_in_crate(krate); | ||
60 | match self_ty_fp { | ||
61 | Some(fp) => impls.extend(crate_impl_defs.lookup_impl_defs_for_trait_and_ty(trait_, fp)), | ||
62 | None => impls.extend(crate_impl_defs.lookup_impl_defs_for_trait(trait_)), | ||
63 | } | ||
64 | impls.into_iter().collect() | ||
65 | } | ||
66 | |||
67 | /// A set of clauses that we assume to be true. E.g. if we are inside this function: | 40 | /// A set of clauses that we assume to be true. E.g. if we are inside this function: |
68 | /// ```rust | 41 | /// ```rust |
69 | /// fn foo<T: Default>(t: T) {} | 42 | /// fn foo<T: Default>(t: T) {} |
@@ -298,6 +271,14 @@ impl FnTrait { | |||
298 | FnTrait::Fn => "fn", | 271 | FnTrait::Fn => "fn", |
299 | } | 272 | } |
300 | } | 273 | } |
274 | |||
275 | pub fn get_id(&self, db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> { | ||
276 | let target = db.lang_item(krate, self.lang_item_name().into())?; | ||
277 | match target { | ||
278 | LangItemTarget::TraitId(t) => Some(t), | ||
279 | _ => None, | ||
280 | } | ||
281 | } | ||
301 | } | 282 | } |
302 | 283 | ||
303 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 284 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
diff --git a/crates/ra_hir_ty/src/traits/builtin.rs b/crates/ra_hir_ty/src/traits/builtin.rs index 88a422d2c..6d5f2d46a 100644 --- a/crates/ra_hir_ty/src/traits/builtin.rs +++ b/crates/ra_hir_ty/src/traits/builtin.rs | |||
@@ -40,7 +40,7 @@ pub(super) fn get_builtin_impls( | |||
40 | if let Ty::Apply(ApplicationTy { ctor: TypeCtor::Closure { def, expr }, .. }) = ty { | 40 | if let Ty::Apply(ApplicationTy { ctor: TypeCtor::Closure { def, expr }, .. }) = ty { |
41 | for &fn_trait in [super::FnTrait::FnOnce, super::FnTrait::FnMut, super::FnTrait::Fn].iter() | 41 | for &fn_trait in [super::FnTrait::FnOnce, super::FnTrait::FnMut, super::FnTrait::Fn].iter() |
42 | { | 42 | { |
43 | if let Some(actual_trait) = get_fn_trait(db, krate, fn_trait) { | 43 | if let Some(actual_trait) = fn_trait.get_id(db, krate) { |
44 | if trait_ == actual_trait { | 44 | if trait_ == actual_trait { |
45 | let impl_ = super::ClosureFnTraitImplData { def: *def, expr: *expr, fn_trait }; | 45 | let impl_ = super::ClosureFnTraitImplData { def: *def, expr: *expr, fn_trait }; |
46 | if check_closure_fn_trait_impl_prerequisites(db, krate, impl_) { | 46 | if check_closure_fn_trait_impl_prerequisites(db, krate, impl_) { |
@@ -128,7 +128,7 @@ fn check_closure_fn_trait_impl_prerequisites( | |||
128 | data: super::ClosureFnTraitImplData, | 128 | data: super::ClosureFnTraitImplData, |
129 | ) -> bool { | 129 | ) -> bool { |
130 | // the respective Fn/FnOnce/FnMut trait needs to exist | 130 | // the respective Fn/FnOnce/FnMut trait needs to exist |
131 | if get_fn_trait(db, krate, data.fn_trait).is_none() { | 131 | if data.fn_trait.get_id(db, krate).is_none() { |
132 | return false; | 132 | return false; |
133 | } | 133 | } |
134 | 134 | ||
@@ -136,7 +136,7 @@ fn check_closure_fn_trait_impl_prerequisites( | |||
136 | // the traits having no type params, FnOnce being a supertrait | 136 | // the traits having no type params, FnOnce being a supertrait |
137 | 137 | ||
138 | // the FnOnce trait needs to exist and have an assoc type named Output | 138 | // the FnOnce trait needs to exist and have an assoc type named Output |
139 | let fn_once_trait = match get_fn_trait(db, krate, super::FnTrait::FnOnce) { | 139 | let fn_once_trait = match (super::FnTrait::FnOnce).get_id(db, krate) { |
140 | Some(t) => t, | 140 | Some(t) => t, |
141 | None => return false, | 141 | None => return false, |
142 | }; | 142 | }; |
@@ -151,7 +151,9 @@ fn closure_fn_trait_impl_datum( | |||
151 | // for some closure |X, Y| -> Z: | 151 | // for some closure |X, Y| -> Z: |
152 | // impl<T, U, V> Fn<(T, U)> for closure<fn(T, U) -> V> { Output = V } | 152 | // impl<T, U, V> Fn<(T, U)> for closure<fn(T, U) -> V> { Output = V } |
153 | 153 | ||
154 | let trait_ = get_fn_trait(db, krate, data.fn_trait) // get corresponding fn trait | 154 | let trait_ = data |
155 | .fn_trait | ||
156 | .get_id(db, krate) // get corresponding fn trait | ||
155 | // the existence of the Fn trait has been checked before | 157 | // the existence of the Fn trait has been checked before |
156 | .expect("fn trait for closure impl missing"); | 158 | .expect("fn trait for closure impl missing"); |
157 | 159 | ||
@@ -211,7 +213,7 @@ fn closure_fn_trait_output_assoc_ty_value( | |||
211 | let output_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, num_args.into())); | 213 | let output_ty = Ty::Bound(BoundVar::new(DebruijnIndex::INNERMOST, num_args.into())); |
212 | 214 | ||
213 | let fn_once_trait = | 215 | let fn_once_trait = |
214 | get_fn_trait(db, krate, super::FnTrait::FnOnce).expect("assoc ty value should not exist"); | 216 | (super::FnTrait::FnOnce).get_id(db, krate).expect("assoc ty value should not exist"); |
215 | 217 | ||
216 | let output_ty_id = db | 218 | let output_ty_id = db |
217 | .trait_data(fn_once_trait) | 219 | .trait_data(fn_once_trait) |
@@ -360,14 +362,6 @@ fn super_trait_object_unsize_impl_datum( | |||
360 | BuiltinImplData { num_vars, trait_ref, where_clauses: Vec::new(), assoc_ty_values: Vec::new() } | 362 | BuiltinImplData { num_vars, trait_ref, where_clauses: Vec::new(), assoc_ty_values: Vec::new() } |
361 | } | 363 | } |
362 | 364 | ||
363 | fn get_fn_trait(db: &dyn HirDatabase, krate: CrateId, fn_trait: super::FnTrait) -> Option<TraitId> { | ||
364 | let target = db.lang_item(krate, fn_trait.lang_item_name().into())?; | ||
365 | match target { | ||
366 | LangItemTarget::TraitId(t) => Some(t), | ||
367 | _ => None, | ||
368 | } | ||
369 | } | ||
370 | |||
371 | fn get_unsize_trait(db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> { | 365 | fn get_unsize_trait(db: &dyn HirDatabase, krate: CrateId) -> Option<TraitId> { |
372 | let target = db.lang_item(krate, "unsize".into())?; | 366 | let target = db.lang_item(krate, "unsize".into())?; |
373 | match target { | 367 | match target { |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index a72a82f5a..2f35d6d49 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -74,14 +74,26 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> { | |||
74 | // Note: Since we're using impls_for_trait, only impls where the trait | 74 | // Note: Since we're using impls_for_trait, only impls where the trait |
75 | // can be resolved should ever reach Chalk. `impl_datum` relies on that | 75 | // can be resolved should ever reach Chalk. `impl_datum` relies on that |
76 | // and will panic if the trait can't be resolved. | 76 | // and will panic if the trait can't be resolved. |
77 | let mut result: Vec<_> = self | 77 | let in_deps = self.db.impls_from_deps(self.krate); |
78 | .db | 78 | let in_self = self.db.impls_in_crate(self.krate); |
79 | .impls_for_trait(self.krate, trait_, self_ty_fp) | 79 | let impl_maps = [in_deps, in_self]; |
80 | .iter() | 80 | |
81 | .copied() | 81 | let id_to_chalk = |id: hir_def::ImplId| Impl::ImplDef(id).to_chalk(self.db); |
82 | .map(Impl::ImplDef) | 82 | |
83 | .map(|impl_| impl_.to_chalk(self.db)) | 83 | let mut result: Vec<_> = match self_ty_fp { |
84 | .collect(); | 84 | Some(fp) => impl_maps |
85 | .iter() | ||
86 | .flat_map(|crate_impl_defs| { | ||
87 | crate_impl_defs.lookup_impl_defs_for_trait_and_ty(trait_, fp).map(id_to_chalk) | ||
88 | }) | ||
89 | .collect(), | ||
90 | None => impl_maps | ||
91 | .iter() | ||
92 | .flat_map(|crate_impl_defs| { | ||
93 | crate_impl_defs.lookup_impl_defs_for_trait(trait_).map(id_to_chalk) | ||
94 | }) | ||
95 | .collect(), | ||
96 | }; | ||
85 | 97 | ||
86 | let arg: Option<Ty> = | 98 | let arg: Option<Ty> = |
87 | parameters.get(1).map(|p| from_chalk(self.db, p.assert_ty_ref(&Interner).clone())); | 99 | parameters.get(1).map(|p| from_chalk(self.db, p.assert_ty_ref(&Interner).clone())); |
diff --git a/crates/ra_hir_ty/src/utils.rs b/crates/ra_hir_ty/src/utils.rs index f98350bf9..c45820ff0 100644 --- a/crates/ra_hir_ty/src/utils.rs +++ b/crates/ra_hir_ty/src/utils.rs | |||
@@ -143,13 +143,14 @@ pub(super) fn find_super_trait_path( | |||
143 | } | 143 | } |
144 | 144 | ||
145 | pub(super) fn associated_type_by_name_including_super_traits( | 145 | pub(super) fn associated_type_by_name_including_super_traits( |
146 | db: &dyn DefDatabase, | 146 | db: &dyn HirDatabase, |
147 | trait_: TraitId, | 147 | trait_ref: TraitRef, |
148 | name: &Name, | 148 | name: &Name, |
149 | ) -> Option<TypeAliasId> { | 149 | ) -> Option<(TraitRef, TypeAliasId)> { |
150 | all_super_traits(db, trait_) | 150 | all_super_trait_refs(db, trait_ref).into_iter().find_map(|t| { |
151 | .into_iter() | 151 | let assoc_type = db.trait_data(t.trait_).associated_type_by_name(name)?; |
152 | .find_map(|t| db.trait_data(t).associated_type_by_name(name)) | 152 | Some((t, assoc_type)) |
153 | }) | ||
153 | } | 154 | } |
154 | 155 | ||
155 | pub(super) fn variant_data(db: &dyn DefDatabase, var: VariantId) -> Arc<VariantData> { | 156 | pub(super) fn variant_data(db: &dyn DefDatabase, var: VariantId) -> Arc<VariantData> { |
@@ -176,6 +177,7 @@ pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics { | |||
176 | Generics { def, params: db.generic_params(def), parent_generics } | 177 | Generics { def, params: db.generic_params(def), parent_generics } |
177 | } | 178 | } |
178 | 179 | ||
180 | #[derive(Debug)] | ||
179 | pub(crate) struct Generics { | 181 | pub(crate) struct Generics { |
180 | def: GenericDefId, | 182 | def: GenericDefId, |
181 | pub(crate) params: Arc<GenericParams>, | 183 | pub(crate) params: Arc<GenericParams>, |
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index 05c940605..bbc6a5c9b 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -29,6 +29,7 @@ ra_fmt = { path = "../ra_fmt" } | |||
29 | ra_prof = { path = "../ra_prof" } | 29 | ra_prof = { path = "../ra_prof" } |
30 | test_utils = { path = "../test_utils" } | 30 | test_utils = { path = "../test_utils" } |
31 | ra_assists = { path = "../ra_assists" } | 31 | ra_assists = { path = "../ra_assists" } |
32 | ra_ssr = { path = "../ra_ssr" } | ||
32 | 33 | ||
33 | # ra_ide should depend only on the top-level `hir` package. if you need | 34 | # ra_ide should depend only on the top-level `hir` package. if you need |
34 | # something from some `hir_xxx` subpackage, reexport the API via `hir`. | 35 | # something from some `hir_xxx` subpackage, reexport the API via `hir`. |
diff --git a/crates/ra_ide/src/diagnostics.rs b/crates/ra_ide/src/diagnostics.rs index e1bfd72f9..a88a978d7 100644 --- a/crates/ra_ide/src/diagnostics.rs +++ b/crates/ra_ide/src/diagnostics.rs | |||
@@ -8,15 +8,15 @@ use std::cell::RefCell; | |||
8 | 8 | ||
9 | use hir::{ | 9 | use hir::{ |
10 | diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}, | 10 | diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}, |
11 | Semantics, | 11 | HasSource, HirDisplay, Semantics, VariantDef, |
12 | }; | 12 | }; |
13 | use itertools::Itertools; | 13 | use itertools::Itertools; |
14 | use ra_db::{RelativePath, SourceDatabase, SourceDatabaseExt}; | 14 | use ra_db::SourceDatabase; |
15 | use ra_ide_db::RootDatabase; | 15 | use ra_ide_db::RootDatabase; |
16 | use ra_prof::profile; | 16 | use ra_prof::profile; |
17 | use ra_syntax::{ | 17 | use ra_syntax::{ |
18 | algo, | 18 | algo, |
19 | ast::{self, make, AstNode}, | 19 | ast::{self, edit::IndentLevel, make, AstNode}, |
20 | SyntaxNode, TextRange, T, | 20 | SyntaxNode, TextRange, T, |
21 | }; | 21 | }; |
22 | use ra_text_edit::{TextEdit, TextEditBuilder}; | 22 | use ra_text_edit::{TextEdit, TextEditBuilder}; |
@@ -57,14 +57,10 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> | |||
57 | }) | 57 | }) |
58 | .on::<hir::diagnostics::UnresolvedModule, _>(|d| { | 58 | .on::<hir::diagnostics::UnresolvedModule, _>(|d| { |
59 | let original_file = d.source().file_id.original_file(db); | 59 | let original_file = d.source().file_id.original_file(db); |
60 | let source_root = db.file_source_root(original_file); | 60 | let fix = Fix::new( |
61 | let path = db | 61 | "Create module", |
62 | .file_relative_path(original_file) | 62 | FileSystemEdit::CreateFile { anchor: original_file, dst: d.candidate.clone() }.into(), |
63 | .parent() | 63 | ); |
64 | .unwrap_or_else(|| RelativePath::new("")) | ||
65 | .join(&d.candidate); | ||
66 | let fix = | ||
67 | Fix::new("Create module", FileSystemEdit::CreateFile { source_root, path }.into()); | ||
68 | res.borrow_mut().push(Diagnostic { | 64 | res.borrow_mut().push(Diagnostic { |
69 | range: sema.diagnostics_range(d).range, | 65 | range: sema.diagnostics_range(d).range, |
70 | message: d.message(), | 66 | message: d.message(), |
@@ -123,7 +119,16 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> | |||
123 | severity: Severity::Error, | 119 | severity: Severity::Error, |
124 | fix: Some(fix), | 120 | fix: Some(fix), |
125 | }) | 121 | }) |
122 | }) | ||
123 | .on::<hir::diagnostics::NoSuchField, _>(|d| { | ||
124 | res.borrow_mut().push(Diagnostic { | ||
125 | range: sema.diagnostics_range(d).range, | ||
126 | message: d.message(), | ||
127 | severity: Severity::Error, | ||
128 | fix: missing_struct_field_fix(&sema, file_id, d), | ||
129 | }) | ||
126 | }); | 130 | }); |
131 | |||
127 | if let Some(m) = sema.to_module_def(file_id) { | 132 | if let Some(m) = sema.to_module_def(file_id) { |
128 | m.diagnostics(db, &mut sink); | 133 | m.diagnostics(db, &mut sink); |
129 | }; | 134 | }; |
@@ -131,6 +136,68 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> | |||
131 | res.into_inner() | 136 | res.into_inner() |
132 | } | 137 | } |
133 | 138 | ||
139 | fn missing_struct_field_fix( | ||
140 | sema: &Semantics<RootDatabase>, | ||
141 | file_id: FileId, | ||
142 | d: &hir::diagnostics::NoSuchField, | ||
143 | ) -> Option<Fix> { | ||
144 | let record_expr = sema.ast(d); | ||
145 | |||
146 | let record_lit = ast::RecordLit::cast(record_expr.syntax().parent()?.parent()?)?; | ||
147 | let def_id = sema.resolve_variant(record_lit)?; | ||
148 | let module; | ||
149 | let record_fields = match VariantDef::from(def_id) { | ||
150 | VariantDef::Struct(s) => { | ||
151 | module = s.module(sema.db); | ||
152 | let source = s.source(sema.db); | ||
153 | let fields = source.value.field_def_list()?; | ||
154 | record_field_def_list(fields)? | ||
155 | } | ||
156 | VariantDef::Union(u) => { | ||
157 | module = u.module(sema.db); | ||
158 | let source = u.source(sema.db); | ||
159 | source.value.record_field_def_list()? | ||
160 | } | ||
161 | VariantDef::EnumVariant(e) => { | ||
162 | module = e.module(sema.db); | ||
163 | let source = e.source(sema.db); | ||
164 | let fields = source.value.field_def_list()?; | ||
165 | record_field_def_list(fields)? | ||
166 | } | ||
167 | }; | ||
168 | |||
169 | let new_field_type = sema.type_of_expr(&record_expr.expr()?)?; | ||
170 | let new_field = make::record_field_def( | ||
171 | record_expr.field_name()?, | ||
172 | make::type_ref(&new_field_type.display_source_code(sema.db, module.into()).ok()?), | ||
173 | ); | ||
174 | |||
175 | let last_field = record_fields.fields().last()?; | ||
176 | let last_field_syntax = last_field.syntax(); | ||
177 | let indent = IndentLevel::from_node(last_field_syntax); | ||
178 | |||
179 | let mut new_field = format!("\n{}{}", indent, new_field); | ||
180 | |||
181 | let needs_comma = !last_field_syntax.to_string().ends_with(","); | ||
182 | if needs_comma { | ||
183 | new_field = format!(",{}", new_field); | ||
184 | } | ||
185 | |||
186 | let source_change = SourceFileEdit { | ||
187 | file_id, | ||
188 | edit: TextEdit::insert(last_field_syntax.text_range().end(), new_field), | ||
189 | }; | ||
190 | let fix = Fix::new("Create field", source_change.into()); | ||
191 | return Some(fix); | ||
192 | |||
193 | fn record_field_def_list(field_def_list: ast::FieldDefList) -> Option<ast::RecordFieldDefList> { | ||
194 | match field_def_list { | ||
195 | ast::FieldDefList::RecordFieldDefList(it) => Some(it), | ||
196 | ast::FieldDefList::TupleFieldDefList(_) => None, | ||
197 | } | ||
198 | } | ||
199 | } | ||
200 | |||
134 | fn check_unnecessary_braces_in_use_statement( | 201 | fn check_unnecessary_braces_in_use_statement( |
135 | acc: &mut Vec<Diagnostic>, | 202 | acc: &mut Vec<Diagnostic>, |
136 | file_id: FileId, | 203 | file_id: FileId, |
@@ -612,10 +679,10 @@ mod tests { | |||
612 | source_file_edits: [], | 679 | source_file_edits: [], |
613 | file_system_edits: [ | 680 | file_system_edits: [ |
614 | CreateFile { | 681 | CreateFile { |
615 | source_root: SourceRootId( | 682 | anchor: FileId( |
616 | 0, | 683 | 1, |
617 | ), | 684 | ), |
618 | path: "foo.rs", | 685 | dst: "foo.rs", |
619 | }, | 686 | }, |
620 | ], | 687 | ], |
621 | is_snippet: false, | 688 | is_snippet: false, |
@@ -795,4 +862,27 @@ fn main() { | |||
795 | check_struct_shorthand_initialization, | 862 | check_struct_shorthand_initialization, |
796 | ); | 863 | ); |
797 | } | 864 | } |
865 | |||
866 | #[test] | ||
867 | fn test_add_field_from_usage() { | ||
868 | check_apply_diagnostic_fix( | ||
869 | r" | ||
870 | fn main() { | ||
871 | Foo { bar: 3, baz: false}; | ||
872 | } | ||
873 | struct Foo { | ||
874 | bar: i32 | ||
875 | } | ||
876 | ", | ||
877 | r" | ||
878 | fn main() { | ||
879 | Foo { bar: 3, baz: false}; | ||
880 | } | ||
881 | struct Foo { | ||
882 | bar: i32, | ||
883 | baz: bool | ||
884 | } | ||
885 | ", | ||
886 | ) | ||
887 | } | ||
798 | } | 888 | } |
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs index c7bb1e69f..0b52b01ab 100644 --- a/crates/ra_ide/src/display/navigation_target.rs +++ b/crates/ra_ide/src/display/navigation_target.rs | |||
@@ -135,8 +135,8 @@ impl NavigationTarget { | |||
135 | db: &RootDatabase, | 135 | db: &RootDatabase, |
136 | node: InFile<&dyn ast::NameOwner>, | 136 | node: InFile<&dyn ast::NameOwner>, |
137 | ) -> NavigationTarget { | 137 | ) -> NavigationTarget { |
138 | //FIXME: use `_` instead of empty string | 138 | let name = |
139 | let name = node.value.name().map(|it| it.text().clone()).unwrap_or_default(); | 139 | node.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); |
140 | let focus_range = | 140 | let focus_range = |
141 | node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); | 141 | node.value.name().map(|it| original_range(db, node.with_value(it.syntax())).range); |
142 | let frange = original_range(db, node.map(|it| it.syntax())); | 142 | let frange = original_range(db, node.map(|it| it.syntax())); |
@@ -150,6 +150,25 @@ impl NavigationTarget { | |||
150 | ) | 150 | ) |
151 | } | 151 | } |
152 | 152 | ||
153 | /// Allows `NavigationTarget` to be created from a `DocCommentsOwner` and a `NameOwner` | ||
154 | pub(crate) fn from_doc_commented( | ||
155 | db: &RootDatabase, | ||
156 | named: InFile<&dyn ast::NameOwner>, | ||
157 | node: InFile<&dyn ast::DocCommentsOwner>, | ||
158 | ) -> NavigationTarget { | ||
159 | let name = | ||
160 | named.value.name().map(|it| it.text().clone()).unwrap_or_else(|| SmolStr::new("_")); | ||
161 | let frange = original_range(db, node.map(|it| it.syntax())); | ||
162 | |||
163 | NavigationTarget::from_syntax( | ||
164 | frange.file_id, | ||
165 | name, | ||
166 | None, | ||
167 | frange.range, | ||
168 | node.value.syntax().kind(), | ||
169 | ) | ||
170 | } | ||
171 | |||
153 | fn from_syntax( | 172 | fn from_syntax( |
154 | file_id: FileId, | 173 | file_id: FileId, |
155 | name: SmolStr, | 174 | name: SmolStr, |
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs index ad78b7671..d870e4cbc 100644 --- a/crates/ra_ide/src/hover.rs +++ b/crates/ra_ide/src/hover.rs | |||
@@ -2,7 +2,7 @@ use std::iter::once; | |||
2 | 2 | ||
3 | use hir::{ | 3 | use hir::{ |
4 | Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay, | 4 | Adt, AsAssocItem, AssocItemContainer, Documentation, FieldSource, HasSource, HirDisplay, |
5 | ModuleDef, ModuleSource, Semantics, | 5 | Module, ModuleDef, ModuleSource, Semantics, |
6 | }; | 6 | }; |
7 | use itertools::Itertools; | 7 | use itertools::Itertools; |
8 | use ra_db::SourceDatabase; | 8 | use ra_db::SourceDatabase; |
@@ -13,7 +13,9 @@ use ra_ide_db::{ | |||
13 | use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; | 13 | use ra_syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxToken, TokenAtOffset}; |
14 | 14 | ||
15 | use crate::{ | 15 | use crate::{ |
16 | display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel, ToNav}, | 16 | display::{ |
17 | macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel, ToNav, TryToNav, | ||
18 | }, | ||
17 | runnables::runnable, | 19 | runnables::runnable, |
18 | FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, | 20 | FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, |
19 | }; | 21 | }; |
@@ -24,19 +26,21 @@ pub struct HoverConfig { | |||
24 | pub implementations: bool, | 26 | pub implementations: bool, |
25 | pub run: bool, | 27 | pub run: bool, |
26 | pub debug: bool, | 28 | pub debug: bool, |
29 | pub goto_type_def: bool, | ||
27 | } | 30 | } |
28 | 31 | ||
29 | impl Default for HoverConfig { | 32 | impl Default for HoverConfig { |
30 | fn default() -> Self { | 33 | fn default() -> Self { |
31 | Self { implementations: true, run: true, debug: true } | 34 | Self { implementations: true, run: true, debug: true, goto_type_def: true } |
32 | } | 35 | } |
33 | } | 36 | } |
34 | 37 | ||
35 | impl HoverConfig { | 38 | impl HoverConfig { |
36 | pub const NO_ACTIONS: Self = Self { implementations: false, run: false, debug: false }; | 39 | pub const NO_ACTIONS: Self = |
40 | Self { implementations: false, run: false, debug: false, goto_type_def: false }; | ||
37 | 41 | ||
38 | pub fn any(&self) -> bool { | 42 | pub fn any(&self) -> bool { |
39 | self.implementations || self.runnable() | 43 | self.implementations || self.runnable() || self.goto_type_def |
40 | } | 44 | } |
41 | 45 | ||
42 | pub fn none(&self) -> bool { | 46 | pub fn none(&self) -> bool { |
@@ -52,6 +56,13 @@ impl HoverConfig { | |||
52 | pub enum HoverAction { | 56 | pub enum HoverAction { |
53 | Runnable(Runnable), | 57 | Runnable(Runnable), |
54 | Implementaion(FilePosition), | 58 | Implementaion(FilePosition), |
59 | GoToType(Vec<HoverGotoTypeData>), | ||
60 | } | ||
61 | |||
62 | #[derive(Debug, Clone, Eq, PartialEq)] | ||
63 | pub struct HoverGotoTypeData { | ||
64 | pub mod_path: String, | ||
65 | pub nav: NavigationTarget, | ||
55 | } | 66 | } |
56 | 67 | ||
57 | /// Contains the results when hovering over an item | 68 | /// Contains the results when hovering over an item |
@@ -138,6 +149,10 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn | |||
138 | res.push_action(action); | 149 | res.push_action(action); |
139 | } | 150 | } |
140 | 151 | ||
152 | if let Some(action) = goto_type_action(db, name_kind) { | ||
153 | res.push_action(action); | ||
154 | } | ||
155 | |||
141 | return Some(RangeInfo::new(range, res)); | 156 | return Some(RangeInfo::new(range, res)); |
142 | } | 157 | } |
143 | } | 158 | } |
@@ -218,6 +233,44 @@ fn runnable_action( | |||
218 | } | 233 | } |
219 | } | 234 | } |
220 | 235 | ||
236 | fn goto_type_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> { | ||
237 | match def { | ||
238 | Definition::Local(it) => { | ||
239 | let mut targets: Vec<ModuleDef> = Vec::new(); | ||
240 | let mut push_new_def = |item: ModuleDef| { | ||
241 | if !targets.contains(&item) { | ||
242 | targets.push(item); | ||
243 | } | ||
244 | }; | ||
245 | |||
246 | it.ty(db).walk(db, |t| { | ||
247 | if let Some(adt) = t.as_adt() { | ||
248 | push_new_def(adt.into()); | ||
249 | } else if let Some(trait_) = t.as_dyn_trait() { | ||
250 | push_new_def(trait_.into()); | ||
251 | } else if let Some(traits) = t.as_impl_traits(db) { | ||
252 | traits.into_iter().for_each(|it| push_new_def(it.into())); | ||
253 | } else if let Some(trait_) = t.as_associated_type_parent_trait(db) { | ||
254 | push_new_def(trait_.into()); | ||
255 | } | ||
256 | }); | ||
257 | |||
258 | let targets = targets | ||
259 | .into_iter() | ||
260 | .filter_map(|it| { | ||
261 | Some(HoverGotoTypeData { | ||
262 | mod_path: mod_path(db, &it)?, | ||
263 | nav: it.try_to_nav(db)?, | ||
264 | }) | ||
265 | }) | ||
266 | .collect(); | ||
267 | |||
268 | Some(HoverAction::GoToType(targets)) | ||
269 | } | ||
270 | _ => None, | ||
271 | } | ||
272 | } | ||
273 | |||
221 | fn hover_text( | 274 | fn hover_text( |
222 | docs: Option<String>, | 275 | docs: Option<String>, |
223 | desc: Option<String>, | 276 | desc: Option<String>, |
@@ -248,25 +301,31 @@ fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> | |||
248 | .map(|name| name.to_string()) | 301 | .map(|name| name.to_string()) |
249 | } | 302 | } |
250 | 303 | ||
251 | fn determine_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> { | 304 | fn determine_mod_path(db: &RootDatabase, module: Module, name: Option<String>) -> String { |
252 | let mod_path = def.module(db).map(|module| { | 305 | once(db.crate_graph()[module.krate().into()].display_name.as_ref().map(ToString::to_string)) |
253 | once(db.crate_graph()[module.krate().into()].display_name.as_ref().map(ToString::to_string)) | 306 | .chain( |
254 | .chain( | 307 | module |
255 | module | 308 | .path_to_root(db) |
256 | .path_to_root(db) | 309 | .into_iter() |
257 | .into_iter() | 310 | .rev() |
258 | .rev() | 311 | .map(|it| it.name(db).map(|name| name.to_string())), |
259 | .map(|it| it.name(db).map(|name| name.to_string())), | 312 | ) |
260 | ) | 313 | .chain(once(name)) |
261 | .chain(once(definition_owner_name(db, def))) | 314 | .flatten() |
262 | .flatten() | 315 | .join("::") |
263 | .join("::") | 316 | } |
264 | }); | 317 | |
265 | mod_path | 318 | // returns None only for ModuleDef::BuiltinType |
319 | fn mod_path(db: &RootDatabase, item: &ModuleDef) -> Option<String> { | ||
320 | Some(determine_mod_path(db, item.module(db)?, item.name(db).map(|name| name.to_string()))) | ||
321 | } | ||
322 | |||
323 | fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> { | ||
324 | def.module(db).map(|module| determine_mod_path(db, module, definition_owner_name(db, def))) | ||
266 | } | 325 | } |
267 | 326 | ||
268 | fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<String> { | 327 | fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<String> { |
269 | let mod_path = determine_mod_path(db, &def); | 328 | let mod_path = definition_mod_path(db, &def); |
270 | return match def { | 329 | return match def { |
271 | Definition::Macro(it) => { | 330 | Definition::Macro(it) => { |
272 | let src = it.source(db); | 331 | let src = it.source(db); |
@@ -1310,4 +1369,1045 @@ fn func(foo: i32) { if true { <|>foo; }; } | |||
1310 | ] | 1369 | ] |
1311 | "###); | 1370 | "###); |
1312 | } | 1371 | } |
1372 | |||
1373 | #[test] | ||
1374 | fn test_hover_struct_has_goto_type_action() { | ||
1375 | let (_, actions) = check_hover_result( | ||
1376 | " | ||
1377 | //- /main.rs | ||
1378 | struct S{ f1: u32 } | ||
1379 | |||
1380 | fn main() { | ||
1381 | let s<|>t = S{ f1:0 }; | ||
1382 | } | ||
1383 | ", | ||
1384 | &["S"], | ||
1385 | ); | ||
1386 | assert_debug_snapshot!(actions, | ||
1387 | @r###" | ||
1388 | [ | ||
1389 | GoToType( | ||
1390 | [ | ||
1391 | HoverGotoTypeData { | ||
1392 | mod_path: "S", | ||
1393 | nav: NavigationTarget { | ||
1394 | file_id: FileId( | ||
1395 | 1, | ||
1396 | ), | ||
1397 | full_range: 0..19, | ||
1398 | name: "S", | ||
1399 | kind: STRUCT_DEF, | ||
1400 | focus_range: Some( | ||
1401 | 7..8, | ||
1402 | ), | ||
1403 | container_name: None, | ||
1404 | description: Some( | ||
1405 | "struct S", | ||
1406 | ), | ||
1407 | docs: None, | ||
1408 | }, | ||
1409 | }, | ||
1410 | ], | ||
1411 | ), | ||
1412 | ] | ||
1413 | "###); | ||
1414 | } | ||
1415 | |||
1416 | #[test] | ||
1417 | fn test_hover_generic_struct_has_goto_type_actions() { | ||
1418 | let (_, actions) = check_hover_result( | ||
1419 | " | ||
1420 | //- /main.rs | ||
1421 | struct Arg(u32); | ||
1422 | struct S<T>{ f1: T } | ||
1423 | |||
1424 | fn main() { | ||
1425 | let s<|>t = S{ f1:Arg(0) }; | ||
1426 | } | ||
1427 | ", | ||
1428 | &["S<Arg>"], | ||
1429 | ); | ||
1430 | assert_debug_snapshot!(actions, | ||
1431 | @r###" | ||
1432 | [ | ||
1433 | GoToType( | ||
1434 | [ | ||
1435 | HoverGotoTypeData { | ||
1436 | mod_path: "S", | ||
1437 | nav: NavigationTarget { | ||
1438 | file_id: FileId( | ||
1439 | 1, | ||
1440 | ), | ||
1441 | full_range: 17..37, | ||
1442 | name: "S", | ||
1443 | kind: STRUCT_DEF, | ||
1444 | focus_range: Some( | ||
1445 | 24..25, | ||
1446 | ), | ||
1447 | container_name: None, | ||
1448 | description: Some( | ||
1449 | "struct S", | ||
1450 | ), | ||
1451 | docs: None, | ||
1452 | }, | ||
1453 | }, | ||
1454 | HoverGotoTypeData { | ||
1455 | mod_path: "Arg", | ||
1456 | nav: NavigationTarget { | ||
1457 | file_id: FileId( | ||
1458 | 1, | ||
1459 | ), | ||
1460 | full_range: 0..16, | ||
1461 | name: "Arg", | ||
1462 | kind: STRUCT_DEF, | ||
1463 | focus_range: Some( | ||
1464 | 7..10, | ||
1465 | ), | ||
1466 | container_name: None, | ||
1467 | description: Some( | ||
1468 | "struct Arg", | ||
1469 | ), | ||
1470 | docs: None, | ||
1471 | }, | ||
1472 | }, | ||
1473 | ], | ||
1474 | ), | ||
1475 | ] | ||
1476 | "###); | ||
1477 | } | ||
1478 | |||
1479 | #[test] | ||
1480 | fn test_hover_generic_struct_has_flattened_goto_type_actions() { | ||
1481 | let (_, actions) = check_hover_result( | ||
1482 | " | ||
1483 | //- /main.rs | ||
1484 | struct Arg(u32); | ||
1485 | struct S<T>{ f1: T } | ||
1486 | |||
1487 | fn main() { | ||
1488 | let s<|>t = S{ f1: S{ f1: Arg(0) } }; | ||
1489 | } | ||
1490 | ", | ||
1491 | &["S<S<Arg>>"], | ||
1492 | ); | ||
1493 | assert_debug_snapshot!(actions, | ||
1494 | @r###" | ||
1495 | [ | ||
1496 | GoToType( | ||
1497 | [ | ||
1498 | HoverGotoTypeData { | ||
1499 | mod_path: "S", | ||
1500 | nav: NavigationTarget { | ||
1501 | file_id: FileId( | ||
1502 | 1, | ||
1503 | ), | ||
1504 | full_range: 17..37, | ||
1505 | name: "S", | ||
1506 | kind: STRUCT_DEF, | ||
1507 | focus_range: Some( | ||
1508 | 24..25, | ||
1509 | ), | ||
1510 | container_name: None, | ||
1511 | description: Some( | ||
1512 | "struct S", | ||
1513 | ), | ||
1514 | docs: None, | ||
1515 | }, | ||
1516 | }, | ||
1517 | HoverGotoTypeData { | ||
1518 | mod_path: "Arg", | ||
1519 | nav: NavigationTarget { | ||
1520 | file_id: FileId( | ||
1521 | 1, | ||
1522 | ), | ||
1523 | full_range: 0..16, | ||
1524 | name: "Arg", | ||
1525 | kind: STRUCT_DEF, | ||
1526 | focus_range: Some( | ||
1527 | 7..10, | ||
1528 | ), | ||
1529 | container_name: None, | ||
1530 | description: Some( | ||
1531 | "struct Arg", | ||
1532 | ), | ||
1533 | docs: None, | ||
1534 | }, | ||
1535 | }, | ||
1536 | ], | ||
1537 | ), | ||
1538 | ] | ||
1539 | "###); | ||
1540 | } | ||
1541 | |||
1542 | #[test] | ||
1543 | fn test_hover_tuple_has_goto_type_actions() { | ||
1544 | let (_, actions) = check_hover_result( | ||
1545 | " | ||
1546 | //- /main.rs | ||
1547 | struct A(u32); | ||
1548 | struct B(u32); | ||
1549 | mod M { | ||
1550 | pub struct C(u32); | ||
1551 | } | ||
1552 | |||
1553 | fn main() { | ||
1554 | let s<|>t = (A(1), B(2), M::C(3) ); | ||
1555 | } | ||
1556 | ", | ||
1557 | &["(A, B, C)"], | ||
1558 | ); | ||
1559 | assert_debug_snapshot!(actions, | ||
1560 | @r###" | ||
1561 | [ | ||
1562 | GoToType( | ||
1563 | [ | ||
1564 | HoverGotoTypeData { | ||
1565 | mod_path: "A", | ||
1566 | nav: NavigationTarget { | ||
1567 | file_id: FileId( | ||
1568 | 1, | ||
1569 | ), | ||
1570 | full_range: 0..14, | ||
1571 | name: "A", | ||
1572 | kind: STRUCT_DEF, | ||
1573 | focus_range: Some( | ||
1574 | 7..8, | ||
1575 | ), | ||
1576 | container_name: None, | ||
1577 | description: Some( | ||
1578 | "struct A", | ||
1579 | ), | ||
1580 | docs: None, | ||
1581 | }, | ||
1582 | }, | ||
1583 | HoverGotoTypeData { | ||
1584 | mod_path: "B", | ||
1585 | nav: NavigationTarget { | ||
1586 | file_id: FileId( | ||
1587 | 1, | ||
1588 | ), | ||
1589 | full_range: 15..29, | ||
1590 | name: "B", | ||
1591 | kind: STRUCT_DEF, | ||
1592 | focus_range: Some( | ||
1593 | 22..23, | ||
1594 | ), | ||
1595 | container_name: None, | ||
1596 | description: Some( | ||
1597 | "struct B", | ||
1598 | ), | ||
1599 | docs: None, | ||
1600 | }, | ||
1601 | }, | ||
1602 | HoverGotoTypeData { | ||
1603 | mod_path: "M::C", | ||
1604 | nav: NavigationTarget { | ||
1605 | file_id: FileId( | ||
1606 | 1, | ||
1607 | ), | ||
1608 | full_range: 42..60, | ||
1609 | name: "C", | ||
1610 | kind: STRUCT_DEF, | ||
1611 | focus_range: Some( | ||
1612 | 53..54, | ||
1613 | ), | ||
1614 | container_name: None, | ||
1615 | description: Some( | ||
1616 | "pub struct C", | ||
1617 | ), | ||
1618 | docs: None, | ||
1619 | }, | ||
1620 | }, | ||
1621 | ], | ||
1622 | ), | ||
1623 | ] | ||
1624 | "###); | ||
1625 | } | ||
1626 | |||
1627 | #[test] | ||
1628 | fn test_hover_return_impl_trait_has_goto_type_action() { | ||
1629 | let (_, actions) = check_hover_result( | ||
1630 | " | ||
1631 | //- /main.rs | ||
1632 | trait Foo {} | ||
1633 | |||
1634 | fn foo() -> impl Foo {} | ||
1635 | |||
1636 | fn main() { | ||
1637 | let s<|>t = foo(); | ||
1638 | } | ||
1639 | ", | ||
1640 | &["impl Foo"], | ||
1641 | ); | ||
1642 | assert_debug_snapshot!(actions, | ||
1643 | @r###" | ||
1644 | [ | ||
1645 | GoToType( | ||
1646 | [ | ||
1647 | HoverGotoTypeData { | ||
1648 | mod_path: "Foo", | ||
1649 | nav: NavigationTarget { | ||
1650 | file_id: FileId( | ||
1651 | 1, | ||
1652 | ), | ||
1653 | full_range: 0..12, | ||
1654 | name: "Foo", | ||
1655 | kind: TRAIT_DEF, | ||
1656 | focus_range: Some( | ||
1657 | 6..9, | ||
1658 | ), | ||
1659 | container_name: None, | ||
1660 | description: Some( | ||
1661 | "trait Foo", | ||
1662 | ), | ||
1663 | docs: None, | ||
1664 | }, | ||
1665 | }, | ||
1666 | ], | ||
1667 | ), | ||
1668 | ] | ||
1669 | "###); | ||
1670 | } | ||
1671 | |||
1672 | #[test] | ||
1673 | fn test_hover_generic_return_impl_trait_has_goto_type_action() { | ||
1674 | let (_, actions) = check_hover_result( | ||
1675 | " | ||
1676 | //- /main.rs | ||
1677 | trait Foo<T> {} | ||
1678 | struct S; | ||
1679 | |||
1680 | fn foo() -> impl Foo<S> {} | ||
1681 | |||
1682 | fn main() { | ||
1683 | let s<|>t = foo(); | ||
1684 | } | ||
1685 | ", | ||
1686 | &["impl Foo<S>"], | ||
1687 | ); | ||
1688 | assert_debug_snapshot!(actions, | ||
1689 | @r###" | ||
1690 | [ | ||
1691 | GoToType( | ||
1692 | [ | ||
1693 | HoverGotoTypeData { | ||
1694 | mod_path: "Foo", | ||
1695 | nav: NavigationTarget { | ||
1696 | file_id: FileId( | ||
1697 | 1, | ||
1698 | ), | ||
1699 | full_range: 0..15, | ||
1700 | name: "Foo", | ||
1701 | kind: TRAIT_DEF, | ||
1702 | focus_range: Some( | ||
1703 | 6..9, | ||
1704 | ), | ||
1705 | container_name: None, | ||
1706 | description: Some( | ||
1707 | "trait Foo", | ||
1708 | ), | ||
1709 | docs: None, | ||
1710 | }, | ||
1711 | }, | ||
1712 | HoverGotoTypeData { | ||
1713 | mod_path: "S", | ||
1714 | nav: NavigationTarget { | ||
1715 | file_id: FileId( | ||
1716 | 1, | ||
1717 | ), | ||
1718 | full_range: 16..25, | ||
1719 | name: "S", | ||
1720 | kind: STRUCT_DEF, | ||
1721 | focus_range: Some( | ||
1722 | 23..24, | ||
1723 | ), | ||
1724 | container_name: None, | ||
1725 | description: Some( | ||
1726 | "struct S", | ||
1727 | ), | ||
1728 | docs: None, | ||
1729 | }, | ||
1730 | }, | ||
1731 | ], | ||
1732 | ), | ||
1733 | ] | ||
1734 | "###); | ||
1735 | } | ||
1736 | |||
1737 | #[test] | ||
1738 | fn test_hover_return_impl_traits_has_goto_type_action() { | ||
1739 | let (_, actions) = check_hover_result( | ||
1740 | " | ||
1741 | //- /main.rs | ||
1742 | trait Foo {} | ||
1743 | trait Bar {} | ||
1744 | |||
1745 | fn foo() -> impl Foo + Bar {} | ||
1746 | |||
1747 | fn main() { | ||
1748 | let s<|>t = foo(); | ||
1749 | } | ||
1750 | ", | ||
1751 | &["impl Foo + Bar"], | ||
1752 | ); | ||
1753 | assert_debug_snapshot!(actions, | ||
1754 | @r###" | ||
1755 | [ | ||
1756 | GoToType( | ||
1757 | [ | ||
1758 | HoverGotoTypeData { | ||
1759 | mod_path: "Foo", | ||
1760 | nav: NavigationTarget { | ||
1761 | file_id: FileId( | ||
1762 | 1, | ||
1763 | ), | ||
1764 | full_range: 0..12, | ||
1765 | name: "Foo", | ||
1766 | kind: TRAIT_DEF, | ||
1767 | focus_range: Some( | ||
1768 | 6..9, | ||
1769 | ), | ||
1770 | container_name: None, | ||
1771 | description: Some( | ||
1772 | "trait Foo", | ||
1773 | ), | ||
1774 | docs: None, | ||
1775 | }, | ||
1776 | }, | ||
1777 | HoverGotoTypeData { | ||
1778 | mod_path: "Bar", | ||
1779 | nav: NavigationTarget { | ||
1780 | file_id: FileId( | ||
1781 | 1, | ||
1782 | ), | ||
1783 | full_range: 13..25, | ||
1784 | name: "Bar", | ||
1785 | kind: TRAIT_DEF, | ||
1786 | focus_range: Some( | ||
1787 | 19..22, | ||
1788 | ), | ||
1789 | container_name: None, | ||
1790 | description: Some( | ||
1791 | "trait Bar", | ||
1792 | ), | ||
1793 | docs: None, | ||
1794 | }, | ||
1795 | }, | ||
1796 | ], | ||
1797 | ), | ||
1798 | ] | ||
1799 | "###); | ||
1800 | } | ||
1801 | |||
1802 | #[test] | ||
1803 | fn test_hover_generic_return_impl_traits_has_goto_type_action() { | ||
1804 | let (_, actions) = check_hover_result( | ||
1805 | " | ||
1806 | //- /main.rs | ||
1807 | trait Foo<T> {} | ||
1808 | trait Bar<T> {} | ||
1809 | struct S1 {} | ||
1810 | struct S2 {} | ||
1811 | |||
1812 | fn foo() -> impl Foo<S1> + Bar<S2> {} | ||
1813 | |||
1814 | fn main() { | ||
1815 | let s<|>t = foo(); | ||
1816 | } | ||
1817 | ", | ||
1818 | &["impl Foo<S1> + Bar<S2>"], | ||
1819 | ); | ||
1820 | assert_debug_snapshot!(actions, | ||
1821 | @r###" | ||
1822 | [ | ||
1823 | GoToType( | ||
1824 | [ | ||
1825 | HoverGotoTypeData { | ||
1826 | mod_path: "Foo", | ||
1827 | nav: NavigationTarget { | ||
1828 | file_id: FileId( | ||
1829 | 1, | ||
1830 | ), | ||
1831 | full_range: 0..15, | ||
1832 | name: "Foo", | ||
1833 | kind: TRAIT_DEF, | ||
1834 | focus_range: Some( | ||
1835 | 6..9, | ||
1836 | ), | ||
1837 | container_name: None, | ||
1838 | description: Some( | ||
1839 | "trait Foo", | ||
1840 | ), | ||
1841 | docs: None, | ||
1842 | }, | ||
1843 | }, | ||
1844 | HoverGotoTypeData { | ||
1845 | mod_path: "Bar", | ||
1846 | nav: NavigationTarget { | ||
1847 | file_id: FileId( | ||
1848 | 1, | ||
1849 | ), | ||
1850 | full_range: 16..31, | ||
1851 | name: "Bar", | ||
1852 | kind: TRAIT_DEF, | ||
1853 | focus_range: Some( | ||
1854 | 22..25, | ||
1855 | ), | ||
1856 | container_name: None, | ||
1857 | description: Some( | ||
1858 | "trait Bar", | ||
1859 | ), | ||
1860 | docs: None, | ||
1861 | }, | ||
1862 | }, | ||
1863 | HoverGotoTypeData { | ||
1864 | mod_path: "S1", | ||
1865 | nav: NavigationTarget { | ||
1866 | file_id: FileId( | ||
1867 | 1, | ||
1868 | ), | ||
1869 | full_range: 32..44, | ||
1870 | name: "S1", | ||
1871 | kind: STRUCT_DEF, | ||
1872 | focus_range: Some( | ||
1873 | 39..41, | ||
1874 | ), | ||
1875 | container_name: None, | ||
1876 | description: Some( | ||
1877 | "struct S1", | ||
1878 | ), | ||
1879 | docs: None, | ||
1880 | }, | ||
1881 | }, | ||
1882 | HoverGotoTypeData { | ||
1883 | mod_path: "S2", | ||
1884 | nav: NavigationTarget { | ||
1885 | file_id: FileId( | ||
1886 | 1, | ||
1887 | ), | ||
1888 | full_range: 45..57, | ||
1889 | name: "S2", | ||
1890 | kind: STRUCT_DEF, | ||
1891 | focus_range: Some( | ||
1892 | 52..54, | ||
1893 | ), | ||
1894 | container_name: None, | ||
1895 | description: Some( | ||
1896 | "struct S2", | ||
1897 | ), | ||
1898 | docs: None, | ||
1899 | }, | ||
1900 | }, | ||
1901 | ], | ||
1902 | ), | ||
1903 | ] | ||
1904 | "###); | ||
1905 | } | ||
1906 | |||
1907 | #[test] | ||
1908 | fn test_hover_arg_impl_trait_has_goto_type_action() { | ||
1909 | let (_, actions) = check_hover_result( | ||
1910 | " | ||
1911 | //- /lib.rs | ||
1912 | trait Foo {} | ||
1913 | fn foo(ar<|>g: &impl Foo) {} | ||
1914 | ", | ||
1915 | &["&impl Foo"], | ||
1916 | ); | ||
1917 | assert_debug_snapshot!(actions, | ||
1918 | @r###" | ||
1919 | [ | ||
1920 | GoToType( | ||
1921 | [ | ||
1922 | HoverGotoTypeData { | ||
1923 | mod_path: "Foo", | ||
1924 | nav: NavigationTarget { | ||
1925 | file_id: FileId( | ||
1926 | 1, | ||
1927 | ), | ||
1928 | full_range: 0..12, | ||
1929 | name: "Foo", | ||
1930 | kind: TRAIT_DEF, | ||
1931 | focus_range: Some( | ||
1932 | 6..9, | ||
1933 | ), | ||
1934 | container_name: None, | ||
1935 | description: Some( | ||
1936 | "trait Foo", | ||
1937 | ), | ||
1938 | docs: None, | ||
1939 | }, | ||
1940 | }, | ||
1941 | ], | ||
1942 | ), | ||
1943 | ] | ||
1944 | "###); | ||
1945 | } | ||
1946 | |||
1947 | #[test] | ||
1948 | fn test_hover_arg_impl_traits_has_goto_type_action() { | ||
1949 | let (_, actions) = check_hover_result( | ||
1950 | " | ||
1951 | //- /lib.rs | ||
1952 | trait Foo {} | ||
1953 | trait Bar<T> {} | ||
1954 | struct S{} | ||
1955 | |||
1956 | fn foo(ar<|>g: &impl Foo + Bar<S>) {} | ||
1957 | ", | ||
1958 | &["&impl Foo + Bar<S>"], | ||
1959 | ); | ||
1960 | assert_debug_snapshot!(actions, | ||
1961 | @r###" | ||
1962 | [ | ||
1963 | GoToType( | ||
1964 | [ | ||
1965 | HoverGotoTypeData { | ||
1966 | mod_path: "Foo", | ||
1967 | nav: NavigationTarget { | ||
1968 | file_id: FileId( | ||
1969 | 1, | ||
1970 | ), | ||
1971 | full_range: 0..12, | ||
1972 | name: "Foo", | ||
1973 | kind: TRAIT_DEF, | ||
1974 | focus_range: Some( | ||
1975 | 6..9, | ||
1976 | ), | ||
1977 | container_name: None, | ||
1978 | description: Some( | ||
1979 | "trait Foo", | ||
1980 | ), | ||
1981 | docs: None, | ||
1982 | }, | ||
1983 | }, | ||
1984 | HoverGotoTypeData { | ||
1985 | mod_path: "Bar", | ||
1986 | nav: NavigationTarget { | ||
1987 | file_id: FileId( | ||
1988 | 1, | ||
1989 | ), | ||
1990 | full_range: 13..28, | ||
1991 | name: "Bar", | ||
1992 | kind: TRAIT_DEF, | ||
1993 | focus_range: Some( | ||
1994 | 19..22, | ||
1995 | ), | ||
1996 | container_name: None, | ||
1997 | description: Some( | ||
1998 | "trait Bar", | ||
1999 | ), | ||
2000 | docs: None, | ||
2001 | }, | ||
2002 | }, | ||
2003 | HoverGotoTypeData { | ||
2004 | mod_path: "S", | ||
2005 | nav: NavigationTarget { | ||
2006 | file_id: FileId( | ||
2007 | 1, | ||
2008 | ), | ||
2009 | full_range: 29..39, | ||
2010 | name: "S", | ||
2011 | kind: STRUCT_DEF, | ||
2012 | focus_range: Some( | ||
2013 | 36..37, | ||
2014 | ), | ||
2015 | container_name: None, | ||
2016 | description: Some( | ||
2017 | "struct S", | ||
2018 | ), | ||
2019 | docs: None, | ||
2020 | }, | ||
2021 | }, | ||
2022 | ], | ||
2023 | ), | ||
2024 | ] | ||
2025 | "###); | ||
2026 | } | ||
2027 | |||
2028 | #[test] | ||
2029 | fn test_hover_arg_generic_impl_trait_has_goto_type_action() { | ||
2030 | let (_, actions) = check_hover_result( | ||
2031 | " | ||
2032 | //- /lib.rs | ||
2033 | trait Foo<T> {} | ||
2034 | struct S {} | ||
2035 | fn foo(ar<|>g: &impl Foo<S>) {} | ||
2036 | ", | ||
2037 | &["&impl Foo<S>"], | ||
2038 | ); | ||
2039 | assert_debug_snapshot!(actions, | ||
2040 | @r###" | ||
2041 | [ | ||
2042 | GoToType( | ||
2043 | [ | ||
2044 | HoverGotoTypeData { | ||
2045 | mod_path: "Foo", | ||
2046 | nav: NavigationTarget { | ||
2047 | file_id: FileId( | ||
2048 | 1, | ||
2049 | ), | ||
2050 | full_range: 0..15, | ||
2051 | name: "Foo", | ||
2052 | kind: TRAIT_DEF, | ||
2053 | focus_range: Some( | ||
2054 | 6..9, | ||
2055 | ), | ||
2056 | container_name: None, | ||
2057 | description: Some( | ||
2058 | "trait Foo", | ||
2059 | ), | ||
2060 | docs: None, | ||
2061 | }, | ||
2062 | }, | ||
2063 | HoverGotoTypeData { | ||
2064 | mod_path: "S", | ||
2065 | nav: NavigationTarget { | ||
2066 | file_id: FileId( | ||
2067 | 1, | ||
2068 | ), | ||
2069 | full_range: 16..27, | ||
2070 | name: "S", | ||
2071 | kind: STRUCT_DEF, | ||
2072 | focus_range: Some( | ||
2073 | 23..24, | ||
2074 | ), | ||
2075 | container_name: None, | ||
2076 | description: Some( | ||
2077 | "struct S", | ||
2078 | ), | ||
2079 | docs: None, | ||
2080 | }, | ||
2081 | }, | ||
2082 | ], | ||
2083 | ), | ||
2084 | ] | ||
2085 | "###); | ||
2086 | } | ||
2087 | |||
2088 | #[test] | ||
2089 | fn test_hover_dyn_return_has_goto_type_action() { | ||
2090 | let (_, actions) = check_hover_result( | ||
2091 | " | ||
2092 | //- /main.rs | ||
2093 | trait Foo {} | ||
2094 | struct S; | ||
2095 | impl Foo for S {} | ||
2096 | |||
2097 | struct B<T>{} | ||
2098 | |||
2099 | fn foo() -> B<dyn Foo> {} | ||
2100 | |||
2101 | fn main() { | ||
2102 | let s<|>t = foo(); | ||
2103 | } | ||
2104 | ", | ||
2105 | &["B<dyn Foo>"], | ||
2106 | ); | ||
2107 | assert_debug_snapshot!(actions, | ||
2108 | @r###" | ||
2109 | [ | ||
2110 | GoToType( | ||
2111 | [ | ||
2112 | HoverGotoTypeData { | ||
2113 | mod_path: "B", | ||
2114 | nav: NavigationTarget { | ||
2115 | file_id: FileId( | ||
2116 | 1, | ||
2117 | ), | ||
2118 | full_range: 41..54, | ||
2119 | name: "B", | ||
2120 | kind: STRUCT_DEF, | ||
2121 | focus_range: Some( | ||
2122 | 48..49, | ||
2123 | ), | ||
2124 | container_name: None, | ||
2125 | description: Some( | ||
2126 | "struct B", | ||
2127 | ), | ||
2128 | docs: None, | ||
2129 | }, | ||
2130 | }, | ||
2131 | HoverGotoTypeData { | ||
2132 | mod_path: "Foo", | ||
2133 | nav: NavigationTarget { | ||
2134 | file_id: FileId( | ||
2135 | 1, | ||
2136 | ), | ||
2137 | full_range: 0..12, | ||
2138 | name: "Foo", | ||
2139 | kind: TRAIT_DEF, | ||
2140 | focus_range: Some( | ||
2141 | 6..9, | ||
2142 | ), | ||
2143 | container_name: None, | ||
2144 | description: Some( | ||
2145 | "trait Foo", | ||
2146 | ), | ||
2147 | docs: None, | ||
2148 | }, | ||
2149 | }, | ||
2150 | ], | ||
2151 | ), | ||
2152 | ] | ||
2153 | "###); | ||
2154 | } | ||
2155 | |||
2156 | #[test] | ||
2157 | fn test_hover_dyn_arg_has_goto_type_action() { | ||
2158 | let (_, actions) = check_hover_result( | ||
2159 | " | ||
2160 | //- /lib.rs | ||
2161 | trait Foo {} | ||
2162 | fn foo(ar<|>g: &dyn Foo) {} | ||
2163 | ", | ||
2164 | &["&dyn Foo"], | ||
2165 | ); | ||
2166 | assert_debug_snapshot!(actions, | ||
2167 | @r###" | ||
2168 | [ | ||
2169 | GoToType( | ||
2170 | [ | ||
2171 | HoverGotoTypeData { | ||
2172 | mod_path: "Foo", | ||
2173 | nav: NavigationTarget { | ||
2174 | file_id: FileId( | ||
2175 | 1, | ||
2176 | ), | ||
2177 | full_range: 0..12, | ||
2178 | name: "Foo", | ||
2179 | kind: TRAIT_DEF, | ||
2180 | focus_range: Some( | ||
2181 | 6..9, | ||
2182 | ), | ||
2183 | container_name: None, | ||
2184 | description: Some( | ||
2185 | "trait Foo", | ||
2186 | ), | ||
2187 | docs: None, | ||
2188 | }, | ||
2189 | }, | ||
2190 | ], | ||
2191 | ), | ||
2192 | ] | ||
2193 | "###); | ||
2194 | } | ||
2195 | |||
2196 | #[test] | ||
2197 | fn test_hover_generic_dyn_arg_has_goto_type_action() { | ||
2198 | let (_, actions) = check_hover_result( | ||
2199 | " | ||
2200 | //- /lib.rs | ||
2201 | trait Foo<T> {} | ||
2202 | struct S {} | ||
2203 | fn foo(ar<|>g: &dyn Foo<S>) {} | ||
2204 | ", | ||
2205 | &["&dyn Foo<S>"], | ||
2206 | ); | ||
2207 | assert_debug_snapshot!(actions, | ||
2208 | @r###" | ||
2209 | [ | ||
2210 | GoToType( | ||
2211 | [ | ||
2212 | HoverGotoTypeData { | ||
2213 | mod_path: "Foo", | ||
2214 | nav: NavigationTarget { | ||
2215 | file_id: FileId( | ||
2216 | 1, | ||
2217 | ), | ||
2218 | full_range: 0..15, | ||
2219 | name: "Foo", | ||
2220 | kind: TRAIT_DEF, | ||
2221 | focus_range: Some( | ||
2222 | 6..9, | ||
2223 | ), | ||
2224 | container_name: None, | ||
2225 | description: Some( | ||
2226 | "trait Foo", | ||
2227 | ), | ||
2228 | docs: None, | ||
2229 | }, | ||
2230 | }, | ||
2231 | HoverGotoTypeData { | ||
2232 | mod_path: "S", | ||
2233 | nav: NavigationTarget { | ||
2234 | file_id: FileId( | ||
2235 | 1, | ||
2236 | ), | ||
2237 | full_range: 16..27, | ||
2238 | name: "S", | ||
2239 | kind: STRUCT_DEF, | ||
2240 | focus_range: Some( | ||
2241 | 23..24, | ||
2242 | ), | ||
2243 | container_name: None, | ||
2244 | description: Some( | ||
2245 | "struct S", | ||
2246 | ), | ||
2247 | docs: None, | ||
2248 | }, | ||
2249 | }, | ||
2250 | ], | ||
2251 | ), | ||
2252 | ] | ||
2253 | "###); | ||
2254 | } | ||
2255 | |||
2256 | #[test] | ||
2257 | fn test_hover_goto_type_action_links_order() { | ||
2258 | let (_, actions) = check_hover_result( | ||
2259 | " | ||
2260 | //- /lib.rs | ||
2261 | trait ImplTrait<T> {} | ||
2262 | trait DynTrait<T> {} | ||
2263 | struct B<T> {} | ||
2264 | struct S {} | ||
2265 | |||
2266 | fn foo(a<|>rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {} | ||
2267 | ", | ||
2268 | &["&impl ImplTrait<B<dyn DynTrait<B<S>>>>"], | ||
2269 | ); | ||
2270 | assert_debug_snapshot!(actions, | ||
2271 | @r###" | ||
2272 | [ | ||
2273 | GoToType( | ||
2274 | [ | ||
2275 | HoverGotoTypeData { | ||
2276 | mod_path: "ImplTrait", | ||
2277 | nav: NavigationTarget { | ||
2278 | file_id: FileId( | ||
2279 | 1, | ||
2280 | ), | ||
2281 | full_range: 0..21, | ||
2282 | name: "ImplTrait", | ||
2283 | kind: TRAIT_DEF, | ||
2284 | focus_range: Some( | ||
2285 | 6..15, | ||
2286 | ), | ||
2287 | container_name: None, | ||
2288 | description: Some( | ||
2289 | "trait ImplTrait", | ||
2290 | ), | ||
2291 | docs: None, | ||
2292 | }, | ||
2293 | }, | ||
2294 | HoverGotoTypeData { | ||
2295 | mod_path: "B", | ||
2296 | nav: NavigationTarget { | ||
2297 | file_id: FileId( | ||
2298 | 1, | ||
2299 | ), | ||
2300 | full_range: 43..57, | ||
2301 | name: "B", | ||
2302 | kind: STRUCT_DEF, | ||
2303 | focus_range: Some( | ||
2304 | 50..51, | ||
2305 | ), | ||
2306 | container_name: None, | ||
2307 | description: Some( | ||
2308 | "struct B", | ||
2309 | ), | ||
2310 | docs: None, | ||
2311 | }, | ||
2312 | }, | ||
2313 | HoverGotoTypeData { | ||
2314 | mod_path: "DynTrait", | ||
2315 | nav: NavigationTarget { | ||
2316 | file_id: FileId( | ||
2317 | 1, | ||
2318 | ), | ||
2319 | full_range: 22..42, | ||
2320 | name: "DynTrait", | ||
2321 | kind: TRAIT_DEF, | ||
2322 | focus_range: Some( | ||
2323 | 28..36, | ||
2324 | ), | ||
2325 | container_name: None, | ||
2326 | description: Some( | ||
2327 | "trait DynTrait", | ||
2328 | ), | ||
2329 | docs: None, | ||
2330 | }, | ||
2331 | }, | ||
2332 | HoverGotoTypeData { | ||
2333 | mod_path: "S", | ||
2334 | nav: NavigationTarget { | ||
2335 | file_id: FileId( | ||
2336 | 1, | ||
2337 | ), | ||
2338 | full_range: 58..69, | ||
2339 | name: "S", | ||
2340 | kind: STRUCT_DEF, | ||
2341 | focus_range: Some( | ||
2342 | 65..66, | ||
2343 | ), | ||
2344 | container_name: None, | ||
2345 | description: Some( | ||
2346 | "struct S", | ||
2347 | ), | ||
2348 | docs: None, | ||
2349 | }, | ||
2350 | }, | ||
2351 | ], | ||
2352 | ), | ||
2353 | ] | ||
2354 | "###); | ||
2355 | } | ||
2356 | |||
2357 | #[test] | ||
2358 | fn test_hover_associated_type_has_goto_type_action() { | ||
2359 | let (_, actions) = check_hover_result( | ||
2360 | " | ||
2361 | //- /main.rs | ||
2362 | trait Foo { | ||
2363 | type Item; | ||
2364 | fn get(self) -> Self::Item {} | ||
2365 | } | ||
2366 | |||
2367 | struct Bar{} | ||
2368 | struct S{} | ||
2369 | |||
2370 | impl Foo for S{ | ||
2371 | type Item = Bar; | ||
2372 | } | ||
2373 | |||
2374 | fn test() -> impl Foo { | ||
2375 | S{} | ||
2376 | } | ||
2377 | |||
2378 | fn main() { | ||
2379 | let s<|>t = test().get(); | ||
2380 | } | ||
2381 | ", | ||
2382 | &["Foo::Item<impl Foo>"], | ||
2383 | ); | ||
2384 | assert_debug_snapshot!(actions, | ||
2385 | @r###" | ||
2386 | [ | ||
2387 | GoToType( | ||
2388 | [ | ||
2389 | HoverGotoTypeData { | ||
2390 | mod_path: "Foo", | ||
2391 | nav: NavigationTarget { | ||
2392 | file_id: FileId( | ||
2393 | 1, | ||
2394 | ), | ||
2395 | full_range: 0..62, | ||
2396 | name: "Foo", | ||
2397 | kind: TRAIT_DEF, | ||
2398 | focus_range: Some( | ||
2399 | 6..9, | ||
2400 | ), | ||
2401 | container_name: None, | ||
2402 | description: Some( | ||
2403 | "trait Foo", | ||
2404 | ), | ||
2405 | docs: None, | ||
2406 | }, | ||
2407 | }, | ||
2408 | ], | ||
2409 | ), | ||
2410 | ] | ||
2411 | "###); | ||
2412 | } | ||
1313 | } | 2413 | } |
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs index 28f686767..47823718f 100644 --- a/crates/ra_ide/src/lib.rs +++ b/crates/ra_ide/src/lib.rs | |||
@@ -66,11 +66,10 @@ pub use crate::{ | |||
66 | display::{file_structure, FunctionSignature, NavigationTarget, StructureNode}, | 66 | display::{file_structure, FunctionSignature, NavigationTarget, StructureNode}, |
67 | expand_macro::ExpandedMacro, | 67 | expand_macro::ExpandedMacro, |
68 | folding_ranges::{Fold, FoldKind}, | 68 | folding_ranges::{Fold, FoldKind}, |
69 | hover::{HoverAction, HoverConfig, HoverResult}, | 69 | hover::{HoverAction, HoverConfig, HoverGotoTypeData, HoverResult}, |
70 | inlay_hints::{InlayHint, InlayHintsConfig, InlayKind}, | 70 | inlay_hints::{InlayHint, InlayHintsConfig, InlayKind}, |
71 | references::{Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult}, | 71 | references::{Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult}, |
72 | runnables::{Runnable, RunnableKind, TestId}, | 72 | runnables::{Runnable, RunnableKind, TestId}, |
73 | ssr::SsrError, | ||
74 | syntax_highlighting::{ | 73 | syntax_highlighting::{ |
75 | Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange, | 74 | Highlight, HighlightModifier, HighlightModifiers, HighlightTag, HighlightedRange, |
76 | }, | 75 | }, |
@@ -82,13 +81,14 @@ pub use ra_db::{ | |||
82 | Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, | 81 | Canceled, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange, SourceRootId, |
83 | }; | 82 | }; |
84 | pub use ra_ide_db::{ | 83 | pub use ra_ide_db::{ |
85 | change::{AnalysisChange, LibraryData}, | 84 | change::AnalysisChange, |
86 | line_index::{LineCol, LineIndex}, | 85 | line_index::{LineCol, LineIndex}, |
87 | search::SearchScope, | 86 | search::SearchScope, |
88 | source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, | 87 | source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, |
89 | symbol_index::Query, | 88 | symbol_index::Query, |
90 | RootDatabase, | 89 | RootDatabase, |
91 | }; | 90 | }; |
91 | pub use ra_ssr::SsrError; | ||
92 | pub use ra_text_edit::{Indel, TextEdit}; | 92 | pub use ra_text_edit::{Indel, TextEdit}; |
93 | 93 | ||
94 | pub type Cancelable<T> = Result<T, Canceled>; | 94 | pub type Cancelable<T> = Result<T, Canceled>; |
@@ -440,12 +440,14 @@ impl Analysis { | |||
440 | 440 | ||
441 | /// Computes syntax highlighting for the given file | 441 | /// Computes syntax highlighting for the given file |
442 | pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { | 442 | pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> { |
443 | self.with_db(|db| syntax_highlighting::highlight(db, file_id, None)) | 443 | self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false)) |
444 | } | 444 | } |
445 | 445 | ||
446 | /// Computes syntax highlighting for the given file range. | 446 | /// Computes syntax highlighting for the given file range. |
447 | pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> { | 447 | pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> { |
448 | self.with_db(|db| syntax_highlighting::highlight(db, frange.file_id, Some(frange.range))) | 448 | self.with_db(|db| { |
449 | syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false) | ||
450 | }) | ||
449 | } | 451 | } |
450 | 452 | ||
451 | /// Computes syntax highlighting for the given file. | 453 | /// Computes syntax highlighting for the given file. |
diff --git a/crates/ra_ide/src/prime_caches.rs b/crates/ra_ide/src/prime_caches.rs index 90bf7d25f..c5ab5a1d8 100644 --- a/crates/ra_ide/src/prime_caches.rs +++ b/crates/ra_ide/src/prime_caches.rs | |||
@@ -7,6 +7,6 @@ use crate::{FileId, RootDatabase}; | |||
7 | 7 | ||
8 | pub(crate) fn prime_caches(db: &RootDatabase, files: Vec<FileId>) { | 8 | pub(crate) fn prime_caches(db: &RootDatabase, files: Vec<FileId>) { |
9 | for file in files { | 9 | for file in files { |
10 | let _ = crate::syntax_highlighting::highlight(db, file, None); | 10 | let _ = crate::syntax_highlighting::highlight(db, file, None, false); |
11 | } | 11 | } |
12 | } | 12 | } |
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs index 915d4f4d3..99c2581b7 100644 --- a/crates/ra_ide/src/references/rename.rs +++ b/crates/ra_ide/src/references/rename.rs | |||
@@ -1,11 +1,14 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{ModuleSource, Semantics}; | 3 | use hir::{Module, ModuleDef, ModuleSource, Semantics}; |
4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabaseExt}; | 4 | use ra_db::{RelativePathBuf, SourceDatabaseExt}; |
5 | use ra_ide_db::RootDatabase; | 5 | use ra_ide_db::{ |
6 | defs::{classify_name, classify_name_ref, Definition, NameClass, NameRefClass}, | ||
7 | RootDatabase, | ||
8 | }; | ||
6 | use ra_syntax::{ | 9 | use ra_syntax::{ |
7 | algo::find_node_at_offset, ast, ast::TypeAscriptionOwner, lex_single_valid_syntax_kind, | 10 | algo::find_node_at_offset, ast, ast::NameOwner, ast::TypeAscriptionOwner, |
8 | AstNode, SyntaxKind, SyntaxNode, SyntaxToken, | 11 | lex_single_valid_syntax_kind, match_ast, AstNode, SyntaxKind, SyntaxNode, SyntaxToken, |
9 | }; | 12 | }; |
10 | use ra_text_edit::TextEdit; | 13 | use ra_text_edit::TextEdit; |
11 | use std::convert::TryInto; | 14 | use std::convert::TryInto; |
@@ -30,10 +33,8 @@ pub(crate) fn rename( | |||
30 | let sema = Semantics::new(db); | 33 | let sema = Semantics::new(db); |
31 | let source_file = sema.parse(position.file_id); | 34 | let source_file = sema.parse(position.file_id); |
32 | let syntax = source_file.syntax(); | 35 | let syntax = source_file.syntax(); |
33 | if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { | 36 | if let Some(module) = find_module_at_offset(&sema, position, syntax) { |
34 | let range = ast_name.syntax().text_range(); | 37 | rename_mod(db, position, module, new_name) |
35 | rename_mod(&sema, &ast_name, &ast_module, position, new_name) | ||
36 | .map(|info| RangeInfo::new(range, info)) | ||
37 | } else if let Some(self_token) = | 38 | } else if let Some(self_token) = |
38 | syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW) | 39 | syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::SELF_KW) |
39 | { | 40 | { |
@@ -43,13 +44,32 @@ pub(crate) fn rename( | |||
43 | } | 44 | } |
44 | } | 45 | } |
45 | 46 | ||
46 | fn find_name_and_module_at_offset( | 47 | fn find_module_at_offset( |
47 | syntax: &SyntaxNode, | 48 | sema: &Semantics<RootDatabase>, |
48 | position: FilePosition, | 49 | position: FilePosition, |
49 | ) -> Option<(ast::Name, ast::Module)> { | 50 | syntax: &SyntaxNode, |
50 | let ast_name = find_node_at_offset::<ast::Name>(syntax, position.offset)?; | 51 | ) -> Option<Module> { |
51 | let ast_module = ast::Module::cast(ast_name.syntax().parent()?)?; | 52 | let ident = syntax.token_at_offset(position.offset).find(|t| t.kind() == SyntaxKind::IDENT)?; |
52 | Some((ast_name, ast_module)) | 53 | |
54 | let module = match_ast! { | ||
55 | match (ident.parent()) { | ||
56 | ast::NameRef(name_ref) => { | ||
57 | match classify_name_ref(sema, &name_ref)? { | ||
58 | NameRefClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, | ||
59 | _ => return None, | ||
60 | } | ||
61 | }, | ||
62 | ast::Name(name) => { | ||
63 | match classify_name(&sema, &name)? { | ||
64 | NameClass::Definition(Definition::ModuleDef(ModuleDef::Module(module))) => module, | ||
65 | _ => return None, | ||
66 | } | ||
67 | }, | ||
68 | _ => return None, | ||
69 | } | ||
70 | }; | ||
71 | |||
72 | Some(module) | ||
53 | } | 73 | } |
54 | 74 | ||
55 | fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFileEdit { | 75 | fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFileEdit { |
@@ -77,58 +97,50 @@ fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFil | |||
77 | } | 97 | } |
78 | 98 | ||
79 | fn rename_mod( | 99 | fn rename_mod( |
80 | sema: &Semantics<RootDatabase>, | 100 | db: &RootDatabase, |
81 | ast_name: &ast::Name, | ||
82 | ast_module: &ast::Module, | ||
83 | position: FilePosition, | 101 | position: FilePosition, |
102 | module: Module, | ||
84 | new_name: &str, | 103 | new_name: &str, |
85 | ) -> Option<SourceChange> { | 104 | ) -> Option<RangeInfo<SourceChange>> { |
86 | let mut source_file_edits = Vec::new(); | 105 | let mut source_file_edits = Vec::new(); |
87 | let mut file_system_edits = Vec::new(); | 106 | let mut file_system_edits = Vec::new(); |
88 | if let Some(module) = sema.to_def(ast_module) { | 107 | |
89 | let src = module.definition_source(sema.db); | 108 | let src = module.definition_source(db); |
90 | let file_id = src.file_id.original_file(sema.db); | 109 | let file_id = src.file_id.original_file(db); |
91 | match src.value { | 110 | match src.value { |
92 | ModuleSource::SourceFile(..) => { | 111 | ModuleSource::SourceFile(..) => { |
93 | let mod_path: RelativePathBuf = sema.db.file_relative_path(file_id); | 112 | let mod_path: RelativePathBuf = db.file_relative_path(file_id); |
94 | // mod is defined in path/to/dir/mod.rs | 113 | // mod is defined in path/to/dir/mod.rs |
95 | let dst_path = if mod_path.file_stem() == Some("mod") { | 114 | let dst = if mod_path.file_stem() == Some("mod") { |
96 | mod_path | 115 | format!("../{}/mod.rs", new_name) |
97 | .parent() | 116 | } else { |
98 | .and_then(|p| p.parent()) | 117 | format!("{}.rs", new_name) |
99 | .or_else(|| Some(RelativePath::new(""))) | 118 | }; |
100 | .map(|p| p.join(new_name).join("mod.rs")) | 119 | let move_file = |
101 | } else { | 120 | FileSystemEdit::MoveFile { src: file_id, anchor: position.file_id, dst }; |
102 | Some(mod_path.with_file_name(new_name).with_extension("rs")) | 121 | file_system_edits.push(move_file); |
103 | }; | ||
104 | if let Some(path) = dst_path { | ||
105 | let move_file = FileSystemEdit::MoveFile { | ||
106 | src: file_id, | ||
107 | dst_source_root: sema.db.file_source_root(position.file_id), | ||
108 | dst_path: path, | ||
109 | }; | ||
110 | file_system_edits.push(move_file); | ||
111 | } | ||
112 | } | ||
113 | ModuleSource::Module(..) => {} | ||
114 | } | 122 | } |
123 | ModuleSource::Module(..) => {} | ||
115 | } | 124 | } |
116 | 125 | ||
117 | let edit = SourceFileEdit { | 126 | if let Some(src) = module.declaration_source(db) { |
118 | file_id: position.file_id, | 127 | let file_id = src.file_id.original_file(db); |
119 | edit: TextEdit::replace(ast_name.syntax().text_range(), new_name.into()), | 128 | let name = src.value.name()?; |
120 | }; | 129 | let edit = SourceFileEdit { |
121 | source_file_edits.push(edit); | 130 | file_id: file_id, |
122 | 131 | edit: TextEdit::replace(name.syntax().text_range(), new_name.into()), | |
123 | if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) { | 132 | }; |
124 | let ref_edits = refs | 133 | source_file_edits.push(edit); |
125 | .references | ||
126 | .into_iter() | ||
127 | .map(|reference| source_edit_from_reference(reference, new_name)); | ||
128 | source_file_edits.extend(ref_edits); | ||
129 | } | 134 | } |
130 | 135 | ||
131 | Some(SourceChange::from_edits(source_file_edits, file_system_edits)) | 136 | let RangeInfo { range, info: refs } = find_all_refs(db, position, None)?; |
137 | let ref_edits = refs | ||
138 | .references | ||
139 | .into_iter() | ||
140 | .map(|reference| source_edit_from_reference(reference, new_name)); | ||
141 | source_file_edits.extend(ref_edits); | ||
142 | |||
143 | Some(RangeInfo::new(range, SourceChange::from_edits(source_file_edits, file_system_edits))) | ||
132 | } | 144 | } |
133 | 145 | ||
134 | fn rename_to_self(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<SourceChange>> { | 146 | fn rename_to_self(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<SourceChange>> { |
@@ -623,16 +635,16 @@ mod tests { | |||
623 | #[test] | 635 | #[test] |
624 | fn test_rename_mod() { | 636 | fn test_rename_mod() { |
625 | let (analysis, position) = analysis_and_position( | 637 | let (analysis, position) = analysis_and_position( |
626 | " | 638 | r#" |
627 | //- /lib.rs | 639 | //- /lib.rs |
628 | mod bar; | 640 | mod bar; |
629 | 641 | ||
630 | //- /bar.rs | 642 | //- /bar.rs |
631 | mod foo<|>; | 643 | mod foo<|>; |
632 | 644 | ||
633 | //- /bar/foo.rs | 645 | //- /bar/foo.rs |
634 | // emtpy | 646 | // emtpy |
635 | ", | 647 | "#, |
636 | ); | 648 | ); |
637 | let new_name = "foo2"; | 649 | let new_name = "foo2"; |
638 | let source_change = analysis.rename(position, new_name).unwrap(); | 650 | let source_change = analysis.rename(position, new_name).unwrap(); |
@@ -662,10 +674,80 @@ mod tests { | |||
662 | src: FileId( | 674 | src: FileId( |
663 | 3, | 675 | 3, |
664 | ), | 676 | ), |
665 | dst_source_root: SourceRootId( | 677 | anchor: FileId( |
666 | 0, | 678 | 2, |
667 | ), | 679 | ), |
668 | dst_path: "bar/foo2.rs", | 680 | dst: "foo2.rs", |
681 | }, | ||
682 | ], | ||
683 | is_snippet: false, | ||
684 | }, | ||
685 | }, | ||
686 | ) | ||
687 | "###); | ||
688 | } | ||
689 | |||
690 | #[test] | ||
691 | fn test_rename_mod_in_use_tree() { | ||
692 | let (analysis, position) = analysis_and_position( | ||
693 | r#" | ||
694 | //- /main.rs | ||
695 | pub mod foo; | ||
696 | pub mod bar; | ||
697 | fn main() {} | ||
698 | |||
699 | //- /foo.rs | ||
700 | pub struct FooContent; | ||
701 | |||
702 | //- /bar.rs | ||
703 | use crate::foo<|>::FooContent; | ||
704 | "#, | ||
705 | ); | ||
706 | let new_name = "qux"; | ||
707 | let source_change = analysis.rename(position, new_name).unwrap(); | ||
708 | assert_debug_snapshot!(&source_change, | ||
709 | @r###" | ||
710 | Some( | ||
711 | RangeInfo { | ||
712 | range: 11..14, | ||
713 | info: SourceChange { | ||
714 | source_file_edits: [ | ||
715 | SourceFileEdit { | ||
716 | file_id: FileId( | ||
717 | 1, | ||
718 | ), | ||
719 | edit: TextEdit { | ||
720 | indels: [ | ||
721 | Indel { | ||
722 | insert: "qux", | ||
723 | delete: 8..11, | ||
724 | }, | ||
725 | ], | ||
726 | }, | ||
727 | }, | ||
728 | SourceFileEdit { | ||
729 | file_id: FileId( | ||
730 | 3, | ||
731 | ), | ||
732 | edit: TextEdit { | ||
733 | indels: [ | ||
734 | Indel { | ||
735 | insert: "qux", | ||
736 | delete: 11..14, | ||
737 | }, | ||
738 | ], | ||
739 | }, | ||
740 | }, | ||
741 | ], | ||
742 | file_system_edits: [ | ||
743 | MoveFile { | ||
744 | src: FileId( | ||
745 | 2, | ||
746 | ), | ||
747 | anchor: FileId( | ||
748 | 3, | ||
749 | ), | ||
750 | dst: "qux.rs", | ||
669 | }, | 751 | }, |
670 | ], | 752 | ], |
671 | is_snippet: false, | 753 | is_snippet: false, |
@@ -678,12 +760,12 @@ mod tests { | |||
678 | #[test] | 760 | #[test] |
679 | fn test_rename_mod_in_dir() { | 761 | fn test_rename_mod_in_dir() { |
680 | let (analysis, position) = analysis_and_position( | 762 | let (analysis, position) = analysis_and_position( |
681 | " | 763 | r#" |
682 | //- /lib.rs | 764 | //- /lib.rs |
683 | mod fo<|>o; | 765 | mod fo<|>o; |
684 | //- /foo/mod.rs | 766 | //- /foo/mod.rs |
685 | // emtpy | 767 | // emtpy |
686 | ", | 768 | "#, |
687 | ); | 769 | ); |
688 | let new_name = "foo2"; | 770 | let new_name = "foo2"; |
689 | let source_change = analysis.rename(position, new_name).unwrap(); | 771 | let source_change = analysis.rename(position, new_name).unwrap(); |
@@ -713,10 +795,10 @@ mod tests { | |||
713 | src: FileId( | 795 | src: FileId( |
714 | 2, | 796 | 2, |
715 | ), | 797 | ), |
716 | dst_source_root: SourceRootId( | 798 | anchor: FileId( |
717 | 0, | 799 | 1, |
718 | ), | 800 | ), |
719 | dst_path: "foo2/mod.rs", | 801 | dst: "../foo2/mod.rs", |
720 | }, | 802 | }, |
721 | ], | 803 | ], |
722 | is_snippet: false, | 804 | is_snippet: false, |
@@ -753,19 +835,19 @@ mod tests { | |||
753 | #[test] | 835 | #[test] |
754 | fn test_rename_mod_filename_and_path() { | 836 | fn test_rename_mod_filename_and_path() { |
755 | let (analysis, position) = analysis_and_position( | 837 | let (analysis, position) = analysis_and_position( |
756 | " | 838 | r#" |
757 | //- /lib.rs | 839 | //- /lib.rs |
758 | mod bar; | 840 | mod bar; |
759 | fn f() { | 841 | fn f() { |
760 | bar::foo::fun() | 842 | bar::foo::fun() |
761 | } | 843 | } |
762 | 844 | ||
763 | //- /bar.rs | 845 | //- /bar.rs |
764 | pub mod foo<|>; | 846 | pub mod foo<|>; |
765 | 847 | ||
766 | //- /bar/foo.rs | 848 | //- /bar/foo.rs |
767 | // pub fn fun() {} | 849 | // pub fn fun() {} |
768 | ", | 850 | "#, |
769 | ); | 851 | ); |
770 | let new_name = "foo2"; | 852 | let new_name = "foo2"; |
771 | let source_change = analysis.rename(position, new_name).unwrap(); | 853 | let source_change = analysis.rename(position, new_name).unwrap(); |
@@ -808,10 +890,10 @@ mod tests { | |||
808 | src: FileId( | 890 | src: FileId( |
809 | 3, | 891 | 3, |
810 | ), | 892 | ), |
811 | dst_source_root: SourceRootId( | 893 | anchor: FileId( |
812 | 0, | 894 | 2, |
813 | ), | 895 | ), |
814 | dst_path: "bar/foo2.rs", | 896 | dst: "foo2.rs", |
815 | }, | 897 | }, |
816 | ], | 898 | ], |
817 | is_snippet: false, | 899 | is_snippet: false, |
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs index fc57dc33d..8105ef373 100644 --- a/crates/ra_ide/src/runnables.rs +++ b/crates/ra_ide/src/runnables.rs | |||
@@ -171,7 +171,15 @@ fn runnable_fn( | |||
171 | let cfg_exprs = | 171 | let cfg_exprs = |
172 | attrs.by_key("cfg").tt_values().map(|subtree| ra_cfg::parse_cfg(subtree)).collect(); | 172 | attrs.by_key("cfg").tt_values().map(|subtree| ra_cfg::parse_cfg(subtree)).collect(); |
173 | 173 | ||
174 | let nav = NavigationTarget::from_named(sema.db, InFile::new(file_id.into(), &fn_def)); | 174 | let nav = if let RunnableKind::DocTest { .. } = kind { |
175 | NavigationTarget::from_doc_commented( | ||
176 | sema.db, | ||
177 | InFile::new(file_id.into(), &fn_def), | ||
178 | InFile::new(file_id.into(), &fn_def), | ||
179 | ) | ||
180 | } else { | ||
181 | NavigationTarget::from_named(sema.db, InFile::new(file_id.into(), &fn_def)) | ||
182 | }; | ||
175 | Some(Runnable { nav, kind, cfg_exprs }) | 183 | Some(Runnable { nav, kind, cfg_exprs }) |
176 | } | 184 | } |
177 | 185 | ||
@@ -419,9 +427,7 @@ mod tests { | |||
419 | full_range: 22..64, | 427 | full_range: 22..64, |
420 | name: "foo", | 428 | name: "foo", |
421 | kind: FN_DEF, | 429 | kind: FN_DEF, |
422 | focus_range: Some( | 430 | focus_range: None, |
423 | 56..59, | ||
424 | ), | ||
425 | container_name: None, | 431 | container_name: None, |
426 | description: None, | 432 | description: None, |
427 | docs: None, | 433 | docs: None, |
@@ -486,9 +492,7 @@ mod tests { | |||
486 | full_range: 51..105, | 492 | full_range: 51..105, |
487 | name: "foo", | 493 | name: "foo", |
488 | kind: FN_DEF, | 494 | kind: FN_DEF, |
489 | focus_range: Some( | 495 | focus_range: None, |
490 | 97..100, | ||
491 | ), | ||
492 | container_name: None, | 496 | container_name: None, |
493 | description: None, | 497 | description: None, |
494 | docs: None, | 498 | docs: None, |
diff --git a/crates/ra_ide/src/snapshots/highlight_doctest.html b/crates/ra_ide/src/snapshots/highlight_doctest.html index 0ae8c7efc..63199cdbe 100644 --- a/crates/ra_ide/src/snapshots/highlight_doctest.html +++ b/crates/ra_ide/src/snapshots/highlight_doctest.html | |||
@@ -25,47 +25,72 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .variable { color: #DCDCCC; } | 25 | .variable { color: #DCDCCC; } |
26 | .format_specifier { color: #CC696B; } | 26 | .format_specifier { color: #CC696B; } |
27 | .mutable { text-decoration: underline; } | 27 | .mutable { text-decoration: underline; } |
28 | .unresolved_reference { color: #FC5555; } | ||
29 | .escape_sequence { color: #94BFF3; } | ||
28 | 30 | ||
29 | .keyword { color: #F0DFAF; font-weight: bold; } | 31 | .keyword { color: #F0DFAF; font-weight: bold; } |
30 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
31 | .control { font-style: italic; } | 33 | .control { font-style: italic; } |
32 | </style> | 34 | </style> |
33 | <pre><code><span class="keyword">impl</span> <span class="unresolved_reference">Foo</span> { | 35 | <pre><code><span class="keyword">struct</span> <span class="struct declaration">Foo</span> { |
34 | <span class="comment">/// Constructs a new `Foo`.</span> | 36 | <span class="field declaration">bar</span>: <span class="builtin_type">bool</span>, |
35 | <span class="comment">///</span> | 37 | } |
36 | <span class="comment">/// # Examples</span> | 38 | |
37 | <span class="comment">///</span> | 39 | <span class="keyword">impl</span> <span class="struct">Foo</span> { |
38 | <span class="comment">/// ```</span> | 40 | <span class="keyword">pub</span> <span class="keyword">const</span> <span class="constant declaration">bar</span>: <span class="builtin_type">bool</span> = <span class="bool_literal">true</span>; |
39 | <span class="comment">/// #</span> <span class="attribute">#![</span><span class="function attribute">allow</span><span class="attribute">(unused_mut)]</span> | 41 | |
40 | <span class="comment">/// </span><span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">foo</span>: <span class="unresolved_reference">Foo</span> = <span class="unresolved_reference">Foo</span>::<span class="unresolved_reference">new</span>(); | 42 | <span class="comment documentation">/// Constructs a new `Foo`.</span> |
41 | <span class="comment">/// ```</span> | 43 | <span class="comment documentation">///</span> |
42 | <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function declaration">new</span>() -> <span class="unresolved_reference">Foo</span> { | 44 | <span class="comment documentation">/// # Examples</span> |
43 | <span class="unresolved_reference">Foo</span> { } | 45 | <span class="comment documentation">///</span> |
46 | <span class="comment documentation">/// ```</span> | ||
47 | <span class="comment documentation">/// #</span> <span class="attribute">#![</span><span class="function attribute">allow</span><span class="attribute">(unused_mut)]</span> | ||
48 | <span class="comment documentation">/// </span><span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">foo</span>: <span class="struct">Foo</span> = <span class="struct">Foo</span>::<span class="function">new</span>(); | ||
49 | <span class="comment documentation">/// ```</span> | ||
50 | <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function declaration">new</span>() -> <span class="struct">Foo</span> { | ||
51 | <span class="struct">Foo</span> { <span class="field">bar</span>: <span class="bool_literal">true</span> } | ||
44 | } | 52 | } |
45 | 53 | ||
46 | <span class="comment">/// `bar` method on `Foo`.</span> | 54 | <span class="comment documentation">/// `bar` method on `Foo`.</span> |
47 | <span class="comment">///</span> | 55 | <span class="comment documentation">///</span> |
48 | <span class="comment">/// # Examples</span> | 56 | <span class="comment documentation">/// # Examples</span> |
49 | <span class="comment">///</span> | 57 | <span class="comment documentation">///</span> |
50 | <span class="comment">/// ```</span> | 58 | <span class="comment documentation">/// ```</span> |
51 | <span class="comment">/// </span><span class="keyword">let</span> <span class="variable declaration">foo</span> = <span class="unresolved_reference">Foo</span>::<span class="unresolved_reference">new</span>(); | 59 | <span class="comment documentation">/// </span><span class="keyword">use</span> <span class="module">x</span>::<span class="module">y</span>; |
52 | <span class="comment">///</span> | 60 | <span class="comment documentation">///</span> |
53 | <span class="comment">/// </span><span class="comment">// calls bar on foo</span> | 61 | <span class="comment documentation">/// </span><span class="keyword">let</span> <span class="variable declaration">foo</span> = <span class="struct">Foo</span>::<span class="function">new</span>(); |
54 | <span class="comment">/// </span><span class="macro">assert!</span>(foo.bar()); | 62 | <span class="comment documentation">///</span> |
55 | <span class="comment">///</span> | 63 | <span class="comment documentation">/// </span><span class="comment">// calls bar on foo</span> |
56 | <span class="comment">/// </span><span class="comment">/* multi-line | 64 | <span class="comment documentation">/// </span><span class="macro">assert!</span>(foo.bar()); |
57 | </span><span class="comment">/// </span><span class="comment"> comment */</span> | 65 | <span class="comment documentation">///</span> |
58 | <span class="comment">///</span> | 66 | <span class="comment documentation">/// </span><span class="keyword">let</span> <span class="variable declaration">bar</span> = <span class="variable">foo</span>.<span class="field">bar</span> || <span class="struct">Foo</span>::<span class="constant">bar</span>; |
59 | <span class="comment">/// </span><span class="keyword">let</span> <span class="variable declaration">multi_line_string</span> = <span class="string_literal">"Foo | 67 | <span class="comment documentation">///</span> |
60 | </span><span class="comment">/// </span><span class="string_literal"> bar | 68 | <span class="comment documentation">/// </span><span class="comment">/* multi-line |
61 | </span><span class="comment">/// </span><span class="string_literal"> "</span>; | 69 | </span><span class="comment documentation">/// </span><span class="comment"> comment */</span> |
62 | <span class="comment">///</span> | 70 | <span class="comment documentation">///</span> |
63 | <span class="comment">/// ```</span> | 71 | <span class="comment documentation">/// </span><span class="keyword">let</span> <span class="variable declaration">multi_line_string</span> = <span class="string_literal">"Foo |
64 | <span class="comment">///</span> | 72 | </span><span class="comment documentation">/// </span><span class="string_literal"> bar |
65 | <span class="comment">/// ```</span> | 73 | </span><span class="comment documentation">/// </span><span class="string_literal"> "</span>; |
66 | <span class="comment">/// </span><span class="keyword">let</span> <span class="variable declaration">foobar</span> = <span class="unresolved_reference">Foo</span>::<span class="unresolved_reference">new</span>().<span class="unresolved_reference">bar</span>(); | 74 | <span class="comment documentation">///</span> |
67 | <span class="comment">/// ```</span> | 75 | <span class="comment documentation">/// ```</span> |
76 | <span class="comment documentation">///</span> | ||
77 | <span class="comment documentation">/// ```rust,no_run</span> | ||
78 | <span class="comment documentation">/// </span><span class="keyword">let</span> <span class="variable declaration">foobar</span> = <span class="struct">Foo</span>::<span class="function">new</span>().<span class="function">bar</span>(); | ||
79 | <span class="comment documentation">/// ```</span> | ||
80 | <span class="comment documentation">///</span> | ||
81 | <span class="comment documentation">/// ```sh</span> | ||
82 | <span class="comment documentation">/// echo 1</span> | ||
83 | <span class="comment documentation">/// ```</span> | ||
68 | <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">foo</span>(&<span class="self_keyword">self</span>) -> <span class="builtin_type">bool</span> { | 84 | <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration">foo</span>(&<span class="self_keyword">self</span>) -> <span class="builtin_type">bool</span> { |
69 | <span class="bool_literal">true</span> | 85 | <span class="bool_literal">true</span> |
70 | } | 86 | } |
87 | } | ||
88 | |||
89 | <span class="comment documentation">/// ```</span> | ||
90 | <span class="comment documentation">/// </span><span class="macro">noop!</span>(<span class="numeric_literal">1</span>); | ||
91 | <span class="comment documentation">/// ```</span> | ||
92 | <span class="macro">macro_rules!</span> <span class="macro declaration">noop</span> { | ||
93 | ($expr:expr) => { | ||
94 | $expr | ||
95 | } | ||
71 | }</code></pre> \ No newline at end of file | 96 | }</code></pre> \ No newline at end of file |
diff --git a/crates/ra_ide/src/snapshots/highlight_injection.html b/crates/ra_ide/src/snapshots/highlight_injection.html index dec06eb51..47dbd7bc8 100644 --- a/crates/ra_ide/src/snapshots/highlight_injection.html +++ b/crates/ra_ide/src/snapshots/highlight_injection.html | |||
@@ -25,6 +25,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .variable { color: #DCDCCC; } | 25 | .variable { color: #DCDCCC; } |
26 | .format_specifier { color: #CC696B; } | 26 | .format_specifier { color: #CC696B; } |
27 | .mutable { text-decoration: underline; } | 27 | .mutable { text-decoration: underline; } |
28 | .unresolved_reference { color: #FC5555; } | ||
29 | .escape_sequence { color: #94BFF3; } | ||
28 | 30 | ||
29 | .keyword { color: #F0DFAF; font-weight: bold; } | 31 | .keyword { color: #F0DFAF; font-weight: bold; } |
30 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
diff --git a/crates/ra_ide/src/snapshots/highlight_strings.html b/crates/ra_ide/src/snapshots/highlight_strings.html index 849eb3b73..b46fa44c6 100644 --- a/crates/ra_ide/src/snapshots/highlight_strings.html +++ b/crates/ra_ide/src/snapshots/highlight_strings.html | |||
@@ -25,6 +25,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .variable { color: #DCDCCC; } | 25 | .variable { color: #DCDCCC; } |
26 | .format_specifier { color: #CC696B; } | 26 | .format_specifier { color: #CC696B; } |
27 | .mutable { text-decoration: underline; } | 27 | .mutable { text-decoration: underline; } |
28 | .unresolved_reference { color: #FC5555; } | ||
29 | .escape_sequence { color: #94BFF3; } | ||
28 | 30 | ||
29 | .keyword { color: #F0DFAF; font-weight: bold; } | 31 | .keyword { color: #F0DFAF; font-weight: bold; } |
30 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
@@ -82,6 +84,10 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
82 | 84 | ||
83 | <span class="macro">println!</span>(<span class="string_literal">r"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">!"</span>, <span class="string_literal">"world"</span>); | 85 | <span class="macro">println!</span>(<span class="string_literal">r"Hello, </span><span class="format_specifier">{</span><span class="format_specifier">}</span><span class="string_literal">!"</span>, <span class="string_literal">"world"</span>); |
84 | 86 | ||
85 | <span class="macro">println!</span>(<span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">\x41</span><span class="format_specifier">}</span><span class="string_literal">"</span>, A = <span class="numeric_literal">92</span>); | 87 | <span class="comment">// escape sequences</span> |
88 | <span class="macro">println!</span>(<span class="string_literal">"Hello</span><span class="escape_sequence">\n</span><span class="string_literal">World"</span>); | ||
89 | <span class="macro">println!</span>(<span class="string_literal">"</span><span class="escape_sequence">\u{48}</span><span class="escape_sequence">\x65</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6C</span><span class="escape_sequence">\x6F</span><span class="string_literal"> World"</span>); | ||
90 | |||
91 | <span class="macro">println!</span>(<span class="string_literal">"</span><span class="format_specifier">{</span><span class="escape_sequence">\x41</span><span class="format_specifier">}</span><span class="string_literal">"</span>, A = <span class="numeric_literal">92</span>); | ||
86 | <span class="macro">println!</span>(<span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal">"</span>, ничоси = <span class="numeric_literal">92</span>); | 92 | <span class="macro">println!</span>(<span class="string_literal">"</span><span class="format_specifier">{</span><span class="variable">ничоси</span><span class="format_specifier">}</span><span class="string_literal">"</span>, ничоси = <span class="numeric_literal">92</span>); |
87 | }</code></pre> \ No newline at end of file | 93 | }</code></pre> \ No newline at end of file |
diff --git a/crates/ra_ide/src/snapshots/highlight_unsafe.html b/crates/ra_ide/src/snapshots/highlight_unsafe.html index bd24e6e38..73438fbb4 100644 --- a/crates/ra_ide/src/snapshots/highlight_unsafe.html +++ b/crates/ra_ide/src/snapshots/highlight_unsafe.html | |||
@@ -25,6 +25,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .variable { color: #DCDCCC; } | 25 | .variable { color: #DCDCCC; } |
26 | .format_specifier { color: #CC696B; } | 26 | .format_specifier { color: #CC696B; } |
27 | .mutable { text-decoration: underline; } | 27 | .mutable { text-decoration: underline; } |
28 | .unresolved_reference { color: #FC5555; } | ||
29 | .escape_sequence { color: #94BFF3; } | ||
28 | 30 | ||
29 | .keyword { color: #F0DFAF; font-weight: bold; } | 31 | .keyword { color: #F0DFAF; font-weight: bold; } |
30 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html index 5c2ff6ab5..0c4f0a018 100644 --- a/crates/ra_ide/src/snapshots/highlighting.html +++ b/crates/ra_ide/src/snapshots/highlighting.html | |||
@@ -25,6 +25,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .variable { color: #DCDCCC; } | 25 | .variable { color: #DCDCCC; } |
26 | .format_specifier { color: #CC696B; } | 26 | .format_specifier { color: #CC696B; } |
27 | .mutable { text-decoration: underline; } | 27 | .mutable { text-decoration: underline; } |
28 | .unresolved_reference { color: #FC5555; } | ||
29 | .escape_sequence { color: #94BFF3; } | ||
28 | 30 | ||
29 | .keyword { color: #F0DFAF; font-weight: bold; } | 31 | .keyword { color: #F0DFAF; font-weight: bold; } |
30 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html index 1ab06182c..a74a70069 100644 --- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html +++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html | |||
@@ -25,6 +25,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
25 | .variable { color: #DCDCCC; } | 25 | .variable { color: #DCDCCC; } |
26 | .format_specifier { color: #CC696B; } | 26 | .format_specifier { color: #CC696B; } |
27 | .mutable { text-decoration: underline; } | 27 | .mutable { text-decoration: underline; } |
28 | .unresolved_reference { color: #FC5555; } | ||
29 | .escape_sequence { color: #94BFF3; } | ||
28 | 30 | ||
29 | .keyword { color: #F0DFAF; font-weight: bold; } | 31 | .keyword { color: #F0DFAF; font-weight: bold; } |
30 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 32 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs index 93e9aee1d..59c230f6c 100644 --- a/crates/ra_ide/src/ssr.rs +++ b/crates/ra_ide/src/ssr.rs | |||
@@ -1,37 +1,18 @@ | |||
1 | use std::{collections::HashMap, iter::once, str::FromStr}; | 1 | use ra_db::SourceDatabaseExt; |
2 | |||
3 | use ra_db::{SourceDatabase, SourceDatabaseExt}; | ||
4 | use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; | 2 | use ra_ide_db::{symbol_index::SymbolsDatabase, RootDatabase}; |
5 | use ra_syntax::ast::{ | ||
6 | make::try_expr_from_text, ArgList, AstToken, CallExpr, Comment, Expr, MethodCallExpr, | ||
7 | RecordField, RecordLit, | ||
8 | }; | ||
9 | use ra_syntax::{AstNode, SyntaxElement, SyntaxKind, SyntaxNode}; | ||
10 | use ra_text_edit::{TextEdit, TextEditBuilder}; | ||
11 | use rustc_hash::FxHashMap; | ||
12 | 3 | ||
13 | use crate::SourceFileEdit; | 4 | use crate::SourceFileEdit; |
14 | 5 | use ra_ssr::{MatchFinder, SsrError, SsrRule}; | |
15 | #[derive(Debug, PartialEq)] | ||
16 | pub struct SsrError(String); | ||
17 | |||
18 | impl std::fmt::Display for SsrError { | ||
19 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { | ||
20 | write!(f, "Parse error: {}", self.0) | ||
21 | } | ||
22 | } | ||
23 | |||
24 | impl std::error::Error for SsrError {} | ||
25 | 6 | ||
26 | // Feature: Structural Seach and Replace | 7 | // Feature: Structural Seach and Replace |
27 | // | 8 | // |
28 | // Search and replace with named wildcards that will match any expression. | 9 | // Search and replace with named wildcards that will match any expression, type, path, pattern or item. |
29 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. | 10 | // The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`. |
30 | // A `$<name>:expr` placeholder in the search pattern will match any expression and `$<name>` will reference it in the replacement. | 11 | // A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement. |
31 | // Available via the command `rust-analyzer.ssr`. | 12 | // Available via the command `rust-analyzer.ssr`. |
32 | // | 13 | // |
33 | // ```rust | 14 | // ```rust |
34 | // // Using structural search replace command [foo($a:expr, $b:expr) ==>> ($a).foo($b)] | 15 | // // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)] |
35 | // | 16 | // |
36 | // // BEFORE | 17 | // // BEFORE |
37 | // String::from(foo(y + 5, z)) | 18 | // String::from(foo(y + 5, z)) |
@@ -46,584 +27,24 @@ impl std::error::Error for SsrError {} | |||
46 | // | VS Code | **Rust Analyzer: Structural Search Replace** | 27 | // | VS Code | **Rust Analyzer: Structural Search Replace** |
47 | // |=== | 28 | // |=== |
48 | pub fn parse_search_replace( | 29 | pub fn parse_search_replace( |
49 | query: &str, | 30 | rule: &str, |
50 | parse_only: bool, | 31 | parse_only: bool, |
51 | db: &RootDatabase, | 32 | db: &RootDatabase, |
52 | ) -> Result<Vec<SourceFileEdit>, SsrError> { | 33 | ) -> Result<Vec<SourceFileEdit>, SsrError> { |
53 | let mut edits = vec![]; | 34 | let mut edits = vec![]; |
54 | let query: SsrQuery = query.parse()?; | 35 | let rule: SsrRule = rule.parse()?; |
55 | if parse_only { | 36 | if parse_only { |
56 | return Ok(edits); | 37 | return Ok(edits); |
57 | } | 38 | } |
39 | let mut match_finder = MatchFinder::new(db); | ||
40 | match_finder.add_rule(rule); | ||
58 | for &root in db.local_roots().iter() { | 41 | for &root in db.local_roots().iter() { |
59 | let sr = db.source_root(root); | 42 | let sr = db.source_root(root); |
60 | for file_id in sr.walk() { | 43 | for file_id in sr.walk() { |
61 | let matches = find(&query.pattern, db.parse(file_id).tree().syntax()); | 44 | if let Some(edit) = match_finder.edits_for_file(file_id) { |
62 | if !matches.matches.is_empty() { | 45 | edits.push(SourceFileEdit { file_id, edit }); |
63 | edits.push(SourceFileEdit { file_id, edit: replace(&matches, &query.template) }); | ||
64 | } | 46 | } |
65 | } | 47 | } |
66 | } | 48 | } |
67 | Ok(edits) | 49 | Ok(edits) |
68 | } | 50 | } |
69 | |||
70 | #[derive(Debug)] | ||
71 | struct SsrQuery { | ||
72 | pattern: SsrPattern, | ||
73 | template: SsrTemplate, | ||
74 | } | ||
75 | |||
76 | #[derive(Debug)] | ||
77 | struct SsrPattern { | ||
78 | pattern: SyntaxNode, | ||
79 | vars: Vec<Var>, | ||
80 | } | ||
81 | |||
82 | /// represents an `$var` in an SSR query | ||
83 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
84 | struct Var(String); | ||
85 | |||
86 | #[derive(Debug)] | ||
87 | struct SsrTemplate { | ||
88 | template: SyntaxNode, | ||
89 | placeholders: FxHashMap<SyntaxNode, Var>, | ||
90 | } | ||
91 | |||
92 | type Binding = HashMap<Var, SyntaxNode>; | ||
93 | |||
94 | #[derive(Debug)] | ||
95 | struct Match { | ||
96 | place: SyntaxNode, | ||
97 | binding: Binding, | ||
98 | ignored_comments: Vec<Comment>, | ||
99 | } | ||
100 | |||
101 | #[derive(Debug)] | ||
102 | struct SsrMatches { | ||
103 | matches: Vec<Match>, | ||
104 | } | ||
105 | |||
106 | impl FromStr for SsrQuery { | ||
107 | type Err = SsrError; | ||
108 | |||
109 | fn from_str(query: &str) -> Result<SsrQuery, SsrError> { | ||
110 | let mut it = query.split("==>>"); | ||
111 | let pattern = it.next().expect("at least empty string").trim(); | ||
112 | let mut template = it | ||
113 | .next() | ||
114 | .ok_or_else(|| SsrError("Cannot find delemiter `==>>`".into()))? | ||
115 | .trim() | ||
116 | .to_string(); | ||
117 | if it.next().is_some() { | ||
118 | return Err(SsrError("More than one delimiter found".into())); | ||
119 | } | ||
120 | let mut vars = vec![]; | ||
121 | let mut it = pattern.split('$'); | ||
122 | let mut pattern = it.next().expect("something").to_string(); | ||
123 | |||
124 | for part in it.map(split_by_var) { | ||
125 | let (var, var_type, remainder) = part?; | ||
126 | is_expr(var_type)?; | ||
127 | let new_var = create_name(var, &mut vars)?; | ||
128 | pattern.push_str(new_var); | ||
129 | pattern.push_str(remainder); | ||
130 | template = replace_in_template(template, var, new_var); | ||
131 | } | ||
132 | |||
133 | let template = try_expr_from_text(&template) | ||
134 | .ok_or(SsrError("Template is not an expression".into()))? | ||
135 | .syntax() | ||
136 | .clone(); | ||
137 | let mut placeholders = FxHashMap::default(); | ||
138 | |||
139 | traverse(&template, &mut |n| { | ||
140 | if let Some(v) = vars.iter().find(|v| v.0.as_str() == n.text()) { | ||
141 | placeholders.insert(n.clone(), v.clone()); | ||
142 | false | ||
143 | } else { | ||
144 | true | ||
145 | } | ||
146 | }); | ||
147 | |||
148 | let pattern = SsrPattern { | ||
149 | pattern: try_expr_from_text(&pattern) | ||
150 | .ok_or(SsrError("Pattern is not an expression".into()))? | ||
151 | .syntax() | ||
152 | .clone(), | ||
153 | vars, | ||
154 | }; | ||
155 | let template = SsrTemplate { template, placeholders }; | ||
156 | Ok(SsrQuery { pattern, template }) | ||
157 | } | ||
158 | } | ||
159 | |||
160 | fn traverse(node: &SyntaxNode, go: &mut impl FnMut(&SyntaxNode) -> bool) { | ||
161 | if !go(node) { | ||
162 | return; | ||
163 | } | ||
164 | for ref child in node.children() { | ||
165 | traverse(child, go); | ||
166 | } | ||
167 | } | ||
168 | |||
169 | fn split_by_var(s: &str) -> Result<(&str, &str, &str), SsrError> { | ||
170 | let end_of_name = s.find(':').ok_or_else(|| SsrError("Use $<name>:expr".into()))?; | ||
171 | let name = &s[0..end_of_name]; | ||
172 | is_name(name)?; | ||
173 | let type_begin = end_of_name + 1; | ||
174 | let type_length = | ||
175 | s[type_begin..].find(|c| !char::is_ascii_alphanumeric(&c)).unwrap_or_else(|| s.len()); | ||
176 | let type_name = &s[type_begin..type_begin + type_length]; | ||
177 | Ok((name, type_name, &s[type_begin + type_length..])) | ||
178 | } | ||
179 | |||
180 | fn is_name(s: &str) -> Result<(), SsrError> { | ||
181 | if s.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') { | ||
182 | Ok(()) | ||
183 | } else { | ||
184 | Err(SsrError("Name can contain only alphanumerics and _".into())) | ||
185 | } | ||
186 | } | ||
187 | |||
188 | fn is_expr(s: &str) -> Result<(), SsrError> { | ||
189 | if s == "expr" { | ||
190 | Ok(()) | ||
191 | } else { | ||
192 | Err(SsrError("Only $<name>:expr is supported".into())) | ||
193 | } | ||
194 | } | ||
195 | |||
196 | fn replace_in_template(template: String, var: &str, new_var: &str) -> String { | ||
197 | let name = format!("${}", var); | ||
198 | template.replace(&name, new_var) | ||
199 | } | ||
200 | |||
201 | fn create_name<'a>(name: &str, vars: &'a mut Vec<Var>) -> Result<&'a str, SsrError> { | ||
202 | let sanitized_name = format!("__search_pattern_{}", name); | ||
203 | if vars.iter().any(|a| a.0 == sanitized_name) { | ||
204 | return Err(SsrError(format!("Name `{}` repeats more than once", name))); | ||
205 | } | ||
206 | vars.push(Var(sanitized_name)); | ||
207 | Ok(&vars.last().unwrap().0) | ||
208 | } | ||
209 | |||
210 | fn find(pattern: &SsrPattern, code: &SyntaxNode) -> SsrMatches { | ||
211 | fn check_record_lit( | ||
212 | pattern: RecordLit, | ||
213 | code: RecordLit, | ||
214 | placeholders: &[Var], | ||
215 | match_: Match, | ||
216 | ) -> Option<Match> { | ||
217 | let match_ = check_opt_nodes(pattern.path(), code.path(), placeholders, match_)?; | ||
218 | |||
219 | let mut pattern_fields: Vec<RecordField> = | ||
220 | pattern.record_field_list().map(|x| x.fields().collect()).unwrap_or_default(); | ||
221 | let mut code_fields: Vec<RecordField> = | ||
222 | code.record_field_list().map(|x| x.fields().collect()).unwrap_or_default(); | ||
223 | |||
224 | if pattern_fields.len() != code_fields.len() { | ||
225 | return None; | ||
226 | } | ||
227 | |||
228 | let by_name = |a: &RecordField, b: &RecordField| { | ||
229 | a.name_ref() | ||
230 | .map(|x| x.syntax().text().to_string()) | ||
231 | .cmp(&b.name_ref().map(|x| x.syntax().text().to_string())) | ||
232 | }; | ||
233 | pattern_fields.sort_by(by_name); | ||
234 | code_fields.sort_by(by_name); | ||
235 | |||
236 | pattern_fields.into_iter().zip(code_fields.into_iter()).fold( | ||
237 | Some(match_), | ||
238 | |accum, (a, b)| { | ||
239 | accum.and_then(|match_| check_opt_nodes(Some(a), Some(b), placeholders, match_)) | ||
240 | }, | ||
241 | ) | ||
242 | } | ||
243 | |||
244 | fn check_call_and_method_call( | ||
245 | pattern: CallExpr, | ||
246 | code: MethodCallExpr, | ||
247 | placeholders: &[Var], | ||
248 | match_: Match, | ||
249 | ) -> Option<Match> { | ||
250 | let (pattern_name, pattern_type_args) = if let Some(Expr::PathExpr(path_exr)) = | ||
251 | pattern.expr() | ||
252 | { | ||
253 | let segment = path_exr.path().and_then(|p| p.segment()); | ||
254 | (segment.as_ref().and_then(|s| s.name_ref()), segment.and_then(|s| s.type_arg_list())) | ||
255 | } else { | ||
256 | (None, None) | ||
257 | }; | ||
258 | let match_ = check_opt_nodes(pattern_name, code.name_ref(), placeholders, match_)?; | ||
259 | let match_ = | ||
260 | check_opt_nodes(pattern_type_args, code.type_arg_list(), placeholders, match_)?; | ||
261 | let pattern_args = pattern.syntax().children().find_map(ArgList::cast)?.args(); | ||
262 | let code_args = code.syntax().children().find_map(ArgList::cast)?.args(); | ||
263 | let code_args = once(code.expr()?).chain(code_args); | ||
264 | check_iter(pattern_args, code_args, placeholders, match_) | ||
265 | } | ||
266 | |||
267 | fn check_method_call_and_call( | ||
268 | pattern: MethodCallExpr, | ||
269 | code: CallExpr, | ||
270 | placeholders: &[Var], | ||
271 | match_: Match, | ||
272 | ) -> Option<Match> { | ||
273 | let (code_name, code_type_args) = if let Some(Expr::PathExpr(path_exr)) = code.expr() { | ||
274 | let segment = path_exr.path().and_then(|p| p.segment()); | ||
275 | (segment.as_ref().and_then(|s| s.name_ref()), segment.and_then(|s| s.type_arg_list())) | ||
276 | } else { | ||
277 | (None, None) | ||
278 | }; | ||
279 | let match_ = check_opt_nodes(pattern.name_ref(), code_name, placeholders, match_)?; | ||
280 | let match_ = | ||
281 | check_opt_nodes(pattern.type_arg_list(), code_type_args, placeholders, match_)?; | ||
282 | let code_args = code.syntax().children().find_map(ArgList::cast)?.args(); | ||
283 | let pattern_args = pattern.syntax().children().find_map(ArgList::cast)?.args(); | ||
284 | let pattern_args = once(pattern.expr()?).chain(pattern_args); | ||
285 | check_iter(pattern_args, code_args, placeholders, match_) | ||
286 | } | ||
287 | |||
288 | fn check_opt_nodes( | ||
289 | pattern: Option<impl AstNode>, | ||
290 | code: Option<impl AstNode>, | ||
291 | placeholders: &[Var], | ||
292 | match_: Match, | ||
293 | ) -> Option<Match> { | ||
294 | match (pattern, code) { | ||
295 | (Some(pattern), Some(code)) => check( | ||
296 | &pattern.syntax().clone().into(), | ||
297 | &code.syntax().clone().into(), | ||
298 | placeholders, | ||
299 | match_, | ||
300 | ), | ||
301 | (None, None) => Some(match_), | ||
302 | _ => None, | ||
303 | } | ||
304 | } | ||
305 | |||
306 | fn check_iter<T, I1, I2>( | ||
307 | mut pattern: I1, | ||
308 | mut code: I2, | ||
309 | placeholders: &[Var], | ||
310 | match_: Match, | ||
311 | ) -> Option<Match> | ||
312 | where | ||
313 | T: AstNode, | ||
314 | I1: Iterator<Item = T>, | ||
315 | I2: Iterator<Item = T>, | ||
316 | { | ||
317 | pattern | ||
318 | .by_ref() | ||
319 | .zip(code.by_ref()) | ||
320 | .fold(Some(match_), |accum, (a, b)| { | ||
321 | accum.and_then(|match_| { | ||
322 | check( | ||
323 | &a.syntax().clone().into(), | ||
324 | &b.syntax().clone().into(), | ||
325 | placeholders, | ||
326 | match_, | ||
327 | ) | ||
328 | }) | ||
329 | }) | ||
330 | .filter(|_| pattern.next().is_none() && code.next().is_none()) | ||
331 | } | ||
332 | |||
333 | fn check( | ||
334 | pattern: &SyntaxElement, | ||
335 | code: &SyntaxElement, | ||
336 | placeholders: &[Var], | ||
337 | mut match_: Match, | ||
338 | ) -> Option<Match> { | ||
339 | match (&pattern, &code) { | ||
340 | (SyntaxElement::Token(pattern), SyntaxElement::Token(code)) => { | ||
341 | if pattern.text() == code.text() { | ||
342 | Some(match_) | ||
343 | } else { | ||
344 | None | ||
345 | } | ||
346 | } | ||
347 | (SyntaxElement::Node(pattern), SyntaxElement::Node(code)) => { | ||
348 | if placeholders.iter().any(|n| n.0.as_str() == pattern.text()) { | ||
349 | match_.binding.insert(Var(pattern.text().to_string()), code.clone()); | ||
350 | Some(match_) | ||
351 | } else { | ||
352 | if let (Some(pattern), Some(code)) = | ||
353 | (RecordLit::cast(pattern.clone()), RecordLit::cast(code.clone())) | ||
354 | { | ||
355 | check_record_lit(pattern, code, placeholders, match_) | ||
356 | } else if let (Some(pattern), Some(code)) = | ||
357 | (CallExpr::cast(pattern.clone()), MethodCallExpr::cast(code.clone())) | ||
358 | { | ||
359 | check_call_and_method_call(pattern, code, placeholders, match_) | ||
360 | } else if let (Some(pattern), Some(code)) = | ||
361 | (MethodCallExpr::cast(pattern.clone()), CallExpr::cast(code.clone())) | ||
362 | { | ||
363 | check_method_call_and_call(pattern, code, placeholders, match_) | ||
364 | } else { | ||
365 | let mut pattern_children = pattern | ||
366 | .children_with_tokens() | ||
367 | .filter(|element| !element.kind().is_trivia()); | ||
368 | let mut code_children = code | ||
369 | .children_with_tokens() | ||
370 | .filter(|element| !element.kind().is_trivia()); | ||
371 | let new_ignored_comments = | ||
372 | code.children_with_tokens().filter_map(|element| { | ||
373 | element.as_token().and_then(|token| Comment::cast(token.clone())) | ||
374 | }); | ||
375 | match_.ignored_comments.extend(new_ignored_comments); | ||
376 | pattern_children | ||
377 | .by_ref() | ||
378 | .zip(code_children.by_ref()) | ||
379 | .fold(Some(match_), |accum, (a, b)| { | ||
380 | accum.and_then(|match_| check(&a, &b, placeholders, match_)) | ||
381 | }) | ||
382 | .filter(|_| { | ||
383 | pattern_children.next().is_none() && code_children.next().is_none() | ||
384 | }) | ||
385 | } | ||
386 | } | ||
387 | } | ||
388 | _ => None, | ||
389 | } | ||
390 | } | ||
391 | let kind = pattern.pattern.kind(); | ||
392 | let matches = code | ||
393 | .descendants() | ||
394 | .filter(|n| { | ||
395 | n.kind() == kind | ||
396 | || (kind == SyntaxKind::CALL_EXPR && n.kind() == SyntaxKind::METHOD_CALL_EXPR) | ||
397 | || (kind == SyntaxKind::METHOD_CALL_EXPR && n.kind() == SyntaxKind::CALL_EXPR) | ||
398 | }) | ||
399 | .filter_map(|code| { | ||
400 | let match_ = | ||
401 | Match { place: code.clone(), binding: HashMap::new(), ignored_comments: vec![] }; | ||
402 | check(&pattern.pattern.clone().into(), &code.into(), &pattern.vars, match_) | ||
403 | }) | ||
404 | .collect(); | ||
405 | SsrMatches { matches } | ||
406 | } | ||
407 | |||
408 | fn replace(matches: &SsrMatches, template: &SsrTemplate) -> TextEdit { | ||
409 | let mut builder = TextEditBuilder::default(); | ||
410 | for match_ in &matches.matches { | ||
411 | builder.replace( | ||
412 | match_.place.text_range(), | ||
413 | render_replace(&match_.binding, &match_.ignored_comments, template), | ||
414 | ); | ||
415 | } | ||
416 | builder.finish() | ||
417 | } | ||
418 | |||
419 | fn render_replace( | ||
420 | binding: &Binding, | ||
421 | ignored_comments: &Vec<Comment>, | ||
422 | template: &SsrTemplate, | ||
423 | ) -> String { | ||
424 | let edit = { | ||
425 | let mut builder = TextEditBuilder::default(); | ||
426 | for element in template.template.descendants() { | ||
427 | if let Some(var) = template.placeholders.get(&element) { | ||
428 | builder.replace(element.text_range(), binding[var].to_string()) | ||
429 | } | ||
430 | } | ||
431 | for comment in ignored_comments { | ||
432 | builder.insert(template.template.text_range().end(), comment.syntax().to_string()) | ||
433 | } | ||
434 | builder.finish() | ||
435 | }; | ||
436 | |||
437 | let mut text = template.template.text().to_string(); | ||
438 | edit.apply(&mut text); | ||
439 | text | ||
440 | } | ||
441 | |||
442 | #[cfg(test)] | ||
443 | mod tests { | ||
444 | use super::*; | ||
445 | use ra_syntax::SourceFile; | ||
446 | |||
447 | fn parse_error_text(query: &str) -> String { | ||
448 | format!("{}", query.parse::<SsrQuery>().unwrap_err()) | ||
449 | } | ||
450 | |||
451 | #[test] | ||
452 | fn parser_happy_case() { | ||
453 | let result: SsrQuery = "foo($a:expr, $b:expr) ==>> bar($b, $a)".parse().unwrap(); | ||
454 | assert_eq!(&result.pattern.pattern.text(), "foo(__search_pattern_a, __search_pattern_b)"); | ||
455 | assert_eq!(result.pattern.vars.len(), 2); | ||
456 | assert_eq!(result.pattern.vars[0].0, "__search_pattern_a"); | ||
457 | assert_eq!(result.pattern.vars[1].0, "__search_pattern_b"); | ||
458 | assert_eq!(&result.template.template.text(), "bar(__search_pattern_b, __search_pattern_a)"); | ||
459 | } | ||
460 | |||
461 | #[test] | ||
462 | fn parser_empty_query() { | ||
463 | assert_eq!(parse_error_text(""), "Parse error: Cannot find delemiter `==>>`"); | ||
464 | } | ||
465 | |||
466 | #[test] | ||
467 | fn parser_no_delimiter() { | ||
468 | assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delemiter `==>>`"); | ||
469 | } | ||
470 | |||
471 | #[test] | ||
472 | fn parser_two_delimiters() { | ||
473 | assert_eq!( | ||
474 | parse_error_text("foo() ==>> a ==>> b "), | ||
475 | "Parse error: More than one delimiter found" | ||
476 | ); | ||
477 | } | ||
478 | |||
479 | #[test] | ||
480 | fn parser_no_pattern_type() { | ||
481 | assert_eq!(parse_error_text("foo($a) ==>>"), "Parse error: Use $<name>:expr"); | ||
482 | } | ||
483 | |||
484 | #[test] | ||
485 | fn parser_invalid_name() { | ||
486 | assert_eq!( | ||
487 | parse_error_text("foo($a+:expr) ==>>"), | ||
488 | "Parse error: Name can contain only alphanumerics and _" | ||
489 | ); | ||
490 | } | ||
491 | |||
492 | #[test] | ||
493 | fn parser_invalid_type() { | ||
494 | assert_eq!( | ||
495 | parse_error_text("foo($a:ident) ==>>"), | ||
496 | "Parse error: Only $<name>:expr is supported" | ||
497 | ); | ||
498 | } | ||
499 | |||
500 | #[test] | ||
501 | fn parser_repeated_name() { | ||
502 | assert_eq!( | ||
503 | parse_error_text("foo($a:expr, $a:expr) ==>>"), | ||
504 | "Parse error: Name `a` repeats more than once" | ||
505 | ); | ||
506 | } | ||
507 | |||
508 | #[test] | ||
509 | fn parser_invlid_pattern() { | ||
510 | assert_eq!(parse_error_text(" ==>> ()"), "Parse error: Pattern is not an expression"); | ||
511 | } | ||
512 | |||
513 | #[test] | ||
514 | fn parser_invlid_template() { | ||
515 | assert_eq!(parse_error_text("() ==>> )"), "Parse error: Template is not an expression"); | ||
516 | } | ||
517 | |||
518 | #[test] | ||
519 | fn parse_match_replace() { | ||
520 | let query: SsrQuery = "foo($x:expr) ==>> bar($x)".parse().unwrap(); | ||
521 | let input = "fn main() { foo(1+2); }"; | ||
522 | |||
523 | let code = SourceFile::parse(input).tree(); | ||
524 | let matches = find(&query.pattern, code.syntax()); | ||
525 | assert_eq!(matches.matches.len(), 1); | ||
526 | assert_eq!(matches.matches[0].place.text(), "foo(1+2)"); | ||
527 | assert_eq!(matches.matches[0].binding.len(), 1); | ||
528 | assert_eq!( | ||
529 | matches.matches[0].binding[&Var("__search_pattern_x".to_string())].text(), | ||
530 | "1+2" | ||
531 | ); | ||
532 | |||
533 | let edit = replace(&matches, &query.template); | ||
534 | let mut after = input.to_string(); | ||
535 | edit.apply(&mut after); | ||
536 | assert_eq!(after, "fn main() { bar(1+2); }"); | ||
537 | } | ||
538 | |||
539 | fn assert_ssr_transform(query: &str, input: &str, result: &str) { | ||
540 | let query: SsrQuery = query.parse().unwrap(); | ||
541 | let code = SourceFile::parse(input).tree(); | ||
542 | let matches = find(&query.pattern, code.syntax()); | ||
543 | let edit = replace(&matches, &query.template); | ||
544 | let mut after = input.to_string(); | ||
545 | edit.apply(&mut after); | ||
546 | assert_eq!(after, result); | ||
547 | } | ||
548 | |||
549 | #[test] | ||
550 | fn ssr_function_to_method() { | ||
551 | assert_ssr_transform( | ||
552 | "my_function($a:expr, $b:expr) ==>> ($a).my_method($b)", | ||
553 | "loop { my_function( other_func(x, y), z + w) }", | ||
554 | "loop { (other_func(x, y)).my_method(z + w) }", | ||
555 | ) | ||
556 | } | ||
557 | |||
558 | #[test] | ||
559 | fn ssr_nested_function() { | ||
560 | assert_ssr_transform( | ||
561 | "foo($a:expr, $b:expr, $c:expr) ==>> bar($c, baz($a, $b))", | ||
562 | "fn main { foo (x + value.method(b), x+y-z, true && false) }", | ||
563 | "fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }", | ||
564 | ) | ||
565 | } | ||
566 | |||
567 | #[test] | ||
568 | fn ssr_expected_spacing() { | ||
569 | assert_ssr_transform( | ||
570 | "foo($x:expr) + bar() ==>> bar($x)", | ||
571 | "fn main() { foo(5) + bar() }", | ||
572 | "fn main() { bar(5) }", | ||
573 | ); | ||
574 | } | ||
575 | |||
576 | #[test] | ||
577 | fn ssr_with_extra_space() { | ||
578 | assert_ssr_transform( | ||
579 | "foo($x:expr ) + bar() ==>> bar($x)", | ||
580 | "fn main() { foo( 5 ) +bar( ) }", | ||
581 | "fn main() { bar(5) }", | ||
582 | ); | ||
583 | } | ||
584 | |||
585 | #[test] | ||
586 | fn ssr_keeps_nested_comment() { | ||
587 | assert_ssr_transform( | ||
588 | "foo($x:expr) ==>> bar($x)", | ||
589 | "fn main() { foo(other(5 /* using 5 */)) }", | ||
590 | "fn main() { bar(other(5 /* using 5 */)) }", | ||
591 | ) | ||
592 | } | ||
593 | |||
594 | #[test] | ||
595 | fn ssr_keeps_comment() { | ||
596 | assert_ssr_transform( | ||
597 | "foo($x:expr) ==>> bar($x)", | ||
598 | "fn main() { foo(5 /* using 5 */) }", | ||
599 | "fn main() { bar(5)/* using 5 */ }", | ||
600 | ) | ||
601 | } | ||
602 | |||
603 | #[test] | ||
604 | fn ssr_struct_lit() { | ||
605 | assert_ssr_transform( | ||
606 | "foo{a: $a:expr, b: $b:expr} ==>> foo::new($a, $b)", | ||
607 | "fn main() { foo{b:2, a:1} }", | ||
608 | "fn main() { foo::new(1, 2) }", | ||
609 | ) | ||
610 | } | ||
611 | |||
612 | #[test] | ||
613 | fn ssr_call_and_method_call() { | ||
614 | assert_ssr_transform( | ||
615 | "foo::<'a>($a:expr, $b:expr)) ==>> foo2($a, $b)", | ||
616 | "fn main() { get().bar.foo::<'a>(1); }", | ||
617 | "fn main() { foo2(get().bar, 1); }", | ||
618 | ) | ||
619 | } | ||
620 | |||
621 | #[test] | ||
622 | fn ssr_method_call_and_call() { | ||
623 | assert_ssr_transform( | ||
624 | "$o:expr.foo::<i32>($a:expr)) ==>> $o.foo2($a)", | ||
625 | "fn main() { X::foo::<i32>(x, 1); }", | ||
626 | "fn main() { x.foo2(1); }", | ||
627 | ) | ||
628 | } | ||
629 | } | ||
diff --git a/crates/ra_ide/src/status.rs b/crates/ra_ide/src/status.rs index 5b7992920..45411b357 100644 --- a/crates/ra_ide/src/status.rs +++ b/crates/ra_ide/src/status.rs | |||
@@ -16,6 +16,7 @@ use ra_prof::{memory_usage, Bytes}; | |||
16 | use ra_syntax::{ast, Parse, SyntaxNode}; | 16 | use ra_syntax::{ast, Parse, SyntaxNode}; |
17 | 17 | ||
18 | use crate::FileId; | 18 | use crate::FileId; |
19 | use rustc_hash::FxHashMap; | ||
19 | 20 | ||
20 | fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { | 21 | fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats { |
21 | db.query(ra_db::ParseQuery).entries::<SyntaxTreeStats>() | 22 | db.query(ra_db::ParseQuery).entries::<SyntaxTreeStats>() |
@@ -123,20 +124,24 @@ struct LibrarySymbolsStats { | |||
123 | 124 | ||
124 | impl fmt::Display for LibrarySymbolsStats { | 125 | impl fmt::Display for LibrarySymbolsStats { |
125 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { | 126 | fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { |
126 | write!(fmt, "{} ({}) symbols", self.total, self.size,) | 127 | write!(fmt, "{} ({}) symbols", self.total, self.size) |
127 | } | 128 | } |
128 | } | 129 | } |
129 | 130 | ||
130 | impl FromIterator<TableEntry<SourceRootId, Arc<SymbolIndex>>> for LibrarySymbolsStats { | 131 | impl FromIterator<TableEntry<(), Arc<FxHashMap<SourceRootId, SymbolIndex>>>> |
132 | for LibrarySymbolsStats | ||
133 | { | ||
131 | fn from_iter<T>(iter: T) -> LibrarySymbolsStats | 134 | fn from_iter<T>(iter: T) -> LibrarySymbolsStats |
132 | where | 135 | where |
133 | T: IntoIterator<Item = TableEntry<SourceRootId, Arc<SymbolIndex>>>, | 136 | T: IntoIterator<Item = TableEntry<(), Arc<FxHashMap<SourceRootId, SymbolIndex>>>>, |
134 | { | 137 | { |
135 | let mut res = LibrarySymbolsStats::default(); | 138 | let mut res = LibrarySymbolsStats::default(); |
136 | for entry in iter { | 139 | for entry in iter { |
137 | let value = entry.value.unwrap(); | 140 | let value = entry.value.unwrap(); |
138 | res.total += value.len(); | 141 | for symbols in value.values() { |
139 | res.size += value.memory_size(); | 142 | res.total += symbols.len(); |
143 | res.size += symbols.memory_size(); | ||
144 | } | ||
140 | } | 145 | } |
141 | res | 146 | res |
142 | } | 147 | } |
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs index bbcd52a1c..448645bdc 100644 --- a/crates/ra_ide/src/syntax_highlighting.rs +++ b/crates/ra_ide/src/syntax_highlighting.rs | |||
@@ -44,6 +44,7 @@ pub(crate) fn highlight( | |||
44 | db: &RootDatabase, | 44 | db: &RootDatabase, |
45 | file_id: FileId, | 45 | file_id: FileId, |
46 | range_to_highlight: Option<TextRange>, | 46 | range_to_highlight: Option<TextRange>, |
47 | syntactic_name_ref_highlighting: bool, | ||
47 | ) -> Vec<HighlightedRange> { | 48 | ) -> Vec<HighlightedRange> { |
48 | let _p = profile("highlight"); | 49 | let _p = profile("highlight"); |
49 | let sema = Semantics::new(db); | 50 | let sema = Semantics::new(db); |
@@ -104,6 +105,7 @@ pub(crate) fn highlight( | |||
104 | if let Some((highlight, binding_hash)) = highlight_element( | 105 | if let Some((highlight, binding_hash)) = highlight_element( |
105 | &sema, | 106 | &sema, |
106 | &mut bindings_shadow_count, | 107 | &mut bindings_shadow_count, |
108 | syntactic_name_ref_highlighting, | ||
107 | name.syntax().clone().into(), | 109 | name.syntax().clone().into(), |
108 | ) { | 110 | ) { |
109 | stack.add(HighlightedRange { | 111 | stack.add(HighlightedRange { |
@@ -119,7 +121,6 @@ pub(crate) fn highlight( | |||
119 | assert!(current_macro_call == Some(mc)); | 121 | assert!(current_macro_call == Some(mc)); |
120 | current_macro_call = None; | 122 | current_macro_call = None; |
121 | format_string = None; | 123 | format_string = None; |
122 | continue; | ||
123 | } | 124 | } |
124 | _ => (), | 125 | _ => (), |
125 | } | 126 | } |
@@ -148,7 +149,7 @@ pub(crate) fn highlight( | |||
148 | 149 | ||
149 | let range = element.text_range(); | 150 | let range = element.text_range(); |
150 | 151 | ||
151 | let element_to_highlight = if current_macro_call.is_some() { | 152 | let element_to_highlight = if current_macro_call.is_some() && element.kind() != COMMENT { |
152 | // Inside a macro -- expand it first | 153 | // Inside a macro -- expand it first |
153 | let token = match element.clone().into_token() { | 154 | let token = match element.clone().into_token() { |
154 | Some(it) if it.parent().kind() == TOKEN_TREE => it, | 155 | Some(it) if it.parent().kind() == TOKEN_TREE => it, |
@@ -200,15 +201,18 @@ pub(crate) fn highlight( | |||
200 | 201 | ||
201 | let is_format_string = format_string.as_ref() == Some(&element_to_highlight); | 202 | let is_format_string = format_string.as_ref() == Some(&element_to_highlight); |
202 | 203 | ||
203 | if let Some((highlight, binding_hash)) = | 204 | if let Some((highlight, binding_hash)) = highlight_element( |
204 | highlight_element(&sema, &mut bindings_shadow_count, element_to_highlight.clone()) | 205 | &sema, |
205 | { | 206 | &mut bindings_shadow_count, |
207 | syntactic_name_ref_highlighting, | ||
208 | element_to_highlight.clone(), | ||
209 | ) { | ||
206 | stack.add(HighlightedRange { range, highlight, binding_hash }); | 210 | stack.add(HighlightedRange { range, highlight, binding_hash }); |
207 | if let Some(string) = | 211 | if let Some(string) = |
208 | element_to_highlight.as_token().cloned().and_then(ast::String::cast) | 212 | element_to_highlight.as_token().cloned().and_then(ast::String::cast) |
209 | { | 213 | { |
210 | stack.push(); | ||
211 | if is_format_string { | 214 | if is_format_string { |
215 | stack.push(); | ||
212 | string.lex_format_specifier(|piece_range, kind| { | 216 | string.lex_format_specifier(|piece_range, kind| { |
213 | if let Some(highlight) = highlight_format_specifier(kind) { | 217 | if let Some(highlight) = highlight_format_specifier(kind) { |
214 | stack.add(HighlightedRange { | 218 | stack.add(HighlightedRange { |
@@ -218,13 +222,27 @@ pub(crate) fn highlight( | |||
218 | }); | 222 | }); |
219 | } | 223 | } |
220 | }); | 224 | }); |
225 | stack.pop(); | ||
226 | } | ||
227 | // Highlight escape sequences | ||
228 | if let Some(char_ranges) = string.char_ranges() { | ||
229 | stack.push(); | ||
230 | for (piece_range, _) in char_ranges.iter().filter(|(_, char)| char.is_ok()) { | ||
231 | if string.text()[piece_range.start().into()..].starts_with('\\') { | ||
232 | stack.add(HighlightedRange { | ||
233 | range: piece_range + range.start(), | ||
234 | highlight: HighlightTag::EscapeSequence.into(), | ||
235 | binding_hash: None, | ||
236 | }); | ||
237 | } | ||
238 | } | ||
239 | stack.pop_and_inject(false); | ||
221 | } | 240 | } |
222 | stack.pop(); | ||
223 | } else if let Some(string) = | 241 | } else if let Some(string) = |
224 | element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) | 242 | element_to_highlight.as_token().cloned().and_then(ast::RawString::cast) |
225 | { | 243 | { |
226 | stack.push(); | ||
227 | if is_format_string { | 244 | if is_format_string { |
245 | stack.push(); | ||
228 | string.lex_format_specifier(|piece_range, kind| { | 246 | string.lex_format_specifier(|piece_range, kind| { |
229 | if let Some(highlight) = highlight_format_specifier(kind) { | 247 | if let Some(highlight) = highlight_format_specifier(kind) { |
230 | stack.add(HighlightedRange { | 248 | stack.add(HighlightedRange { |
@@ -234,8 +252,8 @@ pub(crate) fn highlight( | |||
234 | }); | 252 | }); |
235 | } | 253 | } |
236 | }); | 254 | }); |
255 | stack.pop(); | ||
237 | } | 256 | } |
238 | stack.pop(); | ||
239 | } | 257 | } |
240 | } | 258 | } |
241 | } | 259 | } |
@@ -410,6 +428,7 @@ fn macro_call_range(macro_call: &ast::MacroCall) -> Option<TextRange> { | |||
410 | fn highlight_element( | 428 | fn highlight_element( |
411 | sema: &Semantics<RootDatabase>, | 429 | sema: &Semantics<RootDatabase>, |
412 | bindings_shadow_count: &mut FxHashMap<Name, u32>, | 430 | bindings_shadow_count: &mut FxHashMap<Name, u32>, |
431 | syntactic_name_ref_highlighting: bool, | ||
413 | element: SyntaxElement, | 432 | element: SyntaxElement, |
414 | ) -> Option<(Highlight, Option<u64>)> { | 433 | ) -> Option<(Highlight, Option<u64>)> { |
415 | let db = sema.db; | 434 | let db = sema.db; |
@@ -463,12 +482,20 @@ fn highlight_element( | |||
463 | } | 482 | } |
464 | NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(), | 483 | NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(), |
465 | }, | 484 | }, |
485 | None if syntactic_name_ref_highlighting => highlight_name_ref_by_syntax(name_ref), | ||
466 | None => HighlightTag::UnresolvedReference.into(), | 486 | None => HighlightTag::UnresolvedReference.into(), |
467 | } | 487 | } |
468 | } | 488 | } |
469 | 489 | ||
470 | // Simple token-based highlighting | 490 | // Simple token-based highlighting |
471 | COMMENT => HighlightTag::Comment.into(), | 491 | COMMENT => { |
492 | let comment = element.into_token().and_then(ast::Comment::cast)?; | ||
493 | let h = HighlightTag::Comment; | ||
494 | match comment.kind().doc { | ||
495 | Some(_) => h | HighlightModifier::Documentation, | ||
496 | None => h.into(), | ||
497 | } | ||
498 | } | ||
472 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), | 499 | STRING | RAW_STRING | RAW_BYTE_STRING | BYTE_STRING => HighlightTag::StringLiteral.into(), |
473 | ATTR => HighlightTag::Attribute.into(), | 500 | ATTR => HighlightTag::Attribute.into(), |
474 | INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), | 501 | INT_NUMBER | FLOAT_NUMBER => HighlightTag::NumericLiteral.into(), |
@@ -614,3 +641,53 @@ fn highlight_name_by_syntax(name: ast::Name) -> Highlight { | |||
614 | 641 | ||
615 | tag.into() | 642 | tag.into() |
616 | } | 643 | } |
644 | |||
645 | fn highlight_name_ref_by_syntax(name: ast::NameRef) -> Highlight { | ||
646 | let default = HighlightTag::UnresolvedReference; | ||
647 | |||
648 | let parent = match name.syntax().parent() { | ||
649 | Some(it) => it, | ||
650 | _ => return default.into(), | ||
651 | }; | ||
652 | |||
653 | let tag = match parent.kind() { | ||
654 | METHOD_CALL_EXPR => HighlightTag::Function, | ||
655 | FIELD_EXPR => HighlightTag::Field, | ||
656 | PATH_SEGMENT => { | ||
657 | let path = match parent.parent().and_then(ast::Path::cast) { | ||
658 | Some(it) => it, | ||
659 | _ => return default.into(), | ||
660 | }; | ||
661 | let expr = match path.syntax().parent().and_then(ast::PathExpr::cast) { | ||
662 | Some(it) => it, | ||
663 | _ => { | ||
664 | // within path, decide whether it is module or adt by checking for uppercase name | ||
665 | return if name.text().chars().next().unwrap_or_default().is_uppercase() { | ||
666 | HighlightTag::Struct | ||
667 | } else { | ||
668 | HighlightTag::Module | ||
669 | } | ||
670 | .into(); | ||
671 | } | ||
672 | }; | ||
673 | let parent = match expr.syntax().parent() { | ||
674 | Some(it) => it, | ||
675 | None => return default.into(), | ||
676 | }; | ||
677 | |||
678 | match parent.kind() { | ||
679 | CALL_EXPR => HighlightTag::Function, | ||
680 | _ => { | ||
681 | if name.text().chars().next().unwrap_or_default().is_uppercase() { | ||
682 | HighlightTag::Struct | ||
683 | } else { | ||
684 | HighlightTag::Constant | ||
685 | } | ||
686 | } | ||
687 | } | ||
688 | } | ||
689 | _ => default, | ||
690 | }; | ||
691 | |||
692 | tag.into() | ||
693 | } | ||
diff --git a/crates/ra_ide/src/syntax_highlighting/html.rs b/crates/ra_ide/src/syntax_highlighting/html.rs index 5bada6252..99b6b25ab 100644 --- a/crates/ra_ide/src/syntax_highlighting/html.rs +++ b/crates/ra_ide/src/syntax_highlighting/html.rs | |||
@@ -19,7 +19,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo | |||
19 | ) | 19 | ) |
20 | } | 20 | } |
21 | 21 | ||
22 | let ranges = highlight(db, file_id, None); | 22 | let ranges = highlight(db, file_id, None, false); |
23 | let text = parse.tree().syntax().to_string(); | 23 | let text = parse.tree().syntax().to_string(); |
24 | let mut prev_pos = TextSize::from(0); | 24 | let mut prev_pos = TextSize::from(0); |
25 | let mut buf = String::new(); | 25 | let mut buf = String::new(); |
@@ -84,6 +84,8 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd | |||
84 | .variable { color: #DCDCCC; } | 84 | .variable { color: #DCDCCC; } |
85 | .format_specifier { color: #CC696B; } | 85 | .format_specifier { color: #CC696B; } |
86 | .mutable { text-decoration: underline; } | 86 | .mutable { text-decoration: underline; } |
87 | .unresolved_reference { color: #FC5555; } | ||
88 | .escape_sequence { color: #94BFF3; } | ||
87 | 89 | ||
88 | .keyword { color: #F0DFAF; font-weight: bold; } | 90 | .keyword { color: #F0DFAF; font-weight: bold; } |
89 | .keyword.unsafe { color: #BC8383; font-weight: bold; } | 91 | .keyword.unsafe { color: #BC8383; font-weight: bold; } |
diff --git a/crates/ra_ide/src/syntax_highlighting/injection.rs b/crates/ra_ide/src/syntax_highlighting/injection.rs index 3575a0fc6..415f24a6d 100644 --- a/crates/ra_ide/src/syntax_highlighting/injection.rs +++ b/crates/ra_ide/src/syntax_highlighting/injection.rs | |||
@@ -7,7 +7,10 @@ use hir::Semantics; | |||
7 | use ra_syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize}; | 7 | use ra_syntax::{ast, AstToken, SyntaxNode, SyntaxToken, TextRange, TextSize}; |
8 | use stdx::SepBy; | 8 | use stdx::SepBy; |
9 | 9 | ||
10 | use crate::{call_info::ActiveParameter, Analysis, HighlightTag, HighlightedRange, RootDatabase}; | 10 | use crate::{ |
11 | call_info::ActiveParameter, Analysis, HighlightModifier, HighlightTag, HighlightedRange, | ||
12 | RootDatabase, | ||
13 | }; | ||
11 | 14 | ||
12 | use super::HighlightedRangeStack; | 15 | use super::HighlightedRangeStack; |
13 | 16 | ||
@@ -53,6 +56,10 @@ pub(super) fn highlight_injection( | |||
53 | /// Mapping from extracted documentation code to original code | 56 | /// Mapping from extracted documentation code to original code |
54 | type RangesMap = BTreeMap<TextSize, TextSize>; | 57 | type RangesMap = BTreeMap<TextSize, TextSize>; |
55 | 58 | ||
59 | const RUSTDOC_FENCE: &'static str = "```"; | ||
60 | const RUSTDOC_FENCE_TOKENS: &[&'static str] = | ||
61 | &["", "rust", "should_panic", "ignore", "no_run", "compile_fail", "edition2015", "edition2018"]; | ||
62 | |||
56 | /// Extracts Rust code from documentation comments as well as a mapping from | 63 | /// Extracts Rust code from documentation comments as well as a mapping from |
57 | /// the extracted source code back to the original source ranges. | 64 | /// the extracted source code back to the original source ranges. |
58 | /// Lastly, a vector of new comment highlight ranges (spanning only the | 65 | /// Lastly, a vector of new comment highlight ranges (spanning only the |
@@ -67,6 +74,7 @@ pub(super) fn extract_doc_comments( | |||
67 | // Mapping from extracted documentation code to original code | 74 | // Mapping from extracted documentation code to original code |
68 | let mut range_mapping: RangesMap = BTreeMap::new(); | 75 | let mut range_mapping: RangesMap = BTreeMap::new(); |
69 | let mut line_start = TextSize::try_from(prefix.len()).unwrap(); | 76 | let mut line_start = TextSize::try_from(prefix.len()).unwrap(); |
77 | let mut is_codeblock = false; | ||
70 | let mut is_doctest = false; | 78 | let mut is_doctest = false; |
71 | // Replace the original, line-spanning comment ranges by new, only comment-prefix | 79 | // Replace the original, line-spanning comment ranges by new, only comment-prefix |
72 | // spanning comment ranges. | 80 | // spanning comment ranges. |
@@ -76,8 +84,13 @@ pub(super) fn extract_doc_comments( | |||
76 | .filter_map(|el| el.into_token().and_then(ast::Comment::cast)) | 84 | .filter_map(|el| el.into_token().and_then(ast::Comment::cast)) |
77 | .filter(|comment| comment.kind().doc.is_some()) | 85 | .filter(|comment| comment.kind().doc.is_some()) |
78 | .filter(|comment| { | 86 | .filter(|comment| { |
79 | if comment.text().contains("```") { | 87 | if let Some(idx) = comment.text().find(RUSTDOC_FENCE) { |
80 | is_doctest = !is_doctest; | 88 | is_codeblock = !is_codeblock; |
89 | // Check whether code is rust by inspecting fence guards | ||
90 | let guards = &comment.text()[idx + RUSTDOC_FENCE.len()..]; | ||
91 | let is_rust = | ||
92 | guards.split(',').all(|sub| RUSTDOC_FENCE_TOKENS.contains(&sub.trim())); | ||
93 | is_doctest = is_codeblock && is_rust; | ||
81 | false | 94 | false |
82 | } else { | 95 | } else { |
83 | is_doctest | 96 | is_doctest |
@@ -108,7 +121,7 @@ pub(super) fn extract_doc_comments( | |||
108 | range.start(), | 121 | range.start(), |
109 | range.start() + TextSize::try_from(pos).unwrap(), | 122 | range.start() + TextSize::try_from(pos).unwrap(), |
110 | ), | 123 | ), |
111 | highlight: HighlightTag::Comment.into(), | 124 | highlight: HighlightTag::Comment | HighlightModifier::Documentation, |
112 | binding_hash: None, | 125 | binding_hash: None, |
113 | }); | 126 | }); |
114 | line_start += range.len() - TextSize::try_from(pos).unwrap(); | 127 | line_start += range.len() - TextSize::try_from(pos).unwrap(); |
@@ -137,7 +150,7 @@ pub(super) fn highlight_doc_comment( | |||
137 | let (analysis, tmp_file_id) = Analysis::from_single_file(text); | 150 | let (analysis, tmp_file_id) = Analysis::from_single_file(text); |
138 | 151 | ||
139 | stack.push(); | 152 | stack.push(); |
140 | for mut h in analysis.highlight(tmp_file_id).unwrap() { | 153 | for mut h in analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)).unwrap() { |
141 | // Determine start offset and end offset in case of multi-line ranges | 154 | // Determine start offset and end offset in case of multi-line ranges |
142 | let mut start_offset = None; | 155 | let mut start_offset = None; |
143 | let mut end_offset = None; | 156 | let mut end_offset = None; |
@@ -154,6 +167,7 @@ pub(super) fn highlight_doc_comment( | |||
154 | h.range.start() + start_offset, | 167 | h.range.start() + start_offset, |
155 | h.range.end() + end_offset.unwrap_or(start_offset), | 168 | h.range.end() + end_offset.unwrap_or(start_offset), |
156 | ); | 169 | ); |
170 | |||
157 | stack.add(h); | 171 | stack.add(h); |
158 | } | 172 | } |
159 | } | 173 | } |
diff --git a/crates/ra_ide/src/syntax_highlighting/tags.rs b/crates/ra_ide/src/syntax_highlighting/tags.rs index 94f466966..93bbb4b4d 100644 --- a/crates/ra_ide/src/syntax_highlighting/tags.rs +++ b/crates/ra_ide/src/syntax_highlighting/tags.rs | |||
@@ -23,6 +23,7 @@ pub enum HighlightTag { | |||
23 | Constant, | 23 | Constant, |
24 | Enum, | 24 | Enum, |
25 | EnumVariant, | 25 | EnumVariant, |
26 | EscapeSequence, | ||
26 | Field, | 27 | Field, |
27 | FormatSpecifier, | 28 | FormatSpecifier, |
28 | Function, | 29 | Function, |
@@ -55,6 +56,7 @@ pub enum HighlightModifier { | |||
55 | /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is | 56 | /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is |
56 | /// not. | 57 | /// not. |
57 | Definition, | 58 | Definition, |
59 | Documentation, | ||
58 | Mutable, | 60 | Mutable, |
59 | Unsafe, | 61 | Unsafe, |
60 | } | 62 | } |
@@ -71,6 +73,7 @@ impl HighlightTag { | |||
71 | HighlightTag::Constant => "constant", | 73 | HighlightTag::Constant => "constant", |
72 | HighlightTag::Enum => "enum", | 74 | HighlightTag::Enum => "enum", |
73 | HighlightTag::EnumVariant => "enum_variant", | 75 | HighlightTag::EnumVariant => "enum_variant", |
76 | HighlightTag::EscapeSequence => "escape_sequence", | ||
74 | HighlightTag::Field => "field", | 77 | HighlightTag::Field => "field", |
75 | HighlightTag::FormatSpecifier => "format_specifier", | 78 | HighlightTag::FormatSpecifier => "format_specifier", |
76 | HighlightTag::Function => "function", | 79 | HighlightTag::Function => "function", |
@@ -106,6 +109,7 @@ impl HighlightModifier { | |||
106 | HighlightModifier::Attribute, | 109 | HighlightModifier::Attribute, |
107 | HighlightModifier::ControlFlow, | 110 | HighlightModifier::ControlFlow, |
108 | HighlightModifier::Definition, | 111 | HighlightModifier::Definition, |
112 | HighlightModifier::Documentation, | ||
109 | HighlightModifier::Mutable, | 113 | HighlightModifier::Mutable, |
110 | HighlightModifier::Unsafe, | 114 | HighlightModifier::Unsafe, |
111 | ]; | 115 | ]; |
@@ -115,6 +119,7 @@ impl HighlightModifier { | |||
115 | HighlightModifier::Attribute => "attribute", | 119 | HighlightModifier::Attribute => "attribute", |
116 | HighlightModifier::ControlFlow => "control", | 120 | HighlightModifier::ControlFlow => "control", |
117 | HighlightModifier::Definition => "declaration", | 121 | HighlightModifier::Definition => "declaration", |
122 | HighlightModifier::Documentation => "documentation", | ||
118 | HighlightModifier::Mutable => "mutable", | 123 | HighlightModifier::Mutable => "mutable", |
119 | HighlightModifier::Unsafe => "unsafe", | 124 | HighlightModifier::Unsafe => "unsafe", |
120 | } | 125 | } |
diff --git a/crates/ra_ide/src/syntax_highlighting/tests.rs b/crates/ra_ide/src/syntax_highlighting/tests.rs index 070b24f45..93a276ffe 100644 --- a/crates/ra_ide/src/syntax_highlighting/tests.rs +++ b/crates/ra_ide/src/syntax_highlighting/tests.rs | |||
@@ -246,6 +246,10 @@ fn main() { | |||
246 | 246 | ||
247 | println!(r"Hello, {}!", "world"); | 247 | println!(r"Hello, {}!", "world"); |
248 | 248 | ||
249 | // escape sequences | ||
250 | println!("Hello\nWorld"); | ||
251 | println!("\u{48}\x65\x6C\x6C\x6F World"); | ||
252 | |||
249 | println!("{\x41}", A = 92); | 253 | println!("{\x41}", A = 92); |
250 | println!("{ничоси}", ничоси = 92); | 254 | println!("{ничоси}", ничоси = 92); |
251 | }"# | 255 | }"# |
@@ -287,7 +291,13 @@ fn main() { | |||
287 | fn test_highlight_doctest() { | 291 | fn test_highlight_doctest() { |
288 | check_highlighting( | 292 | check_highlighting( |
289 | r#" | 293 | r#" |
294 | struct Foo { | ||
295 | bar: bool, | ||
296 | } | ||
297 | |||
290 | impl Foo { | 298 | impl Foo { |
299 | pub const bar: bool = true; | ||
300 | |||
291 | /// Constructs a new `Foo`. | 301 | /// Constructs a new `Foo`. |
292 | /// | 302 | /// |
293 | /// # Examples | 303 | /// # Examples |
@@ -297,7 +307,7 @@ impl Foo { | |||
297 | /// let mut foo: Foo = Foo::new(); | 307 | /// let mut foo: Foo = Foo::new(); |
298 | /// ``` | 308 | /// ``` |
299 | pub const fn new() -> Foo { | 309 | pub const fn new() -> Foo { |
300 | Foo { } | 310 | Foo { bar: true } |
301 | } | 311 | } |
302 | 312 | ||
303 | /// `bar` method on `Foo`. | 313 | /// `bar` method on `Foo`. |
@@ -305,11 +315,15 @@ impl Foo { | |||
305 | /// # Examples | 315 | /// # Examples |
306 | /// | 316 | /// |
307 | /// ``` | 317 | /// ``` |
318 | /// use x::y; | ||
319 | /// | ||
308 | /// let foo = Foo::new(); | 320 | /// let foo = Foo::new(); |
309 | /// | 321 | /// |
310 | /// // calls bar on foo | 322 | /// // calls bar on foo |
311 | /// assert!(foo.bar()); | 323 | /// assert!(foo.bar()); |
312 | /// | 324 | /// |
325 | /// let bar = foo.bar || Foo::bar; | ||
326 | /// | ||
313 | /// /* multi-line | 327 | /// /* multi-line |
314 | /// comment */ | 328 | /// comment */ |
315 | /// | 329 | /// |
@@ -319,18 +333,31 @@ impl Foo { | |||
319 | /// | 333 | /// |
320 | /// ``` | 334 | /// ``` |
321 | /// | 335 | /// |
322 | /// ``` | 336 | /// ```rust,no_run |
323 | /// let foobar = Foo::new().bar(); | 337 | /// let foobar = Foo::new().bar(); |
324 | /// ``` | 338 | /// ``` |
339 | /// | ||
340 | /// ```sh | ||
341 | /// echo 1 | ||
342 | /// ``` | ||
325 | pub fn foo(&self) -> bool { | 343 | pub fn foo(&self) -> bool { |
326 | true | 344 | true |
327 | } | 345 | } |
328 | } | 346 | } |
347 | |||
348 | /// ``` | ||
349 | /// noop!(1); | ||
350 | /// ``` | ||
351 | macro_rules! noop { | ||
352 | ($expr:expr) => { | ||
353 | $expr | ||
354 | } | ||
355 | } | ||
329 | "# | 356 | "# |
330 | .trim(), | 357 | .trim(), |
331 | "crates/ra_ide/src/snapshots/highlight_doctest.html", | 358 | "crates/ra_ide/src/snapshots/highlight_doctest.html", |
332 | false, | 359 | false, |
333 | ) | 360 | ); |
334 | } | 361 | } |
335 | 362 | ||
336 | /// Highlights the code given by the `ra_fixture` argument, renders the | 363 | /// Highlights the code given by the `ra_fixture` argument, renders the |
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs index 2fc796a85..98993d571 100644 --- a/crates/ra_ide_db/src/change.rs +++ b/crates/ra_ide_db/src/change.rs | |||
@@ -9,22 +9,15 @@ use ra_db::{ | |||
9 | SourceRootId, | 9 | SourceRootId, |
10 | }; | 10 | }; |
11 | use ra_prof::{memory_usage, profile, Bytes}; | 11 | use ra_prof::{memory_usage, profile, Bytes}; |
12 | use ra_syntax::SourceFile; | ||
13 | #[cfg(not(feature = "wasm"))] | ||
14 | use rayon::prelude::*; | ||
15 | use rustc_hash::FxHashMap; | 12 | use rustc_hash::FxHashMap; |
16 | 13 | ||
17 | use crate::{ | 14 | use crate::{symbol_index::SymbolsDatabase, RootDatabase}; |
18 | symbol_index::{SymbolIndex, SymbolsDatabase}, | ||
19 | RootDatabase, | ||
20 | }; | ||
21 | 15 | ||
22 | #[derive(Default)] | 16 | #[derive(Default)] |
23 | pub struct AnalysisChange { | 17 | pub struct AnalysisChange { |
24 | new_roots: Vec<(SourceRootId, bool)>, | 18 | new_roots: Vec<(SourceRootId, bool)>, |
25 | roots_changed: FxHashMap<SourceRootId, RootChange>, | 19 | roots_changed: FxHashMap<SourceRootId, RootChange>, |
26 | files_changed: Vec<(FileId, Arc<String>)>, | 20 | files_changed: Vec<(FileId, Arc<String>)>, |
27 | libraries_added: Vec<LibraryData>, | ||
28 | crate_graph: Option<CrateGraph>, | 21 | crate_graph: Option<CrateGraph>, |
29 | } | 22 | } |
30 | 23 | ||
@@ -40,9 +33,6 @@ impl fmt::Debug for AnalysisChange { | |||
40 | if !self.files_changed.is_empty() { | 33 | if !self.files_changed.is_empty() { |
41 | d.field("files_changed", &self.files_changed.len()); | 34 | d.field("files_changed", &self.files_changed.len()); |
42 | } | 35 | } |
43 | if !self.libraries_added.is_empty() { | ||
44 | d.field("libraries_added", &self.libraries_added.len()); | ||
45 | } | ||
46 | if self.crate_graph.is_some() { | 36 | if self.crate_graph.is_some() { |
47 | d.field("crate_graph", &self.crate_graph); | 37 | d.field("crate_graph", &self.crate_graph); |
48 | } | 38 | } |
@@ -79,10 +69,6 @@ impl AnalysisChange { | |||
79 | self.roots_changed.entry(root_id).or_default().removed.push(file); | 69 | self.roots_changed.entry(root_id).or_default().removed.push(file); |
80 | } | 70 | } |
81 | 71 | ||
82 | pub fn add_library(&mut self, data: LibraryData) { | ||
83 | self.libraries_added.push(data) | ||
84 | } | ||
85 | |||
86 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { | 72 | pub fn set_crate_graph(&mut self, graph: CrateGraph) { |
87 | self.crate_graph = Some(graph); | 73 | self.crate_graph = Some(graph); |
88 | } | 74 | } |
@@ -116,47 +102,6 @@ impl fmt::Debug for RootChange { | |||
116 | } | 102 | } |
117 | } | 103 | } |
118 | 104 | ||
119 | pub struct LibraryData { | ||
120 | root_id: SourceRootId, | ||
121 | root_change: RootChange, | ||
122 | symbol_index: SymbolIndex, | ||
123 | } | ||
124 | |||
125 | impl fmt::Debug for LibraryData { | ||
126 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
127 | f.debug_struct("LibraryData") | ||
128 | .field("root_id", &self.root_id) | ||
129 | .field("root_change", &self.root_change) | ||
130 | .field("n_symbols", &self.symbol_index.len()) | ||
131 | .finish() | ||
132 | } | ||
133 | } | ||
134 | |||
135 | impl LibraryData { | ||
136 | pub fn prepare( | ||
137 | root_id: SourceRootId, | ||
138 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | ||
139 | ) -> LibraryData { | ||
140 | let _p = profile("LibraryData::prepare"); | ||
141 | |||
142 | #[cfg(not(feature = "wasm"))] | ||
143 | let iter = files.par_iter(); | ||
144 | #[cfg(feature = "wasm")] | ||
145 | let iter = files.iter(); | ||
146 | |||
147 | let symbol_index = SymbolIndex::for_files(iter.map(|(file_id, _, text)| { | ||
148 | let parse = SourceFile::parse(text); | ||
149 | (*file_id, parse) | ||
150 | })); | ||
151 | let mut root_change = RootChange::default(); | ||
152 | root_change.added = files | ||
153 | .into_iter() | ||
154 | .map(|(file_id, path, text)| AddFile { file_id, path, text }) | ||
155 | .collect(); | ||
156 | LibraryData { root_id, root_change, symbol_index } | ||
157 | } | ||
158 | } | ||
159 | |||
160 | const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); | 105 | const GC_COOLDOWN: time::Duration = time::Duration::from_millis(100); |
161 | 106 | ||
162 | impl RootDatabase { | 107 | impl RootDatabase { |
@@ -171,6 +116,7 @@ impl RootDatabase { | |||
171 | log::info!("apply_change {:?}", change); | 116 | log::info!("apply_change {:?}", change); |
172 | if !change.new_roots.is_empty() { | 117 | if !change.new_roots.is_empty() { |
173 | let mut local_roots = Vec::clone(&self.local_roots()); | 118 | let mut local_roots = Vec::clone(&self.local_roots()); |
119 | let mut libraries = Vec::clone(&self.library_roots()); | ||
174 | for (root_id, is_local) in change.new_roots { | 120 | for (root_id, is_local) in change.new_roots { |
175 | let root = | 121 | let root = |
176 | if is_local { SourceRoot::new_local() } else { SourceRoot::new_library() }; | 122 | if is_local { SourceRoot::new_local() } else { SourceRoot::new_library() }; |
@@ -178,9 +124,12 @@ impl RootDatabase { | |||
178 | self.set_source_root_with_durability(root_id, Arc::new(root), durability); | 124 | self.set_source_root_with_durability(root_id, Arc::new(root), durability); |
179 | if is_local { | 125 | if is_local { |
180 | local_roots.push(root_id); | 126 | local_roots.push(root_id); |
127 | } else { | ||
128 | libraries.push(root_id) | ||
181 | } | 129 | } |
182 | } | 130 | } |
183 | self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); | 131 | self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH); |
132 | self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH); | ||
184 | } | 133 | } |
185 | 134 | ||
186 | for (root_id, root_change) in change.roots_changed { | 135 | for (root_id, root_change) in change.roots_changed { |
@@ -192,24 +141,6 @@ impl RootDatabase { | |||
192 | let durability = durability(&source_root); | 141 | let durability = durability(&source_root); |
193 | self.set_file_text_with_durability(file_id, text, durability) | 142 | self.set_file_text_with_durability(file_id, text, durability) |
194 | } | 143 | } |
195 | if !change.libraries_added.is_empty() { | ||
196 | let mut libraries = Vec::clone(&self.library_roots()); | ||
197 | for library in change.libraries_added { | ||
198 | libraries.push(library.root_id); | ||
199 | self.set_source_root_with_durability( | ||
200 | library.root_id, | ||
201 | Arc::new(SourceRoot::new_library()), | ||
202 | Durability::HIGH, | ||
203 | ); | ||
204 | self.set_library_symbols_with_durability( | ||
205 | library.root_id, | ||
206 | Arc::new(library.symbol_index), | ||
207 | Durability::HIGH, | ||
208 | ); | ||
209 | self.apply_root_change(library.root_id, library.root_change); | ||
210 | } | ||
211 | self.set_library_roots_with_durability(Arc::new(libraries), Durability::HIGH); | ||
212 | } | ||
213 | if let Some(crate_graph) = change.crate_graph { | 144 | if let Some(crate_graph) = change.crate_graph { |
214 | self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) | 145 | self.set_crate_graph_with_durability(Arc::new(crate_graph), Durability::HIGH) |
215 | } | 146 | } |
@@ -352,7 +283,7 @@ impl RootDatabase { | |||
352 | hir::db::GenericPredicatesQuery | 283 | hir::db::GenericPredicatesQuery |
353 | hir::db::GenericDefaultsQuery | 284 | hir::db::GenericDefaultsQuery |
354 | hir::db::ImplsInCrateQuery | 285 | hir::db::ImplsInCrateQuery |
355 | hir::db::ImplsForTraitQuery | 286 | hir::db::ImplsFromDepsQuery |
356 | hir::db::InternTypeCtorQuery | 287 | hir::db::InternTypeCtorQuery |
357 | hir::db::InternTypeParamIdQuery | 288 | hir::db::InternTypeParamIdQuery |
358 | hir::db::InternChalkImplQuery | 289 | hir::db::InternChalkImplQuery |
diff --git a/crates/ra_ide_db/src/source_change.rs b/crates/ra_ide_db/src/source_change.rs index f40ae8304..0bbd3c3e5 100644 --- a/crates/ra_ide_db/src/source_change.rs +++ b/crates/ra_ide_db/src/source_change.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | //! | 3 | //! |
4 | //! It can be viewed as a dual for `AnalysisChange`. | 4 | //! It can be viewed as a dual for `AnalysisChange`. |
5 | 5 | ||
6 | use ra_db::{FileId, RelativePathBuf, SourceRootId}; | 6 | use ra_db::FileId; |
7 | use ra_text_edit::TextEdit; | 7 | use ra_text_edit::TextEdit; |
8 | 8 | ||
9 | #[derive(Debug, Clone)] | 9 | #[derive(Debug, Clone)] |
@@ -44,8 +44,8 @@ impl From<Vec<SourceFileEdit>> for SourceChange { | |||
44 | 44 | ||
45 | #[derive(Debug, Clone)] | 45 | #[derive(Debug, Clone)] |
46 | pub enum FileSystemEdit { | 46 | pub enum FileSystemEdit { |
47 | CreateFile { source_root: SourceRootId, path: RelativePathBuf }, | 47 | CreateFile { anchor: FileId, dst: String }, |
48 | MoveFile { src: FileId, dst_source_root: SourceRootId, dst_path: RelativePathBuf }, | 48 | MoveFile { src: FileId, anchor: FileId, dst: String }, |
49 | } | 49 | } |
50 | 50 | ||
51 | impl From<FileSystemEdit> for SourceChange { | 51 | impl From<FileSystemEdit> for SourceChange { |
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs index aab918973..25c99813f 100644 --- a/crates/ra_ide_db/src/symbol_index.rs +++ b/crates/ra_ide_db/src/symbol_index.rs | |||
@@ -34,14 +34,15 @@ use ra_db::{ | |||
34 | salsa::{self, ParallelDatabase}, | 34 | salsa::{self, ParallelDatabase}, |
35 | CrateId, FileId, SourceDatabaseExt, SourceRootId, | 35 | CrateId, FileId, SourceDatabaseExt, SourceRootId, |
36 | }; | 36 | }; |
37 | use ra_prof::profile; | ||
37 | use ra_syntax::{ | 38 | use ra_syntax::{ |
38 | ast::{self, NameOwner}, | 39 | ast::{self, NameOwner}, |
39 | match_ast, AstNode, Parse, SmolStr, SourceFile, | 40 | match_ast, AstNode, Parse, SmolStr, SourceFile, |
40 | SyntaxKind::{self, *}, | 41 | SyntaxKind::{self, *}, |
41 | SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, | 42 | SyntaxNode, SyntaxNodePtr, TextRange, WalkEvent, |
42 | }; | 43 | }; |
43 | #[cfg(not(feature = "wasm"))] | ||
44 | use rayon::prelude::*; | 44 | use rayon::prelude::*; |
45 | use rustc_hash::FxHashMap; | ||
45 | 46 | ||
46 | use crate::RootDatabase; | 47 | use crate::RootDatabase; |
47 | 48 | ||
@@ -86,10 +87,9 @@ impl Query { | |||
86 | } | 87 | } |
87 | 88 | ||
88 | #[salsa::query_group(SymbolsDatabaseStorage)] | 89 | #[salsa::query_group(SymbolsDatabaseStorage)] |
89 | pub trait SymbolsDatabase: hir::db::HirDatabase { | 90 | pub trait SymbolsDatabase: hir::db::HirDatabase + SourceDatabaseExt + ParallelDatabase { |
90 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; | 91 | fn file_symbols(&self, file_id: FileId) -> Arc<SymbolIndex>; |
91 | #[salsa::input] | 92 | fn library_symbols(&self) -> Arc<FxHashMap<SourceRootId, SymbolIndex>>; |
92 | fn library_symbols(&self, id: SourceRootId) -> Arc<SymbolIndex>; | ||
93 | /// The set of "local" (that is, from the current workspace) roots. | 93 | /// The set of "local" (that is, from the current workspace) roots. |
94 | /// Files in local roots are assumed to change frequently. | 94 | /// Files in local roots are assumed to change frequently. |
95 | #[salsa::input] | 95 | #[salsa::input] |
@@ -100,6 +100,29 @@ pub trait SymbolsDatabase: hir::db::HirDatabase { | |||
100 | fn library_roots(&self) -> Arc<Vec<SourceRootId>>; | 100 | fn library_roots(&self) -> Arc<Vec<SourceRootId>>; |
101 | } | 101 | } |
102 | 102 | ||
103 | fn library_symbols( | ||
104 | db: &(impl SymbolsDatabase + ParallelDatabase), | ||
105 | ) -> Arc<FxHashMap<SourceRootId, SymbolIndex>> { | ||
106 | let _p = profile("library_symbols"); | ||
107 | |||
108 | let roots = db.library_roots(); | ||
109 | let res = roots | ||
110 | .iter() | ||
111 | .map(|&root_id| { | ||
112 | let root = db.source_root(root_id); | ||
113 | let files = root | ||
114 | .walk() | ||
115 | .map(|it| (it, SourceDatabaseExt::file_text(db, it))) | ||
116 | .collect::<Vec<_>>(); | ||
117 | let symbol_index = SymbolIndex::for_files( | ||
118 | files.into_par_iter().map(|(file, text)| (file, SourceFile::parse(&text))), | ||
119 | ); | ||
120 | (root_id, symbol_index) | ||
121 | }) | ||
122 | .collect(); | ||
123 | Arc::new(res) | ||
124 | } | ||
125 | |||
103 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { | 126 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { |
104 | db.check_canceled(); | 127 | db.check_canceled(); |
105 | let parse = db.parse(file_id); | 128 | let parse = db.parse(file_id); |
@@ -112,9 +135,9 @@ fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> | |||
112 | } | 135 | } |
113 | 136 | ||
114 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` | 137 | /// Need to wrap Snapshot to provide `Clone` impl for `map_with` |
115 | struct Snap(salsa::Snapshot<RootDatabase>); | 138 | struct Snap<DB>(DB); |
116 | impl Clone for Snap { | 139 | impl<DB: ParallelDatabase> Clone for Snap<salsa::Snapshot<DB>> { |
117 | fn clone(&self) -> Snap { | 140 | fn clone(&self) -> Snap<salsa::Snapshot<DB>> { |
118 | Snap(self.0.snapshot()) | 141 | Snap(self.0.snapshot()) |
119 | } | 142 | } |
120 | } | 143 | } |
@@ -143,19 +166,11 @@ impl Clone for Snap { | |||
143 | pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> { | 166 | pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> { |
144 | let _p = ra_prof::profile("world_symbols").detail(|| query.query.clone()); | 167 | let _p = ra_prof::profile("world_symbols").detail(|| query.query.clone()); |
145 | 168 | ||
146 | let buf: Vec<Arc<SymbolIndex>> = if query.libs { | 169 | let tmp1; |
147 | let snap = Snap(db.snapshot()); | 170 | let tmp2; |
148 | #[cfg(not(feature = "wasm"))] | 171 | let buf: Vec<&SymbolIndex> = if query.libs { |
149 | let buf = db | 172 | tmp1 = db.library_symbols(); |
150 | .library_roots() | 173 | tmp1.values().collect() |
151 | .par_iter() | ||
152 | .map_with(snap, |db, &lib_id| db.0.library_symbols(lib_id)) | ||
153 | .collect(); | ||
154 | |||
155 | #[cfg(feature = "wasm")] | ||
156 | let buf = db.library_roots().iter().map(|&lib_id| snap.0.library_symbols(lib_id)).collect(); | ||
157 | |||
158 | buf | ||
159 | } else { | 174 | } else { |
160 | let mut files = Vec::new(); | 175 | let mut files = Vec::new(); |
161 | for &root in db.local_roots().iter() { | 176 | for &root in db.local_roots().iter() { |
@@ -164,14 +179,11 @@ pub fn world_symbols(db: &RootDatabase, query: Query) -> Vec<FileSymbol> { | |||
164 | } | 179 | } |
165 | 180 | ||
166 | let snap = Snap(db.snapshot()); | 181 | let snap = Snap(db.snapshot()); |
167 | #[cfg(not(feature = "wasm"))] | 182 | tmp2 = files |
168 | let buf = | 183 | .par_iter() |
169 | files.par_iter().map_with(snap, |db, &file_id| db.0.file_symbols(file_id)).collect(); | 184 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) |
170 | 185 | .collect::<Vec<_>>(); | |
171 | #[cfg(feature = "wasm")] | 186 | tmp2.iter().map(|it| &**it).collect() |
172 | let buf = files.iter().map(|&file_id| snap.0.file_symbols(file_id)).collect(); | ||
173 | |||
174 | buf | ||
175 | }; | 187 | }; |
176 | query.search(&buf) | 188 | query.search(&buf) |
177 | } | 189 | } |
@@ -191,14 +203,11 @@ pub fn crate_symbols(db: &RootDatabase, krate: CrateId, query: Query) -> Vec<Fil | |||
191 | 203 | ||
192 | let snap = Snap(db.snapshot()); | 204 | let snap = Snap(db.snapshot()); |
193 | 205 | ||
194 | #[cfg(not(feature = "wasm"))] | ||
195 | let buf = files | 206 | let buf = files |
196 | .par_iter() | 207 | .par_iter() |
197 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) | 208 | .map_with(snap, |db, &file_id| db.0.file_symbols(file_id)) |
198 | .collect::<Vec<_>>(); | 209 | .collect::<Vec<_>>(); |
199 | 210 | let buf = buf.iter().map(|it| &**it).collect::<Vec<_>>(); | |
200 | #[cfg(feature = "wasm")] | ||
201 | let buf = files.iter().map(|&file_id| snap.0.file_symbols(file_id)).collect::<Vec<_>>(); | ||
202 | 211 | ||
203 | query.search(&buf) | 212 | query.search(&buf) |
204 | } | 213 | } |
@@ -245,12 +254,8 @@ impl SymbolIndex { | |||
245 | lhs_chars.cmp(rhs_chars) | 254 | lhs_chars.cmp(rhs_chars) |
246 | } | 255 | } |
247 | 256 | ||
248 | #[cfg(not(feature = "wasm"))] | ||
249 | symbols.par_sort_by(cmp); | 257 | symbols.par_sort_by(cmp); |
250 | 258 | ||
251 | #[cfg(feature = "wasm")] | ||
252 | symbols.sort_by(cmp); | ||
253 | |||
254 | let mut builder = fst::MapBuilder::memory(); | 259 | let mut builder = fst::MapBuilder::memory(); |
255 | 260 | ||
256 | let mut last_batch_start = 0; | 261 | let mut last_batch_start = 0; |
@@ -284,7 +289,6 @@ impl SymbolIndex { | |||
284 | self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>() | 289 | self.map.as_fst().size() + self.symbols.len() * mem::size_of::<FileSymbol>() |
285 | } | 290 | } |
286 | 291 | ||
287 | #[cfg(not(feature = "wasm"))] | ||
288 | pub(crate) fn for_files( | 292 | pub(crate) fn for_files( |
289 | files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>, | 293 | files: impl ParallelIterator<Item = (FileId, Parse<ast::SourceFile>)>, |
290 | ) -> SymbolIndex { | 294 | ) -> SymbolIndex { |
@@ -294,16 +298,6 @@ impl SymbolIndex { | |||
294 | SymbolIndex::new(symbols) | 298 | SymbolIndex::new(symbols) |
295 | } | 299 | } |
296 | 300 | ||
297 | #[cfg(feature = "wasm")] | ||
298 | pub(crate) fn for_files( | ||
299 | files: impl Iterator<Item = (FileId, Parse<ast::SourceFile>)>, | ||
300 | ) -> SymbolIndex { | ||
301 | let symbols = files | ||
302 | .flat_map(|(file_id, file)| source_file_to_file_symbols(&file.tree(), file_id)) | ||
303 | .collect::<Vec<_>>(); | ||
304 | SymbolIndex::new(symbols) | ||
305 | } | ||
306 | |||
307 | fn range_to_map_value(start: usize, end: usize) -> u64 { | 301 | fn range_to_map_value(start: usize, end: usize) -> u64 { |
308 | debug_assert![start <= (std::u32::MAX as usize)]; | 302 | debug_assert![start <= (std::u32::MAX as usize)]; |
309 | debug_assert![end <= (std::u32::MAX as usize)]; | 303 | debug_assert![end <= (std::u32::MAX as usize)]; |
@@ -319,7 +313,7 @@ impl SymbolIndex { | |||
319 | } | 313 | } |
320 | 314 | ||
321 | impl Query { | 315 | impl Query { |
322 | pub(crate) fn search(self, indices: &[Arc<SymbolIndex>]) -> Vec<FileSymbol> { | 316 | pub(crate) fn search(self, indices: &[&SymbolIndex]) -> Vec<FileSymbol> { |
323 | let mut op = fst::map::OpBuilder::new(); | 317 | let mut op = fst::map::OpBuilder::new(); |
324 | for file_symbols in indices.iter() { | 318 | for file_symbols in indices.iter() { |
325 | let automaton = fst::automaton::Subsequence::new(&self.lowercased); | 319 | let automaton = fst::automaton::Subsequence::new(&self.lowercased); |
diff --git a/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt b/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt index bc010cfe9..e6fd21610 100644 --- a/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt +++ b/crates/ra_proc_macro_srv/src/tests/fixtures/test_serialize_proc_macro.txt | |||
@@ -14,7 +14,7 @@ SUBTREE $ | |||
14 | PUNCH , [alone] 4294967295 | 14 | PUNCH , [alone] 4294967295 |
15 | IDENT unused_qualifications 4294967295 | 15 | IDENT unused_qualifications 4294967295 |
16 | IDENT const 4294967295 | 16 | IDENT const 4294967295 |
17 | IDENT _IMPL_SERIALIZE_FOR_Foo 4294967295 | 17 | IDENT _ 4294967295 |
18 | PUNCH : [alone] 4294967295 | 18 | PUNCH : [alone] 4294967295 |
19 | SUBTREE () 4294967295 | 19 | SUBTREE () 4294967295 |
20 | PUNCH = [alone] 4294967295 | 20 | PUNCH = [alone] 4294967295 |
diff --git a/crates/ra_ssr/Cargo.toml b/crates/ra_ssr/Cargo.toml new file mode 100644 index 000000000..3c2f15a83 --- /dev/null +++ b/crates/ra_ssr/Cargo.toml | |||
@@ -0,0 +1,19 @@ | |||
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_ssr" | ||
4 | version = "0.1.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | license = "MIT OR Apache-2.0" | ||
7 | description = "Structural search and replace of Rust code" | ||
8 | repository = "https://github.com/rust-analyzer/rust-analyzer" | ||
9 | |||
10 | [lib] | ||
11 | doctest = false | ||
12 | |||
13 | [dependencies] | ||
14 | ra_text_edit = { path = "../ra_text_edit" } | ||
15 | ra_syntax = { path = "../ra_syntax" } | ||
16 | ra_db = { path = "../ra_db" } | ||
17 | ra_ide_db = { path = "../ra_ide_db" } | ||
18 | hir = { path = "../ra_hir", package = "ra_hir" } | ||
19 | rustc-hash = "1.1.0" | ||
diff --git a/crates/ra_ssr/src/lib.rs b/crates/ra_ssr/src/lib.rs new file mode 100644 index 000000000..fc716ae82 --- /dev/null +++ b/crates/ra_ssr/src/lib.rs | |||
@@ -0,0 +1,120 @@ | |||
1 | //! Structural Search Replace | ||
2 | //! | ||
3 | //! Allows searching the AST for code that matches one or more patterns and then replacing that code | ||
4 | //! based on a template. | ||
5 | |||
6 | mod matching; | ||
7 | mod parsing; | ||
8 | mod replacing; | ||
9 | #[cfg(test)] | ||
10 | mod tests; | ||
11 | |||
12 | use crate::matching::Match; | ||
13 | use hir::Semantics; | ||
14 | use ra_db::{FileId, FileRange}; | ||
15 | use ra_syntax::{AstNode, SmolStr, SyntaxNode}; | ||
16 | use ra_text_edit::TextEdit; | ||
17 | use rustc_hash::FxHashMap; | ||
18 | |||
19 | // A structured search replace rule. Created by calling `parse` on a str. | ||
20 | #[derive(Debug)] | ||
21 | pub struct SsrRule { | ||
22 | /// A structured pattern that we're searching for. | ||
23 | pattern: SsrPattern, | ||
24 | /// What we'll replace it with. | ||
25 | template: parsing::SsrTemplate, | ||
26 | } | ||
27 | |||
28 | #[derive(Debug)] | ||
29 | struct SsrPattern { | ||
30 | raw: parsing::RawSearchPattern, | ||
31 | /// Placeholders keyed by the stand-in ident that we use in Rust source code. | ||
32 | placeholders_by_stand_in: FxHashMap<SmolStr, parsing::Placeholder>, | ||
33 | // We store our search pattern, parsed as each different kind of thing we can look for. As we | ||
34 | // traverse the AST, we get the appropriate one of these for the type of node we're on. For many | ||
35 | // search patterns, only some of these will be present. | ||
36 | expr: Option<SyntaxNode>, | ||
37 | type_ref: Option<SyntaxNode>, | ||
38 | item: Option<SyntaxNode>, | ||
39 | path: Option<SyntaxNode>, | ||
40 | pattern: Option<SyntaxNode>, | ||
41 | } | ||
42 | |||
43 | #[derive(Debug, PartialEq)] | ||
44 | pub struct SsrError(String); | ||
45 | |||
46 | #[derive(Debug, Default)] | ||
47 | pub struct SsrMatches { | ||
48 | matches: Vec<Match>, | ||
49 | } | ||
50 | |||
51 | /// Searches a crate for pattern matches and possibly replaces them with something else. | ||
52 | pub struct MatchFinder<'db> { | ||
53 | /// Our source of information about the user's code. | ||
54 | sema: Semantics<'db, ra_ide_db::RootDatabase>, | ||
55 | rules: Vec<SsrRule>, | ||
56 | } | ||
57 | |||
58 | impl<'db> MatchFinder<'db> { | ||
59 | pub fn new(db: &'db ra_ide_db::RootDatabase) -> MatchFinder<'db> { | ||
60 | MatchFinder { sema: Semantics::new(db), rules: Vec::new() } | ||
61 | } | ||
62 | |||
63 | pub fn add_rule(&mut self, rule: SsrRule) { | ||
64 | self.rules.push(rule); | ||
65 | } | ||
66 | |||
67 | pub fn edits_for_file(&self, file_id: FileId) -> Option<TextEdit> { | ||
68 | let matches = self.find_matches_in_file(file_id); | ||
69 | if matches.matches.is_empty() { | ||
70 | None | ||
71 | } else { | ||
72 | Some(replacing::matches_to_edit(&matches)) | ||
73 | } | ||
74 | } | ||
75 | |||
76 | fn find_matches_in_file(&self, file_id: FileId) -> SsrMatches { | ||
77 | let file = self.sema.parse(file_id); | ||
78 | let code = file.syntax(); | ||
79 | let mut matches = SsrMatches::default(); | ||
80 | self.find_matches(code, &None, &mut matches); | ||
81 | matches | ||
82 | } | ||
83 | |||
84 | fn find_matches( | ||
85 | &self, | ||
86 | code: &SyntaxNode, | ||
87 | restrict_range: &Option<FileRange>, | ||
88 | matches_out: &mut SsrMatches, | ||
89 | ) { | ||
90 | for rule in &self.rules { | ||
91 | if let Ok(mut m) = matching::get_match(false, rule, &code, restrict_range, &self.sema) { | ||
92 | // Continue searching in each of our placeholders. | ||
93 | for placeholder_value in m.placeholder_values.values_mut() { | ||
94 | // Don't search our placeholder if it's the entire matched node, otherwise we'd | ||
95 | // find the same match over and over until we got a stack overflow. | ||
96 | if placeholder_value.node != *code { | ||
97 | self.find_matches( | ||
98 | &placeholder_value.node, | ||
99 | restrict_range, | ||
100 | &mut placeholder_value.inner_matches, | ||
101 | ); | ||
102 | } | ||
103 | } | ||
104 | matches_out.matches.push(m); | ||
105 | return; | ||
106 | } | ||
107 | } | ||
108 | for child in code.children() { | ||
109 | self.find_matches(&child, restrict_range, matches_out); | ||
110 | } | ||
111 | } | ||
112 | } | ||
113 | |||
114 | impl std::fmt::Display for SsrError { | ||
115 | fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { | ||
116 | write!(f, "Parse error: {}", self.0) | ||
117 | } | ||
118 | } | ||
119 | |||
120 | impl std::error::Error for SsrError {} | ||
diff --git a/crates/ra_ssr/src/matching.rs b/crates/ra_ssr/src/matching.rs new file mode 100644 index 000000000..265b6d793 --- /dev/null +++ b/crates/ra_ssr/src/matching.rs | |||
@@ -0,0 +1,494 @@ | |||
1 | //! This module is responsible for matching a search pattern against a node in the AST. In the | ||
2 | //! process of matching, placeholder values are recorded. | ||
3 | |||
4 | use crate::{ | ||
5 | parsing::{Placeholder, SsrTemplate}, | ||
6 | SsrMatches, SsrPattern, SsrRule, | ||
7 | }; | ||
8 | use hir::Semantics; | ||
9 | use ra_db::FileRange; | ||
10 | use ra_syntax::ast::{AstNode, AstToken}; | ||
11 | use ra_syntax::{ | ||
12 | ast, SyntaxElement, SyntaxElementChildren, SyntaxKind, SyntaxNode, SyntaxToken, TextRange, | ||
13 | }; | ||
14 | use rustc_hash::FxHashMap; | ||
15 | use std::{cell::Cell, iter::Peekable}; | ||
16 | |||
17 | // Creates a match error. If we're currently attempting to match some code that we thought we were | ||
18 | // going to match, as indicated by the --debug-snippet flag, then populate the reason field. | ||
19 | macro_rules! match_error { | ||
20 | ($e:expr) => {{ | ||
21 | MatchFailed { | ||
22 | reason: if recording_match_fail_reasons() { | ||
23 | Some(format!("{}", $e)) | ||
24 | } else { | ||
25 | None | ||
26 | } | ||
27 | } | ||
28 | }}; | ||
29 | ($fmt:expr, $($arg:tt)+) => {{ | ||
30 | MatchFailed { | ||
31 | reason: if recording_match_fail_reasons() { | ||
32 | Some(format!($fmt, $($arg)+)) | ||
33 | } else { | ||
34 | None | ||
35 | } | ||
36 | } | ||
37 | }}; | ||
38 | } | ||
39 | |||
40 | // Fails the current match attempt, recording the supplied reason if we're recording match fail reasons. | ||
41 | macro_rules! fail_match { | ||
42 | ($($args:tt)*) => {return Err(match_error!($($args)*))}; | ||
43 | } | ||
44 | |||
45 | /// Information about a match that was found. | ||
46 | #[derive(Debug)] | ||
47 | pub(crate) struct Match { | ||
48 | pub(crate) range: TextRange, | ||
49 | pub(crate) matched_node: SyntaxNode, | ||
50 | pub(crate) placeholder_values: FxHashMap<Var, PlaceholderMatch>, | ||
51 | pub(crate) ignored_comments: Vec<ast::Comment>, | ||
52 | // A copy of the template for the rule that produced this match. We store this on the match for | ||
53 | // if/when we do replacement. | ||
54 | pub(crate) template: SsrTemplate, | ||
55 | } | ||
56 | |||
57 | /// Represents a `$var` in an SSR query. | ||
58 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
59 | pub(crate) struct Var(pub String); | ||
60 | |||
61 | /// Information about a placeholder bound in a match. | ||
62 | #[derive(Debug)] | ||
63 | pub(crate) struct PlaceholderMatch { | ||
64 | /// The node that the placeholder matched to. | ||
65 | pub(crate) node: SyntaxNode, | ||
66 | pub(crate) range: FileRange, | ||
67 | /// More matches, found within `node`. | ||
68 | pub(crate) inner_matches: SsrMatches, | ||
69 | } | ||
70 | |||
71 | #[derive(Debug)] | ||
72 | pub(crate) struct MatchFailureReason { | ||
73 | pub(crate) reason: String, | ||
74 | } | ||
75 | |||
76 | /// An "error" indicating that matching failed. Use the fail_match! macro to create and return this. | ||
77 | #[derive(Clone)] | ||
78 | pub(crate) struct MatchFailed { | ||
79 | /// The reason why we failed to match. Only present when `debug_active` is true in the call | ||
80 | /// to `get_match`. | ||
81 | pub(crate) reason: Option<String>, | ||
82 | } | ||
83 | |||
84 | /// Checks if `code` matches the search pattern found in `rule`, returning information about | ||
85 | /// the match, if it does. Since we only do matching in this module and searching is done by the | ||
86 | /// parent module, we don't populate nested matches. | ||
87 | pub(crate) fn get_match( | ||
88 | debug_active: bool, | ||
89 | rule: &SsrRule, | ||
90 | code: &SyntaxNode, | ||
91 | restrict_range: &Option<FileRange>, | ||
92 | sema: &Semantics<ra_ide_db::RootDatabase>, | ||
93 | ) -> Result<Match, MatchFailed> { | ||
94 | record_match_fails_reasons_scope(debug_active, || { | ||
95 | MatchState::try_match(rule, code, restrict_range, sema) | ||
96 | }) | ||
97 | } | ||
98 | |||
99 | /// Inputs to matching. This cannot be part of `MatchState`, since we mutate `MatchState` and in at | ||
100 | /// least one case need to hold a borrow of a placeholder from the input pattern while calling a | ||
101 | /// mutable `MatchState` method. | ||
102 | struct MatchInputs<'pattern> { | ||
103 | ssr_pattern: &'pattern SsrPattern, | ||
104 | } | ||
105 | |||
106 | /// State used while attempting to match our search pattern against a particular node of the AST. | ||
107 | struct MatchState<'db, 'sema> { | ||
108 | sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, | ||
109 | /// If any placeholders come from anywhere outside of this range, then the match will be | ||
110 | /// rejected. | ||
111 | restrict_range: Option<FileRange>, | ||
112 | /// The match that we're building. We do two passes for a successful match. On the first pass, | ||
113 | /// this is None so that we can avoid doing things like storing copies of what placeholders | ||
114 | /// matched to. If that pass succeeds, then we do a second pass where we collect those details. | ||
115 | /// This means that if we have a pattern like `$a.foo()` we won't do an insert into the | ||
116 | /// placeholders map for every single method call in the codebase. Instead we'll discard all the | ||
117 | /// method calls that aren't calls to `foo` on the first pass and only insert into the | ||
118 | /// placeholders map on the second pass. Likewise for ignored comments. | ||
119 | match_out: Option<Match>, | ||
120 | } | ||
121 | |||
122 | impl<'db, 'sema> MatchState<'db, 'sema> { | ||
123 | fn try_match( | ||
124 | rule: &SsrRule, | ||
125 | code: &SyntaxNode, | ||
126 | restrict_range: &Option<FileRange>, | ||
127 | sema: &'sema Semantics<'db, ra_ide_db::RootDatabase>, | ||
128 | ) -> Result<Match, MatchFailed> { | ||
129 | let mut match_state = | ||
130 | MatchState { sema, restrict_range: restrict_range.clone(), match_out: None }; | ||
131 | let match_inputs = MatchInputs { ssr_pattern: &rule.pattern }; | ||
132 | let pattern_tree = rule.pattern.tree_for_kind(code.kind())?; | ||
133 | // First pass at matching, where we check that node types and idents match. | ||
134 | match_state.attempt_match_node(&match_inputs, &pattern_tree, code)?; | ||
135 | match_state.validate_range(&sema.original_range(code))?; | ||
136 | match_state.match_out = Some(Match { | ||
137 | range: sema.original_range(code).range, | ||
138 | matched_node: code.clone(), | ||
139 | placeholder_values: FxHashMap::default(), | ||
140 | ignored_comments: Vec::new(), | ||
141 | template: rule.template.clone(), | ||
142 | }); | ||
143 | // Second matching pass, where we record placeholder matches, ignored comments and maybe do | ||
144 | // any other more expensive checks that we didn't want to do on the first pass. | ||
145 | match_state.attempt_match_node(&match_inputs, &pattern_tree, code)?; | ||
146 | Ok(match_state.match_out.unwrap()) | ||
147 | } | ||
148 | |||
149 | /// Checks that `range` is within the permitted range if any. This is applicable when we're | ||
150 | /// processing a macro expansion and we want to fail the match if we're working with a node that | ||
151 | /// didn't originate from the token tree of the macro call. | ||
152 | fn validate_range(&self, range: &FileRange) -> Result<(), MatchFailed> { | ||
153 | if let Some(restrict_range) = &self.restrict_range { | ||
154 | if restrict_range.file_id != range.file_id | ||
155 | || !restrict_range.range.contains_range(range.range) | ||
156 | { | ||
157 | fail_match!("Node originated from a macro"); | ||
158 | } | ||
159 | } | ||
160 | Ok(()) | ||
161 | } | ||
162 | |||
163 | fn attempt_match_node( | ||
164 | &mut self, | ||
165 | match_inputs: &MatchInputs, | ||
166 | pattern: &SyntaxNode, | ||
167 | code: &SyntaxNode, | ||
168 | ) -> Result<(), MatchFailed> { | ||
169 | // Handle placeholders. | ||
170 | if let Some(placeholder) = | ||
171 | match_inputs.get_placeholder(&SyntaxElement::Node(pattern.clone())) | ||
172 | { | ||
173 | if self.match_out.is_none() { | ||
174 | return Ok(()); | ||
175 | } | ||
176 | let original_range = self.sema.original_range(code); | ||
177 | // We validated the range for the node when we started the match, so the placeholder | ||
178 | // probably can't fail range validation, but just to be safe... | ||
179 | self.validate_range(&original_range)?; | ||
180 | if let Some(match_out) = &mut self.match_out { | ||
181 | match_out.placeholder_values.insert( | ||
182 | Var(placeholder.ident.to_string()), | ||
183 | PlaceholderMatch::new(code, original_range), | ||
184 | ); | ||
185 | } | ||
186 | return Ok(()); | ||
187 | } | ||
188 | // Non-placeholders. | ||
189 | if pattern.kind() != code.kind() { | ||
190 | fail_match!("Pattern had a {:?}, code had {:?}", pattern.kind(), code.kind()); | ||
191 | } | ||
192 | // Some kinds of nodes have special handling. For everything else, we fall back to default | ||
193 | // matching. | ||
194 | match code.kind() { | ||
195 | SyntaxKind::RECORD_FIELD_LIST => { | ||
196 | self.attempt_match_record_field_list(match_inputs, pattern, code) | ||
197 | } | ||
198 | _ => self.attempt_match_node_children(match_inputs, pattern, code), | ||
199 | } | ||
200 | } | ||
201 | |||
202 | fn attempt_match_node_children( | ||
203 | &mut self, | ||
204 | match_inputs: &MatchInputs, | ||
205 | pattern: &SyntaxNode, | ||
206 | code: &SyntaxNode, | ||
207 | ) -> Result<(), MatchFailed> { | ||
208 | self.attempt_match_sequences( | ||
209 | match_inputs, | ||
210 | PatternIterator::new(pattern), | ||
211 | code.children_with_tokens(), | ||
212 | ) | ||
213 | } | ||
214 | |||
215 | fn attempt_match_sequences( | ||
216 | &mut self, | ||
217 | match_inputs: &MatchInputs, | ||
218 | pattern_it: PatternIterator, | ||
219 | mut code_it: SyntaxElementChildren, | ||
220 | ) -> Result<(), MatchFailed> { | ||
221 | let mut pattern_it = pattern_it.peekable(); | ||
222 | loop { | ||
223 | match self.next_non_trivial(&mut code_it) { | ||
224 | None => { | ||
225 | if let Some(p) = pattern_it.next() { | ||
226 | fail_match!("Part of the pattern was unmatched: {:?}", p); | ||
227 | } | ||
228 | return Ok(()); | ||
229 | } | ||
230 | Some(SyntaxElement::Token(c)) => { | ||
231 | self.attempt_match_token(&mut pattern_it, &c)?; | ||
232 | } | ||
233 | Some(SyntaxElement::Node(c)) => match pattern_it.next() { | ||
234 | Some(SyntaxElement::Node(p)) => { | ||
235 | self.attempt_match_node(match_inputs, &p, &c)?; | ||
236 | } | ||
237 | Some(p) => fail_match!("Pattern wanted '{}', code has {}", p, c.text()), | ||
238 | None => fail_match!("Pattern reached end, code has {}", c.text()), | ||
239 | }, | ||
240 | } | ||
241 | } | ||
242 | } | ||
243 | |||
244 | fn attempt_match_token( | ||
245 | &mut self, | ||
246 | pattern: &mut Peekable<PatternIterator>, | ||
247 | code: &ra_syntax::SyntaxToken, | ||
248 | ) -> Result<(), MatchFailed> { | ||
249 | self.record_ignored_comments(code); | ||
250 | // Ignore whitespace and comments. | ||
251 | if code.kind().is_trivia() { | ||
252 | return Ok(()); | ||
253 | } | ||
254 | if let Some(SyntaxElement::Token(p)) = pattern.peek() { | ||
255 | // If the code has a comma and the pattern is about to close something, then accept the | ||
256 | // comma without advancing the pattern. i.e. ignore trailing commas. | ||
257 | if code.kind() == SyntaxKind::COMMA && is_closing_token(p.kind()) { | ||
258 | return Ok(()); | ||
259 | } | ||
260 | // Conversely, if the pattern has a comma and the code doesn't, skip that part of the | ||
261 | // pattern and continue to match the code. | ||
262 | if p.kind() == SyntaxKind::COMMA && is_closing_token(code.kind()) { | ||
263 | pattern.next(); | ||
264 | } | ||
265 | } | ||
266 | // Consume an element from the pattern and make sure it matches. | ||
267 | match pattern.next() { | ||
268 | Some(SyntaxElement::Token(p)) => { | ||
269 | if p.kind() != code.kind() || p.text() != code.text() { | ||
270 | fail_match!( | ||
271 | "Pattern wanted token '{}' ({:?}), but code had token '{}' ({:?})", | ||
272 | p.text(), | ||
273 | p.kind(), | ||
274 | code.text(), | ||
275 | code.kind() | ||
276 | ) | ||
277 | } | ||
278 | } | ||
279 | Some(SyntaxElement::Node(p)) => { | ||
280 | // Not sure if this is actually reachable. | ||
281 | fail_match!( | ||
282 | "Pattern wanted {:?}, but code had token '{}' ({:?})", | ||
283 | p, | ||
284 | code.text(), | ||
285 | code.kind() | ||
286 | ); | ||
287 | } | ||
288 | None => { | ||
289 | fail_match!("Pattern exhausted, while code remains: `{}`", code.text()); | ||
290 | } | ||
291 | } | ||
292 | Ok(()) | ||
293 | } | ||
294 | |||
295 | /// We want to allow the records to match in any order, so we have special matching logic for | ||
296 | /// them. | ||
297 | fn attempt_match_record_field_list( | ||
298 | &mut self, | ||
299 | match_inputs: &MatchInputs, | ||
300 | pattern: &SyntaxNode, | ||
301 | code: &SyntaxNode, | ||
302 | ) -> Result<(), MatchFailed> { | ||
303 | // Build a map keyed by field name. | ||
304 | let mut fields_by_name = FxHashMap::default(); | ||
305 | for child in code.children() { | ||
306 | if let Some(record) = ast::RecordField::cast(child.clone()) { | ||
307 | if let Some(name) = record.field_name() { | ||
308 | fields_by_name.insert(name.text().clone(), child.clone()); | ||
309 | } | ||
310 | } | ||
311 | } | ||
312 | for p in pattern.children_with_tokens() { | ||
313 | if let SyntaxElement::Node(p) = p { | ||
314 | if let Some(name_element) = p.first_child_or_token() { | ||
315 | if match_inputs.get_placeholder(&name_element).is_some() { | ||
316 | // If the pattern is using placeholders for field names then order | ||
317 | // independence doesn't make sense. Fall back to regular ordered | ||
318 | // matching. | ||
319 | return self.attempt_match_node_children(match_inputs, pattern, code); | ||
320 | } | ||
321 | if let Some(ident) = only_ident(name_element) { | ||
322 | let code_record = fields_by_name.remove(ident.text()).ok_or_else(|| { | ||
323 | match_error!( | ||
324 | "Pattern has record field '{}', but code doesn't", | ||
325 | ident | ||
326 | ) | ||
327 | })?; | ||
328 | self.attempt_match_node(match_inputs, &p, &code_record)?; | ||
329 | } | ||
330 | } | ||
331 | } | ||
332 | } | ||
333 | if let Some(unmatched_fields) = fields_by_name.keys().next() { | ||
334 | fail_match!( | ||
335 | "{} field(s) of a record literal failed to match, starting with {}", | ||
336 | fields_by_name.len(), | ||
337 | unmatched_fields | ||
338 | ); | ||
339 | } | ||
340 | Ok(()) | ||
341 | } | ||
342 | |||
343 | fn next_non_trivial(&mut self, code_it: &mut SyntaxElementChildren) -> Option<SyntaxElement> { | ||
344 | loop { | ||
345 | let c = code_it.next(); | ||
346 | if let Some(SyntaxElement::Token(t)) = &c { | ||
347 | self.record_ignored_comments(t); | ||
348 | if t.kind().is_trivia() { | ||
349 | continue; | ||
350 | } | ||
351 | } | ||
352 | return c; | ||
353 | } | ||
354 | } | ||
355 | |||
356 | fn record_ignored_comments(&mut self, token: &SyntaxToken) { | ||
357 | if token.kind() == SyntaxKind::COMMENT { | ||
358 | if let Some(match_out) = &mut self.match_out { | ||
359 | if let Some(comment) = ast::Comment::cast(token.clone()) { | ||
360 | match_out.ignored_comments.push(comment); | ||
361 | } | ||
362 | } | ||
363 | } | ||
364 | } | ||
365 | } | ||
366 | |||
367 | impl MatchInputs<'_> { | ||
368 | fn get_placeholder(&self, element: &SyntaxElement) -> Option<&Placeholder> { | ||
369 | only_ident(element.clone()) | ||
370 | .and_then(|ident| self.ssr_pattern.placeholders_by_stand_in.get(ident.text())) | ||
371 | } | ||
372 | } | ||
373 | |||
374 | fn is_closing_token(kind: SyntaxKind) -> bool { | ||
375 | kind == SyntaxKind::R_PAREN || kind == SyntaxKind::R_CURLY || kind == SyntaxKind::R_BRACK | ||
376 | } | ||
377 | |||
378 | pub(crate) fn record_match_fails_reasons_scope<F, T>(debug_active: bool, f: F) -> T | ||
379 | where | ||
380 | F: Fn() -> T, | ||
381 | { | ||
382 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(debug_active)); | ||
383 | let res = f(); | ||
384 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.set(false)); | ||
385 | res | ||
386 | } | ||
387 | |||
// For performance reasons, we don't want to record the reason why every match fails, only the bit
// of code that the user indicated they thought would match. We use a thread local to indicate when
// we are trying to match that bit of code. This saves us having to pass a boolean into all the bits
// of code that can make the decision to not match.
thread_local! {
    // Off by default; toggled around a closure by `record_match_fails_reasons_scope`.
    pub static RECORDING_MATCH_FAIL_REASONS: Cell<bool> = Cell::new(false);
}
395 | |||
396 | fn recording_match_fail_reasons() -> bool { | ||
397 | RECORDING_MATCH_FAIL_REASONS.with(|c| c.get()) | ||
398 | } | ||
399 | |||
400 | impl PlaceholderMatch { | ||
401 | fn new(node: &SyntaxNode, range: FileRange) -> Self { | ||
402 | Self { node: node.clone(), range, inner_matches: SsrMatches::default() } | ||
403 | } | ||
404 | } | ||
405 | |||
impl SsrPattern {
    /// Returns the pattern parsed as the same kind of syntax as `kind` (expression, type,
    /// item, path or pattern). Fails with `MatchFailed` if `kind` isn't one we support, or if
    /// the pattern didn't parse as that kind when the rule was built.
    pub(crate) fn tree_for_kind(&self, kind: SyntaxKind) -> Result<&SyntaxNode, MatchFailed> {
        // Select the pre-parsed tree whose syntax-node family `kind` belongs to. Note that
        // `fail_match!` returns early from this function.
        let (tree, kind_name) = if ast::Expr::can_cast(kind) {
            (&self.expr, "expression")
        } else if ast::TypeRef::can_cast(kind) {
            (&self.type_ref, "type reference")
        } else if ast::ModuleItem::can_cast(kind) {
            (&self.item, "item")
        } else if ast::Path::can_cast(kind) {
            (&self.path, "path")
        } else if ast::Pat::can_cast(kind) {
            (&self.pattern, "pattern")
        } else {
            fail_match!("Matching nodes of kind {:?} is not supported", kind);
        };
        match tree {
            Some(tree) => Ok(tree),
            // The pattern text couldn't be parsed as this kind of thing at rule-parse time.
            None => fail_match!("Pattern cannot be parsed as a {}", kind_name),
        }
    }
}
427 | |||
428 | // If `node` contains nothing but an ident then return it, otherwise return None. | ||
429 | fn only_ident(element: SyntaxElement) -> Option<SyntaxToken> { | ||
430 | match element { | ||
431 | SyntaxElement::Token(t) => { | ||
432 | if t.kind() == SyntaxKind::IDENT { | ||
433 | return Some(t); | ||
434 | } | ||
435 | } | ||
436 | SyntaxElement::Node(n) => { | ||
437 | let mut children = n.children_with_tokens(); | ||
438 | if let (Some(only_child), None) = (children.next(), children.next()) { | ||
439 | return only_ident(only_child); | ||
440 | } | ||
441 | } | ||
442 | } | ||
443 | None | ||
444 | } | ||
445 | |||
/// Iterates over a pattern node's direct children, skipping trivia (whitespace and comments).
struct PatternIterator {
    iter: SyntaxElementChildren,
}
449 | |||
450 | impl Iterator for PatternIterator { | ||
451 | type Item = SyntaxElement; | ||
452 | |||
453 | fn next(&mut self) -> Option<SyntaxElement> { | ||
454 | while let Some(element) = self.iter.next() { | ||
455 | if !element.kind().is_trivia() { | ||
456 | return Some(element); | ||
457 | } | ||
458 | } | ||
459 | None | ||
460 | } | ||
461 | } | ||
462 | |||
impl PatternIterator {
    /// Creates an iterator over `parent`'s direct children; trivia is filtered in `next`.
    fn new(parent: &SyntaxNode) -> Self {
        Self { iter: parent.children_with_tokens() }
    }
}
468 | |||
#[cfg(test)]
mod tests {
    use super::*;
    use crate::MatchFinder;

    // End-to-end smoke test: parse a rule, find its single match in a one-file fixture, check
    // the captured placeholder, then apply the replacement edit.
    #[test]
    fn parse_match_replace() {
        let rule: SsrRule = "foo($x) ==>> bar($x)".parse().unwrap();
        let input = "fn main() { foo(1+2); }";

        use ra_db::fixture::WithFixture;
        let (db, file_id) = ra_ide_db::RootDatabase::with_single_file(input);
        let mut match_finder = MatchFinder::new(&db);
        match_finder.add_rule(rule);
        let matches = match_finder.find_matches_in_file(file_id);
        assert_eq!(matches.matches.len(), 1);
        assert_eq!(matches.matches[0].matched_node.text(), "foo(1+2)");
        assert_eq!(matches.matches[0].placeholder_values.len(), 1);
        assert_eq!(matches.matches[0].placeholder_values[&Var("x".to_string())].node.text(), "1+2");

        let edit = crate::replacing::matches_to_edit(&matches);
        let mut after = input.to_string();
        edit.apply(&mut after);
        assert_eq!(after, "fn main() { bar(1+2); }");
    }
}
diff --git a/crates/ra_ssr/src/parsing.rs b/crates/ra_ssr/src/parsing.rs new file mode 100644 index 000000000..90c13dbc2 --- /dev/null +++ b/crates/ra_ssr/src/parsing.rs | |||
@@ -0,0 +1,272 @@ | |||
1 | //! This file contains code for parsing SSR rules, which look something like `foo($a) ==>> bar($b)`. | ||
2 | //! We first split everything before and after the separator `==>>`. Next, both the search pattern | ||
3 | //! and the replacement template get tokenized by the Rust tokenizer. Tokens are then searched for | ||
4 | //! placeholders, which start with `$`. For replacement templates, this is the final form. For | ||
5 | //! search patterns, we go further and parse the pattern as each kind of thing that we can match. | ||
6 | //! e.g. expressions, type references etc. | ||
7 | |||
8 | use crate::{SsrError, SsrPattern, SsrRule}; | ||
9 | use ra_syntax::{ast, AstNode, SmolStr, SyntaxKind}; | ||
10 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
11 | use std::str::FromStr; | ||
12 | |||
/// Returns from the current function with an error, supplied by arguments as for format!
/// The single-expression arm avoids a needless `format!` for preformatted messages.
macro_rules! bail {
    ($e:expr) => {return Err($crate::SsrError::new($e))};
    ($fmt:expr, $($arg:tt)+) => {return Err($crate::SsrError::new(format!($fmt, $($arg)+)))}
}
18 | |||
/// A replacement template (the right-hand side of a rule), kept as the raw token/placeholder
/// sequence — whitespace tokens included — so replacements render verbatim.
#[derive(Clone, Debug)]
pub(crate) struct SsrTemplate {
    pub(crate) tokens: Vec<PatternElement>,
}
23 | |||
/// A search pattern as a raw token/placeholder sequence, before being parsed as Rust syntax.
#[derive(Debug)]
pub(crate) struct RawSearchPattern {
    tokens: Vec<PatternElement>,
}
28 | |||
// Part of a search or replace pattern.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum PatternElement {
    /// A literal token that must appear in the code.
    Token(Token),
    /// A `$name` placeholder that captures whatever it matches.
    Placeholder(Placeholder),
}
35 | |||
/// A `$name` placeholder occurring in a search pattern or replacement template.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) struct Placeholder {
    /// The name of this placeholder. e.g. for "$a", this would be "a"
    pub(crate) ident: SmolStr,
    /// A unique name used in place of this placeholder when we parse the pattern as Rust code.
    stand_in_name: String,
}
43 | |||
/// A single lexed token of a pattern/template: its syntax kind plus the source text.
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Token {
    kind: SyntaxKind,
    pub(crate) text: SmolStr,
}
49 | |||
50 | impl FromStr for SsrRule { | ||
51 | type Err = SsrError; | ||
52 | |||
53 | fn from_str(query: &str) -> Result<SsrRule, SsrError> { | ||
54 | let mut it = query.split("==>>"); | ||
55 | let pattern = it.next().expect("at least empty string").trim(); | ||
56 | let template = it | ||
57 | .next() | ||
58 | .ok_or_else(|| SsrError("Cannot find delemiter `==>>`".into()))? | ||
59 | .trim() | ||
60 | .to_string(); | ||
61 | if it.next().is_some() { | ||
62 | return Err(SsrError("More than one delimiter found".into())); | ||
63 | } | ||
64 | let rule = SsrRule { pattern: pattern.parse()?, template: template.parse()? }; | ||
65 | validate_rule(&rule)?; | ||
66 | Ok(rule) | ||
67 | } | ||
68 | } | ||
69 | |||
70 | impl FromStr for RawSearchPattern { | ||
71 | type Err = SsrError; | ||
72 | |||
73 | fn from_str(pattern_str: &str) -> Result<RawSearchPattern, SsrError> { | ||
74 | Ok(RawSearchPattern { tokens: parse_pattern(pattern_str)? }) | ||
75 | } | ||
76 | } | ||
77 | |||
78 | impl RawSearchPattern { | ||
79 | /// Returns this search pattern as Rust source code that we can feed to the Rust parser. | ||
80 | fn as_rust_code(&self) -> String { | ||
81 | let mut res = String::new(); | ||
82 | for t in &self.tokens { | ||
83 | res.push_str(match t { | ||
84 | PatternElement::Token(token) => token.text.as_str(), | ||
85 | PatternElement::Placeholder(placeholder) => placeholder.stand_in_name.as_str(), | ||
86 | }); | ||
87 | } | ||
88 | res | ||
89 | } | ||
90 | |||
91 | fn placeholders_by_stand_in(&self) -> FxHashMap<SmolStr, Placeholder> { | ||
92 | let mut res = FxHashMap::default(); | ||
93 | for t in &self.tokens { | ||
94 | if let PatternElement::Placeholder(placeholder) = t { | ||
95 | res.insert(SmolStr::new(placeholder.stand_in_name.clone()), placeholder.clone()); | ||
96 | } | ||
97 | } | ||
98 | res | ||
99 | } | ||
100 | } | ||
101 | |||
102 | impl FromStr for SsrPattern { | ||
103 | type Err = SsrError; | ||
104 | |||
105 | fn from_str(pattern_str: &str) -> Result<SsrPattern, SsrError> { | ||
106 | let raw: RawSearchPattern = pattern_str.parse()?; | ||
107 | let raw_str = raw.as_rust_code(); | ||
108 | let res = SsrPattern { | ||
109 | expr: ast::Expr::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
110 | type_ref: ast::TypeRef::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
111 | item: ast::ModuleItem::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
112 | path: ast::Path::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
113 | pattern: ast::Pat::parse(&raw_str).ok().map(|n| n.syntax().clone()), | ||
114 | placeholders_by_stand_in: raw.placeholders_by_stand_in(), | ||
115 | raw, | ||
116 | }; | ||
117 | if res.expr.is_none() | ||
118 | && res.type_ref.is_none() | ||
119 | && res.item.is_none() | ||
120 | && res.path.is_none() | ||
121 | && res.pattern.is_none() | ||
122 | { | ||
123 | bail!("Pattern is not a valid Rust expression, type, item, path or pattern"); | ||
124 | } | ||
125 | Ok(res) | ||
126 | } | ||
127 | } | ||
128 | |||
impl FromStr for SsrTemplate {
    type Err = SsrError;

    fn from_str(pattern_str: &str) -> Result<SsrTemplate, SsrError> {
        let tokens = parse_pattern(pattern_str)?;
        // Validate that the template is a valid fragment of Rust code. We reuse the validation
        // logic for search patterns since the only thing that differs is the error message.
        if SsrPattern::from_str(pattern_str).is_err() {
            bail!("Replacement is not a valid Rust expression, type, item, path or pattern");
        }
        // The SsrPattern built above is used only for validation and then discarded; the
        // template keeps its own token list (whitespace tokens included) so that replacements
        // are rendered verbatim.
        Ok(SsrTemplate { tokens })
    }
}
143 | |||
/// Parses `pattern_str` into a flat sequence of tokens and `$name` placeholders. Whitespace
/// tokens are kept as-is; nothing is filtered here. Fails on lexer errors, malformed
/// placeholders, or a placeholder name used more than once.
fn parse_pattern(pattern_str: &str) -> Result<Vec<PatternElement>, SsrError> {
    let mut res = Vec::new();
    // Placeholder names must be unique within a single pattern.
    let mut placeholder_names = FxHashSet::default();
    let mut tokens = tokenize(pattern_str)?.into_iter();
    while let Some(token) = tokens.next() {
        if token.kind == SyntaxKind::DOLLAR {
            // `$` introduces a placeholder; the following IDENT token names it.
            let placeholder = parse_placeholder(&mut tokens)?;
            if !placeholder_names.insert(placeholder.ident.clone()) {
                bail!("Name `{}` repeats more than once", placeholder.ident);
            }
            res.push(PatternElement::Placeholder(placeholder));
        } else {
            res.push(PatternElement::Token(token));
        }
    }
    Ok(res)
}
164 | |||
165 | /// Checks for errors in a rule. e.g. the replace pattern referencing placeholders that the search | ||
166 | /// pattern didn't define. | ||
167 | fn validate_rule(rule: &SsrRule) -> Result<(), SsrError> { | ||
168 | let mut defined_placeholders = std::collections::HashSet::new(); | ||
169 | for p in &rule.pattern.raw.tokens { | ||
170 | if let PatternElement::Placeholder(placeholder) = p { | ||
171 | defined_placeholders.insert(&placeholder.ident); | ||
172 | } | ||
173 | } | ||
174 | let mut undefined = Vec::new(); | ||
175 | for p in &rule.template.tokens { | ||
176 | if let PatternElement::Placeholder(placeholder) = p { | ||
177 | if !defined_placeholders.contains(&placeholder.ident) { | ||
178 | undefined.push(format!("${}", placeholder.ident)); | ||
179 | } | ||
180 | } | ||
181 | } | ||
182 | if !undefined.is_empty() { | ||
183 | bail!("Replacement contains undefined placeholders: {}", undefined.join(", ")); | ||
184 | } | ||
185 | Ok(()) | ||
186 | } | ||
187 | |||
188 | fn tokenize(source: &str) -> Result<Vec<Token>, SsrError> { | ||
189 | let mut start = 0; | ||
190 | let (raw_tokens, errors) = ra_syntax::tokenize(source); | ||
191 | if let Some(first_error) = errors.first() { | ||
192 | bail!("Failed to parse pattern: {}", first_error); | ||
193 | } | ||
194 | let mut tokens: Vec<Token> = Vec::new(); | ||
195 | for raw_token in raw_tokens { | ||
196 | let token_len = usize::from(raw_token.len); | ||
197 | tokens.push(Token { | ||
198 | kind: raw_token.kind, | ||
199 | text: SmolStr::new(&source[start..start + token_len]), | ||
200 | }); | ||
201 | start += token_len; | ||
202 | } | ||
203 | Ok(tokens) | ||
204 | } | ||
205 | |||
206 | fn parse_placeholder(tokens: &mut std::vec::IntoIter<Token>) -> Result<Placeholder, SsrError> { | ||
207 | let mut name = None; | ||
208 | if let Some(token) = tokens.next() { | ||
209 | match token.kind { | ||
210 | SyntaxKind::IDENT => { | ||
211 | name = Some(token.text); | ||
212 | } | ||
213 | _ => { | ||
214 | bail!("Placeholders should be $name"); | ||
215 | } | ||
216 | } | ||
217 | } | ||
218 | let name = name.ok_or_else(|| SsrError::new("Placeholder ($) with no name"))?; | ||
219 | Ok(Placeholder::new(name)) | ||
220 | } | ||
221 | |||
impl Placeholder {
    /// Creates a placeholder called `name`, deriving the unique stand-in identifier
    /// (`__placeholder_<name>`) substituted when the pattern is parsed as Rust code.
    fn new(name: SmolStr) -> Self {
        Self { stand_in_name: format!("__placeholder_{}", name), ident: name }
    }
}
227 | |||
impl SsrError {
    /// Constructs an error from any string-convertible message.
    fn new(message: impl Into<String>) -> SsrError {
        SsrError(message.into())
    }
}
233 | |||
#[cfg(test)]
mod tests {
    use super::*;

    // Checks the token/placeholder decomposition of both sides of a rule. Note the search
    // pattern and template both retain whitespace tokens at this stage.
    #[test]
    fn parser_happy_case() {
        fn token(kind: SyntaxKind, text: &str) -> PatternElement {
            PatternElement::Token(Token { kind, text: SmolStr::new(text) })
        }
        fn placeholder(name: &str) -> PatternElement {
            PatternElement::Placeholder(Placeholder::new(SmolStr::new(name)))
        }
        let result: SsrRule = "foo($a, $b) ==>> bar($b, $a)".parse().unwrap();
        assert_eq!(
            result.pattern.raw.tokens,
            vec![
                token(SyntaxKind::IDENT, "foo"),
                token(SyntaxKind::L_PAREN, "("),
                placeholder("a"),
                token(SyntaxKind::COMMA, ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("b"),
                token(SyntaxKind::R_PAREN, ")"),
            ]
        );
        assert_eq!(
            result.template.tokens,
            vec![
                token(SyntaxKind::IDENT, "bar"),
                token(SyntaxKind::L_PAREN, "("),
                placeholder("b"),
                token(SyntaxKind::COMMA, ","),
                token(SyntaxKind::WHITESPACE, " "),
                placeholder("a"),
                token(SyntaxKind::R_PAREN, ")"),
            ]
        );
    }
}
diff --git a/crates/ra_ssr/src/replacing.rs b/crates/ra_ssr/src/replacing.rs new file mode 100644 index 000000000..81a5e06a9 --- /dev/null +++ b/crates/ra_ssr/src/replacing.rs | |||
@@ -0,0 +1,55 @@ | |||
1 | //! Code for applying replacement templates for matches that have previously been found. | ||
2 | |||
3 | use crate::matching::Var; | ||
4 | use crate::parsing::PatternElement; | ||
5 | use crate::{Match, SsrMatches}; | ||
6 | use ra_syntax::ast::AstToken; | ||
7 | use ra_syntax::TextSize; | ||
8 | use ra_text_edit::TextEdit; | ||
9 | |||
10 | /// Returns a text edit that will replace each match in `matches` with its corresponding replacement | ||
11 | /// template. Placeholders in the template will have been substituted with whatever they matched to | ||
12 | /// in the original code. | ||
13 | pub(crate) fn matches_to_edit(matches: &SsrMatches) -> TextEdit { | ||
14 | matches_to_edit_at_offset(matches, 0.into()) | ||
15 | } | ||
16 | |||
17 | fn matches_to_edit_at_offset(matches: &SsrMatches, relative_start: TextSize) -> TextEdit { | ||
18 | let mut edit_builder = ra_text_edit::TextEditBuilder::default(); | ||
19 | for m in &matches.matches { | ||
20 | edit_builder.replace(m.range.checked_sub(relative_start).unwrap(), render_replace(m)); | ||
21 | } | ||
22 | edit_builder.finish() | ||
23 | } | ||
24 | |||
25 | fn render_replace(match_info: &Match) -> String { | ||
26 | let mut out = String::new(); | ||
27 | for r in &match_info.template.tokens { | ||
28 | match r { | ||
29 | PatternElement::Token(t) => out.push_str(t.text.as_str()), | ||
30 | PatternElement::Placeholder(p) => { | ||
31 | if let Some(placeholder_value) = | ||
32 | match_info.placeholder_values.get(&Var(p.ident.to_string())) | ||
33 | { | ||
34 | let range = &placeholder_value.range.range; | ||
35 | let mut matched_text = placeholder_value.node.text().to_string(); | ||
36 | let edit = | ||
37 | matches_to_edit_at_offset(&placeholder_value.inner_matches, range.start()); | ||
38 | edit.apply(&mut matched_text); | ||
39 | out.push_str(&matched_text); | ||
40 | } else { | ||
41 | // We validated that all placeholder references were valid before we | ||
42 | // started, so this shouldn't happen. | ||
43 | panic!( | ||
44 | "Internal error: replacement referenced unknown placeholder {}", | ||
45 | p.ident | ||
46 | ); | ||
47 | } | ||
48 | } | ||
49 | } | ||
50 | } | ||
51 | for comment in &match_info.ignored_comments { | ||
52 | out.push_str(&comment.syntax().to_string()); | ||
53 | } | ||
54 | out | ||
55 | } | ||
diff --git a/crates/ra_ssr/src/tests.rs b/crates/ra_ssr/src/tests.rs new file mode 100644 index 000000000..4b747fe18 --- /dev/null +++ b/crates/ra_ssr/src/tests.rs | |||
@@ -0,0 +1,496 @@ | |||
1 | use crate::matching::MatchFailureReason; | ||
2 | use crate::{matching, Match, MatchFinder, SsrMatches, SsrPattern, SsrRule}; | ||
3 | use matching::record_match_fails_reasons_scope; | ||
4 | use ra_db::{FileId, FileRange, SourceDatabaseExt}; | ||
5 | use ra_syntax::ast::AstNode; | ||
6 | use ra_syntax::{ast, SyntaxKind, SyntaxNode, TextRange}; | ||
7 | |||
/// Diagnostic record explaining why one syntax node did or didn't match a rule.
struct MatchDebugInfo {
    node: SyntaxNode,
    /// Our search pattern parsed as the same kind of syntax node as `node`. e.g. expression, item,
    /// etc. Will be absent if the pattern can't be parsed as that kind.
    pattern: Result<SyntaxNode, MatchFailureReason>,
    /// The match result for `node`, or the reason it failed to match.
    matched: Result<Match, MatchFailureReason>,
}
15 | |||
impl SsrPattern {
    /// Like `tree_for_kind`, but runs with failure-reason recording enabled so the error
    /// carries a human-readable reason (used by the debug helpers in this module).
    pub(crate) fn tree_for_kind_with_reason(
        &self,
        kind: SyntaxKind,
    ) -> Result<&SyntaxNode, MatchFailureReason> {
        record_match_fails_reasons_scope(true, || self.tree_for_kind(kind))
            .map_err(|e| MatchFailureReason { reason: e.reason.unwrap() })
    }
}
25 | |||
26 | impl std::fmt::Debug for MatchDebugInfo { | ||
27 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
28 | write!(f, "========= PATTERN ==========\n")?; | ||
29 | match &self.pattern { | ||
30 | Ok(pattern) => { | ||
31 | write!(f, "{:#?}", pattern)?; | ||
32 | } | ||
33 | Err(err) => { | ||
34 | write!(f, "{}", err.reason)?; | ||
35 | } | ||
36 | } | ||
37 | write!( | ||
38 | f, | ||
39 | "\n============ AST ===========\n\ | ||
40 | {:#?}\n============================", | ||
41 | self.node | ||
42 | )?; | ||
43 | match &self.matched { | ||
44 | Ok(_) => write!(f, "Node matched")?, | ||
45 | Err(reason) => write!(f, "Node failed to match because: {}", reason.reason)?, | ||
46 | } | ||
47 | Ok(()) | ||
48 | } | ||
49 | } | ||
50 | |||
51 | impl SsrMatches { | ||
52 | /// Returns `self` with any nested matches removed and made into top-level matches. | ||
53 | pub(crate) fn flattened(self) -> SsrMatches { | ||
54 | let mut out = SsrMatches::default(); | ||
55 | self.flatten_into(&mut out); | ||
56 | out | ||
57 | } | ||
58 | |||
59 | fn flatten_into(self, out: &mut SsrMatches) { | ||
60 | for mut m in self.matches { | ||
61 | for p in m.placeholder_values.values_mut() { | ||
62 | std::mem::replace(&mut p.inner_matches, SsrMatches::default()).flatten_into(out); | ||
63 | } | ||
64 | out.matches.push(m); | ||
65 | } | ||
66 | } | ||
67 | } | ||
68 | |||
impl Match {
    /// Returns the source text covered by the matched node (test convenience).
    pub(crate) fn matched_text(&self) -> String {
        self.matched_node.text().to_string()
    }
}
74 | |||
impl<'db> MatchFinder<'db> {
    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
    /// intend to do replacement, use `add_rule` instead.
    fn add_search_pattern(&mut self, pattern: SsrPattern) {
        // The "()" template is a stand-in to satisfy the rule structure; it is never rendered
        // when we only search.
        self.add_rule(SsrRule { pattern, template: "()".parse().unwrap() })
    }

    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
    /// them, while recording reasons why they don't match. This API is useful for command
    /// line-based debugging where providing a range is difficult.
    fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
        let file = self.sema.parse(file_id);
        let mut res = Vec::new();
        let file_text = self.sema.db.file_text(file_id);
        let mut remaining_text = file_text.as_str();
        // `base` is the byte offset of `remaining_text` within the whole file, so `start`/`end`
        // below are absolute file offsets.
        let mut base = 0;
        let len = snippet.len() as u32;
        while let Some(offset) = remaining_text.find(snippet) {
            let start = base + offset as u32;
            let end = start + len;
            self.output_debug_for_nodes_at_range(
                file.syntax(),
                TextRange::new(start.into(), end.into()),
                &None,
                &mut res,
            );
            remaining_text = &remaining_text[offset + snippet.len()..];
            base = end;
        }
        res
    }

    // Walks `node`'s children looking for one whose range is exactly `range`; records match
    // diagnostics for every rule against such nodes, and descends into macro expansions when
    // `range` falls strictly inside a macro call.
    fn output_debug_for_nodes_at_range(
        &self,
        node: &SyntaxNode,
        range: TextRange,
        restrict_range: &Option<FileRange>,
        out: &mut Vec<MatchDebugInfo>,
    ) {
        for node in node.children() {
            if !node.text_range().contains_range(range) {
                continue;
            }
            if node.text_range() == range {
                for rule in &self.rules {
                    let pattern =
                        rule.pattern.tree_for_kind_with_reason(node.kind()).map(|p| p.clone());
                    out.push(MatchDebugInfo {
                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
                            .map_err(|e| MatchFailureReason {
                                reason: e.reason.unwrap_or_else(|| {
                                    "Match failed, but no reason was given".to_owned()
                                }),
                            }),
                        pattern,
                        node: node.clone(),
                    });
                }
            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
                // `range` is inside this macro call: recurse into the expansion, restricting
                // matches to the original token-tree range.
                if let Some(expanded) = self.sema.expand(&macro_call) {
                    if let Some(tt) = macro_call.token_tree() {
                        self.output_debug_for_nodes_at_range(
                            &expanded,
                            range,
                            &Some(self.sema.original_range(tt.syntax())),
                            out,
                        );
                    }
                }
            }
        }
    }
}
148 | |||
149 | fn parse_error_text(query: &str) -> String { | ||
150 | format!("{}", query.parse::<SsrRule>().unwrap_err()) | ||
151 | } | ||
152 | |||
// Rule-parsing error cases. Note: "delemiter" matches the exact (typo'd) message produced by
// `FromStr for SsrRule` in parsing.rs.
#[test]
fn parser_empty_query() {
    assert_eq!(parse_error_text(""), "Parse error: Cannot find delemiter `==>>`");
}

#[test]
fn parser_no_delimiter() {
    assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delemiter `==>>`");
}

#[test]
fn parser_two_delimiters() {
    assert_eq!(
        parse_error_text("foo() ==>> a ==>> b "),
        "Parse error: More than one delimiter found"
    );
}

#[test]
fn parser_repeated_name() {
    assert_eq!(
        parse_error_text("foo($a, $a) ==>>"),
        "Parse error: Name `a` repeats more than once"
    );
}

#[test]
fn parser_invalid_pattern() {
    assert_eq!(
        parse_error_text(" ==>> ()"),
        "Parse error: Pattern is not a valid Rust expression, type, item, path or pattern"
    );
}

#[test]
fn parser_invalid_template() {
    assert_eq!(
        parse_error_text("() ==>> )"),
        "Parse error: Replacement is not a valid Rust expression, type, item, path or pattern"
    );
}

#[test]
fn parser_undefined_placeholder_in_replacement() {
    assert_eq!(
        parse_error_text("42 ==>> $a"),
        "Parse error: Replacement contains undefined placeholders: $a"
    );
}
202 | |||
/// Builds a one-file test database from `code`, returning the database and the file's id.
fn single_file(code: &str) -> (ra_ide_db::RootDatabase, FileId) {
    use ra_db::fixture::WithFixture;
    ra_ide_db::RootDatabase::with_single_file(code)
}
207 | |||
/// Convenience wrapper around `assert_ssr_transforms` for a single rule.
fn assert_ssr_transform(rule: &str, input: &str, result: &str) {
    assert_ssr_transforms(&[rule], input, result);
}
211 | |||
212 | fn assert_ssr_transforms(rules: &[&str], input: &str, result: &str) { | ||
213 | let (db, file_id) = single_file(input); | ||
214 | let mut match_finder = MatchFinder::new(&db); | ||
215 | for rule in rules { | ||
216 | let rule: SsrRule = rule.parse().unwrap(); | ||
217 | match_finder.add_rule(rule); | ||
218 | } | ||
219 | if let Some(edits) = match_finder.edits_for_file(file_id) { | ||
220 | let mut after = input.to_string(); | ||
221 | edits.apply(&mut after); | ||
222 | assert_eq!(after, result); | ||
223 | } else { | ||
224 | panic!("No edits were made"); | ||
225 | } | ||
226 | } | ||
227 | |||
/// Asserts that searching `code` for `pattern` yields exactly `expected` (flattened, in order).
/// When the assertion is about to fail, dumps per-node debug info for the first expected
/// snippet to stderr to help diagnose why it didn't match.
fn assert_matches(pattern: &str, code: &str, expected: &[&str]) {
    let (db, file_id) = single_file(code);
    let mut match_finder = MatchFinder::new(&db);
    match_finder.add_search_pattern(pattern.parse().unwrap());
    let matched_strings: Vec<String> = match_finder
        .find_matches_in_file(file_id)
        .flattened()
        .matches
        .iter()
        .map(|m| m.matched_text())
        .collect();
    if matched_strings != expected && !expected.is_empty() {
        let debug_info = match_finder.debug_where_text_equal(file_id, &expected[0]);
        eprintln!("Test is about to fail. Some possibly useful info: {} nodes had text exactly equal to '{}'", debug_info.len(), &expected[0]);
        for d in debug_info {
            eprintln!("{:#?}", d);
        }
    }
    assert_eq!(matched_strings, expected);
}
248 | |||
/// Asserts that `pattern` matches nothing anywhere in `code`.
fn assert_no_match(pattern: &str, code: &str) {
    assert_matches(pattern, code, &[]);
}
252 | |||
// Replacement behavior: placeholder substitution, whitespace tolerance and comment handling.
#[test]
fn ssr_function_to_method() {
    assert_ssr_transform(
        "my_function($a, $b) ==>> ($a).my_method($b)",
        "loop { my_function( other_func(x, y), z + w) }",
        "loop { (other_func(x, y)).my_method(z + w) }",
    )
}

#[test]
fn ssr_nested_function() {
    assert_ssr_transform(
        "foo($a, $b, $c) ==>> bar($c, baz($a, $b))",
        "fn main { foo (x + value.method(b), x+y-z, true && false) }",
        "fn main { bar(true && false, baz(x + value.method(b), x+y-z)) }",
    )
}

#[test]
fn ssr_expected_spacing() {
    assert_ssr_transform(
        "foo($x) + bar() ==>> bar($x)",
        "fn main() { foo(5) + bar() }",
        "fn main() { bar(5) }",
    );
}

#[test]
fn ssr_with_extra_space() {
    assert_ssr_transform(
        "foo($x ) + bar() ==>> bar($x)",
        "fn main() { foo( 5 ) +bar( ) }",
        "fn main() { bar(5) }",
    );
}

// A comment nested inside a placeholder's match is preserved in place.
#[test]
fn ssr_keeps_nested_comment() {
    assert_ssr_transform(
        "foo($x) ==>> bar($x)",
        "fn main() { foo(other(5 /* using 5 */)) }",
        "fn main() { bar(other(5 /* using 5 */)) }",
    )
}

// A comment skipped during matching is re-emitted after the replacement.
#[test]
fn ssr_keeps_comment() {
    assert_ssr_transform(
        "foo($x) ==>> bar($x)",
        "fn main() { foo(5 /* using 5 */) }",
        "fn main() { bar(5)/* using 5 */ }",
    )
}

// Record-literal fields match by name, independent of order.
#[test]
fn ssr_struct_lit() {
    assert_ssr_transform(
        "foo{a: $a, b: $b} ==>> foo::new($a, $b)",
        "fn main() { foo{b:2, a:1} }",
        "fn main() { foo::new(1, 2) }",
    )
}

#[test]
fn ignores_whitespace() {
    assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]);
    assert_matches("1 + 2", "fn f() -> i32 {1+2}", &["1+2"]);
}

#[test]
fn no_match() {
    assert_no_match("1 + 3", "fn f() -> i32 {1 + 2}");
}
326 | |||
// Matching behavior across the different syntax kinds a pattern can parse as.
#[test]
fn match_fn_definition() {
    assert_matches("fn $a($b: $t) {$c}", "fn f(a: i32) {bar()}", &["fn f(a: i32) {bar()}"]);
}

#[test]
fn match_struct_definition() {
    assert_matches(
        "struct $n {$f: Option<String>}",
        "struct Bar {} struct Foo {name: Option<String>}",
        &["struct Foo {name: Option<String>}"],
    );
}

#[test]
fn match_expr() {
    let code = "fn f() -> i32 {foo(40 + 2, 42)}";
    assert_matches("foo($a, $b)", code, &["foo(40 + 2, 42)"]);
    assert_no_match("foo($a, $b, $c)", code);
    assert_no_match("foo($a)", code);
}

#[test]
fn match_nested_method_calls() {
    assert_matches(
        "$a.z().z().z()",
        "fn f() {h().i().j().z().z().z().d().e()}",
        &["h().i().j().z().z().z()"],
    );
}

#[test]
fn match_complex_expr() {
    let code = "fn f() -> i32 {foo(bar(40, 2), 42)}";
    assert_matches("foo($a, $b)", code, &["foo(bar(40, 2), 42)"]);
    assert_no_match("foo($a, $b, $c)", code);
    assert_no_match("foo($a)", code);
    assert_matches("bar($a, $b)", code, &["bar(40, 2)"]);
}

// Trailing commas in the code should be ignored.
#[test]
fn match_with_trailing_commas() {
    // Code has comma, pattern doesn't.
    assert_matches("foo($a, $b)", "fn f() {foo(1, 2,);}", &["foo(1, 2,)"]);
    assert_matches("Foo{$a, $b}", "fn f() {Foo{1, 2,};}", &["Foo{1, 2,}"]);

    // Pattern has comma, code doesn't.
    assert_matches("foo($a, $b,)", "fn f() {foo(1, 2);}", &["foo(1, 2)"]);
    assert_matches("Foo{$a, $b,}", "fn f() {Foo{1, 2};}", &["Foo{1, 2}"]);
}

#[test]
fn match_type() {
    assert_matches("i32", "fn f() -> i32 {1 + 2}", &["i32"]);
    assert_matches("Option<$a>", "fn f() -> Option<i32> {42}", &["Option<i32>"]);
    assert_no_match("Option<$a>", "fn f() -> Result<i32, ()> {42}");
}

#[test]
fn match_struct_instantiation() {
    assert_matches(
        "Foo {bar: 1, baz: 2}",
        "fn f() {Foo {bar: 1, baz: 2}}",
        &["Foo {bar: 1, baz: 2}"],
    );
    // Now with placeholders for all parts of the struct.
    assert_matches(
        "Foo {$a: $b, $c: $d}",
        "fn f() {Foo {bar: 1, baz: 2}}",
        &["Foo {bar: 1, baz: 2}"],
    );
    assert_matches("Foo {}", "fn f() {Foo {}}", &["Foo {}"]);
}

#[test]
fn match_path() {
    assert_matches("foo::bar", "fn f() {foo::bar(42)}", &["foo::bar"]);
    assert_matches("$a::bar", "fn f() {foo::bar(42)}", &["foo::bar"]);
    assert_matches("foo::$b", "fn f() {foo::bar(42)}", &["foo::bar"]);
}

#[test]
fn match_pattern() {
    assert_matches("Some($a)", "fn f() {if let Some(x) = foo() {}}", &["Some(x)"]);
}
413 | |||
414 | #[test] | ||
415 | fn match_reordered_struct_instantiation() { | ||
416 | assert_matches( | ||
417 | "Foo {aa: 1, b: 2, ccc: 3}", | ||
418 | "fn f() {Foo {b: 2, ccc: 3, aa: 1}}", | ||
419 | &["Foo {b: 2, ccc: 3, aa: 1}"], | ||
420 | ); | ||
421 | assert_no_match("Foo {a: 1}", "fn f() {Foo {b: 1}}"); | ||
422 | assert_no_match("Foo {a: 1}", "fn f() {Foo {a: 2}}"); | ||
423 | assert_no_match("Foo {a: 1, b: 2}", "fn f() {Foo {a: 1}}"); | ||
424 | assert_no_match("Foo {a: 1, b: 2}", "fn f() {Foo {b: 2}}"); | ||
425 | assert_no_match("Foo {a: 1, }", "fn f() {Foo {a: 1, b: 2}}"); | ||
426 | assert_no_match("Foo {a: 1, z: 9}", "fn f() {Foo {a: 1}}"); | ||
427 | } | ||
428 | |||
429 | #[test] | ||
430 | fn replace_function_call() { | ||
431 | assert_ssr_transform("foo() ==>> bar()", "fn f1() {foo(); foo();}", "fn f1() {bar(); bar();}"); | ||
432 | } | ||
433 | |||
434 | #[test] | ||
435 | fn replace_function_call_with_placeholders() { | ||
436 | assert_ssr_transform( | ||
437 | "foo($a, $b) ==>> bar($b, $a)", | ||
438 | "fn f1() {foo(5, 42)}", | ||
439 | "fn f1() {bar(42, 5)}", | ||
440 | ); | ||
441 | } | ||
442 | |||
443 | #[test] | ||
444 | fn replace_nested_function_calls() { | ||
445 | assert_ssr_transform( | ||
446 | "foo($a) ==>> bar($a)", | ||
447 | "fn f1() {foo(foo(42))}", | ||
448 | "fn f1() {bar(bar(42))}", | ||
449 | ); | ||
450 | } | ||
451 | |||
452 | #[test] | ||
453 | fn replace_type() { | ||
454 | assert_ssr_transform( | ||
455 | "Result<(), $a> ==>> Option<$a>", | ||
456 | "fn f1() -> Result<(), Vec<Error>> {foo()}", | ||
457 | "fn f1() -> Option<Vec<Error>> {foo()}", | ||
458 | ); | ||
459 | } | ||
460 | |||
461 | #[test] | ||
462 | fn replace_struct_init() { | ||
463 | assert_ssr_transform( | ||
464 | "Foo {a: $a, b: $b} ==>> Foo::new($a, $b)", | ||
465 | "fn f1() {Foo{b: 1, a: 2}}", | ||
466 | "fn f1() {Foo::new(2, 1)}", | ||
467 | ); | ||
468 | } | ||
469 | |||
470 | #[test] | ||
471 | fn replace_binary_op() { | ||
472 | assert_ssr_transform( | ||
473 | "$a + $b ==>> $b + $a", | ||
474 | "fn f() {2 * 3 + 4 * 5}", | ||
475 | "fn f() {4 * 5 + 2 * 3}", | ||
476 | ); | ||
477 | assert_ssr_transform( | ||
478 | "$a + $b ==>> $b + $a", | ||
479 | "fn f() {1 + 2 + 3 + 4}", | ||
480 | "fn f() {4 + 3 + 2 + 1}", | ||
481 | ); | ||
482 | } | ||
483 | |||
484 | #[test] | ||
485 | fn match_binary_op() { | ||
486 | assert_matches("$a + $b", "fn f() {1 + 2 + 3 + 4}", &["1 + 2", "1 + 2 + 3", "1 + 2 + 3 + 4"]); | ||
487 | } | ||
488 | |||
489 | #[test] | ||
490 | fn multiple_rules() { | ||
491 | assert_ssr_transforms( | ||
492 | &["$a + 1 ==>> add_one($a)", "$a + $b ==>> add($a, $b)"], | ||
493 | "fn f() -> i32 {3 + 2 + 1}", | ||
494 | "fn f() -> i32 {add_one(add(3, 2))}", | ||
495 | ) | ||
496 | } | ||
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index 664894d1f..f7a885eb3 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -290,6 +290,11 @@ impl<'a> SyntaxRewriter<'a> { | |||
290 | N::cast(self.rewrite(node.syntax())).unwrap() | 290 | N::cast(self.rewrite(node.syntax())).unwrap() |
291 | } | 291 | } |
292 | 292 | ||
293 | /// Returns a node that encompasses all replacements to be done by this rewriter. | ||
294 | /// | ||
295 | /// Passing the returned node to `rewrite` will apply all replacements queued up in `self`. | ||
296 | /// | ||
297 | /// Returns `None` when there are no replacements. | ||
293 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { | 298 | pub fn rewrite_root(&self) -> Option<SyntaxNode> { |
294 | assert!(self.f.is_none()); | 299 | assert!(self.f.is_none()); |
295 | self.replacements | 300 | self.replacements |
@@ -298,6 +303,9 @@ impl<'a> SyntaxRewriter<'a> { | |||
298 | SyntaxElement::Node(it) => it.clone(), | 303 | SyntaxElement::Node(it) => it.clone(), |
299 | SyntaxElement::Token(it) => it.parent(), | 304 | SyntaxElement::Token(it) => it.parent(), |
300 | }) | 305 | }) |
306 | // If we only have one replacement, we must return its parent node, since `rewrite` does | ||
307 | // not replace the node passed to it. | ||
308 | .map(|it| it.parent().unwrap_or(it)) | ||
301 | .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) | 309 | .fold1(|a, b| least_common_ancestor(&a, &b).unwrap()) |
302 | } | 310 | } |
303 | 311 | ||
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index da0eb0926..192c610f1 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -75,6 +75,10 @@ pub fn record_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordF | |||
75 | } | 75 | } |
76 | } | 76 | } |
77 | 77 | ||
78 | pub fn record_field_def(name: ast::NameRef, ty: ast::TypeRef) -> ast::RecordFieldDef { | ||
79 | ast_from_text(&format!("struct S {{ {}: {}, }}", name, ty)) | ||
80 | } | ||
81 | |||
78 | pub fn block_expr( | 82 | pub fn block_expr( |
79 | stmts: impl IntoIterator<Item = ast::Stmt>, | 83 | stmts: impl IntoIterator<Item = ast::Stmt>, |
80 | tail_expr: Option<ast::Expr>, | 84 | tail_expr: Option<ast::Expr>, |
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index a33a35cc1..9b7664576 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -168,6 +168,41 @@ impl SourceFile { | |||
168 | } | 168 | } |
169 | } | 169 | } |
170 | 170 | ||
171 | impl ast::Path { | ||
172 | /// Returns `text`, parsed as a path, but only if it has no errors. | ||
173 | pub fn parse(text: &str) -> Result<Self, ()> { | ||
174 | parsing::parse_text_fragment(text, ra_parser::FragmentKind::Path) | ||
175 | } | ||
176 | } | ||
177 | |||
178 | impl ast::Pat { | ||
179 | /// Returns `text`, parsed as a pattern, but only if it has no errors. | ||
180 | pub fn parse(text: &str) -> Result<Self, ()> { | ||
181 | parsing::parse_text_fragment(text, ra_parser::FragmentKind::Pattern) | ||
182 | } | ||
183 | } | ||
184 | |||
185 | impl ast::Expr { | ||
186 | /// Returns `text`, parsed as an expression, but only if it has no errors. | ||
187 | pub fn parse(text: &str) -> Result<Self, ()> { | ||
188 | parsing::parse_text_fragment(text, ra_parser::FragmentKind::Expr) | ||
189 | } | ||
190 | } | ||
191 | |||
192 | impl ast::ModuleItem { | ||
193 | /// Returns `text`, parsed as an item, but only if it has no errors. | ||
194 | pub fn parse(text: &str) -> Result<Self, ()> { | ||
195 | parsing::parse_text_fragment(text, ra_parser::FragmentKind::Item) | ||
196 | } | ||
197 | } | ||
198 | |||
199 | impl ast::TypeRef { | ||
200 | /// Returns `text`, parsed as an type reference, but only if it has no errors. | ||
201 | pub fn parse(text: &str) -> Result<Self, ()> { | ||
202 | parsing::parse_text_fragment(text, ra_parser::FragmentKind::Type) | ||
203 | } | ||
204 | } | ||
205 | |||
171 | /// Matches a `SyntaxNode` against an `ast` type. | 206 | /// Matches a `SyntaxNode` against an `ast` type. |
172 | /// | 207 | /// |
173 | /// # Example: | 208 | /// # Example: |
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs index e5eb80850..0ed3c20ef 100644 --- a/crates/ra_syntax/src/parsing.rs +++ b/crates/ra_syntax/src/parsing.rs | |||
@@ -6,13 +6,14 @@ mod text_token_source; | |||
6 | mod text_tree_sink; | 6 | mod text_tree_sink; |
7 | mod reparsing; | 7 | mod reparsing; |
8 | 8 | ||
9 | use crate::{syntax_node::GreenNode, SyntaxError}; | 9 | use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; |
10 | use text_token_source::TextTokenSource; | 10 | use text_token_source::TextTokenSource; |
11 | use text_tree_sink::TextTreeSink; | 11 | use text_tree_sink::TextTreeSink; |
12 | 12 | ||
13 | pub use lexer::*; | 13 | pub use lexer::*; |
14 | 14 | ||
15 | pub(crate) use self::reparsing::incremental_reparse; | 15 | pub(crate) use self::reparsing::incremental_reparse; |
16 | use ra_parser::SyntaxKind; | ||
16 | 17 | ||
17 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { | 18 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { |
18 | let (tokens, lexer_errors) = tokenize(&text); | 19 | let (tokens, lexer_errors) = tokenize(&text); |
@@ -27,3 +28,32 @@ pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { | |||
27 | 28 | ||
28 | (tree, parser_errors) | 29 | (tree, parser_errors) |
29 | } | 30 | } |
31 | |||
32 | /// Returns `text` parsed as a `T` provided there are no parse errors. | ||
33 | pub(crate) fn parse_text_fragment<T: AstNode>( | ||
34 | text: &str, | ||
35 | fragment_kind: ra_parser::FragmentKind, | ||
36 | ) -> Result<T, ()> { | ||
37 | let (tokens, lexer_errors) = tokenize(&text); | ||
38 | if !lexer_errors.is_empty() { | ||
39 | return Err(()); | ||
40 | } | ||
41 | |||
42 | let mut token_source = TextTokenSource::new(text, &tokens); | ||
43 | let mut tree_sink = TextTreeSink::new(text, &tokens); | ||
44 | |||
45 | // TextTreeSink assumes that there's at least some root node to which it can attach errors and | ||
46 | // tokens. We arbitrarily give it a SourceFile. | ||
47 | use ra_parser::TreeSink; | ||
48 | tree_sink.start_node(SyntaxKind::SOURCE_FILE); | ||
49 | ra_parser::parse_fragment(&mut token_source, &mut tree_sink, fragment_kind); | ||
50 | tree_sink.finish_node(); | ||
51 | |||
52 | let (tree, parser_errors) = tree_sink.finish(); | ||
53 | use ra_parser::TokenSource; | ||
54 | if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF { | ||
55 | return Err(()); | ||
56 | } | ||
57 | |||
58 | SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(()) | ||
59 | } | ||
diff --git a/crates/ra_syntax/src/parsing/text_token_source.rs b/crates/ra_syntax/src/parsing/text_token_source.rs index 7ddc2c2c3..97aa3e795 100644 --- a/crates/ra_syntax/src/parsing/text_token_source.rs +++ b/crates/ra_syntax/src/parsing/text_token_source.rs | |||
@@ -1,40 +1,35 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! See `TextTokenSource` docs. |
2 | 2 | ||
3 | use ra_parser::Token as PToken; | ||
4 | use ra_parser::TokenSource; | 3 | use ra_parser::TokenSource; |
5 | 4 | ||
6 | use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize}; | 5 | use crate::{parsing::lexer::Token, SyntaxKind::EOF, TextRange, TextSize}; |
7 | 6 | ||
7 | /// Implementation of `ra_parser::TokenSource` that takes tokens from source code text. | ||
8 | pub(crate) struct TextTokenSource<'t> { | 8 | pub(crate) struct TextTokenSource<'t> { |
9 | text: &'t str, | 9 | text: &'t str, |
10 | /// start position of each token(expect whitespace and comment) | 10 | /// token and its start position (non-whitespace/comment tokens) |
11 | /// ```non-rust | 11 | /// ```non-rust |
12 | /// struct Foo; | 12 | /// struct Foo; |
13 | /// ^------^--- | 13 | /// ^------^--^- |
14 | /// | | ^- | 14 | /// | | \________ |
15 | /// 0 7 10 | 15 | /// | \____ \ |
16 | /// | \ | | ||
17 | /// (struct, 0) (Foo, 7) (;, 10) | ||
16 | /// ``` | 18 | /// ``` |
17 | /// (token, start_offset): `[(struct, 0), (Foo, 7), (;, 10)]` | 19 | /// `[(struct, 0), (Foo, 7), (;, 10)]` |
18 | start_offsets: Vec<TextSize>, | 20 | token_offset_pairs: Vec<(Token, TextSize)>, |
19 | /// non-whitespace/comment tokens | ||
20 | /// ```non-rust | ||
21 | /// struct Foo {} | ||
22 | /// ^^^^^^ ^^^ ^^ | ||
23 | /// ``` | ||
24 | /// tokens: `[struct, Foo, {, }]` | ||
25 | tokens: Vec<Token>, | ||
26 | 21 | ||
27 | /// Current token and position | 22 | /// Current token and position |
28 | curr: (PToken, usize), | 23 | curr: (ra_parser::Token, usize), |
29 | } | 24 | } |
30 | 25 | ||
31 | impl<'t> TokenSource for TextTokenSource<'t> { | 26 | impl<'t> TokenSource for TextTokenSource<'t> { |
32 | fn current(&self) -> PToken { | 27 | fn current(&self) -> ra_parser::Token { |
33 | self.curr.0 | 28 | self.curr.0 |
34 | } | 29 | } |
35 | 30 | ||
36 | fn lookahead_nth(&self, n: usize) -> PToken { | 31 | fn lookahead_nth(&self, n: usize) -> ra_parser::Token { |
37 | mk_token(self.curr.1 + n, &self.start_offsets, &self.tokens) | 32 | mk_token(self.curr.1 + n, &self.token_offset_pairs) |
38 | } | 33 | } |
39 | 34 | ||
40 | fn bump(&mut self) { | 35 | fn bump(&mut self) { |
@@ -43,45 +38,47 @@ impl<'t> TokenSource for TextTokenSource<'t> { | |||
43 | } | 38 | } |
44 | 39 | ||
45 | let pos = self.curr.1 + 1; | 40 | let pos = self.curr.1 + 1; |
46 | self.curr = (mk_token(pos, &self.start_offsets, &self.tokens), pos); | 41 | self.curr = (mk_token(pos, &self.token_offset_pairs), pos); |
47 | } | 42 | } |
48 | 43 | ||
49 | fn is_keyword(&self, kw: &str) -> bool { | 44 | fn is_keyword(&self, kw: &str) -> bool { |
50 | let pos = self.curr.1; | 45 | self.token_offset_pairs |
51 | if pos >= self.tokens.len() { | 46 | .get(self.curr.1) |
52 | return false; | 47 | .map(|(token, offset)| &self.text[TextRange::at(*offset, token.len)] == kw) |
53 | } | 48 | .unwrap_or(false) |
54 | let range = TextRange::at(self.start_offsets[pos], self.tokens[pos].len); | ||
55 | self.text[range] == *kw | ||
56 | } | 49 | } |
57 | } | 50 | } |
58 | 51 | ||
59 | fn mk_token(pos: usize, start_offsets: &[TextSize], tokens: &[Token]) -> PToken { | 52 | fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> ra_parser::Token { |
60 | let kind = tokens.get(pos).map(|t| t.kind).unwrap_or(EOF); | 53 | let (kind, is_jointed_to_next) = match token_offset_pairs.get(pos) { |
61 | let is_jointed_to_next = if pos + 1 < start_offsets.len() { | 54 | Some((token, offset)) => ( |
62 | start_offsets[pos] + tokens[pos].len == start_offsets[pos + 1] | 55 | token.kind, |
63 | } else { | 56 | token_offset_pairs |
64 | false | 57 | .get(pos + 1) |
58 | .map(|(_, next_offset)| offset + token.len == *next_offset) | ||
59 | .unwrap_or(false), | ||
60 | ), | ||
61 | None => (EOF, false), | ||
65 | }; | 62 | }; |
66 | 63 | ra_parser::Token { kind, is_jointed_to_next } | |
67 | PToken { kind, is_jointed_to_next } | ||
68 | } | 64 | } |
69 | 65 | ||
70 | impl<'t> TextTokenSource<'t> { | 66 | impl<'t> TextTokenSource<'t> { |
71 | /// Generate input from tokens(expect comment and whitespace). | 67 | /// Generate input from tokens(expect comment and whitespace). |
72 | pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { | 68 | pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { |
73 | let mut tokens = Vec::new(); | 69 | let token_offset_pairs: Vec<_> = raw_tokens |
74 | let mut start_offsets = Vec::new(); | 70 | .iter() |
75 | let mut len = 0.into(); | 71 | .filter_map({ |
76 | for &token in raw_tokens.iter() { | 72 | let mut len = 0.into(); |
77 | if !token.kind.is_trivia() { | 73 | move |token| { |
78 | tokens.push(token); | 74 | let pair = if token.kind.is_trivia() { None } else { Some((*token, len)) }; |
79 | start_offsets.push(len); | 75 | len += token.len; |
80 | } | 76 | pair |
81 | len += token.len; | 77 | } |
82 | } | 78 | }) |
79 | .collect(); | ||
83 | 80 | ||
84 | let first = mk_token(0, &start_offsets, &tokens); | 81 | let first = mk_token(0, &token_offset_pairs); |
85 | TextTokenSource { text, start_offsets, tokens, curr: (first, 0) } | 82 | TextTokenSource { text, token_offset_pairs, curr: (first, 0) } |
86 | } | 83 | } |
87 | } | 84 | } |
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs index aee57db62..959967b79 100644 --- a/crates/ra_syntax/src/tests.rs +++ b/crates/ra_syntax/src/tests.rs | |||
@@ -55,6 +55,51 @@ fn parser_tests() { | |||
55 | } | 55 | } |
56 | 56 | ||
57 | #[test] | 57 | #[test] |
58 | fn expr_parser_tests() { | ||
59 | fragment_parser_dir_test( | ||
60 | &["parser/fragments/expr/ok"], | ||
61 | &["parser/fragments/expr/err"], | ||
62 | crate::ast::Expr::parse, | ||
63 | ); | ||
64 | } | ||
65 | |||
66 | #[test] | ||
67 | fn path_parser_tests() { | ||
68 | fragment_parser_dir_test( | ||
69 | &["parser/fragments/path/ok"], | ||
70 | &["parser/fragments/path/err"], | ||
71 | crate::ast::Path::parse, | ||
72 | ); | ||
73 | } | ||
74 | |||
75 | #[test] | ||
76 | fn pattern_parser_tests() { | ||
77 | fragment_parser_dir_test( | ||
78 | &["parser/fragments/pattern/ok"], | ||
79 | &["parser/fragments/pattern/err"], | ||
80 | crate::ast::Pat::parse, | ||
81 | ); | ||
82 | } | ||
83 | |||
84 | #[test] | ||
85 | fn item_parser_tests() { | ||
86 | fragment_parser_dir_test( | ||
87 | &["parser/fragments/item/ok"], | ||
88 | &["parser/fragments/item/err"], | ||
89 | crate::ast::ModuleItem::parse, | ||
90 | ); | ||
91 | } | ||
92 | |||
93 | #[test] | ||
94 | fn type_parser_tests() { | ||
95 | fragment_parser_dir_test( | ||
96 | &["parser/fragments/type/ok"], | ||
97 | &["parser/fragments/type/err"], | ||
98 | crate::ast::TypeRef::parse, | ||
99 | ); | ||
100 | } | ||
101 | |||
102 | #[test] | ||
58 | fn parser_fuzz_tests() { | 103 | fn parser_fuzz_tests() { |
59 | for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) { | 104 | for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) { |
60 | fuzz::check_parser(&text) | 105 | fuzz::check_parser(&text) |
@@ -134,3 +179,24 @@ fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) | |||
134 | } | 179 | } |
135 | acc | 180 | acc |
136 | } | 181 | } |
182 | |||
183 | fn fragment_parser_dir_test<T, F>(ok_paths: &[&str], err_paths: &[&str], f: F) | ||
184 | where | ||
185 | T: crate::AstNode, | ||
186 | F: Fn(&str) -> Result<T, ()>, | ||
187 | { | ||
188 | dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| { | ||
189 | if let Ok(node) = f(text) { | ||
190 | format!("{:#?}", crate::ast::AstNode::syntax(&node)) | ||
191 | } else { | ||
192 | panic!("Failed to parse '{:?}'", path); | ||
193 | } | ||
194 | }); | ||
195 | dir_tests(&test_data_dir(), err_paths, "rast", |text, path| { | ||
196 | if let Ok(_) = f(text) { | ||
197 | panic!("'{:?}' successfully parsed when it should have errored", path); | ||
198 | } else { | ||
199 | "ERROR\n".to_owned() | ||
200 | } | ||
201 | }); | ||
202 | } | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast b/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs b/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs new file mode 100644 index 000000000..ca49acb07 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/expr/err/0000_truncated_add.rs | |||
@@ -0,0 +1 @@ | |||
1 + | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rast b/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rast new file mode 100644 index 000000000..fa78a02a6 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rast | |||
@@ -0,0 +1,8 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] "1" | ||
4 | [email protected] " " | ||
5 | [email protected] "+" | ||
6 | [email protected] " " | ||
7 | [email protected] | ||
8 | [email protected] "2" | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rs b/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rs new file mode 100644 index 000000000..e0ef58402 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/expr/ok/0000_add.rs | |||
@@ -0,0 +1 @@ | |||
1 + 2 | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast b/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs b/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs new file mode 100644 index 000000000..dc32389bb --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/item/err/0000_extra_keyword.rs | |||
@@ -0,0 +1 @@ | |||
fn fn foo() {} | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rast b/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rast new file mode 100644 index 000000000..f1e78f388 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rast | |||
@@ -0,0 +1,12 @@ | |||
1 | [email protected] | ||
2 | [email protected] "fn" | ||
3 | [email protected] " " | ||
4 | [email protected] | ||
5 | [email protected] "foo" | ||
6 | [email protected] | ||
7 | [email protected] "(" | ||
8 | [email protected] ")" | ||
9 | [email protected] " " | ||
10 | [email protected] | ||
11 | [email protected] "{" | ||
12 | [email protected] "}" | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rs b/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rs new file mode 100644 index 000000000..8f3b7ef11 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/item/ok/0000_fn.rs | |||
@@ -0,0 +1 @@ | |||
fn foo() {} | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast b/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs b/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs new file mode 100644 index 000000000..2046de049 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/err/0000_reserved_word.rs | |||
@@ -0,0 +1 @@ | |||
struct | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rast b/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rs b/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rs new file mode 100644 index 000000000..745e8d376 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/err/0001_expression.rs | |||
@@ -0,0 +1 @@ | |||
a + b | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast b/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast new file mode 100644 index 000000000..0c5d4360f --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rast | |||
@@ -0,0 +1,4 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] | ||
4 | [email protected] "foo" | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs b/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs new file mode 100644 index 000000000..257cc5642 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/ok/0000_single_ident.rs | |||
@@ -0,0 +1 @@ | |||
foo | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rast b/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rast new file mode 100644 index 000000000..4a2b45e6a --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rast | |||
@@ -0,0 +1,14 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] | ||
4 | [email protected] | ||
5 | [email protected] | ||
6 | [email protected] "foo" | ||
7 | [email protected] "::" | ||
8 | [email protected] | ||
9 | [email protected] | ||
10 | [email protected] "bar" | ||
11 | [email protected] "::" | ||
12 | [email protected] | ||
13 | [email protected] | ||
14 | [email protected] "baz" | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rs b/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rs new file mode 100644 index 000000000..81e0b21cd --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/path/ok/0001_multipart.rs | |||
@@ -0,0 +1 @@ | |||
foo::bar::baz | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs new file mode 100644 index 000000000..ae26fc455 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0000_reserved_word.rs | |||
@@ -0,0 +1 @@ | |||
fn | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs new file mode 100644 index 000000000..61a391d08 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/pattern/err/0001_missing_paren.rs | |||
@@ -0,0 +1 @@ | |||
Some(x | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast b/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast new file mode 100644 index 000000000..15eb7f9c6 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rast | |||
@@ -0,0 +1,10 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] | ||
4 | [email protected] | ||
5 | [email protected] "Some" | ||
6 | [email protected] "(" | ||
7 | [email protected] | ||
8 | [email protected] | ||
9 | [email protected] "x" | ||
10 | [email protected] ")" | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs b/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs new file mode 100644 index 000000000..87114dd78 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/pattern/ok/0000_enum.rs | |||
@@ -0,0 +1 @@ | |||
Some(x) | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rast b/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rast new file mode 100644 index 000000000..5df7507e2 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rast | |||
@@ -0,0 +1 @@ | |||
ERROR | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rs b/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rs new file mode 100644 index 000000000..caa4d7c09 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/type/err/0000_missing_close.rs | |||
@@ -0,0 +1 @@ | |||
Result<Foo, Bar | |||
diff --git a/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rast b/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rast new file mode 100644 index 000000000..8831cfa6c --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rast | |||
@@ -0,0 +1,22 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] | ||
4 | [email protected] | ||
5 | [email protected] "Result" | ||
6 | [email protected] | ||
7 | [email protected] "<" | ||
8 | [email protected] | ||
9 | [email protected] | ||
10 | [email protected] | ||
11 | [email protected] | ||
12 | [email protected] | ||
13 | [email protected] "Foo" | ||
14 | [email protected] "," | ||
15 | [email protected] " " | ||
16 | [email protected] | ||
17 | [email protected] | ||
18 | [email protected] | ||
19 | [email protected] | ||
20 | [email protected] | ||
21 | [email protected] "Bar" | ||
22 | [email protected] ">" | ||
diff --git a/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rs b/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rs new file mode 100644 index 000000000..b50b3bb3b --- /dev/null +++ b/crates/ra_syntax/test_data/parser/fragments/type/ok/0000_result.rs | |||
@@ -0,0 +1 @@ | |||
Result<Foo, Bar> | |||
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 458089e53..2b46e8905 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -32,7 +32,7 @@ threadpool = "1.7.1" | |||
32 | 32 | ||
33 | stdx = { path = "../stdx" } | 33 | stdx = { path = "../stdx" } |
34 | 34 | ||
35 | lsp-server = "0.3.2" | 35 | lsp-server = "0.3.3" |
36 | ra_flycheck = { path = "../ra_flycheck" } | 36 | ra_flycheck = { path = "../ra_flycheck" } |
37 | ra_ide = { path = "../ra_ide" } | 37 | ra_ide = { path = "../ra_ide" } |
38 | ra_prof = { path = "../ra_prof" } | 38 | ra_prof = { path = "../ra_prof" } |
diff --git a/crates/rust-analyzer/build.rs b/crates/rust-analyzer/build.rs index d4b010c04..5ae76ba30 100644 --- a/crates/rust-analyzer/build.rs +++ b/crates/rust-analyzer/build.rs | |||
@@ -5,11 +5,14 @@ use std::{env, path::PathBuf, process::Command}; | |||
5 | fn main() { | 5 | fn main() { |
6 | set_rerun(); | 6 | set_rerun(); |
7 | 7 | ||
8 | let rev = rev().unwrap_or_else(|| "???????".to_string()); | 8 | let rev = |
9 | env::var("RUST_ANALYZER_REV").ok().or_else(rev).unwrap_or_else(|| "???????".to_string()); | ||
9 | println!("cargo:rustc-env=REV={}", rev) | 10 | println!("cargo:rustc-env=REV={}", rev) |
10 | } | 11 | } |
11 | 12 | ||
12 | fn set_rerun() { | 13 | fn set_rerun() { |
14 | println!("cargo:rerun-if-env-changed=RUST_ANALYZER_REV"); | ||
15 | |||
13 | let mut manifest_dir = PathBuf::from( | 16 | let mut manifest_dir = PathBuf::from( |
14 | env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."), | 17 | env::var("CARGO_MANIFEST_DIR").expect("`CARGO_MANIFEST_DIR` is always set by cargo."), |
15 | ); | 18 | ); |
diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 44f856f6b..5c22dce0d 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | 2 | ||
3 | use ra_cfg::CfgExpr; | 3 | use ra_cfg::CfgExpr; |
4 | use ra_ide::{FileId, RunnableKind, TestId}; | 4 | use ra_ide::{FileId, RunnableKind, TestId}; |
5 | use ra_project_model::{self, ProjectWorkspace, TargetKind}; | 5 | use ra_project_model::{self, TargetKind}; |
6 | 6 | ||
7 | use crate::{global_state::GlobalStateSnapshot, Result}; | 7 | use crate::{global_state::GlobalStateSnapshot, Result}; |
8 | 8 | ||
@@ -89,27 +89,23 @@ impl CargoTargetSpec { | |||
89 | } | 89 | } |
90 | 90 | ||
91 | pub(crate) fn for_file( | 91 | pub(crate) fn for_file( |
92 | world: &GlobalStateSnapshot, | 92 | global_state_snapshot: &GlobalStateSnapshot, |
93 | file_id: FileId, | 93 | file_id: FileId, |
94 | ) -> Result<Option<CargoTargetSpec>> { | 94 | ) -> Result<Option<CargoTargetSpec>> { |
95 | let &crate_id = match world.analysis().crate_for(file_id)?.first() { | 95 | let crate_id = match global_state_snapshot.analysis().crate_for(file_id)?.first() { |
96 | Some(crate_id) => crate_id, | 96 | Some(crate_id) => *crate_id, |
97 | None => return Ok(None), | 97 | None => return Ok(None), |
98 | }; | 98 | }; |
99 | let file_id = world.analysis().crate_root(crate_id)?; | 99 | let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_crate_root(crate_id) { |
100 | let path = world.file_id_to_path(file_id); | 100 | Some(it) => it, |
101 | let res = world.workspaces.iter().find_map(|ws| match ws { | 101 | None => return Ok(None), |
102 | ProjectWorkspace::Cargo { cargo, .. } => { | 102 | }; |
103 | let tgt = cargo.target_by_root(&path)?; | 103 | let res = CargoTargetSpec { |
104 | Some(CargoTargetSpec { | 104 | package: cargo_ws.package_flag(&cargo_ws[cargo_ws[target].package]), |
105 | package: cargo.package_flag(&cargo[cargo[tgt].package]), | 105 | target: cargo_ws[target].name.clone(), |
106 | target: cargo[tgt].name.clone(), | 106 | target_kind: cargo_ws[target].kind, |
107 | target_kind: cargo[tgt].kind, | 107 | }; |
108 | }) | 108 | Ok(Some(res)) |
109 | } | ||
110 | ProjectWorkspace::Json { .. } => None, | ||
111 | }); | ||
112 | Ok(res) | ||
113 | } | 109 | } |
114 | 110 | ||
115 | pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) { | 111 | pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) { |
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 0e7a937a0..aa2c4ae15 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs | |||
@@ -9,6 +9,7 @@ | |||
9 | 9 | ||
10 | use std::{ffi::OsString, path::PathBuf}; | 10 | use std::{ffi::OsString, path::PathBuf}; |
11 | 11 | ||
12 | use crate::diagnostics::DiagnosticsConfig; | ||
12 | use lsp_types::ClientCapabilities; | 13 | use lsp_types::ClientCapabilities; |
13 | use ra_flycheck::FlycheckConfig; | 14 | use ra_flycheck::FlycheckConfig; |
14 | use ra_ide::{AssistConfig, CompletionConfig, HoverConfig, InlayHintsConfig}; | 15 | use ra_ide::{AssistConfig, CompletionConfig, HoverConfig, InlayHintsConfig}; |
@@ -20,6 +21,7 @@ pub struct Config { | |||
20 | pub client_caps: ClientCapsConfig, | 21 | pub client_caps: ClientCapsConfig, |
21 | 22 | ||
22 | pub publish_diagnostics: bool, | 23 | pub publish_diagnostics: bool, |
24 | pub diagnostics: DiagnosticsConfig, | ||
23 | pub lru_capacity: Option<usize>, | 25 | pub lru_capacity: Option<usize>, |
24 | pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>, | 26 | pub proc_macro_srv: Option<(PathBuf, Vec<OsString>)>, |
25 | pub files: FilesConfig, | 27 | pub files: FilesConfig, |
@@ -136,6 +138,7 @@ impl Default for Config { | |||
136 | 138 | ||
137 | with_sysroot: true, | 139 | with_sysroot: true, |
138 | publish_diagnostics: true, | 140 | publish_diagnostics: true, |
141 | diagnostics: DiagnosticsConfig::default(), | ||
139 | lru_capacity: None, | 142 | lru_capacity: None, |
140 | proc_macro_srv: None, | 143 | proc_macro_srv: None, |
141 | files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() }, | 144 | files: FilesConfig { watcher: FilesWatcher::Notify, exclude: Vec::new() }, |
@@ -184,6 +187,8 @@ impl Config { | |||
184 | 187 | ||
185 | set(value, "/withSysroot", &mut self.with_sysroot); | 188 | set(value, "/withSysroot", &mut self.with_sysroot); |
186 | set(value, "/diagnostics/enable", &mut self.publish_diagnostics); | 189 | set(value, "/diagnostics/enable", &mut self.publish_diagnostics); |
190 | set(value, "/diagnostics/warningsAsInfo", &mut self.diagnostics.warnings_as_info); | ||
191 | set(value, "/diagnostics/warningsAsHint", &mut self.diagnostics.warnings_as_hint); | ||
187 | set(value, "/lruCapacity", &mut self.lru_capacity); | 192 | set(value, "/lruCapacity", &mut self.lru_capacity); |
188 | self.files.watcher = match get(value, "/files/watcher") { | 193 | self.files.watcher = match get(value, "/files/watcher") { |
189 | Some("client") => FilesWatcher::Client, | 194 | Some("client") => FilesWatcher::Client, |
@@ -291,6 +296,7 @@ impl Config { | |||
291 | set(value, "/hoverActions/implementations", &mut self.hover.implementations); | 296 | set(value, "/hoverActions/implementations", &mut self.hover.implementations); |
292 | set(value, "/hoverActions/run", &mut self.hover.run); | 297 | set(value, "/hoverActions/run", &mut self.hover.run); |
293 | set(value, "/hoverActions/debug", &mut self.hover.debug); | 298 | set(value, "/hoverActions/debug", &mut self.hover.debug); |
299 | set(value, "/hoverActions/gotoTypeDef", &mut self.hover.goto_type_def); | ||
294 | } else { | 300 | } else { |
295 | self.hover = HoverConfig::NO_ACTIONS; | 301 | self.hover = HoverConfig::NO_ACTIONS; |
296 | } | 302 | } |
diff --git a/crates/rust-analyzer/src/diagnostics.rs b/crates/rust-analyzer/src/diagnostics.rs index 25856c543..290609e7f 100644 --- a/crates/rust-analyzer/src/diagnostics.rs +++ b/crates/rust-analyzer/src/diagnostics.rs | |||
@@ -11,6 +11,12 @@ use crate::lsp_ext; | |||
11 | pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; | 11 | pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; |
12 | 12 | ||
13 | #[derive(Debug, Default, Clone)] | 13 | #[derive(Debug, Default, Clone)] |
14 | pub struct DiagnosticsConfig { | ||
15 | pub warnings_as_info: Vec<String>, | ||
16 | pub warnings_as_hint: Vec<String>, | ||
17 | } | ||
18 | |||
19 | #[derive(Debug, Default, Clone)] | ||
14 | pub struct DiagnosticCollection { | 20 | pub struct DiagnosticCollection { |
15 | pub native: HashMap<FileId, Vec<Diagnostic>>, | 21 | pub native: HashMap<FileId, Vec<Diagnostic>>, |
16 | pub check: HashMap<FileId, Vec<Diagnostic>>, | 22 | pub check: HashMap<FileId, Vec<Diagnostic>>, |
diff --git a/crates/rust-analyzer/src/diagnostics/snapshots/rust_analyzer__diagnostics__to_proto__tests__snap_rustc_unused_variable_as_hint.snap b/crates/rust-analyzer/src/diagnostics/snapshots/rust_analyzer__diagnostics__to_proto__tests__snap_rustc_unused_variable_as_hint.snap new file mode 100644 index 000000000..f0273315e --- /dev/null +++ b/crates/rust-analyzer/src/diagnostics/snapshots/rust_analyzer__diagnostics__to_proto__tests__snap_rustc_unused_variable_as_hint.snap | |||
@@ -0,0 +1,86 @@ | |||
1 | --- | ||
2 | source: crates/rust-analyzer/src/diagnostics/to_proto.rs | ||
3 | expression: diag | ||
4 | --- | ||
5 | [ | ||
6 | MappedRustDiagnostic { | ||
7 | location: Location { | ||
8 | uri: "file:///test/driver/subcommand/repl.rs", | ||
9 | range: Range { | ||
10 | start: Position { | ||
11 | line: 290, | ||
12 | character: 8, | ||
13 | }, | ||
14 | end: Position { | ||
15 | line: 290, | ||
16 | character: 11, | ||
17 | }, | ||
18 | }, | ||
19 | }, | ||
20 | diagnostic: Diagnostic { | ||
21 | range: Range { | ||
22 | start: Position { | ||
23 | line: 290, | ||
24 | character: 8, | ||
25 | }, | ||
26 | end: Position { | ||
27 | line: 290, | ||
28 | character: 11, | ||
29 | }, | ||
30 | }, | ||
31 | severity: Some( | ||
32 | Hint, | ||
33 | ), | ||
34 | code: Some( | ||
35 | String( | ||
36 | "unused_variables", | ||
37 | ), | ||
38 | ), | ||
39 | source: Some( | ||
40 | "rustc", | ||
41 | ), | ||
42 | message: "unused variable: `foo`\n#[warn(unused_variables)] on by default", | ||
43 | related_information: None, | ||
44 | tags: Some( | ||
45 | [ | ||
46 | Unnecessary, | ||
47 | ], | ||
48 | ), | ||
49 | }, | ||
50 | fixes: [ | ||
51 | CodeAction { | ||
52 | title: "consider prefixing with an underscore", | ||
53 | id: None, | ||
54 | group: None, | ||
55 | kind: Some( | ||
56 | "quickfix", | ||
57 | ), | ||
58 | command: None, | ||
59 | edit: Some( | ||
60 | SnippetWorkspaceEdit { | ||
61 | changes: Some( | ||
62 | { | ||
63 | "file:///test/driver/subcommand/repl.rs": [ | ||
64 | TextEdit { | ||
65 | range: Range { | ||
66 | start: Position { | ||
67 | line: 290, | ||
68 | character: 8, | ||
69 | }, | ||
70 | end: Position { | ||
71 | line: 290, | ||
72 | character: 11, | ||
73 | }, | ||
74 | }, | ||
75 | new_text: "_foo", | ||
76 | }, | ||
77 | ], | ||
78 | }, | ||
79 | ), | ||
80 | document_changes: None, | ||
81 | }, | ||
82 | ), | ||
83 | }, | ||
84 | ], | ||
85 | }, | ||
86 | ] | ||
diff --git a/crates/rust-analyzer/src/diagnostics/snapshots/rust_analyzer__diagnostics__to_proto__tests__snap_rustc_unused_variable_as_info.snap b/crates/rust-analyzer/src/diagnostics/snapshots/rust_analyzer__diagnostics__to_proto__tests__snap_rustc_unused_variable_as_info.snap new file mode 100644 index 000000000..85fd050fd --- /dev/null +++ b/crates/rust-analyzer/src/diagnostics/snapshots/rust_analyzer__diagnostics__to_proto__tests__snap_rustc_unused_variable_as_info.snap | |||
@@ -0,0 +1,86 @@ | |||
1 | --- | ||
2 | source: crates/rust-analyzer/src/diagnostics/to_proto.rs | ||
3 | expression: diag | ||
4 | --- | ||
5 | [ | ||
6 | MappedRustDiagnostic { | ||
7 | location: Location { | ||
8 | uri: "file:///test/driver/subcommand/repl.rs", | ||
9 | range: Range { | ||
10 | start: Position { | ||
11 | line: 290, | ||
12 | character: 8, | ||
13 | }, | ||
14 | end: Position { | ||
15 | line: 290, | ||
16 | character: 11, | ||
17 | }, | ||
18 | }, | ||
19 | }, | ||
20 | diagnostic: Diagnostic { | ||
21 | range: Range { | ||
22 | start: Position { | ||
23 | line: 290, | ||
24 | character: 8, | ||
25 | }, | ||
26 | end: Position { | ||
27 | line: 290, | ||
28 | character: 11, | ||
29 | }, | ||
30 | }, | ||
31 | severity: Some( | ||
32 | Information, | ||
33 | ), | ||
34 | code: Some( | ||
35 | String( | ||
36 | "unused_variables", | ||
37 | ), | ||
38 | ), | ||
39 | source: Some( | ||
40 | "rustc", | ||
41 | ), | ||
42 | message: "unused variable: `foo`\n#[warn(unused_variables)] on by default", | ||
43 | related_information: None, | ||
44 | tags: Some( | ||
45 | [ | ||
46 | Unnecessary, | ||
47 | ], | ||
48 | ), | ||
49 | }, | ||
50 | fixes: [ | ||
51 | CodeAction { | ||
52 | title: "consider prefixing with an underscore", | ||
53 | id: None, | ||
54 | group: None, | ||
55 | kind: Some( | ||
56 | "quickfix", | ||
57 | ), | ||
58 | command: None, | ||
59 | edit: Some( | ||
60 | SnippetWorkspaceEdit { | ||
61 | changes: Some( | ||
62 | { | ||
63 | "file:///test/driver/subcommand/repl.rs": [ | ||
64 | TextEdit { | ||
65 | range: Range { | ||
66 | start: Position { | ||
67 | line: 290, | ||
68 | character: 8, | ||
69 | }, | ||
70 | end: Position { | ||
71 | line: 290, | ||
72 | character: 11, | ||
73 | }, | ||
74 | }, | ||
75 | new_text: "_foo", | ||
76 | }, | ||
77 | ], | ||
78 | }, | ||
79 | ), | ||
80 | document_changes: None, | ||
81 | }, | ||
82 | ), | ||
83 | }, | ||
84 | ], | ||
85 | }, | ||
86 | ] | ||
diff --git a/crates/rust-analyzer/src/diagnostics/to_proto.rs b/crates/rust-analyzer/src/diagnostics/to_proto.rs index 24ff9b280..ba74f15f3 100644 --- a/crates/rust-analyzer/src/diagnostics/to_proto.rs +++ b/crates/rust-analyzer/src/diagnostics/to_proto.rs | |||
@@ -9,14 +9,24 @@ use lsp_types::{ | |||
9 | use ra_flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion}; | 9 | use ra_flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion}; |
10 | use stdx::format_to; | 10 | use stdx::format_to; |
11 | 11 | ||
12 | use super::DiagnosticsConfig; | ||
12 | use crate::{lsp_ext, to_proto::url_from_abs_path}; | 13 | use crate::{lsp_ext, to_proto::url_from_abs_path}; |
13 | 14 | ||
14 | /// Converts a Rust level string to a LSP severity | 15 | /// Determines the LSP severity from a diagnostic |
15 | fn map_level_to_severity(val: DiagnosticLevel) -> Option<DiagnosticSeverity> { | 16 | fn map_diagnostic_to_severity( |
16 | let res = match val { | 17 | config: &DiagnosticsConfig, |
18 | val: &ra_flycheck::Diagnostic, | ||
19 | ) -> Option<DiagnosticSeverity> { | ||
20 | let res = match val.level { | ||
17 | DiagnosticLevel::Ice => DiagnosticSeverity::Error, | 21 | DiagnosticLevel::Ice => DiagnosticSeverity::Error, |
18 | DiagnosticLevel::Error => DiagnosticSeverity::Error, | 22 | DiagnosticLevel::Error => DiagnosticSeverity::Error, |
19 | DiagnosticLevel::Warning => DiagnosticSeverity::Warning, | 23 | DiagnosticLevel::Warning => match &val.code { |
24 | Some(code) if config.warnings_as_hint.contains(&code.code) => DiagnosticSeverity::Hint, | ||
25 | Some(code) if config.warnings_as_info.contains(&code.code) => { | ||
26 | DiagnosticSeverity::Information | ||
27 | } | ||
28 | _ => DiagnosticSeverity::Warning, | ||
29 | }, | ||
20 | DiagnosticLevel::Note => DiagnosticSeverity::Information, | 30 | DiagnosticLevel::Note => DiagnosticSeverity::Information, |
21 | DiagnosticLevel::Help => DiagnosticSeverity::Hint, | 31 | DiagnosticLevel::Help => DiagnosticSeverity::Hint, |
22 | DiagnosticLevel::Unknown => return None, | 32 | DiagnosticLevel::Unknown => return None, |
@@ -172,6 +182,7 @@ pub(crate) struct MappedRustDiagnostic { | |||
172 | /// | 182 | /// |
173 | /// If the diagnostic has no primary span this will return `None` | 183 | /// If the diagnostic has no primary span this will return `None` |
174 | pub(crate) fn map_rust_diagnostic_to_lsp( | 184 | pub(crate) fn map_rust_diagnostic_to_lsp( |
185 | config: &DiagnosticsConfig, | ||
175 | rd: &ra_flycheck::Diagnostic, | 186 | rd: &ra_flycheck::Diagnostic, |
176 | workspace_root: &Path, | 187 | workspace_root: &Path, |
177 | ) -> Vec<MappedRustDiagnostic> { | 188 | ) -> Vec<MappedRustDiagnostic> { |
@@ -180,7 +191,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
180 | return Vec::new(); | 191 | return Vec::new(); |
181 | } | 192 | } |
182 | 193 | ||
183 | let severity = map_level_to_severity(rd.level); | 194 | let severity = map_diagnostic_to_severity(config, rd); |
184 | 195 | ||
185 | let mut source = String::from("rustc"); | 196 | let mut source = String::from("rustc"); |
186 | let mut code = rd.code.as_ref().map(|c| c.code.clone()); | 197 | let mut code = rd.code.as_ref().map(|c| c.code.clone()); |
@@ -328,7 +339,7 @@ mod tests { | |||
328 | ); | 339 | ); |
329 | 340 | ||
330 | let workspace_root = Path::new("/test/"); | 341 | let workspace_root = Path::new("/test/"); |
331 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 342 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
332 | insta::assert_debug_snapshot!(diag); | 343 | insta::assert_debug_snapshot!(diag); |
333 | } | 344 | } |
334 | 345 | ||
@@ -410,7 +421,183 @@ mod tests { | |||
410 | ); | 421 | ); |
411 | 422 | ||
412 | let workspace_root = Path::new("/test/"); | 423 | let workspace_root = Path::new("/test/"); |
413 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 424 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
425 | insta::assert_debug_snapshot!(diag); | ||
426 | } | ||
427 | |||
428 | #[test] | ||
429 | #[cfg(not(windows))] | ||
430 | fn snap_rustc_unused_variable_as_info() { | ||
431 | let diag = parse_diagnostic( | ||
432 | r##"{ | ||
433 | "message": "unused variable: `foo`", | ||
434 | "code": { | ||
435 | "code": "unused_variables", | ||
436 | "explanation": null | ||
437 | }, | ||
438 | "level": "warning", | ||
439 | "spans": [ | ||
440 | { | ||
441 | "file_name": "driver/subcommand/repl.rs", | ||
442 | "byte_start": 9228, | ||
443 | "byte_end": 9231, | ||
444 | "line_start": 291, | ||
445 | "line_end": 291, | ||
446 | "column_start": 9, | ||
447 | "column_end": 12, | ||
448 | "is_primary": true, | ||
449 | "text": [ | ||
450 | { | ||
451 | "text": " let foo = 42;", | ||
452 | "highlight_start": 9, | ||
453 | "highlight_end": 12 | ||
454 | } | ||
455 | ], | ||
456 | "label": null, | ||
457 | "suggested_replacement": null, | ||
458 | "suggestion_applicability": null, | ||
459 | "expansion": null | ||
460 | } | ||
461 | ], | ||
462 | "children": [ | ||
463 | { | ||
464 | "message": "#[warn(unused_variables)] on by default", | ||
465 | "code": null, | ||
466 | "level": "note", | ||
467 | "spans": [], | ||
468 | "children": [], | ||
469 | "rendered": null | ||
470 | }, | ||
471 | { | ||
472 | "message": "consider prefixing with an underscore", | ||
473 | "code": null, | ||
474 | "level": "help", | ||
475 | "spans": [ | ||
476 | { | ||
477 | "file_name": "driver/subcommand/repl.rs", | ||
478 | "byte_start": 9228, | ||
479 | "byte_end": 9231, | ||
480 | "line_start": 291, | ||
481 | "line_end": 291, | ||
482 | "column_start": 9, | ||
483 | "column_end": 12, | ||
484 | "is_primary": true, | ||
485 | "text": [ | ||
486 | { | ||
487 | "text": " let foo = 42;", | ||
488 | "highlight_start": 9, | ||
489 | "highlight_end": 12 | ||
490 | } | ||
491 | ], | ||
492 | "label": null, | ||
493 | "suggested_replacement": "_foo", | ||
494 | "suggestion_applicability": "MachineApplicable", | ||
495 | "expansion": null | ||
496 | } | ||
497 | ], | ||
498 | "children": [], | ||
499 | "rendered": null | ||
500 | } | ||
501 | ], | ||
502 | "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n" | ||
503 | }"##, | ||
504 | ); | ||
505 | |||
506 | let config = DiagnosticsConfig { | ||
507 | warnings_as_info: vec!["unused_variables".to_string()], | ||
508 | ..DiagnosticsConfig::default() | ||
509 | }; | ||
510 | |||
511 | let workspace_root = Path::new("/test/"); | ||
512 | let diag = map_rust_diagnostic_to_lsp(&config, &diag, workspace_root); | ||
513 | insta::assert_debug_snapshot!(diag); | ||
514 | } | ||
515 | |||
516 | #[test] | ||
517 | #[cfg(not(windows))] | ||
518 | fn snap_rustc_unused_variable_as_hint() { | ||
519 | let diag = parse_diagnostic( | ||
520 | r##"{ | ||
521 | "message": "unused variable: `foo`", | ||
522 | "code": { | ||
523 | "code": "unused_variables", | ||
524 | "explanation": null | ||
525 | }, | ||
526 | "level": "warning", | ||
527 | "spans": [ | ||
528 | { | ||
529 | "file_name": "driver/subcommand/repl.rs", | ||
530 | "byte_start": 9228, | ||
531 | "byte_end": 9231, | ||
532 | "line_start": 291, | ||
533 | "line_end": 291, | ||
534 | "column_start": 9, | ||
535 | "column_end": 12, | ||
536 | "is_primary": true, | ||
537 | "text": [ | ||
538 | { | ||
539 | "text": " let foo = 42;", | ||
540 | "highlight_start": 9, | ||
541 | "highlight_end": 12 | ||
542 | } | ||
543 | ], | ||
544 | "label": null, | ||
545 | "suggested_replacement": null, | ||
546 | "suggestion_applicability": null, | ||
547 | "expansion": null | ||
548 | } | ||
549 | ], | ||
550 | "children": [ | ||
551 | { | ||
552 | "message": "#[warn(unused_variables)] on by default", | ||
553 | "code": null, | ||
554 | "level": "note", | ||
555 | "spans": [], | ||
556 | "children": [], | ||
557 | "rendered": null | ||
558 | }, | ||
559 | { | ||
560 | "message": "consider prefixing with an underscore", | ||
561 | "code": null, | ||
562 | "level": "help", | ||
563 | "spans": [ | ||
564 | { | ||
565 | "file_name": "driver/subcommand/repl.rs", | ||
566 | "byte_start": 9228, | ||
567 | "byte_end": 9231, | ||
568 | "line_start": 291, | ||
569 | "line_end": 291, | ||
570 | "column_start": 9, | ||
571 | "column_end": 12, | ||
572 | "is_primary": true, | ||
573 | "text": [ | ||
574 | { | ||
575 | "text": " let foo = 42;", | ||
576 | "highlight_start": 9, | ||
577 | "highlight_end": 12 | ||
578 | } | ||
579 | ], | ||
580 | "label": null, | ||
581 | "suggested_replacement": "_foo", | ||
582 | "suggestion_applicability": "MachineApplicable", | ||
583 | "expansion": null | ||
584 | } | ||
585 | ], | ||
586 | "children": [], | ||
587 | "rendered": null | ||
588 | } | ||
589 | ], | ||
590 | "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n" | ||
591 | }"##, | ||
592 | ); | ||
593 | |||
594 | let config = DiagnosticsConfig { | ||
595 | warnings_as_hint: vec!["unused_variables".to_string()], | ||
596 | ..DiagnosticsConfig::default() | ||
597 | }; | ||
598 | |||
599 | let workspace_root = Path::new("/test/"); | ||
600 | let diag = map_rust_diagnostic_to_lsp(&config, &diag, workspace_root); | ||
414 | insta::assert_debug_snapshot!(diag); | 601 | insta::assert_debug_snapshot!(diag); |
415 | } | 602 | } |
416 | 603 | ||
@@ -534,7 +721,7 @@ mod tests { | |||
534 | ); | 721 | ); |
535 | 722 | ||
536 | let workspace_root = Path::new("/test/"); | 723 | let workspace_root = Path::new("/test/"); |
537 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 724 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
538 | insta::assert_debug_snapshot!(diag); | 725 | insta::assert_debug_snapshot!(diag); |
539 | } | 726 | } |
540 | 727 | ||
@@ -654,7 +841,7 @@ mod tests { | |||
654 | ); | 841 | ); |
655 | 842 | ||
656 | let workspace_root = Path::new("/test/"); | 843 | let workspace_root = Path::new("/test/"); |
657 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 844 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
658 | insta::assert_debug_snapshot!(diag); | 845 | insta::assert_debug_snapshot!(diag); |
659 | } | 846 | } |
660 | 847 | ||
@@ -697,7 +884,7 @@ mod tests { | |||
697 | ); | 884 | ); |
698 | 885 | ||
699 | let workspace_root = Path::new("/test/"); | 886 | let workspace_root = Path::new("/test/"); |
700 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 887 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
701 | insta::assert_debug_snapshot!(diag); | 888 | insta::assert_debug_snapshot!(diag); |
702 | } | 889 | } |
703 | 890 | ||
@@ -968,7 +1155,7 @@ mod tests { | |||
968 | ); | 1155 | ); |
969 | 1156 | ||
970 | let workspace_root = Path::new("/test/"); | 1157 | let workspace_root = Path::new("/test/"); |
971 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 1158 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
972 | insta::assert_debug_snapshot!(diag); | 1159 | insta::assert_debug_snapshot!(diag); |
973 | } | 1160 | } |
974 | 1161 | ||
@@ -1197,7 +1384,7 @@ mod tests { | |||
1197 | ); | 1384 | ); |
1198 | 1385 | ||
1199 | let workspace_root = Path::new("/test/"); | 1386 | let workspace_root = Path::new("/test/"); |
1200 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 1387 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
1201 | insta::assert_debug_snapshot!(diag); | 1388 | insta::assert_debug_snapshot!(diag); |
1202 | } | 1389 | } |
1203 | 1390 | ||
@@ -1330,7 +1517,7 @@ mod tests { | |||
1330 | ); | 1517 | ); |
1331 | 1518 | ||
1332 | let workspace_root = Path::new("/test/"); | 1519 | let workspace_root = Path::new("/test/"); |
1333 | let diag = map_rust_diagnostic_to_lsp(&diag, workspace_root); | 1520 | let diag = map_rust_diagnostic_to_lsp(&DiagnosticsConfig::default(), &diag, workspace_root); |
1334 | insta::assert_debug_snapshot!(diag); | 1521 | insta::assert_debug_snapshot!(diag); |
1335 | } | 1522 | } |
1336 | } | 1523 | } |
diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 9d5685d88..d04ef4c61 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs | |||
@@ -12,41 +12,34 @@ use crossbeam_channel::{unbounded, Receiver}; | |||
12 | use lsp_types::Url; | 12 | use lsp_types::Url; |
13 | use parking_lot::RwLock; | 13 | use parking_lot::RwLock; |
14 | use ra_flycheck::{Flycheck, FlycheckConfig}; | 14 | use ra_flycheck::{Flycheck, FlycheckConfig}; |
15 | use ra_ide::{ | 15 | use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, SourceRootId}; |
16 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, SourceRootId, | 16 | use ra_project_model::{CargoWorkspace, ProcMacroClient, ProjectWorkspace, Target}; |
17 | }; | 17 | use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsTask, Watch}; |
18 | use ra_project_model::{ProcMacroClient, ProjectWorkspace}; | ||
19 | use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot, VfsTask, Watch}; | ||
20 | use relative_path::RelativePathBuf; | ||
21 | use stdx::format_to; | 18 | use stdx::format_to; |
22 | 19 | ||
23 | use crate::{ | 20 | use crate::{ |
24 | config::Config, | 21 | config::{Config, FilesWatcher}, |
25 | diagnostics::{CheckFixes, DiagnosticCollection}, | 22 | diagnostics::{CheckFixes, DiagnosticCollection}, |
26 | main_loop::pending_requests::{CompletedRequest, LatestRequests}, | 23 | main_loop::request_metrics::{LatestRequests, RequestMetrics}, |
27 | to_proto::url_from_abs_path, | 24 | to_proto::url_from_abs_path, |
28 | vfs_glob::{Glob, RustPackageFilterBuilder}, | 25 | vfs_glob::{Glob, RustPackageFilterBuilder}, |
29 | LspError, Result, | 26 | LspError, Result, |
30 | }; | 27 | }; |
31 | use ra_db::ExternSourceId; | 28 | use ra_db::{CrateId, ExternSourceId}; |
32 | use rustc_hash::{FxHashMap, FxHashSet}; | 29 | use rustc_hash::{FxHashMap, FxHashSet}; |
33 | 30 | ||
34 | fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) -> Option<Flycheck> { | 31 | fn create_flycheck(workspaces: &[ProjectWorkspace], config: &FlycheckConfig) -> Option<Flycheck> { |
35 | // FIXME: Figure out the multi-workspace situation | 32 | // FIXME: Figure out the multi-workspace situation |
36 | workspaces | 33 | workspaces.iter().find_map(|w| match w { |
37 | .iter() | 34 | ProjectWorkspace::Cargo { cargo, .. } => { |
38 | .find_map(|w| match w { | ||
39 | ProjectWorkspace::Cargo { cargo, .. } => Some(cargo), | ||
40 | ProjectWorkspace::Json { .. } => None, | ||
41 | }) | ||
42 | .map(|cargo| { | ||
43 | let cargo_project_root = cargo.workspace_root().to_path_buf(); | 35 | let cargo_project_root = cargo.workspace_root().to_path_buf(); |
44 | Some(Flycheck::new(config.clone(), cargo_project_root)) | 36 | Some(Flycheck::new(config.clone(), cargo_project_root)) |
45 | }) | 37 | } |
46 | .unwrap_or_else(|| { | 38 | ProjectWorkspace::Json { .. } => { |
47 | log::warn!("Cargo check watching only supported for cargo workspaces, disabling"); | 39 | log::warn!("Cargo check watching only supported for cargo workspaces, disabling"); |
48 | None | 40 | None |
49 | }) | 41 | } |
42 | }) | ||
50 | } | 43 | } |
51 | 44 | ||
52 | /// `GlobalState` is the primary mutable state of the language server | 45 | /// `GlobalState` is the primary mutable state of the language server |
@@ -62,10 +55,10 @@ pub struct GlobalState { | |||
62 | pub analysis_host: AnalysisHost, | 55 | pub analysis_host: AnalysisHost, |
63 | pub vfs: Arc<RwLock<Vfs>>, | 56 | pub vfs: Arc<RwLock<Vfs>>, |
64 | pub task_receiver: Receiver<VfsTask>, | 57 | pub task_receiver: Receiver<VfsTask>, |
65 | pub latest_requests: Arc<RwLock<LatestRequests>>, | ||
66 | pub flycheck: Option<Flycheck>, | 58 | pub flycheck: Option<Flycheck>, |
67 | pub diagnostics: DiagnosticCollection, | 59 | pub diagnostics: DiagnosticCollection, |
68 | pub proc_macro_client: ProcMacroClient, | 60 | pub proc_macro_client: ProcMacroClient, |
61 | pub(crate) latest_requests: Arc<RwLock<LatestRequests>>, | ||
69 | } | 62 | } |
70 | 63 | ||
71 | /// An immutable snapshot of the world's state at a point in time. | 64 | /// An immutable snapshot of the world's state at a point in time. |
@@ -73,8 +66,8 @@ pub struct GlobalStateSnapshot { | |||
73 | pub config: Config, | 66 | pub config: Config, |
74 | pub workspaces: Arc<Vec<ProjectWorkspace>>, | 67 | pub workspaces: Arc<Vec<ProjectWorkspace>>, |
75 | pub analysis: Analysis, | 68 | pub analysis: Analysis, |
76 | pub latest_requests: Arc<RwLock<LatestRequests>>, | ||
77 | pub check_fixes: CheckFixes, | 69 | pub check_fixes: CheckFixes, |
70 | pub(crate) latest_requests: Arc<RwLock<LatestRequests>>, | ||
78 | vfs: Arc<RwLock<Vfs>>, | 71 | vfs: Arc<RwLock<Vfs>>, |
79 | } | 72 | } |
80 | 73 | ||
@@ -83,7 +76,6 @@ impl GlobalState { | |||
83 | workspaces: Vec<ProjectWorkspace>, | 76 | workspaces: Vec<ProjectWorkspace>, |
84 | lru_capacity: Option<usize>, | 77 | lru_capacity: Option<usize>, |
85 | exclude_globs: &[Glob], | 78 | exclude_globs: &[Glob], |
86 | watch: Watch, | ||
87 | config: Config, | 79 | config: Config, |
88 | ) -> GlobalState { | 80 | ) -> GlobalState { |
89 | let mut change = AnalysisChange::new(); | 81 | let mut change = AnalysisChange::new(); |
@@ -118,6 +110,7 @@ impl GlobalState { | |||
118 | 110 | ||
119 | let (task_sender, task_receiver) = unbounded(); | 111 | let (task_sender, task_receiver) = unbounded(); |
120 | let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); | 112 | let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); |
113 | let watch = Watch(matches!(config.files.watcher, FilesWatcher::Notify)); | ||
121 | let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch); | 114 | let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch); |
122 | 115 | ||
123 | let mut extern_source_roots = FxHashMap::default(); | 116 | let mut extern_source_roots = FxHashMap::default(); |
@@ -195,32 +188,18 @@ impl GlobalState { | |||
195 | 188 | ||
196 | /// Returns a vec of libraries | 189 | /// Returns a vec of libraries |
197 | /// FIXME: better API here | 190 | /// FIXME: better API here |
198 | pub fn process_changes( | 191 | pub fn process_changes(&mut self, roots_scanned: &mut usize) -> bool { |
199 | &mut self, | ||
200 | roots_scanned: &mut usize, | ||
201 | ) -> Option<Vec<(SourceRootId, Vec<(FileId, RelativePathBuf, Arc<String>)>)>> { | ||
202 | let changes = self.vfs.write().commit_changes(); | 192 | let changes = self.vfs.write().commit_changes(); |
203 | if changes.is_empty() { | 193 | if changes.is_empty() { |
204 | return None; | 194 | return false; |
205 | } | 195 | } |
206 | let mut libs = Vec::new(); | ||
207 | let mut change = AnalysisChange::new(); | 196 | let mut change = AnalysisChange::new(); |
208 | for c in changes { | 197 | for c in changes { |
209 | match c { | 198 | match c { |
210 | VfsChange::AddRoot { root, files } => { | 199 | VfsChange::AddRoot { root, files } => { |
211 | let root_path = self.vfs.read().root2path(root); | 200 | *roots_scanned += 1; |
212 | let is_local = self.local_roots.iter().any(|r| root_path.starts_with(r)); | 201 | for (file, path, text) in files { |
213 | if is_local { | 202 | change.add_file(SourceRootId(root.0), FileId(file.0), path, text); |
214 | *roots_scanned += 1; | ||
215 | for (file, path, text) in files { | ||
216 | change.add_file(SourceRootId(root.0), FileId(file.0), path, text); | ||
217 | } | ||
218 | } else { | ||
219 | let files = files | ||
220 | .into_iter() | ||
221 | .map(|(vfsfile, path, text)| (FileId(vfsfile.0), path, text)) | ||
222 | .collect(); | ||
223 | libs.push((SourceRootId(root.0), files)); | ||
224 | } | 203 | } |
225 | } | 204 | } |
226 | VfsChange::AddFile { root, file, path, text } => { | 205 | VfsChange::AddFile { root, file, path, text } => { |
@@ -235,13 +214,7 @@ impl GlobalState { | |||
235 | } | 214 | } |
236 | } | 215 | } |
237 | self.analysis_host.apply_change(change); | 216 | self.analysis_host.apply_change(change); |
238 | Some(libs) | 217 | true |
239 | } | ||
240 | |||
241 | pub fn add_lib(&mut self, data: LibraryData) { | ||
242 | let mut change = AnalysisChange::new(); | ||
243 | change.add_library(data); | ||
244 | self.analysis_host.apply_change(change); | ||
245 | } | 218 | } |
246 | 219 | ||
247 | pub fn snapshot(&self) -> GlobalStateSnapshot { | 220 | pub fn snapshot(&self) -> GlobalStateSnapshot { |
@@ -263,7 +236,7 @@ impl GlobalState { | |||
263 | self.analysis_host.collect_garbage() | 236 | self.analysis_host.collect_garbage() |
264 | } | 237 | } |
265 | 238 | ||
266 | pub fn complete_request(&mut self, request: CompletedRequest) { | 239 | pub(crate) fn complete_request(&mut self, request: RequestMetrics) { |
267 | self.latest_requests.write().record(request) | 240 | self.latest_requests.write().record(request) |
268 | } | 241 | } |
269 | } | 242 | } |
@@ -290,20 +263,31 @@ impl GlobalStateSnapshot { | |||
290 | file_id_to_url(&self.vfs.read(), id) | 263 | file_id_to_url(&self.vfs.read(), id) |
291 | } | 264 | } |
292 | 265 | ||
293 | pub fn file_id_to_path(&self, id: FileId) -> PathBuf { | ||
294 | self.vfs.read().file2path(VfsFile(id.0)) | ||
295 | } | ||
296 | |||
297 | pub fn file_line_endings(&self, id: FileId) -> LineEndings { | 266 | pub fn file_line_endings(&self, id: FileId) -> LineEndings { |
298 | self.vfs.read().file_line_endings(VfsFile(id.0)) | 267 | self.vfs.read().file_line_endings(VfsFile(id.0)) |
299 | } | 268 | } |
300 | 269 | ||
301 | pub fn path_to_url(&self, root: SourceRootId, path: &RelativePathBuf) -> Url { | 270 | pub fn anchored_path(&self, file_id: FileId, path: &str) -> Url { |
302 | let base = self.vfs.read().root2path(VfsRoot(root.0)); | 271 | let mut base = self.vfs.read().file2path(VfsFile(file_id.0)); |
303 | let path = path.to_path(base); | 272 | base.pop(); |
273 | let path = base.join(path); | ||
304 | url_from_abs_path(&path) | 274 | url_from_abs_path(&path) |
305 | } | 275 | } |
306 | 276 | ||
277 | pub(crate) fn cargo_target_for_crate_root( | ||
278 | &self, | ||
279 | crate_id: CrateId, | ||
280 | ) -> Option<(&CargoWorkspace, Target)> { | ||
281 | let file_id = self.analysis().crate_root(crate_id).ok()?; | ||
282 | let path = self.vfs.read().file2path(VfsFile(file_id.0)); | ||
283 | self.workspaces.iter().find_map(|ws| match ws { | ||
284 | ProjectWorkspace::Cargo { cargo, .. } => { | ||
285 | cargo.target_by_root(&path).map(|it| (cargo, it)) | ||
286 | } | ||
287 | ProjectWorkspace::Json { .. } => None, | ||
288 | }) | ||
289 | } | ||
290 | |||
307 | pub fn status(&self) -> String { | 291 | pub fn status(&self) -> String { |
308 | let mut buf = String::new(); | 292 | let mut buf = String::new(); |
309 | if self.workspaces.is_empty() { | 293 | if self.workspaces.is_empty() { |
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index 609cb69d3..64e70955f 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs | |||
@@ -32,7 +32,7 @@ mod semantic_tokens; | |||
32 | 32 | ||
33 | use serde::de::DeserializeOwned; | 33 | use serde::de::DeserializeOwned; |
34 | 34 | ||
35 | pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; | 35 | pub type Result<T, E = Box<dyn std::error::Error + Send + Sync>> = std::result::Result<T, E>; |
36 | pub use crate::{ | 36 | pub use crate::{ |
37 | caps::server_capabilities, | 37 | caps::server_capabilities, |
38 | main_loop::LspError, | 38 | main_loop::LspError, |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index cc9bb1726..674b1323b 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -3,7 +3,7 @@ | |||
3 | 3 | ||
4 | mod handlers; | 4 | mod handlers; |
5 | mod subscriptions; | 5 | mod subscriptions; |
6 | pub(crate) mod pending_requests; | 6 | pub(crate) mod request_metrics; |
7 | 7 | ||
8 | use std::{ | 8 | use std::{ |
9 | borrow::Cow, | 9 | borrow::Cow, |
@@ -17,19 +17,19 @@ use std::{ | |||
17 | }; | 17 | }; |
18 | 18 | ||
19 | use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; | 19 | use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; |
20 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | 20 | use lsp_server::{ |
21 | Connection, ErrorCode, Message, Notification, ReqQueue, Request, RequestId, Response, | ||
22 | }; | ||
21 | use lsp_types::{ | 23 | use lsp_types::{ |
22 | DidChangeTextDocumentParams, NumberOrString, TextDocumentContentChangeEvent, WorkDoneProgress, | 24 | request::Request as _, DidChangeTextDocumentParams, NumberOrString, |
23 | WorkDoneProgressBegin, WorkDoneProgressCreateParams, WorkDoneProgressEnd, | 25 | TextDocumentContentChangeEvent, WorkDoneProgress, WorkDoneProgressBegin, |
24 | WorkDoneProgressReport, | 26 | WorkDoneProgressCreateParams, WorkDoneProgressEnd, WorkDoneProgressReport, |
25 | }; | 27 | }; |
26 | use ra_flycheck::{CheckTask, Status}; | 28 | use ra_flycheck::{CheckTask, Status}; |
27 | use ra_ide::{Canceled, FileId, LibraryData, LineIndex, SourceRootId}; | 29 | use ra_ide::{Canceled, FileId, LineIndex}; |
28 | use ra_prof::profile; | 30 | use ra_prof::profile; |
29 | use ra_project_model::{PackageRoot, ProjectWorkspace}; | 31 | use ra_project_model::{PackageRoot, ProjectWorkspace}; |
30 | use ra_vfs::{VfsTask, Watch}; | 32 | use ra_vfs::VfsTask; |
31 | use relative_path::RelativePathBuf; | ||
32 | use rustc_hash::FxHashSet; | ||
33 | use serde::{de::DeserializeOwned, Serialize}; | 33 | use serde::{de::DeserializeOwned, Serialize}; |
34 | use threadpool::ThreadPool; | 34 | use threadpool::ThreadPool; |
35 | 35 | ||
@@ -39,10 +39,7 @@ use crate::{ | |||
39 | from_proto, | 39 | from_proto, |
40 | global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot}, | 40 | global_state::{file_id_to_url, GlobalState, GlobalStateSnapshot}, |
41 | lsp_ext, | 41 | lsp_ext, |
42 | main_loop::{ | 42 | main_loop::{request_metrics::RequestMetrics, subscriptions::Subscriptions}, |
43 | pending_requests::{PendingRequest, PendingRequests}, | ||
44 | subscriptions::Subscriptions, | ||
45 | }, | ||
46 | Result, | 43 | Result, |
47 | }; | 44 | }; |
48 | 45 | ||
@@ -154,32 +151,25 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { | |||
154 | register_options: Some(serde_json::to_value(registration_options).unwrap()), | 151 | register_options: Some(serde_json::to_value(registration_options).unwrap()), |
155 | }; | 152 | }; |
156 | let params = lsp_types::RegistrationParams { registrations: vec![registration] }; | 153 | let params = lsp_types::RegistrationParams { registrations: vec![registration] }; |
157 | let request = request_new::<lsp_types::request::RegisterCapability>( | 154 | let request = loop_state.req_queue.outgoing.register( |
158 | loop_state.next_request_id(), | 155 | lsp_types::request::RegisterCapability::METHOD.to_string(), |
159 | params, | 156 | params, |
157 | DO_NOTHING, | ||
160 | ); | 158 | ); |
161 | connection.sender.send(request.into()).unwrap(); | 159 | connection.sender.send(request.into()).unwrap(); |
162 | } | 160 | } |
163 | 161 | ||
164 | GlobalState::new( | 162 | GlobalState::new(workspaces, config.lru_capacity, &globs, config) |
165 | workspaces, | ||
166 | config.lru_capacity, | ||
167 | &globs, | ||
168 | Watch(matches!(config.files.watcher, FilesWatcher::Notify)), | ||
169 | config, | ||
170 | ) | ||
171 | }; | 163 | }; |
172 | 164 | ||
173 | loop_state.roots_total = global_state.vfs.read().n_roots(); | 165 | loop_state.roots_total = global_state.vfs.read().n_roots(); |
174 | 166 | ||
175 | let pool = ThreadPool::default(); | 167 | let pool = ThreadPool::default(); |
176 | let (task_sender, task_receiver) = unbounded::<Task>(); | 168 | let (task_sender, task_receiver) = unbounded::<Task>(); |
177 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); | ||
178 | 169 | ||
179 | log::info!("server initialized, serving requests"); | 170 | log::info!("server initialized, serving requests"); |
180 | { | 171 | { |
181 | let task_sender = task_sender; | 172 | let task_sender = task_sender; |
182 | let libdata_sender = libdata_sender; | ||
183 | loop { | 173 | loop { |
184 | log::trace!("selecting"); | 174 | log::trace!("selecting"); |
185 | let event = select! { | 175 | let event = select! { |
@@ -192,7 +182,6 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { | |||
192 | Ok(task) => Event::Vfs(task), | 182 | Ok(task) => Event::Vfs(task), |
193 | Err(RecvError) => return Err("vfs died".into()), | 183 | Err(RecvError) => return Err("vfs died".into()), |
194 | }, | 184 | }, |
195 | recv(libdata_receiver) -> data => Event::Lib(data.unwrap()), | ||
196 | recv(global_state.flycheck.as_ref().map_or(&never(), |it| &it.task_recv)) -> task => match task { | 185 | recv(global_state.flycheck.as_ref().map_or(&never(), |it| &it.task_recv)) -> task => match task { |
197 | Ok(task) => Event::CheckWatcher(task), | 186 | Ok(task) => Event::CheckWatcher(task), |
198 | Err(RecvError) => return Err("check watcher died".into()), | 187 | Err(RecvError) => return Err("check watcher died".into()), |
@@ -203,23 +192,14 @@ pub fn main_loop(config: Config, connection: Connection) -> Result<()> { | |||
203 | break; | 192 | break; |
204 | }; | 193 | }; |
205 | } | 194 | } |
206 | loop_turn( | 195 | loop_turn(&pool, &task_sender, &connection, &mut global_state, &mut loop_state, event)?; |
207 | &pool, | ||
208 | &task_sender, | ||
209 | &libdata_sender, | ||
210 | &connection, | ||
211 | &mut global_state, | ||
212 | &mut loop_state, | ||
213 | event, | ||
214 | )?; | ||
215 | } | 196 | } |
216 | } | 197 | } |
217 | global_state.analysis_host.request_cancellation(); | 198 | global_state.analysis_host.request_cancellation(); |
218 | log::info!("waiting for tasks to finish..."); | 199 | log::info!("waiting for tasks to finish..."); |
219 | task_receiver.into_iter().for_each(|task| { | 200 | task_receiver.into_iter().for_each(|task| { |
220 | on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut global_state) | 201 | on_task(task, &connection.sender, &mut loop_state.req_queue.incoming, &mut global_state) |
221 | }); | 202 | }); |
222 | libdata_receiver.into_iter().for_each(drop); | ||
223 | log::info!("...tasks have finished"); | 203 | log::info!("...tasks have finished"); |
224 | log::info!("joining threadpool..."); | 204 | log::info!("joining threadpool..."); |
225 | pool.join(); | 205 | pool.join(); |
@@ -243,7 +223,6 @@ enum Event { | |||
243 | Msg(Message), | 223 | Msg(Message), |
244 | Task(Task), | 224 | Task(Task), |
245 | Vfs(VfsTask), | 225 | Vfs(VfsTask), |
246 | Lib(LibraryData), | ||
247 | CheckWatcher(CheckTask), | 226 | CheckWatcher(CheckTask), |
248 | } | 227 | } |
249 | 228 | ||
@@ -279,43 +258,28 @@ impl fmt::Debug for Event { | |||
279 | Event::Msg(it) => fmt::Debug::fmt(it, f), | 258 | Event::Msg(it) => fmt::Debug::fmt(it, f), |
280 | Event::Task(it) => fmt::Debug::fmt(it, f), | 259 | Event::Task(it) => fmt::Debug::fmt(it, f), |
281 | Event::Vfs(it) => fmt::Debug::fmt(it, f), | 260 | Event::Vfs(it) => fmt::Debug::fmt(it, f), |
282 | Event::Lib(it) => fmt::Debug::fmt(it, f), | ||
283 | Event::CheckWatcher(it) => fmt::Debug::fmt(it, f), | 261 | Event::CheckWatcher(it) => fmt::Debug::fmt(it, f), |
284 | } | 262 | } |
285 | } | 263 | } |
286 | } | 264 | } |
287 | 265 | ||
288 | #[derive(Debug, Default)] | 266 | type ReqHandler = fn(&mut GlobalState, Response); |
267 | const DO_NOTHING: ReqHandler = |_, _| (); | ||
268 | type Incoming = lsp_server::Incoming<(&'static str, Instant)>; | ||
269 | |||
270 | #[derive(Default)] | ||
289 | struct LoopState { | 271 | struct LoopState { |
290 | next_request_id: u64, | 272 | req_queue: ReqQueue<(&'static str, Instant), ReqHandler>, |
291 | pending_responses: FxHashSet<RequestId>, | ||
292 | pending_requests: PendingRequests, | ||
293 | subscriptions: Subscriptions, | 273 | subscriptions: Subscriptions, |
294 | // We try not to index more than MAX_IN_FLIGHT_LIBS libraries at the same | ||
295 | // time to always have a thread ready to react to input. | ||
296 | in_flight_libraries: usize, | ||
297 | pending_libraries: Vec<(SourceRootId, Vec<(FileId, RelativePathBuf, Arc<String>)>)>, | ||
298 | workspace_loaded: bool, | 274 | workspace_loaded: bool, |
299 | roots_progress_reported: Option<usize>, | 275 | roots_progress_reported: Option<usize>, |
300 | roots_scanned: usize, | 276 | roots_scanned: usize, |
301 | roots_total: usize, | 277 | roots_total: usize, |
302 | configuration_request_id: Option<RequestId>, | ||
303 | } | ||
304 | |||
305 | impl LoopState { | ||
306 | fn next_request_id(&mut self) -> RequestId { | ||
307 | self.next_request_id += 1; | ||
308 | let res: RequestId = self.next_request_id.into(); | ||
309 | let inserted = self.pending_responses.insert(res.clone()); | ||
310 | assert!(inserted); | ||
311 | res | ||
312 | } | ||
313 | } | 278 | } |
314 | 279 | ||
315 | fn loop_turn( | 280 | fn loop_turn( |
316 | pool: &ThreadPool, | 281 | pool: &ThreadPool, |
317 | task_sender: &Sender<Task>, | 282 | task_sender: &Sender<Task>, |
318 | libdata_sender: &Sender<LibraryData>, | ||
319 | connection: &Connection, | 283 | connection: &Connection, |
320 | global_state: &mut GlobalState, | 284 | global_state: &mut GlobalState, |
321 | loop_state: &mut LoopState, | 285 | loop_state: &mut LoopState, |
@@ -333,23 +297,17 @@ fn loop_turn( | |||
333 | 297 | ||
334 | match event { | 298 | match event { |
335 | Event::Task(task) => { | 299 | Event::Task(task) => { |
336 | on_task(task, &connection.sender, &mut loop_state.pending_requests, global_state); | 300 | on_task(task, &connection.sender, &mut loop_state.req_queue.incoming, global_state); |
337 | global_state.maybe_collect_garbage(); | 301 | global_state.maybe_collect_garbage(); |
338 | } | 302 | } |
339 | Event::Vfs(task) => { | 303 | Event::Vfs(task) => { |
340 | global_state.vfs.write().handle_task(task); | 304 | global_state.vfs.write().handle_task(task); |
341 | } | 305 | } |
342 | Event::Lib(lib) => { | ||
343 | global_state.add_lib(lib); | ||
344 | global_state.maybe_collect_garbage(); | ||
345 | loop_state.in_flight_libraries -= 1; | ||
346 | loop_state.roots_scanned += 1; | ||
347 | } | ||
348 | Event::CheckWatcher(task) => on_check_task(task, global_state, task_sender)?, | 306 | Event::CheckWatcher(task) => on_check_task(task, global_state, task_sender)?, |
349 | Event::Msg(msg) => match msg { | 307 | Event::Msg(msg) => match msg { |
350 | Message::Request(req) => on_request( | 308 | Message::Request(req) => on_request( |
351 | global_state, | 309 | global_state, |
352 | &mut loop_state.pending_requests, | 310 | &mut loop_state.req_queue.incoming, |
353 | pool, | 311 | pool, |
354 | task_sender, | 312 | task_sender, |
355 | &connection.sender, | 313 | &connection.sender, |
@@ -360,66 +318,18 @@ fn loop_turn( | |||
360 | on_notification(&connection.sender, global_state, loop_state, not)?; | 318 | on_notification(&connection.sender, global_state, loop_state, not)?; |
361 | } | 319 | } |
362 | Message::Response(resp) => { | 320 | Message::Response(resp) => { |
363 | let removed = loop_state.pending_responses.remove(&resp.id); | 321 | let handler = loop_state.req_queue.outgoing.complete(resp.id.clone()); |
364 | if !removed { | 322 | handler(global_state, resp) |
365 | log::error!("unexpected response: {:?}", resp) | ||
366 | } | ||
367 | |||
368 | if Some(&resp.id) == loop_state.configuration_request_id.as_ref() { | ||
369 | loop_state.configuration_request_id = None; | ||
370 | log::debug!("config update response: '{:?}", resp); | ||
371 | let Response { error, result, .. } = resp; | ||
372 | |||
373 | match (error, result) { | ||
374 | (Some(err), _) => { | ||
375 | log::error!("failed to fetch the server settings: {:?}", err) | ||
376 | } | ||
377 | (None, Some(configs)) => { | ||
378 | if let Some(new_config) = configs.get(0) { | ||
379 | let mut config = global_state.config.clone(); | ||
380 | config.update(&new_config); | ||
381 | global_state.update_configuration(config); | ||
382 | } | ||
383 | } | ||
384 | (None, None) => { | ||
385 | log::error!("received empty server settings response from the client") | ||
386 | } | ||
387 | } | ||
388 | } | ||
389 | } | 323 | } |
390 | }, | 324 | }, |
391 | }; | 325 | }; |
392 | 326 | ||
393 | let mut state_changed = false; | 327 | let mut state_changed = global_state.process_changes(&mut loop_state.roots_scanned); |
394 | if let Some(changes) = global_state.process_changes(&mut loop_state.roots_scanned) { | ||
395 | state_changed = true; | ||
396 | loop_state.pending_libraries.extend(changes); | ||
397 | } | ||
398 | |||
399 | let max_in_flight_libs = pool.max_count().saturating_sub(2).max(1); | ||
400 | while loop_state.in_flight_libraries < max_in_flight_libs { | ||
401 | let (root, files) = match loop_state.pending_libraries.pop() { | ||
402 | Some(it) => it, | ||
403 | None => break, | ||
404 | }; | ||
405 | |||
406 | loop_state.in_flight_libraries += 1; | ||
407 | let sender = libdata_sender.clone(); | ||
408 | pool.execute(move || { | ||
409 | log::info!("indexing {:?} ... ", root); | ||
410 | let data = LibraryData::prepare(root, files); | ||
411 | sender.send(data).unwrap(); | ||
412 | }); | ||
413 | } | ||
414 | 328 | ||
415 | let show_progress = | 329 | let show_progress = |
416 | !loop_state.workspace_loaded && global_state.config.client_caps.work_done_progress; | 330 | !loop_state.workspace_loaded && global_state.config.client_caps.work_done_progress; |
417 | 331 | ||
418 | if !loop_state.workspace_loaded | 332 | if !loop_state.workspace_loaded && loop_state.roots_scanned == loop_state.roots_total { |
419 | && loop_state.roots_scanned == loop_state.roots_total | ||
420 | && loop_state.pending_libraries.is_empty() | ||
421 | && loop_state.in_flight_libraries == 0 | ||
422 | { | ||
423 | state_changed = true; | 333 | state_changed = true; |
424 | loop_state.workspace_loaded = true; | 334 | loop_state.workspace_loaded = true; |
425 | if let Some(flycheck) = &global_state.flycheck { | 335 | if let Some(flycheck) = &global_state.flycheck { |
@@ -463,14 +373,19 @@ fn loop_turn( | |||
463 | fn on_task( | 373 | fn on_task( |
464 | task: Task, | 374 | task: Task, |
465 | msg_sender: &Sender<Message>, | 375 | msg_sender: &Sender<Message>, |
466 | pending_requests: &mut PendingRequests, | 376 | incoming_requests: &mut Incoming, |
467 | state: &mut GlobalState, | 377 | state: &mut GlobalState, |
468 | ) { | 378 | ) { |
469 | match task { | 379 | match task { |
470 | Task::Respond(response) => { | 380 | Task::Respond(response) => { |
471 | if let Some(completed) = pending_requests.finish(&response.id) { | 381 | if let Some((method, start)) = incoming_requests.complete(response.id.clone()) { |
472 | log::info!("handled req#{} in {:?}", completed.id, completed.duration); | 382 | let duration = start.elapsed(); |
473 | state.complete_request(completed); | 383 | log::info!("handled req#{} in {:?}", response.id, duration); |
384 | state.complete_request(RequestMetrics { | ||
385 | id: response.id.clone(), | ||
386 | method: method.to_string(), | ||
387 | duration, | ||
388 | }); | ||
474 | msg_sender.send(response.into()).unwrap(); | 389 | msg_sender.send(response.into()).unwrap(); |
475 | } | 390 | } |
476 | } | 391 | } |
@@ -483,7 +398,7 @@ fn on_task( | |||
483 | 398 | ||
484 | fn on_request( | 399 | fn on_request( |
485 | global_state: &mut GlobalState, | 400 | global_state: &mut GlobalState, |
486 | pending_requests: &mut PendingRequests, | 401 | incoming_requests: &mut Incoming, |
487 | pool: &ThreadPool, | 402 | pool: &ThreadPool, |
488 | task_sender: &Sender<Task>, | 403 | task_sender: &Sender<Task>, |
489 | msg_sender: &Sender<Message>, | 404 | msg_sender: &Sender<Message>, |
@@ -496,7 +411,7 @@ fn on_request( | |||
496 | global_state, | 411 | global_state, |
497 | task_sender, | 412 | task_sender, |
498 | msg_sender, | 413 | msg_sender, |
499 | pending_requests, | 414 | incoming_requests, |
500 | request_received, | 415 | request_received, |
501 | }; | 416 | }; |
502 | pool_dispatcher | 417 | pool_dispatcher |
@@ -560,12 +475,7 @@ fn on_notification( | |||
560 | NumberOrString::Number(id) => id.into(), | 475 | NumberOrString::Number(id) => id.into(), |
561 | NumberOrString::String(id) => id.into(), | 476 | NumberOrString::String(id) => id.into(), |
562 | }; | 477 | }; |
563 | if loop_state.pending_requests.cancel(&id) { | 478 | if let Some(response) = loop_state.req_queue.incoming.cancel(id) { |
564 | let response = Response::new_err( | ||
565 | id, | ||
566 | ErrorCode::RequestCanceled as i32, | ||
567 | "canceled by client".to_string(), | ||
568 | ); | ||
569 | msg_sender.send(response.into()).unwrap() | 479 | msg_sender.send(response.into()).unwrap() |
570 | } | 480 | } |
571 | return Ok(()); | 481 | return Ok(()); |
@@ -628,18 +538,36 @@ fn on_notification( | |||
628 | Ok(_) => { | 538 | Ok(_) => { |
629 | // As stated in https://github.com/microsoft/language-server-protocol/issues/676, | 539 | // As stated in https://github.com/microsoft/language-server-protocol/issues/676, |
630 | // this notification's parameters should be ignored and the actual config queried separately. | 540 | // this notification's parameters should be ignored and the actual config queried separately. |
631 | let request_id = loop_state.next_request_id(); | 541 | let request = loop_state.req_queue.outgoing.register( |
632 | let request = request_new::<lsp_types::request::WorkspaceConfiguration>( | 542 | lsp_types::request::WorkspaceConfiguration::METHOD.to_string(), |
633 | request_id.clone(), | ||
634 | lsp_types::ConfigurationParams { | 543 | lsp_types::ConfigurationParams { |
635 | items: vec![lsp_types::ConfigurationItem { | 544 | items: vec![lsp_types::ConfigurationItem { |
636 | scope_uri: None, | 545 | scope_uri: None, |
637 | section: Some("rust-analyzer".to_string()), | 546 | section: Some("rust-analyzer".to_string()), |
638 | }], | 547 | }], |
639 | }, | 548 | }, |
549 | |global_state, resp| { | ||
550 | log::debug!("config update response: '{:?}", resp); | ||
551 | let Response { error, result, .. } = resp; | ||
552 | |||
553 | match (error, result) { | ||
554 | (Some(err), _) => { | ||
555 | log::error!("failed to fetch the server settings: {:?}", err) | ||
556 | } | ||
557 | (None, Some(configs)) => { | ||
558 | if let Some(new_config) = configs.get(0) { | ||
559 | let mut config = global_state.config.clone(); | ||
560 | config.update(&new_config); | ||
561 | global_state.update_configuration(config); | ||
562 | } | ||
563 | } | ||
564 | (None, None) => { | ||
565 | log::error!("received empty server settings response from the client") | ||
566 | } | ||
567 | } | ||
568 | }, | ||
640 | ); | 569 | ); |
641 | msg_sender.send(request.into())?; | 570 | msg_sender.send(request.into())?; |
642 | loop_state.configuration_request_id = Some(request_id); | ||
643 | 571 | ||
644 | return Ok(()); | 572 | return Ok(()); |
645 | } | 573 | } |
@@ -669,14 +597,11 @@ fn apply_document_changes( | |||
669 | mut line_index: Cow<'_, LineIndex>, | 597 | mut line_index: Cow<'_, LineIndex>, |
670 | content_changes: Vec<TextDocumentContentChangeEvent>, | 598 | content_changes: Vec<TextDocumentContentChangeEvent>, |
671 | ) { | 599 | ) { |
672 | // Remove when https://github.com/rust-analyzer/rust-analyzer/issues/4263 is fixed. | ||
673 | let backup_text = old_text.clone(); | ||
674 | let backup_changes = content_changes.clone(); | ||
675 | |||
676 | // The changes we got must be applied sequentially, but can cross lines so we | 600 | // The changes we got must be applied sequentially, but can cross lines so we |
677 | // have to keep our line index updated. | 601 | // have to keep our line index updated. |
678 | // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we | 602 | // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we |
679 | // remember the last valid line in the index and only rebuild it if needed. | 603 | // remember the last valid line in the index and only rebuild it if needed. |
604 | // The VFS will normalize the end of lines to `\n`. | ||
680 | enum IndexValid { | 605 | enum IndexValid { |
681 | All, | 606 | All, |
682 | UpToLineExclusive(u64), | 607 | UpToLineExclusive(u64), |
@@ -700,19 +625,7 @@ fn apply_document_changes( | |||
700 | } | 625 | } |
701 | index_valid = IndexValid::UpToLineExclusive(range.start.line); | 626 | index_valid = IndexValid::UpToLineExclusive(range.start.line); |
702 | let range = from_proto::text_range(&line_index, range); | 627 | let range = from_proto::text_range(&line_index, range); |
703 | let mut text = old_text.to_owned(); | 628 | old_text.replace_range(Range::<usize>::from(range), &change.text); |
704 | match std::panic::catch_unwind(move || { | ||
705 | text.replace_range(Range::<usize>::from(range), &change.text); | ||
706 | text | ||
707 | }) { | ||
708 | Ok(t) => *old_text = t, | ||
709 | Err(e) => { | ||
710 | eprintln!("Bug in incremental text synchronization. Please report the following output on https://github.com/rust-analyzer/rust-analyzer/issues/4263"); | ||
711 | dbg!(&backup_text); | ||
712 | dbg!(&backup_changes); | ||
713 | std::panic::resume_unwind(e); | ||
714 | } | ||
715 | } | ||
716 | } | 629 | } |
717 | None => { | 630 | None => { |
718 | *old_text = change.text; | 631 | *old_text = change.text; |
@@ -734,6 +647,7 @@ fn on_check_task( | |||
734 | 647 | ||
735 | CheckTask::AddDiagnostic { workspace_root, diagnostic } => { | 648 | CheckTask::AddDiagnostic { workspace_root, diagnostic } => { |
736 | let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( | 649 | let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp( |
650 | &global_state.config.diagnostics, | ||
737 | &diagnostic, | 651 | &diagnostic, |
738 | &workspace_root, | 652 | &workspace_root, |
739 | ); | 653 | ); |
@@ -822,13 +736,14 @@ fn send_startup_progress(sender: &Sender<Message>, loop_state: &mut LoopState) { | |||
822 | 736 | ||
823 | match (prev, loop_state.workspace_loaded) { | 737 | match (prev, loop_state.workspace_loaded) { |
824 | (None, false) => { | 738 | (None, false) => { |
825 | let work_done_progress_create = request_new::<lsp_types::request::WorkDoneProgressCreate>( | 739 | let request = loop_state.req_queue.outgoing.register( |
826 | loop_state.next_request_id(), | 740 | lsp_types::request::WorkDoneProgressCreate::METHOD.to_string(), |
827 | WorkDoneProgressCreateParams { | 741 | WorkDoneProgressCreateParams { |
828 | token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()), | 742 | token: lsp_types::ProgressToken::String("rustAnalyzer/startup".into()), |
829 | }, | 743 | }, |
744 | DO_NOTHING, | ||
830 | ); | 745 | ); |
831 | sender.send(work_done_progress_create.into()).unwrap(); | 746 | sender.send(request.into()).unwrap(); |
832 | send_startup_progress_notif( | 747 | send_startup_progress_notif( |
833 | sender, | 748 | sender, |
834 | WorkDoneProgress::Begin(WorkDoneProgressBegin { | 749 | WorkDoneProgress::Begin(WorkDoneProgressBegin { |
@@ -870,7 +785,7 @@ struct PoolDispatcher<'a> { | |||
870 | req: Option<Request>, | 785 | req: Option<Request>, |
871 | pool: &'a ThreadPool, | 786 | pool: &'a ThreadPool, |
872 | global_state: &'a mut GlobalState, | 787 | global_state: &'a mut GlobalState, |
873 | pending_requests: &'a mut PendingRequests, | 788 | incoming_requests: &'a mut Incoming, |
874 | msg_sender: &'a Sender<Message>, | 789 | msg_sender: &'a Sender<Message>, |
875 | task_sender: &'a Sender<Task>, | 790 | task_sender: &'a Sender<Task>, |
876 | request_received: Instant, | 791 | request_received: Instant, |
@@ -899,7 +814,7 @@ impl<'a> PoolDispatcher<'a> { | |||
899 | result_to_task::<R>(id, result) | 814 | result_to_task::<R>(id, result) |
900 | }) | 815 | }) |
901 | .map_err(|_| format!("sync task {:?} panicked", R::METHOD))?; | 816 | .map_err(|_| format!("sync task {:?} panicked", R::METHOD))?; |
902 | on_task(task, self.msg_sender, self.pending_requests, self.global_state); | 817 | on_task(task, self.msg_sender, self.incoming_requests, self.global_state); |
903 | Ok(self) | 818 | Ok(self) |
904 | } | 819 | } |
905 | 820 | ||
@@ -946,11 +861,7 @@ impl<'a> PoolDispatcher<'a> { | |||
946 | return None; | 861 | return None; |
947 | } | 862 | } |
948 | }; | 863 | }; |
949 | self.pending_requests.start(PendingRequest { | 864 | self.incoming_requests.register(id.clone(), (R::METHOD, self.request_received)); |
950 | id: id.clone(), | ||
951 | method: R::METHOD.to_string(), | ||
952 | received: self.request_received, | ||
953 | }); | ||
954 | Some((id, params)) | 865 | Some((id, params)) |
955 | } | 866 | } |
956 | 867 | ||
@@ -1063,14 +974,6 @@ where | |||
1063 | Notification::new(N::METHOD.to_string(), params) | 974 | Notification::new(N::METHOD.to_string(), params) |
1064 | } | 975 | } |
1065 | 976 | ||
1066 | fn request_new<R>(id: RequestId, params: R::Params) -> Request | ||
1067 | where | ||
1068 | R: lsp_types::request::Request, | ||
1069 | R::Params: Serialize, | ||
1070 | { | ||
1071 | Request::new(id, R::METHOD.to_string(), params) | ||
1072 | } | ||
1073 | |||
1074 | #[cfg(test)] | 977 | #[cfg(test)] |
1075 | mod tests { | 978 | mod tests { |
1076 | use std::borrow::Cow; | 979 | use std::borrow::Cow; |
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs index b34b529b5..2d7e649d2 100644 --- a/crates/rust-analyzer/src/main_loop/handlers.rs +++ b/crates/rust-analyzer/src/main_loop/handlers.rs | |||
@@ -18,8 +18,8 @@ use lsp_types::{ | |||
18 | TextDocumentIdentifier, Url, WorkspaceEdit, | 18 | TextDocumentIdentifier, Url, WorkspaceEdit, |
19 | }; | 19 | }; |
20 | use ra_ide::{ | 20 | use ra_ide::{ |
21 | FileId, FilePosition, FileRange, HoverAction, Query, RangeInfo, Runnable, RunnableKind, | 21 | FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query, |
22 | SearchScope, TextEdit, | 22 | RangeInfo, Runnable, RunnableKind, SearchScope, TextEdit, |
23 | }; | 23 | }; |
24 | use ra_prof::profile; | 24 | use ra_prof::profile; |
25 | use ra_project_model::TargetKind; | 25 | use ra_project_model::TargetKind; |
@@ -1150,6 +1150,23 @@ fn debug_single_command(runnable: &lsp_ext::Runnable) -> Command { | |||
1150 | } | 1150 | } |
1151 | } | 1151 | } |
1152 | 1152 | ||
1153 | fn goto_location_command(snap: &GlobalStateSnapshot, nav: &NavigationTarget) -> Option<Command> { | ||
1154 | let value = if snap.config.client_caps.location_link { | ||
1155 | let link = to_proto::location_link(snap, None, nav.clone()).ok()?; | ||
1156 | to_value(link).ok()? | ||
1157 | } else { | ||
1158 | let range = FileRange { file_id: nav.file_id(), range: nav.range() }; | ||
1159 | let location = to_proto::location(snap, range).ok()?; | ||
1160 | to_value(location).ok()? | ||
1161 | }; | ||
1162 | |||
1163 | Some(Command { | ||
1164 | title: nav.name().to_string(), | ||
1165 | command: "rust-analyzer.gotoLocation".into(), | ||
1166 | arguments: Some(vec![value]), | ||
1167 | }) | ||
1168 | } | ||
1169 | |||
1153 | fn to_command_link(command: Command, tooltip: String) -> lsp_ext::CommandLink { | 1170 | fn to_command_link(command: Command, tooltip: String) -> lsp_ext::CommandLink { |
1154 | lsp_ext::CommandLink { tooltip: Some(tooltip), command } | 1171 | lsp_ext::CommandLink { tooltip: Some(tooltip), command } |
1155 | } | 1172 | } |
@@ -1180,13 +1197,13 @@ fn show_impl_command_link( | |||
1180 | None | 1197 | None |
1181 | } | 1198 | } |
1182 | 1199 | ||
1183 | fn to_runnable_action( | 1200 | fn runnable_action_links( |
1184 | snap: &GlobalStateSnapshot, | 1201 | snap: &GlobalStateSnapshot, |
1185 | file_id: FileId, | 1202 | file_id: FileId, |
1186 | runnable: Runnable, | 1203 | runnable: Runnable, |
1187 | ) -> Option<lsp_ext::CommandLinkGroup> { | 1204 | ) -> Option<lsp_ext::CommandLinkGroup> { |
1188 | let cargo_spec = CargoTargetSpec::for_file(&snap, file_id).ok()?; | 1205 | let cargo_spec = CargoTargetSpec::for_file(&snap, file_id).ok()?; |
1189 | if should_skip_target(&runnable, cargo_spec.as_ref()) { | 1206 | if !snap.config.hover.runnable() || should_skip_target(&runnable, cargo_spec.as_ref()) { |
1190 | return None; | 1207 | return None; |
1191 | } | 1208 | } |
1192 | 1209 | ||
@@ -1208,6 +1225,26 @@ fn to_runnable_action( | |||
1208 | }) | 1225 | }) |
1209 | } | 1226 | } |
1210 | 1227 | ||
1228 | fn goto_type_action_links( | ||
1229 | snap: &GlobalStateSnapshot, | ||
1230 | nav_targets: &[HoverGotoTypeData], | ||
1231 | ) -> Option<lsp_ext::CommandLinkGroup> { | ||
1232 | if !snap.config.hover.goto_type_def || nav_targets.is_empty() { | ||
1233 | return None; | ||
1234 | } | ||
1235 | |||
1236 | Some(lsp_ext::CommandLinkGroup { | ||
1237 | title: Some("Go to ".into()), | ||
1238 | commands: nav_targets | ||
1239 | .iter() | ||
1240 | .filter_map(|it| { | ||
1241 | goto_location_command(snap, &it.nav) | ||
1242 | .map(|cmd| to_command_link(cmd, it.mod_path.clone())) | ||
1243 | }) | ||
1244 | .collect(), | ||
1245 | }) | ||
1246 | } | ||
1247 | |||
1211 | fn prepare_hover_actions( | 1248 | fn prepare_hover_actions( |
1212 | snap: &GlobalStateSnapshot, | 1249 | snap: &GlobalStateSnapshot, |
1213 | file_id: FileId, | 1250 | file_id: FileId, |
@@ -1221,7 +1258,8 @@ fn prepare_hover_actions( | |||
1221 | .iter() | 1258 | .iter() |
1222 | .filter_map(|it| match it { | 1259 | .filter_map(|it| match it { |
1223 | HoverAction::Implementaion(position) => show_impl_command_link(snap, position), | 1260 | HoverAction::Implementaion(position) => show_impl_command_link(snap, position), |
1224 | HoverAction::Runnable(r) => to_runnable_action(snap, file_id, r.clone()), | 1261 | HoverAction::Runnable(r) => runnable_action_links(snap, file_id, r.clone()), |
1262 | HoverAction::GoToType(targets) => goto_type_action_links(snap, targets), | ||
1225 | }) | 1263 | }) |
1226 | .collect() | 1264 | .collect() |
1227 | } | 1265 | } |
diff --git a/crates/rust-analyzer/src/main_loop/pending_requests.rs b/crates/rust-analyzer/src/main_loop/pending_requests.rs deleted file mode 100644 index 73b33e419..000000000 --- a/crates/rust-analyzer/src/main_loop/pending_requests.rs +++ /dev/null | |||
@@ -1,75 +0,0 @@ | |||
1 | //! Data structures that keep track of inflight requests. | ||
2 | |||
3 | use std::time::{Duration, Instant}; | ||
4 | |||
5 | use lsp_server::RequestId; | ||
6 | use rustc_hash::FxHashMap; | ||
7 | |||
8 | #[derive(Debug)] | ||
9 | pub struct CompletedRequest { | ||
10 | pub id: RequestId, | ||
11 | pub method: String, | ||
12 | pub duration: Duration, | ||
13 | } | ||
14 | |||
15 | #[derive(Debug)] | ||
16 | pub(crate) struct PendingRequest { | ||
17 | pub(crate) id: RequestId, | ||
18 | pub(crate) method: String, | ||
19 | pub(crate) received: Instant, | ||
20 | } | ||
21 | |||
22 | impl From<PendingRequest> for CompletedRequest { | ||
23 | fn from(pending: PendingRequest) -> CompletedRequest { | ||
24 | CompletedRequest { | ||
25 | id: pending.id, | ||
26 | method: pending.method, | ||
27 | duration: pending.received.elapsed(), | ||
28 | } | ||
29 | } | ||
30 | } | ||
31 | |||
32 | #[derive(Debug, Default)] | ||
33 | pub(crate) struct PendingRequests { | ||
34 | map: FxHashMap<RequestId, PendingRequest>, | ||
35 | } | ||
36 | |||
37 | impl PendingRequests { | ||
38 | pub(crate) fn start(&mut self, request: PendingRequest) { | ||
39 | let id = request.id.clone(); | ||
40 | let prev = self.map.insert(id.clone(), request); | ||
41 | assert!(prev.is_none(), "duplicate request with id {}", id); | ||
42 | } | ||
43 | pub(crate) fn cancel(&mut self, id: &RequestId) -> bool { | ||
44 | self.map.remove(id).is_some() | ||
45 | } | ||
46 | pub(crate) fn finish(&mut self, id: &RequestId) -> Option<CompletedRequest> { | ||
47 | self.map.remove(id).map(CompletedRequest::from) | ||
48 | } | ||
49 | } | ||
50 | |||
51 | const N_COMPLETED_REQUESTS: usize = 10; | ||
52 | |||
53 | #[derive(Debug, Default)] | ||
54 | pub struct LatestRequests { | ||
55 | // hand-rolling VecDeque here to print things in a nicer way | ||
56 | buf: [Option<CompletedRequest>; N_COMPLETED_REQUESTS], | ||
57 | idx: usize, | ||
58 | } | ||
59 | |||
60 | impl LatestRequests { | ||
61 | pub(crate) fn record(&mut self, request: CompletedRequest) { | ||
62 | // special case: don't track status request itself | ||
63 | if request.method == "rust-analyzer/analyzerStatus" { | ||
64 | return; | ||
65 | } | ||
66 | let idx = self.idx; | ||
67 | self.buf[idx] = Some(request); | ||
68 | self.idx = (idx + 1) % N_COMPLETED_REQUESTS; | ||
69 | } | ||
70 | |||
71 | pub(crate) fn iter(&self) -> impl Iterator<Item = (bool, &CompletedRequest)> { | ||
72 | let idx = self.idx; | ||
73 | self.buf.iter().enumerate().filter_map(move |(i, req)| Some((i == idx, req.as_ref()?))) | ||
74 | } | ||
75 | } | ||
diff --git a/crates/rust-analyzer/src/main_loop/request_metrics.rs b/crates/rust-analyzer/src/main_loop/request_metrics.rs new file mode 100644 index 000000000..b1019e2d6 --- /dev/null +++ b/crates/rust-analyzer/src/main_loop/request_metrics.rs | |||
@@ -0,0 +1,37 @@ | |||
1 | //! Records stats about requests | ||
2 | use std::time::Duration; | ||
3 | |||
4 | use lsp_server::RequestId; | ||
5 | |||
6 | #[derive(Debug)] | ||
7 | pub(crate) struct RequestMetrics { | ||
8 | pub(crate) id: RequestId, | ||
9 | pub(crate) method: String, | ||
10 | pub(crate) duration: Duration, | ||
11 | } | ||
12 | |||
13 | const N_COMPLETED_REQUESTS: usize = 10; | ||
14 | |||
15 | #[derive(Debug, Default)] | ||
16 | pub(crate) struct LatestRequests { | ||
17 | // hand-rolling VecDeque here to print things in a nicer way | ||
18 | buf: [Option<RequestMetrics>; N_COMPLETED_REQUESTS], | ||
19 | idx: usize, | ||
20 | } | ||
21 | |||
22 | impl LatestRequests { | ||
23 | pub(crate) fn record(&mut self, request: RequestMetrics) { | ||
24 | // special case: don't track status request itself | ||
25 | if request.method == "rust-analyzer/analyzerStatus" { | ||
26 | return; | ||
27 | } | ||
28 | let idx = self.idx; | ||
29 | self.buf[idx] = Some(request); | ||
30 | self.idx = (idx + 1) % N_COMPLETED_REQUESTS; | ||
31 | } | ||
32 | |||
33 | pub(crate) fn iter(&self) -> impl Iterator<Item = (bool, &RequestMetrics)> { | ||
34 | let idx = self.idx; | ||
35 | self.buf.iter().enumerate().filter_map(move |(i, req)| Some((i == idx, req.as_ref()?))) | ||
36 | } | ||
37 | } | ||
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index 6f125c37c..2ea63d33b 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs | |||
@@ -45,6 +45,7 @@ define_semantic_token_types![ | |||
45 | (UNION, "union"), | 45 | (UNION, "union"), |
46 | (UNRESOLVED_REFERENCE, "unresolvedReference"), | 46 | (UNRESOLVED_REFERENCE, "unresolvedReference"), |
47 | (FORMAT_SPECIFIER, "formatSpecifier"), | 47 | (FORMAT_SPECIFIER, "formatSpecifier"), |
48 | (ESCAPE_SEQUENCE, "escapeSequence"), | ||
48 | ]; | 49 | ]; |
49 | 50 | ||
50 | macro_rules! define_semantic_token_modifiers { | 51 | macro_rules! define_semantic_token_modifiers { |
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs index 881aa1c55..ec153097e 100644 --- a/crates/rust-analyzer/src/to_proto.rs +++ b/crates/rust-analyzer/src/to_proto.rs | |||
@@ -324,12 +324,14 @@ fn semantic_token_type_and_modifiers( | |||
324 | HighlightTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE, | 324 | HighlightTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE, |
325 | HighlightTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER, | 325 | HighlightTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER, |
326 | HighlightTag::Operator => lsp_types::SemanticTokenType::OPERATOR, | 326 | HighlightTag::Operator => lsp_types::SemanticTokenType::OPERATOR, |
327 | HighlightTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE, | ||
327 | }; | 328 | }; |
328 | 329 | ||
329 | for modifier in highlight.modifiers.iter() { | 330 | for modifier in highlight.modifiers.iter() { |
330 | let modifier = match modifier { | 331 | let modifier = match modifier { |
331 | HighlightModifier::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER, | 332 | HighlightModifier::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER, |
332 | HighlightModifier::Definition => lsp_types::SemanticTokenModifier::DECLARATION, | 333 | HighlightModifier::Definition => lsp_types::SemanticTokenModifier::DECLARATION, |
334 | HighlightModifier::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION, | ||
333 | HighlightModifier::ControlFlow => semantic_tokens::CONTROL_FLOW, | 335 | HighlightModifier::ControlFlow => semantic_tokens::CONTROL_FLOW, |
334 | HighlightModifier::Mutable => semantic_tokens::MUTABLE, | 336 | HighlightModifier::Mutable => semantic_tokens::MUTABLE, |
335 | HighlightModifier::Unsafe => semantic_tokens::UNSAFE, | 337 | HighlightModifier::Unsafe => semantic_tokens::UNSAFE, |
@@ -528,13 +530,13 @@ pub(crate) fn resource_op( | |||
528 | file_system_edit: FileSystemEdit, | 530 | file_system_edit: FileSystemEdit, |
529 | ) -> lsp_types::ResourceOp { | 531 | ) -> lsp_types::ResourceOp { |
530 | match file_system_edit { | 532 | match file_system_edit { |
531 | FileSystemEdit::CreateFile { source_root, path } => { | 533 | FileSystemEdit::CreateFile { anchor, dst } => { |
532 | let uri = snap.path_to_url(source_root, &path); | 534 | let uri = snap.anchored_path(anchor, &dst); |
533 | lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None }) | 535 | lsp_types::ResourceOp::Create(lsp_types::CreateFile { uri, options: None }) |
534 | } | 536 | } |
535 | FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => { | 537 | FileSystemEdit::MoveFile { src, anchor, dst } => { |
536 | let old_uri = snap.file_id_to_url(src); | 538 | let old_uri = snap.file_id_to_url(src); |
537 | let new_uri = snap.path_to_url(dst_source_root, &dst_path); | 539 | let new_uri = snap.anchored_path(anchor, &dst); |
538 | lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None }) | 540 | lsp_types::ResourceOp::Rename(lsp_types::RenameFile { old_uri, new_uri, options: None }) |
539 | } | 541 | } |
540 | } | 542 | } |
diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index c0356344c..f2ff0e435 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs | |||
@@ -1,5 +1,4 @@ | |||
1 | //! Missing batteries for standard libraries. | 1 | //! Missing batteries for standard libraries. |
2 | |||
3 | use std::{cell::Cell, fmt, time::Instant}; | 2 | use std::{cell::Cell, fmt, time::Instant}; |
4 | 3 | ||
5 | #[inline(always)] | 4 | #[inline(always)] |
diff --git a/crates/vfs/Cargo.toml b/crates/vfs/Cargo.toml new file mode 100644 index 000000000..c03e6363b --- /dev/null +++ b/crates/vfs/Cargo.toml | |||
@@ -0,0 +1,14 @@ | |||
1 | [package] | ||
2 | name = "vfs" | ||
3 | version = "0.1.0" | ||
4 | authors = ["rust-analyzer developers"] | ||
5 | edition = "2018" | ||
6 | |||
7 | [dependencies] | ||
8 | rustc-hash = "1.0" | ||
9 | jod-thread = "0.1.0" | ||
10 | walkdir = "2.3.1" | ||
11 | globset = "0.4.5" | ||
12 | crossbeam-channel = "0.4.0" | ||
13 | |||
14 | paths = { path = "../paths" } | ||
diff --git a/crates/vfs/src/file_set.rs b/crates/vfs/src/file_set.rs new file mode 100644 index 000000000..724606a3d --- /dev/null +++ b/crates/vfs/src/file_set.rs | |||
@@ -0,0 +1,113 @@ | |||
1 | //! Partitions a list of files into disjoint subsets. | ||
2 | //! | ||
3 | //! Files which do not belong to any explicitly configured `FileSet` belong to | ||
4 | //! the default `FileSet`. | ||
5 | use std::{fmt, iter}; | ||
6 | |||
7 | use paths::AbsPathBuf; | ||
8 | use rustc_hash::FxHashMap; | ||
9 | |||
10 | use crate::{FileId, Vfs, VfsPath}; | ||
11 | |||
12 | #[derive(Default, Clone, Eq, PartialEq)] | ||
13 | pub struct FileSet { | ||
14 | files: FxHashMap<VfsPath, FileId>, | ||
15 | paths: FxHashMap<FileId, VfsPath>, | ||
16 | } | ||
17 | |||
18 | impl FileSet { | ||
19 | pub fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> { | ||
20 | let mut base = self.paths[&anchor].clone(); | ||
21 | base.pop(); | ||
22 | let path = base.join(path); | ||
23 | let res = self.files.get(&path).copied(); | ||
24 | res | ||
25 | } | ||
26 | pub fn insert(&mut self, file_id: FileId, path: VfsPath) { | ||
27 | self.files.insert(path.clone(), file_id); | ||
28 | self.paths.insert(file_id, path); | ||
29 | } | ||
30 | pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ { | ||
31 | self.paths.keys().copied() | ||
32 | } | ||
33 | } | ||
34 | |||
35 | impl fmt::Debug for FileSet { | ||
36 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
37 | f.debug_struct("FileSet").field("n_files", &self.files.len()).finish() | ||
38 | } | ||
39 | } | ||
40 | |||
41 | #[derive(Debug)] | ||
42 | pub struct FileSetConfig { | ||
43 | n_file_sets: usize, | ||
44 | roots: Vec<(AbsPathBuf, usize)>, | ||
45 | } | ||
46 | |||
47 | impl Default for FileSetConfig { | ||
48 | fn default() -> Self { | ||
49 | FileSetConfig::builder().build() | ||
50 | } | ||
51 | } | ||
52 | |||
53 | impl FileSetConfig { | ||
54 | pub fn builder() -> FileSetConfigBuilder { | ||
55 | FileSetConfigBuilder::default() | ||
56 | } | ||
57 | pub fn partition(&self, vfs: &Vfs) -> Vec<FileSet> { | ||
58 | let mut res = vec![FileSet::default(); self.len()]; | ||
59 | for (file_id, path) in vfs.iter() { | ||
60 | let root = self.classify(&path); | ||
61 | res[root].insert(file_id, path) | ||
62 | } | ||
63 | res | ||
64 | } | ||
65 | fn len(&self) -> usize { | ||
66 | self.n_file_sets | ||
67 | } | ||
68 | fn classify(&self, path: &VfsPath) -> usize { | ||
69 | let path = match path.as_path() { | ||
70 | Some(it) => it, | ||
71 | None => return self.len() - 1, | ||
72 | }; | ||
73 | let idx = match self.roots.binary_search_by(|(p, _)| p.as_path().cmp(path)) { | ||
74 | Ok(it) => it, | ||
75 | Err(it) => it.saturating_sub(1), | ||
76 | }; | ||
77 | if path.starts_with(&self.roots[idx].0) { | ||
78 | self.roots[idx].1 | ||
79 | } else { | ||
80 | self.len() - 1 | ||
81 | } | ||
82 | } | ||
83 | } | ||
84 | |||
85 | pub struct FileSetConfigBuilder { | ||
86 | roots: Vec<Vec<AbsPathBuf>>, | ||
87 | } | ||
88 | |||
89 | impl Default for FileSetConfigBuilder { | ||
90 | fn default() -> Self { | ||
91 | FileSetConfigBuilder { roots: Vec::new() } | ||
92 | } | ||
93 | } | ||
94 | |||
95 | impl FileSetConfigBuilder { | ||
96 | pub fn len(&self) -> usize { | ||
97 | self.roots.len() | ||
98 | } | ||
99 | pub fn add_file_set(&mut self, roots: Vec<AbsPathBuf>) { | ||
100 | self.roots.push(roots) | ||
101 | } | ||
102 | pub fn build(self) -> FileSetConfig { | ||
103 | let n_file_sets = self.roots.len() + 1; | ||
104 | let mut roots: Vec<(AbsPathBuf, usize)> = self | ||
105 | .roots | ||
106 | .into_iter() | ||
107 | .enumerate() | ||
108 | .flat_map(|(i, paths)| paths.into_iter().zip(iter::repeat(i))) | ||
109 | .collect(); | ||
110 | roots.sort(); | ||
111 | FileSetConfig { n_file_sets, roots } | ||
112 | } | ||
113 | } | ||
diff --git a/crates/vfs/src/lib.rs b/crates/vfs/src/lib.rs new file mode 100644 index 000000000..055219b0c --- /dev/null +++ b/crates/vfs/src/lib.rs | |||
@@ -0,0 +1,141 @@ | |||
1 | //! # Virtual File System | ||
2 | //! | ||
3 | //! VFS stores all files read by rust-analyzer. Reading file contents from VFS | ||
4 | //! always returns the same contents, unless VFS was explicitly modified with | ||
5 | //! `set_file_contents`. All changes to VFS are logged, and can be retrieved via | ||
6 | //! `take_changes` method. The pack of changes is then pushed to `salsa` and | ||
7 | //! triggers incremental recomputation. | ||
8 | //! | ||
9 | //! Files in VFS are identified with `FileId`s -- interned paths. The notion of | ||
10 | //! the path, `VfsPath` is somewhat abstract: at the moment, it is represented | ||
11 | //! as an `std::path::PathBuf` internally, but this is an implementation detail. | ||
12 | //! | ||
13 | //! VFS doesn't do IO or file watching itself. For that, see the `loader` | ||
14 | //! module. `loader::Handle` is an object-safe trait which abstracts both file | ||
15 | //! loading and file watching. `Handle` is dynamically configured with a set of | ||
16 | //! directory entries which should be scanned and watched. `Handle` then | ||
17 | //! asynchronously pushes file changes. Directory entries are configured in | ||
18 | //! free-form via list of globs, it's up to the `Handle` to interpret the globs | ||
19 | //! in any specific way. | ||
20 | //! | ||
21 | //! A simple `WalkdirLoaderHandle` is provided, which doesn't implement watching | ||
22 | //! and just scans the directory using walkdir. | ||
23 | //! | ||
24 | //! VFS stores a flat list of files. `FileSet` can partition this list of files | ||
25 | //! into disjoint sets of files. Traversal-like operations (including getting | ||
26 | //! the neighbor file by the relative path) are handled by the `FileSet`. | ||
27 | //! `FileSet`s are also pushed to salsa and cause it to re-check `mod foo;` | ||
28 | //! declarations when files are created or deleted. | ||
29 | //! | ||
30 | //! `file_set::FileSet` and `loader::Entry` play similar, but different roles. | ||
31 | //! Both specify the "set of paths/files", one is geared towards file watching, | ||
32 | //! the other towards salsa changes. In particular, single `file_set::FileSet` | ||
33 | //! may correspond to several `loader::Entry`. For example, a crate from | ||
34 | //! crates.io which uses code generation would have two `Entries` -- for sources | ||
35 | //! in `~/.cargo`, and for generated code in `./target/debug/build`. It will | ||
36 | //! have a single `FileSet` which unions the two sources. | ||
37 | mod vfs_path; | ||
38 | mod path_interner; | ||
39 | pub mod file_set; | ||
40 | pub mod loader; | ||
41 | pub mod walkdir_loader; | ||
42 | |||
43 | use std::{fmt, mem}; | ||
44 | |||
45 | use crate::path_interner::PathInterner; | ||
46 | |||
47 | pub use crate::vfs_path::VfsPath; | ||
48 | pub use paths::{AbsPath, AbsPathBuf}; | ||
49 | |||
50 | #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)] | ||
51 | pub struct FileId(pub u32); | ||
52 | |||
53 | #[derive(Default)] | ||
54 | pub struct Vfs { | ||
55 | interner: PathInterner, | ||
56 | data: Vec<Option<Vec<u8>>>, | ||
57 | changes: Vec<ChangedFile>, | ||
58 | } | ||
59 | |||
60 | pub struct ChangedFile { | ||
61 | pub file_id: FileId, | ||
62 | pub change_kind: ChangeKind, | ||
63 | } | ||
64 | |||
65 | impl ChangedFile { | ||
66 | pub fn exists(&self) -> bool { | ||
67 | self.change_kind != ChangeKind::Delete | ||
68 | } | ||
69 | pub fn is_created_or_deleted(&self) -> bool { | ||
70 | matches!(self.change_kind, ChangeKind::Create | ChangeKind::Delete) | ||
71 | } | ||
72 | } | ||
73 | |||
74 | #[derive(Eq, PartialEq)] | ||
75 | pub enum ChangeKind { | ||
76 | Create, | ||
77 | Modify, | ||
78 | Delete, | ||
79 | } | ||
80 | |||
81 | impl Vfs { | ||
82 | pub fn len(&self) -> usize { | ||
83 | self.data.len() | ||
84 | } | ||
85 | pub fn file_id(&self, path: &VfsPath) -> Option<FileId> { | ||
86 | self.interner.get(path).filter(|&it| self.get(it).is_some()) | ||
87 | } | ||
88 | pub fn file_path(&self, file_id: FileId) -> VfsPath { | ||
89 | self.interner.lookup(file_id).clone() | ||
90 | } | ||
91 | pub fn file_contents(&self, file_id: FileId) -> &[u8] { | ||
92 | self.get(file_id).as_deref().unwrap() | ||
93 | } | ||
94 | pub fn iter(&self) -> impl Iterator<Item = (FileId, VfsPath)> + '_ { | ||
95 | (0..self.data.len()) | ||
96 | .map(|it| FileId(it as u32)) | ||
97 | .filter(move |&file_id| self.get(file_id).is_some()) | ||
98 | .map(move |file_id| { | ||
99 | let path = self.interner.lookup(file_id).clone(); | ||
100 | (file_id, path) | ||
101 | }) | ||
102 | } | ||
103 | pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) { | ||
104 | let file_id = self.alloc_file_id(path); | ||
105 | let change_kind = match (&self.get(file_id), &contents) { | ||
106 | (None, None) => return, | ||
107 | (None, Some(_)) => ChangeKind::Create, | ||
108 | (Some(_), None) => ChangeKind::Delete, | ||
109 | (Some(old), Some(new)) if old == new => return, | ||
110 | (Some(_), Some(_)) => ChangeKind::Modify, | ||
111 | }; | ||
112 | |||
113 | *self.get_mut(file_id) = contents; | ||
114 | self.changes.push(ChangedFile { file_id, change_kind }) | ||
115 | } | ||
116 | pub fn has_changes(&self) -> bool { | ||
117 | !self.changes.is_empty() | ||
118 | } | ||
119 | pub fn take_changes(&mut self) -> Vec<ChangedFile> { | ||
120 | mem::take(&mut self.changes) | ||
121 | } | ||
122 | fn alloc_file_id(&mut self, path: VfsPath) -> FileId { | ||
123 | let file_id = self.interner.intern(path); | ||
124 | let idx = file_id.0 as usize; | ||
125 | let len = self.data.len().max(idx + 1); | ||
126 | self.data.resize_with(len, || None); | ||
127 | file_id | ||
128 | } | ||
129 | fn get(&self, file_id: FileId) -> &Option<Vec<u8>> { | ||
130 | &self.data[file_id.0 as usize] | ||
131 | } | ||
132 | fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> { | ||
133 | &mut self.data[file_id.0 as usize] | ||
134 | } | ||
135 | } | ||
136 | |||
137 | impl fmt::Debug for Vfs { | ||
138 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { | ||
139 | f.debug_struct("Vfs").field("n_files", &self.data.len()).finish() | ||
140 | } | ||
141 | } | ||
diff --git a/crates/vfs/src/loader.rs b/crates/vfs/src/loader.rs new file mode 100644 index 000000000..5a0ca68f3 --- /dev/null +++ b/crates/vfs/src/loader.rs | |||
@@ -0,0 +1,69 @@ | |||
1 | //! Object safe interface for file watching and reading. | ||
2 | use std::fmt; | ||
3 | |||
4 | use paths::AbsPathBuf; | ||
5 | |||
6 | pub enum Entry { | ||
7 | Files(Vec<AbsPathBuf>), | ||
8 | Directory { path: AbsPathBuf, globs: Vec<String> }, | ||
9 | } | ||
10 | |||
11 | pub struct Config { | ||
12 | pub load: Vec<Entry>, | ||
13 | pub watch: Vec<usize>, | ||
14 | } | ||
15 | |||
16 | pub enum Message { | ||
17 | DidSwitchConfig { n_entries: usize }, | ||
18 | DidLoadAllEntries, | ||
19 | Loaded { files: Vec<(AbsPathBuf, Option<Vec<u8>>)> }, | ||
20 | } | ||
21 | |||
22 | pub type Sender = Box<dyn Fn(Message) + Send>; | ||
23 | |||
24 | pub trait Handle: fmt::Debug { | ||
25 | fn spawn(sender: Sender) -> Self | ||
26 | where | ||
27 | Self: Sized; | ||
28 | fn set_config(&mut self, config: Config); | ||
29 | fn invalidate(&mut self, path: AbsPathBuf); | ||
30 | fn load_sync(&mut self, path: &AbsPathBuf) -> Option<Vec<u8>>; | ||
31 | } | ||
32 | |||
33 | impl Entry { | ||
34 | pub fn rs_files_recursively(base: AbsPathBuf) -> Entry { | ||
35 | Entry::Directory { path: base, globs: globs(&["*.rs"]) } | ||
36 | } | ||
37 | pub fn local_cargo_package(base: AbsPathBuf) -> Entry { | ||
38 | Entry::Directory { path: base, globs: globs(&["*.rs", "!/target/"]) } | ||
39 | } | ||
40 | pub fn cargo_package_dependency(base: AbsPathBuf) -> Entry { | ||
41 | Entry::Directory { | ||
42 | path: base, | ||
43 | globs: globs(&["*.rs", "!/tests/", "!/examples/", "!/benches/"]), | ||
44 | } | ||
45 | } | ||
46 | } | ||
47 | |||
48 | fn globs(globs: &[&str]) -> Vec<String> { | ||
49 | globs.iter().map(|it| it.to_string()).collect() | ||
50 | } | ||
51 | |||
52 | impl fmt::Debug for Message { | ||
53 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
54 | match self { | ||
55 | Message::Loaded { files } => { | ||
56 | f.debug_struct("Loaded").field("n_files", &files.len()).finish() | ||
57 | } | ||
58 | Message::DidSwitchConfig { n_entries } => { | ||
59 | f.debug_struct("DidSwitchConfig").field("n_entries", n_entries).finish() | ||
60 | } | ||
61 | Message::DidLoadAllEntries => f.debug_struct("DidLoadAllEntries").finish(), | ||
62 | } | ||
63 | } | ||
64 | } | ||
65 | |||
66 | #[test] | ||
67 | fn handle_is_object_safe() { | ||
68 | fn _assert(_: &dyn Handle) {} | ||
69 | } | ||
diff --git a/crates/vfs/src/path_interner.rs b/crates/vfs/src/path_interner.rs new file mode 100644 index 000000000..4f70d61e8 --- /dev/null +++ b/crates/vfs/src/path_interner.rs | |||
@@ -0,0 +1,31 @@ | |||
1 | //! Maps paths to compact integer ids. We don't care about clearings paths which | ||
2 | //! no longer exist -- the assumption is total size of paths we ever look at is | ||
3 | //! not too big. | ||
4 | use rustc_hash::FxHashMap; | ||
5 | |||
6 | use crate::{FileId, VfsPath}; | ||
7 | |||
8 | #[derive(Default)] | ||
9 | pub(crate) struct PathInterner { | ||
10 | map: FxHashMap<VfsPath, FileId>, | ||
11 | vec: Vec<VfsPath>, | ||
12 | } | ||
13 | |||
14 | impl PathInterner { | ||
15 | pub(crate) fn get(&self, path: &VfsPath) -> Option<FileId> { | ||
16 | self.map.get(path).copied() | ||
17 | } | ||
18 | pub(crate) fn intern(&mut self, path: VfsPath) -> FileId { | ||
19 | if let Some(id) = self.get(&path) { | ||
20 | return id; | ||
21 | } | ||
22 | let id = FileId(self.vec.len() as u32); | ||
23 | self.map.insert(path.clone(), id); | ||
24 | self.vec.push(path); | ||
25 | id | ||
26 | } | ||
27 | |||
28 | pub(crate) fn lookup(&self, id: FileId) -> &VfsPath { | ||
29 | &self.vec[id.0 as usize] | ||
30 | } | ||
31 | } | ||
diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs new file mode 100644 index 000000000..de5dc0bf3 --- /dev/null +++ b/crates/vfs/src/vfs_path.rs | |||
@@ -0,0 +1,49 @@ | |||
1 | //! Abstract-ish representation of paths for VFS. | ||
2 | use std::fmt; | ||
3 | |||
4 | use paths::{AbsPath, AbsPathBuf}; | ||
5 | |||
6 | /// Long-term, we want to support files which do not reside in the file-system, | ||
7 | /// so we treat VfsPaths as opaque identifiers. | ||
8 | #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] | ||
9 | pub struct VfsPath(VfsPathRepr); | ||
10 | |||
11 | impl VfsPath { | ||
12 | pub fn as_path(&self) -> Option<&AbsPath> { | ||
13 | match &self.0 { | ||
14 | VfsPathRepr::PathBuf(it) => Some(it.as_path()), | ||
15 | } | ||
16 | } | ||
17 | pub fn join(&self, path: &str) -> VfsPath { | ||
18 | match &self.0 { | ||
19 | VfsPathRepr::PathBuf(it) => { | ||
20 | let res = it.join(path).normalize(); | ||
21 | VfsPath(VfsPathRepr::PathBuf(res)) | ||
22 | } | ||
23 | } | ||
24 | } | ||
25 | pub fn pop(&mut self) -> bool { | ||
26 | match &mut self.0 { | ||
27 | VfsPathRepr::PathBuf(it) => it.pop(), | ||
28 | } | ||
29 | } | ||
30 | } | ||
31 | |||
32 | #[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)] | ||
33 | enum VfsPathRepr { | ||
34 | PathBuf(AbsPathBuf), | ||
35 | } | ||
36 | |||
37 | impl From<AbsPathBuf> for VfsPath { | ||
38 | fn from(v: AbsPathBuf) -> Self { | ||
39 | VfsPath(VfsPathRepr::PathBuf(v)) | ||
40 | } | ||
41 | } | ||
42 | |||
43 | impl fmt::Display for VfsPath { | ||
44 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { | ||
45 | match &self.0 { | ||
46 | VfsPathRepr::PathBuf(it) => fmt::Display::fmt(&it.display(), f), | ||
47 | } | ||
48 | } | ||
49 | } | ||
diff --git a/crates/vfs/src/walkdir_loader.rs b/crates/vfs/src/walkdir_loader.rs new file mode 100644 index 000000000..13e59e3f3 --- /dev/null +++ b/crates/vfs/src/walkdir_loader.rs | |||
@@ -0,0 +1,108 @@ | |||
1 | //! A walkdir-based implementation of `loader::Handle`, which doesn't try to | ||
2 | //! watch files. | ||
3 | use std::convert::TryFrom; | ||
4 | |||
5 | use globset::{Glob, GlobSetBuilder}; | ||
6 | use paths::{AbsPath, AbsPathBuf}; | ||
7 | use walkdir::WalkDir; | ||
8 | |||
9 | use crate::loader; | ||
10 | |||
11 | #[derive(Debug)] | ||
12 | pub struct WalkdirLoaderHandle { | ||
13 | // Relative order of fields below is significant. | ||
14 | sender: crossbeam_channel::Sender<Message>, | ||
15 | _thread: jod_thread::JoinHandle, | ||
16 | } | ||
17 | |||
18 | enum Message { | ||
19 | Config(loader::Config), | ||
20 | Invalidate(AbsPathBuf), | ||
21 | } | ||
22 | |||
23 | impl loader::Handle for WalkdirLoaderHandle { | ||
24 | fn spawn(sender: loader::Sender) -> WalkdirLoaderHandle { | ||
25 | let actor = WalkdirLoaderActor { sender }; | ||
26 | let (sender, receiver) = crossbeam_channel::unbounded::<Message>(); | ||
27 | let thread = jod_thread::spawn(move || actor.run(receiver)); | ||
28 | WalkdirLoaderHandle { sender, _thread: thread } | ||
29 | } | ||
30 | fn set_config(&mut self, config: loader::Config) { | ||
31 | self.sender.send(Message::Config(config)).unwrap() | ||
32 | } | ||
33 | fn invalidate(&mut self, path: AbsPathBuf) { | ||
34 | self.sender.send(Message::Invalidate(path)).unwrap(); | ||
35 | } | ||
36 | fn load_sync(&mut self, path: &AbsPathBuf) -> Option<Vec<u8>> { | ||
37 | read(path) | ||
38 | } | ||
39 | } | ||
40 | |||
41 | struct WalkdirLoaderActor { | ||
42 | sender: loader::Sender, | ||
43 | } | ||
44 | |||
45 | impl WalkdirLoaderActor { | ||
46 | fn run(mut self, receiver: crossbeam_channel::Receiver<Message>) { | ||
47 | for msg in receiver { | ||
48 | match msg { | ||
49 | Message::Config(config) => { | ||
50 | self.send(loader::Message::DidSwitchConfig { n_entries: config.load.len() }); | ||
51 | for entry in config.load.into_iter() { | ||
52 | let files = self.load_entry(entry); | ||
53 | self.send(loader::Message::Loaded { files }); | ||
54 | } | ||
55 | drop(config.watch); | ||
56 | self.send(loader::Message::DidLoadAllEntries); | ||
57 | } | ||
58 | Message::Invalidate(path) => { | ||
59 | let contents = read(path.as_path()); | ||
60 | let files = vec![(path, contents)]; | ||
61 | self.send(loader::Message::Loaded { files }); | ||
62 | } | ||
63 | } | ||
64 | } | ||
65 | } | ||
66 | fn load_entry(&mut self, entry: loader::Entry) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> { | ||
67 | match entry { | ||
68 | loader::Entry::Files(files) => files | ||
69 | .into_iter() | ||
70 | .map(|file| { | ||
71 | let contents = read(file.as_path()); | ||
72 | (file, contents) | ||
73 | }) | ||
74 | .collect::<Vec<_>>(), | ||
75 | loader::Entry::Directory { path, globs } => { | ||
76 | let globset = { | ||
77 | let mut builder = GlobSetBuilder::new(); | ||
78 | for glob in &globs { | ||
79 | builder.add(Glob::new(glob).unwrap()); | ||
80 | } | ||
81 | builder.build().unwrap() | ||
82 | }; | ||
83 | |||
84 | let files = WalkDir::new(path) | ||
85 | .into_iter() | ||
86 | .filter_map(|it| it.ok()) | ||
87 | .filter(|it| it.file_type().is_file()) | ||
88 | .map(|it| it.into_path()) | ||
89 | .map(|it| AbsPathBuf::try_from(it).unwrap()) | ||
90 | .filter(|it| globset.is_match(&it)); | ||
91 | |||
92 | files | ||
93 | .map(|file| { | ||
94 | let contents = read(file.as_path()); | ||
95 | (file, contents) | ||
96 | }) | ||
97 | .collect() | ||
98 | } | ||
99 | } | ||
100 | } | ||
101 | fn send(&mut self, msg: loader::Message) { | ||
102 | (self.sender)(msg) | ||
103 | } | ||
104 | } | ||
105 | |||
106 | fn read(path: &AbsPath) -> Option<Vec<u8>> { | ||
107 | std::fs::read(path).ok() | ||
108 | } | ||