Diffstat (limited to 'crates')
 crates/ra_cargo_watch/src/conv.rs | 14
 crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap | 52
 crates/ra_cargo_watch/src/lib.rs | 67
 crates/ra_hir/src/lib.rs | 12
 crates/ra_hir/src/semantics.rs | 29
 crates/ra_hir_def/src/body.rs | 9
 crates/ra_hir_def/src/body/lower.rs | 12
 crates/ra_hir_expand/src/db.rs | 24
 crates/ra_hir_ty/src/tests/regression.rs | 31
 crates/ra_ide/src/references.rs | 32
 crates/ra_ide/src/references/rename.rs | 57
 crates/ra_ide_db/src/search.rs | 20
 crates/ra_ide_db/src/symbol_index.rs | 7
 crates/ra_project_model/src/cargo_workspace.rs | 27
 crates/ra_project_model/src/lib.rs | 6
 crates/ra_syntax/src/algo.rs | 4
 crates/ra_syntax/src/ast/extensions.rs | 13
 17 files changed, 296 insertions, 120 deletions
diff --git a/crates/ra_cargo_watch/src/conv.rs b/crates/ra_cargo_watch/src/conv.rs
index c6f8ca329..817543deb 100644
--- a/crates/ra_cargo_watch/src/conv.rs
+++ b/crates/ra_cargo_watch/src/conv.rs
@@ -1,7 +1,8 @@
 //! This module provides the functionality needed to convert diagnostics from
 //! `cargo check` json format to the LSP diagnostic format.
 use cargo_metadata::diagnostic::{
-    Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion,
+    Applicability, Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan,
+    DiagnosticSpanMacroExpansion,
 };
 use lsp_types::{
     CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag,
@@ -136,10 +137,13 @@ fn map_rust_child_diagnostic(

     let mut edit_map: HashMap<Url, Vec<TextEdit>> = HashMap::new();
     for &span in &spans {
-        if let Some(suggested_replacement) = &span.suggested_replacement {
-            let location = map_span_to_location(span, workspace_root);
-            let edit = TextEdit::new(location.range, suggested_replacement.clone());
-            edit_map.entry(location.uri).or_default().push(edit);
+        match (&span.suggestion_applicability, &span.suggested_replacement) {
+            (Some(Applicability::MachineApplicable), Some(suggested_replacement)) => {
+                let location = map_span_to_location(span, workspace_root);
+                let edit = TextEdit::new(location.range, suggested_replacement.clone());
+                edit_map.entry(location.uri).or_default().push(edit);
+            }
+            _ => {}
         }
     }

diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
index 9e8f4eff4..a59fa84fa 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
@@ -58,44 +58,26 @@ expression: diag
                         },
                         message: "lint level defined here",
                     },
+                    DiagnosticRelatedInformation {
+                        location: Location {
+                            uri: "file:///test/compiler/mir/tagset.rs",
+                            range: Range {
+                                start: Position {
+                                    line: 41,
+                                    character: 23,
+                                },
+                                end: Position {
+                                    line: 41,
+                                    character: 28,
+                                },
+                            },
+                        },
+                        message: "consider passing by value instead",
+                    },
                 ],
             ),
             tags: None,
         },
-        fixes: [
-            CodeAction {
-                title: "consider passing by value instead",
-                kind: Some(
-                    "quickfix",
-                ),
-                diagnostics: None,
-                edit: Some(
-                    WorkspaceEdit {
-                        changes: Some(
-                            {
-                                "file:///test/compiler/mir/tagset.rs": [
-                                    TextEdit {
-                                        range: Range {
-                                            start: Position {
-                                                line: 41,
-                                                character: 23,
-                                            },
-                                            end: Position {
-                                                line: 41,
-                                                character: 28,
-                                            },
-                                        },
-                                        new_text: "self",
-                                    },
-                                ],
-                            },
-                        ),
-                        document_changes: None,
-                    },
-                ),
-                command: None,
-                is_preferred: None,
-            },
-        ],
+        fixes: [],
     },
 ]
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs
index bffe5eb00..7c525c430 100644
--- a/crates/ra_cargo_watch/src/lib.rs
+++ b/crates/ra_cargo_watch/src/lib.rs
@@ -8,9 +8,10 @@ use lsp_types::{
     WorkDoneProgressEnd, WorkDoneProgressReport,
 };
 use std::{
+    error, fmt,
     io::{BufRead, BufReader},
     path::{Path, PathBuf},
-    process::{Child, Command, Stdio},
+    process::{Command, Stdio},
     thread::JoinHandle,
     time::Instant,
 };
@@ -70,10 +71,10 @@ impl std::ops::Drop for CheckWatcher {
     fn drop(&mut self) {
         if let Some(handle) = self.handle.take() {
             // Take the sender out of the option
-            let recv = self.cmd_send.take();
+            let cmd_send = self.cmd_send.take();

             // Dropping the sender finishes the thread loop
-            drop(recv);
+            drop(cmd_send);

             // Join the thread, it should finish shortly. We don't really care
             // whether it panicked, so it is safe to ignore the result
@@ -246,11 +247,21 @@ enum CheckEvent {
     End,
 }

+#[derive(Debug)]
+pub struct CargoError(String);
+
+impl fmt::Display for CargoError {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "Cargo failed: {}", self.0)
+    }
+}
+impl error::Error for CargoError {}
+
 pub fn run_cargo(
     args: &[String],
     current_dir: Option<&Path>,
     on_message: &mut dyn FnMut(cargo_metadata::Message) -> bool,
-) -> Child {
+) -> Result<(), CargoError> {
     let mut command = Command::new("cargo");
     if let Some(current_dir) = current_dir {
         command.current_dir(current_dir);
@@ -273,6 +284,8 @@ pub fn run_cargo(
     // simply skip a line if it doesn't parse, which just ignores any
     // erroneus output.
     let stdout = BufReader::new(child.stdout.take().unwrap());
+    let mut read_at_least_one_message = false;
+
     for line in stdout.lines() {
         let line = match line {
             Ok(line) => line,
@@ -291,12 +304,31 @@ pub fn run_cargo(
             }
         };

+        read_at_least_one_message = true;
+
         if !on_message(message) {
             break;
         }
     }

-    child
+    // It is okay to ignore the result, as it only errors if the process is already dead
+    let _ = child.kill();
+
+    let err_msg = match child.wait() {
+        Ok(exit_code) if !exit_code.success() && !read_at_least_one_message => {
+            // FIXME: Read the stderr to display the reason, see `read2()` reference in PR comment:
+            // https://github.com/rust-analyzer/rust-analyzer/pull/3632#discussion_r395605298
+            format!(
+                "the command produced no valid metadata (exit code: {:?}): cargo {}",
+                exit_code,
+                args.join(" ")
+            )
+        }
+        Err(err) => format!("io error: {:?}", err),
+        Ok(_) => return Ok(()),
+    };
+
+    Err(CargoError(err_msg))
 }

 impl WatchThread {
@@ -325,7 +357,7 @@ impl WatchThread {
                 // which will break out of the loop, and continue the shutdown
                 let _ = message_send.send(CheckEvent::Begin);

-                let mut child = run_cargo(&args, Some(&workspace_root), &mut |message| {
+                let res = run_cargo(&args, Some(&workspace_root), &mut |message| {
                     // Skip certain kinds of messages to only spend time on what's useful
                     match &message {
                         Message::CompilerArtifact(artifact) if artifact.fresh => return true,
@@ -334,26 +366,19 @@ impl WatchThread {
                         _ => {}
                     }

-                    match message_send.send(CheckEvent::Msg(message)) {
-                        Ok(()) => {}
-                        Err(_err) => {
-                            // The send channel was closed, so we want to shutdown
-                            return false;
-                        }
-                    };
-
-                    true
+                    // if the send channel was closed, we want to shutdown
+                    message_send.send(CheckEvent::Msg(message)).is_ok()
                 });

+                if let Err(err) = res {
+                    // FIXME: make the `message_send` to be `Sender<Result<CheckEvent, CargoError>>`
+                    // to display user-caused misconfiguration errors instead of just logging them here
+                    log::error!("Cargo watcher failed {:?}", err);
+                }
+
                 // We can ignore any error here, as we are already in the progress
                 // of shutting down.
                 let _ = message_send.send(CheckEvent::End);
-
-                // It is okay to ignore the result, as it only errors if the process is already dead
-                let _ = child.kill();
-
-                // Again, we don't care about the exit status so just ignore the result
-                let _ = child.wait();
             }))
         } else {
             None
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 9f59d590c..713d45f48 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -4,6 +4,18 @@
 //! The principal difference between HIR and syntax trees is that HIR is bound
 //! to a particular crate instance. That is, it has cfg flags and features
 //! applied. So, the relation between syntax and HIR is many-to-one.
+//!
+//! HIR is the public API of the all of the compiler logic above syntax trees.
+//! It is written in "OO" style. Each type is self contained (as in, it knows it's
+//! parents and full context). It should be "clean code".
+//!
+//! `ra_hir_*` crates are the implementation of the compiler logic.
+//! They are written in "ECS" style, with relatively little abstractions.
+//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
+//!
+//! `ra_hir` is what insulates the "we don't know how to actually write an incremental compiler"
+//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
+//! https://www.tedinski.com/2018/02/06/system-boundaries.html.

 #![recursion_limit = "512"]

diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 55e634528..d982f6ffa 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -12,7 +12,8 @@ use hir_expand::ExpansionInfo;
 use ra_db::{FileId, FileRange};
 use ra_prof::profile;
 use ra_syntax::{
-    algo::skip_trivia_token, ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit,
+    algo::{find_node_at_offset, skip_trivia_token},
+    ast, AstNode, Direction, SyntaxNode, SyntaxToken, TextRange, TextUnit,
 };
 use rustc_hash::{FxHashMap, FxHashSet};

@@ -108,6 +109,17 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         token.value
     }

+    pub fn descend_node_at_offset<N: ast::AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextUnit,
+    ) -> Option<N> {
+        // Handle macro token cases
+        node.token_at_offset(offset)
+            .map(|token| self.descend_into_macros(token))
+            .find_map(|it| self.ancestors_with_macros(it.parent()).find_map(N::cast))
+    }
+
     pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
         let node = self.find_file(node.clone());
         original_range(self.db, node.as_ref())
@@ -129,6 +141,8 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
             .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
     }

+    /// Find a AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
+    /// search up until it is of the target AstNode type
     pub fn find_node_at_offset_with_macros<N: AstNode>(
         &self,
         node: &SyntaxNode,
@@ -137,6 +151,19 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
         self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
     }

+    /// Find a AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_node_at_offset_with_descend<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextUnit,
+    ) -> Option<N> {
+        if let Some(it) = find_node_at_offset(&node, offset) {
+            return Some(it);
+        }
+        self.descend_node_at_offset(&node, offset)
+    }
+
     pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
         self.analyze(expr.syntax()).type_of(self.db, &expr)
     }
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index 27a297e8b..5f9d53ecb 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -30,6 +30,7 @@ pub(crate) struct Expander {
     hygiene: Hygiene,
     ast_id_map: Arc<AstIdMap>,
     module: ModuleId,
+    recursive_limit: usize,
 }

 impl Expander {
@@ -41,7 +42,7 @@ impl Expander {
         let crate_def_map = db.crate_def_map(module.krate);
         let hygiene = Hygiene::new(db.upcast(), current_file_id);
         let ast_id_map = db.ast_id_map(current_file_id);
-        Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module }
+        Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module, recursive_limit: 0 }
     }

     pub(crate) fn enter_expand<T: ast::AstNode>(
@@ -50,6 +51,10 @@ impl Expander {
         local_scope: Option<&ItemScope>,
         macro_call: ast::MacroCall,
     ) -> Option<(Mark, T)> {
+        if self.recursive_limit > 1024 {
+            return None;
+        }
+
         let macro_call = InFile::new(self.current_file_id, &macro_call);

         if let Some(call_id) = macro_call.as_call_id(db, |path| {
@@ -73,6 +78,7 @@ impl Expander {
                     self.hygiene = Hygiene::new(db.upcast(), file_id);
                     self.current_file_id = file_id;
                     self.ast_id_map = db.ast_id_map(file_id);
+                    self.recursive_limit += 1;

                     return Some((mark, expr));
                 }
@@ -88,6 +94,7 @@ impl Expander {
         self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
         self.current_file_id = mark.file_id;
         self.ast_id_map = mem::take(&mut mark.ast_id_map);
+        self.recursive_limit -= 1;
         mark.bomb.defuse();
     }

diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index e8c58ed32..3cf0c66ea 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -453,7 +453,7 @@ impl ExprCollector<'_> {
                 }
             }
             ast::Expr::MacroCall(e) => {
-                if let Some(name) = is_macro_rules(&e) {
+                if let Some(name) = e.is_macro_rules().map(|it| it.as_name()) {
                     let mac = MacroDefId {
                         krate: Some(self.expander.module.krate),
                         ast_id: Some(self.expander.ast_id(&e)),
@@ -697,16 +697,6 @@ impl ExprCollector<'_> {
     }
 }

-fn is_macro_rules(m: &ast::MacroCall) -> Option<Name> {
-    let name = m.path()?.segment()?.name_ref()?.as_name();
-
-    if name == name![macro_rules] {
-        Some(m.name()?.as_name())
-    } else {
-        None
-    }
-}
-
 impl From<ast::BinOp> for BinaryOp {
     fn from(ast_op: ast::BinOp) -> Self {
         match ast_op {
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index d171d2dfd..5a696542f 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -6,7 +6,7 @@ use mbe::{ExpandResult, MacroRules};
 use ra_db::{salsa, SourceDatabase};
 use ra_parser::FragmentKind;
 use ra_prof::profile;
-use ra_syntax::{AstNode, Parse, SyntaxKind::*, SyntaxNode};
+use ra_syntax::{algo::diff, AstNode, Parse, SyntaxKind::*, SyntaxNode};

 use crate::{
     ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId,
@@ -238,7 +238,7 @@ pub fn parse_macro_with_arg(
     } else {
         db.macro_expand(macro_call_id)
     };
-    if let Some(err) = err {
+    if let Some(err) = &err {
         // Note:
         // The final goal we would like to make all parse_macro success,
         // such that the following log will not call anyway.
@@ -272,7 +272,25 @@ pub fn parse_macro_with_arg(
     let fragment_kind = to_fragment_kind(db, macro_call_id);

     let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
-    Some((parse, Arc::new(rev_token_map)))
+
+    if err.is_none() {
+        Some((parse, Arc::new(rev_token_map)))
+    } else {
+        // FIXME:
+        // In future, we should propagate the actual error with recovery information
+        // instead of ignore the error here.
+
+        // Safe check for recurisve identity macro
+        let node = parse.syntax_node();
+        let file: HirFileId = macro_file.into();
+        let call_node = file.call_node(db)?;
+
+        if !diff(&node, &call_node.value).is_empty() {
+            Some((parse, Arc::new(rev_token_map)))
+        } else {
+            None
+        }
+    }
 }

 /// Given a `MacroCallId`, return what `FragmentKind` it belongs to.
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs
index 14c8ed3a9..a02e3ee05 100644
--- a/crates/ra_hir_ty/src/tests/regression.rs
+++ b/crates/ra_hir_ty/src/tests/regression.rs
@@ -453,3 +453,34 @@ pub mod str {
     // should be Option<char>, but currently not because of Chalk ambiguity problem
     assert_eq!("(Option<{unknown}>, Option<{unknown}>)", super::type_at_pos(&db, pos));
 }
+
+#[test]
+fn issue_3642_bad_macro_stackover() {
+    let (db, pos) = TestDB::with_position(
+        r#"
+//- /main.rs
+#[macro_export]
+macro_rules! match_ast {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
+        $( ast::$ast:ident($it:ident) => $res:expr, )*
+        _ => $catch_all:expr $(,)?
+    }) => {{
+        $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+        { $catch_all }
+    }};
+}
+
+fn main() {
+    let anchor<|> = match_ast! {
+        match parent {
+            as => {},
+            _ => return None
+        }
+    };
+}"#,
+    );
+
+    assert_eq!("()", super::type_at_pos(&db, pos));
+}
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs
index 3ea0ac230..746cc86ba 100644
--- a/crates/ra_ide/src/references.rs
+++ b/crates/ra_ide/src/references.rs
@@ -94,12 +94,16 @@ pub(crate) fn find_all_refs(
     let sema = Semantics::new(db);
     let syntax = sema.parse(position.file_id).syntax().clone();

-    let (opt_name, search_kind) =
-        if let Some(name) = get_struct_def_name_for_struct_literal_search(&syntax, position) {
-            (Some(name), ReferenceKind::StructLiteral)
-        } else {
-            (find_node_at_offset::<ast::Name>(&syntax, position.offset), ReferenceKind::Other)
-        };
+    let (opt_name, search_kind) = if let Some(name) =
+        get_struct_def_name_for_struct_literal_search(&sema, &syntax, position)
+    {
+        (Some(name), ReferenceKind::StructLiteral)
+    } else {
+        (
+            sema.find_node_at_offset_with_descend::<ast::Name>(&syntax, position.offset),
+            ReferenceKind::Other,
+        )
+    };

     let RangeInfo { range, info: def } = find_name(&sema, &syntax, position, opt_name)?;

@@ -131,7 +135,8 @@ fn find_name(
         let range = name.syntax().text_range();
         return Some(RangeInfo::new(range, def));
     }
-    let name_ref = find_node_at_offset::<ast::NameRef>(&syntax, position.offset)?;
+    let name_ref =
+        sema.find_node_at_offset_with_descend::<ast::NameRef>(&syntax, position.offset)?;
     let def = classify_name_ref(sema, &name_ref)?.definition();
     let range = name_ref.syntax().text_range();
     Some(RangeInfo::new(range, def))
@@ -157,6 +162,7 @@ fn decl_access(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> Optio
 }

 fn get_struct_def_name_for_struct_literal_search(
+    sema: &Semantics<RootDatabase>,
     syntax: &SyntaxNode,
     position: FilePosition,
 ) -> Option<ast::Name> {
@@ -164,10 +170,18 @@ fn get_struct_def_name_for_struct_literal_search(
         if right.kind() != SyntaxKind::L_CURLY && right.kind() != SyntaxKind::L_PAREN {
             return None;
         }
-        if let Some(name) = find_node_at_offset::<ast::Name>(&syntax, left.text_range().start()) {
+        if let Some(name) =
+            sema.find_node_at_offset_with_descend::<ast::Name>(&syntax, left.text_range().start())
+        {
             return name.syntax().ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name());
         }
-        if find_node_at_offset::<ast::TypeParamList>(&syntax, left.text_range().start()).is_some() {
+        if sema
+            .find_node_at_offset_with_descend::<ast::TypeParamList>(
+                &syntax,
+                left.text_range().start(),
+            )
+            .is_some()
+        {
             return left.ancestors().find_map(ast::StructDef::cast).and_then(|l| l.name());
         }
     }
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index 7d1190af9..9acc6158a 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -250,6 +250,63 @@ mod tests {
     }

     #[test]
+    fn test_rename_for_macro_args_rev() {
+        test_rename(
+            r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+    let a = "test";
+    foo!(a<|>);
+}"#,
+            "b",
+            r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+    let b = "test";
+    foo!(b);
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_rename_for_macro_define_fn() {
+        test_rename(
+            r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(foo);
+fn main() {
+    fo<|>o();
+}"#,
+            "bar",
+            r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(bar);
+fn main() {
+    bar();
+}"#,
+        );
+    }
+
+    #[test]
+    fn test_rename_for_macro_define_fn_rev() {
+        test_rename(
+            r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(fo<|>o);
+fn main() {
+    foo();
+}"#,
+            "bar",
+            r#"
+macro_rules! define_fn {($id:ident) => { fn $id{} }}
+define_fn!(bar);
+fn main() {
+    bar();
+}"#,
+        );
+    }
+
+    #[test]
     fn test_rename_for_param_inside() {
         test_rename(
             r#"
diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs
index cf78d3e41..117454695 100644
--- a/crates/ra_ide_db/src/search.rs
+++ b/crates/ra_ide_db/src/search.rs
@@ -10,9 +10,7 @@ use hir::{DefWithBody, HasSource, ModuleSource, Semantics};
 use once_cell::unsync::Lazy;
 use ra_db::{FileId, FileRange, SourceDatabaseExt};
 use ra_prof::profile;
-use ra_syntax::{
-    algo::find_node_at_offset, ast, match_ast, AstNode, TextRange, TextUnit, TokenAtOffset,
-};
+use ra_syntax::{ast, match_ast, AstNode, TextRange, TextUnit};
 use rustc_hash::FxHashMap;
 use test_utils::tested_by;

@@ -219,21 +217,11 @@ impl Definition {
                     continue;
                 }

-                let name_ref =
-                    if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&tree, offset) {
+                let name_ref: ast::NameRef =
+                    if let Some(name_ref) = sema.find_node_at_offset_with_descend(&tree, offset) {
                         name_ref
                     } else {
-                        // Handle macro token cases
-                        let token = match tree.token_at_offset(offset) {
-                            TokenAtOffset::None => continue,
-                            TokenAtOffset::Single(t) => t,
-                            TokenAtOffset::Between(_, t) => t,
-                        };
-                        let expanded = sema.descend_into_macros(token);
-                        match ast::NameRef::cast(expanded.parent()) {
-                            Some(name_ref) => name_ref,
-                            _ => continue,
-                        }
+                        continue;
                     };

                 // FIXME: reuse sb
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs
index 884359ee3..0f46f93c1 100644
--- a/crates/ra_ide_db/src/symbol_index.rs
+++ b/crates/ra_ide_db/src/symbol_index.rs
@@ -362,6 +362,13 @@ fn to_symbol(node: &SyntaxNode) -> Option<(SmolStr, SyntaxNodePtr, TextRange)> {
             ast::TypeAliasDef(it) => { decl(it) },
             ast::ConstDef(it) => { decl(it) },
             ast::StaticDef(it) => { decl(it) },
+            ast::MacroCall(it) => {
+                if it.is_macro_rules().is_some() {
+                    decl(it)
+                } else {
+                    None
+                }
+            },
             _ => None,
         }
     }
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs
index c2857dbfc..c7f9bd873 100644
--- a/crates/ra_project_model/src/cargo_workspace.rs
+++ b/crates/ra_project_model/src/cargo_workspace.rs
@@ -6,7 +6,7 @@ use std::{
 };

 use anyhow::{Context, Result};
-use cargo_metadata::{CargoOpt, Message, MetadataCommand, PackageId};
+use cargo_metadata::{BuildScript, CargoOpt, Message, MetadataCommand, PackageId};
 use ra_arena::{Arena, Idx};
 use ra_cargo_watch::run_cargo;
 use ra_db::Edition;
@@ -254,7 +254,7 @@ pub fn load_out_dirs(
         "check".to_string(),
         "--message-format=json".to_string(),
         "--manifest-path".to_string(),
-        format!("{}", cargo_toml.display()),
+        cargo_toml.display().to_string(),
     ];

     if cargo_features.all_features {
@@ -263,19 +263,15 @@ pub fn load_out_dirs(
         // FIXME: `NoDefaultFeatures` is mutual exclusive with `SomeFeatures`
         // https://github.com/oli-obk/cargo_metadata/issues/79
         args.push("--no-default-features".to_string());
-    } else if !cargo_features.features.is_empty() {
-        for feature in &cargo_features.features {
-            args.push(feature.clone());
-        }
+    } else {
+        args.extend(cargo_features.features.iter().cloned());
     }

-    let mut res = FxHashMap::default();
-    let mut child = run_cargo(&args, cargo_toml.parent(), &mut |message| {
+    let mut acc = FxHashMap::default();
+    let res = run_cargo(&args, cargo_toml.parent(), &mut |message| {
         match message {
-            Message::BuildScriptExecuted(message) => {
-                let package_id = message.package_id;
-                let out_dir = message.out_dir;
-                res.insert(package_id, out_dir);
+            Message::BuildScriptExecuted(BuildScript { package_id, out_dir, .. }) => {
+                acc.insert(package_id, out_dir);
             }

             Message::CompilerArtifact(_) => (),
@@ -285,6 +281,9 @@ pub fn load_out_dirs(
         true
     });

-    let _ = child.wait();
-    res
+    if let Err(err) = res {
+        log::error!("Failed to load outdirs: {:?}", err);
+    }
+
+    acc
 }
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index b500a74fb..a3ef9acdc 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -336,7 +336,7 @@ impl ProjectWorkspace {
                         extern_source,
                     );
                     if cargo[tgt].kind == TargetKind::Lib {
-                        lib_tgt = Some(crate_id);
+                        lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
                         pkg_to_lib_crate.insert(pkg, crate_id);
                     }
                     if cargo[tgt].is_proc_macro {
@@ -363,7 +363,7 @@ impl ProjectWorkspace {

             // Set deps to the core, std and to the lib target of the current package
             for &from in pkg_crates.get(&pkg).into_iter().flatten() {
-                if let Some(to) = lib_tgt {
+                if let Some((to, name)) = lib_tgt.clone() {
                     if to != from
                         && crate_graph
                             .add_dep(
@@ -371,7 +371,7 @@ impl ProjectWorkspace {
                                 // For root projects with dashes in their name,
                                 // cargo metadata does not do any normalization,
                                 // so we do it ourselves currently
-                                CrateName::normalize_dashes(&cargo[pkg].name),
+                                CrateName::normalize_dashes(&name),
                                 to,
                             )
                             .is_err()
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 344cf0fbe..ffdbdc767 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -95,6 +95,10 @@ impl TreeDiff {
             builder.replace(from.text_range(), to.to_string())
         }
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.replacements.is_empty()
+    }
 }

 /// Finds minimal the diff, which, applied to `from`, will result in `to`.
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index c3ae8f90e..392731dac 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -4,7 +4,7 @@
 use itertools::Itertools;

 use crate::{
-    ast::{self, child_opt, children, AstNode, AttrInput, SyntaxNode},
+    ast::{self, child_opt, children, AstNode, AttrInput, NameOwner, SyntaxNode},
     SmolStr, SyntaxElement,
     SyntaxKind::*,
     SyntaxToken, T,
@@ -514,3 +514,14 @@ impl ast::Visibility {
         self.syntax().children_with_tokens().any(|it| it.kind() == T![super])
     }
 }
+
+impl ast::MacroCall {
+    pub fn is_macro_rules(&self) -> Option<ast::Name> {
+        let name_ref = self.path()?.segment()?.name_ref()?;
+        if name_ref.text() == "macro_rules" {
+            self.name()
+        } else {
+            None
+        }
+    }
+}