 crates/ra_hir_def/src/body.rs             |  9
 crates/ra_hir_expand/src/db.rs            | 24
 crates/ra_hir_ty/src/tests/regression.rs  | 31
 crates/ra_project_model/src/lib.rs        |  6
 crates/ra_syntax/src/algo.rs              |  4
 xtask/src/codegen/gen_assists_docs.rs     | 10
 xtask/src/lib.rs                          | 16
 xtask/tests/tidy-tests/main.rs            | 21
 8 files changed, 88 insertions, 33 deletions
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index 27a297e8b..5f9d53ecb 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -30,6 +30,7 @@ pub(crate) struct Expander {
     hygiene: Hygiene,
     ast_id_map: Arc<AstIdMap>,
     module: ModuleId,
+    recursive_limit: usize,
 }
 
 impl Expander {
@@ -41,7 +42,7 @@ impl Expander {
         let crate_def_map = db.crate_def_map(module.krate);
         let hygiene = Hygiene::new(db.upcast(), current_file_id);
         let ast_id_map = db.ast_id_map(current_file_id);
-        Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module }
+        Expander { crate_def_map, current_file_id, hygiene, ast_id_map, module, recursive_limit: 0 }
     }
 
     pub(crate) fn enter_expand<T: ast::AstNode>(
@@ -50,6 +51,10 @@ impl Expander {
         local_scope: Option<&ItemScope>,
         macro_call: ast::MacroCall,
     ) -> Option<(Mark, T)> {
+        if self.recursive_limit > 1024 {
+            return None;
+        }
+
         let macro_call = InFile::new(self.current_file_id, &macro_call);
 
         if let Some(call_id) = macro_call.as_call_id(db, |path| {
@@ -73,6 +78,7 @@ impl Expander {
             self.hygiene = Hygiene::new(db.upcast(), file_id);
             self.current_file_id = file_id;
             self.ast_id_map = db.ast_id_map(file_id);
+            self.recursive_limit += 1;
 
             return Some((mark, expr));
         }
@@ -88,6 +94,7 @@ impl Expander {
         self.hygiene = Hygiene::new(db.upcast(), mark.file_id);
         self.current_file_id = mark.file_id;
         self.ast_id_map = mem::take(&mut mark.ast_id_map);
+        self.recursive_limit -= 1;
         mark.bomb.defuse();
     }
 
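The hunks above put a hard ceiling on macro expansion depth: `recursive_limit` is bumped each time `enter_expand` actually descends into an expansion and decremented again in `exit`, so only a chain of nested expansions more than 1024 deep gets refused. A minimal, self-contained sketch of the same bookkeeping (the `Expander`/`Mark` types here are illustrative stand-ins, not the real rust-analyzer definitions):

    // Illustrative sketch only: a depth counter that refuses to expand past a
    // fixed limit, mirroring the recursive_limit bookkeeping added in body.rs.
    const RECURSION_LIMIT: usize = 1024;

    struct Mark;

    struct Expander {
        depth: usize,
    }

    impl Expander {
        fn enter_expand(&mut self) -> Option<Mark> {
            if self.depth > RECURSION_LIMIT {
                return None; // bail out instead of overflowing the stack
            }
            self.depth += 1;
            Some(Mark)
        }

        fn exit(&mut self, _mark: Mark) {
            self.depth -= 1;
        }
    }

    fn main() {
        let mut expander = Expander { depth: 0 };
        let mark = expander.enter_expand().expect("shallow expansion succeeds");
        expander.exit(mark);
        assert_eq!(expander.depth, 0);
    }
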
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index d171d2dfd..5a696542f 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -6,7 +6,7 @@ use mbe::{ExpandResult, MacroRules};
 use ra_db::{salsa, SourceDatabase};
 use ra_parser::FragmentKind;
 use ra_prof::profile;
-use ra_syntax::{AstNode, Parse, SyntaxKind::*, SyntaxNode};
+use ra_syntax::{algo::diff, AstNode, Parse, SyntaxKind::*, SyntaxNode};
 
 use crate::{
     ast_id_map::AstIdMap, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallLoc, EagerMacroId,
@@ -238,7 +238,7 @@ pub fn parse_macro_with_arg(
     } else {
         db.macro_expand(macro_call_id)
     };
-    if let Some(err) = err {
+    if let Some(err) = &err {
         // Note:
         // The final goal we would like to make all parse_macro success,
         // such that the following log will not call anyway.
@@ -272,7 +272,25 @@ pub fn parse_macro_with_arg(
     let fragment_kind = to_fragment_kind(db, macro_call_id);
 
     let (parse, rev_token_map) = mbe::token_tree_to_syntax_node(&tt, fragment_kind).ok()?;
-    Some((parse, Arc::new(rev_token_map)))
+
+    if err.is_none() {
+        Some((parse, Arc::new(rev_token_map)))
+    } else {
+        // FIXME:
+        // In the future, we should propagate the actual error with recovery information
+        // instead of ignoring the error here.
+
+        // Safety check for a recursive identity macro.
+        let node = parse.syntax_node();
+        let file: HirFileId = macro_file.into();
+        let call_node = file.call_node(db)?;
+
+        if !diff(&node, &call_node.value).is_empty() {
+            Some((parse, Arc::new(rev_token_map)))
+        } else {
+            None
+        }
+    }
 }
 
 /// Given a `MacroCallId`, return what `FragmentKind` it belongs to.
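The second hunk changes what happens when expansion reported an error: the recovered parse is kept only if it actually differs from the macro call node itself, because an erroneous expansion that reproduces its own call site would just be expanded again forever. A rough sketch of that decision, with plain strings standing in for syntax trees and string inequality standing in for `!diff(..).is_empty()` (the names here are hypothetical, not the real rust-analyzer API):

    // Sketch under simplifying assumptions: strings instead of SyntaxNodes,
    // string comparison instead of algo::diff.
    fn keep_recovered_expansion(expanded: &str, call_site: &str, had_error: bool) -> Option<String> {
        if !had_error {
            return Some(expanded.to_string());
        }
        // A recovered expansion identical to its own call site is dropped, so the
        // expander cannot loop on a "recursive identity" macro.
        if expanded != call_site {
            Some(expanded.to_string())
        } else {
            None
        }
    }

    fn main() {
        assert!(keep_recovered_expansion("match (parent) { }", "match_ast! { .. }", true).is_some());
        assert!(keep_recovered_expansion("match_ast! { .. }", "match_ast! { .. }", true).is_none());
        assert!(keep_recovered_expansion("anything", "anything", false).is_some());
    }
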
diff --git a/crates/ra_hir_ty/src/tests/regression.rs b/crates/ra_hir_ty/src/tests/regression.rs
index 14c8ed3a9..a02e3ee05 100644
--- a/crates/ra_hir_ty/src/tests/regression.rs
+++ b/crates/ra_hir_ty/src/tests/regression.rs
@@ -453,3 +453,34 @@ pub mod str {
     // should be Option<char>, but currently not because of Chalk ambiguity problem
     assert_eq!("(Option<{unknown}>, Option<{unknown}>)", super::type_at_pos(&db, pos));
 }
+
+#[test]
+fn issue_3642_bad_macro_stackover() {
+    let (db, pos) = TestDB::with_position(
+        r#"
+//- /main.rs
+#[macro_export]
+macro_rules! match_ast {
+    (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+    (match ($node:expr) {
+        $( ast::$ast:ident($it:ident) => $res:expr, )*
+        _ => $catch_all:expr $(,)?
+    }) => {{
+        $( if let Some($it) = ast::$ast::cast($node.clone()) { $res } else )*
+        { $catch_all }
+    }};
+}
+
+fn main() {
+    let anchor<|> = match_ast! {
+        match parent {
+            as => {},
+            _ => return None
+        }
+    };
+}"#,
+    );
+
+    assert_eq!("()", super::type_at_pos(&db, pos));
+}
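The regression test pins down why issue 3642 blew the stack: the first `match_ast!` rule re-invokes the macro with the scrutinee wrapped in parentheses, and with the malformed `as => {}` arm neither rule can finish matching, so the error-recovering expander previously kept re-expanding without making progress. The sketch below is a simplified stand-in (not the real `match_ast!`) showing the same self-reinvoking shape terminating normally on well-formed input:

    // Illustrative macro with the same two-rule shape as match_ast!: the first
    // rule re-invokes the macro, the second one terminates the recursion.
    macro_rules! match_ish {
        (match $node:ident { $($tt:tt)* }) => { match_ish!(match ($node) { $($tt)* }) };
        (match ($node:expr) { _ => $catch_all:expr $(,)? }) => {{
            let _ = &$node;
            $catch_all
        }};
    }

    fn main() {
        let parent = 1;
        // Two expansion steps: ident form -> parenthesized expr form -> final block.
        let v = match_ish!(match parent { _ => 42 });
        assert_eq!(v, 42);
    }
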
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index b500a74fb..a3ef9acdc 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -336,7 +336,7 @@ impl ProjectWorkspace {
                         extern_source,
                     );
                     if cargo[tgt].kind == TargetKind::Lib {
-                        lib_tgt = Some(crate_id);
+                        lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
                         pkg_to_lib_crate.insert(pkg, crate_id);
                     }
                     if cargo[tgt].is_proc_macro {
@@ -363,7 +363,7 @@ impl ProjectWorkspace {
 
             // Set deps to the core, std and to the lib target of the current package
             for &from in pkg_crates.get(&pkg).into_iter().flatten() {
-                if let Some(to) = lib_tgt {
+                if let Some((to, name)) = lib_tgt.clone() {
                     if to != from
                         && crate_graph
                             .add_dep(
@@ -371,7 +371,7 @@ impl ProjectWorkspace {
                                 // For root projects with dashes in their name,
                                 // cargo metadata does not do any normalization,
                                 // so we do it ourselves currently
-                                CrateName::normalize_dashes(&cargo[pkg].name),
+                                CrateName::normalize_dashes(&name),
                                 to,
                             )
                             .is_err()
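`lib_tgt` now carries the library target's own name alongside its crate id, and the dependency edge is built from that name rather than from the package name, since the two can differ. The dash normalization it still goes through can be pictured with this small sketch (an illustrative re-implementation, assuming the usual dash-to-underscore rule for crate names; not the actual `CrateName` API):

    // Illustrative only: approximates what CrateName::normalize_dashes is used
    // for here, assuming the usual dash-to-underscore mapping for crate names.
    fn normalize_dashes(name: &str) -> String {
        name.replace('-', "_")
    }

    fn main() {
        // A lib target named "ra-project-model" is referred to in Rust source as
        // `ra_project_model`, so the crate-graph edge must use the normalized name.
        assert_eq!(normalize_dashes("ra-project-model"), "ra_project_model");
    }
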
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index 344cf0fbe..ffdbdc767 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -95,6 +95,10 @@ impl TreeDiff {
             builder.replace(from.text_range(), to.to_string())
         }
     }
+
+    pub fn is_empty(&self) -> bool {
+        self.replacements.is_empty()
+    }
 }
 
 /// Finds minimal the diff, which, applied to `from`, will result in `to`.
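`TreeDiff::is_empty` is the query db.rs now relies on: it reports whether the computed diff contains any replacements at all, i.e. whether the two trees are effectively identical. A hedged usage sketch (assuming `algo::diff` keeps its two-`&SyntaxNode` signature; not checked against a specific rust-analyzer revision):

    use ra_syntax::{algo::diff, AstNode, SourceFile};

    fn main() {
        let a = SourceFile::parse("fn f() {}").tree();
        let b = SourceFile::parse("fn f() {}").tree();
        let c = SourceFile::parse("fn g() {}").tree();

        // Identical trees yield an empty diff; differing trees do not.
        assert!(diff(a.syntax(), b.syntax()).is_empty());
        assert!(!diff(a.syntax(), c.syntax()).is_empty());
    }
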
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs
index 6da5ca89e..31d606535 100644
--- a/xtask/src/codegen/gen_assists_docs.rs
+++ b/xtask/src/codegen/gen_assists_docs.rs
@@ -4,7 +4,7 @@ use std::{fs, path::Path};
 
 use crate::{
     codegen::{self, extract_comment_blocks_with_empty_lines, Mode},
-    project_root, Result,
+    project_root, rust_files, Result,
 };
 
 pub fn generate_assists_docs(mode: Mode) -> Result<()> {
@@ -46,12 +46,8 @@ fn reveal_hash_comments(text: &str) -> String {
 
 fn collect_assists() -> Result<Vec<Assist>> {
     let mut res = Vec::new();
-    for entry in fs::read_dir(project_root().join(codegen::ASSISTS_DIR))? {
-        let entry = entry?;
-        let path = entry.path();
-        if path.is_file() {
-            collect_file(&mut res, path.as_path())?;
-        }
+    for path in rust_files(&project_root().join(codegen::ASSISTS_DIR)) {
+        collect_file(&mut res, path.as_path())?;
     }
     res.sort_by(|lhs, rhs| lhs.id.cmp(&rhs.id));
     return Ok(res);
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index e1472e85d..4f01f84fb 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -17,6 +17,7 @@ use std::{
     path::{Path, PathBuf},
     process::{Command, Stdio},
 };
+use walkdir::{DirEntry, WalkDir};
 
 use crate::{
     codegen::Mode,
@@ -37,6 +38,21 @@ pub fn project_root() -> PathBuf {
         .to_path_buf()
 }
 
+pub fn rust_files(path: &Path) -> impl Iterator<Item = PathBuf> {
+    let iter = WalkDir::new(path);
+    return iter
+        .into_iter()
+        .filter_entry(|e| !is_hidden(e))
+        .map(|e| e.unwrap())
+        .filter(|e| !e.file_type().is_dir())
+        .map(|e| e.into_path())
+        .filter(|path| path.extension().map(|it| it == "rs").unwrap_or(false));
+
+    fn is_hidden(entry: &DirEntry) -> bool {
+        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
+    }
+}
+
 pub fn run_rustfmt(mode: Mode) -> Result<()> {
     let _dir = pushd(project_root());
     ensure_rustfmt()?;
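Hoisting the walker into `xtask::rust_files` lets both the tidy test and the assist-doc generator share one recursive, hidden-entry-skipping iteration over `.rs` files (the old `fs::read_dir` loop in gen_assists_docs.rs only looked at the top level of the directory). A minimal usage sketch, relying only on the function as defined in the hunk above:

    use xtask::{project_root, rust_files};

    fn main() {
        // Walk every non-hidden `.rs` file under crates/, recursing into subdirectories.
        for path in rust_files(&project_root().join("crates")) {
            println!("{}", path.display());
        }
    }
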
diff --git a/xtask/tests/tidy-tests/main.rs b/xtask/tests/tidy-tests/main.rs
index 5ae86c87c..80911a68e 100644
--- a/xtask/tests/tidy-tests/main.rs
+++ b/xtask/tests/tidy-tests/main.rs
@@ -5,13 +5,12 @@ use std::{
     path::{Path, PathBuf},
 };
 
-use walkdir::{DirEntry, WalkDir};
-use xtask::{not_bash::fs2, project_root};
+use xtask::{not_bash::fs2, project_root, rust_files};
 
 #[test]
 fn rust_files_are_tidy() {
     let mut tidy_docs = TidyDocs::default();
-    for path in rust_files() {
+    for path in rust_files(&project_root().join("crates")) {
         let text = fs2::read_to_string(&path).unwrap();
         check_todo(&path, &text);
         check_trailing_ws(&path, &text);
@@ -142,19 +141,3 @@ fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
 
     false
 }
-
-fn rust_files() -> impl Iterator<Item = PathBuf> {
-    let crates = project_root().join("crates");
-    let iter = WalkDir::new(crates);
-    return iter
-        .into_iter()
-        .filter_entry(|e| !is_hidden(e))
-        .map(|e| e.unwrap())
-        .filter(|e| !e.file_type().is_dir())
-        .map(|e| e.into_path())
-        .filter(|path| path.extension().map(|it| it == "rs").unwrap_or(false));
-
-    fn is_hidden(entry: &DirEntry) -> bool {
-        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
-    }
-}