about summary refs log tree commit diff
path: root/xtask/src
diff options
context:
space:
mode:
Diffstat (limited to 'xtask/src')
-rw-r--r--xtask/src/codegen/gen_lint_completions.rs5
-rw-r--r--xtask/src/codegen/gen_syntax.rs6
-rw-r--r--xtask/src/dist.rs4
-rw-r--r--xtask/src/main.rs2
-rw-r--r--xtask/src/metrics.rs2
-rw-r--r--xtask/src/tidy.rs85
6 files changed, 92 insertions, 12 deletions
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 3b54b2489..54fcaa0e6 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -28,7 +28,7 @@ pub(crate) fn generate_lint_completions() -> Result<()> {
28 contents.push('\n'); 28 contents.push('\n');
29 29
30 cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?; 30 cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
31 generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?; 31 generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
32 let contents = reformat(&contents)?; 32 let contents = reformat(&contents)?;
33 33
34 let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs"); 34 let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
@@ -140,8 +140,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
140 .expect("should be prefixed by what it does") 140 .expect("should be prefixed by what it does")
141 .strip_suffix(suffix_to_strip) 141 .strip_suffix(suffix_to_strip)
142 .map(unescape) 142 .map(unescape)
143 .expect("should be suffixed by comma") 143 .expect("should be suffixed by comma");
144 .into();
145 } 144 }
146 } 145 }
147 clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id)); 146 clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index b0b9e30db..5435da76e 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -258,7 +258,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
258 for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") { 258 for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
259 res.push_str(chunk); 259 res.push_str(chunk);
260 if let Some(doc) = docs.next() { 260 if let Some(doc) = docs.next() {
261 write_doc_comment(&doc, &mut res); 261 write_doc_comment(doc, &mut res);
262 } 262 }
263 } 263 }
264 264
@@ -294,14 +294,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
294 294
295 let full_keywords_values = &grammar.keywords; 295 let full_keywords_values = &grammar.keywords;
296 let full_keywords = 296 let full_keywords =
297 full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw))); 297 full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
298 298
299 let all_keywords_values = 299 let all_keywords_values =
300 grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>(); 300 grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
301 let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw)); 301 let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
302 let all_keywords = all_keywords_values 302 let all_keywords = all_keywords_values
303 .iter() 303 .iter()
304 .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name))) 304 .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
305 .collect::<Vec<_>>(); 305 .collect::<Vec<_>>();
306 306
307 let literals = 307 let literals =
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 3a67294c5..c7363688a 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -45,8 +45,8 @@ fn dist_client(version: &str, release_tag: &str) -> Result<()> {
45 patch 45 patch
46 .replace(r#""version": "0.4.0-dev""#, &format!(r#""version": "{}""#, version)) 46 .replace(r#""version": "0.4.0-dev""#, &format!(r#""version": "{}""#, version))
47 .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag)) 47 .replace(r#""releaseTag": null"#, &format!(r#""releaseTag": "{}""#, release_tag))
48 .replace(r#""$generated-start": false,"#, "") 48 .replace(r#""$generated-start": {},"#, "")
49 .replace(",\n \"$generated-end\": false", ""); 49 .replace(",\n \"$generated-end\": {}", "");
50 50
51 if nightly { 51 if nightly {
52 patch.replace( 52 patch.replace(
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index d0bef7b7a..063e11a5a 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -1,4 +1,4 @@
1//! See https://github.com/matklad/cargo-xtask/. 1//! See <https://github.com/matklad/cargo-xtask/>.
2//! 2//!
3//! This binary defines various auxiliary build commands, which are not 3//! This binary defines various auxiliary build commands, which are not
4//! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask` 4//! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask`
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 34679062f..7b190d425 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -71,7 +71,7 @@ impl Metrics {
71 Ok(()) 71 Ok(())
72 } 72 }
73 fn measure_analysis_stats_self(&mut self) -> Result<()> { 73 fn measure_analysis_stats_self(&mut self) -> Result<()> {
74 self.measure_analysis_stats_path("self", &".") 74 self.measure_analysis_stats_path("self", ".")
75 } 75 }
76 fn measure_analysis_stats(&mut self, bench: &str) -> Result<()> { 76 fn measure_analysis_stats(&mut self, bench: &str) -> Result<()> {
77 self.measure_analysis_stats_path( 77 self.measure_analysis_stats_path(
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 9447d463d..06219d155 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -1,4 +1,7 @@
1use std::path::{Path, PathBuf}; 1use std::{
2 collections::HashSet,
3 path::{Path, PathBuf},
4};
2 5
3use xshell::{cmd, pushd, pushenv, read_file}; 6use xshell::{cmd, pushd, pushenv, read_file};
4 7
@@ -81,15 +84,19 @@ Please adjust docs/dev/lsp-extensions.md.
81#[test] 84#[test]
82fn rust_files_are_tidy() { 85fn rust_files_are_tidy() {
83 let mut tidy_docs = TidyDocs::default(); 86 let mut tidy_docs = TidyDocs::default();
87 let mut tidy_marks = TidyMarks::default();
84 for path in rust_files() { 88 for path in rust_files() {
85 let text = read_file(&path).unwrap(); 89 let text = read_file(&path).unwrap();
86 check_todo(&path, &text); 90 check_todo(&path, &text);
87 check_dbg(&path, &text); 91 check_dbg(&path, &text);
92 check_test_attrs(&path, &text);
88 check_trailing_ws(&path, &text); 93 check_trailing_ws(&path, &text);
89 deny_clippy(&path, &text); 94 deny_clippy(&path, &text);
90 tidy_docs.visit(&path, &text); 95 tidy_docs.visit(&path, &text);
96 tidy_marks.visit(&path, &text);
91 } 97 }
92 tidy_docs.finish(); 98 tidy_docs.finish();
99 tidy_marks.finish();
93} 100}
94 101
95#[test] 102#[test]
@@ -328,6 +335,36 @@ fn check_dbg(path: &Path, text: &str) {
328 } 335 }
329} 336}
330 337
338fn check_test_attrs(path: &Path, text: &str) {
339 let ignore_rule =
340 "https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/dev/style.md#ignore";
341 let need_ignore: &[&str] = &[
342 // Special case to run `#[ignore]` tests
343 "ide/src/runnables.rs",
344 // A legit test which needs to be ignored, as it takes too long to run
345 // :(
346 "hir_def/src/nameres/collector.rs",
347 // Obviously needs ignore.
348 "ide_assists/src/handlers/toggle_ignore.rs",
349 // See above.
350 "ide_assists/src/tests/generated.rs",
351 ];
352 if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
353 panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),)
354 }
355
356 let panic_rule =
357 "https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
358 let need_panic: &[&str] = &["test_utils/src/fixture.rs"];
359 if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
360 panic!(
361 "\ndon't add `#[should_panic]` tests, see:\n\n {}\n\n {}\n",
362 panic_rule,
363 path.display(),
364 )
365 }
366}
367
331fn check_trailing_ws(path: &Path, text: &str) { 368fn check_trailing_ws(path: &Path, text: &str) {
332 if is_exclude_dir(path, &["test_data"]) { 369 if is_exclude_dir(path, &["test_data"]) {
333 return; 370 return;
@@ -366,7 +403,10 @@ impl TidyDocs {
366 self.contains_fixme.push(path.to_path_buf()); 403 self.contains_fixme.push(path.to_path_buf());
367 } 404 }
368 } else { 405 } else {
369 if text.contains("// Feature:") || text.contains("// Assist:") { 406 if text.contains("// Feature:")
407 || text.contains("// Assist:")
408 || text.contains("// Diagnostic:")
409 {
370 return; 410 return;
371 } 411 }
372 self.missing_docs.push(path.display().to_string()); 412 self.missing_docs.push(path.display().to_string());
@@ -408,6 +448,39 @@ fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
408 .any(|it| dirs_to_exclude.contains(&it)) 448 .any(|it| dirs_to_exclude.contains(&it))
409} 449}
410 450
451#[derive(Default)]
452struct TidyMarks {
453 hits: HashSet<String>,
454 checks: HashSet<String>,
455}
456
457impl TidyMarks {
458 fn visit(&mut self, _path: &Path, text: &str) {
459 for line in text.lines() {
460 if let Some(mark) = find_mark(line, "hit") {
461 self.hits.insert(mark.to_string());
462 }
463 if let Some(mark) = find_mark(line, "check") {
464 self.checks.insert(mark.to_string());
465 }
466 if let Some(mark) = find_mark(line, "check_count") {
467 self.checks.insert(mark.to_string());
468 }
469 }
470 }
471
472 fn finish(self) {
473 assert!(!self.hits.is_empty());
474
475 let diff: Vec<_> =
476 self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
477
478 if !diff.is_empty() {
479 panic!("unpaired marks: {:?}", diff)
480 }
481 }
482}
483
411#[allow(deprecated)] 484#[allow(deprecated)]
412fn stable_hash(text: &str) -> u64 { 485fn stable_hash(text: &str) -> u64 {
413 use std::hash::{Hash, Hasher, SipHasher}; 486 use std::hash::{Hash, Hasher, SipHasher};
@@ -417,3 +490,11 @@ fn stable_hash(text: &str) -> u64 {
417 text.hash(&mut hasher); 490 text.hash(&mut hasher);
418 hasher.finish() 491 hasher.finish()
419} 492}
493
494fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
495 let idx = text.find(mark)?;
496 let text = text[idx + mark.len()..].strip_prefix("!(")?;
497 let idx = text.find(|c: char| !(c.is_alphanumeric() || c == '_'))?;
498 let text = &text[..idx];
499 Some(text)
500}