path: root/xtask/src
Diffstat (limited to 'xtask/src')
-rw-r--r--  xtask/src/ast_src.rs                        2
-rw-r--r--  xtask/src/codegen/gen_lint_completions.rs   5
-rw-r--r--  xtask/src/codegen/gen_syntax.rs             6
-rw-r--r--  xtask/src/metrics.rs                        2
-rw-r--r--  xtask/src/tidy.rs                          54
5 files changed, 59 insertions(+), 10 deletions(-)
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs
index 0fd1d13e6..fe37d0245 100644
--- a/xtask/src/ast_src.rs
+++ b/xtask/src/ast_src.rs
@@ -184,7 +184,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
184 "ITEM_LIST", 184 "ITEM_LIST",
185 "ASSOC_ITEM_LIST", 185 "ASSOC_ITEM_LIST",
186 "ATTR", 186 "ATTR",
187 "META_ITEM", // not an item actually 187 "META",
188 "USE_TREE", 188 "USE_TREE",
189 "USE_TREE_LIST", 189 "USE_TREE_LIST",
190 "PATH", 190 "PATH",
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 3b54b2489..54fcaa0e6 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -28,7 +28,7 @@ pub(crate) fn generate_lint_completions() -> Result<()> {
     contents.push('\n');
 
     cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
-    generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?;
+    generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
     let contents = reformat(&contents)?;
 
     let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
@@ -140,8 +140,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
                 .expect("should be prefixed by what it does")
                 .strip_suffix(suffix_to_strip)
                 .map(unescape)
-                .expect("should be suffixed by comma")
-                .into();
+                .expect("should be suffixed by comma");
         }
     }
     clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
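
Side note on the recurring pattern in this commit: the `&` borrows removed here and in gen_syntax.rs and metrics.rs below are redundant because the callee already takes a reference and deref coercion covers the extra level; clippy reports them as needless_borrow. A minimal standalone sketch, with a hypothetical helper that mirrors the `&Path` parameter (not part of the commit):

use std::path::Path;

// Hypothetical helper with the same parameter type as generate_descriptor_clippy's `path`.
fn takes_path(p: &Path) -> usize {
    p.as_os_str().len()
}

fn main() {
    // Path::new already returns &Path, so borrowing it again is redundant.
    let a = takes_path(&Path::new("./target/clippy_lints.json")); // flagged by clippy::needless_borrow
    let b = takes_path(Path::new("./target/clippy_lints.json")); // preferred form
    assert_eq!(a, b);
}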
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index b0b9e30db..5435da76e 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -258,7 +258,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
     for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
         res.push_str(chunk);
         if let Some(doc) = docs.next() {
-            write_doc_comment(&doc, &mut res);
+            write_doc_comment(doc, &mut res);
         }
     }
 
@@ -294,14 +294,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
 
     let full_keywords_values = &grammar.keywords;
     let full_keywords =
-        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
+        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
 
     let all_keywords_values =
         grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
     let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
     let all_keywords = all_keywords_values
         .iter()
-        .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
+        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
         .collect::<Vec<_>>();
 
     let literals =
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 34679062f..7b190d425 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -71,7 +71,7 @@ impl Metrics {
         Ok(())
     }
     fn measure_analysis_stats_self(&mut self) -> Result<()> {
-        self.measure_analysis_stats_path("self", &".")
+        self.measure_analysis_stats_path("self", ".")
     }
     fn measure_analysis_stats(&mut self, bench: &str) -> Result<()> {
         self.measure_analysis_stats_path(
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 9447d463d..f2ba8efef 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -1,4 +1,7 @@
-use std::path::{Path, PathBuf};
+use std::{
+    collections::HashSet,
+    path::{Path, PathBuf},
+};
 
 use xshell::{cmd, pushd, pushenv, read_file};
 
@@ -81,6 +84,7 @@ Please adjust docs/dev/lsp-extensions.md.
 #[test]
 fn rust_files_are_tidy() {
     let mut tidy_docs = TidyDocs::default();
+    let mut tidy_marks = TidyMarks::default();
     for path in rust_files() {
         let text = read_file(&path).unwrap();
         check_todo(&path, &text);
@@ -88,8 +92,10 @@ fn rust_files_are_tidy() {
         check_trailing_ws(&path, &text);
         deny_clippy(&path, &text);
         tidy_docs.visit(&path, &text);
+        tidy_marks.visit(&path, &text);
     }
     tidy_docs.finish();
+    tidy_marks.finish();
 }
 
 #[test]
@@ -366,7 +372,10 @@ impl TidyDocs {
                 self.contains_fixme.push(path.to_path_buf());
             }
         } else {
-            if text.contains("// Feature:") || text.contains("// Assist:") {
+            if text.contains("// Feature:")
+                || text.contains("// Assist:")
+                || text.contains("// Diagnostic:")
+            {
                 return;
             }
             self.missing_docs.push(path.display().to_string());
@@ -408,6 +417,39 @@ fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
         .any(|it| dirs_to_exclude.contains(&it))
 }
 
+#[derive(Default)]
+struct TidyMarks {
+    hits: HashSet<String>,
+    checks: HashSet<String>,
+}
+
+impl TidyMarks {
+    fn visit(&mut self, _path: &Path, text: &str) {
+        for line in text.lines() {
+            if let Some(mark) = find_mark(line, "hit") {
+                self.hits.insert(mark.to_string());
+            }
+            if let Some(mark) = find_mark(line, "check") {
+                self.checks.insert(mark.to_string());
+            }
+            if let Some(mark) = find_mark(line, "check_count") {
+                self.checks.insert(mark.to_string());
+            }
+        }
+    }
+
+    fn finish(self) {
+        assert!(!self.hits.is_empty());
+
+        let diff: Vec<_> =
+            self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
+
+        if !diff.is_empty() {
+            panic!("unpaired marks: {:?}", diff)
+        }
+    }
+}
+
 #[allow(deprecated)]
 fn stable_hash(text: &str) -> u64 {
     use std::hash::{Hash, Hasher, SipHasher};
@@ -417,3 +459,11 @@ fn stable_hash(text: &str) -> u64 {
     text.hash(&mut hasher);
     hasher.finish()
 }
+
+fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
+    let idx = text.find(mark)?;
+    let text = text[idx + mark.len()..].strip_prefix("!(")?;
+    let idx = text.find(|c: char| !(c.is_alphanumeric() || c == '_'))?;
+    let text = &text[..idx];
+    Some(text)
+}
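
For context, a minimal sketch (not part of the commit) of what find_mark extracts and how TidyMarks pairs marks up. The input lines are hypothetical examples of hit!/check! coverage-mark invocations; the actual tidy test scans every Rust file in the repository and panics if the sets of hit and checked mark names differ:

fn main() {
    // Hypothetical source lines containing coverage-mark macros.
    let hit_line = "        mark::hit!(parse_attr);";
    let check_line = "    mark::check!(parse_attr);";

    // find_mark takes everything after "hit!(" / "check!(" up to the first
    // character that is not alphanumeric or '_', i.e. the mark name.
    assert_eq!(find_mark(hit_line, "hit"), Some("parse_attr"));
    assert_eq!(find_mark(check_line, "check"), Some("parse_attr"));

    // TidyMarks::finish then takes the symmetric difference of the two name
    // sets, so a hit!() with no matching check!() (or vice versa) fails the test.
}

// Copied verbatim from the commit above so the sketch is self-contained.
fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
    let idx = text.find(mark)?;
    let text = text[idx + mark.len()..].strip_prefix("!(")?;
    let idx = text.find(|c: char| !(c.is_alphanumeric() || c == '_'))?;
    let text = &text[..idx];
    Some(text)
}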