Diffstat (limited to 'xtask')
-rw-r--r--  xtask/Cargo.toml                           |  2
-rw-r--r--  xtask/src/ast_src.rs                       |  2
-rw-r--r--  xtask/src/codegen/gen_lint_completions.rs  | 94
-rw-r--r--  xtask/src/codegen/gen_syntax.rs            | 26
-rw-r--r--  xtask/src/main.rs                          |  2
-rw-r--r--  xtask/src/metrics.rs                       |  7
-rw-r--r--  xtask/src/release/changelog.rs             |  4
-rw-r--r--  xtask/src/tidy.rs                          | 70
8 files changed, 157 insertions, 50 deletions
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index 997770958..c4fc91386 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -11,7 +11,7 @@ anyhow = "1.0.26"
 flate2 = "1.0"
 proc-macro2 = "1.0.8"
 quote = "1.0.2"
-ungrammar = "=1.13"
+ungrammar = "=1.14"
 walkdir = "2.3.1"
 write-json = "0.1.0"
 xshell = "0.1"
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs
index 0fd1d13e6..fe37d0245 100644
--- a/xtask/src/ast_src.rs
+++ b/xtask/src/ast_src.rs
@@ -184,7 +184,7 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
184 "ITEM_LIST", 184 "ITEM_LIST",
185 "ASSOC_ITEM_LIST", 185 "ASSOC_ITEM_LIST",
186 "ATTR", 186 "ATTR",
187 "META_ITEM", // not an item actually 187 "META",
188 "USE_TREE", 188 "USE_TREE",
189 "USE_TREE_LIST", 189 "USE_TREE_LIST",
190 "PATH", 190 "PATH",
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 24dbc6a39..54fcaa0e6 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -1,4 +1,5 @@
 //! Generates descriptors structure for unstable feature from Unstable Book
+use std::borrow::Cow;
 use std::fmt::Write;
 use std::path::{Path, PathBuf};
 
@@ -12,25 +13,75 @@ pub(crate) fn generate_lint_completions() -> Result<()> {
12 cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?; 13 cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
13 } 14 }
14 15
15 let mut contents = String::from("use crate::completions::attribute::LintCompletion;\n\n"); 16 let mut contents = String::from(
16 generate_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?; 17 r#"pub struct Lint {
18 pub label: &'static str,
19 pub description: &'static str,
20}
21
22"#,
23 );
24 generate_lint_descriptor(&mut contents)?;
25 contents.push('\n');
26
27 generate_feature_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?;
17 contents.push('\n'); 28 contents.push('\n');
18 29
19 cmd!("curl http://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?; 30 cmd!("curl https://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?;
20 generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?; 31 generate_descriptor_clippy(&mut contents, Path::new("./target/clippy_lints.json"))?;
21 let contents = reformat(&contents)?; 32 let contents = reformat(&contents)?;
22 33
23 let destination = 34 let destination = project_root().join("crates/ide_db/src/helpers/generated_lints.rs");
24 project_root().join("crates/ide_completion/src/generated_lint_completions.rs");
25 ensure_file_contents(destination.as_path(), &contents)?; 35 ensure_file_contents(destination.as_path(), &contents)?;
26 36
27 Ok(()) 37 Ok(())
28} 38}
29 39
30fn generate_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> { 40fn generate_lint_descriptor(buf: &mut String) -> Result<()> {
31 buf.push_str(r#"pub(super) const FEATURES: &[LintCompletion] = &["#); 41 let stdout = cmd!("rustc -W help").read()?;
42 let start_lints =
43 stdout.find("---- ------- -------").ok_or_else(|| anyhow::format_err!(""))?;
44 let start_lint_groups =
45 stdout.find("---- ---------").ok_or_else(|| anyhow::format_err!(""))?;
46 let end_lints =
47 stdout.find("Lint groups provided by rustc:").ok_or_else(|| anyhow::format_err!(""))?;
48 let end_lint_groups = stdout
49 .find("Lint tools like Clippy can provide additional lints and lint groups.")
50 .ok_or_else(|| anyhow::format_err!(""))?;
51 buf.push_str(r#"pub const DEFAULT_LINTS: &[Lint] = &["#);
52 buf.push('\n');
53 let mut lints = stdout[start_lints..end_lints]
54 .lines()
55 .skip(1)
56 .filter(|l| !l.is_empty())
57 .map(|line| {
58 let (name, rest) = line.trim().split_once(char::is_whitespace).unwrap();
59 let (_default_level, description) =
60 rest.trim().split_once(char::is_whitespace).unwrap();
61 (name.trim(), Cow::Borrowed(description.trim()))
62 })
63 .collect::<Vec<_>>();
64 lints.extend(
65 stdout[start_lint_groups..end_lint_groups].lines().skip(1).filter(|l| !l.is_empty()).map(
66 |line| {
67 let (name, lints) = line.trim().split_once(char::is_whitespace).unwrap();
68 (name.trim(), format!("lint group for: {}", lints.trim()).into())
69 },
70 ),
71 );
72
73 lints.sort_by(|(ident, _), (ident2, _)| ident.cmp(ident2));
74 lints.into_iter().for_each(|(name, description)| {
75 push_lint_completion(buf, &name.replace("-", "_"), &description)
76 });
77 buf.push_str("];\n");
78 Ok(())
79}
80
81fn generate_feature_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
82 buf.push_str(r#"pub const FEATURES: &[Lint] = &["#);
32 buf.push('\n'); 83 buf.push('\n');
33 ["language-features", "library-features"] 84 let mut vec = ["language-features", "library-features"]
34 .iter() 85 .iter()
35 .flat_map(|it| WalkDir::new(src_dir.join(it))) 86 .flat_map(|it| WalkDir::new(src_dir.join(it)))
36 .filter_map(|e| e.ok()) 87 .filter_map(|e| e.ok())
@@ -38,13 +89,16 @@ fn generate_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> {
             // Get all `.md ` files
             entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md"
         })
-        .for_each(|entry| {
+        .map(|entry| {
             let path = entry.path();
             let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_");
             let doc = read_file(path).unwrap();
-
-            push_lint_completion(buf, &feature_ident, &doc);
-        });
+            (feature_ident, doc)
+        })
+        .collect::<Vec<_>>();
+    vec.sort_by(|(feature_ident, _), (feature_ident2, _)| feature_ident.cmp(feature_ident2));
+    vec.into_iter()
+        .for_each(|(feature_ident, doc)| push_lint_completion(buf, &feature_ident, &doc));
     buf.push_str("];\n");
     Ok(())
 }
@@ -55,6 +109,10 @@ struct ClippyLint {
     id: String,
 }
 
+fn unescape(s: &str) -> String {
+    s.replace(r#"\""#, "").replace(r#"\n"#, "\n").replace(r#"\r"#, "")
+}
+
 fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
     let file_content = read_file(path)?;
     let mut clippy_lints: Vec<ClippyLint> = vec![];
@@ -81,12 +139,12 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
                 .strip_prefix(prefix_to_strip)
                 .expect("should be prefixed by what it does")
                 .strip_suffix(suffix_to_strip)
-                .expect("should be suffixed by comma")
-                .into();
+                .map(unescape)
+                .expect("should be suffixed by comma");
         }
     }
-
-    buf.push_str(r#"pub(super) const CLIPPY_LINTS: &[LintCompletion] = &["#);
+    clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));
+    buf.push_str(r#"pub const CLIPPY_LINTS: &[Lint] = &["#);
     buf.push('\n');
     clippy_lints.into_iter().for_each(|clippy_lint| {
         let lint_ident = format!("clippy::{}", clippy_lint.id);
@@ -102,7 +160,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> {
 fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
     writeln!(
         buf,
-        r###"    LintCompletion {{
+        r###"    Lint {{
         label: "{}",
         description: r##"{}"##
     }},"###,
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index ba4b24848..5435da76e 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -94,18 +94,16 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
                             support::children(&self.syntax)
                         }
                     }
-                } else {
-                    if let Some(token_kind) = field.token_kind() {
-                        quote! {
-                            pub fn #method_name(&self) -> Option<#ty> {
-                                support::token(&self.syntax, #token_kind)
-                            }
-                        }
-                    } else {
-                        quote! {
-                            pub fn #method_name(&self) -> Option<#ty> {
-                                support::child(&self.syntax)
-                            }
-                        }
-                    }
+                } else if let Some(token_kind) = field.token_kind() {
+                    quote! {
+                        pub fn #method_name(&self) -> Option<#ty> {
+                            support::token(&self.syntax, #token_kind)
+                        }
+                    }
+                } else {
+                    quote! {
+                        pub fn #method_name(&self) -> Option<#ty> {
+                            support::child(&self.syntax)
+                        }
+                    }
                 }
@@ -260,7 +258,7 @@ fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> Result<String> {
     for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
         res.push_str(chunk);
         if let Some(doc) = docs.next() {
-            write_doc_comment(&doc, &mut res);
+            write_doc_comment(doc, &mut res);
         }
     }
 
@@ -296,14 +294,14 @@ fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> Result<String> {
 
     let full_keywords_values = &grammar.keywords;
     let full_keywords =
-        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
+        full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(kw)));
 
     let all_keywords_values =
         grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
     let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
     let all_keywords = all_keywords_values
         .iter()
-        .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
+        .map(|name| format_ident!("{}_KW", to_upper_snake_case(name)))
         .collect::<Vec<_>>();
 
     let literals =
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index d0bef7b7a..063e11a5a 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -1,4 +1,4 @@
-//! See https://github.com/matklad/cargo-xtask/.
+//! See <https://github.com/matklad/cargo-xtask/>.
 //!
 //! This binary defines various auxiliary build commands, which are not
 //! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask`
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index b0b76b8aa..7b190d425 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -71,7 +71,7 @@ impl Metrics {
         Ok(())
     }
     fn measure_analysis_stats_self(&mut self) -> Result<()> {
-        self.measure_analysis_stats_path("self", &".")
+        self.measure_analysis_stats_path("self", ".")
     }
     fn measure_analysis_stats(&mut self, bench: &str) -> Result<()> {
         self.measure_analysis_stats_path(
@@ -81,9 +81,8 @@ impl Metrics {
     }
     fn measure_analysis_stats_path(&mut self, name: &str, path: &str) -> Result<()> {
         eprintln!("\nMeasuring analysis-stats/{}", name);
-        let output =
-            cmd!("./target/release/rust-analyzer --quiet analysis-stats --memory-usage {path}")
-                .read()?;
+        let output = cmd!("./target/release/rust-analyzer -q analysis-stats --memory-usage {path}")
+            .read()?;
         for (metric, value, unit) in parse_metrics(&output) {
             self.report(&format!("analysis-stats/{}/{}", name, metric), value, unit.into());
         }
diff --git a/xtask/src/release/changelog.rs b/xtask/src/release/changelog.rs
index ffcae2cf7..2384a746f 100644
--- a/xtask/src/release/changelog.rs
+++ b/xtask/src/release/changelog.rs
@@ -132,7 +132,7 @@ fn parse_changelog_line(s: &str) -> Option<PrInfo> {
             return Some(PrInfo { kind, message: Some(message) });
         }
     };
-    let res = PrInfo { kind, message };
+    let res = PrInfo { message, kind };
     Some(res)
 }
 
@@ -152,7 +152,7 @@ fn parse_title_line(s: &str) -> PrInfo {
                 PrKind::Skip => None,
                 _ => Some(s[prefix.len()..].to_string()),
             };
-            return PrInfo { kind, message };
+            return PrInfo { message, kind };
         }
     }
     PrInfo { kind: PrKind::Other, message: Some(s.to_string()) }
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
index 6f687a788..f2ba8efef 100644
--- a/xtask/src/tidy.rs
+++ b/xtask/src/tidy.rs
@@ -1,4 +1,7 @@
-use std::path::{Path, PathBuf};
+use std::{
+    collections::HashSet,
+    path::{Path, PathBuf},
+};
 
 use xshell::{cmd, pushd, pushenv, read_file};
 
@@ -33,7 +36,7 @@ fn check_code_formatting() {
     let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
     crate::ensure_rustfmt().unwrap();
     let res = cmd!("cargo fmt -- --check").run();
-    if !res.is_ok() {
+    if res.is_err() {
         let _ = cmd!("cargo fmt").run();
     }
     res.unwrap()
@@ -81,6 +84,7 @@ Please adjust docs/dev/lsp-extensions.md.
 #[test]
 fn rust_files_are_tidy() {
     let mut tidy_docs = TidyDocs::default();
+    let mut tidy_marks = TidyMarks::default();
     for path in rust_files() {
         let text = read_file(&path).unwrap();
         check_todo(&path, &text);
@@ -88,8 +92,10 @@ fn rust_files_are_tidy() {
         check_trailing_ws(&path, &text);
         deny_clippy(&path, &text);
         tidy_docs.visit(&path, &text);
+        tidy_marks.visit(&path, &text);
     }
     tidy_docs.finish();
+    tidy_marks.finish();
 }
 
 #[test]
@@ -193,7 +199,9 @@ https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-
 fn deny_clippy(path: &Path, text: &str) {
     let ignore = &[
         // The documentation in string literals may contain anything for its own purposes
-        "ide_completion/src/generated_lint_completions.rs",
+        "ide_db/src/helpers/generated_lints.rs",
+        // The tests test clippy lint hovers
+        "ide/src/hover.rs",
     ];
     if ignore.iter().any(|p| path.ends_with(p)) {
         return;
@@ -244,19 +252,19 @@ Zlib OR Apache-2.0 OR MIT
         .map(|it| it.trim())
         .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
         .collect::<Vec<_>>();
-    licenses.sort();
+    licenses.sort_unstable();
     licenses.dedup();
     if licenses != expected {
         let mut diff = String::new();
 
-        diff += &format!("New Licenses:\n");
+        diff.push_str("New Licenses:\n");
         for &l in licenses.iter() {
             if !expected.contains(&l) {
                 diff += &format!(" {}\n", l)
             }
         }
 
-        diff += &format!("\nMissing Licenses:\n");
+        diff.push_str("\nMissing Licenses:\n");
         for &l in expected.iter() {
             if !licenses.contains(&l) {
                 diff += &format!(" {}\n", l)
@@ -280,7 +288,7 @@ fn check_todo(path: &Path, text: &str) {
         // `ast::make`.
         "ast/make.rs",
         // The documentation in string literals may contain anything for its own purposes
-        "ide_completion/src/generated_lint_completions.rs",
+        "ide_db/src/helpers/generated_lints.rs",
     ];
     if need_todo.iter().any(|p| path.ends_with(p)) {
         return;
@@ -310,7 +318,7 @@ fn check_dbg(path: &Path, text: &str) {
310 "ide_completion/src/completions/postfix.rs", 318 "ide_completion/src/completions/postfix.rs",
311 // The documentation in string literals may contain anything for its own purposes 319 // The documentation in string literals may contain anything for its own purposes
312 "ide_completion/src/lib.rs", 320 "ide_completion/src/lib.rs",
313 "ide_completion/src/generated_lint_completions.rs", 321 "ide_db/src/helpers/generated_lints.rs",
314 // test for doc test for remove_dbg 322 // test for doc test for remove_dbg
315 "src/tests/generated.rs", 323 "src/tests/generated.rs",
316 ]; 324 ];
@@ -364,7 +372,10 @@ impl TidyDocs {
                 self.contains_fixme.push(path.to_path_buf());
             }
         } else {
-            if text.contains("// Feature:") || text.contains("// Assist:") {
+            if text.contains("// Feature:")
+                || text.contains("// Assist:")
+                || text.contains("// Diagnostic:")
+            {
                 return;
             }
             self.missing_docs.push(path.display().to_string());
@@ -406,6 +417,39 @@ fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
         .any(|it| dirs_to_exclude.contains(&it))
 }
 
+#[derive(Default)]
+struct TidyMarks {
+    hits: HashSet<String>,
+    checks: HashSet<String>,
+}
+
+impl TidyMarks {
+    fn visit(&mut self, _path: &Path, text: &str) {
+        for line in text.lines() {
+            if let Some(mark) = find_mark(line, "hit") {
+                self.hits.insert(mark.to_string());
+            }
+            if let Some(mark) = find_mark(line, "check") {
+                self.checks.insert(mark.to_string());
+            }
+            if let Some(mark) = find_mark(line, "check_count") {
+                self.checks.insert(mark.to_string());
+            }
+        }
+    }
+
+    fn finish(self) {
+        assert!(!self.hits.is_empty());
+
+        let diff: Vec<_> =
+            self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
+
+        if !diff.is_empty() {
+            panic!("unpaired marks: {:?}", diff)
+        }
+    }
+}
+
 #[allow(deprecated)]
 fn stable_hash(text: &str) -> u64 {
     use std::hash::{Hash, Hasher, SipHasher};
@@ -415,3 +459,11 @@ fn stable_hash(text: &str) -> u64 {
     text.hash(&mut hasher);
     hasher.finish()
 }
+
+fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
+    let idx = text.find(mark)?;
+    let text = text[idx + mark.len()..].strip_prefix("!(")?;
+    let idx = text.find(|c: char| !(c.is_alphanumeric() || c == '_'))?;
+    let text = &text[..idx];
+    Some(text)
+}
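
Note: find_mark only relies on the textual shape `<mark>!(<identifier>`; it does not parse Rust. Assuming marks are written with cov-mark style hit!/check!/check_count! macro calls (an assumption for illustration, not something this patch prescribes), a hypothetical test shows the pairing logic TidyMarks builds on:

#[test]
fn find_mark_examples() {
    // The identifier between "!(" and the first non-identifier character is returned.
    assert_eq!(find_mark("cov_mark::hit!(insert_semicolon);", "hit"), Some("insert_semicolon"));
    assert_eq!(find_mark("cov_mark::check!(insert_semicolon);", "check"), Some("insert_semicolon"));
    // A check_count! call is not matched by the plain "check" probe ("_count!(" follows
    // "check" rather than "!("), which is why TidyMarks::visit probes "check_count" separately.
    assert_eq!(find_mark("cov_mark::check_count!(insert_semicolon, 2);", "check"), None);
    assert_eq!(
        find_mark("cov_mark::check_count!(insert_semicolon, 2);", "check_count"),
        Some("insert_semicolon")
    );
}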