diff options
Diffstat (limited to 'xtask/src/tidy.rs')
-rw-r--r-- | xtask/src/tidy.rs | 447 |
1 file changed, 447 insertions, 0 deletions
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs new file mode 100644 index 000000000..349ca14d0 --- /dev/null +++ b/xtask/src/tidy.rs | |||
@@ -0,0 +1,447 @@ | |||
1 | use std::{ | ||
2 | collections::HashMap, | ||
3 | path::{Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use xshell::{cmd, read_file}; | ||
7 | |||
8 | use crate::{ | ||
9 | cargo_files, | ||
10 | codegen::{self, Mode}, | ||
11 | project_root, run_rustfmt, rust_files, | ||
12 | }; | ||
13 | |||
#[test]
fn generated_grammar_is_fresh() {
    // Re-run syntax codegen in `Verify` mode; a reported difference means the
    // checked-in generated grammar is stale.
    match codegen::generate_syntax(Mode::Verify) {
        Ok(_) => {}
        Err(error) => panic!("{}. Please update it by running `cargo xtask codegen`", error),
    }
}
20 | |||
#[test]
fn generated_tests_are_fresh() {
    // Re-run parser-test codegen in `Verify` mode; a reported difference means
    // the checked-in generated tests are stale.
    match codegen::generate_parser_tests(Mode::Verify) {
        Ok(_) => {}
        Err(error) => panic!("{}. Please update tests by running `cargo xtask codegen`", error),
    }
}
27 | |||
#[test]
fn generated_assists_are_fresh() {
    // Re-run assist-test codegen in `Verify` mode; a reported difference means
    // the checked-in generated assist tests are stale.
    match codegen::generate_assists_tests(Mode::Verify) {
        Ok(_) => {}
        Err(error) => panic!("{}. Please update assists by running `cargo xtask codegen`", error),
    }
}
34 | |||
#[test]
fn check_code_formatting() {
    // Run rustfmt in `Verify` mode; any formatting drift fails the test.
    match run_rustfmt(Mode::Verify) {
        Ok(_) => {}
        Err(error) => panic!("{}. Please format the code by running `cargo format`", error),
    }
}
41 | |||
#[test]
fn smoke_test_docs_generation() {
    // We don't commit docs to the repo, so we can just overwrite in tests.
    // This only checks that each generator completes without error; the
    // generated output itself is not inspected here.
    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
    codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap();
}
49 | |||
#[test]
fn check_lsp_extensions_docs() {
    // Hash of `lsp_ext.rs` as it exists right now.
    let lsp_ext_rs =
        read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap();
    let expected_hash = stable_hash(lsp_ext_rs.as_str());

    // Hash recorded in the docs the last time they were brought up to date.
    let lsp_extensions_md =
        read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
    let recorded = lsp_extensions_md
        .lines()
        .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
        .unwrap()
        .trim();
    let actual_hash = u64::from_str_radix(recorded, 16).unwrap();

    // A mismatch means the source changed without a corresponding doc update.
    if actual_hash != expected_hash {
        panic!(
            "
lsp_ext.rs was changed without touching lsp-extensions.md.

Expected hash: {:x}
Actual hash: {:x}

Please adjust docs/dev/lsp-extensions.md.
",
            expected_hash, actual_hash
        )
    }
}
83 | |||
#[test]
fn rust_files_are_tidy() {
    // Run each per-file lint over every Rust source file, accumulating
    // module-doc findings; `finish` reports them once all files are seen.
    let mut docs = TidyDocs::default();
    for path in rust_files() {
        let text = read_file(&path).unwrap();
        check_todo(&path, &text);
        check_dbg(&path, &text);
        check_trailing_ws(&path, &text);
        deny_clippy(&path, &text);
        docs.visit(&path, &text);
    }
    docs.finish();
}
97 | |||
#[test]
fn cargo_files_are_tidy() {
    // Enforces two invariants in every workspace Cargo.toml:
    //   * `[section]` header lines carry nothing but the header itself;
    //   * `path = ...` dependencies have a `version`, except under
    //     dev-dependencies, where they must not.
    for cargo in cargo_files() {
        // The most recently seen `[section]` header, if any.
        let mut section = None;
        for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() {
            let text = text.trim();
            if text.starts_with('[') {
                if !text.ends_with(']') {
                    panic!(
                        "\nplease don't add comments or trailing whitespace in section lines.\n\
                        {}:{}\n",
                        cargo.display(),
                        line_no + 1
                    )
                }
                section = Some(text);
                continue;
            }
            // Strip all whitespace so `path = "..."` matches regardless of spacing.
            let text: String = text.split_whitespace().collect();
            if !text.contains("path=") {
                continue;
            }
            // Note: the dev-dependencies arm must come first, since a
            // `[dev-dependencies]` header also contains "dependencies".
            match section {
                Some(s) if s.contains("dev-dependencies") => {
                    if text.contains("version") {
                        panic!(
                            "\ncargo internal dev-dependencies should not have a version.\n\
                            {}:{}\n",
                            cargo.display(),
                            line_no + 1
                        );
                    }
                }
                Some(s) if s.contains("dependencies") => {
                    if !text.contains("version") {
                        panic!(
                            "\ncargo internal dependencies should have a version.\n\
                            {}:{}\n",
                            cargo.display(),
                            line_no + 1
                        );
                    }
                }
                _ => {}
            }
        }
    }
}
146 | |||
#[test]
fn check_merge_commits() {
    // List merge commits among the last 19 commits, excluding those authored
    // by bors[bot] (the merge bot); any output means a PR branch carries a
    // hand-made merge commit.
    let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
        .read()
        .unwrap();
    if !stdout.is_empty() {
        panic!(
            "
Merge commits are not allowed in the history.

When updating a pull-request, please rebase your feature branch
on top of master by running `git rebase master`. If rebase fails,
you can re-apply your changes like this:

# Just look around to see the current state.
$ git status
$ git log

# Abort in-progress rebase and merges, if any.
$ git rebase --abort
$ git merge --abort

# Make the branch point to the latest commit from master,
# while maintaining your local changes uncommited.
$ git reset --soft origin/master

# Commit all changes in a single batch.
$ git commit -am'My changes'

# Verify that everything looks alright.
$ git status
$ git log

# Push the changes. We did a rebase, so we need `--force` option.
# `--force-with-lease` is a more safe (Rusty) version of `--force`.
$ git push --force-with-lease

# Verify that both local and remote branch point to the same commit.
$ git log

And don't fear to mess something up during a rebase -- you can
always restore the previous state using `git ref-log`:

https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
        );
    }
}
195 | |||
/// Forbids `#[allow(clippy::...)]` attributes outside of a small whitelist
/// of generated files, and panics with an explanatory message when one is
/// found. rust-analyzer intentionally doesn't run clippy on CI (see the
/// linked issue in the message), so local allows would be unreviewable.
///
/// Note: parameters take `&Path`/`&str` rather than `&PathBuf`/`&String`
/// (clippy `ptr_arg`); existing call sites still compile via deref coercion.
fn deny_clippy(path: &Path, text: &str) {
    let ignore = &[
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if ignore.iter().any(|p| path.ends_with(p)) {
        return;
    }

    // Spelled with a `\u{61}` escape so this file doesn't trip its own check.
    if text.contains("\u{61}llow(clippy") {
        panic!(
            "\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow lint globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.

",
            path.display()
        )
    }
}
217 | |||
/// Checks that the set of dependency licenses reported by `cargo metadata`
/// exactly matches the vetted list below, panicking with a diff of new and
/// missing licenses otherwise.
#[test]
fn check_licenses() {
    let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    // Crude textual extraction of `"license"` fields from the JSON output of
    // `cargo metadata`; avoids pulling a JSON parser into xtask.
    let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort();
    licenses.dedup();
    if licenses != expected {
        let mut diff = String::new();

        // `push_str` for the constant headers; `format!` only where a value
        // is interpolated (avoids clippy `useless_format` and an allocation).
        diff.push_str("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        diff.push_str("\nMissing Licenses:\n");
        for &l in expected.iter() {
            if !licenses.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        panic!("different set of licenses!\n{}", diff);
    }
    // (The previous trailing `assert_eq!` was redundant: the sets are already
    // known equal here, since inequality panics above.)
}
273 | |||
/// Rejects `TODO`/`TOOD` markers and `todo!()` macros outside a whitelist of
/// files that legitimately need them; the project convention is `FIXME`.
fn check_todo(path: &Path, text: &str) {
    let need_todo = &[
        // This file itself obviously needs to use todo (<- like this!).
        // NOTE(review): path predates the move to xtask/src/tidy.rs — confirm
        // this file is not visited by `rust_files()`.
        "tests/tidy.rs",
        // Some of our assists generate `todo!()`.
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if need_todo.iter().any(|p| path.ends_with(p)) {
        return;
    }

    let has_marker =
        text.contains("TODO") || text.contains("TOOD") || text.contains("todo!");
    if !has_marker {
        return;
    }
    // Generated by an assist
    if text.contains("${0:todo!()}") {
        return;
    }

    panic!(
        "\nTODO markers or todo! macros should not be committed to the master branch,\n\
        use FIXME instead\n\
        {}\n",
        path.display(),
    )
}
304 | |||
/// Rejects `dbg!` invocations outside a whitelist of files that legitimately
/// mention the macro.
fn check_dbg(path: &Path, text: &str) {
    let need_dbg = &[
        // This file itself obviously needs to use dbg.
        "tests/tidy.rs",
        // Assists to remove `dbg!()`
        "handlers/remove_dbg.rs",
        // We have .dbg postfix
        "ide_completion/src/completions/postfix.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/lib.rs",
        "ide_completion/src/generated_lint_completions.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
    ];
    if need_dbg.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if !text.contains("dbg!") {
        return;
    }

    panic!(
        "\ndbg! macros should not be committed to the master branch,\n\
        {}\n",
        path.display(),
    )
}
330 | |||
331 | fn check_trailing_ws(path: &Path, text: &str) { | ||
332 | if is_exclude_dir(path, &["test_data"]) { | ||
333 | return; | ||
334 | } | ||
335 | for (line_number, line) in text.lines().enumerate() { | ||
336 | if line.chars().last().map(char::is_whitespace) == Some(true) { | ||
337 | panic!("Trailing whitespace in {} at line {}", path.display(), line_number) | ||
338 | } | ||
339 | } | ||
340 | } | ||
341 | |||
/// Accumulates module-documentation findings across all visited files.
#[derive(Default)]
struct TidyDocs {
    // Display strings of files whose first line is not a `//!` doc comment.
    missing_docs: Vec<String>,
    // Files whose first `//!` line contains a FIXME.
    contains_fixme: Vec<PathBuf>,
}
347 | |||
impl TidyDocs {
    /// Classifies `path` based on the first line of `text`: records it as
    /// missing module docs, or as carrying a FIXME in its module docs.
    fn visit(&mut self, path: &Path, text: &str) {
        // Test hopefully don't really need comments, and for assists we already
        // have special comments which are source of doc tests and user docs.
        if is_exclude_dir(path, &["tests", "test_data"]) {
            return;
        }

        if is_exclude_file(path) {
            return;
        }

        // Empty files trivially pass.
        let first_line = match text.lines().next() {
            Some(it) => it,
            None => return,
        };

        if first_line.starts_with("//!") {
            if first_line.contains("FIXME") {
                self.contains_fixme.push(path.to_path_buf());
            }
        } else {
            // Files documented via `// Feature:` / `// Assist:` comments
            // don't additionally need a `//!` header.
            if text.contains("// Feature:") || text.contains("// Assist:") {
                return;
            }
            self.missing_docs.push(path.display().to_string());
        }

        // Whitelist of file names that never need module-level docs.
        fn is_exclude_file(d: &Path) -> bool {
            let file_names = ["tests.rs", "famous_defs_fixture.rs"];

            d.file_name()
                .unwrap_or_default()
                .to_str()
                .map(|f_n| file_names.iter().any(|name| *name == f_n))
                .unwrap_or(false)
        }
    }

    /// Reports the accumulated findings. Panics when docs are missing, when a
    /// FIXME appears in a crate outside `poorly_documented`, or when a listed
    /// crate no longer has any FIXME (so the list can be shrunk).
    fn finish(self) {
        if !self.missing_docs.is_empty() {
            panic!(
                "\nMissing docs strings\n\n\
                modules:\n{}\n\n",
                self.missing_docs.join("\n")
            )
        }

        // Crates where FIXMEs in module docs are still tolerated.
        let poorly_documented = [
            "hir",
            "hir_expand",
            "ide",
            "mbe",
            "parser",
            "profile",
            "project_model",
            "syntax",
            "tt",
            "hir_ty",
        ];

        // Track, per tolerated crate, whether at least one FIXME was seen.
        let mut has_fixmes =
            poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
        'outer: for path in self.contains_fixme {
            for krate in poorly_documented.iter() {
                // Match the crate name against any path component.
                if path.components().any(|it| it.as_os_str() == *krate) {
                    has_fixmes.insert(krate, true);
                    continue 'outer;
                }
            }
            panic!("FIXME doc in a fully-documented crate: {}", path.display())
        }

        for (krate, has_fixme) in has_fixmes.iter() {
            if !has_fixme {
                panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate)
            }
        }
    }
}
428 | |||
429 | fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { | ||
430 | p.strip_prefix(project_root()) | ||
431 | .unwrap() | ||
432 | .components() | ||
433 | .rev() | ||
434 | .skip(1) | ||
435 | .filter_map(|it| it.as_os_str().to_str()) | ||
436 | .any(|it| dirs_to_exclude.contains(&it)) | ||
437 | } | ||
438 | |||
/// Hashes `text` with `\r` stripped (so CRLF and LF inputs agree), using
/// `SipHasher::default()`, which is keyed with fixed zero keys per its std
/// documentation — hence the result is stable across runs, unlike
/// `RandomState`-based hashing.
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
    use std::hash::{Hash, Hasher, SipHasher};

    let normalized = text.replace('\r', "");
    let mut digest = SipHasher::default();
    normalized.hash(&mut digest);
    digest.finish()
}