diff options
Diffstat (limited to 'xtask/src/tidy.rs')
-rw-r--r-- | xtask/src/tidy.rs | 424 |
1 file changed, 424 insertions, 0 deletions
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs new file mode 100644 index 000000000..63116ec6b --- /dev/null +++ b/xtask/src/tidy.rs | |||
@@ -0,0 +1,424 @@ | |||
1 | use std::{ | ||
2 | collections::HashMap, | ||
3 | path::{Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use xshell::{cmd, read_file}; | ||
7 | |||
8 | use crate::{ | ||
9 | cargo_files, | ||
10 | codegen::{self, Mode}, | ||
11 | project_root, run_rustfmt, rust_files, | ||
12 | }; | ||
13 | |||
#[test]
fn generated_grammar_is_fresh() {
    // `Mode::Verify` regenerates the syntax definitions in memory and fails
    // if the committed files differ, without writing anything to disk.
    match codegen::generate_syntax(Mode::Verify) {
        Ok(_) => {}
        Err(error) => {
            panic!("{}. Please update it by running `cargo xtask codegen`", error)
        }
    }
}
20 | |||
#[test]
fn generated_tests_are_fresh() {
    // Verify (rather than overwrite) the generated parser tests; a mismatch
    // means the inline test comments and the generated files diverged.
    match codegen::generate_parser_tests(Mode::Verify) {
        Ok(_) => {}
        Err(error) => {
            panic!("{}. Please update tests by running `cargo xtask codegen`", error)
        }
    }
}
27 | |||
#[test]
fn generated_assists_are_fresh() {
    // Same pattern as the other codegen freshness checks: verify-only run,
    // with a hint on how to regenerate when it fails.
    match codegen::generate_assists_tests(Mode::Verify) {
        Ok(_) => {}
        Err(error) => {
            panic!("{}. Please update assists by running `cargo xtask codegen`", error)
        }
    }
}
34 | |||
#[test]
fn check_code_formatting() {
    // Runs rustfmt in verify mode and fails if any file is not formatted.
    // NOTE: `cargo format` is not a cargo subcommand in this workspace — the
    // formatting entry point lives in xtask, matching the `cargo xtask codegen`
    // hints used by the sibling tests above, so the hint must say `cargo xtask format`.
    if let Err(error) = run_rustfmt(Mode::Verify) {
        panic!("{}. Please format the code by running `cargo xtask format`", error);
    }
}
41 | |||
#[test]
fn smoke_test_docs_generation() {
    // We don't commit docs to the repo, so we can just overwrite in tests.
    // This only smoke-tests that generation completes without error; the
    // output itself is not checked in.
    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
    codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap();
}
49 | |||
#[test]
fn check_lsp_extensions_docs() {
    // Hash of the current source of truth for the LSP extensions.
    let lsp_ext_rs =
        read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap();
    let expected_hash = stable_hash(lsp_ext_rs.as_str());

    // Hash recorded inside the markdown docs on a `lsp_ext.rs hash:` line.
    let lsp_extensions_md =
        read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
    let recorded = lsp_extensions_md
        .lines()
        .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
        .unwrap()
        .trim();
    let actual_hash = u64::from_str_radix(recorded, 16).unwrap();

    if expected_hash != actual_hash {
        panic!(
            "
lsp_ext.rs was changed without touching lsp-extensions.md.

Expected hash: {:x}
Actual hash: {:x}

Please adjust docs/dev/lsp-extensions.md.
",
            expected_hash, actual_hash
        )
    }
}
83 | |||
#[test]
fn rust_files_are_tidy() {
    // Run every per-file tidy check over all Rust sources in the repo, then
    // report accumulated documentation problems in one go.
    let mut docs = TidyDocs::default();
    for path in rust_files() {
        let contents = read_file(&path).unwrap();
        check_todo(&path, &contents);
        check_dbg(&path, &contents);
        check_trailing_ws(&path, &contents);
        deny_clippy(&path, &contents);
        docs.visit(&path, &contents);
    }
    docs.finish();
}
97 | |||
#[test]
fn cargo_files_are_tidy() {
    // Internal (`path = "..."`) dependencies must also carry a `version`,
    // otherwise publishing the workspace crates breaks.
    for cargo in cargo_files() {
        // The `[section]` we are currently inside, if any.
        let mut section = None;
        for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() {
            let text = text.trim();
            if text.starts_with('[') {
                section = Some(text);
                continue;
            }
            // Only `[dependencies]`-like sections (including dev/build deps) matter.
            if !section.map_or(false, |it| it.starts_with("[dependencies")) {
                continue;
            }
            // Strip all whitespace so `path = "..."` and `path="..."` compare equal.
            let text: String = text.split_whitespace().collect();
            if text.contains("path=") && !text.contains("version") {
                panic!(
                    "\ncargo internal dependencies should have version.\n\
                     {}:{}\n",
                    cargo.display(),
                    line_no + 1
                )
            }
        }
    }
}
123 | |||
// Rejects human-made merge commits in recent history, keeping master linear
// (bors[bot] merges are the only allowed ones).
#[test]
fn check_merge_commits() {
    // `--merges --invert-grep --author 'bors\[bot\]'` lists merge commits NOT
    // authored by bors; any output therefore means a hand-made merge slipped in.
    // NOTE(review): only HEAD~19.. is scanned, so older merges are not caught.
    let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
        .read()
        .unwrap();
    if !stdout.is_empty() {
        panic!(
            "
Merge commits are not allowed in the history.

When updating a pull-request, please rebase your feature branch
on top of master by running `git rebase master`. If rebase fails,
you can re-apply your changes like this:

# Just look around to see the current state.
$ git status
$ git log

# Abort in-progress rebase and merges, if any.
$ git rebase --abort
$ git merge --abort

# Make the branch point to the latest commit from master,
# while maintaining your local changes uncommited.
$ git reset --soft origin/master

# Commit all changes in a single batch.
$ git commit -am'My changes'

# Verify that everything looks alright.
$ git status
$ git log

# Push the changes. We did a rebase, so we need `--force` option.
# `--force-with-lease` is a more safe (Rusty) version of `--force`.
$ git push --force-with-lease

# Verify that both local and remote branch point to the same commit.
$ git log

And don't fear to mess something up during a rebase -- you can
always restore the previous state using `git ref-log`:

https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
        );
    }
}
172 | |||
/// Panics if `text` contains an `allow(clippy...)` attribute.
///
/// Takes `&Path`/`&str` (rather than the original `&PathBuf`/`&String`) for
/// consistency with `check_todo`/`check_dbg`; the call site still works via
/// deref coercion.
fn deny_clippy(path: &Path, text: &str) {
    // Files whose contents (e.g. generated string literals) may legitimately
    // mention `allow(clippy` without actually allowing a lint.
    let ignore = &[
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if ignore.iter().any(|p| path.ends_with(p)) {
        return;
    }

    // `\u{61}` is 'a', spelled out so this function does not trip its own check.
    if text.contains("\u{61}llow(clippy") {
        panic!(
            "\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow lint globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.

",
            path.display()
        )
    }
}
194 | |||
#[test]
fn check_licenses() {
    // Allow-list of license strings observed across all (transitive) dependencies.
    // Kept sorted so it can be compared against the sorted+deduped actual set.
    let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    // Poor man's JSON parsing: split the metadata on structural characters and
    // pick out every `"license"` field.
    let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort();
    licenses.dedup();
    if licenses != expected {
        let mut diff = String::new();

        // `push_str` instead of `+= &format!(...)` for the constant headers
        // (clippy: useless_format).
        diff.push_str("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        diff.push_str("\nMissing Licenses:\n");
        for &l in expected.iter() {
            if !licenses.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        panic!("different set of licenses!\n{}", diff);
    }
    // The former trailing `assert_eq!(licenses, expected)` was unreachable when
    // unequal (the panic above fires first) and a no-op otherwise, so it is removed.
}
250 | |||
/// Panics if `text` contains a TODO marker (or the common `TOOD` typo, or a
/// `todo!` macro) — FIXME is the convention on master.
fn check_todo(path: &Path, text: &str) {
    let need_todo = &[
        // This file itself obviously needs to use todo (<- like this!).
        "tests/tidy.rs",
        // This file now lives at `xtask/src/tidy.rs`; the `tests/tidy.rs` entry
        // above predates the move and no longer matches, so without this entry
        // the file would trip its own `todo!` check.
        "src/tidy.rs",
        // Some of our assists generate `todo!()`.
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if need_todo.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
        // Generated by an assist
        if text.contains("${0:todo!()}") {
            return;
        }

        panic!(
            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
             use FIXME instead\n\
             {}\n",
            path.display(),
        )
    }
}
281 | |||
/// Panics if `text` contains a `dbg!` invocation — debug macros must not be
/// committed to master.
fn check_dbg(path: &Path, text: &str) {
    let need_dbg = &[
        // This file itself obviously needs to use dbg.
        "tests/tidy.rs",
        // This file now lives at `xtask/src/tidy.rs`; the `tests/tidy.rs` entry
        // above predates the move and no longer matches, so without this entry
        // the file would trip its own `dbg!` check.
        "src/tidy.rs",
        // Assists to remove `dbg!()`
        "handlers/remove_dbg.rs",
        // We have .dbg postfix
        "ide_completion/src/completions/postfix.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/lib.rs",
        "ide_completion/src/generated_lint_completions.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
    ];
    if need_dbg.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if text.contains("dbg!") {
        panic!(
            "\ndbg! macros should not be committed to the master branch,\n\
             {}\n",
            path.display(),
        )
    }
}
307 | |||
308 | fn check_trailing_ws(path: &Path, text: &str) { | ||
309 | if is_exclude_dir(path, &["test_data"]) { | ||
310 | return; | ||
311 | } | ||
312 | for (line_number, line) in text.lines().enumerate() { | ||
313 | if line.chars().last().map(char::is_whitespace) == Some(true) { | ||
314 | panic!("Trailing whitespace in {} at line {}", path.display(), line_number) | ||
315 | } | ||
316 | } | ||
317 | } | ||
318 | |||
/// Accumulates documentation problems across all visited files; the collected
/// results are checked in one go by `finish`.
#[derive(Default)]
struct TidyDocs {
    // Display strings of module paths lacking a leading `//!` doc comment.
    missing_docs: Vec<String>,
    // Paths whose first `//!` doc line contains a FIXME marker.
    contains_fixme: Vec<PathBuf>,
}
324 | |||
325 | impl TidyDocs { | ||
326 | fn visit(&mut self, path: &Path, text: &str) { | ||
327 | // Test hopefully don't really need comments, and for assists we already | ||
328 | // have special comments which are source of doc tests and user docs. | ||
329 | if is_exclude_dir(path, &["tests", "test_data"]) { | ||
330 | return; | ||
331 | } | ||
332 | |||
333 | if is_exclude_file(path) { | ||
334 | return; | ||
335 | } | ||
336 | |||
337 | let first_line = match text.lines().next() { | ||
338 | Some(it) => it, | ||
339 | None => return, | ||
340 | }; | ||
341 | |||
342 | if first_line.starts_with("//!") { | ||
343 | if first_line.contains("FIXME") { | ||
344 | self.contains_fixme.push(path.to_path_buf()); | ||
345 | } | ||
346 | } else { | ||
347 | if text.contains("// Feature:") || text.contains("// Assist:") { | ||
348 | return; | ||
349 | } | ||
350 | self.missing_docs.push(path.display().to_string()); | ||
351 | } | ||
352 | |||
353 | fn is_exclude_file(d: &Path) -> bool { | ||
354 | let file_names = ["tests.rs", "famous_defs_fixture.rs"]; | ||
355 | |||
356 | d.file_name() | ||
357 | .unwrap_or_default() | ||
358 | .to_str() | ||
359 | .map(|f_n| file_names.iter().any(|name| *name == f_n)) | ||
360 | .unwrap_or(false) | ||
361 | } | ||
362 | } | ||
363 | |||
364 | fn finish(self) { | ||
365 | if !self.missing_docs.is_empty() { | ||
366 | panic!( | ||
367 | "\nMissing docs strings\n\n\ | ||
368 | modules:\n{}\n\n", | ||
369 | self.missing_docs.join("\n") | ||
370 | ) | ||
371 | } | ||
372 | |||
373 | let poorly_documented = [ | ||
374 | "hir", | ||
375 | "hir_expand", | ||
376 | "ide", | ||
377 | "mbe", | ||
378 | "parser", | ||
379 | "profile", | ||
380 | "project_model", | ||
381 | "syntax", | ||
382 | "tt", | ||
383 | "hir_ty", | ||
384 | ]; | ||
385 | |||
386 | let mut has_fixmes = | ||
387 | poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>(); | ||
388 | 'outer: for path in self.contains_fixme { | ||
389 | for krate in poorly_documented.iter() { | ||
390 | if path.components().any(|it| it.as_os_str() == *krate) { | ||
391 | has_fixmes.insert(krate, true); | ||
392 | continue 'outer; | ||
393 | } | ||
394 | } | ||
395 | panic!("FIXME doc in a fully-documented crate: {}", path.display()) | ||
396 | } | ||
397 | |||
398 | for (krate, has_fixme) in has_fixmes.iter() { | ||
399 | if !has_fixme { | ||
400 | panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate) | ||
401 | } | ||
402 | } | ||
403 | } | ||
404 | } | ||
405 | |||
406 | fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { | ||
407 | p.strip_prefix(project_root()) | ||
408 | .unwrap() | ||
409 | .components() | ||
410 | .rev() | ||
411 | .skip(1) | ||
412 | .filter_map(|it| it.as_os_str().to_str()) | ||
413 | .any(|it| dirs_to_exclude.contains(&it)) | ||
414 | } | ||
415 | |||
// SipHasher is deprecated in std, but unlike DefaultHasher its output is
// specified and stable across releases, which is what we need here.
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
    use std::hash::{Hash, Hasher, SipHasher};

    // Drop '\r' so the hash is identical across CRLF/LF checkouts.
    let normalized: String = text.chars().filter(|&c| c != '\r').collect();
    let mut hasher = SipHasher::default();
    normalized.hash(&mut hasher);
    hasher.finish()
}