aboutsummaryrefslogtreecommitdiff
path: root/xtask/src/tidy.rs
diff options
context:
space:
mode:
authorAleksey Kladov <[email protected]>2021-03-01 17:16:23 +0000
committerAleksey Kladov <[email protected]>2021-03-01 17:26:37 +0000
commitd9dcfd81c5d4325379ac88c4250b9c77ecbd75e8 (patch)
tree0795d7277733920361a260db1a65306f1a47e1ac /xtask/src/tidy.rs
parentc17f2bf2a27798858ef2e3012ca28295aed46efa (diff)
Simplify xtask
lib/bin/test separation isn't really needed.
Diffstat (limited to 'xtask/src/tidy.rs')
-rw-r--r--xtask/src/tidy.rs424
1 files changed, 424 insertions, 0 deletions
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
new file mode 100644
index 000000000..63116ec6b
--- /dev/null
+++ b/xtask/src/tidy.rs
@@ -0,0 +1,424 @@
1use std::{
2 collections::HashMap,
3 path::{Path, PathBuf},
4};
5
6use xshell::{cmd, read_file};
7
8use crate::{
9 cargo_files,
10 codegen::{self, Mode},
11 project_root, run_rustfmt, rust_files,
12};
13
#[test]
fn generated_grammar_is_fresh() {
    // `Mode::Verify` re-runs codegen without writing anything and returns an
    // error when the committed sources differ from the generated output.
    codegen::generate_syntax(Mode::Verify).unwrap_or_else(|error| {
        panic!("{}. Please update it by running `cargo xtask codegen`", error)
    });
}
20
#[test]
fn generated_tests_are_fresh() {
    // Dry-run the parser-test codegen and fail if the checked-in tests are stale.
    codegen::generate_parser_tests(Mode::Verify).unwrap_or_else(|error| {
        panic!("{}. Please update tests by running `cargo xtask codegen`", error)
    });
}
27
#[test]
fn generated_assists_are_fresh() {
    // Dry-run the assist-test codegen and fail if the checked-in sources are stale.
    codegen::generate_assists_tests(Mode::Verify).unwrap_or_else(|error| {
        panic!("{}. Please update assists by running `cargo xtask codegen`", error)
    });
}
34
#[test]
fn check_code_formatting() {
    // Verify (don't fix) rustfmt formatting across the workspace.
    run_rustfmt(Mode::Verify).unwrap_or_else(|error| {
        panic!("{}. Please format the code by running `cargo format`", error)
    });
}
41
#[test]
fn smoke_test_docs_generation() {
    // We don't commit docs to the repo, so we can just overwrite in tests.
    // Each generator returns a Result; `unwrap` turns any codegen failure
    // (e.g. malformed source comments) into a test failure. Order matters
    // only insofar as the first failure masks the later ones.
    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
    codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap();
}
49
#[test]
fn check_lsp_extensions_docs() {
    // The docs for LSP extensions record a hash of `lsp_ext.rs`; if the source
    // changes without the docs being touched, the hashes diverge and this test
    // reminds the author to update the markdown.
    let expected_hash = stable_hash(
        read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap().as_str(),
    );

    let lsp_extensions_md =
        read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
    let recorded = lsp_extensions_md
        .lines()
        .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
        .unwrap()
        .trim();
    let actual_hash = u64::from_str_radix(recorded, 16).unwrap();

    if actual_hash != expected_hash {
        panic!(
            "\nlsp_ext.rs was changed without touching lsp-extensions.md.\n\nExpected hash: {:x}\nActual hash:   {:x}\n\nPlease adjust docs/dev/lsp-extensions.md.\n",
            expected_hash, actual_hash
        )
    }
}
83
#[test]
fn rust_files_are_tidy() {
    // Run every per-file tidy check over all Rust sources in the repo.
    // The doc checker accumulates state across files and reports at the end.
    let mut docs = TidyDocs::default();
    for path in rust_files() {
        let text = read_file(&path).unwrap();
        check_todo(&path, &text);
        check_dbg(&path, &text);
        check_trailing_ws(&path, &text);
        deny_clippy(&path, &text);
        docs.visit(&path, &text);
    }
    docs.finish();
}
97
#[test]
fn cargo_files_are_tidy() {
    // Workspace-internal (`path = …`) dependencies must also declare a
    // `version`, otherwise the crates can't be published.
    for cargo in cargo_files() {
        let manifest = read_file(&cargo).unwrap();
        let mut in_dependencies = false;
        for (line_no, line) in manifest.lines().enumerate() {
            let line = line.trim();
            if line.starts_with('[') {
                // Entering a new TOML section; remember whether it is a
                // `[dependencies]`-like one (incl. dev-/build-dependencies
                // tables starting with that prefix).
                in_dependencies = line.starts_with("[dependencies");
                continue;
            }
            if !in_dependencies {
                continue;
            }
            // Strip all whitespace so `path = "..."` and `path="..."` compare equal.
            let compact: String = line.split_whitespace().collect();
            if compact.contains("path=") && !compact.contains("version") {
                panic!(
                    "\ncargo internal dependencies should have version.\n\
                     {}:{}\n",
                    cargo.display(),
                    line_no + 1
                )
            }
        }
    }
}
123
#[test]
fn check_merge_commits() {
    // List merge commits among the last 19 commits, excluding those authored
    // by bors (the merge bot). Any remaining merge commit means a contributor
    // merged instead of rebasing, which we forbid.
    // NOTE(review): `--invert-grep` applies to `--author` here — i.e. "not
    // authored by bors[bot]"; the bracket characters are regex-escaped.
    let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
        .read()
        .unwrap();
    if !stdout.is_empty() {
        // The message is deliberately a step-by-step recipe, since the target
        // audience is first-time contributors unfamiliar with rebasing.
        panic!(
            "
Merge commits are not allowed in the history.

When updating a pull-request, please rebase your feature branch
on top of master by running `git rebase master`. If rebase fails,
you can re-apply your changes like this:

  # Just look around to see the current state.
  $ git status
  $ git log

  # Abort in-progress rebase and merges, if any.
  $ git rebase --abort
  $ git merge --abort

  # Make the branch point to the latest commit from master,
  # while maintaining your local changes uncommited.
  $ git reset --soft origin/master

  # Commit all changes in a single batch.
  $ git commit -am'My changes'

  # Verify that everything looks alright.
  $ git status
  $ git log

  # Push the changes. We did a rebase, so we need `--force` option.
  # `--force-with-lease` is a more safe (Rusty) version of `--force`.
  $ git push --force-with-lease

  # Verify that both local and remote branch point to the same commit.
  $ git log

And don't fear to mess something up during a rebase -- you can
always restore the previous state using `git ref-log`:

https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
        );
    }
}
172
/// Forbids `#[allow(clippy::…)]` attributes in the source tree.
///
/// rust-analyzer intentionally doesn't gate CI on clippy; lint exceptions are
/// managed globally via `xtask clippy` instead of per-site `allow`s.
///
/// Takes `&Path`/`&str` (not `&PathBuf`/`&String`) for consistency with the
/// sibling checkers (`check_todo`, `check_dbg`); existing callers passing
/// `&PathBuf`/`&String` still work via deref coercion.
///
/// Panics when `text` (contents of the file at `path`) contains such an allow.
fn deny_clippy(path: &Path, text: &str) {
    let ignore = &[
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if ignore.iter().any(|p| path.ends_with(p)) {
        return;
    }

    // `\u{61}` is 'a', spelled as an escape so that this file's own source
    // does not trip the very check it implements.
    if text.contains("\u{61}llow(clippy") {
        panic!(
            "\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow lint globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.

",
            path.display()
        )
    }
}
194
#[test]
fn check_licenses() {
    // The sorted, deduped set of license strings we have already audited.
    // A new dependency with a previously unseen license fails this test so
    // the addition happens consciously.
    let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    // Poor man's JSON parsing: scrape `"license":"…"` fields out of
    // `cargo metadata` output to avoid pulling in a JSON dependency.
    let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort();
    licenses.dedup();
    if licenses != expected {
        let mut diff = String::new();

        // `push_str` instead of `+= &format!(…)` for the static headers
        // (clippy::useless_format); `format!` stays for the dynamic lines.
        diff.push_str("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        diff.push_str("\nMissing Licenses:\n");
        for &l in expected.iter() {
            if !licenses.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        panic!("different set of licenses!\n{}", diff);
    }
    assert_eq!(licenses, expected);
}
250
/// Panics if `text` (the contents of the file at `path`) contains a TODO
/// marker (or the common `TOOD` typo) or a `todo!` macro; FIXME is the
/// sanctioned marker for committed code.
fn check_todo(path: &Path, text: &str) {
    // Files that legitimately mention todo.
    let whitelist: &[&str] = &[
        // This file itself obviously needs to use todo (<- like this!).
        "tests/tidy.rs",
        // Some of our assists generate `todo!()`.
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if whitelist.iter().any(|suffix| path.ends_with(suffix)) {
        return;
    }

    let has_marker = text.contains("TODO") || text.contains("TOOD") || text.contains("todo!");
    if !has_marker {
        return;
    }
    // Snippet placeholders generated by an assist are fine.
    if text.contains("${0:todo!()}") {
        return;
    }

    panic!(
        "\nTODO markers or todo! macros should not be committed to the master branch,\n\
         use FIXME instead\n\
         {}\n",
        path.display(),
    )
}
281
/// Panics if `text` (the contents of the file at `path`) contains a `dbg!`
/// macro invocation — those are debugging leftovers and must not be committed.
fn check_dbg(path: &Path, text: &str) {
    // Files that legitimately mention dbg.
    let whitelist: &[&str] = &[
        // This file itself obviously needs to use dbg.
        "tests/tidy.rs",
        // Assists to remove `dbg!()`
        "handlers/remove_dbg.rs",
        // We have .dbg postfix
        "ide_completion/src/completions/postfix.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/lib.rs",
        "ide_completion/src/generated_lint_completions.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
    ];
    if whitelist.iter().any(|suffix| path.ends_with(suffix)) {
        return;
    }

    if !text.contains("dbg!") {
        return;
    }
    panic!(
        "\ndbg! macros should not be committed to the master branch,\n\
         {}\n",
        path.display(),
    )
}
307
308fn check_trailing_ws(path: &Path, text: &str) {
309 if is_exclude_dir(path, &["test_data"]) {
310 return;
311 }
312 for (line_number, line) in text.lines().enumerate() {
313 if line.chars().last().map(char::is_whitespace) == Some(true) {
314 panic!("Trailing whitespace in {} at line {}", path.display(), line_number)
315 }
316 }
317}
318
/// Accumulates module-documentation violations across all visited files;
/// `visit` records them, `finish` reports them.
#[derive(Default)]
struct TidyDocs {
    /// Modules (as display paths) whose first line is not a `//!` doc comment.
    missing_docs: Vec<String>,
    /// Files whose module docs contain a FIXME marker.
    contains_fixme: Vec<PathBuf>,
}
324
325impl TidyDocs {
326 fn visit(&mut self, path: &Path, text: &str) {
327 // Test hopefully don't really need comments, and for assists we already
328 // have special comments which are source of doc tests and user docs.
329 if is_exclude_dir(path, &["tests", "test_data"]) {
330 return;
331 }
332
333 if is_exclude_file(path) {
334 return;
335 }
336
337 let first_line = match text.lines().next() {
338 Some(it) => it,
339 None => return,
340 };
341
342 if first_line.starts_with("//!") {
343 if first_line.contains("FIXME") {
344 self.contains_fixme.push(path.to_path_buf());
345 }
346 } else {
347 if text.contains("// Feature:") || text.contains("// Assist:") {
348 return;
349 }
350 self.missing_docs.push(path.display().to_string());
351 }
352
353 fn is_exclude_file(d: &Path) -> bool {
354 let file_names = ["tests.rs", "famous_defs_fixture.rs"];
355
356 d.file_name()
357 .unwrap_or_default()
358 .to_str()
359 .map(|f_n| file_names.iter().any(|name| *name == f_n))
360 .unwrap_or(false)
361 }
362 }
363
364 fn finish(self) {
365 if !self.missing_docs.is_empty() {
366 panic!(
367 "\nMissing docs strings\n\n\
368 modules:\n{}\n\n",
369 self.missing_docs.join("\n")
370 )
371 }
372
373 let poorly_documented = [
374 "hir",
375 "hir_expand",
376 "ide",
377 "mbe",
378 "parser",
379 "profile",
380 "project_model",
381 "syntax",
382 "tt",
383 "hir_ty",
384 ];
385
386 let mut has_fixmes =
387 poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
388 'outer: for path in self.contains_fixme {
389 for krate in poorly_documented.iter() {
390 if path.components().any(|it| it.as_os_str() == *krate) {
391 has_fixmes.insert(krate, true);
392 continue 'outer;
393 }
394 }
395 panic!("FIXME doc in a fully-documented crate: {}", path.display())
396 }
397
398 for (krate, has_fixme) in has_fixmes.iter() {
399 if !has_fixme {
400 panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate)
401 }
402 }
403 }
404}
405
406fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
407 p.strip_prefix(project_root())
408 .unwrap()
409 .components()
410 .rev()
411 .skip(1)
412 .filter_map(|it| it.as_os_str().to_str())
413 .any(|it| dirs_to_exclude.contains(&it))
414}
415
/// Hashes `text` with line endings normalized to `\n`.
///
/// `SipHasher` is deprecated, but that is exactly why we use it: its algorithm
/// (and default zero keys) are fixed, whereas `DefaultHasher` gives no
/// stability guarantee between releases — and this hash is persisted in docs.
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
    use std::hash::{Hash, Hasher, SipHasher};

    let mut hasher = SipHasher::default();
    text.replace('\r', "").hash(&mut hasher);
    hasher.finish()
}