Diffstat (limited to 'xtask/tests')
-rw-r--r--  xtask/tests/tidy.rs  423
1 file changed, 0 insertions, 423 deletions
diff --git a/xtask/tests/tidy.rs b/xtask/tests/tidy.rs
deleted file mode 100644
index a72498a38..000000000
--- a/xtask/tests/tidy.rs
+++ /dev/null
@@ -1,423 +0,0 @@
use std::{
    collections::HashMap,
    path::{Path, PathBuf},
};

use xshell::{cmd, read_file};
use xtask::{
    cargo_files,
    codegen::{self, Mode},
    project_root, run_rustfmt, rust_files,
};

#[test]
fn generated_grammar_is_fresh() {
    if let Err(error) = codegen::generate_syntax(Mode::Verify) {
        panic!("{}. Please update it by running `cargo xtask codegen`", error);
    }
}

#[test]
fn generated_tests_are_fresh() {
    if let Err(error) = codegen::generate_parser_tests(Mode::Verify) {
        panic!("{}. Please update tests by running `cargo xtask codegen`", error);
    }
}

#[test]
fn generated_assists_are_fresh() {
    if let Err(error) = codegen::generate_assists_tests(Mode::Verify) {
        panic!("{}. Please update assists by running `cargo xtask codegen`", error);
    }
}

#[test]
fn check_code_formatting() {
    if let Err(error) = run_rustfmt(Mode::Verify) {
        panic!("{}. Please format the code by running `cargo format`", error);
    }
}

#[test]
fn smoke_test_docs_generation() {
    // We don't commit docs to the repo, so we can just overwrite them in tests.
    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
    codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap();
}

#[test]
fn check_lsp_extensions_docs() {
    let expected_hash = {
        let lsp_ext_rs =
            read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap();
        stable_hash(lsp_ext_rs.as_str())
    };
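    // docs/dev/lsp-extensions.md is expected to contain a line of the form
    // `lsp_ext.rs hash: <hex digits>`; the value is parsed back into a u64 here
    // and compared against the hash of the current lsp_ext.rs.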
    let actual_hash = {
        let lsp_extensions_md =
            read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
        let text = lsp_extensions_md
            .lines()
            .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
            .unwrap()
            .trim();
        u64::from_str_radix(text, 16).unwrap()
    };

    if actual_hash != expected_hash {
        panic!(
            "
lsp_ext.rs was changed without touching lsp-extensions.md.

Expected hash: {:x}
Actual hash: {:x}

Please adjust docs/dev/lsp-extensions.md.
",
            expected_hash, actual_hash
        )
    }
}

#[test]
fn rust_files_are_tidy() {
    let mut tidy_docs = TidyDocs::default();
    for path in rust_files() {
        let text = read_file(&path).unwrap();
        check_todo(&path, &text);
        check_dbg(&path, &text);
        check_trailing_ws(&path, &text);
        deny_clippy(&path, &text);
        tidy_docs.visit(&path, &text);
    }
    tidy_docs.finish();
}

#[test]
fn cargo_files_are_tidy() {
    for cargo in cargo_files() {
        let mut section = None;
        for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() {
            let text = text.trim();
            if text.starts_with("[") {
                section = Some(text);
                continue;
            }
            if !section.map(|it| it.starts_with("[dependencies")).unwrap_or(false) {
                continue;
            }
            let text: String = text.split_whitespace().collect();
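            // With whitespace stripped, a local dependency such as
            // `foo = { path = "../foo" }` becomes `foo={path="../foo"}`, which is
            // rejected below unless a `version` field is also present.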
            if text.contains("path=") && !text.contains("version") {
                panic!(
                    "\ncargo internal dependencies should have version.\n\
                     {}:{}\n",
                    cargo.display(),
                    line_no + 1
                )
            }
        }
    }
}

#[test]
fn check_merge_commits() {
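    // `--merges` lists only merge commits, `HEAD~19..` limits the search to the
    // last 19 commits, and `--invert-grep --author 'bors\[bot\]'` is meant to
    // skip the merges created by bors itself.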
    let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
        .read()
        .unwrap();
    if !stdout.is_empty() {
        panic!(
            "
Merge commits are not allowed in the history.

When updating a pull request, please rebase your feature branch
on top of master by running `git rebase master`. If rebase fails,
you can re-apply your changes like this:

    # Just look around to see the current state.
    $ git status
    $ git log

    # Abort in-progress rebase and merges, if any.
    $ git rebase --abort
    $ git merge --abort

    # Make the branch point to the latest commit from master,
    # while maintaining your local changes uncommitted.
    $ git reset --soft origin/master

    # Commit all changes in a single batch.
    $ git commit -am'My changes'

    # Verify that everything looks alright.
    $ git status
    $ git log

    # Push the changes. We did a rebase, so we need the `--force` option.
    # `--force-with-lease` is a safer (Rusty) version of `--force`.
    $ git push --force-with-lease

    # Verify that both local and remote branch point to the same commit.
    $ git log

And don't be afraid to mess something up during a rebase -- you can
always restore the previous state using `git reflog`:

https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
        );
    }
}

fn deny_clippy(path: &Path, text: &str) {
    let ignore = &[
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if ignore.iter().any(|p| path.ends_with(p)) {
        return;
    }
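    // `\u{61}` is just the letter `a`; writing it as an escape keeps this file
    // from tripping its own check.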
    if text.contains("\u{61}llow(clippy") {
        panic!(
            "\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow lint globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.

",
            path.display()
        )
    }
}

#[test]
fn check_licenses() {
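    // Keep this list sorted: the licenses collected from `cargo metadata` below
    // are sorted and deduped before being compared for equality.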
    let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
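    // The metadata is JSON, but instead of parsing it properly the output is
    // split on `,`/`{`/`}` and fragments like `"license":"MIT OR Apache-2.0"`
    // are trimmed down to the bare license string.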
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort();
    licenses.dedup();
    if licenses != expected {
        let mut diff = String::new();

        diff += &format!("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        diff += &format!("\nMissing Licenses:\n");
        for &l in expected.iter() {
            if !licenses.contains(&l) {
                diff += &format!(" {}\n", l)
            }
        }

        panic!("different set of licenses!\n{}", diff);
    }
    assert_eq!(licenses, expected);
}

fn check_todo(path: &Path, text: &str) {
    let need_todo = &[
        // This file itself obviously needs to use todo (<- like this!).
        "tests/tidy.rs",
        // Some of our assists generate `todo!()`.
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/generated_lint_completions.rs",
    ];
    if need_todo.iter().any(|p| path.ends_with(p)) {
        return;
    }
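    // "TOOD" is matched too, presumably to catch a common misspelling of "TODO".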
    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
        // Generated by an assist
        if text.contains("${0:todo!()}") {
            return;
        }

        panic!(
            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
             use FIXME instead\n\
             {}\n",
            path.display(),
        )
    }
}

fn check_dbg(path: &Path, text: &str) {
    let need_dbg = &[
        // This file itself obviously needs to use dbg.
        "tests/tidy.rs",
        // Assists to remove `dbg!()`
        "handlers/remove_dbg.rs",
        // We have .dbg postfix
        "ide_completion/src/completions/postfix.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/lib.rs",
        "ide_completion/src/generated_lint_completions.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
    ];
    if need_dbg.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if text.contains("dbg!") {
        panic!(
            "\ndbg! macros should not be committed to the master branch,\n\
             {}\n",
            path.display(),
        )
    }
}

fn check_trailing_ws(path: &Path, text: &str) {
    if is_exclude_dir(path, &["test_data"]) {
        return;
    }
    for (line_number, line) in text.lines().enumerate() {
        if line.chars().last().map(char::is_whitespace) == Some(true) {
            panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
        }
    }
}

#[derive(Default)]
struct TidyDocs {
    missing_docs: Vec<String>,
    contains_fixme: Vec<PathBuf>,
}

impl TidyDocs {
    fn visit(&mut self, path: &Path, text: &str) {
        // Tests hopefully don't really need comments, and for assists we already
        // have special comments which are the source of doc tests and user docs.
        if is_exclude_dir(path, &["tests", "test_data"]) {
            return;
        }

        if is_exclude_file(path) {
            return;
        }
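        // A file counts as documented if its first line is a `//!` module doc
        // comment; `// Feature:` / `// Assist:` blocks are accepted instead,
        // since those already feed the generated user docs.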
        let first_line = match text.lines().next() {
            Some(it) => it,
            None => return,
        };

        if first_line.starts_with("//!") {
            if first_line.contains("FIXME") {
                self.contains_fixme.push(path.to_path_buf());
            }
        } else {
            if text.contains("// Feature:") || text.contains("// Assist:") {
                return;
            }
            self.missing_docs.push(path.display().to_string());
        }

        fn is_exclude_file(d: &Path) -> bool {
            let file_names = ["tests.rs", "famous_defs_fixture.rs"];

            d.file_name()
                .unwrap_or_default()
                .to_str()
                .map(|f_n| file_names.iter().any(|name| *name == f_n))
                .unwrap_or(false)
        }
    }

    fn finish(self) {
        if !self.missing_docs.is_empty() {
            panic!(
                "\nMissing docs strings\n\n\
                 modules:\n{}\n\n",
                self.missing_docs.join("\n")
            )
        }
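        // Crates listed here are still allowed to carry FIXME module docs. The
        // checks below enforce both directions: a FIXME doc in an unlisted
        // crate is an error, and a listed crate with no FIXMEs left should be
        // removed from the list.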
        let poorly_documented = [
            "hir",
            "hir_expand",
            "ide",
            "mbe",
            "parser",
            "profile",
            "project_model",
            "syntax",
            "tt",
            "hir_ty",
        ];

        let mut has_fixmes =
            poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
        'outer: for path in self.contains_fixme {
            for krate in poorly_documented.iter() {
                if path.components().any(|it| it.as_os_str() == *krate) {
                    has_fixmes.insert(krate, true);
                    continue 'outer;
                }
            }
            panic!("FIXME doc in a fully-documented crate: {}", path.display())
        }

        for (krate, has_fixme) in has_fixmes.iter() {
            if !has_fixme {
                panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate)
            }
        }
    }
}

fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
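    // `.rev().skip(1)` drops the file name itself, so only the parent
    // directories are compared against `dirs_to_exclude`.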
    p.strip_prefix(project_root())
        .unwrap()
        .components()
        .rev()
        .skip(1)
        .filter_map(|it| it.as_os_str().to_str())
        .any(|it| dirs_to_exclude.contains(&it))
}

#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
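    // `SipHasher` is deprecated, but it implements a specific, documented
    // algorithm (SipHash 2-4), presumably chosen because the resulting value,
    // which is committed to docs/dev/lsp-extensions.md, must not change across
    // toolchains the way the unspecified `DefaultHasher` could. `\r` is
    // stripped first so the hash is independent of line endings.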
    use std::hash::{Hash, Hasher, SipHasher};

    let text = text.replace('\r', "");
    let mut hasher = SipHasher::default();
    text.hash(&mut hasher);
    hasher.finish()
}