Diffstat (limited to 'xtask/src')

 -rw-r--r--  xtask/src/codegen.rs                        |  10
 -rw-r--r--  xtask/src/codegen/gen_assists_docs.rs       |   4
 -rw-r--r--  xtask/src/codegen/gen_diagnostic_docs.rs    |   2
 -rw-r--r--  xtask/src/codegen/gen_feature_docs.rs       |   2
 -rw-r--r--  xtask/src/codegen/gen_lint_completions.rs   |   2
 -rw-r--r--  xtask/src/codegen/gen_parser_tests.rs       |   2
 -rw-r--r--  xtask/src/codegen/gen_syntax.rs             |   2
 -rw-r--r--  xtask/src/dist.rs                           |   8
 -rw-r--r--  xtask/src/install.rs                        |  18
 -rw-r--r--  xtask/src/lib.rs                            | 131
 -rw-r--r--  xtask/src/main.rs                           | 134
 -rw-r--r--  xtask/src/metrics.rs                        |   6
 -rw-r--r--  xtask/src/pre_cache.rs                      |   4
 -rw-r--r--  xtask/src/pre_commit.rs                     |   4
 -rw-r--r--  xtask/src/release.rs                        |  12
 -rw-r--r--  xtask/src/tidy.rs                           | 424

 16 files changed, 589 insertions, 176 deletions
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
index adea053b6..743e83e76 100644
--- a/xtask/src/codegen.rs
+++ b/xtask/src/codegen.rs
@@ -20,7 +20,7 @@ use xshell::{cmd, pushenv, read_file, write_file};
 
 use crate::{ensure_rustfmt, project_root, Result};
 
-pub use self::{
+pub(crate) use self::{
     gen_assists_docs::{generate_assists_docs, generate_assists_tests},
     gen_diagnostic_docs::generate_diagnostic_docs,
     gen_feature_docs::generate_feature_docs,
@@ -30,17 +30,17 @@ pub use self::{
 };
 
 #[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum Mode {
+pub(crate) enum Mode {
     Overwrite,
     Verify,
 }
 
-pub struct CodegenCmd {
-    pub features: bool,
+pub(crate) struct CodegenCmd {
+    pub(crate) features: bool,
 }
 
 impl CodegenCmd {
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         if self.features {
             generate_lint_completions(Mode::Overwrite)?;
         }
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs
index 1ae1343a5..c469b388d 100644
--- a/xtask/src/codegen/gen_assists_docs.rs
+++ b/xtask/src/codegen/gen_assists_docs.rs
@@ -7,12 +7,12 @@ use crate::{
     project_root, rust_files_in, Result,
 };
 
-pub fn generate_assists_tests(mode: Mode) -> Result<()> {
+pub(crate) fn generate_assists_tests(mode: Mode) -> Result<()> {
     let assists = Assist::collect()?;
     generate_tests(&assists, mode)
 }
 
-pub fn generate_assists_docs(mode: Mode) -> Result<()> {
+pub(crate) fn generate_assists_docs(mode: Mode) -> Result<()> {
     let assists = Assist::collect()?;
     let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
diff --git a/xtask/src/codegen/gen_diagnostic_docs.rs b/xtask/src/codegen/gen_diagnostic_docs.rs
index 7c14d4a07..a2561817b 100644
--- a/xtask/src/codegen/gen_diagnostic_docs.rs
+++ b/xtask/src/codegen/gen_diagnostic_docs.rs
@@ -7,7 +7,7 @@ use crate::{
     project_root, rust_files, Result,
 };
 
-pub fn generate_diagnostic_docs(mode: Mode) -> Result<()> {
+pub(crate) fn generate_diagnostic_docs(mode: Mode) -> Result<()> {
     let diagnostics = Diagnostic::collect()?;
     let contents =
         diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
diff --git a/xtask/src/codegen/gen_feature_docs.rs b/xtask/src/codegen/gen_feature_docs.rs
index 61081063b..cad7ff477 100644
--- a/xtask/src/codegen/gen_feature_docs.rs
+++ b/xtask/src/codegen/gen_feature_docs.rs
@@ -7,7 +7,7 @@ use crate::{
     project_root, rust_files, Result,
 };
 
-pub fn generate_feature_docs(mode: Mode) -> Result<()> {
+pub(crate) fn generate_feature_docs(mode: Mode) -> Result<()> {
     let features = Feature::collect()?;
     let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n");
     let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim());
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 8c51d35c7..b1c057037 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -10,7 +10,7 @@ use crate::{
     run_rustfmt,
 };
 
-pub fn generate_lint_completions(mode: Mode) -> Result<()> {
+pub(crate) fn generate_lint_completions(mode: Mode) -> Result<()> {
     if !Path::new("./target/rust").exists() {
         cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?;
     }
diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs
index 6e4abd10c..cb8939063 100644
--- a/xtask/src/codegen/gen_parser_tests.rs
+++ b/xtask/src/codegen/gen_parser_tests.rs
@@ -12,7 +12,7 @@ use crate::{
     project_root, Result,
 };
 
-pub fn generate_parser_tests(mode: Mode) -> Result<()> {
+pub(crate) fn generate_parser_tests(mode: Mode) -> Result<()> {
     let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?;
     fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
         let tests_dir = project_root().join(into);
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index eb524d85a..191bc0e9d 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -18,7 +18,7 @@ use crate::{
     project_root, Result,
 };
 
-pub fn generate_syntax(mode: Mode) -> Result<()> {
+pub(crate) fn generate_syntax(mode: Mode) -> Result<()> {
     let grammar = rust_grammar();
     let ast = lower(&grammar);
 
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 56bf9f99d..f2503f807 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -11,13 +11,13 @@ use xshell::{cmd, cp, mkdir_p, pushd, read_file, rm_rf, write_file};
 
 use crate::{date_iso, project_root};
 
-pub struct DistCmd {
-    pub nightly: bool,
-    pub client_version: Option<String>,
+pub(crate) struct DistCmd {
+    pub(crate) nightly: bool,
+    pub(crate) client_version: Option<String>,
 }
 
 impl DistCmd {
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         let dist = project_root().join("dist");
         rm_rf(&dist)?;
         mkdir_p(&dist)?;
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 4c5c2673c..ea2194248 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -8,13 +8,13 @@ use xshell::{cmd, pushd};
 // Latest stable, feel free to send a PR if this lags behind.
 const REQUIRED_RUST_VERSION: u32 = 50;
 
-pub struct InstallCmd {
-    pub client: Option<ClientOpt>,
-    pub server: Option<ServerOpt>,
+pub(crate) struct InstallCmd {
+    pub(crate) client: Option<ClientOpt>,
+    pub(crate) server: Option<ServerOpt>,
 }
 
 #[derive(Clone, Copy)]
-pub enum ClientOpt {
+pub(crate) enum ClientOpt {
     VsCode,
     VsCodeExploration,
     VsCodeInsiders,
@@ -24,7 +24,7 @@ pub enum ClientOpt {
 }
 
 impl ClientOpt {
-    pub const fn as_cmds(&self) -> &'static [&'static str] {
+    pub(crate) const fn as_cmds(&self) -> &'static [&'static str] {
         match self {
             ClientOpt::VsCode => &["code"],
             ClientOpt::VsCodeExploration => &["code-exploration"],
@@ -60,18 +60,18 @@ impl std::str::FromStr for ClientOpt {
     }
 }
 
-pub struct ServerOpt {
-    pub malloc: Malloc,
+pub(crate) struct ServerOpt {
+    pub(crate) malloc: Malloc,
 }
 
-pub enum Malloc {
+pub(crate) enum Malloc {
     System,
     Mimalloc,
     Jemalloc,
 }
 
 impl InstallCmd {
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         if cfg!(target_os = "macos") {
             fix_path_for_mac().context("Fix path for mac")?
         }
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
deleted file mode 100644
index b19985fb2..000000000
--- a/xtask/src/lib.rs
+++ /dev/null
@@ -1,131 +0,0 @@
-//! Support library for `cargo xtask` command.
-//!
-//! See https://github.com/matklad/cargo-xtask/
-
-pub mod codegen;
-mod ast_src;
-
-pub mod install;
-pub mod release;
-pub mod dist;
-pub mod pre_commit;
-pub mod metrics;
-pub mod pre_cache;
-
-use std::{
-    env,
-    path::{Path, PathBuf},
-};
-
-use walkdir::{DirEntry, WalkDir};
-use xshell::{cmd, pushd, pushenv};
-
-use crate::codegen::Mode;
-
-pub use anyhow::{bail, Context as _, Result};
-
-pub fn project_root() -> PathBuf {
-    Path::new(
-        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
-    )
-    .ancestors()
-    .nth(1)
-    .unwrap()
-    .to_path_buf()
-}
-
-pub fn rust_files() -> impl Iterator<Item = PathBuf> {
-    rust_files_in(&project_root().join("crates"))
-}
-
-pub fn cargo_files() -> impl Iterator<Item = PathBuf> {
-    files_in(&project_root(), "toml")
-        .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
-}
-
-pub fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
-    files_in(path, "rs")
-}
-
-pub fn run_rustfmt(mode: Mode) -> Result<()> {
-    let _dir = pushd(project_root())?;
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
-    ensure_rustfmt()?;
-    let check = match mode {
-        Mode::Overwrite => &[][..],
-        Mode::Verify => &["--", "--check"],
-    };
-    cmd!("cargo fmt {check...}").run()?;
-    Ok(())
-}
-
-fn ensure_rustfmt() -> Result<()> {
-    let out = cmd!("rustfmt --version").read()?;
-    if !out.contains("stable") {
-        bail!(
-            "Failed to run rustfmt from toolchain 'stable'. \
-             Please run `rustup component add rustfmt --toolchain stable` to install it.",
-        )
-    }
-    Ok(())
-}
-
-pub fn run_clippy() -> Result<()> {
-    if cmd!("cargo clippy --version").read().is_err() {
-        bail!(
-            "Failed run cargo clippy. \
-             Please run `rustup component add clippy` to install it.",
-        )
-    }
-
-    let allowed_lints = "
-        -A clippy::collapsible_if
-        -A clippy::needless_pass_by_value
-        -A clippy::nonminimal_bool
-        -A clippy::redundant_pattern_matching
-    "
-    .split_ascii_whitespace();
-    cmd!("cargo clippy --all-features --all-targets -- {allowed_lints...}").run()?;
-    Ok(())
-}
-
-pub fn run_fuzzer() -> Result<()> {
-    let _d = pushd("./crates/syntax")?;
-    let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
-    if cmd!("cargo fuzz --help").read().is_err() {
-        cmd!("cargo install cargo-fuzz").run()?;
-    };
-
-    // Expecting nightly rustc
-    let out = cmd!("rustc --version").read()?;
-    if !out.contains("nightly") {
-        bail!("fuzz tests require nightly rustc")
-    }
-
-    cmd!("cargo fuzz run parser").run()?;
-    Ok(())
-}
-
-fn date_iso() -> Result<String> {
-    let res = cmd!("date --iso --utc").read()?;
-    Ok(res)
-}
-
-fn is_release_tag(tag: &str) -> bool {
-    tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
-}
-
-fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
-    let iter = WalkDir::new(path);
-    return iter
-        .into_iter()
-        .filter_entry(|e| !is_hidden(e))
-        .map(|e| e.unwrap())
-        .filter(|e| !e.file_type().is_dir())
-        .map(|e| e.into_path())
-        .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
-
-    fn is_hidden(entry: &DirEntry) -> bool {
-        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
-    }
-}
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index cbb9b315e..48c0d9920 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -7,22 +7,35 @@
 //!
 //! This binary is integrated into the `cargo` command line by using an alias in
 //! `.cargo/config`.
+mod codegen;
+mod ast_src;
+#[cfg(test)]
+mod tidy;
 
-use std::env;
+mod install;
+mod release;
+mod dist;
+mod pre_commit;
+mod metrics;
+mod pre_cache;
 
-use anyhow::bail;
+use anyhow::{bail, Result};
 use codegen::CodegenCmd;
 use pico_args::Arguments;
-use xshell::{cmd, cp, pushd};
-use xtask::{
-    codegen::{self, Mode},
+use std::{
+    env,
+    path::{Path, PathBuf},
+};
+use walkdir::{DirEntry, WalkDir};
+use xshell::{cmd, cp, pushd, pushenv};
+
+use crate::{
+    codegen::Mode,
     dist::DistCmd,
     install::{InstallCmd, Malloc, ServerOpt},
     metrics::MetricsCmd,
     pre_cache::PreCacheCmd,
-    pre_commit, project_root,
     release::{PromoteCmd, ReleaseCmd},
-    run_clippy, run_fuzzer, run_rustfmt, Result,
 };
 
 fn main() -> Result<()> {
@@ -172,3 +185,110 @@ fn finish_args(args: Arguments) -> Result<()> {
 
     Ok(())
 }
+
+fn project_root() -> PathBuf {
+    Path::new(
+        &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()),
+    )
+    .ancestors()
+    .nth(1)
+    .unwrap()
+    .to_path_buf()
+}
+
+fn rust_files() -> impl Iterator<Item = PathBuf> {
+    rust_files_in(&project_root().join("crates"))
+}
+
+#[cfg(test)]
+fn cargo_files() -> impl Iterator<Item = PathBuf> {
+    files_in(&project_root(), "toml")
+        .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
+}
+
+fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
+    files_in(path, "rs")
+}
+
+fn run_rustfmt(mode: Mode) -> Result<()> {
+    let _dir = pushd(project_root())?;
+    let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
+    ensure_rustfmt()?;
+    let check = match mode {
+        Mode::Overwrite => &[][..],
+        Mode::Verify => &["--", "--check"],
+    };
+    cmd!("cargo fmt {check...}").run()?;
+    Ok(())
+}
+
+fn ensure_rustfmt() -> Result<()> {
+    let out = cmd!("rustfmt --version").read()?;
+    if !out.contains("stable") {
+        bail!(
+            "Failed to run rustfmt from toolchain 'stable'. \
+             Please run `rustup component add rustfmt --toolchain stable` to install it.",
+        )
+    }
+    Ok(())
+}
+
+fn run_clippy() -> Result<()> {
+    if cmd!("cargo clippy --version").read().is_err() {
+        bail!(
+            "Failed run cargo clippy. \
+             Please run `rustup component add clippy` to install it.",
+        )
+    }
+
+    let allowed_lints = "
+        -A clippy::collapsible_if
+        -A clippy::needless_pass_by_value
+        -A clippy::nonminimal_bool
+        -A clippy::redundant_pattern_matching
+    "
+    .split_ascii_whitespace();
+    cmd!("cargo clippy --all-features --all-targets -- {allowed_lints...}").run()?;
+    Ok(())
+}
+
+fn run_fuzzer() -> Result<()> {
+    let _d = pushd("./crates/syntax")?;
+    let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly");
+    if cmd!("cargo fuzz --help").read().is_err() {
+        cmd!("cargo install cargo-fuzz").run()?;
+    };
+
+    // Expecting nightly rustc
+    let out = cmd!("rustc --version").read()?;
+    if !out.contains("nightly") {
+        bail!("fuzz tests require nightly rustc")
+    }
+
+    cmd!("cargo fuzz run parser").run()?;
+    Ok(())
+}
+
+fn date_iso() -> Result<String> {
+    let res = cmd!("date --iso --utc").read()?;
+    Ok(res)
+}
+
+fn is_release_tag(tag: &str) -> bool {
+    tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit())
+}
+
+fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
+    let iter = WalkDir::new(path);
+    return iter
+        .into_iter()
+        .filter_entry(|e| !is_hidden(e))
+        .map(|e| e.unwrap())
+        .filter(|e| !e.file_type().is_dir())
+        .map(|e| e.into_path())
+        .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
+
+    fn is_hidden(entry: &DirEntry) -> bool {
+        entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
+    }
+}
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 624ad3b7e..babc2a6d4 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -11,12 +11,12 @@ use xshell::{cmd, mkdir_p, pushd, pushenv, read_file, rm_rf};
 
 type Unit = String;
 
-pub struct MetricsCmd {
-    pub dry_run: bool,
+pub(crate) struct MetricsCmd {
+    pub(crate) dry_run: bool,
 }
 
 impl MetricsCmd {
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         let mut metrics = Metrics::new()?;
         if !self.dry_run {
             rm_rf("./target/release")?;
diff --git a/xtask/src/pre_cache.rs b/xtask/src/pre_cache.rs
index 569f88f68..54f4a95a9 100644
--- a/xtask/src/pre_cache.rs
+++ b/xtask/src/pre_cache.rs
@@ -6,12 +6,12 @@ use std::{
 use anyhow::Result;
 use xshell::rm_rf;
 
-pub struct PreCacheCmd;
+pub(crate) struct PreCacheCmd;
 
 impl PreCacheCmd {
     /// Cleans the `./target` dir after the build such that only
     /// dependencies are cached on CI.
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         let slow_tests_cookie = Path::new("./target/.slow_tests_cookie");
         if !slow_tests_cookie.exists() {
             panic!("slow tests were skipped on CI!")
diff --git a/xtask/src/pre_commit.rs b/xtask/src/pre_commit.rs
index 8f2dbea19..b57cf3ce2 100644
--- a/xtask/src/pre_commit.rs
+++ b/xtask/src/pre_commit.rs
@@ -9,7 +9,7 @@ use crate::{project_root, run_rustfmt, Mode};
 
 // FIXME: if there are changed `.ts` files, also reformat TypeScript (by
 // shelling out to `npm fmt`).
-pub fn run_hook() -> Result<()> {
+pub(crate) fn run_hook() -> Result<()> {
     run_rustfmt(Mode::Overwrite)?;
 
     let diff = cmd!("git diff --diff-filter=MAR --name-only --cached").read()?;
@@ -23,7 +23,7 @@ pub fn run_hook() -> Result<()> {
     Ok(())
 }
 
-pub fn install_hook() -> Result<()> {
+pub(crate) fn install_hook() -> Result<()> {
     let hook_path: PathBuf =
         format!("./.git/hooks/pre-commit{}", std::env::consts::EXE_SUFFIX).into();
 
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index 63556476d..5008881e4 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -4,12 +4,12 @@ use xshell::{cmd, cp, pushd, read_dir, write_file};
 
 use crate::{codegen, date_iso, is_release_tag, project_root, Mode, Result};
 
-pub struct ReleaseCmd {
-    pub dry_run: bool,
+pub(crate) struct ReleaseCmd {
+    pub(crate) dry_run: bool,
 }
 
 impl ReleaseCmd {
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         if !self.dry_run {
             cmd!("git switch release").run()?;
             cmd!("git fetch upstream --tags --force").run()?;
@@ -86,12 +86,12 @@ https://github.com/sponsors/rust-analyzer[GitHub Sponsors].
     }
 }
 
-pub struct PromoteCmd {
-    pub dry_run: bool,
+pub(crate) struct PromoteCmd {
+    pub(crate) dry_run: bool,
 }
 
 impl PromoteCmd {
-    pub fn run(self) -> Result<()> {
+    pub(crate) fn run(self) -> Result<()> {
         let _dir = pushd("../rust-rust-analyzer")?;
         cmd!("git switch master").run()?;
         cmd!("git fetch upstream").run()?;
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs
new file mode 100644
index 000000000..63116ec6b
--- /dev/null
+++ b/xtask/src/tidy.rs
@@ -0,0 +1,424 @@
+use std::{
+    collections::HashMap,
+    path::{Path, PathBuf},
+};
+
+use xshell::{cmd, read_file};
+
+use crate::{
+    cargo_files,
+    codegen::{self, Mode},
+    project_root, run_rustfmt, rust_files,
+};
+
+#[test]
+fn generated_grammar_is_fresh() {
+    if let Err(error) = codegen::generate_syntax(Mode::Verify) {
+        panic!("{}. Please update it by running `cargo xtask codegen`", error);
+    }
+}
+
+#[test]
+fn generated_tests_are_fresh() {
+    if let Err(error) = codegen::generate_parser_tests(Mode::Verify) {
+        panic!("{}. Please update tests by running `cargo xtask codegen`", error);
+    }
+}
+
+#[test]
+fn generated_assists_are_fresh() {
+    if let Err(error) = codegen::generate_assists_tests(Mode::Verify) {
+        panic!("{}. Please update assists by running `cargo xtask codegen`", error);
+    }
+}
+
+#[test]
+fn check_code_formatting() {
+    if let Err(error) = run_rustfmt(Mode::Verify) {
+        panic!("{}. Please format the code by running `cargo format`", error);
+    }
+}
+
+#[test]
+fn smoke_test_docs_generation() {
+    // We don't commit docs to the repo, so we can just overwrite in tests.
+    codegen::generate_assists_docs(Mode::Overwrite).unwrap();
+    codegen::generate_feature_docs(Mode::Overwrite).unwrap();
+    codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap();
+}
+
+#[test]
+fn check_lsp_extensions_docs() {
+    let expected_hash = {
+        let lsp_ext_rs =
+            read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap();
+        stable_hash(lsp_ext_rs.as_str())
+    };
+
+    let actual_hash = {
+        let lsp_extensions_md =
+            read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
+        let text = lsp_extensions_md
+            .lines()
+            .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
+            .unwrap()
+            .trim();
+        u64::from_str_radix(text, 16).unwrap()
+    };
+
+    if actual_hash != expected_hash {
+        panic!(
+            "
+lsp_ext.rs was changed without touching lsp-extensions.md.
+
+Expected hash: {:x}
+Actual hash: {:x}
+
+Please adjust docs/dev/lsp-extensions.md.
+",
+            expected_hash, actual_hash
+        )
+    }
+}
+
+#[test]
+fn rust_files_are_tidy() {
+    let mut tidy_docs = TidyDocs::default();
+    for path in rust_files() {
+        let text = read_file(&path).unwrap();
+        check_todo(&path, &text);
+        check_dbg(&path, &text);
+        check_trailing_ws(&path, &text);
+        deny_clippy(&path, &text);
+        tidy_docs.visit(&path, &text);
+    }
+    tidy_docs.finish();
+}
+
+#[test]
+fn cargo_files_are_tidy() {
+    for cargo in cargo_files() {
+        let mut section = None;
+        for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() {
+            let text = text.trim();
+            if text.starts_with("[") {
+                section = Some(text);
+                continue;
+            }
+            if !section.map(|it| it.starts_with("[dependencies")).unwrap_or(false) {
+                continue;
+            }
+            let text: String = text.split_whitespace().collect();
+            if text.contains("path=") && !text.contains("version") {
+                panic!(
+                    "\ncargo internal dependencies should have version.\n\
+                     {}:{}\n",
+                    cargo.display(),
+                    line_no + 1
+                )
+            }
+        }
+    }
+}
+
+#[test]
+fn check_merge_commits() {
+    let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
+        .read()
+        .unwrap();
+    if !stdout.is_empty() {
+        panic!(
+            "
+Merge commits are not allowed in the history.
+
+When updating a pull-request, please rebase your feature branch
+on top of master by running `git rebase master`. If rebase fails,
+you can re-apply your changes like this:
+
+  # Just look around to see the current state.
+  $ git status
+  $ git log
+
+  # Abort in-progress rebase and merges, if any.
+  $ git rebase --abort
+  $ git merge --abort
+
+  # Make the branch point to the latest commit from master,
+  # while maintaining your local changes uncommited.
+  $ git reset --soft origin/master
+
+  # Commit all changes in a single batch.
+  $ git commit -am'My changes'
+
+  # Verify that everything looks alright.
+  $ git status
+  $ git log
+
+  # Push the changes. We did a rebase, so we need `--force` option.
+  # `--force-with-lease` is a more safe (Rusty) version of `--force`.
+  $ git push --force-with-lease
+
+  # Verify that both local and remote branch point to the same commit.
+  $ git log
+
+And don't fear to mess something up during a rebase -- you can
+always restore the previous state using `git ref-log`:
+
+https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
+"
+        );
+    }
+}
+
+fn deny_clippy(path: &PathBuf, text: &String) {
+    let ignore = &[
+        // The documentation in string literals may contain anything for its own purposes
+        "ide_completion/src/generated_lint_completions.rs",
+    ];
+    if ignore.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+
+    if text.contains("\u{61}llow(clippy") {
+        panic!(
+            "\n\nallowing lints is forbidden: {}.
+rust-analyzer intentionally doesn't check clippy on CI.
+You can allow lint globally via `xtask clippy`.
+See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
+
+",
+            path.display()
+        )
+    }
+}
+
+#[test]
+fn check_licenses() {
+    let expected = "
+0BSD OR MIT OR Apache-2.0
+Apache-2.0
+Apache-2.0 OR BSL-1.0
+Apache-2.0 OR MIT
+Apache-2.0/MIT
+BSD-3-Clause
+CC0-1.0
+ISC
+MIT
+MIT / Apache-2.0
+MIT OR Apache-2.0
+MIT OR Apache-2.0 OR Zlib
+MIT OR Zlib OR Apache-2.0
+MIT/Apache-2.0
+Unlicense OR MIT
+Unlicense/MIT
+Zlib OR Apache-2.0 OR MIT
+"
+    .lines()
+    .filter(|it| !it.is_empty())
+    .collect::<Vec<_>>();
+
+    let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
+    let mut licenses = meta
+        .split(|c| c == ',' || c == '{' || c == '}')
+        .filter(|it| it.contains(r#""license""#))
+        .map(|it| it.trim())
+        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
+        .collect::<Vec<_>>();
+    licenses.sort();
+    licenses.dedup();
+    if licenses != expected {
+        let mut diff = String::new();
+
+        diff += &format!("New Licenses:\n");
+        for &l in licenses.iter() {
+            if !expected.contains(&l) {
+                diff += &format!(" {}\n", l)
+            }
+        }
+
+        diff += &format!("\nMissing Licenses:\n");
+        for &l in expected.iter() {
+            if !licenses.contains(&l) {
+                diff += &format!(" {}\n", l)
+            }
+        }
+
+        panic!("different set of licenses!\n{}", diff);
+    }
+    assert_eq!(licenses, expected);
+}
+
+fn check_todo(path: &Path, text: &str) {
+    let need_todo = &[
+        // This file itself obviously needs to use todo (<- like this!).
+        "tests/tidy.rs",
+        // Some of our assists generate `todo!()`.
+        "handlers/add_turbo_fish.rs",
+        "handlers/generate_function.rs",
+        // To support generating `todo!()` in assists, we have `expr_todo()` in
+        // `ast::make`.
+        "ast/make.rs",
+        // The documentation in string literals may contain anything for its own purposes
+        "ide_completion/src/generated_lint_completions.rs",
+    ];
+    if need_todo.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
+        // Generated by an assist
+        if text.contains("${0:todo!()}") {
+            return;
+        }
+
+        panic!(
+            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
+             use FIXME instead\n\
+             {}\n",
+            path.display(),
+        )
+    }
+}
+
+fn check_dbg(path: &Path, text: &str) {
+    let need_dbg = &[
+        // This file itself obviously needs to use dbg.
+        "tests/tidy.rs",
+        // Assists to remove `dbg!()`
+        "handlers/remove_dbg.rs",
+        // We have .dbg postfix
+        "ide_completion/src/completions/postfix.rs",
+        // The documentation in string literals may contain anything for its own purposes
+        "ide_completion/src/lib.rs",
+        "ide_completion/src/generated_lint_completions.rs",
+        // test for doc test for remove_dbg
+        "src/tests/generated.rs",
+    ];
+    if need_dbg.iter().any(|p| path.ends_with(p)) {
+        return;
+    }
+    if text.contains("dbg!") {
+        panic!(
+            "\ndbg! macros should not be committed to the master branch,\n\
+             {}\n",
+            path.display(),
+        )
+    }
+}
+
+fn check_trailing_ws(path: &Path, text: &str) {
+    if is_exclude_dir(path, &["test_data"]) {
+        return;
+    }
+    for (line_number, line) in text.lines().enumerate() {
+        if line.chars().last().map(char::is_whitespace) == Some(true) {
+            panic!("Trailing whitespace in {} at line {}", path.display(), line_number)
+        }
+    }
+}
+
+#[derive(Default)]
+struct TidyDocs {
+    missing_docs: Vec<String>,
+    contains_fixme: Vec<PathBuf>,
+}
+
+impl TidyDocs {
+    fn visit(&mut self, path: &Path, text: &str) {
+        // Test hopefully don't really need comments, and for assists we already
+        // have special comments which are source of doc tests and user docs.
+        if is_exclude_dir(path, &["tests", "test_data"]) {
+            return;
+        }
+
+        if is_exclude_file(path) {
+            return;
+        }
+
+        let first_line = match text.lines().next() {
+            Some(it) => it,
+            None => return,
+        };
+
+        if first_line.starts_with("//!") {
+            if first_line.contains("FIXME") {
+                self.contains_fixme.push(path.to_path_buf());
+            }
+        } else {
+            if text.contains("// Feature:") || text.contains("// Assist:") {
+                return;
+            }
+            self.missing_docs.push(path.display().to_string());
+        }
+
+        fn is_exclude_file(d: &Path) -> bool {
+            let file_names = ["tests.rs", "famous_defs_fixture.rs"];
+
+            d.file_name()
+                .unwrap_or_default()
+                .to_str()
+                .map(|f_n| file_names.iter().any(|name| *name == f_n))
+                .unwrap_or(false)
+        }
+    }
+
+    fn finish(self) {
+        if !self.missing_docs.is_empty() {
+            panic!(
+                "\nMissing docs strings\n\n\
+                 modules:\n{}\n\n",
+                self.missing_docs.join("\n")
+            )
+        }
+
+        let poorly_documented = [
+            "hir",
+            "hir_expand",
+            "ide",
+            "mbe",
+            "parser",
+            "profile",
+            "project_model",
+            "syntax",
+            "tt",
+            "hir_ty",
+        ];
+
+        let mut has_fixmes =
+            poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>();
+        'outer: for path in self.contains_fixme {
+            for krate in poorly_documented.iter() {
+                if path.components().any(|it| it.as_os_str() == *krate) {
+                    has_fixmes.insert(krate, true);
+                    continue 'outer;
+                }
+            }
+            panic!("FIXME doc in a fully-documented crate: {}", path.display())
+        }
+
+        for (krate, has_fixme) in has_fixmes.iter() {
+            if !has_fixme {
+                panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate)
+            }
+        }
+    }
+}
+
+fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
+    p.strip_prefix(project_root())
+        .unwrap()
+        .components()
+        .rev()
+        .skip(1)
+        .filter_map(|it| it.as_os_str().to_str())
+        .any(|it| dirs_to_exclude.contains(&it))
+}
+
+#[allow(deprecated)]
+fn stable_hash(text: &str) -> u64 {
+    use std::hash::{Hash, Hasher, SipHasher};
+
+    let text = text.replace('\r', "");
+    let mut hasher = SipHasher::default();
+    text.hash(&mut hasher);
+    hasher.finish()
+}
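Note on the new tidy checks: check_lsp_extensions_docs ties crates/rust-analyzer/src/lsp_ext.rs to the hash recorded on the "lsp_ext.rs hash:" line of docs/dev/lsp-extensions.md, using the stable_hash helper at the end of tidy.rs. A minimal sketch of recomputing that hash outside the test suite, assuming it is run from the repository root; the standalone main wrapper and the hard-coded path are illustrative, not part of this commit:

// Recompute the lsp_ext.rs hash the same way stable_hash() in tidy.rs does:
// strip '\r', then hash the text with std's (deprecated) SipHasher.
#[allow(deprecated)]
fn main() {
    use std::hash::{Hash, Hasher, SipHasher};

    let text = std::fs::read_to_string("crates/rust-analyzer/src/lsp_ext.rs")
        .expect("run from the repository root");
    let text = text.replace('\r', "");
    let mut hasher = SipHasher::default();
    text.hash(&mut hasher);
    // Print in hex, the format the docs line is compared against.
    println!("lsp_ext.rs hash: {:x}", hasher.finish());
}

Since tidy.rs is compiled in as a #[cfg(test)] module of the xtask binary, these checks should run under `cargo test -p xtask`.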