Diffstat (limited to 'xtask/src')
-rw-r--r-- | xtask/src/codegen.rs                      |  82
-rw-r--r-- | xtask/src/codegen/gen_assists_docs.rs     |  20
-rw-r--r-- | xtask/src/codegen/gen_diagnostic_docs.rs  |   8
-rw-r--r-- | xtask/src/codegen/gen_feature_docs.rs     |   8
-rw-r--r-- | xtask/src/codegen/gen_features.rs         |  48
-rw-r--r-- | xtask/src/codegen/gen_lint_completions.rs |  78
-rw-r--r-- | xtask/src/codegen/gen_parser_tests.rs     |  12
-rw-r--r-- | xtask/src/codegen/gen_syntax.rs           |  10
-rw-r--r-- | xtask/src/dist.rs                         |   8
-rw-r--r-- | xtask/src/flags.rs                        | 154
-rw-r--r-- | xtask/src/install.rs                      | 126
-rw-r--r-- | xtask/src/lib.rs                          | 131
-rw-r--r-- | xtask/src/main.rs                         | 274
-rw-r--r-- | xtask/src/metrics.rs                      |  12
-rw-r--r-- | xtask/src/pre_cache.rs                    |   6
-rw-r--r-- | xtask/src/pre_commit.rs                   |  38
-rw-r--r-- | xtask/src/release.rs                      |  21
-rw-r--r-- | xtask/src/tidy.rs                         | 447
18 files changed, 908 insertions, 575 deletions
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
index adea053b6..518e17e38 100644
--- a/xtask/src/codegen.rs
+++ b/xtask/src/codegen.rs
@@ -7,72 +7,66 @@
7 | 7 | ||
8 | mod gen_syntax; | 8 | mod gen_syntax; |
9 | mod gen_parser_tests; | 9 | mod gen_parser_tests; |
10 | mod gen_lint_completions; | ||
10 | mod gen_assists_docs; | 11 | mod gen_assists_docs; |
11 | mod gen_feature_docs; | 12 | mod gen_feature_docs; |
12 | mod gen_lint_completions; | ||
13 | mod gen_diagnostic_docs; | 13 | mod gen_diagnostic_docs; |
14 | 14 | ||
15 | use std::{ | 15 | use std::{ |
16 | fmt, mem, | 16 | fmt, mem, |
17 | path::{Path, PathBuf}, | 17 | path::{Path, PathBuf}, |
18 | }; | 18 | }; |
19 | use xshell::{cmd, pushenv, read_file, write_file}; | 19 | use xshell::{cmd, pushenv}; |
20 | 20 | ||
21 | use crate::{ensure_rustfmt, project_root, Result}; | 21 | use crate::{ensure_rustfmt, project_root, Result}; |
22 | 22 | ||
23 | pub use self::{ | 23 | pub(crate) use self::{ |
24 | gen_assists_docs::{generate_assists_docs, generate_assists_tests}, | 24 | gen_assists_docs::generate_assists_tests, gen_lint_completions::generate_lint_completions, |
25 | gen_diagnostic_docs::generate_diagnostic_docs, | 25 | gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax, |
26 | gen_feature_docs::generate_feature_docs, | ||
27 | gen_lint_completions::generate_lint_completions, | ||
28 | gen_parser_tests::generate_parser_tests, | ||
29 | gen_syntax::generate_syntax, | ||
30 | }; | 26 | }; |
31 | 27 | ||
32 | #[derive(Debug, PartialEq, Eq, Clone, Copy)] | 28 | pub(crate) fn docs() -> Result<()> { |
33 | pub enum Mode { | 29 | // We don't commit docs to the repo, so we can just overwrite them. |
34 | Overwrite, | 30 | gen_assists_docs::generate_assists_docs()?; |
35 | Verify, | 31 | gen_feature_docs::generate_feature_docs()?; |
32 | gen_diagnostic_docs::generate_diagnostic_docs()?; | ||
33 | Ok(()) | ||
36 | } | 34 | } |
37 | 35 | ||
38 | pub struct CodegenCmd { | 36 | #[allow(unused)] |
39 | pub features: bool, | 37 | fn used() { |
38 | generate_parser_tests(); | ||
39 | generate_assists_tests(); | ||
40 | generate_syntax(); | ||
41 | generate_lint_completions(); | ||
40 | } | 42 | } |
41 | 43 | ||
42 | impl CodegenCmd { | 44 | /// Checks that the `file` has the specified `contents`. If that is not the |
43 | pub fn run(self) -> Result<()> { | 45 | /// case, updates the file and then fails the test. |
44 | if self.features { | 46 | pub(crate) fn ensure_file_contents(file: &Path, contents: &str) -> Result<()> { |
45 | generate_lint_completions(Mode::Overwrite)?; | 47 | match std::fs::read_to_string(file) { |
46 | } | 48 | Ok(old_contents) if normalize_newlines(&old_contents) == normalize_newlines(contents) => { |
47 | generate_syntax(Mode::Overwrite)?; | 49 | return Ok(()) |
48 | generate_parser_tests(Mode::Overwrite)?; | ||
49 | generate_assists_tests(Mode::Overwrite)?; | ||
50 | generate_assists_docs(Mode::Overwrite)?; | ||
51 | generate_feature_docs(Mode::Overwrite)?; | ||
52 | generate_diagnostic_docs(Mode::Overwrite)?; | ||
53 | Ok(()) | ||
54 | } | ||
55 | } | ||
56 | |||
57 | /// A helper to update file on disk if it has changed. | ||
58 | /// With verify = false, | ||
59 | fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> { | ||
60 | match read_file(path) { | ||
61 | Ok(old_contents) if normalize(&old_contents) == normalize(contents) => { | ||
62 | return Ok(()); | ||
63 | } | 50 | } |
64 | _ => (), | 51 | _ => (), |
65 | } | 52 | } |
66 | if mode == Mode::Verify { | 53 | let display_path = file.strip_prefix(&project_root()).unwrap_or(file); |
67 | anyhow::bail!("`{}` is not up-to-date", path.display()); | 54 | eprintln!( |
55 | "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n", | ||
56 | display_path.display() | ||
57 | ); | ||
58 | if std::env::var("CI").is_ok() { | ||
59 | eprintln!(" NOTE: run `cargo test` locally and commit the updated files\n"); | ||
68 | } | 60 | } |
69 | eprintln!("updating {}", path.display()); | 61 | if let Some(parent) = file.parent() { |
70 | write_file(path, contents)?; | 62 | let _ = std::fs::create_dir_all(parent); |
71 | return Ok(()); | ||
72 | |||
73 | fn normalize(s: &str) -> String { | ||
74 | s.replace("\r\n", "\n") | ||
75 | } | 63 | } |
64 | std::fs::write(file, contents).unwrap(); | ||
65 | anyhow::bail!("some file was not up to date and has been updated, simply re-run the tests") | ||
66 | } | ||
67 | |||
68 | fn normalize_newlines(s: &str) -> String { | ||
69 | s.replace("\r\n", "\n") | ||
76 | } | 70 | } |
77 | 71 | ||
78 | const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`"; | 72 | const PREAMBLE: &str = "Generated file, do not edit by hand, see `xtask/src/codegen`"; |
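The net effect of this hunk: the `Mode::Overwrite`/`Mode::Verify` switch is gone, and generated files are now kept fresh by ordinary tests; `ensure_file_contents` rewrites a stale file and then fails so the run (locally or on CI) flags it. A minimal standalone sketch of that pattern, not the exact xtask code:

    use std::path::Path;

    /// Compare `file` with `contents` (ignoring CRLF/LF differences); if the
    /// file is stale, rewrite it and return an error so the caller's test fails.
    fn ensure_file_contents(file: &Path, contents: &str) -> Result<(), String> {
        let normalize = |s: &str| s.replace("\r\n", "\n");
        if let Ok(old) = std::fs::read_to_string(file) {
            if normalize(&old) == normalize(contents) {
                return Ok(());
            }
        }
        if let Some(parent) = file.parent() {
            let _ = std::fs::create_dir_all(parent);
        }
        std::fs::write(file, contents).map_err(|err| err.to_string())?;
        Err(format!("{} was not up-to-date and has been updated", file.display()))
    }

    fn main() {
        // First call writes the file and reports it as stale; the second passes.
        let target = Path::new("target/demo_generated.txt");
        println!("{:?}", ensure_file_contents(target, "// generated\n"));
        println!("{:?}", ensure_file_contents(target, "// generated\n"));
    }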
diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs
index 1ae1343a5..158680993 100644
--- a/xtask/src/codegen/gen_assists_docs.rs
+++ b/xtask/src/codegen/gen_assists_docs.rs
@@ -2,22 +2,25 @@
2 | 2 | ||
3 | use std::{fmt, path::Path}; | 3 | use std::{fmt, path::Path}; |
4 | 4 | ||
5 | use xshell::write_file; | ||
6 | |||
5 | use crate::{ | 7 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, Mode, PREAMBLE}, | 8 | codegen::{self, extract_comment_blocks_with_empty_lines, reformat, Location, PREAMBLE}, |
7 | project_root, rust_files_in, Result, | 9 | project_root, rust_files_in, Result, |
8 | }; | 10 | }; |
9 | 11 | ||
10 | pub fn generate_assists_tests(mode: Mode) -> Result<()> { | 12 | pub(crate) fn generate_assists_tests() -> Result<()> { |
11 | let assists = Assist::collect()?; | 13 | let assists = Assist::collect()?; |
12 | generate_tests(&assists, mode) | 14 | generate_tests(&assists) |
13 | } | 15 | } |
14 | 16 | ||
15 | pub fn generate_assists_docs(mode: Mode) -> Result<()> { | 17 | pub(crate) fn generate_assists_docs() -> Result<()> { |
16 | let assists = Assist::collect()?; | 18 | let assists = Assist::collect()?; |
17 | let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 19 | let contents = assists.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
18 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); | 20 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
19 | let dst = project_root().join("docs/user/generated_assists.adoc"); | 21 | let dst = project_root().join("docs/user/generated_assists.adoc"); |
20 | codegen::update(&dst, &contents, mode) | 22 | write_file(dst, &contents)?; |
23 | Ok(()) | ||
21 | } | 24 | } |
22 | 25 | ||
23 | #[derive(Debug)] | 26 | #[derive(Debug)] |
@@ -111,7 +114,7 @@ impl fmt::Display for Assist {
111 | } | 114 | } |
112 | } | 115 | } |
113 | 116 | ||
114 | fn generate_tests(assists: &[Assist], mode: Mode) -> Result<()> { | 117 | fn generate_tests(assists: &[Assist]) -> Result<()> { |
115 | let mut buf = String::from("use super::check_doc_test;\n"); | 118 | let mut buf = String::from("use super::check_doc_test;\n"); |
116 | 119 | ||
117 | for assist in assists.iter() { | 120 | for assist in assists.iter() { |
@@ -135,7 +138,10 @@ r#####"
135 | buf.push_str(&test) | 138 | buf.push_str(&test) |
136 | } | 139 | } |
137 | let buf = reformat(&buf)?; | 140 | let buf = reformat(&buf)?; |
138 | codegen::update(&project_root().join("crates/ide_assists/src/tests/generated.rs"), &buf, mode) | 141 | codegen::ensure_file_contents( |
142 | &project_root().join("crates/ide_assists/src/tests/generated.rs"), | ||
143 | &buf, | ||
144 | ) | ||
139 | } | 145 | } |
140 | 146 | ||
141 | fn hide_hash_comments(text: &str) -> String { | 147 | fn hide_hash_comments(text: &str) -> String { |
diff --git a/xtask/src/codegen/gen_diagnostic_docs.rs b/xtask/src/codegen/gen_diagnostic_docs.rs
index 7c14d4a07..9cf4d0a88 100644
--- a/xtask/src/codegen/gen_diagnostic_docs.rs
+++ b/xtask/src/codegen/gen_diagnostic_docs.rs
@@ -2,18 +2,20 @@
2 | 2 | ||
3 | use std::{fmt, path::PathBuf}; | 3 | use std::{fmt, path::PathBuf}; |
4 | 4 | ||
5 | use xshell::write_file; | ||
6 | |||
5 | use crate::{ | 7 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode, PREAMBLE}, | 8 | codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE}, |
7 | project_root, rust_files, Result, | 9 | project_root, rust_files, Result, |
8 | }; | 10 | }; |
9 | 11 | ||
10 | pub fn generate_diagnostic_docs(mode: Mode) -> Result<()> { | 12 | pub(crate) fn generate_diagnostic_docs() -> Result<()> { |
11 | let diagnostics = Diagnostic::collect()?; | 13 | let diagnostics = Diagnostic::collect()?; |
12 | let contents = | 14 | let contents = |
13 | diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 15 | diagnostics.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
14 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); | 16 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
15 | let dst = project_root().join("docs/user/generated_diagnostic.adoc"); | 17 | let dst = project_root().join("docs/user/generated_diagnostic.adoc"); |
16 | codegen::update(&dst, &contents, mode)?; | 18 | write_file(&dst, &contents)?; |
17 | Ok(()) | 19 | Ok(()) |
18 | } | 20 | } |
19 | 21 | ||
diff --git a/xtask/src/codegen/gen_feature_docs.rs b/xtask/src/codegen/gen_feature_docs.rs
index 61081063b..c373d7d70 100644
--- a/xtask/src/codegen/gen_feature_docs.rs
+++ b/xtask/src/codegen/gen_feature_docs.rs
@@ -2,17 +2,19 @@
2 | 2 | ||
3 | use std::{fmt, path::PathBuf}; | 3 | use std::{fmt, path::PathBuf}; |
4 | 4 | ||
5 | use xshell::write_file; | ||
6 | |||
5 | use crate::{ | 7 | use crate::{ |
6 | codegen::{self, extract_comment_blocks_with_empty_lines, Location, Mode, PREAMBLE}, | 8 | codegen::{extract_comment_blocks_with_empty_lines, Location, PREAMBLE}, |
7 | project_root, rust_files, Result, | 9 | project_root, rust_files, Result, |
8 | }; | 10 | }; |
9 | 11 | ||
10 | pub fn generate_feature_docs(mode: Mode) -> Result<()> { | 12 | pub(crate) fn generate_feature_docs() -> Result<()> { |
11 | let features = Feature::collect()?; | 13 | let features = Feature::collect()?; |
12 | let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); | 14 | let contents = features.into_iter().map(|it| it.to_string()).collect::<Vec<_>>().join("\n\n"); |
13 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); | 15 | let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); |
14 | let dst = project_root().join("docs/user/generated_features.adoc"); | 16 | let dst = project_root().join("docs/user/generated_features.adoc"); |
15 | codegen::update(&dst, &contents, mode)?; | 17 | write_file(&dst, &contents)?; |
16 | Ok(()) | 18 | Ok(()) |
17 | } | 19 | } |
18 | 20 | ||
diff --git a/xtask/src/codegen/gen_features.rs b/xtask/src/codegen/gen_features.rs
deleted file mode 100644
index 3cf15ce02..000000000
--- a/xtask/src/codegen/gen_features.rs
+++ /dev/null
@@ -1,48 +0,0 @@
1 | //! Generates descriptors structure for unstable feature from Unstable Book | ||
2 | use std::path::{Path, PathBuf}; | ||
3 | |||
4 | use quote::quote; | ||
5 | use walkdir::WalkDir; | ||
6 | use xshell::{cmd, read_file}; | ||
7 | |||
8 | use crate::codegen::{project_root, reformat, update, Mode, Result}; | ||
9 | |||
10 | pub fn generate_features(mode: Mode) -> Result<()> { | ||
11 | if !Path::new("./target/rust").exists() { | ||
12 | cmd!("git clone https://github.com/rust-lang/rust ./target/rust").run()?; | ||
13 | } | ||
14 | |||
15 | let contents = generate_descriptor("./target/rust/src/doc/unstable-book/src".into())?; | ||
16 | |||
17 | let destination = project_root().join("crates/ide/src/completion/generated_features.rs"); | ||
18 | update(destination.as_path(), &contents, mode)?; | ||
19 | |||
20 | Ok(()) | ||
21 | } | ||
22 | |||
23 | fn generate_descriptor(src_dir: PathBuf) -> Result<String> { | ||
24 | let definitions = ["language-features", "library-features"] | ||
25 | .iter() | ||
26 | .flat_map(|it| WalkDir::new(src_dir.join(it))) | ||
27 | .filter_map(|e| e.ok()) | ||
28 | .filter(|entry| { | ||
29 | // Get all `.md ` files | ||
30 | entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md" | ||
31 | }) | ||
32 | .map(|entry| { | ||
33 | let path = entry.path(); | ||
34 | let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_"); | ||
35 | let doc = read_file(path).unwrap(); | ||
36 | |||
37 | quote! { LintCompletion { label: #feature_ident, description: #doc } } | ||
38 | }); | ||
39 | |||
40 | let ts = quote! { | ||
41 | use crate::completion::complete_attribute::LintCompletion; | ||
42 | |||
43 | pub(super) const FEATURES: &[LintCompletion] = &[ | ||
44 | #(#definitions),* | ||
45 | ]; | ||
46 | }; | ||
47 | reformat(&ts.to_string()) | ||
48 | } | ||
diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs
index 25f770eaf..24dbc6a39 100644
--- a/xtask/src/codegen/gen_lint_completions.rs
+++ b/xtask/src/codegen/gen_lint_completions.rs
@@ -1,41 +1,36 @@
1 | //! Generates descriptors structure for unstable feature from Unstable Book | 1 | //! Generates descriptors structure for unstable feature from Unstable Book |
2 | use std::fmt::Write; | ||
2 | use std::path::{Path, PathBuf}; | 3 | use std::path::{Path, PathBuf}; |
3 | 4 | ||
4 | use quote::quote; | ||
5 | use walkdir::WalkDir; | 5 | use walkdir::WalkDir; |
6 | use xshell::{cmd, read_file}; | 6 | use xshell::{cmd, read_file}; |
7 | 7 | ||
8 | use crate::{ | 8 | use crate::codegen::{ensure_file_contents, project_root, reformat, Result}; |
9 | codegen::{project_root, reformat, update, Mode, Result}, | ||
10 | run_rustfmt, | ||
11 | }; | ||
12 | 9 | ||
13 | pub fn generate_lint_completions(mode: Mode) -> Result<()> { | 10 | pub(crate) fn generate_lint_completions() -> Result<()> { |
14 | if !Path::new("./target/rust").exists() { | 11 | if !project_root().join("./target/rust").exists() { |
15 | cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?; | 12 | cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?; |
16 | } | 13 | } |
17 | 14 | ||
18 | let ts_features = generate_descriptor("./target/rust/src/doc/unstable-book/src".into())?; | 15 | let mut contents = String::from("use crate::completions::attribute::LintCompletion;\n\n"); |
19 | cmd!("curl http://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?; | 16 | generate_descriptor(&mut contents, "./target/rust/src/doc/unstable-book/src".into())?; |
17 | contents.push('\n'); | ||
20 | 18 | ||
21 | let ts_clippy = generate_descriptor_clippy(&Path::new("./target/clippy_lints.json"))?; | 19 | cmd!("curl http://rust-lang.github.io/rust-clippy/master/lints.json --output ./target/clippy_lints.json").run()?; |
22 | let ts = quote! { | 20 | generate_descriptor_clippy(&mut contents, &Path::new("./target/clippy_lints.json"))?; |
23 | use crate::completions::attribute::LintCompletion; | 21 | let contents = reformat(&contents)?; |
24 | #ts_features | ||
25 | #ts_clippy | ||
26 | }; | ||
27 | let contents = reformat(ts.to_string().as_str())?; | ||
28 | 22 | ||
29 | let destination = | 23 | let destination = |
30 | project_root().join("crates/ide_completion/src/generated_lint_completions.rs"); | 24 | project_root().join("crates/ide_completion/src/generated_lint_completions.rs"); |
31 | update(destination.as_path(), &contents, mode)?; | 25 | ensure_file_contents(destination.as_path(), &contents)?; |
32 | run_rustfmt(mode)?; | ||
33 | 26 | ||
34 | Ok(()) | 27 | Ok(()) |
35 | } | 28 | } |
36 | 29 | ||
37 | fn generate_descriptor(src_dir: PathBuf) -> Result<proc_macro2::TokenStream> { | 30 | fn generate_descriptor(buf: &mut String, src_dir: PathBuf) -> Result<()> { |
38 | let definitions = ["language-features", "library-features"] | 31 | buf.push_str(r#"pub(super) const FEATURES: &[LintCompletion] = &["#); |
32 | buf.push('\n'); | ||
33 | ["language-features", "library-features"] | ||
39 | .iter() | 34 | .iter() |
40 | .flat_map(|it| WalkDir::new(src_dir.join(it))) | 35 | .flat_map(|it| WalkDir::new(src_dir.join(it))) |
41 | .filter_map(|e| e.ok()) | 36 | .filter_map(|e| e.ok()) |
@@ -43,21 +38,15 @@ fn generate_descriptor(src_dir: PathBuf) -> Result<proc_macro2::TokenStream> {
43 | // Get all `.md ` files | 38 | // Get all `.md ` files |
44 | entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md" | 39 | entry.file_type().is_file() && entry.path().extension().unwrap_or_default() == "md" |
45 | }) | 40 | }) |
46 | .map(|entry| { | 41 | .for_each(|entry| { |
47 | let path = entry.path(); | 42 | let path = entry.path(); |
48 | let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_"); | 43 | let feature_ident = path.file_stem().unwrap().to_str().unwrap().replace("-", "_"); |
49 | let doc = read_file(path).unwrap(); | 44 | let doc = read_file(path).unwrap(); |
50 | 45 | ||
51 | quote! { LintCompletion { label: #feature_ident, description: #doc } } | 46 | push_lint_completion(buf, &feature_ident, &doc); |
52 | }); | 47 | }); |
53 | 48 | buf.push_str("];\n"); | |
54 | let ts = quote! { | 49 | Ok(()) |
55 | pub(super) const FEATURES: &[LintCompletion] = &[ | ||
56 | #(#definitions),* | ||
57 | ]; | ||
58 | }; | ||
59 | |||
60 | Ok(ts) | ||
61 | } | 50 | } |
62 | 51 | ||
63 | #[derive(Default)] | 52 | #[derive(Default)] |
@@ -66,7 +55,7 @@ struct ClippyLint {
66 | id: String, | 55 | id: String, |
67 | } | 56 | } |
68 | 57 | ||
69 | fn generate_descriptor_clippy(path: &Path) -> Result<proc_macro2::TokenStream> { | 58 | fn generate_descriptor_clippy(buf: &mut String, path: &Path) -> Result<()> { |
70 | let file_content = read_file(path)?; | 59 | let file_content = read_file(path)?; |
71 | let mut clippy_lints: Vec<ClippyLint> = vec![]; | 60 | let mut clippy_lints: Vec<ClippyLint> = vec![]; |
72 | 61 | ||
@@ -97,18 +86,27 @@ fn generate_descriptor_clippy(path: &Path) -> Result<proc_macro2::TokenStream> {
97 | } | 86 | } |
98 | } | 87 | } |
99 | 88 | ||
100 | let definitions = clippy_lints.into_iter().map(|clippy_lint| { | 89 | buf.push_str(r#"pub(super) const CLIPPY_LINTS: &[LintCompletion] = &["#); |
90 | buf.push('\n'); | ||
91 | clippy_lints.into_iter().for_each(|clippy_lint| { | ||
101 | let lint_ident = format!("clippy::{}", clippy_lint.id); | 92 | let lint_ident = format!("clippy::{}", clippy_lint.id); |
102 | let doc = clippy_lint.help; | 93 | let doc = clippy_lint.help; |
103 | 94 | push_lint_completion(buf, &lint_ident, &doc); | |
104 | quote! { LintCompletion { label: #lint_ident, description: #doc } } | ||
105 | }); | 95 | }); |
106 | 96 | ||
107 | let ts = quote! { | 97 | buf.push_str("];\n"); |
108 | pub(super) const CLIPPY_LINTS: &[LintCompletion] = &[ | 98 | |
109 | #(#definitions),* | 99 | Ok(()) |
110 | ]; | 100 | } |
111 | }; | ||
112 | 101 | ||
113 | Ok(ts) | 102 | fn push_lint_completion(buf: &mut String, label: &str, description: &str) { |
103 | writeln!( | ||
104 | buf, | ||
105 | r###" LintCompletion {{ | ||
106 | label: "{}", | ||
107 | description: r##"{}"## | ||
108 | }},"###, | ||
109 | label, description | ||
110 | ) | ||
111 | .unwrap(); | ||
114 | } | 112 | } |
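This hunk swaps the `quote!`-based `proc_macro2::TokenStream` generation for plain string formatting; the `r##"…"##` raw-string delimiters let lint descriptions contain quotes without escaping. A condensed, runnable sketch of the formatting step (the emitted `LintCompletion` initializers are only generated text here, so the type itself is not needed; the sample label/description values are illustrative):

    use std::fmt::Write;

    /// Append one `LintCompletion { .. }` initializer to the generated source.
    /// The `r##"…"##` delimiters mean quotes inside `description` need no escaping.
    fn push_lint_completion(buf: &mut String, label: &str, description: &str) {
        writeln!(
            buf,
            r###"    LintCompletion {{ label: "{}", description: r##"{}"## }},"###,
            label, description
        )
        .unwrap();
    }

    fn main() {
        let mut buf = String::from("pub(super) const FEATURES: &[LintCompletion] = &[\n");
        push_lint_completion(&mut buf, "box_syntax", "Allows `box expr` syntax.");
        buf.push_str("];\n");
        print!("{}", buf);
    }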
diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs
index 6e4abd10c..096590653 100644
--- a/xtask/src/codegen/gen_parser_tests.rs
+++ b/xtask/src/codegen/gen_parser_tests.rs
@@ -8,13 +8,13 @@ use std::{
8 | }; | 8 | }; |
9 | 9 | ||
10 | use crate::{ | 10 | use crate::{ |
11 | codegen::{extract_comment_blocks, update, Mode}, | 11 | codegen::{ensure_file_contents, extract_comment_blocks}, |
12 | project_root, Result, | 12 | project_root, Result, |
13 | }; | 13 | }; |
14 | 14 | ||
15 | pub fn generate_parser_tests(mode: Mode) -> Result<()> { | 15 | pub(crate) fn generate_parser_tests() -> Result<()> { |
16 | let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?; | 16 | let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?; |
17 | fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> { | 17 | fn install_tests(tests: &HashMap<String, Test>, into: &str) -> Result<()> { |
18 | let tests_dir = project_root().join(into); | 18 | let tests_dir = project_root().join(into); |
19 | if !tests_dir.is_dir() { | 19 | if !tests_dir.is_dir() { |
20 | fs::create_dir_all(&tests_dir)?; | 20 | fs::create_dir_all(&tests_dir)?; |
@@ -35,12 +35,12 @@ pub fn generate_parser_tests(mode: Mode) -> Result<()> {
35 | tests_dir.join(file_name) | 35 | tests_dir.join(file_name) |
36 | } | 36 | } |
37 | }; | 37 | }; |
38 | update(&path, &test.text, mode)?; | 38 | ensure_file_contents(&path, &test.text)?; |
39 | } | 39 | } |
40 | Ok(()) | 40 | Ok(()) |
41 | } | 41 | } |
42 | install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok", mode)?; | 42 | install_tests(&tests.ok, "crates/syntax/test_data/parser/inline/ok")?; |
43 | install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err", mode) | 43 | install_tests(&tests.err, "crates/syntax/test_data/parser/inline/err") |
44 | } | 44 | } |
45 | 45 | ||
46 | #[derive(Debug)] | 46 | #[derive(Debug)] |
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
index eb524d85a..80f26e8f5 100644
--- a/xtask/src/codegen/gen_syntax.rs
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -14,25 +14,25 @@ use ungrammar::{rust_grammar, Grammar, Rule};
14 | 14 | ||
15 | use crate::{ | 15 | use crate::{ |
16 | ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}, | 16 | ast_src::{AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC}, |
17 | codegen::{reformat, update, Mode}, | 17 | codegen::{ensure_file_contents, reformat}, |
18 | project_root, Result, | 18 | project_root, Result, |
19 | }; | 19 | }; |
20 | 20 | ||
21 | pub fn generate_syntax(mode: Mode) -> Result<()> { | 21 | pub(crate) fn generate_syntax() -> Result<()> { |
22 | let grammar = rust_grammar(); | 22 | let grammar = rust_grammar(); |
23 | let ast = lower(&grammar); | 23 | let ast = lower(&grammar); |
24 | 24 | ||
25 | let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs"); | 25 | let syntax_kinds_file = project_root().join("crates/parser/src/syntax_kind/generated.rs"); |
26 | let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?; | 26 | let syntax_kinds = generate_syntax_kinds(KINDS_SRC)?; |
27 | update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?; | 27 | ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds)?; |
28 | 28 | ||
29 | let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs"); | 29 | let ast_tokens_file = project_root().join("crates/syntax/src/ast/generated/tokens.rs"); |
30 | let contents = generate_tokens(&ast)?; | 30 | let contents = generate_tokens(&ast)?; |
31 | update(ast_tokens_file.as_path(), &contents, mode)?; | 31 | ensure_file_contents(ast_tokens_file.as_path(), &contents)?; |
32 | 32 | ||
33 | let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs"); | 33 | let ast_nodes_file = project_root().join("crates/syntax/src/ast/generated/nodes.rs"); |
34 | let contents = generate_nodes(KINDS_SRC, &ast)?; | 34 | let contents = generate_nodes(KINDS_SRC, &ast)?; |
35 | update(ast_nodes_file.as_path(), &contents, mode)?; | 35 | ensure_file_contents(ast_nodes_file.as_path(), &contents)?; |
36 | 36 | ||
37 | Ok(()) | 37 | Ok(()) |
38 | } | 38 | } |
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 56bf9f99d..f2503f807 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -11,13 +11,13 @@ use xshell::{cmd, cp, mkdir_p, pushd, read_file, rm_rf, write_file};
11 | 11 | ||
12 | use crate::{date_iso, project_root}; | 12 | use crate::{date_iso, project_root}; |
13 | 13 | ||
14 | pub struct DistCmd { | 14 | pub(crate) struct DistCmd { |
15 | pub nightly: bool, | 15 | pub(crate) nightly: bool, |
16 | pub client_version: Option<String>, | 16 | pub(crate) client_version: Option<String>, |
17 | } | 17 | } |
18 | 18 | ||
19 | impl DistCmd { | 19 | impl DistCmd { |
20 | pub fn run(self) -> Result<()> { | 20 | pub(crate) fn run(self) -> Result<()> { |
21 | let dist = project_root().join("dist"); | 21 | let dist = project_root().join("dist"); |
22 | rm_rf(&dist)?; | 22 | rm_rf(&dist)?; |
23 | mkdir_p(&dist)?; | 23 | mkdir_p(&dist)?; |
diff --git a/xtask/src/flags.rs b/xtask/src/flags.rs
new file mode 100644
index 000000000..48d1ad45e
--- /dev/null
+++ b/xtask/src/flags.rs
@@ -0,0 +1,154 @@
1 | #![allow(unreachable_pub)] | ||
2 | |||
3 | use crate::install::{ClientOpt, Malloc, ServerOpt}; | ||
4 | |||
5 | xflags::xflags! { | ||
6 | src "./src/flags.rs" | ||
7 | |||
8 | /// Run custom build command. | ||
9 | cmd xtask { | ||
10 | default cmd help { | ||
11 | /// Print help information. | ||
12 | optional -h, --help | ||
13 | } | ||
14 | |||
15 | /// Install rust-analyzer server or editor plugin. | ||
16 | cmd install { | ||
17 | /// Install only VS Code plugin. | ||
18 | optional --client | ||
19 | /// One of 'code', 'code-exploration', 'code-insiders', 'codium', or 'code-oss'. | ||
20 | optional --code-bin name: String | ||
21 | |||
22 | /// Install only the language server. | ||
23 | optional --server | ||
24 | /// Use mimalloc allocator for server | ||
25 | optional --mimalloc | ||
26 | /// Use jemalloc allocator for server | ||
27 | optional --jemalloc | ||
28 | } | ||
29 | |||
30 | cmd lint {} | ||
31 | cmd fuzz-tests {} | ||
32 | cmd pre-cache {} | ||
33 | |||
34 | cmd release { | ||
35 | optional --dry-run | ||
36 | } | ||
37 | cmd promote { | ||
38 | optional --dry-run | ||
39 | } | ||
40 | cmd dist { | ||
41 | optional --nightly | ||
42 | optional --client version: String | ||
43 | } | ||
44 | cmd metrics { | ||
45 | optional --dry-run | ||
46 | } | ||
47 | /// Builds a benchmark version of rust-analyzer and puts it into `./target`. | ||
48 | cmd bb | ||
49 | required suffix: String | ||
50 | {} | ||
51 | } | ||
52 | } | ||
53 | |||
54 | // generated start | ||
55 | // The following code is generated by `xflags` macro. | ||
56 | // Run `env UPDATE_XFLAGS=1 cargo build` to regenerate. | ||
57 | #[derive(Debug)] | ||
58 | pub struct Xtask { | ||
59 | pub subcommand: XtaskCmd, | ||
60 | } | ||
61 | |||
62 | #[derive(Debug)] | ||
63 | pub enum XtaskCmd { | ||
64 | Help(Help), | ||
65 | Install(Install), | ||
66 | Lint(Lint), | ||
67 | FuzzTests(FuzzTests), | ||
68 | PreCache(PreCache), | ||
69 | Release(Release), | ||
70 | Promote(Promote), | ||
71 | Dist(Dist), | ||
72 | Metrics(Metrics), | ||
73 | Bb(Bb), | ||
74 | } | ||
75 | |||
76 | #[derive(Debug)] | ||
77 | pub struct Help { | ||
78 | pub help: bool, | ||
79 | } | ||
80 | |||
81 | #[derive(Debug)] | ||
82 | pub struct Install { | ||
83 | pub client: bool, | ||
84 | pub code_bin: Option<String>, | ||
85 | pub server: bool, | ||
86 | pub mimalloc: bool, | ||
87 | pub jemalloc: bool, | ||
88 | } | ||
89 | |||
90 | #[derive(Debug)] | ||
91 | pub struct Lint; | ||
92 | |||
93 | #[derive(Debug)] | ||
94 | pub struct FuzzTests; | ||
95 | |||
96 | #[derive(Debug)] | ||
97 | pub struct PreCache; | ||
98 | |||
99 | #[derive(Debug)] | ||
100 | pub struct Release { | ||
101 | pub dry_run: bool, | ||
102 | } | ||
103 | |||
104 | #[derive(Debug)] | ||
105 | pub struct Promote { | ||
106 | pub dry_run: bool, | ||
107 | } | ||
108 | |||
109 | #[derive(Debug)] | ||
110 | pub struct Dist { | ||
111 | pub nightly: bool, | ||
112 | pub client: Option<String>, | ||
113 | } | ||
114 | |||
115 | #[derive(Debug)] | ||
116 | pub struct Metrics { | ||
117 | pub dry_run: bool, | ||
118 | } | ||
119 | |||
120 | #[derive(Debug)] | ||
121 | pub struct Bb { | ||
122 | pub suffix: String, | ||
123 | } | ||
124 | |||
125 | impl Xtask { | ||
126 | pub const HELP: &'static str = Self::HELP_; | ||
127 | |||
128 | pub fn from_env() -> xflags::Result<Self> { | ||
129 | Self::from_env_() | ||
130 | } | ||
131 | } | ||
132 | // generated end | ||
133 | |||
134 | impl Install { | ||
135 | pub(crate) fn server(&self) -> Option<ServerOpt> { | ||
136 | if self.client && !self.server { | ||
137 | return None; | ||
138 | } | ||
139 | let malloc = if self.mimalloc { | ||
140 | Malloc::Mimalloc | ||
141 | } else if self.jemalloc { | ||
142 | Malloc::Jemalloc | ||
143 | } else { | ||
144 | Malloc::System | ||
145 | }; | ||
146 | Some(ServerOpt { malloc }) | ||
147 | } | ||
148 | pub(crate) fn client(&self) -> Option<ClientOpt> { | ||
149 | if !self.client && self.server { | ||
150 | return None; | ||
151 | } | ||
152 | Some(ClientOpt { code_bin: self.code_bin.clone() }) | ||
153 | } | ||
154 | } | ||
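This declarative `xflags` definition replaces the hand-rolled `pico_args` parsing removed from `main.rs` further down; `Install::server` and `Install::client` turn the raw flags into the existing `ServerOpt`/`ClientOpt` types. Illustrative invocations accepted by the new interface (assuming the usual `cargo xtask` alias in `.cargo/config`):

    cargo xtask install --server --mimalloc
    cargo xtask install --client --code-bin code-insiders
    cargo xtask release --dry-run
    cargo xtask dist --nightly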
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 4c5c2673c..177028b08 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -5,86 +5,43 @@ use std::{env, path::PathBuf, str};
5 | use anyhow::{bail, format_err, Context, Result}; | 5 | use anyhow::{bail, format_err, Context, Result}; |
6 | use xshell::{cmd, pushd}; | 6 | use xshell::{cmd, pushd}; |
7 | 7 | ||
8 | use crate::flags; | ||
9 | |||
8 | // Latest stable, feel free to send a PR if this lags behind. | 10 | // Latest stable, feel free to send a PR if this lags behind. |
9 | const REQUIRED_RUST_VERSION: u32 = 50; | 11 | const REQUIRED_RUST_VERSION: u32 = 50; |
10 | 12 | ||
11 | pub struct InstallCmd { | 13 | impl flags::Install { |
12 | pub client: Option<ClientOpt>, | 14 | pub(crate) fn run(self) -> Result<()> { |
13 | pub server: Option<ServerOpt>, | 15 | if cfg!(target_os = "macos") { |
14 | } | 16 | fix_path_for_mac().context("Fix path for mac")? |
15 | 17 | } | |
16 | #[derive(Clone, Copy)] | 18 | if let Some(server) = self.server() { |
17 | pub enum ClientOpt { | 19 | install_server(server).context("install server")?; |
18 | VsCode, | ||
19 | VsCodeExploration, | ||
20 | VsCodeInsiders, | ||
21 | VsCodium, | ||
22 | VsCodeOss, | ||
23 | Any, | ||
24 | } | ||
25 | |||
26 | impl ClientOpt { | ||
27 | pub const fn as_cmds(&self) -> &'static [&'static str] { | ||
28 | match self { | ||
29 | ClientOpt::VsCode => &["code"], | ||
30 | ClientOpt::VsCodeExploration => &["code-exploration"], | ||
31 | ClientOpt::VsCodeInsiders => &["code-insiders"], | ||
32 | ClientOpt::VsCodium => &["codium"], | ||
33 | ClientOpt::VsCodeOss => &["code-oss"], | ||
34 | ClientOpt::Any => &["code", "code-exploration", "code-insiders", "codium", "code-oss"], | ||
35 | } | 20 | } |
21 | if let Some(client) = self.client() { | ||
22 | install_client(client).context("install client")?; | ||
23 | } | ||
24 | Ok(()) | ||
36 | } | 25 | } |
37 | } | 26 | } |
38 | 27 | ||
39 | impl Default for ClientOpt { | 28 | #[derive(Clone)] |
40 | fn default() -> Self { | 29 | pub(crate) struct ClientOpt { |
41 | ClientOpt::Any | 30 | pub(crate) code_bin: Option<String>, |
42 | } | ||
43 | } | 31 | } |
44 | 32 | ||
45 | impl std::str::FromStr for ClientOpt { | 33 | const VS_CODES: &[&str] = &["code", "code-exploration", "code-insiders", "codium", "code-oss"]; |
46 | type Err = anyhow::Error; | ||
47 | |||
48 | fn from_str(s: &str) -> Result<Self, Self::Err> { | ||
49 | [ | ||
50 | ClientOpt::VsCode, | ||
51 | ClientOpt::VsCodeExploration, | ||
52 | ClientOpt::VsCodeInsiders, | ||
53 | ClientOpt::VsCodium, | ||
54 | ClientOpt::VsCodeOss, | ||
55 | ] | ||
56 | .iter() | ||
57 | .copied() | ||
58 | .find(|c| [s] == c.as_cmds()) | ||
59 | .ok_or_else(|| anyhow::format_err!("no such client")) | ||
60 | } | ||
61 | } | ||
62 | 34 | ||
63 | pub struct ServerOpt { | 35 | pub(crate) struct ServerOpt { |
64 | pub malloc: Malloc, | 36 | pub(crate) malloc: Malloc, |
65 | } | 37 | } |
66 | 38 | ||
67 | pub enum Malloc { | 39 | pub(crate) enum Malloc { |
68 | System, | 40 | System, |
69 | Mimalloc, | 41 | Mimalloc, |
70 | Jemalloc, | 42 | Jemalloc, |
71 | } | 43 | } |
72 | 44 | ||
73 | impl InstallCmd { | ||
74 | pub fn run(self) -> Result<()> { | ||
75 | if cfg!(target_os = "macos") { | ||
76 | fix_path_for_mac().context("Fix path for mac")? | ||
77 | } | ||
78 | if let Some(server) = self.server { | ||
79 | install_server(server).context("install server")?; | ||
80 | } | ||
81 | if let Some(client) = self.client { | ||
82 | install_client(client).context("install client")?; | ||
83 | } | ||
84 | Ok(()) | ||
85 | } | ||
86 | } | ||
87 | |||
88 | fn fix_path_for_mac() -> Result<()> { | 45 | fn fix_path_for_mac() -> Result<()> { |
89 | let mut vscode_path: Vec<PathBuf> = { | 46 | let mut vscode_path: Vec<PathBuf> = { |
90 | const COMMON_APP_PATH: &str = | 47 | const COMMON_APP_PATH: &str = |
@@ -121,21 +78,12 @@ fn fix_path_for_mac() -> Result<()> {
121 | fn install_client(client_opt: ClientOpt) -> Result<()> { | 78 | fn install_client(client_opt: ClientOpt) -> Result<()> { |
122 | let _dir = pushd("./editors/code"); | 79 | let _dir = pushd("./editors/code"); |
123 | 80 | ||
124 | let find_code = |f: fn(&str) -> bool| -> Result<&'static str> { | 81 | // Package extension. |
125 | client_opt.as_cmds().iter().copied().find(|bin| f(bin)).ok_or_else(|| { | 82 | if cfg!(unix) { |
126 | format_err!("Can't execute `code --version`. Perhaps it is not in $PATH?") | ||
127 | }) | ||
128 | }; | ||
129 | |||
130 | let installed_extensions = if cfg!(unix) { | ||
131 | cmd!("npm --version").run().context("`npm` is required to build the VS Code plugin")?; | 83 | cmd!("npm --version").run().context("`npm` is required to build the VS Code plugin")?; |
132 | cmd!("npm ci").run()?; | 84 | cmd!("npm ci").run()?; |
133 | 85 | ||
134 | cmd!("npm run package --scripts-prepend-node-path").run()?; | 86 | cmd!("npm run package --scripts-prepend-node-path").run()?; |
135 | |||
136 | let code = find_code(|bin| cmd!("{bin} --version").read().is_ok())?; | ||
137 | cmd!("{code} --install-extension rust-analyzer.vsix --force").run()?; | ||
138 | cmd!("{code} --list-extensions").read()? | ||
139 | } else { | 87 | } else { |
140 | cmd!("cmd.exe /c npm --version") | 88 | cmd!("cmd.exe /c npm --version") |
141 | .run() | 89 | .run() |
@@ -143,8 +91,36 @@ fn install_client(client_opt: ClientOpt) -> Result<()> {
143 | cmd!("cmd.exe /c npm ci").run()?; | 91 | cmd!("cmd.exe /c npm ci").run()?; |
144 | 92 | ||
145 | cmd!("cmd.exe /c npm run package").run()?; | 93 | cmd!("cmd.exe /c npm run package").run()?; |
94 | }; | ||
146 | 95 | ||
147 | let code = find_code(|bin| cmd!("cmd.exe /c {bin}.cmd --version").read().is_ok())?; | 96 | // Find the appropriate VS Code binary. |
97 | let lifetime_extender; | ||
98 | let candidates: &[&str] = match client_opt.code_bin.as_deref() { | ||
99 | Some(it) => { | ||
100 | lifetime_extender = [it]; | ||
101 | &lifetime_extender[..] | ||
102 | } | ||
103 | None => VS_CODES, | ||
104 | }; | ||
105 | let code = candidates | ||
106 | .iter() | ||
107 | .copied() | ||
108 | .find(|&bin| { | ||
109 | if cfg!(unix) { | ||
110 | cmd!("{bin} --version").read().is_ok() | ||
111 | } else { | ||
112 | cmd!("cmd.exe /c {bin}.cmd --version").read().is_ok() | ||
113 | } | ||
114 | }) | ||
115 | .ok_or_else(|| { | ||
116 | format_err!("Can't execute `{} --version`. Perhaps it is not in $PATH?", candidates[0]) | ||
117 | })?; | ||
118 | |||
119 | // Install & verify. | ||
120 | let installed_extensions = if cfg!(unix) { | ||
121 | cmd!("{code} --install-extension rust-analyzer.vsix --force").run()?; | ||
122 | cmd!("{code} --list-extensions").read()? | ||
123 | } else { | ||
148 | cmd!("cmd.exe /c {code}.cmd --install-extension rust-analyzer.vsix --force").run()?; | 124 | cmd!("cmd.exe /c {code}.cmd --install-extension rust-analyzer.vsix --force").run()?; |
149 | cmd!("cmd.exe /c {code}.cmd --list-extensions").read()? | 125 | cmd!("cmd.exe /c {code}.cmd --list-extensions").read()? |
150 | }; | 126 | }; |
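The client install no longer goes through `ClientOpt::as_cmds`: it probes either the single binary given via `--code-bin` or the whole `VS_CODES` list and uses the first one that answers `--version`. A standalone sketch of that probing step using `std::process::Command` instead of `xshell` (Unix form only; on Windows the diff goes through `cmd.exe /c {bin}.cmd`):

    use std::process::Command;

    const VS_CODES: &[&str] =
        &["code", "code-exploration", "code-insiders", "codium", "code-oss"];

    /// Return the first candidate binary on $PATH that responds to `--version`.
    fn find_code<'a>(candidates: &[&'a str]) -> Option<&'a str> {
        candidates.iter().copied().find(|&bin| {
            Command::new(bin)
                .arg("--version")
                .output()
                .map(|out| out.status.success())
                .unwrap_or(false)
        })
    }

    fn main() {
        // An optional first argument plays the role of `--code-bin`, narrowing
        // the candidate list to that single binary.
        let args: Vec<String> = std::env::args().skip(1).collect();
        let candidates: Vec<&str> = match args.first() {
            Some(bin) => vec![bin.as_str()],
            None => VS_CODES.to_vec(),
        };
        match find_code(&candidates) {
            Some(code) => println!("would run `{} --install-extension rust-analyzer.vsix --force`", code),
            None => eprintln!("no VS Code binary found in $PATH"),
        }
    }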
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
deleted file mode 100644
index b19985fb2..000000000
--- a/xtask/src/lib.rs
+++ /dev/null
@@ -1,131 +0,0 @@
1 | //! Support library for `cargo xtask` command. | ||
2 | //! | ||
3 | //! See https://github.com/matklad/cargo-xtask/ | ||
4 | |||
5 | pub mod codegen; | ||
6 | mod ast_src; | ||
7 | |||
8 | pub mod install; | ||
9 | pub mod release; | ||
10 | pub mod dist; | ||
11 | pub mod pre_commit; | ||
12 | pub mod metrics; | ||
13 | pub mod pre_cache; | ||
14 | |||
15 | use std::{ | ||
16 | env, | ||
17 | path::{Path, PathBuf}, | ||
18 | }; | ||
19 | |||
20 | use walkdir::{DirEntry, WalkDir}; | ||
21 | use xshell::{cmd, pushd, pushenv}; | ||
22 | |||
23 | use crate::codegen::Mode; | ||
24 | |||
25 | pub use anyhow::{bail, Context as _, Result}; | ||
26 | |||
27 | pub fn project_root() -> PathBuf { | ||
28 | Path::new( | ||
29 | &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()), | ||
30 | ) | ||
31 | .ancestors() | ||
32 | .nth(1) | ||
33 | .unwrap() | ||
34 | .to_path_buf() | ||
35 | } | ||
36 | |||
37 | pub fn rust_files() -> impl Iterator<Item = PathBuf> { | ||
38 | rust_files_in(&project_root().join("crates")) | ||
39 | } | ||
40 | |||
41 | pub fn cargo_files() -> impl Iterator<Item = PathBuf> { | ||
42 | files_in(&project_root(), "toml") | ||
43 | .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false)) | ||
44 | } | ||
45 | |||
46 | pub fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> { | ||
47 | files_in(path, "rs") | ||
48 | } | ||
49 | |||
50 | pub fn run_rustfmt(mode: Mode) -> Result<()> { | ||
51 | let _dir = pushd(project_root())?; | ||
52 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
53 | ensure_rustfmt()?; | ||
54 | let check = match mode { | ||
55 | Mode::Overwrite => &[][..], | ||
56 | Mode::Verify => &["--", "--check"], | ||
57 | }; | ||
58 | cmd!("cargo fmt {check...}").run()?; | ||
59 | Ok(()) | ||
60 | } | ||
61 | |||
62 | fn ensure_rustfmt() -> Result<()> { | ||
63 | let out = cmd!("rustfmt --version").read()?; | ||
64 | if !out.contains("stable") { | ||
65 | bail!( | ||
66 | "Failed to run rustfmt from toolchain 'stable'. \ | ||
67 | Please run `rustup component add rustfmt --toolchain stable` to install it.", | ||
68 | ) | ||
69 | } | ||
70 | Ok(()) | ||
71 | } | ||
72 | |||
73 | pub fn run_clippy() -> Result<()> { | ||
74 | if cmd!("cargo clippy --version").read().is_err() { | ||
75 | bail!( | ||
76 | "Failed run cargo clippy. \ | ||
77 | Please run `rustup component add clippy` to install it.", | ||
78 | ) | ||
79 | } | ||
80 | |||
81 | let allowed_lints = " | ||
82 | -A clippy::collapsible_if | ||
83 | -A clippy::needless_pass_by_value | ||
84 | -A clippy::nonminimal_bool | ||
85 | -A clippy::redundant_pattern_matching | ||
86 | " | ||
87 | .split_ascii_whitespace(); | ||
88 | cmd!("cargo clippy --all-features --all-targets -- {allowed_lints...}").run()?; | ||
89 | Ok(()) | ||
90 | } | ||
91 | |||
92 | pub fn run_fuzzer() -> Result<()> { | ||
93 | let _d = pushd("./crates/syntax")?; | ||
94 | let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly"); | ||
95 | if cmd!("cargo fuzz --help").read().is_err() { | ||
96 | cmd!("cargo install cargo-fuzz").run()?; | ||
97 | }; | ||
98 | |||
99 | // Expecting nightly rustc | ||
100 | let out = cmd!("rustc --version").read()?; | ||
101 | if !out.contains("nightly") { | ||
102 | bail!("fuzz tests require nightly rustc") | ||
103 | } | ||
104 | |||
105 | cmd!("cargo fuzz run parser").run()?; | ||
106 | Ok(()) | ||
107 | } | ||
108 | |||
109 | fn date_iso() -> Result<String> { | ||
110 | let res = cmd!("date --iso --utc").read()?; | ||
111 | Ok(res) | ||
112 | } | ||
113 | |||
114 | fn is_release_tag(tag: &str) -> bool { | ||
115 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) | ||
116 | } | ||
117 | |||
118 | fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> { | ||
119 | let iter = WalkDir::new(path); | ||
120 | return iter | ||
121 | .into_iter() | ||
122 | .filter_entry(|e| !is_hidden(e)) | ||
123 | .map(|e| e.unwrap()) | ||
124 | .filter(|e| !e.file_type().is_dir()) | ||
125 | .map(|e| e.into_path()) | ||
126 | .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false)); | ||
127 | |||
128 | fn is_hidden(entry: &DirEntry) -> bool { | ||
129 | entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) | ||
130 | } | ||
131 | } | ||
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index cbb9b315e..057cd57ae 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -1,174 +1,156 @@
1 | //! See https://github.com/matklad/cargo-xtask/. | 1 | //! See https://github.com/matklad/cargo-xtask/. |
2 | //! | 2 | //! |
3 | //! This binary defines various auxiliary build commands, which are not | 3 | //! This binary defines various auxiliary build commands, which are not |
4 | //! expressible with just `cargo`. Notably, it provides `cargo xtask codegen` | 4 | //! expressible with just `cargo`. Notably, it provides tests via `cargo test -p xtask` |
5 | //! for code generation and `cargo xtask install` for installation of | 5 | //! for code generation and `cargo xtask install` for installation of |
6 | //! rust-analyzer server and client. | 6 | //! rust-analyzer server and client. |
7 | //! | 7 | //! |
8 | //! This binary is integrated into the `cargo` command line by using an alias in | 8 | //! This binary is integrated into the `cargo` command line by using an alias in |
9 | //! `.cargo/config`. | 9 | //! `.cargo/config`. |
10 | mod flags; | ||
10 | 11 | ||
11 | use std::env; | 12 | mod codegen; |
12 | 13 | mod ast_src; | |
13 | use anyhow::bail; | 14 | #[cfg(test)] |
14 | use codegen::CodegenCmd; | 15 | mod tidy; |
15 | use pico_args::Arguments; | ||
16 | use xshell::{cmd, cp, pushd}; | ||
17 | use xtask::{ | ||
18 | codegen::{self, Mode}, | ||
19 | dist::DistCmd, | ||
20 | install::{InstallCmd, Malloc, ServerOpt}, | ||
21 | metrics::MetricsCmd, | ||
22 | pre_cache::PreCacheCmd, | ||
23 | pre_commit, project_root, | ||
24 | release::{PromoteCmd, ReleaseCmd}, | ||
25 | run_clippy, run_fuzzer, run_rustfmt, Result, | ||
26 | }; | ||
27 | 16 | ||
28 | fn main() -> Result<()> { | 17 | mod install; |
29 | if env::args().next().map(|it| it.contains("pre-commit")) == Some(true) { | 18 | mod release; |
30 | return pre_commit::run_hook(); | 19 | mod dist; |
31 | } | 20 | mod metrics; |
21 | mod pre_cache; | ||
32 | 22 | ||
33 | let _d = pushd(project_root())?; | 23 | use anyhow::{bail, Result}; |
34 | 24 | use std::{ | |
35 | let mut args = Arguments::from_env(); | 25 | env, |
36 | let subcommand = args.subcommand()?.unwrap_or_default(); | 26 | path::{Path, PathBuf}, |
37 | 27 | }; | |
38 | match subcommand.as_str() { | 28 | use walkdir::{DirEntry, WalkDir}; |
39 | "install" => { | 29 | use xshell::{cmd, cp, pushd, pushenv}; |
40 | if args.contains(["-h", "--help"]) { | ||
41 | eprintln!( | ||
42 | "\ | ||
43 | cargo xtask install | ||
44 | Install rust-analyzer server or editor plugin. | ||
45 | |||
46 | USAGE: | ||
47 | cargo xtask install [FLAGS] | ||
48 | |||
49 | FLAGS: | ||
50 | --client[=CLIENT] Install only VS Code plugin. | ||
51 | CLIENT is one of 'code', 'code-exploration', 'code-insiders', 'codium', or 'code-oss' | ||
52 | --server Install only the language server | ||
53 | --mimalloc Use mimalloc allocator for server | ||
54 | --jemalloc Use jemalloc allocator for server | ||
55 | -h, --help Prints help information | ||
56 | " | ||
57 | ); | ||
58 | return Ok(()); | ||
59 | } | ||
60 | let server = args.contains("--server"); | ||
61 | let client_code = args.contains("--client"); | ||
62 | if server && client_code { | ||
63 | eprintln!( | ||
64 | "error: The argument `--server` cannot be used with `--client`\n\n\ | ||
65 | For more information try --help" | ||
66 | ); | ||
67 | return Ok(()); | ||
68 | } | ||
69 | |||
70 | let malloc = if args.contains("--mimalloc") { | ||
71 | Malloc::Mimalloc | ||
72 | } else if args.contains("--jemalloc") { | ||
73 | Malloc::Jemalloc | ||
74 | } else { | ||
75 | Malloc::System | ||
76 | }; | ||
77 | 30 | ||
78 | let client_opt = args.opt_value_from_str("--client")?; | 31 | use crate::dist::DistCmd; |
79 | 32 | ||
80 | finish_args(args)?; | 33 | fn main() -> Result<()> { |
34 | let _d = pushd(project_root())?; | ||
81 | 35 | ||
82 | InstallCmd { | 36 | let flags = flags::Xtask::from_env()?; |
83 | client: if server { None } else { Some(client_opt.unwrap_or_default()) }, | 37 | match flags.subcommand { |
84 | server: if client_code { None } else { Some(ServerOpt { malloc }) }, | 38 | flags::XtaskCmd::Help(_) => { |
85 | } | 39 | println!("{}", flags::Xtask::HELP); |
86 | .run() | 40 | return Ok(()); |
87 | } | ||
88 | "codegen" => { | ||
89 | let features = args.contains("--features"); | ||
90 | finish_args(args)?; | ||
91 | CodegenCmd { features }.run() | ||
92 | } | 41 | } |
93 | "format" => { | 42 | flags::XtaskCmd::Install(cmd) => cmd.run(), |
94 | finish_args(args)?; | 43 | flags::XtaskCmd::Lint(_) => run_clippy(), |
95 | run_rustfmt(Mode::Overwrite) | 44 | flags::XtaskCmd::FuzzTests(_) => run_fuzzer(), |
45 | flags::XtaskCmd::PreCache(cmd) => cmd.run(), | ||
46 | flags::XtaskCmd::Release(cmd) => cmd.run(), | ||
47 | flags::XtaskCmd::Promote(cmd) => cmd.run(), | ||
48 | flags::XtaskCmd::Dist(flags) => { | ||
49 | DistCmd { nightly: flags.nightly, client_version: flags.client }.run() | ||
96 | } | 50 | } |
97 | "install-pre-commit-hook" => { | 51 | flags::XtaskCmd::Metrics(cmd) => cmd.run(), |
98 | finish_args(args)?; | 52 | flags::XtaskCmd::Bb(cmd) => { |
99 | pre_commit::install_hook() | ||
100 | } | ||
101 | "lint" => { | ||
102 | finish_args(args)?; | ||
103 | run_clippy() | ||
104 | } | ||
105 | "fuzz-tests" => { | ||
106 | finish_args(args)?; | ||
107 | run_fuzzer() | ||
108 | } | ||
109 | "pre-cache" => { | ||
110 | finish_args(args)?; | ||
111 | PreCacheCmd.run() | ||
112 | } | ||
113 | "release" => { | ||
114 | let dry_run = args.contains("--dry-run"); | ||
115 | finish_args(args)?; | ||
116 | ReleaseCmd { dry_run }.run() | ||
117 | } | ||
118 | "promote" => { | ||
119 | let dry_run = args.contains("--dry-run"); | ||
120 | finish_args(args)?; | ||
121 | PromoteCmd { dry_run }.run() | ||
122 | } | ||
123 | "dist" => { | ||
124 | let nightly = args.contains("--nightly"); | ||
125 | let client_version: Option<String> = args.opt_value_from_str("--client")?; | ||
126 | finish_args(args)?; | ||
127 | DistCmd { nightly, client_version }.run() | ||
128 | } | ||
129 | "metrics" => { | ||
130 | let dry_run = args.contains("--dry-run"); | ||
131 | finish_args(args)?; | ||
132 | MetricsCmd { dry_run }.run() | ||
133 | } | ||
134 | "bb" => { | ||
135 | let suffix: String = args.free_from_str()?; | ||
136 | finish_args(args)?; | ||
137 | { | 53 | { |
138 | let _d = pushd("./crates/rust-analyzer")?; | 54 | let _d = pushd("./crates/rust-analyzer")?; |
139 | cmd!("cargo build --release --features jemalloc").run()?; | 55 | cmd!("cargo build --release --features jemalloc").run()?; |
140 | } | 56 | } |
141 | cp("./target/release/rust-analyzer", format!("./target/rust-analyzer-{}", suffix))?; | 57 | cp("./target/release/rust-analyzer", format!("./target/rust-analyzer-{}", cmd.suffix))?; |
142 | Ok(()) | ||
143 | } | ||
144 | _ => { | ||
145 | eprintln!( | ||
146 | "\ | ||
147 | cargo xtask | ||
148 | Run custom build command. | ||
149 | |||
150 | USAGE: | ||
151 | cargo xtask <SUBCOMMAND> | ||
152 | |||
153 | SUBCOMMANDS: | ||
154 | format | ||
155 | install-pre-commit-hook | ||
156 | fuzz-tests | ||
157 | codegen | ||
158 | install | ||
159 | lint | ||
160 | dist | ||
161 | promote | ||
162 | bb" | ||
163 | ); | ||
164 | Ok(()) | 58 | Ok(()) |
165 | } | 59 | } |
166 | } | 60 | } |
167 | } | 61 | } |
168 | 62 | ||
169 | fn finish_args(args: Arguments) -> Result<()> { | 63 | fn project_root() -> PathBuf { |
170 | if !args.finish().is_empty() { | 64 | Path::new( |
171 | bail!("Unused arguments."); | 65 | &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()), |
66 | ) | ||
67 | .ancestors() | ||
68 | .nth(1) | ||
69 | .unwrap() | ||
70 | .to_path_buf() | ||
71 | } | ||
72 | |||
73 | fn rust_files() -> impl Iterator<Item = PathBuf> { | ||
74 | rust_files_in(&project_root().join("crates")) | ||
75 | } | ||
76 | |||
77 | #[cfg(test)] | ||
78 | fn cargo_files() -> impl Iterator<Item = PathBuf> { | ||
79 | files_in(&project_root(), "toml") | ||
80 | .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false)) | ||
81 | } | ||
82 | |||
83 | fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> { | ||
84 | files_in(path, "rs") | ||
85 | } | ||
86 | |||
87 | fn ensure_rustfmt() -> Result<()> { | ||
88 | let out = cmd!("rustfmt --version").read()?; | ||
89 | if !out.contains("stable") { | ||
90 | bail!( | ||
91 | "Failed to run rustfmt from toolchain 'stable'. \ | ||
92 | Please run `rustup component add rustfmt --toolchain stable` to install it.", | ||
93 | ) | ||
94 | } | ||
95 | Ok(()) | ||
96 | } | ||
97 | |||
98 | fn run_clippy() -> Result<()> { | ||
99 | if cmd!("cargo clippy --version").read().is_err() { | ||
100 | bail!( | ||
101 | "Failed run cargo clippy. \ | ||
102 | Please run `rustup component add clippy` to install it.", | ||
103 | ) | ||
172 | } | 104 | } |
105 | |||
106 | let allowed_lints = " | ||
107 | -A clippy::collapsible_if | ||
108 | -A clippy::needless_pass_by_value | ||
109 | -A clippy::nonminimal_bool | ||
110 | -A clippy::redundant_pattern_matching | ||
111 | " | ||
112 | .split_ascii_whitespace(); | ||
113 | cmd!("cargo clippy --all-features --all-targets -- {allowed_lints...}").run()?; | ||
173 | Ok(()) | 114 | Ok(()) |
174 | } | 115 | } |
116 | |||
117 | fn run_fuzzer() -> Result<()> { | ||
118 | let _d = pushd("./crates/syntax")?; | ||
119 | let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly"); | ||
120 | if cmd!("cargo fuzz --help").read().is_err() { | ||
121 | cmd!("cargo install cargo-fuzz").run()?; | ||
122 | }; | ||
123 | |||
124 | // Expecting nightly rustc | ||
125 | let out = cmd!("rustc --version").read()?; | ||
126 | if !out.contains("nightly") { | ||
127 | bail!("fuzz tests require nightly rustc") | ||
128 | } | ||
129 | |||
130 | cmd!("cargo fuzz run parser").run()?; | ||
131 | Ok(()) | ||
132 | } | ||
133 | |||
134 | fn date_iso() -> Result<String> { | ||
135 | let res = cmd!("date --iso --utc").read()?; | ||
136 | Ok(res) | ||
137 | } | ||
138 | |||
139 | fn is_release_tag(tag: &str) -> bool { | ||
140 | tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) | ||
141 | } | ||
142 | |||
143 | fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> { | ||
144 | let iter = WalkDir::new(path); | ||
145 | return iter | ||
146 | .into_iter() | ||
147 | .filter_entry(|e| !is_hidden(e)) | ||
148 | .map(|e| e.unwrap()) | ||
149 | .filter(|e| !e.file_type().is_dir()) | ||
150 | .map(|e| e.into_path()) | ||
151 | .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false)); | ||
152 | |||
153 | fn is_hidden(entry: &DirEntry) -> bool { | ||
154 | entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) | ||
155 | } | ||
156 | } | ||
diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs
index 624ad3b7e..72de92c64 100644
--- a/xtask/src/metrics.rs
+++ b/xtask/src/metrics.rs
@@ -9,14 +9,12 @@ use std::{
9 | use anyhow::{bail, format_err, Result}; | 9 | use anyhow::{bail, format_err, Result}; |
10 | use xshell::{cmd, mkdir_p, pushd, pushenv, read_file, rm_rf}; | 10 | use xshell::{cmd, mkdir_p, pushd, pushenv, read_file, rm_rf}; |
11 | 11 | ||
12 | type Unit = String; | 12 | use crate::flags; |
13 | 13 | ||
14 | pub struct MetricsCmd { | 14 | type Unit = String; |
15 | pub dry_run: bool, | ||
16 | } | ||
17 | 15 | ||
18 | impl MetricsCmd { | 16 | impl flags::Metrics { |
19 | pub fn run(self) -> Result<()> { | 17 | pub(crate) fn run(self) -> Result<()> { |
20 | let mut metrics = Metrics::new()?; | 18 | let mut metrics = Metrics::new()?; |
21 | if !self.dry_run { | 19 | if !self.dry_run { |
22 | rm_rf("./target/release")?; | 20 | rm_rf("./target/release")?; |
@@ -82,7 +80,7 @@ impl Metrics {
82 | fn measure_analysis_stats_path(&mut self, name: &str, path: &str) -> Result<()> { | 80 | fn measure_analysis_stats_path(&mut self, name: &str, path: &str) -> Result<()> { |
83 | eprintln!("\nMeasuring analysis-stats/{}", name); | 81 | eprintln!("\nMeasuring analysis-stats/{}", name); |
84 | let output = | 82 | let output = |
85 | cmd!("./target/release/rust-analyzer analysis-stats --quiet --memory-usage {path}") | 83 | cmd!("./target/release/rust-analyzer --quiet analysis-stats --memory-usage {path}") |
86 | .read()?; | 84 | .read()?; |
87 | for (metric, value, unit) in parse_metrics(&output) { | 85 | for (metric, value, unit) in parse_metrics(&output) { |
88 | self.report(&format!("analysis-stats/{}/{}", name, metric), value, unit.into()); | 86 | self.report(&format!("analysis-stats/{}/{}", name, metric), value, unit.into()); |
diff --git a/xtask/src/pre_cache.rs b/xtask/src/pre_cache.rs
index 569f88f68..b456224fd 100644
--- a/xtask/src/pre_cache.rs
+++ b/xtask/src/pre_cache.rs
@@ -6,12 +6,12 @@ use std::{
6 | use anyhow::Result; | 6 | use anyhow::Result; |
7 | use xshell::rm_rf; | 7 | use xshell::rm_rf; |
8 | 8 | ||
9 | pub struct PreCacheCmd; | 9 | use crate::flags; |
10 | 10 | ||
11 | impl PreCacheCmd { | 11 | impl flags::PreCache { |
12 | /// Cleans the `./target` dir after the build such that only | 12 | /// Cleans the `./target` dir after the build such that only |
13 | /// dependencies are cached on CI. | 13 | /// dependencies are cached on CI. |
14 | pub fn run(self) -> Result<()> { | 14 | pub(crate) fn run(self) -> Result<()> { |
15 | let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); | 15 | let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); |
16 | if !slow_tests_cookie.exists() { | 16 | if !slow_tests_cookie.exists() { |
17 | panic!("slow tests were skipped on CI!") | 17 | panic!("slow tests were skipped on CI!") |
diff --git a/xtask/src/pre_commit.rs b/xtask/src/pre_commit.rs
deleted file mode 100644
index 8f2dbea19..000000000
--- a/xtask/src/pre_commit.rs
+++ /dev/null
@@ -1,38 +0,0 @@
1 | //! pre-commit hook for code formatting. | ||
2 | |||
3 | use std::{fs, path::PathBuf}; | ||
4 | |||
5 | use anyhow::{bail, Result}; | ||
6 | use xshell::cmd; | ||
7 | |||
8 | use crate::{project_root, run_rustfmt, Mode}; | ||
9 | |||
10 | // FIXME: if there are changed `.ts` files, also reformat TypeScript (by | ||
11 | // shelling out to `npm fmt`). | ||
12 | pub fn run_hook() -> Result<()> { | ||
13 | run_rustfmt(Mode::Overwrite)?; | ||
14 | |||
15 | let diff = cmd!("git diff --diff-filter=MAR --name-only --cached").read()?; | ||
16 | |||
17 | let root = project_root(); | ||
18 | for line in diff.lines() { | ||
19 | let file = root.join(line); | ||
20 | cmd!("git update-index --add {file}").run()?; | ||
21 | } | ||
22 | |||
23 | Ok(()) | ||
24 | } | ||
25 | |||
26 | pub fn install_hook() -> Result<()> { | ||
27 | let hook_path: PathBuf = | ||
28 | format!("./.git/hooks/pre-commit{}", std::env::consts::EXE_SUFFIX).into(); | ||
29 | |||
30 | if hook_path.exists() { | ||
31 | bail!("Git hook already created"); | ||
32 | } | ||
33 | |||
34 | let me = std::env::current_exe()?; | ||
35 | fs::copy(me, hook_path)?; | ||
36 | |||
37 | Ok(()) | ||
38 | } | ||
diff --git a/xtask/src/release.rs b/xtask/src/release.rs
index 63556476d..dde5d14ee 100644
--- a/xtask/src/release.rs
+++ b/xtask/src/release.rs
@@ -2,22 +2,17 @@ use std::fmt::Write
2 | 2 | ||
3 | use xshell::{cmd, cp, pushd, read_dir, write_file}; | 3 | use xshell::{cmd, cp, pushd, read_dir, write_file}; |
4 | 4 | ||
5 | use crate::{codegen, date_iso, is_release_tag, project_root, Mode, Result}; | 5 | use crate::{codegen, date_iso, flags, is_release_tag, project_root, Result}; |
6 | 6 | ||
7 | pub struct ReleaseCmd { | 7 | impl flags::Release { |
8 | pub dry_run: bool, | 8 | pub(crate) fn run(self) -> Result<()> { |
9 | } | ||
10 | |||
11 | impl ReleaseCmd { | ||
12 | pub fn run(self) -> Result<()> { | ||
13 | if !self.dry_run { | 9 | if !self.dry_run { |
14 | cmd!("git switch release").run()?; | 10 | cmd!("git switch release").run()?; |
15 | cmd!("git fetch upstream --tags --force").run()?; | 11 | cmd!("git fetch upstream --tags --force").run()?; |
16 | cmd!("git reset --hard tags/nightly").run()?; | 12 | cmd!("git reset --hard tags/nightly").run()?; |
17 | cmd!("git push").run()?; | 13 | cmd!("git push").run()?; |
18 | } | 14 | } |
19 | codegen::generate_assists_docs(Mode::Overwrite)?; | 15 | codegen::docs()?; |
20 | codegen::generate_feature_docs(Mode::Overwrite)?; | ||
21 | 16 | ||
22 | let website_root = project_root().join("../rust-analyzer.github.io"); | 17 | let website_root = project_root().join("../rust-analyzer.github.io"); |
23 | let changelog_dir = website_root.join("./thisweek/_posts"); | 18 | let changelog_dir = website_root.join("./thisweek/_posts"); |
@@ -86,12 +81,8 @@ https://github.com/sponsors/rust-analyzer[GitHub Sponsors]. | |||
86 | } | 81 | } |
87 | } | 82 | } |
88 | 83 | ||
89 | pub struct PromoteCmd { | 84 | impl flags::Promote { |
90 | pub dry_run: bool, | 85 | pub(crate) fn run(self) -> Result<()> { |
91 | } | ||
92 | |||
93 | impl PromoteCmd { | ||
94 | pub fn run(self) -> Result<()> { | ||
95 | let _dir = pushd("../rust-rust-analyzer")?; | 86 | let _dir = pushd("../rust-rust-analyzer")?; |
96 | cmd!("git switch master").run()?; | 87 | cmd!("git switch master").run()?; |
97 | cmd!("git fetch upstream").run()?; | 88 | cmd!("git fetch upstream").run()?; |
diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs new file mode 100644 index 000000000..1352d1218 --- /dev/null +++ b/xtask/src/tidy.rs | |||
@@ -0,0 +1,447 @@ | |||
1 | use std::{ | ||
2 | collections::HashMap, | ||
3 | path::{Path, PathBuf}, | ||
4 | }; | ||
5 | |||
6 | use xshell::{cmd, pushd, pushenv, read_file}; | ||
7 | |||
8 | use crate::{cargo_files, codegen, project_root, rust_files}; | ||
9 | |||
10 | #[test] | ||
11 | fn generate_grammar() { | ||
12 | codegen::generate_syntax().unwrap() | ||
13 | } | ||
14 | |||
15 | #[test] | ||
16 | fn generate_parser_tests() { | ||
17 | codegen::generate_parser_tests().unwrap() | ||
18 | } | ||
19 | |||
20 | #[test] | ||
21 | fn generate_assists_tests() { | ||
22 | codegen::generate_assists_tests().unwrap(); | ||
23 | } | ||
24 | |||
25 | /// This clones the rustc repo, so it is not worth keeping up to date. We update | ||
26 | /// it manually by un-ignoring the test from time to time. | ||
27 | #[test] | ||
28 | #[ignore] | ||
29 | fn generate_lint_completions() { | ||
30 | codegen::generate_lint_completions().unwrap() | ||
31 | } | ||
32 | |||
33 | #[test] | ||
34 | fn check_code_formatting() { | ||
35 | let _dir = pushd(project_root()).unwrap(); | ||
36 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
37 | crate::ensure_rustfmt().unwrap(); | ||
38 | let res = cmd!("cargo fmt -- --check").run(); | ||
39 | if res.is_err() { | ||
40 | let _ = cmd!("cargo fmt").run(); | ||
41 | } | ||
42 | res.unwrap() | ||
43 | } | ||
44 | |||
45 | #[test] | ||
46 | fn smoke_test_generate_documentation() { | ||
47 | codegen::docs().unwrap() | ||
48 | } | ||
49 | |||
50 | #[test] | ||
51 | fn check_lsp_extensions_docs() { | ||
52 | let expected_hash = { | ||
53 | let lsp_ext_rs = | ||
54 | read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap(); | ||
55 | stable_hash(lsp_ext_rs.as_str()) | ||
56 | }; | ||
57 | |||
58 | let actual_hash = { | ||
59 | let lsp_extensions_md = | ||
60 | read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap(); | ||
61 | let text = lsp_extensions_md | ||
62 | .lines() | ||
63 | .find_map(|line| line.strip_prefix("lsp_ext.rs hash:")) | ||
64 | .unwrap() | ||
65 | .trim(); | ||
66 | u64::from_str_radix(text, 16).unwrap() | ||
67 | }; | ||
68 | |||
69 | if actual_hash != expected_hash { | ||
70 | panic!( | ||
71 | " | ||
72 | lsp_ext.rs was changed without touching lsp-extensions.md. | ||
73 | |||
74 | Expected hash: {:x} | ||
75 | Actual hash: {:x} | ||
76 | |||
77 | Please adjust docs/dev/lsp-extensions.md. | ||
78 | ", | ||
79 | expected_hash, actual_hash | ||
80 | ) | ||
81 | } | ||
82 | } | ||
83 | |||
84 | #[test] | ||
85 | fn rust_files_are_tidy() { | ||
86 | let mut tidy_docs = TidyDocs::default(); | ||
87 | for path in rust_files() { | ||
88 | let text = read_file(&path).unwrap(); | ||
89 | check_todo(&path, &text); | ||
90 | check_dbg(&path, &text); | ||
91 | check_trailing_ws(&path, &text); | ||
92 | deny_clippy(&path, &text); | ||
93 | tidy_docs.visit(&path, &text); | ||
94 | } | ||
95 | tidy_docs.finish(); | ||
96 | } | ||
97 | |||
98 | #[test] | ||
99 | fn cargo_files_are_tidy() { | ||
100 | for cargo in cargo_files() { | ||
101 | let mut section = None; | ||
102 | for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() { | ||
103 | let text = text.trim(); | ||
104 | if text.starts_with('[') { | ||
105 | if !text.ends_with(']') { | ||
106 | panic!( | ||
107 | "\nplease don't add comments or trailing whitespace in section lines.\n\ | ||
108 | {}:{}\n", | ||
109 | cargo.display(), | ||
110 | line_no + 1 | ||
111 | ) | ||
112 | } | ||
113 | section = Some(text); | ||
114 | continue; | ||
115 | } | ||
116 | let text: String = text.split_whitespace().collect(); | ||
117 | if !text.contains("path=") { | ||
118 | continue; | ||
119 | } | ||
120 | match section { | ||
121 | Some(s) if s.contains("dev-dependencies") => { | ||
122 | if text.contains("version") { | ||
123 | panic!( | ||
124 | "\ncargo internal dev-dependencies should not have a version.\n\ | ||
125 | {}:{}\n", | ||
126 | cargo.display(), | ||
127 | line_no + 1 | ||
128 | ); | ||
129 | } | ||
130 | } | ||
131 | Some(s) if s.contains("dependencies") => { | ||
132 | if !text.contains("version") { | ||
133 | panic!( | ||
134 | "\ncargo internal dependencies should have a version.\n\ | ||
135 | {}:{}\n", | ||
136 | cargo.display(), | ||
137 | line_no + 1 | ||
138 | ); | ||
139 | } | ||
140 | } | ||
141 | _ => {} | ||
142 | } | ||
143 | } | ||
144 | } | ||
145 | } | ||
146 | |||
147 | #[test] | ||
148 | fn check_merge_commits() { | ||
149 | let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..") | ||
150 | .read() | ||
151 | .unwrap(); | ||
152 | if !stdout.is_empty() { | ||
153 | panic!( | ||
154 | " | ||
155 | Merge commits are not allowed in the history. | ||
156 | |||
157 | When updating a pull-request, please rebase your feature branch | ||
158 | on top of master by running `git rebase master`. If rebase fails, | ||
159 | you can re-apply your changes like this: | ||
160 | |||
161 | # Just look around to see the current state. | ||
162 | $ git status | ||
163 | $ git log | ||
164 | |||
165 | # Abort in-progress rebase and merges, if any. | ||
166 | $ git rebase --abort | ||
167 | $ git merge --abort | ||
168 | |||
169 | # Make the branch point to the latest commit from master, | ||
170 | # while keeping your local changes uncommitted. | ||
171 | $ git reset --soft origin/master | ||
172 | |||
173 | # Commit all changes in a single batch. | ||
174 | $ git commit -am'My changes' | ||
175 | |||
176 | # Verify that everything looks alright. | ||
177 | $ git status | ||
178 | $ git log | ||
179 | |||
180 | # Push the changes. We did a rebase, so we need the `--force` option. | ||
181 | # `--force-with-lease` is a safer (Rusty) version of `--force`. | ||
182 | $ git push --force-with-lease | ||
183 | |||
184 | # Verify that both local and remote branch point to the same commit. | ||
185 | $ git log | ||
186 | |||
187 | And don't be afraid of messing something up during a rebase -- you can | ||
188 | always restore the previous state using `git reflog`: | ||
189 | |||
190 | https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local | ||
191 | " | ||
192 | ); | ||
193 | } | ||
194 | } | ||
195 | |||
196 | fn deny_clippy(path: &Path, text: &str) { | ||
197 | let ignore = &[ | ||
198 | // The documentation in string literals may contain anything for its own purposes | ||
199 | "ide_completion/src/generated_lint_completions.rs", | ||
200 | ]; | ||
201 | if ignore.iter().any(|p| path.ends_with(p)) { | ||
202 | return; | ||
203 | } | ||
204 | |||
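    // \u{61} is an escaped 'a': spelling out "allow" this way keeps this check from flagging its own source.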
205 | if text.contains("\u{61}llow(clippy") { | ||
206 | panic!( | ||
207 | "\n\nallowing lints is forbidden: {}. | ||
208 | rust-analyzer intentionally doesn't check clippy on CI. | ||
209 | You can allow lints globally via `xtask clippy`. | ||
210 | See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion. | ||
211 | |||
212 | ", | ||
213 | path.display() | ||
214 | ) | ||
215 | } | ||
216 | } | ||
217 | |||
218 | #[test] | ||
219 | fn check_licenses() { | ||
220 | let expected = " | ||
221 | 0BSD OR MIT OR Apache-2.0 | ||
222 | Apache-2.0 | ||
223 | Apache-2.0 OR BSL-1.0 | ||
224 | Apache-2.0 OR MIT | ||
225 | Apache-2.0/MIT | ||
226 | BSD-3-Clause | ||
227 | CC0-1.0 | ||
228 | ISC | ||
229 | MIT | ||
230 | MIT / Apache-2.0 | ||
231 | MIT OR Apache-2.0 | ||
232 | MIT OR Apache-2.0 OR Zlib | ||
233 | MIT OR Zlib OR Apache-2.0 | ||
234 | MIT/Apache-2.0 | ||
235 | Unlicense OR MIT | ||
236 | Unlicense/MIT | ||
237 | Zlib OR Apache-2.0 OR MIT | ||
238 | " | ||
239 | .lines() | ||
240 | .filter(|it| !it.is_empty()) | ||
241 | .collect::<Vec<_>>(); | ||
242 | |||
243 | let meta = cmd!("cargo metadata --format-version 1").read().unwrap(); | ||
244 | let mut licenses = meta | ||
245 | .split(|c| c == ',' || c == '{' || c == '}') | ||
246 | .filter(|it| it.contains(r#""license""#)) | ||
247 | .map(|it| it.trim()) | ||
248 | .map(|it| it[r#""license":"#.len()..].trim_matches('"')) | ||
249 | .collect::<Vec<_>>(); | ||
250 | licenses.sort(); | ||
251 | licenses.dedup(); | ||
252 | if licenses != expected { | ||
253 | let mut diff = String::new(); | ||
254 | |||
255 | diff.push_str("New Licenses:\n"); | ||
256 | for &l in licenses.iter() { | ||
257 | if !expected.contains(&l) { | ||
258 | diff += &format!(" {}\n", l) | ||
259 | } | ||
260 | } | ||
261 | |||
262 | diff.push_str("\nMissing Licenses:\n"); | ||
263 | for &l in expected.iter() { | ||
264 | if !licenses.contains(&l) { | ||
265 | diff += &format!(" {}\n", l) | ||
266 | } | ||
267 | } | ||
268 | |||
269 | panic!("different set of licenses!\n{}", diff); | ||
270 | } | ||
271 | assert_eq!(licenses, expected); | ||
272 | } | ||
273 | |||
274 | fn check_todo(path: &Path, text: &str) { | ||
275 | let need_todo = &[ | ||
276 | // This file itself obviously needs to use todo (<- like this!). | ||
277 | "tests/tidy.rs", | ||
278 | // Some of our assists generate `todo!()`. | ||
279 | "handlers/add_turbo_fish.rs", | ||
280 | "handlers/generate_function.rs", | ||
281 | // To support generating `todo!()` in assists, we have `expr_todo()` in | ||
282 | // `ast::make`. | ||
283 | "ast/make.rs", | ||
284 | // The documentation in string literals may contain anything for its own purposes | ||
285 | "ide_completion/src/generated_lint_completions.rs", | ||
286 | ]; | ||
287 | if need_todo.iter().any(|p| path.ends_with(p)) { | ||
288 | return; | ||
289 | } | ||
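    // "TOOD" also catches a common transposition typo of TODO.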
290 | if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") { | ||
291 | // Generated by an assist | ||
292 | if text.contains("${0:todo!()}") { | ||
293 | return; | ||
294 | } | ||
295 | |||
296 | panic!( | ||
297 | "\nTODO markers or todo! macros should not be committed to the master branch,\n\ | ||
298 | use FIXME instead\n\ | ||
299 | {}\n", | ||
300 | path.display(), | ||
301 | ) | ||
302 | } | ||
303 | } | ||
304 | |||
305 | fn check_dbg(path: &Path, text: &str) { | ||
306 | let need_dbg = &[ | ||
307 | // This file itself obviously needs to use dbg. | ||
308 | "tests/tidy.rs", | ||
309 | // Assists to remove `dbg!()` | ||
310 | "handlers/remove_dbg.rs", | ||
311 | // We have .dbg postfix | ||
312 | "ide_completion/src/completions/postfix.rs", | ||
313 | // The documentation in string literals may contain anything for its own purposes | ||
314 | "ide_completion/src/lib.rs", | ||
315 | "ide_completion/src/generated_lint_completions.rs", | ||
317 | // Generated assist tests include the doc test for `remove_dbg` | ||
317 | "src/tests/generated.rs", | ||
318 | ]; | ||
319 | if need_dbg.iter().any(|p| path.ends_with(p)) { | ||
320 | return; | ||
321 | } | ||
322 | if text.contains("dbg!") { | ||
323 | panic!( | ||
324 | "\ndbg! macros should not be committed to the master branch,\n\ | ||
325 | {}\n", | ||
326 | path.display(), | ||
327 | ) | ||
328 | } | ||
329 | } | ||
330 | |||
331 | fn check_trailing_ws(path: &Path, text: &str) { | ||
332 | if is_exclude_dir(path, &["test_data"]) { | ||
333 | return; | ||
334 | } | ||
335 | for (line_number, line) in text.lines().enumerate() { | ||
336 | if line.chars().last().map(char::is_whitespace) == Some(true) { | ||
337 | panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1) | ||
338 | } | ||
339 | } | ||
340 | } | ||
341 | |||
342 | #[derive(Default)] | ||
343 | struct TidyDocs { | ||
344 | missing_docs: Vec<String>, | ||
345 | contains_fixme: Vec<PathBuf>, | ||
346 | } | ||
347 | |||
348 | impl TidyDocs { | ||
349 | fn visit(&mut self, path: &Path, text: &str) { | ||
350 | // Tests hopefully don't really need comments, and for assists we already | ||
351 | // have special comments which are the source of doc tests and user docs. | ||
352 | if is_exclude_dir(path, &["tests", "test_data"]) { | ||
353 | return; | ||
354 | } | ||
355 | |||
356 | if is_exclude_file(path) { | ||
357 | return; | ||
358 | } | ||
359 | |||
360 | let first_line = match text.lines().next() { | ||
361 | Some(it) => it, | ||
362 | None => return, | ||
363 | }; | ||
364 | |||
365 | if first_line.starts_with("//!") { | ||
366 | if first_line.contains("FIXME") { | ||
367 | self.contains_fixme.push(path.to_path_buf()); | ||
368 | } | ||
369 | } else { | ||
370 | if text.contains("// Feature:") || text.contains("// Assist:") { | ||
371 | return; | ||
372 | } | ||
373 | self.missing_docs.push(path.display().to_string()); | ||
374 | } | ||
375 | |||
376 | fn is_exclude_file(d: &Path) -> bool { | ||
377 | let file_names = ["tests.rs", "famous_defs_fixture.rs"]; | ||
378 | |||
379 | d.file_name() | ||
380 | .unwrap_or_default() | ||
381 | .to_str() | ||
382 | .map(|f_n| file_names.iter().any(|name| *name == f_n)) | ||
383 | .unwrap_or(false) | ||
384 | } | ||
385 | } | ||
386 | |||
387 | fn finish(self) { | ||
388 | if !self.missing_docs.is_empty() { | ||
389 | panic!( | ||
390 | "\nMissing docs strings\n\n\ | ||
391 | modules:\n{}\n\n", | ||
392 | self.missing_docs.join("\n") | ||
393 | ) | ||
394 | } | ||
395 | |||
396 | let poorly_documented = [ | ||
397 | "hir", | ||
398 | "hir_expand", | ||
399 | "ide", | ||
400 | "mbe", | ||
401 | "parser", | ||
402 | "profile", | ||
403 | "project_model", | ||
404 | "syntax", | ||
405 | "tt", | ||
406 | "hir_ty", | ||
407 | ]; | ||
408 | |||
409 | let mut has_fixmes = | ||
410 | poorly_documented.iter().map(|it| (*it, false)).collect::<HashMap<&str, bool>>(); | ||
411 | 'outer: for path in self.contains_fixme { | ||
412 | for krate in poorly_documented.iter() { | ||
413 | if path.components().any(|it| it.as_os_str() == *krate) { | ||
414 | has_fixmes.insert(krate, true); | ||
415 | continue 'outer; | ||
416 | } | ||
417 | } | ||
418 | panic!("FIXME doc in a fully-documented crate: {}", path.display()) | ||
419 | } | ||
420 | |||
421 | for (krate, has_fixme) in has_fixmes.iter() { | ||
422 | if !has_fixme { | ||
423 | panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate) | ||
424 | } | ||
425 | } | ||
426 | } | ||
427 | } | ||
428 | |||
429 | fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { | ||
430 | p.strip_prefix(project_root()) | ||
431 | .unwrap() | ||
432 | .components() | ||
433 | .rev() | ||
434 | .skip(1) | ||
435 | .filter_map(|it| it.as_os_str().to_str()) | ||
436 | .any(|it| dirs_to_exclude.contains(&it)) | ||
437 | } | ||
438 | |||
439 | #[allow(deprecated)] | ||
440 | fn stable_hash(text: &str) -> u64 { | ||
441 | use std::hash::{Hash, Hasher, SipHasher}; | ||
442 | |||
443 | let text = text.replace('\r', ""); | ||
444 | let mut hasher = SipHasher::default(); | ||
445 | text.hash(&mut hasher); | ||
446 | hasher.finish() | ||
447 | } | ||
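
Taken together, check_lsp_extensions_docs and stable_hash keep crates/rust-analyzer/src/lsp_ext.rs and docs/dev/lsp-extensions.md in sync: the markdown file carries an "lsp_ext.rs hash:" line, and the test fails whenever the hash of the Rust source no longer matches it, printing the new value as "Expected hash". A small hypothetical helper (not part of xtask) that recomputes that value with the same recipe, assuming it is run from the repository root:

    // Prints the value to paste after "lsp_ext.rs hash:" in docs/dev/lsp-extensions.md.
    #[allow(deprecated)] // SipHasher is deprecated but still available, matching stable_hash above.
    fn main() -> std::io::Result<()> {
        use std::hash::{Hash, Hasher, SipHasher};

        let text = std::fs::read_to_string("crates/rust-analyzer/src/lsp_ext.rs")?
            .replace('\r', ""); // normalize line endings, exactly as stable_hash does
        let mut hasher = SipHasher::default();
        text.hash(&mut hasher);
        println!("{:x}", hasher.finish());
        Ok(())
    }
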