From d9dcfd81c5d4325379ac88c4250b9c77ecbd75e8 Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Mon, 1 Mar 2021 20:16:23 +0300 Subject: Simplify xtask lib/bin/test separation isn't really needed. --- xtask/Cargo.toml | 3 - xtask/src/codegen.rs | 10 +- xtask/src/codegen/gen_assists_docs.rs | 4 +- xtask/src/codegen/gen_diagnostic_docs.rs | 2 +- xtask/src/codegen/gen_feature_docs.rs | 2 +- xtask/src/codegen/gen_lint_completions.rs | 2 +- xtask/src/codegen/gen_parser_tests.rs | 2 +- xtask/src/codegen/gen_syntax.rs | 2 +- xtask/src/dist.rs | 8 +- xtask/src/install.rs | 18 +- xtask/src/lib.rs | 131 --------- xtask/src/main.rs | 134 +++++++++- xtask/src/metrics.rs | 6 +- xtask/src/pre_cache.rs | 4 +- xtask/src/pre_commit.rs | 4 +- xtask/src/release.rs | 12 +- xtask/src/tidy.rs | 424 ++++++++++++++++++++++++++++++ xtask/tests/tidy.rs | 423 ----------------------------- 18 files changed, 589 insertions(+), 602 deletions(-) delete mode 100644 xtask/src/lib.rs create mode 100644 xtask/src/tidy.rs delete mode 100644 xtask/tests/tidy.rs diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index b379797f9..0455dd2eb 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml @@ -6,9 +6,6 @@ authors = ["rust-analyzer developers"] publish = false license = "MIT OR Apache-2.0" -[lib] -doctest = false - [dependencies] anyhow = "1.0.26" flate2 = "1.0" diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs index adea053b6..743e83e76 100644 --- a/xtask/src/codegen.rs +++ b/xtask/src/codegen.rs @@ -20,7 +20,7 @@ use xshell::{cmd, pushenv, read_file, write_file}; use crate::{ensure_rustfmt, project_root, Result}; -pub use self::{ +pub(crate) use self::{ gen_assists_docs::{generate_assists_docs, generate_assists_tests}, gen_diagnostic_docs::generate_diagnostic_docs, gen_feature_docs::generate_feature_docs, @@ -30,17 +30,17 @@ pub use self::{ }; #[derive(Debug, PartialEq, Eq, Clone, Copy)] -pub enum Mode { +pub(crate) enum Mode { Overwrite, Verify, } -pub struct CodegenCmd { - pub features: bool, +pub(crate) struct CodegenCmd { + pub(crate) features: bool, } impl CodegenCmd { - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { if self.features { generate_lint_completions(Mode::Overwrite)?; } diff --git a/xtask/src/codegen/gen_assists_docs.rs b/xtask/src/codegen/gen_assists_docs.rs index 1ae1343a5..c469b388d 100644 --- a/xtask/src/codegen/gen_assists_docs.rs +++ b/xtask/src/codegen/gen_assists_docs.rs @@ -7,12 +7,12 @@ use crate::{ project_root, rust_files_in, Result, }; -pub fn generate_assists_tests(mode: Mode) -> Result<()> { +pub(crate) fn generate_assists_tests(mode: Mode) -> Result<()> { let assists = Assist::collect()?; generate_tests(&assists, mode) } -pub fn generate_assists_docs(mode: Mode) -> Result<()> { +pub(crate) fn generate_assists_docs(mode: Mode) -> Result<()> { let assists = Assist::collect()?; let contents = assists.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); diff --git a/xtask/src/codegen/gen_diagnostic_docs.rs b/xtask/src/codegen/gen_diagnostic_docs.rs index 7c14d4a07..a2561817b 100644 --- a/xtask/src/codegen/gen_diagnostic_docs.rs +++ b/xtask/src/codegen/gen_diagnostic_docs.rs @@ -7,7 +7,7 @@ use crate::{ project_root, rust_files, Result, }; -pub fn generate_diagnostic_docs(mode: Mode) -> Result<()> { +pub(crate) fn generate_diagnostic_docs(mode: Mode) -> Result<()> { let diagnostics = Diagnostic::collect()?; let contents = diagnostics.into_iter().map(|it| 
it.to_string()).collect::>().join("\n\n"); diff --git a/xtask/src/codegen/gen_feature_docs.rs b/xtask/src/codegen/gen_feature_docs.rs index 61081063b..cad7ff477 100644 --- a/xtask/src/codegen/gen_feature_docs.rs +++ b/xtask/src/codegen/gen_feature_docs.rs @@ -7,7 +7,7 @@ use crate::{ project_root, rust_files, Result, }; -pub fn generate_feature_docs(mode: Mode) -> Result<()> { +pub(crate) fn generate_feature_docs(mode: Mode) -> Result<()> { let features = Feature::collect()?; let contents = features.into_iter().map(|it| it.to_string()).collect::>().join("\n\n"); let contents = format!("//{}\n{}\n", PREAMBLE, contents.trim()); diff --git a/xtask/src/codegen/gen_lint_completions.rs b/xtask/src/codegen/gen_lint_completions.rs index 8c51d35c7..b1c057037 100644 --- a/xtask/src/codegen/gen_lint_completions.rs +++ b/xtask/src/codegen/gen_lint_completions.rs @@ -10,7 +10,7 @@ use crate::{ run_rustfmt, }; -pub fn generate_lint_completions(mode: Mode) -> Result<()> { +pub(crate) fn generate_lint_completions(mode: Mode) -> Result<()> { if !Path::new("./target/rust").exists() { cmd!("git clone --depth=1 https://github.com/rust-lang/rust ./target/rust").run()?; } diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs index 6e4abd10c..cb8939063 100644 --- a/xtask/src/codegen/gen_parser_tests.rs +++ b/xtask/src/codegen/gen_parser_tests.rs @@ -12,7 +12,7 @@ use crate::{ project_root, Result, }; -pub fn generate_parser_tests(mode: Mode) -> Result<()> { +pub(crate) fn generate_parser_tests(mode: Mode) -> Result<()> { let tests = tests_from_dir(&project_root().join(Path::new("crates/parser/src/grammar")))?; fn install_tests(tests: &HashMap, into: &str, mode: Mode) -> Result<()> { let tests_dir = project_root().join(into); diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs index eb524d85a..191bc0e9d 100644 --- a/xtask/src/codegen/gen_syntax.rs +++ b/xtask/src/codegen/gen_syntax.rs @@ -18,7 +18,7 @@ use crate::{ project_root, Result, }; -pub fn generate_syntax(mode: Mode) -> Result<()> { +pub(crate) fn generate_syntax(mode: Mode) -> Result<()> { let grammar = rust_grammar(); let ast = lower(&grammar); diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index 56bf9f99d..f2503f807 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs @@ -11,13 +11,13 @@ use xshell::{cmd, cp, mkdir_p, pushd, read_file, rm_rf, write_file}; use crate::{date_iso, project_root}; -pub struct DistCmd { - pub nightly: bool, - pub client_version: Option, +pub(crate) struct DistCmd { + pub(crate) nightly: bool, + pub(crate) client_version: Option, } impl DistCmd { - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { let dist = project_root().join("dist"); rm_rf(&dist)?; mkdir_p(&dist)?; diff --git a/xtask/src/install.rs b/xtask/src/install.rs index 4c5c2673c..ea2194248 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs @@ -8,13 +8,13 @@ use xshell::{cmd, pushd}; // Latest stable, feel free to send a PR if this lags behind. 
const REQUIRED_RUST_VERSION: u32 = 50; -pub struct InstallCmd { - pub client: Option, - pub server: Option, +pub(crate) struct InstallCmd { + pub(crate) client: Option, + pub(crate) server: Option, } #[derive(Clone, Copy)] -pub enum ClientOpt { +pub(crate) enum ClientOpt { VsCode, VsCodeExploration, VsCodeInsiders, @@ -24,7 +24,7 @@ pub enum ClientOpt { } impl ClientOpt { - pub const fn as_cmds(&self) -> &'static [&'static str] { + pub(crate) const fn as_cmds(&self) -> &'static [&'static str] { match self { ClientOpt::VsCode => &["code"], ClientOpt::VsCodeExploration => &["code-exploration"], @@ -60,18 +60,18 @@ impl std::str::FromStr for ClientOpt { } } -pub struct ServerOpt { - pub malloc: Malloc, +pub(crate) struct ServerOpt { + pub(crate) malloc: Malloc, } -pub enum Malloc { +pub(crate) enum Malloc { System, Mimalloc, Jemalloc, } impl InstallCmd { - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { if cfg!(target_os = "macos") { fix_path_for_mac().context("Fix path for mac")? } diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs deleted file mode 100644 index b19985fb2..000000000 --- a/xtask/src/lib.rs +++ /dev/null @@ -1,131 +0,0 @@ -//! Support library for `cargo xtask` command. -//! -//! See https://github.com/matklad/cargo-xtask/ - -pub mod codegen; -mod ast_src; - -pub mod install; -pub mod release; -pub mod dist; -pub mod pre_commit; -pub mod metrics; -pub mod pre_cache; - -use std::{ - env, - path::{Path, PathBuf}, -}; - -use walkdir::{DirEntry, WalkDir}; -use xshell::{cmd, pushd, pushenv}; - -use crate::codegen::Mode; - -pub use anyhow::{bail, Context as _, Result}; - -pub fn project_root() -> PathBuf { - Path::new( - &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()), - ) - .ancestors() - .nth(1) - .unwrap() - .to_path_buf() -} - -pub fn rust_files() -> impl Iterator { - rust_files_in(&project_root().join("crates")) -} - -pub fn cargo_files() -> impl Iterator { - files_in(&project_root(), "toml") - .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false)) -} - -pub fn rust_files_in(path: &Path) -> impl Iterator { - files_in(path, "rs") -} - -pub fn run_rustfmt(mode: Mode) -> Result<()> { - let _dir = pushd(project_root())?; - let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); - ensure_rustfmt()?; - let check = match mode { - Mode::Overwrite => &[][..], - Mode::Verify => &["--", "--check"], - }; - cmd!("cargo fmt {check...}").run()?; - Ok(()) -} - -fn ensure_rustfmt() -> Result<()> { - let out = cmd!("rustfmt --version").read()?; - if !out.contains("stable") { - bail!( - "Failed to run rustfmt from toolchain 'stable'. \ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ) - } - Ok(()) -} - -pub fn run_clippy() -> Result<()> { - if cmd!("cargo clippy --version").read().is_err() { - bail!( - "Failed run cargo clippy. 
\ - Please run `rustup component add clippy` to install it.", - ) - } - - let allowed_lints = " - -A clippy::collapsible_if - -A clippy::needless_pass_by_value - -A clippy::nonminimal_bool - -A clippy::redundant_pattern_matching - " - .split_ascii_whitespace(); - cmd!("cargo clippy --all-features --all-targets -- {allowed_lints...}").run()?; - Ok(()) -} - -pub fn run_fuzzer() -> Result<()> { - let _d = pushd("./crates/syntax")?; - let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly"); - if cmd!("cargo fuzz --help").read().is_err() { - cmd!("cargo install cargo-fuzz").run()?; - }; - - // Expecting nightly rustc - let out = cmd!("rustc --version").read()?; - if !out.contains("nightly") { - bail!("fuzz tests require nightly rustc") - } - - cmd!("cargo fuzz run parser").run()?; - Ok(()) -} - -fn date_iso() -> Result { - let res = cmd!("date --iso --utc").read()?; - Ok(res) -} - -fn is_release_tag(tag: &str) -> bool { - tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) -} - -fn files_in(path: &Path, ext: &'static str) -> impl Iterator { - let iter = WalkDir::new(path); - return iter - .into_iter() - .filter_entry(|e| !is_hidden(e)) - .map(|e| e.unwrap()) - .filter(|e| !e.file_type().is_dir()) - .map(|e| e.into_path()) - .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false)); - - fn is_hidden(entry: &DirEntry) -> bool { - entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) - } -} diff --git a/xtask/src/main.rs b/xtask/src/main.rs index cbb9b315e..48c0d9920 100644 --- a/xtask/src/main.rs +++ b/xtask/src/main.rs @@ -7,22 +7,35 @@ //! //! This binary is integrated into the `cargo` command line by using an alias in //! `.cargo/config`. +mod codegen; +mod ast_src; +#[cfg(test)] +mod tidy; -use std::env; +mod install; +mod release; +mod dist; +mod pre_commit; +mod metrics; +mod pre_cache; -use anyhow::bail; +use anyhow::{bail, Result}; use codegen::CodegenCmd; use pico_args::Arguments; -use xshell::{cmd, cp, pushd}; -use xtask::{ - codegen::{self, Mode}, +use std::{ + env, + path::{Path, PathBuf}, +}; +use walkdir::{DirEntry, WalkDir}; +use xshell::{cmd, cp, pushd, pushenv}; + +use crate::{ + codegen::Mode, dist::DistCmd, install::{InstallCmd, Malloc, ServerOpt}, metrics::MetricsCmd, pre_cache::PreCacheCmd, - pre_commit, project_root, release::{PromoteCmd, ReleaseCmd}, - run_clippy, run_fuzzer, run_rustfmt, Result, }; fn main() -> Result<()> { @@ -172,3 +185,110 @@ fn finish_args(args: Arguments) -> Result<()> { } Ok(()) } + +fn project_root() -> PathBuf { + Path::new( + &env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| env!("CARGO_MANIFEST_DIR").to_owned()), + ) + .ancestors() + .nth(1) + .unwrap() + .to_path_buf() +} + +fn rust_files() -> impl Iterator { + rust_files_in(&project_root().join("crates")) +} + +#[cfg(test)] +fn cargo_files() -> impl Iterator { + files_in(&project_root(), "toml") + .filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false)) +} + +fn rust_files_in(path: &Path) -> impl Iterator { + files_in(path, "rs") +} + +fn run_rustfmt(mode: Mode) -> Result<()> { + let _dir = pushd(project_root())?; + let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); + ensure_rustfmt()?; + let check = match mode { + Mode::Overwrite => &[][..], + Mode::Verify => &["--", "--check"], + }; + cmd!("cargo fmt {check...}").run()?; + Ok(()) +} + +fn ensure_rustfmt() -> Result<()> { + let out = cmd!("rustfmt --version").read()?; + if !out.contains("stable") { + bail!( + "Failed to run rustfmt from toolchain 'stable'. 
\ + Please run `rustup component add rustfmt --toolchain stable` to install it.", + ) + } + Ok(()) +} + +fn run_clippy() -> Result<()> { + if cmd!("cargo clippy --version").read().is_err() { + bail!( + "Failed run cargo clippy. \ + Please run `rustup component add clippy` to install it.", + ) + } + + let allowed_lints = " + -A clippy::collapsible_if + -A clippy::needless_pass_by_value + -A clippy::nonminimal_bool + -A clippy::redundant_pattern_matching + " + .split_ascii_whitespace(); + cmd!("cargo clippy --all-features --all-targets -- {allowed_lints...}").run()?; + Ok(()) +} + +fn run_fuzzer() -> Result<()> { + let _d = pushd("./crates/syntax")?; + let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly"); + if cmd!("cargo fuzz --help").read().is_err() { + cmd!("cargo install cargo-fuzz").run()?; + }; + + // Expecting nightly rustc + let out = cmd!("rustc --version").read()?; + if !out.contains("nightly") { + bail!("fuzz tests require nightly rustc") + } + + cmd!("cargo fuzz run parser").run()?; + Ok(()) +} + +fn date_iso() -> Result { + let res = cmd!("date --iso --utc").read()?; + Ok(res) +} + +fn is_release_tag(tag: &str) -> bool { + tag.len() == "2020-02-24".len() && tag.starts_with(|c: char| c.is_ascii_digit()) +} + +fn files_in(path: &Path, ext: &'static str) -> impl Iterator { + let iter = WalkDir::new(path); + return iter + .into_iter() + .filter_entry(|e| !is_hidden(e)) + .map(|e| e.unwrap()) + .filter(|e| !e.file_type().is_dir()) + .map(|e| e.into_path()) + .filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false)); + + fn is_hidden(entry: &DirEntry) -> bool { + entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false) + } +} diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 624ad3b7e..babc2a6d4 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -11,12 +11,12 @@ use xshell::{cmd, mkdir_p, pushd, pushenv, read_file, rm_rf}; type Unit = String; -pub struct MetricsCmd { - pub dry_run: bool, +pub(crate) struct MetricsCmd { + pub(crate) dry_run: bool, } impl MetricsCmd { - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { let mut metrics = Metrics::new()?; if !self.dry_run { rm_rf("./target/release")?; diff --git a/xtask/src/pre_cache.rs b/xtask/src/pre_cache.rs index 569f88f68..54f4a95a9 100644 --- a/xtask/src/pre_cache.rs +++ b/xtask/src/pre_cache.rs @@ -6,12 +6,12 @@ use std::{ use anyhow::Result; use xshell::rm_rf; -pub struct PreCacheCmd; +pub(crate) struct PreCacheCmd; impl PreCacheCmd { /// Cleans the `./target` dir after the build such that only /// dependencies are cached on CI. - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { let slow_tests_cookie = Path::new("./target/.slow_tests_cookie"); if !slow_tests_cookie.exists() { panic!("slow tests were skipped on CI!") diff --git a/xtask/src/pre_commit.rs b/xtask/src/pre_commit.rs index 8f2dbea19..b57cf3ce2 100644 --- a/xtask/src/pre_commit.rs +++ b/xtask/src/pre_commit.rs @@ -9,7 +9,7 @@ use crate::{project_root, run_rustfmt, Mode}; // FIXME: if there are changed `.ts` files, also reformat TypeScript (by // shelling out to `npm fmt`). 
-pub fn run_hook() -> Result<()> { +pub(crate) fn run_hook() -> Result<()> { run_rustfmt(Mode::Overwrite)?; let diff = cmd!("git diff --diff-filter=MAR --name-only --cached").read()?; @@ -23,7 +23,7 @@ pub fn run_hook() -> Result<()> { Ok(()) } -pub fn install_hook() -> Result<()> { +pub(crate) fn install_hook() -> Result<()> { let hook_path: PathBuf = format!("./.git/hooks/pre-commit{}", std::env::consts::EXE_SUFFIX).into(); diff --git a/xtask/src/release.rs b/xtask/src/release.rs index 63556476d..5008881e4 100644 --- a/xtask/src/release.rs +++ b/xtask/src/release.rs @@ -4,12 +4,12 @@ use xshell::{cmd, cp, pushd, read_dir, write_file}; use crate::{codegen, date_iso, is_release_tag, project_root, Mode, Result}; -pub struct ReleaseCmd { - pub dry_run: bool, +pub(crate) struct ReleaseCmd { + pub(crate) dry_run: bool, } impl ReleaseCmd { - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { if !self.dry_run { cmd!("git switch release").run()?; cmd!("git fetch upstream --tags --force").run()?; @@ -86,12 +86,12 @@ https://github.com/sponsors/rust-analyzer[GitHub Sponsors]. } } -pub struct PromoteCmd { - pub dry_run: bool, +pub(crate) struct PromoteCmd { + pub(crate) dry_run: bool, } impl PromoteCmd { - pub fn run(self) -> Result<()> { + pub(crate) fn run(self) -> Result<()> { let _dir = pushd("../rust-rust-analyzer")?; cmd!("git switch master").run()?; cmd!("git fetch upstream").run()?; diff --git a/xtask/src/tidy.rs b/xtask/src/tidy.rs new file mode 100644 index 000000000..63116ec6b --- /dev/null +++ b/xtask/src/tidy.rs @@ -0,0 +1,424 @@ +use std::{ + collections::HashMap, + path::{Path, PathBuf}, +}; + +use xshell::{cmd, read_file}; + +use crate::{ + cargo_files, + codegen::{self, Mode}, + project_root, run_rustfmt, rust_files, +}; + +#[test] +fn generated_grammar_is_fresh() { + if let Err(error) = codegen::generate_syntax(Mode::Verify) { + panic!("{}. Please update it by running `cargo xtask codegen`", error); + } +} + +#[test] +fn generated_tests_are_fresh() { + if let Err(error) = codegen::generate_parser_tests(Mode::Verify) { + panic!("{}. Please update tests by running `cargo xtask codegen`", error); + } +} + +#[test] +fn generated_assists_are_fresh() { + if let Err(error) = codegen::generate_assists_tests(Mode::Verify) { + panic!("{}. Please update assists by running `cargo xtask codegen`", error); + } +} + +#[test] +fn check_code_formatting() { + if let Err(error) = run_rustfmt(Mode::Verify) { + panic!("{}. Please format the code by running `cargo format`", error); + } +} + +#[test] +fn smoke_test_docs_generation() { + // We don't commit docs to the repo, so we can just overwrite in tests. + codegen::generate_assists_docs(Mode::Overwrite).unwrap(); + codegen::generate_feature_docs(Mode::Overwrite).unwrap(); + codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap(); +} + +#[test] +fn check_lsp_extensions_docs() { + let expected_hash = { + let lsp_ext_rs = + read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap(); + stable_hash(lsp_ext_rs.as_str()) + }; + + let actual_hash = { + let lsp_extensions_md = + read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap(); + let text = lsp_extensions_md + .lines() + .find_map(|line| line.strip_prefix("lsp_ext.rs hash:")) + .unwrap() + .trim(); + u64::from_str_radix(text, 16).unwrap() + }; + + if actual_hash != expected_hash { + panic!( + " +lsp_ext.rs was changed without touching lsp-extensions.md. + +Expected hash: {:x} +Actual hash: {:x} + +Please adjust docs/dev/lsp-extensions.md. 
+", + expected_hash, actual_hash + ) + } +} + +#[test] +fn rust_files_are_tidy() { + let mut tidy_docs = TidyDocs::default(); + for path in rust_files() { + let text = read_file(&path).unwrap(); + check_todo(&path, &text); + check_dbg(&path, &text); + check_trailing_ws(&path, &text); + deny_clippy(&path, &text); + tidy_docs.visit(&path, &text); + } + tidy_docs.finish(); +} + +#[test] +fn cargo_files_are_tidy() { + for cargo in cargo_files() { + let mut section = None; + for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() { + let text = text.trim(); + if text.starts_with("[") { + section = Some(text); + continue; + } + if !section.map(|it| it.starts_with("[dependencies")).unwrap_or(false) { + continue; + } + let text: String = text.split_whitespace().collect(); + if text.contains("path=") && !text.contains("version") { + panic!( + "\ncargo internal dependencies should have version.\n\ + {}:{}\n", + cargo.display(), + line_no + 1 + ) + } + } + } +} + +#[test] +fn check_merge_commits() { + let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..") + .read() + .unwrap(); + if !stdout.is_empty() { + panic!( + " +Merge commits are not allowed in the history. + +When updating a pull-request, please rebase your feature branch +on top of master by running `git rebase master`. If rebase fails, +you can re-apply your changes like this: + + # Just look around to see the current state. + $ git status + $ git log + + # Abort in-progress rebase and merges, if any. + $ git rebase --abort + $ git merge --abort + + # Make the branch point to the latest commit from master, + # while maintaining your local changes uncommited. + $ git reset --soft origin/master + + # Commit all changes in a single batch. + $ git commit -am'My changes' + + # Verify that everything looks alright. + $ git status + $ git log + + # Push the changes. We did a rebase, so we need `--force` option. + # `--force-with-lease` is a more safe (Rusty) version of `--force`. + $ git push --force-with-lease + + # Verify that both local and remote branch point to the same commit. + $ git log + +And don't fear to mess something up during a rebase -- you can +always restore the previous state using `git ref-log`: + +https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local +" + ); + } +} + +fn deny_clippy(path: &PathBuf, text: &String) { + let ignore = &[ + // The documentation in string literals may contain anything for its own purposes + "ide_completion/src/generated_lint_completions.rs", + ]; + if ignore.iter().any(|p| path.ends_with(p)) { + return; + } + + if text.contains("\u{61}llow(clippy") { + panic!( + "\n\nallowing lints is forbidden: {}. +rust-analyzer intentionally doesn't check clippy on CI. +You can allow lint globally via `xtask clippy`. +See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion. 
+ +", + path.display() + ) + } +} + +#[test] +fn check_licenses() { + let expected = " +0BSD OR MIT OR Apache-2.0 +Apache-2.0 +Apache-2.0 OR BSL-1.0 +Apache-2.0 OR MIT +Apache-2.0/MIT +BSD-3-Clause +CC0-1.0 +ISC +MIT +MIT / Apache-2.0 +MIT OR Apache-2.0 +MIT OR Apache-2.0 OR Zlib +MIT OR Zlib OR Apache-2.0 +MIT/Apache-2.0 +Unlicense OR MIT +Unlicense/MIT +Zlib OR Apache-2.0 OR MIT +" + .lines() + .filter(|it| !it.is_empty()) + .collect::>(); + + let meta = cmd!("cargo metadata --format-version 1").read().unwrap(); + let mut licenses = meta + .split(|c| c == ',' || c == '{' || c == '}') + .filter(|it| it.contains(r#""license""#)) + .map(|it| it.trim()) + .map(|it| it[r#""license":"#.len()..].trim_matches('"')) + .collect::>(); + licenses.sort(); + licenses.dedup(); + if licenses != expected { + let mut diff = String::new(); + + diff += &format!("New Licenses:\n"); + for &l in licenses.iter() { + if !expected.contains(&l) { + diff += &format!(" {}\n", l) + } + } + + diff += &format!("\nMissing Licenses:\n"); + for &l in expected.iter() { + if !licenses.contains(&l) { + diff += &format!(" {}\n", l) + } + } + + panic!("different set of licenses!\n{}", diff); + } + assert_eq!(licenses, expected); +} + +fn check_todo(path: &Path, text: &str) { + let need_todo = &[ + // This file itself obviously needs to use todo (<- like this!). + "tests/tidy.rs", + // Some of our assists generate `todo!()`. + "handlers/add_turbo_fish.rs", + "handlers/generate_function.rs", + // To support generating `todo!()` in assists, we have `expr_todo()` in + // `ast::make`. + "ast/make.rs", + // The documentation in string literals may contain anything for its own purposes + "ide_completion/src/generated_lint_completions.rs", + ]; + if need_todo.iter().any(|p| path.ends_with(p)) { + return; + } + if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") { + // Generated by an assist + if text.contains("${0:todo!()}") { + return; + } + + panic!( + "\nTODO markers or todo! macros should not be committed to the master branch,\n\ + use FIXME instead\n\ + {}\n", + path.display(), + ) + } +} + +fn check_dbg(path: &Path, text: &str) { + let need_dbg = &[ + // This file itself obviously needs to use dbg. + "tests/tidy.rs", + // Assists to remove `dbg!()` + "handlers/remove_dbg.rs", + // We have .dbg postfix + "ide_completion/src/completions/postfix.rs", + // The documentation in string literals may contain anything for its own purposes + "ide_completion/src/lib.rs", + "ide_completion/src/generated_lint_completions.rs", + // test for doc test for remove_dbg + "src/tests/generated.rs", + ]; + if need_dbg.iter().any(|p| path.ends_with(p)) { + return; + } + if text.contains("dbg!") { + panic!( + "\ndbg! macros should not be committed to the master branch,\n\ + {}\n", + path.display(), + ) + } +} + +fn check_trailing_ws(path: &Path, text: &str) { + if is_exclude_dir(path, &["test_data"]) { + return; + } + for (line_number, line) in text.lines().enumerate() { + if line.chars().last().map(char::is_whitespace) == Some(true) { + panic!("Trailing whitespace in {} at line {}", path.display(), line_number) + } + } +} + +#[derive(Default)] +struct TidyDocs { + missing_docs: Vec, + contains_fixme: Vec, +} + +impl TidyDocs { + fn visit(&mut self, path: &Path, text: &str) { + // Test hopefully don't really need comments, and for assists we already + // have special comments which are source of doc tests and user docs. 
+ if is_exclude_dir(path, &["tests", "test_data"]) { + return; + } + + if is_exclude_file(path) { + return; + } + + let first_line = match text.lines().next() { + Some(it) => it, + None => return, + }; + + if first_line.starts_with("//!") { + if first_line.contains("FIXME") { + self.contains_fixme.push(path.to_path_buf()); + } + } else { + if text.contains("// Feature:") || text.contains("// Assist:") { + return; + } + self.missing_docs.push(path.display().to_string()); + } + + fn is_exclude_file(d: &Path) -> bool { + let file_names = ["tests.rs", "famous_defs_fixture.rs"]; + + d.file_name() + .unwrap_or_default() + .to_str() + .map(|f_n| file_names.iter().any(|name| *name == f_n)) + .unwrap_or(false) + } + } + + fn finish(self) { + if !self.missing_docs.is_empty() { + panic!( + "\nMissing docs strings\n\n\ + modules:\n{}\n\n", + self.missing_docs.join("\n") + ) + } + + let poorly_documented = [ + "hir", + "hir_expand", + "ide", + "mbe", + "parser", + "profile", + "project_model", + "syntax", + "tt", + "hir_ty", + ]; + + let mut has_fixmes = + poorly_documented.iter().map(|it| (*it, false)).collect::>(); + 'outer: for path in self.contains_fixme { + for krate in poorly_documented.iter() { + if path.components().any(|it| it.as_os_str() == *krate) { + has_fixmes.insert(krate, true); + continue 'outer; + } + } + panic!("FIXME doc in a fully-documented crate: {}", path.display()) + } + + for (krate, has_fixme) in has_fixmes.iter() { + if !has_fixme { + panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate) + } + } + } +} + +fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { + p.strip_prefix(project_root()) + .unwrap() + .components() + .rev() + .skip(1) + .filter_map(|it| it.as_os_str().to_str()) + .any(|it| dirs_to_exclude.contains(&it)) +} + +#[allow(deprecated)] +fn stable_hash(text: &str) -> u64 { + use std::hash::{Hash, Hasher, SipHasher}; + + let text = text.replace('\r', ""); + let mut hasher = SipHasher::default(); + text.hash(&mut hasher); + hasher.finish() +} diff --git a/xtask/tests/tidy.rs b/xtask/tests/tidy.rs deleted file mode 100644 index a72498a38..000000000 --- a/xtask/tests/tidy.rs +++ /dev/null @@ -1,423 +0,0 @@ -use std::{ - collections::HashMap, - path::{Path, PathBuf}, -}; - -use xshell::{cmd, read_file}; -use xtask::{ - cargo_files, - codegen::{self, Mode}, - project_root, run_rustfmt, rust_files, -}; - -#[test] -fn generated_grammar_is_fresh() { - if let Err(error) = codegen::generate_syntax(Mode::Verify) { - panic!("{}. Please update it by running `cargo xtask codegen`", error); - } -} - -#[test] -fn generated_tests_are_fresh() { - if let Err(error) = codegen::generate_parser_tests(Mode::Verify) { - panic!("{}. Please update tests by running `cargo xtask codegen`", error); - } -} - -#[test] -fn generated_assists_are_fresh() { - if let Err(error) = codegen::generate_assists_tests(Mode::Verify) { - panic!("{}. Please update assists by running `cargo xtask codegen`", error); - } -} - -#[test] -fn check_code_formatting() { - if let Err(error) = run_rustfmt(Mode::Verify) { - panic!("{}. Please format the code by running `cargo format`", error); - } -} - -#[test] -fn smoke_test_docs_generation() { - // We don't commit docs to the repo, so we can just overwrite in tests. 
- codegen::generate_assists_docs(Mode::Overwrite).unwrap(); - codegen::generate_feature_docs(Mode::Overwrite).unwrap(); - codegen::generate_diagnostic_docs(Mode::Overwrite).unwrap(); -} - -#[test] -fn check_lsp_extensions_docs() { - let expected_hash = { - let lsp_ext_rs = - read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap(); - stable_hash(lsp_ext_rs.as_str()) - }; - - let actual_hash = { - let lsp_extensions_md = - read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap(); - let text = lsp_extensions_md - .lines() - .find_map(|line| line.strip_prefix("lsp_ext.rs hash:")) - .unwrap() - .trim(); - u64::from_str_radix(text, 16).unwrap() - }; - - if actual_hash != expected_hash { - panic!( - " -lsp_ext.rs was changed without touching lsp-extensions.md. - -Expected hash: {:x} -Actual hash: {:x} - -Please adjust docs/dev/lsp-extensions.md. -", - expected_hash, actual_hash - ) - } -} - -#[test] -fn rust_files_are_tidy() { - let mut tidy_docs = TidyDocs::default(); - for path in rust_files() { - let text = read_file(&path).unwrap(); - check_todo(&path, &text); - check_dbg(&path, &text); - check_trailing_ws(&path, &text); - deny_clippy(&path, &text); - tidy_docs.visit(&path, &text); - } - tidy_docs.finish(); -} - -#[test] -fn cargo_files_are_tidy() { - for cargo in cargo_files() { - let mut section = None; - for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() { - let text = text.trim(); - if text.starts_with("[") { - section = Some(text); - continue; - } - if !section.map(|it| it.starts_with("[dependencies")).unwrap_or(false) { - continue; - } - let text: String = text.split_whitespace().collect(); - if text.contains("path=") && !text.contains("version") { - panic!( - "\ncargo internal dependencies should have version.\n\ - {}:{}\n", - cargo.display(), - line_no + 1 - ) - } - } - } -} - -#[test] -fn check_merge_commits() { - let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..") - .read() - .unwrap(); - if !stdout.is_empty() { - panic!( - " -Merge commits are not allowed in the history. - -When updating a pull-request, please rebase your feature branch -on top of master by running `git rebase master`. If rebase fails, -you can re-apply your changes like this: - - # Just look around to see the current state. - $ git status - $ git log - - # Abort in-progress rebase and merges, if any. - $ git rebase --abort - $ git merge --abort - - # Make the branch point to the latest commit from master, - # while maintaining your local changes uncommited. - $ git reset --soft origin/master - - # Commit all changes in a single batch. - $ git commit -am'My changes' - - # Verify that everything looks alright. - $ git status - $ git log - - # Push the changes. We did a rebase, so we need `--force` option. - # `--force-with-lease` is a more safe (Rusty) version of `--force`. - $ git push --force-with-lease - - # Verify that both local and remote branch point to the same commit. 
- $ git log - -And don't fear to mess something up during a rebase -- you can -always restore the previous state using `git ref-log`: - -https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local -" - ); - } -} - -fn deny_clippy(path: &PathBuf, text: &String) { - let ignore = &[ - // The documentation in string literals may contain anything for its own purposes - "ide_completion/src/generated_lint_completions.rs", - ]; - if ignore.iter().any(|p| path.ends_with(p)) { - return; - } - - if text.contains("\u{61}llow(clippy") { - panic!( - "\n\nallowing lints is forbidden: {}. -rust-analyzer intentionally doesn't check clippy on CI. -You can allow lint globally via `xtask clippy`. -See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion. - -", - path.display() - ) - } -} - -#[test] -fn check_licenses() { - let expected = " -0BSD OR MIT OR Apache-2.0 -Apache-2.0 -Apache-2.0 OR BSL-1.0 -Apache-2.0 OR MIT -Apache-2.0/MIT -BSD-3-Clause -CC0-1.0 -ISC -MIT -MIT / Apache-2.0 -MIT OR Apache-2.0 -MIT OR Apache-2.0 OR Zlib -MIT OR Zlib OR Apache-2.0 -MIT/Apache-2.0 -Unlicense OR MIT -Unlicense/MIT -Zlib OR Apache-2.0 OR MIT -" - .lines() - .filter(|it| !it.is_empty()) - .collect::>(); - - let meta = cmd!("cargo metadata --format-version 1").read().unwrap(); - let mut licenses = meta - .split(|c| c == ',' || c == '{' || c == '}') - .filter(|it| it.contains(r#""license""#)) - .map(|it| it.trim()) - .map(|it| it[r#""license":"#.len()..].trim_matches('"')) - .collect::>(); - licenses.sort(); - licenses.dedup(); - if licenses != expected { - let mut diff = String::new(); - - diff += &format!("New Licenses:\n"); - for &l in licenses.iter() { - if !expected.contains(&l) { - diff += &format!(" {}\n", l) - } - } - - diff += &format!("\nMissing Licenses:\n"); - for &l in expected.iter() { - if !licenses.contains(&l) { - diff += &format!(" {}\n", l) - } - } - - panic!("different set of licenses!\n{}", diff); - } - assert_eq!(licenses, expected); -} - -fn check_todo(path: &Path, text: &str) { - let need_todo = &[ - // This file itself obviously needs to use todo (<- like this!). - "tests/tidy.rs", - // Some of our assists generate `todo!()`. - "handlers/add_turbo_fish.rs", - "handlers/generate_function.rs", - // To support generating `todo!()` in assists, we have `expr_todo()` in - // `ast::make`. - "ast/make.rs", - // The documentation in string literals may contain anything for its own purposes - "ide_completion/src/generated_lint_completions.rs", - ]; - if need_todo.iter().any(|p| path.ends_with(p)) { - return; - } - if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") { - // Generated by an assist - if text.contains("${0:todo!()}") { - return; - } - - panic!( - "\nTODO markers or todo! macros should not be committed to the master branch,\n\ - use FIXME instead\n\ - {}\n", - path.display(), - ) - } -} - -fn check_dbg(path: &Path, text: &str) { - let need_dbg = &[ - // This file itself obviously needs to use dbg. - "tests/tidy.rs", - // Assists to remove `dbg!()` - "handlers/remove_dbg.rs", - // We have .dbg postfix - "ide_completion/src/completions/postfix.rs", - // The documentation in string literals may contain anything for its own purposes - "ide_completion/src/lib.rs", - "ide_completion/src/generated_lint_completions.rs", - // test for doc test for remove_dbg - "src/tests/generated.rs", - ]; - if need_dbg.iter().any(|p| path.ends_with(p)) { - return; - } - if text.contains("dbg!") { - panic!( - "\ndbg! 
macros should not be committed to the master branch,\n\ - {}\n", - path.display(), - ) - } -} - -fn check_trailing_ws(path: &Path, text: &str) { - if is_exclude_dir(path, &["test_data"]) { - return; - } - for (line_number, line) in text.lines().enumerate() { - if line.chars().last().map(char::is_whitespace) == Some(true) { - panic!("Trailing whitespace in {} at line {}", path.display(), line_number) - } - } -} - -#[derive(Default)] -struct TidyDocs { - missing_docs: Vec, - contains_fixme: Vec, -} - -impl TidyDocs { - fn visit(&mut self, path: &Path, text: &str) { - // Test hopefully don't really need comments, and for assists we already - // have special comments which are source of doc tests and user docs. - if is_exclude_dir(path, &["tests", "test_data"]) { - return; - } - - if is_exclude_file(path) { - return; - } - - let first_line = match text.lines().next() { - Some(it) => it, - None => return, - }; - - if first_line.starts_with("//!") { - if first_line.contains("FIXME") { - self.contains_fixme.push(path.to_path_buf()); - } - } else { - if text.contains("// Feature:") || text.contains("// Assist:") { - return; - } - self.missing_docs.push(path.display().to_string()); - } - - fn is_exclude_file(d: &Path) -> bool { - let file_names = ["tests.rs", "famous_defs_fixture.rs"]; - - d.file_name() - .unwrap_or_default() - .to_str() - .map(|f_n| file_names.iter().any(|name| *name == f_n)) - .unwrap_or(false) - } - } - - fn finish(self) { - if !self.missing_docs.is_empty() { - panic!( - "\nMissing docs strings\n\n\ - modules:\n{}\n\n", - self.missing_docs.join("\n") - ) - } - - let poorly_documented = [ - "hir", - "hir_expand", - "ide", - "mbe", - "parser", - "profile", - "project_model", - "syntax", - "tt", - "hir_ty", - ]; - - let mut has_fixmes = - poorly_documented.iter().map(|it| (*it, false)).collect::>(); - 'outer: for path in self.contains_fixme { - for krate in poorly_documented.iter() { - if path.components().any(|it| it.as_os_str() == *krate) { - has_fixmes.insert(krate, true); - continue 'outer; - } - } - panic!("FIXME doc in a fully-documented crate: {}", path.display()) - } - - for (krate, has_fixme) in has_fixmes.iter() { - if !has_fixme { - panic!("crate {} is fully documented :tada:, remove it from the list of poorly documented crates", krate) - } - } - } -} - -fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool { - p.strip_prefix(project_root()) - .unwrap() - .components() - .rev() - .skip(1) - .filter_map(|it| it.as_os_str().to_str()) - .any(|it| dirs_to_exclude.contains(&it)) -} - -#[allow(deprecated)] -fn stable_hash(text: &str) -> u64 { - use std::hash::{Hash, Hasher, SipHasher}; - - let text = text.replace('\r', ""); - let mut hasher = SipHasher::default(); - text.hash(&mut hasher); - hasher.finish() -} -- cgit v1.2.3
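Editor's sketch (not part of the patch): the commit message argues that the lib/bin/test split isn't needed, and the diff replaces xtask's lib target and `xtask/tests/tidy.rs` with `#[cfg(test)] mod tidy;` inside the binary crate, downgrading `pub` items to `pub(crate)`. A minimal, self-contained illustration of that layout follows; the function body and test are stand-ins, only the structural idea (unit tests inside the bin crate reaching pub(crate) items, no separate lib or tests/ directory) is taken from the diff.

    // main.rs of a bin-only crate: helpers need only pub(crate) visibility,
    // and the tidy checks compile as ordinary unit tests under #[cfg(test)].
    pub(crate) fn project_root() -> std::path::PathBuf {
        // stand-in body; the real xtask walks up from CARGO_MANIFEST_DIR
        std::path::PathBuf::from(env!("CARGO_MANIFEST_DIR"))
    }

    fn main() {
        println!("{}", project_root().display());
    }

    #[cfg(test)]
    mod tidy {
        // Unit tests can use pub(crate) items directly; an integration test
        // under tests/ would have required a lib target with pub items.
        use super::project_root;

        #[test]
        fn project_root_exists() {
            assert!(project_root().exists());
        }
    }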