From b6101184537b1165cfdd5fc473e04ad4c5b7bffa Mon Sep 17 00:00:00 2001 From: Aleksey Kladov Date: Mon, 2 Nov 2020 13:13:32 +0100 Subject: Deny unreachable-pub It's very useful when `pub` is equivalent to "this is the crate's public API", so let's enforce this! Ideally, we should enforce it for local `cargo test` as well, and not only during CI, but that needs https://github.com/rust-lang/cargo/issues/5034. --- .github/workflows/ci.yaml | 2 +- .github/workflows/metrics.yaml | 2 +- .github/workflows/release.yaml | 2 +- .github/workflows/rustdoc.yaml | 2 +- crates/base_db/src/input.rs | 4 +- crates/base_db/src/lib.rs | 4 +- crates/hir_def/src/body/lower.rs | 6 +-- crates/hir_def/src/lib.rs | 6 +-- crates/hir_def/src/nameres/collector.rs | 14 +++--- crates/hir_def/src/test_db.rs | 14 +++--- .../hir_ty/src/diagnostics/decl_check/case_conv.rs | 6 +-- crates/hir_ty/src/diagnostics/expr.rs | 15 ++---- crates/hir_ty/src/diagnostics/unsafe_check.rs | 8 ++-- crates/hir_ty/src/infer.rs | 6 +-- crates/hir_ty/src/infer/expr.rs | 2 +- crates/hir_ty/src/infer/unify.rs | 35 ++++++++------ crates/hir_ty/src/lib.rs | 3 +- crates/hir_ty/src/test_db.rs | 6 +-- crates/hir_ty/src/traits/chalk/tls.rs | 54 +++++++++++----------- crates/mbe/src/mbe_expander/matcher.rs | 10 ++-- crates/mbe/src/subtree_source.rs | 8 ++-- crates/proc_macro_api/src/process.rs | 8 ++-- crates/proc_macro_api/src/rpc.rs | 34 +++++++------- crates/proc_macro_srv/src/lib.rs | 1 + crates/profile/src/hprof.rs | 4 +- crates/profile/src/tree.rs | 16 +++---- crates/syntax/src/ast.rs | 2 +- crates/syntax/src/ast/generated.rs | 6 +-- crates/syntax/src/lib.rs | 11 +++-- crates/syntax/src/parsing.rs | 4 +- crates/syntax/src/parsing/text_token_source.rs | 2 +- crates/syntax/src/syntax_node.rs | 6 +-- crates/vfs/src/vfs_path.rs | 2 +- xtask/src/codegen/gen_parser_tests.rs | 10 ++-- 34 files changed, 157 insertions(+), 158 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 77f2e2e16..83b1d33a1 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -12,7 +12,7 @@ env: CARGO_NET_RETRY: 10 CI: 1 RUST_BACKTRACE: short - RUSTFLAGS: -D warnings + RUSTFLAGS: "-D warnings " # -W unreachable-pub" RUSTUP_MAX_RETRIES: 10 jobs: diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index ea780e875..72050a527 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -7,7 +7,7 @@ on: env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 - RUSTFLAGS: -D warnings + RUSTFLAGS: "-D warnings " # -W unreachable-pub" RUSTUP_MAX_RETRIES: 10 jobs: diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index c1d56a8e0..fbefbd9eb 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -11,7 +11,7 @@ on: env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 - RUSTFLAGS: -D warnings + RUSTFLAGS: "-D warnings " # -W unreachable-pub" RUSTUP_MAX_RETRIES: 10 jobs: diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml index cf4bca840..29ca3d3b2 100644 --- a/.github/workflows/rustdoc.yaml +++ b/.github/workflows/rustdoc.yaml @@ -7,7 +7,7 @@ on: env: CARGO_INCREMENTAL: 0 CARGO_NET_RETRY: 10 - RUSTFLAGS: -D warnings + RUSTFLAGS: "-D warnings " # -W unreachable-pub" RUSTUP_MAX_RETRIES: 10 jobs: diff --git a/crates/base_db/src/input.rs b/crates/base_db/src/input.rs index 87f0a0ce5..31907ed98 100644 --- a/crates/base_db/src/input.rs +++ b/crates/base_db/src/input.rs @@ -12,9 +12,7 @@ use cfg::CfgOptions; use rustc_hash::{FxHashMap, FxHashSet}; use
syntax::SmolStr; use tt::TokenExpander; -use vfs::{file_set::FileSet, VfsPath}; - -pub use vfs::FileId; +use vfs::{file_set::FileSet, FileId, VfsPath}; /// Files are grouped into source roots. A source root is a directory on the /// file systems which is watched for changes. Typically it corresponds to a diff --git a/crates/base_db/src/lib.rs b/crates/base_db/src/lib.rs index 0804202d6..ce75a5337 100644 --- a/crates/base_db/src/lib.rs +++ b/crates/base_db/src/lib.rs @@ -14,11 +14,11 @@ pub use crate::{ change::Change, input::{ CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, - FileId, ProcMacroId, SourceRoot, SourceRootId, + ProcMacroId, SourceRoot, SourceRootId, }, }; pub use salsa; -pub use vfs::{file_set::FileSet, VfsPath}; +pub use vfs::{file_set::FileSet, FileId, VfsPath}; #[macro_export] macro_rules! impl_intern_key { diff --git a/crates/hir_def/src/body/lower.rs b/crates/hir_def/src/body/lower.rs index ddc267b83..1deaa90f2 100644 --- a/crates/hir_def/src/body/lower.rs +++ b/crates/hir_def/src/body/lower.rs @@ -45,14 +45,14 @@ pub(crate) struct LowerCtx { } impl LowerCtx { - pub fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self { + pub(crate) fn new(db: &dyn DefDatabase, file_id: HirFileId) -> Self { LowerCtx { hygiene: Hygiene::new(db.upcast(), file_id) } } - pub fn with_hygiene(hygiene: &Hygiene) -> Self { + pub(crate) fn with_hygiene(hygiene: &Hygiene) -> Self { LowerCtx { hygiene: hygiene.clone() } } - pub fn lower_path(&self, ast: ast::Path) -> Option { + pub(crate) fn lower_path(&self, ast: ast::Path) -> Option { Path::from_src(ast, &self.hygiene) } } diff --git a/crates/hir_def/src/lib.rs b/crates/hir_def/src/lib.rs index f24a1dd77..1b22d1eec 100644 --- a/crates/hir_def/src/lib.rs +++ b/crates/hir_def/src/lib.rs @@ -486,12 +486,12 @@ impl AsMacroCall for InFile<&ast::MacroCall> { /// Helper wrapper for `AstId` with `ModPath` #[derive(Clone, Debug, Eq, PartialEq)] struct AstIdWithPath { - pub ast_id: AstId, - pub path: path::ModPath, + ast_id: AstId, + path: path::ModPath, } impl AstIdWithPath { - pub fn new(file_id: HirFileId, ast_id: FileAstId, path: path::ModPath) -> AstIdWithPath { + fn new(file_id: HirFileId, ast_id: FileAstId, path: path::ModPath) -> AstIdWithPath { AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path } } } diff --git a/crates/hir_def/src/nameres/collector.rs b/crates/hir_def/src/nameres/collector.rs index 1ff45d244..59b6644c3 100644 --- a/crates/hir_def/src/nameres/collector.rs +++ b/crates/hir_def/src/nameres/collector.rs @@ -122,13 +122,13 @@ enum ImportSource { #[derive(Clone, Debug, Eq, PartialEq)] struct Import { - pub path: ModPath, - pub alias: Option, - pub visibility: RawVisibility, - pub is_glob: bool, - pub is_prelude: bool, - pub is_extern_crate: bool, - pub is_macro_use: bool, + path: ModPath, + alias: Option, + visibility: RawVisibility, + is_glob: bool, + is_prelude: bool, + is_extern_crate: bool, + is_macro_use: bool, source: ImportSource, } diff --git a/crates/hir_def/src/test_db.rs b/crates/hir_def/src/test_db.rs index 2b36c824a..00fe711fe 100644 --- a/crates/hir_def/src/test_db.rs +++ b/crates/hir_def/src/test_db.rs @@ -25,7 +25,7 @@ use crate::{db::DefDatabase, ModuleDefId}; crate::db::DefDatabaseStorage )] #[derive(Default)] -pub struct TestDB { +pub(crate) struct TestDB { storage: salsa::Storage, events: Mutex>>, } @@ -72,7 +72,7 @@ impl FileLoader for TestDB { } impl TestDB { - pub fn module_for_file(&self, file_id: FileId) -> crate::ModuleId { + pub(crate) fn module_for_file(&self, 
file_id: FileId) -> crate::ModuleId { for &krate in self.relevant_crates(file_id).iter() { let crate_def_map = self.crate_def_map(krate); for (local_id, data) in crate_def_map.modules.iter() { @@ -84,13 +84,13 @@ impl TestDB { panic!("Can't find module for file") } - pub fn log(&self, f: impl FnOnce()) -> Vec { + pub(crate) fn log(&self, f: impl FnOnce()) -> Vec { *self.events.lock().unwrap() = Some(Vec::new()); f(); self.events.lock().unwrap().take().unwrap() } - pub fn log_executed(&self, f: impl FnOnce()) -> Vec { + pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec { let events = self.log(f); events .into_iter() @@ -105,7 +105,7 @@ impl TestDB { .collect() } - pub fn extract_annotations(&self) -> FxHashMap> { + pub(crate) fn extract_annotations(&self) -> FxHashMap> { let mut files = Vec::new(); let crate_graph = self.crate_graph(); for krate in crate_graph.iter() { @@ -129,7 +129,7 @@ impl TestDB { .collect() } - pub fn diagnostics(&self, mut cb: F) { + pub(crate) fn diagnostics(&self, mut cb: F) { let crate_graph = self.crate_graph(); for krate in crate_graph.iter() { let crate_def_map = self.crate_def_map(krate); @@ -148,7 +148,7 @@ impl TestDB { } } - pub fn check_diagnostics(&self) { + pub(crate) fn check_diagnostics(&self) { let db: &TestDB = self; let annotations = db.extract_annotations(); assert!(!annotations.is_empty()); diff --git a/crates/hir_ty/src/diagnostics/decl_check/case_conv.rs b/crates/hir_ty/src/diagnostics/decl_check/case_conv.rs index b0144a289..14e4d92f0 100644 --- a/crates/hir_ty/src/diagnostics/decl_check/case_conv.rs +++ b/crates/hir_ty/src/diagnostics/decl_check/case_conv.rs @@ -6,7 +6,7 @@ /// Converts an identifier to an UpperCamelCase form. /// Returns `None` if the string is already is UpperCamelCase. -pub fn to_camel_case(ident: &str) -> Option { +pub(crate) fn to_camel_case(ident: &str) -> Option { if is_camel_case(ident) { return None; } @@ -59,7 +59,7 @@ pub fn to_camel_case(ident: &str) -> Option { /// Converts an identifier to a lower_snake_case form. /// Returns `None` if the string is already in lower_snake_case. -pub fn to_lower_snake_case(ident: &str) -> Option { +pub(crate) fn to_lower_snake_case(ident: &str) -> Option { if is_lower_snake_case(ident) { return None; } else if is_upper_snake_case(ident) { @@ -71,7 +71,7 @@ pub fn to_lower_snake_case(ident: &str) -> Option { /// Converts an identifier to an UPPER_SNAKE_CASE form. /// Returns `None` if the string is already is UPPER_SNAKE_CASE. 
-pub fn to_upper_snake_case(ident: &str) -> Option { +pub(crate) fn to_upper_snake_case(ident: &str) -> Option { if is_upper_snake_case(ident) { return None; } else if is_lower_snake_case(ident) { diff --git a/crates/hir_ty/src/diagnostics/expr.rs b/crates/hir_ty/src/diagnostics/expr.rs index 278a4b947..434b19354 100644 --- a/crates/hir_ty/src/diagnostics/expr.rs +++ b/crates/hir_ty/src/diagnostics/expr.rs @@ -17,17 +17,10 @@ use crate::{ ApplicationTy, InferenceResult, Ty, TypeCtor, }; -pub use hir_def::{ - body::{ - scope::{ExprScopes, ScopeEntry, ScopeId}, - Body, BodySourceMap, ExprPtr, ExprSource, PatPtr, PatSource, - }, - expr::{ - ArithOp, Array, BinaryOp, BindingAnnotation, CmpOp, Expr, ExprId, Literal, LogicOp, - MatchArm, Ordering, Pat, PatId, RecordFieldPat, RecordLitField, Statement, UnaryOp, - }, - src::HasSource, - LocalFieldId, Lookup, VariantId, +pub(crate) use hir_def::{ + body::{Body, BodySourceMap}, + expr::{Expr, ExprId, MatchArm, Pat, PatId}, + LocalFieldId, VariantId, }; pub(super) struct ExprValidator<'a, 'b: 'a> { diff --git a/crates/hir_ty/src/diagnostics/unsafe_check.rs b/crates/hir_ty/src/diagnostics/unsafe_check.rs index 2da9688ca..6dc862826 100644 --- a/crates/hir_ty/src/diagnostics/unsafe_check.rs +++ b/crates/hir_ty/src/diagnostics/unsafe_check.rs @@ -59,12 +59,12 @@ impl<'a, 'b> UnsafeValidator<'a, 'b> { } } -pub struct UnsafeExpr { - pub expr: ExprId, - pub inside_unsafe_block: bool, +pub(crate) struct UnsafeExpr { + pub(crate) expr: ExprId, + pub(crate) inside_unsafe_block: bool, } -pub fn unsafe_expressions( +pub(crate) fn unsafe_expressions( db: &dyn HirDatabase, infer: &InferenceResult, def: DefWithBodyId, diff --git a/crates/hir_ty/src/infer.rs b/crates/hir_ty/src/infer.rs index 644ebd42d..f4c1fa2f2 100644 --- a/crates/hir_ty/src/infer.rs +++ b/crates/hir_ty/src/infer.rs @@ -214,9 +214,9 @@ struct InferenceContext<'a> { #[derive(Clone, Debug)] struct BreakableContext { - pub may_break: bool, - pub break_ty: Ty, - pub label: Option, + may_break: bool, + break_ty: Ty, + label: Option, } fn find_breakable<'c>( diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs index 8ac4cf89a..605951b10 100644 --- a/crates/hir_ty/src/infer/expr.rs +++ b/crates/hir_ty/src/infer/expr.rs @@ -107,7 +107,7 @@ impl<'a> InferenceContext<'a> { } } - pub fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { + pub(crate) fn callable_sig(&mut self, ty: &Ty, num_args: usize) -> Option<(Vec, Ty)> { match ty.callable_sig(self.db) { Some(sig) => Some((sig.params().to_vec(), sig.ret().clone())), None => self.callable_sig_from_fn_trait(ty, num_args), diff --git a/crates/hir_ty/src/infer/unify.rs b/crates/hir_ty/src/infer/unify.rs index 2e895d911..2406a7361 100644 --- a/crates/hir_ty/src/infer/unify.rs +++ b/crates/hir_ty/src/infer/unify.rs @@ -127,7 +127,7 @@ where } impl Canonicalized { - pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty { + pub(super) fn decanonicalize_ty(&self, mut ty: Ty) -> Ty { ty.walk_mut_binders( &mut |ty, binders| { if let &mut Ty::Bound(bound) = ty { @@ -141,7 +141,11 @@ impl Canonicalized { ty } - pub fn apply_solution(&self, ctx: &mut InferenceContext<'_>, solution: Canonical) { + pub(super) fn apply_solution( + &self, + ctx: &mut InferenceContext<'_>, + solution: Canonical, + ) { // the solution may contain new variables, which we need to convert to new inference vars let new_vars = Substs( solution @@ -164,7 +168,7 @@ impl Canonicalized { } } -pub fn unify(tys: &Canonical<(Ty, Ty)>) -> Option { +pub(crate) fn 
unify(tys: &Canonical<(Ty, Ty)>) -> Option { let mut table = InferenceTable::new(); let vars = Substs( tys.kinds @@ -199,41 +203,46 @@ pub(crate) struct InferenceTable { } impl InferenceTable { - pub fn new() -> Self { + pub(crate) fn new() -> Self { InferenceTable { var_unification_table: InPlaceUnificationTable::new() } } - pub fn new_type_var(&mut self) -> Ty { + pub(crate) fn new_type_var(&mut self) -> Ty { Ty::Infer(InferTy::TypeVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) } - pub fn new_integer_var(&mut self) -> Ty { + pub(crate) fn new_integer_var(&mut self) -> Ty { Ty::Infer(InferTy::IntVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) } - pub fn new_float_var(&mut self) -> Ty { + pub(crate) fn new_float_var(&mut self) -> Ty { Ty::Infer(InferTy::FloatVar(self.var_unification_table.new_key(TypeVarValue::Unknown))) } - pub fn new_maybe_never_type_var(&mut self) -> Ty { + pub(crate) fn new_maybe_never_type_var(&mut self) -> Ty { Ty::Infer(InferTy::MaybeNeverTypeVar( self.var_unification_table.new_key(TypeVarValue::Unknown), )) } - pub fn resolve_ty_completely(&mut self, ty: Ty) -> Ty { + pub(crate) fn resolve_ty_completely(&mut self, ty: Ty) -> Ty { self.resolve_ty_completely_inner(&mut Vec::new(), ty) } - pub fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty { + pub(crate) fn resolve_ty_as_possible(&mut self, ty: Ty) -> Ty { self.resolve_ty_as_possible_inner(&mut Vec::new(), ty) } - pub fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { + pub(crate) fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool { self.unify_inner(ty1, ty2, 0) } - pub fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool { + pub(crate) fn unify_substs( + &mut self, + substs1: &Substs, + substs2: &Substs, + depth: usize, + ) -> bool { substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth)) } @@ -331,7 +340,7 @@ impl InferenceTable { /// If `ty` is a type variable with known type, returns that type; /// otherwise, return ty. - pub fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> { + pub(crate) fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> { let mut ty = Cow::Borrowed(ty); // The type variable could resolve to a int/float variable. Hence try // resolving up to three times; each type of variable shouldn't occur diff --git a/crates/hir_ty/src/lib.rs b/crates/hir_ty/src/lib.rs index 768d95eff..5a8c97198 100644 --- a/crates/hir_ty/src/lib.rs +++ b/crates/hir_ty/src/lib.rs @@ -1,6 +1,5 @@ //! The type system. We currently use this to infer types for completion, hover //! information and various assists. - #[allow(unused)] macro_rules! 
eprintln { ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; @@ -1115,5 +1114,5 @@ pub struct ReturnTypeImplTraits { #[derive(Clone, PartialEq, Eq, Debug, Hash)] pub(crate) struct ReturnTypeImplTrait { - pub bounds: Binders>, + pub(crate) bounds: Binders>, } diff --git a/crates/hir_ty/src/test_db.rs b/crates/hir_ty/src/test_db.rs index 15b8435e9..22254b765 100644 --- a/crates/hir_ty/src/test_db.rs +++ b/crates/hir_ty/src/test_db.rs @@ -21,7 +21,7 @@ use test_utils::extract_annotations; crate::db::HirDatabaseStorage )] #[derive(Default)] -pub struct TestDB { +pub(crate) struct TestDB { storage: salsa::Storage, events: Mutex>>, } @@ -113,13 +113,13 @@ impl TestDB { } impl TestDB { - pub fn log(&self, f: impl FnOnce()) -> Vec { + pub(crate) fn log(&self, f: impl FnOnce()) -> Vec { *self.events.lock().unwrap() = Some(Vec::new()); f(); self.events.lock().unwrap().take().unwrap() } - pub fn log_executed(&self, f: impl FnOnce()) -> Vec { + pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec { let events = self.log(f); events .into_iter() diff --git a/crates/hir_ty/src/traits/chalk/tls.rs b/crates/hir_ty/src/traits/chalk/tls.rs index 3c9766550..75b16172e 100644 --- a/crates/hir_ty/src/traits/chalk/tls.rs +++ b/crates/hir_ty/src/traits/chalk/tls.rs @@ -8,12 +8,12 @@ use super::{from_chalk, Interner, TypeAliasAsAssocType}; use crate::{db::HirDatabase, CallableDefId}; use hir_def::{AdtId, AssocContainerId, Lookup, TypeAliasId}; -pub use unsafe_tls::{set_current_program, with_current_program}; +pub(crate) use unsafe_tls::{set_current_program, with_current_program}; -pub struct DebugContext<'a>(&'a dyn HirDatabase); +pub(crate) struct DebugContext<'a>(&'a dyn HirDatabase); impl DebugContext<'_> { - pub fn debug_struct_id( + pub(crate) fn debug_struct_id( &self, id: super::AdtId, f: &mut fmt::Formatter<'_>, @@ -26,7 +26,7 @@ impl DebugContext<'_> { write!(f, "{}", name) } - pub fn debug_trait_id( + pub(crate) fn debug_trait_id( &self, id: super::TraitId, fmt: &mut fmt::Formatter<'_>, @@ -36,7 +36,7 @@ impl DebugContext<'_> { write!(fmt, "{}", trait_data.name) } - pub fn debug_assoc_type_id( + pub(crate) fn debug_assoc_type_id( &self, id: super::AssocTypeId, fmt: &mut fmt::Formatter<'_>, @@ -51,7 +51,7 @@ impl DebugContext<'_> { write!(fmt, "{}::{}", trait_data.name, type_alias_data.name) } - pub fn debug_opaque_ty_id( + pub(crate) fn debug_opaque_ty_id( &self, opaque_ty_id: chalk_ir::OpaqueTyId, fmt: &mut fmt::Formatter<'_>, @@ -59,7 +59,7 @@ impl DebugContext<'_> { fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish() } - pub fn debug_alias( + pub(crate) fn debug_alias( &self, alias_ty: &AliasTy, fmt: &mut fmt::Formatter<'_>, @@ -70,7 +70,7 @@ impl DebugContext<'_> { } } - pub fn debug_projection_ty( + pub(crate) fn debug_projection_ty( &self, projection_ty: &chalk_ir::ProjectionTy, fmt: &mut fmt::Formatter<'_>, @@ -95,7 +95,7 @@ impl DebugContext<'_> { write!(fmt, ">::{}", type_alias_data.name) } - pub fn debug_opaque_ty( + pub(crate) fn debug_opaque_ty( &self, opaque_ty: &chalk_ir::OpaqueTy, fmt: &mut fmt::Formatter<'_>, @@ -103,7 +103,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", opaque_ty.opaque_ty_id) } - pub fn debug_ty( + pub(crate) fn debug_ty( &self, ty: &chalk_ir::Ty, fmt: &mut fmt::Formatter<'_>, @@ -111,7 +111,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", ty.data(&Interner)) } - pub fn debug_lifetime( + pub(crate) fn debug_lifetime( &self, lifetime: &Lifetime, fmt: &mut fmt::Formatter<'_>, @@ -119,7 +119,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", 
lifetime.data(&Interner)) } - pub fn debug_generic_arg( + pub(crate) fn debug_generic_arg( &self, parameter: &GenericArg, fmt: &mut fmt::Formatter<'_>, @@ -127,7 +127,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", parameter.data(&Interner).inner_debug()) } - pub fn debug_goal( + pub(crate) fn debug_goal( &self, goal: &Goal, fmt: &mut fmt::Formatter<'_>, @@ -136,7 +136,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", goal_data) } - pub fn debug_goals( + pub(crate) fn debug_goals( &self, goals: &Goals, fmt: &mut fmt::Formatter<'_>, @@ -144,7 +144,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", goals.debug(&Interner)) } - pub fn debug_program_clause_implication( + pub(crate) fn debug_program_clause_implication( &self, pci: &ProgramClauseImplication, fmt: &mut fmt::Formatter<'_>, @@ -152,7 +152,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", pci.debug(&Interner)) } - pub fn debug_substitution( + pub(crate) fn debug_substitution( &self, substitution: &chalk_ir::Substitution, fmt: &mut fmt::Formatter<'_>, @@ -160,7 +160,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", substitution.debug(&Interner)) } - pub fn debug_separator_trait_ref( + pub(crate) fn debug_separator_trait_ref( &self, separator_trait_ref: &chalk_ir::SeparatorTraitRef, fmt: &mut fmt::Formatter<'_>, @@ -168,7 +168,7 @@ impl DebugContext<'_> { write!(fmt, "{:?}", separator_trait_ref.debug(&Interner)) } - pub fn debug_fn_def_id( + pub(crate) fn debug_fn_def_id( &self, fn_def_id: chalk_ir::FnDefId, fmt: &mut fmt::Formatter<'_>, @@ -190,7 +190,7 @@ impl DebugContext<'_> { } } - pub fn debug_const( + pub(crate) fn debug_const( &self, _constant: &chalk_ir::Const, fmt: &mut fmt::Formatter<'_>, @@ -198,42 +198,42 @@ impl DebugContext<'_> { write!(fmt, "const") } - pub fn debug_variable_kinds( + pub(crate) fn debug_variable_kinds( &self, variable_kinds: &chalk_ir::VariableKinds, fmt: &mut fmt::Formatter<'_>, ) -> fmt::Result { write!(fmt, "{:?}", variable_kinds.as_slice(&Interner)) } - pub fn debug_variable_kinds_with_angles( + pub(crate) fn debug_variable_kinds_with_angles( &self, variable_kinds: &chalk_ir::VariableKinds, fmt: &mut fmt::Formatter<'_>, ) -> fmt::Result { write!(fmt, "{:?}", variable_kinds.inner_debug(&Interner)) } - pub fn debug_canonical_var_kinds( + pub(crate) fn debug_canonical_var_kinds( &self, canonical_var_kinds: &chalk_ir::CanonicalVarKinds, fmt: &mut fmt::Formatter<'_>, ) -> fmt::Result { write!(fmt, "{:?}", canonical_var_kinds.as_slice(&Interner)) } - pub fn debug_program_clause( + pub(crate) fn debug_program_clause( &self, clause: &chalk_ir::ProgramClause, fmt: &mut fmt::Formatter<'_>, ) -> fmt::Result { write!(fmt, "{:?}", clause.data(&Interner)) } - pub fn debug_program_clauses( + pub(crate) fn debug_program_clauses( &self, clauses: &chalk_ir::ProgramClauses, fmt: &mut fmt::Formatter<'_>, ) -> fmt::Result { write!(fmt, "{:?}", clauses.as_slice(&Interner)) } - pub fn debug_quantified_where_clauses( + pub(crate) fn debug_quantified_where_clauses( &self, clauses: &chalk_ir::QuantifiedWhereClauses, fmt: &mut fmt::Formatter<'_>, @@ -249,7 +249,7 @@ mod unsafe_tls { scoped_thread_local!(static PROGRAM: DebugContext); - pub fn with_current_program( + pub(crate) fn with_current_program( op: impl for<'a> FnOnce(Option<&'a DebugContext<'a>>) -> R, ) -> R { if PROGRAM.is_set() { @@ -259,7 +259,7 @@ mod unsafe_tls { } } - pub fn set_current_program(p: &dyn HirDatabase, op: OP) -> R + pub(crate) fn set_current_program(p: &dyn HirDatabase, op: OP) -> R where OP: FnOnce() -> R, { diff --git 
a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs index b698b9832..39a8eefbd 100644 --- a/crates/mbe/src/mbe_expander/matcher.rs +++ b/crates/mbe/src/mbe_expander/matcher.rs @@ -61,16 +61,16 @@ macro_rules! err { #[derive(Debug, Default)] pub(super) struct Match { - pub bindings: Bindings, + pub(super) bindings: Bindings, /// We currently just keep the first error and count the rest to compare matches. - pub err: Option, - pub err_count: usize, + pub(super) err: Option, + pub(super) err_count: usize, /// How many top-level token trees were left to match. - pub unmatched_tts: usize, + pub(super) unmatched_tts: usize, } impl Match { - pub fn add_err(&mut self, err: ExpandError) { + pub(super) fn add_err(&mut self, err: ExpandError) { let prev_err = self.err.take(); self.err = prev_err.or(Some(err)); self.err_count += 1; diff --git a/crates/mbe/src/subtree_source.rs b/crates/mbe/src/subtree_source.rs index 396ce8b16..38237cdcf 100644 --- a/crates/mbe/src/subtree_source.rs +++ b/crates/mbe/src/subtree_source.rs @@ -7,9 +7,9 @@ use tt::buffer::{Cursor, TokenBuffer}; #[derive(Debug, Clone, Eq, PartialEq)] struct TtToken { - pub kind: SyntaxKind, - pub is_joint_to_next: bool, - pub text: SmolStr, + kind: SyntaxKind, + is_joint_to_next: bool, + text: SmolStr, } pub(crate) struct SubtreeTokenSource<'a> { @@ -30,7 +30,7 @@ impl<'a> SubtreeTokenSource<'a> { } impl<'a> SubtreeTokenSource<'a> { - pub fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { + pub(crate) fn new(buffer: &'a TokenBuffer) -> SubtreeTokenSource<'a> { let cursor = buffer.begin(); let mut res = SubtreeTokenSource { diff --git a/crates/proc_macro_api/src/process.rs b/crates/proc_macro_api/src/process.rs index 51ffcaa78..907cb3db7 100644 --- a/crates/proc_macro_api/src/process.rs +++ b/crates/proc_macro_api/src/process.rs @@ -30,7 +30,7 @@ pub(crate) struct ProcMacroProcessThread { } impl ProcMacroProcessSrv { - pub fn run( + pub(crate) fn run( process_path: PathBuf, args: impl IntoIterator>, ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> { @@ -48,7 +48,7 @@ impl ProcMacroProcessSrv { Ok((thread, srv)) } - pub fn find_proc_macros( + pub(crate) fn find_proc_macros( &self, dylib_path: &Path, ) -> Result, tt::ExpansionError> { @@ -58,7 +58,7 @@ impl ProcMacroProcessSrv { Ok(result.macros) } - pub fn custom_derive( + pub(crate) fn custom_derive( &self, dylib_path: &Path, subtree: &Subtree, @@ -75,7 +75,7 @@ impl ProcMacroProcessSrv { Ok(result.expansion) } - pub fn send_task(&self, req: Request) -> Result + pub(crate) fn send_task(&self, req: Request) -> Result where R: TryFrom, { diff --git a/crates/proc_macro_api/src/rpc.rs b/crates/proc_macro_api/src/rpc.rs index 47624163e..203109ca4 100644 --- a/crates/proc_macro_api/src/rpc.rs +++ b/crates/proc_macro_api/src/rpc.rs @@ -75,18 +75,18 @@ struct TokenIdDef(u32); #[serde(remote = "Delimiter")] struct DelimiterDef { #[serde(with = "TokenIdDef")] - pub id: TokenId, + id: TokenId, #[serde(with = "DelimiterKindDef")] - pub kind: DelimiterKind, + kind: DelimiterKind, } #[derive(Serialize, Deserialize)] #[serde(remote = "Subtree")] struct SubtreeDef { #[serde(default, with = "opt_delimiter_def")] - pub delimiter: Option, + delimiter: Option, #[serde(with = "vec_token_tree")] - pub token_trees: Vec, + token_trees: Vec, } #[derive(Serialize, Deserialize)] @@ -112,19 +112,19 @@ enum LeafDef { #[derive(Serialize, Deserialize)] #[serde(remote = "Literal")] struct LiteralDef { - pub text: SmolStr, + text: SmolStr, #[serde(with = "TokenIdDef")] 
- pub id: TokenId, + id: TokenId, } #[derive(Serialize, Deserialize)] #[serde(remote = "Punct")] struct PunctDef { - pub char: char, + char: char, #[serde(with = "SpacingDef")] - pub spacing: Spacing, + spacing: Spacing, #[serde(with = "TokenIdDef")] - pub id: TokenId, + id: TokenId, } #[derive(Serialize, Deserialize)] @@ -137,16 +137,16 @@ enum SpacingDef { #[derive(Serialize, Deserialize)] #[serde(remote = "Ident")] struct IdentDef { - pub text: SmolStr, + text: SmolStr, #[serde(with = "TokenIdDef")] - pub id: TokenId, + id: TokenId, } mod opt_delimiter_def { use super::{Delimiter, DelimiterDef}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; - pub fn serialize(value: &Option, serializer: S) -> Result + pub(super) fn serialize(value: &Option, serializer: S) -> Result where S: Serializer, { @@ -155,7 +155,7 @@ mod opt_delimiter_def { value.as_ref().map(Helper).serialize(serializer) } - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + pub(super) fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> where D: Deserializer<'de>, { @@ -170,7 +170,7 @@ mod opt_subtree_def { use super::{Subtree, SubtreeDef}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; - pub fn serialize(value: &Option, serializer: S) -> Result + pub(super) fn serialize(value: &Option, serializer: S) -> Result where S: Serializer, { @@ -179,7 +179,7 @@ mod opt_subtree_def { value.as_ref().map(Helper).serialize(serializer) } - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + pub(super) fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> where D: Deserializer<'de>, { @@ -194,7 +194,7 @@ mod vec_token_tree { use super::{TokenTree, TokenTreeDef}; use serde::{ser::SerializeSeq, Deserialize, Deserializer, Serialize, Serializer}; - pub fn serialize(value: &Vec, serializer: S) -> Result + pub(super) fn serialize(value: &Vec, serializer: S) -> Result where S: Serializer, { @@ -209,7 +209,7 @@ mod vec_token_tree { seq.end() } - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + pub(super) fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> where D: Deserializer<'de>, { diff --git a/crates/proc_macro_srv/src/lib.rs b/crates/proc_macro_srv/src/lib.rs index 7e4e4ad50..6e890f8e2 100644 --- a/crates/proc_macro_srv/src/lib.rs +++ b/crates/proc_macro_srv/src/lib.rs @@ -9,6 +9,7 @@ //! RA than `proc-macro2` token stream. //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)… +#![allow(unreachable_pub)] #[allow(dead_code)] #[doc(hidden)] diff --git a/crates/profile/src/hprof.rs b/crates/profile/src/hprof.rs index 934cc8e37..8957ea016 100644 --- a/crates/profile/src/hprof.rs +++ b/crates/profile/src/hprof.rs @@ -27,7 +27,7 @@ pub fn init_from(spec: &str) { filter.install(); } -pub type Label = &'static str; +type Label = &'static str; /// This function starts a profiling scope in the current execution stack with a given description. /// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop. 
@@ -173,7 +173,7 @@ impl ProfileStack { true } - pub fn pop(&mut self, label: Label, detail: Option) { + fn pop(&mut self, label: Label, detail: Option) { let start = self.starts.pop().unwrap(); let duration = start.elapsed(); self.messages.finish(Message { duration, label, detail }); diff --git a/crates/profile/src/tree.rs b/crates/profile/src/tree.rs index 096f58511..3fac1f36c 100644 --- a/crates/profile/src/tree.rs +++ b/crates/profile/src/tree.rs @@ -4,15 +4,15 @@ use std::ops; use arena::Arena; #[derive(Default)] -pub struct Tree { +pub(crate) struct Tree { nodes: Arena>, current_path: Vec<(Idx, Option>)>, } -pub type Idx = arena::Idx>; +pub(crate) type Idx = arena::Idx>; impl Tree { - pub fn start(&mut self) + pub(crate) fn start(&mut self) where T: Default, { @@ -30,19 +30,19 @@ impl Tree { self.current_path.push((me, None)); } - pub fn finish(&mut self, data: T) { + pub(crate) fn finish(&mut self, data: T) { let (me, _last_child) = self.current_path.pop().unwrap(); self.nodes[me].data = data; } - pub fn root(&self) -> Option> { + pub(crate) fn root(&self) -> Option> { self.nodes.iter().next().map(|(idx, _)| idx) } - pub fn children(&self, idx: Idx) -> impl Iterator> + '_ { + pub(crate) fn children(&self, idx: Idx) -> impl Iterator> + '_ { NodeIter { nodes: &self.nodes, next: self.nodes[idx].first_child } } - pub fn clear(&mut self) { + pub(crate) fn clear(&mut self) { self.nodes.clear(); self.current_path.clear(); } @@ -55,7 +55,7 @@ impl ops::Index> for Tree { } } -pub struct Node { +pub(crate) struct Node { data: T, first_child: Option>, next_sibling: Option>, diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs index d536bb1e7..8a0e3d27b 100644 --- a/crates/syntax/src/ast.rs +++ b/crates/syntax/src/ast.rs @@ -17,7 +17,7 @@ use crate::{ pub use self::{ expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp}, - generated::*, + generated::{nodes::*, tokens::*}, node_ext::{ AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, StructKind, TypeBoundKind, VisibilityKind, diff --git a/crates/syntax/src/ast/generated.rs b/crates/syntax/src/ast/generated.rs index 4a6f41ee7..843b43cf0 100644 --- a/crates/syntax/src/ast/generated.rs +++ b/crates/syntax/src/ast/generated.rs @@ -1,8 +1,8 @@ //! This file is actually hand-written, but the submodules are indeed generated. 
#[rustfmt::skip] -mod nodes; +pub(crate) mod nodes; #[rustfmt::skip] -mod tokens; +pub(crate) mod tokens; use crate::{ AstNode, @@ -10,7 +10,7 @@ use crate::{ SyntaxNode, }; -pub use {nodes::*, tokens::*}; +pub(crate) use nodes::*; // Stmt is the only nested enum, so it's easier to just hand-write it impl AstNode for Stmt { diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 849a1cdd6..e753b11bb 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -46,16 +46,19 @@ use text_edit::Indel; pub use crate::{ algo::InsertPosition, ast::{AstNode, AstToken}, - parsing::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token}, + parsing::lexer::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token}, ptr::{AstPtr, SyntaxNodePtr}, syntax_error::SyntaxError, syntax_node::{ - Direction, GreenNode, NodeOrToken, SyntaxElement, SyntaxElementChildren, SyntaxNode, - SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder, + SyntaxElement, SyntaxElementChildren, SyntaxNode, SyntaxNodeChildren, SyntaxToken, + SyntaxTreeBuilder, }, }; pub use parser::{SyntaxKind, T}; -pub use rowan::{SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent}; +pub use rowan::{ + Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset, + WalkEvent, +}; /// `Parse` is the result of the parsing: a syntax tree and a collection of /// errors. diff --git a/crates/syntax/src/parsing.rs b/crates/syntax/src/parsing.rs index 68a39eb21..333bde54a 100644 --- a/crates/syntax/src/parsing.rs +++ b/crates/syntax/src/parsing.rs @@ -1,7 +1,7 @@ //! Lexing, bridging to parser (which does the actual parsing) and //! incremental reparsing. -mod lexer; +pub(crate) mod lexer; mod text_token_source; mod text_tree_sink; mod reparsing; @@ -10,7 +10,7 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode}; use text_token_source::TextTokenSource; use text_tree_sink::TextTreeSink; -pub use lexer::*; +pub(crate) use lexer::*; pub(crate) use self::reparsing::incremental_reparse; use parser::SyntaxKind; diff --git a/crates/syntax/src/parsing/text_token_source.rs b/crates/syntax/src/parsing/text_token_source.rs index df866dc2b..0614194a5 100644 --- a/crates/syntax/src/parsing/text_token_source.rs +++ b/crates/syntax/src/parsing/text_token_source.rs @@ -65,7 +65,7 @@ fn mk_token(pos: usize, token_offset_pairs: &[(Token, TextSize)]) -> parser::Tok impl<'t> TextTokenSource<'t> { /// Generate input from tokens(expect comment and whitespace). 
- pub fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { + pub(crate) fn new(text: &'t str, raw_tokens: &'t [Token]) -> TextTokenSource<'t> { let token_offset_pairs: Vec<_> = raw_tokens .iter() .filter_map({ diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs index b2abcbfbb..cc30138fa 100644 --- a/crates/syntax/src/syntax_node.rs +++ b/crates/syntax/src/syntax_node.rs @@ -10,9 +10,7 @@ use rowan::{GreenNodeBuilder, Language}; use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize}; -pub use rowan::GreenNode; - -pub(crate) use rowan::GreenToken; +pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken}; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum RustLanguage {} @@ -34,8 +32,6 @@ pub type SyntaxElement = rowan::SyntaxElement; pub type SyntaxNodeChildren = rowan::SyntaxNodeChildren; pub type SyntaxElementChildren = rowan::SyntaxElementChildren; -pub use rowan::{Direction, NodeOrToken}; - #[derive(Default)] pub struct SyntaxTreeBuilder { errors: Vec, diff --git a/crates/vfs/src/vfs_path.rs b/crates/vfs/src/vfs_path.rs index 022a0be1e..7a213fc3e 100644 --- a/crates/vfs/src/vfs_path.rs +++ b/crates/vfs/src/vfs_path.rs @@ -287,7 +287,7 @@ impl VirtualPath { Some(res) } - pub fn name_and_extension(&self) -> Option<(&str, Option<&str>)> { + pub(crate) fn name_and_extension(&self) -> Option<(&str, Option<&str>)> { let file_path = if self.0.ends_with('/') { &self.0[..&self.0.len() - 1] } else { &self.0 }; let file_name = match file_path.rfind('/') { Some(position) => &file_path[position + 1..], diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs index 19ae949d4..6e4abd10c 100644 --- a/xtask/src/codegen/gen_parser_tests.rs +++ b/xtask/src/codegen/gen_parser_tests.rs @@ -45,15 +45,15 @@ pub fn generate_parser_tests(mode: Mode) -> Result<()> { #[derive(Debug)] struct Test { - pub name: String, - pub text: String, - pub ok: bool, + name: String, + text: String, + ok: bool, } #[derive(Default, Debug)] struct Tests { - pub ok: HashMap, - pub err: HashMap, + ok: HashMap, + err: HashMap, } fn collect_tests(s: &str) -> Vec { -- cgit v1.2.3
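The patch itself is mechanical, so a short illustration of the lint it prepares for may help. The sketch below is not code from this patch; the module and function names are invented. It shows what `unreachable_pub` flags and why narrowing `pub` to `pub(crate)`, the pattern applied throughout the diff, satisfies it. Note that the CI workflows above only stage the flag (`RUSTFLAGS: "-D warnings " # -W unreachable-pub"`, with the lint part still commented out); the crate-level attribute shown here is an equivalent way to turn it on.

```rust
// lib.rs of a hypothetical crate -- an illustration, not rust-analyzer code.
#![warn(unreachable_pub)] // CI could do the same with RUSTFLAGS="-D warnings -W unreachable-pub"

mod helpers {
    // Before: `pub fn join(...)`. The lint warns, because the private
    // `helpers` module already makes the function unreachable from outside
    // the crate, so `pub` overstates its visibility.
    // After, the form used throughout this patch:
    pub(crate) fn join(parts: &[&str]) -> String {
        parts.join("::")
    }
}

pub mod api {
    // This item really is part of the crate's public API, so plain `pub` stays.
    pub fn describe(path: &[&str]) -> String {
        crate::helpers::join(path)
    }
}
```

With this in place, `pub` regains its intended meaning ("part of the crate's public API"), and anything that is only crate-internal has to say so explicitly.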
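A second invented sketch, for the hunks that use `pub(super)` rather than `pub(crate)` (for example the `Match` fields and `add_err` in crates/mbe/src/mbe_expander/matcher.rs, or the serde helper modules in crates/proc_macro_api/src/rpc.rs): `pub(super)` exposes an item to the parent module only, the tightest visibility that still lets the parent use it.

```rust
// Invented module names; only the visibility shapes mirror the patch.
pub(crate) mod expander {
    pub(crate) fn expand(input: &str) -> String {
        // `matches` is reachable here because `matcher` is our child module.
        matcher::matches(input).to_string()
    }

    mod matcher {
        // Visible to the parent (`expander`) only; the rest of the crate,
        // and other crates, cannot name it.
        pub(super) fn matches(input: &str) -> bool {
            !input.is_empty()
        }
    }
}
```

Choosing `pub(super)` over `pub(crate)` in cases like these keeps the helper an implementation detail of its parent module rather than a crate-wide one.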