From ed20a857f485a471369cd99b843af19a4d875ad0 Mon Sep 17 00:00:00 2001
From: Aleksey Kladov
Date: Thu, 13 Aug 2020 16:25:38 +0200
Subject: Rename ra_db -> base_db

---
 crates/base_db/Cargo.toml          |  21 ++
 crates/base_db/src/cancellation.rs |  48 ++++
 crates/base_db/src/fixture.rs      | 228 +++++++++++++++++++
 crates/base_db/src/input.rs        | 453 +++++++++++++++++++++++++++++++++++++
 crates/base_db/src/lib.rs          | 167 ++++++++++++++
 5 files changed, 917 insertions(+)
 create mode 100644 crates/base_db/Cargo.toml
 create mode 100644 crates/base_db/src/cancellation.rs
 create mode 100644 crates/base_db/src/fixture.rs
 create mode 100644 crates/base_db/src/input.rs
 create mode 100644 crates/base_db/src/lib.rs
(limited to 'crates/base_db')

diff --git a/crates/base_db/Cargo.toml b/crates/base_db/Cargo.toml
new file mode 100644
index 000000000..7347d7528
--- /dev/null
+++ b/crates/base_db/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "base_db"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[lib]
+doctest = false
+
+[dependencies]
+salsa = "0.15.2"
+rustc-hash = "1.1.0"
+
+syntax = { path = "../syntax" }
+cfg = { path = "../cfg" }
+profile = { path = "../profile" }
+tt = { path = "../tt" }
+test_utils = { path = "../test_utils" }
+vfs = { path = "../vfs" }
+stdx = { path = "../stdx" }
diff --git a/crates/base_db/src/cancellation.rs b/crates/base_db/src/cancellation.rs
new file mode 100644
index 000000000..7420a1976
--- /dev/null
+++ b/crates/base_db/src/cancellation.rs
@@ -0,0 +1,48 @@
+//! Utility types to support cancellation.
+//!
+//! In a typical IDE use-case, requests and modification happen concurrently, as
+//! in the following scenario:
+//!
+//! * user types a character,
+//! * a syntax highlighting process is started
+//! * user types next character, while syntax highlighting *is still in
+//!   progress*.
+//!
+//! In this situation, we want to react to modification as quickly as possible.
+//! At the same time, in-progress results are not very interesting, because they
+//! are invalidated by the edit anyway. So, we first cancel all in-flight
+//! requests, and then apply modification knowing that it won't interfere with
+//! any background processing (this bit is handled by salsa, see the
+//! `BaseDatabase::check_canceled` method).
+
+/// An "error" signifying that the operation was canceled.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct Canceled {
+    _private: (),
+}
+
+impl Canceled {
+    pub(crate) fn new() -> Canceled {
+        Canceled { _private: () }
+    }
+
+    pub fn throw() -> ! {
+        // We use resume and not panic here to avoid running the panic
+        // hook (that is, to avoid collecting and printing backtrace).
+        std::panic::resume_unwind(Box::new(Canceled::new()))
+    }
+}
+
+impl std::fmt::Display for Canceled {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        fmt.write_str("canceled")
+    }
+}
+
+impl std::fmt::Debug for Canceled {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(fmt, "Canceled")
+    }
+}
+
+impl std::error::Error for Canceled {}
diff --git a/crates/base_db/src/fixture.rs b/crates/base_db/src/fixture.rs
new file mode 100644
index 000000000..5ff8ead0e
--- /dev/null
+++ b/crates/base_db/src/fixture.rs
@@ -0,0 +1,228 @@
+//! Fixtures are strings containing rust source code with optional metadata.
+//! A fixture without metadata is parsed into a single source file.
+//! Use this to test functionality local to one file.
+//!
+//! Simple Example:
+//! ```
+//! r#"
+//!
fn main() { +//! println!("Hello World") +//! } +//! "# +//! ``` +//! +//! Metadata can be added to a fixture after a `//-` comment. +//! The basic form is specifying filenames, +//! which is also how to define multiple files in a single test fixture +//! +//! Example using two files in the same crate: +//! ``` +//! " +//! //- /main.rs +//! mod foo; +//! fn main() { +//! foo::bar(); +//! } +//! +//! //- /foo.rs +//! pub fn bar() {} +//! " +//! ``` +//! +//! Example using two crates with one file each, with one crate depending on the other: +//! ``` +//! r#" +//! //- /main.rs crate:a deps:b +//! fn main() { +//! b::foo(); +//! } +//! //- /lib.rs crate:b +//! pub fn b() { +//! println!("Hello World") +//! } +//! "# +//! ``` +//! +//! Metadata allows specifying all settings and variables +//! that are available in a real rust project: +//! - crate names via `crate:cratename` +//! - dependencies via `deps:dep1,dep2` +//! - configuration settings via `cfg:dbg=false,opt_level=2` +//! - environment variables via `env:PATH=/bin,RUST_LOG=debug` +//! +//! Example using all available metadata: +//! ``` +//! " +//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo +//! fn insert_source_code_here() {} +//! " +//! ``` +use std::{str::FromStr, sync::Arc}; + +use cfg::CfgOptions; +use rustc_hash::FxHashMap; +use test_utils::{extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER}; +use vfs::{file_set::FileSet, VfsPath}; + +use crate::{ + input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, SourceDatabaseExt, + SourceRoot, SourceRootId, +}; + +pub const WORKSPACE: SourceRootId = SourceRootId(0); + +pub trait WithFixture: Default + SourceDatabaseExt + 'static { + fn with_single_file(text: &str) -> (Self, FileId) { + let mut db = Self::default(); + let (_, files) = with_files(&mut db, text); + assert_eq!(files.len(), 1); + (db, files[0]) + } + + fn with_files(ra_fixture: &str) -> Self { + let mut db = Self::default(); + let (pos, _) = with_files(&mut db, ra_fixture); + assert!(pos.is_none()); + db + } + + fn with_position(ra_fixture: &str) -> (Self, FilePosition) { + let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture); + let offset = match range_or_offset { + RangeOrOffset::Range(_) => panic!(), + RangeOrOffset::Offset(it) => it, + }; + (db, FilePosition { file_id, offset }) + } + + fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) { + let mut db = Self::default(); + let (pos, _) = with_files(&mut db, ra_fixture); + let (file_id, range_or_offset) = pos.unwrap(); + (db, file_id, range_or_offset) + } + + fn test_crate(&self) -> CrateId { + let crate_graph = self.crate_graph(); + let mut it = crate_graph.iter(); + let res = it.next().unwrap(); + assert!(it.next().is_none()); + res + } +} + +impl WithFixture for DB {} + +fn with_files( + db: &mut dyn SourceDatabaseExt, + fixture: &str, +) -> (Option<(FileId, RangeOrOffset)>, Vec) { + let fixture = Fixture::parse(fixture); + + let mut files = Vec::new(); + let mut crate_graph = CrateGraph::default(); + let mut crates = FxHashMap::default(); + let mut crate_deps = Vec::new(); + let mut default_crate_root: Option = None; + + let mut file_set = FileSet::default(); + let source_root_id = WORKSPACE; + let source_root_prefix = "/".to_string(); + let mut file_id = FileId(0); + + let mut file_position = None; + + for entry in fixture { + let text = if entry.text.contains(CURSOR_MARKER) { + let (range_or_offset, text) = extract_range_or_offset(&entry.text); + 
assert!(file_position.is_none()); + file_position = Some((file_id, range_or_offset)); + text.to_string() + } else { + entry.text.clone() + }; + + let meta = FileMeta::from(entry); + assert!(meta.path.starts_with(&source_root_prefix)); + + if let Some(krate) = meta.krate { + let crate_id = crate_graph.add_crate_root( + file_id, + meta.edition, + Some(krate.clone()), + meta.cfg, + meta.env, + Default::default(), + ); + let crate_name = CrateName::new(&krate).unwrap(); + let prev = crates.insert(crate_name.clone(), crate_id); + assert!(prev.is_none()); + for dep in meta.deps { + let dep = CrateName::new(&dep).unwrap(); + crate_deps.push((crate_name.clone(), dep)) + } + } else if meta.path == "/main.rs" || meta.path == "/lib.rs" { + assert!(default_crate_root.is_none()); + default_crate_root = Some(file_id); + } + + db.set_file_text(file_id, Arc::new(text)); + db.set_file_source_root(file_id, source_root_id); + let path = VfsPath::new_virtual_path(meta.path); + file_set.insert(file_id, path.into()); + files.push(file_id); + file_id.0 += 1; + } + + if crates.is_empty() { + let crate_root = default_crate_root.unwrap(); + crate_graph.add_crate_root( + crate_root, + Edition::Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + } else { + for (from, to) in crate_deps { + let from_id = crates[&from]; + let to_id = crates[&to]; + crate_graph.add_dep(from_id, CrateName::new(&to).unwrap(), to_id).unwrap(); + } + } + + db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set))); + db.set_crate_graph(Arc::new(crate_graph)); + + (file_position, files) +} + +struct FileMeta { + path: String, + krate: Option, + deps: Vec, + cfg: CfgOptions, + edition: Edition, + env: Env, +} + +impl From for FileMeta { + fn from(f: Fixture) -> FileMeta { + let mut cfg = CfgOptions::default(); + f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into())); + f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into())); + + FileMeta { + path: f.path, + krate: f.krate, + deps: f.deps, + cfg, + edition: f + .edition + .as_ref() + .map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()), + env: f.env.into_iter().collect(), + } + } +} diff --git a/crates/base_db/src/input.rs b/crates/base_db/src/input.rs new file mode 100644 index 000000000..f3d65cdf0 --- /dev/null +++ b/crates/base_db/src/input.rs @@ -0,0 +1,453 @@ +//! This module specifies the input to rust-analyzer. In some sense, this is +//! **the** most important module, because all other fancy stuff is strictly +//! derived from this input. +//! +//! Note that neither this module, nor any other part of the analyzer's core do +//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how +//! actual IO is done and lowered to input. + +use std::{fmt, iter::FromIterator, ops, str::FromStr, sync::Arc}; + +use cfg::CfgOptions; +use rustc_hash::{FxHashMap, FxHashSet}; +use syntax::SmolStr; +use tt::TokenExpander; +use vfs::file_set::FileSet; + +pub use vfs::FileId; + +/// Files are grouped into source roots. A source root is a directory on the +/// file systems which is watched for changes. Typically it corresponds to a +/// Rust crate. Source roots *might* be nested: in this case, a file belongs to +/// the nearest enclosing source root. Paths to files are always relative to a +/// source root, and the analyzer does not know the root path of the source root at +/// all. So, a file from one source root can't refer to a file in another source +/// root by path. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] +pub struct SourceRootId(pub u32); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct SourceRoot { + /// Sysroot or crates.io library. + /// + /// Libraries are considered mostly immutable, this assumption is used to + /// optimize salsa's query structure + pub is_library: bool, + pub(crate) file_set: FileSet, +} + +impl SourceRoot { + pub fn new_local(file_set: FileSet) -> SourceRoot { + SourceRoot { is_library: false, file_set } + } + pub fn new_library(file_set: FileSet) -> SourceRoot { + SourceRoot { is_library: true, file_set } + } + pub fn iter(&self) -> impl Iterator + '_ { + self.file_set.iter() + } +} + +/// `CrateGraph` is a bit of information which turns a set of text files into a +/// number of Rust crates. Each crate is defined by the `FileId` of its root module, +/// the set of cfg flags (not yet implemented) and the set of dependencies. Note +/// that, due to cfg's, there might be several crates for a single `FileId`! As +/// in the rust-lang proper, a crate does not have a name. Instead, names are +/// specified on dependency edges. That is, a crate might be known under +/// different names in different dependent crates. +/// +/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust +/// language proper, not a concept of the build system. In practice, we get +/// `CrateGraph` by lowering `cargo metadata` output. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct CrateGraph { + arena: FxHashMap, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct CrateId(pub u32); + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct CrateName(SmolStr); + +impl CrateName { + /// Creates a crate name, checking for dashes in the string provided. + /// Dashes are not allowed in the crate names, + /// hence the input string is returned as `Err` for those cases. + pub fn new(name: &str) -> Result { + if name.contains('-') { + Err(name) + } else { + Ok(Self(SmolStr::new(name))) + } + } + + /// Creates a crate name, unconditionally replacing the dashes with underscores. + pub fn normalize_dashes(name: &str) -> CrateName { + Self(SmolStr::new(name.replace('-', "_"))) + } +} + +impl fmt::Display for CrateName { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}", self.0) + } +} + +impl ops::Deref for CrateName { + type Target = str; + fn deref(&self) -> &Self::Target { + &*self.0 + } +} + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub struct ProcMacroId(pub u32); + +#[derive(Debug, Clone)] +pub struct ProcMacro { + pub name: SmolStr, + pub expander: Arc, +} + +impl Eq for ProcMacro {} +impl PartialEq for ProcMacro { + fn eq(&self, other: &ProcMacro) -> bool { + self.name == other.name && Arc::ptr_eq(&self.expander, &other.expander) + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct CrateData { + pub root_file_id: FileId, + pub edition: Edition, + /// The name to display to the end user. + /// This actual crate name can be different in a particular dependent crate + /// or may even be missing for some cases, such as a dummy crate for the code snippet. 
+ pub display_name: Option, + pub cfg_options: CfgOptions, + pub env: Env, + pub dependencies: Vec, + pub proc_macro: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +pub enum Edition { + Edition2018, + Edition2015, +} + +#[derive(Default, Debug, Clone, PartialEq, Eq)] +pub struct Env { + entries: FxHashMap, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct Dependency { + pub crate_id: CrateId, + pub name: CrateName, +} + +impl CrateGraph { + pub fn add_crate_root( + &mut self, + file_id: FileId, + edition: Edition, + display_name: Option, + cfg_options: CfgOptions, + env: Env, + proc_macro: Vec<(SmolStr, Arc)>, + ) -> CrateId { + let proc_macro = + proc_macro.into_iter().map(|(name, it)| ProcMacro { name, expander: it }).collect(); + + let data = CrateData { + root_file_id: file_id, + edition, + display_name, + cfg_options, + env, + proc_macro, + dependencies: Vec::new(), + }; + let crate_id = CrateId(self.arena.len() as u32); + let prev = self.arena.insert(crate_id, data); + assert!(prev.is_none()); + crate_id + } + + pub fn add_dep( + &mut self, + from: CrateId, + name: CrateName, + to: CrateId, + ) -> Result<(), CyclicDependenciesError> { + if self.dfs_find(from, to, &mut FxHashSet::default()) { + return Err(CyclicDependenciesError); + } + self.arena.get_mut(&from).unwrap().add_dep(name, to); + Ok(()) + } + + pub fn is_empty(&self) -> bool { + self.arena.is_empty() + } + + pub fn iter(&self) -> impl Iterator + '_ { + self.arena.keys().copied() + } + + /// Returns an iterator over all transitive dependencies of the given crate. + pub fn transitive_deps(&self, of: CrateId) -> impl Iterator + '_ { + let mut worklist = vec![of]; + let mut deps = FxHashSet::default(); + + while let Some(krate) = worklist.pop() { + if !deps.insert(krate) { + continue; + } + + worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id)); + } + + deps.remove(&of); + deps.into_iter() + } + + // FIXME: this only finds one crate with the given root; we could have multiple + pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option { + let (&crate_id, _) = + self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?; + Some(crate_id) + } + + /// Extends this crate graph by adding a complete disjoint second crate + /// graph. + /// + /// The ids of the crates in the `other` graph are shifted by the return + /// amount. 
+ pub fn extend(&mut self, other: CrateGraph) -> u32 { + let start = self.arena.len() as u32; + self.arena.extend(other.arena.into_iter().map(|(id, mut data)| { + let new_id = id.shift(start); + for dep in &mut data.dependencies { + dep.crate_id = dep.crate_id.shift(start); + } + (new_id, data) + })); + start + } + + fn dfs_find(&self, target: CrateId, from: CrateId, visited: &mut FxHashSet) -> bool { + if !visited.insert(from) { + return false; + } + + if target == from { + return true; + } + + for dep in &self[from].dependencies { + let crate_id = dep.crate_id; + if self.dfs_find(target, crate_id, visited) { + return true; + } + } + false + } +} + +impl ops::Index for CrateGraph { + type Output = CrateData; + fn index(&self, crate_id: CrateId) -> &CrateData { + &self.arena[&crate_id] + } +} + +impl CrateId { + pub fn shift(self, amount: u32) -> CrateId { + CrateId(self.0 + amount) + } +} + +impl CrateData { + fn add_dep(&mut self, name: CrateName, crate_id: CrateId) { + self.dependencies.push(Dependency { name, crate_id }) + } +} + +impl FromStr for Edition { + type Err = ParseEditionError; + + fn from_str(s: &str) -> Result { + let res = match s { + "2015" => Edition::Edition2015, + "2018" => Edition::Edition2018, + _ => return Err(ParseEditionError { invalid_input: s.to_string() }), + }; + Ok(res) + } +} + +impl fmt::Display for Edition { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str(match self { + Edition::Edition2015 => "2015", + Edition::Edition2018 => "2018", + }) + } +} + +impl FromIterator<(String, String)> for Env { + fn from_iter>(iter: T) -> Self { + Env { entries: FromIterator::from_iter(iter) } + } +} + +impl Env { + pub fn set(&mut self, env: &str, value: String) { + self.entries.insert(env.to_owned(), value); + } + + pub fn get(&self, env: &str) -> Option { + self.entries.get(env).cloned() + } +} + +#[derive(Debug)] +pub struct ParseEditionError { + invalid_input: String, +} + +impl fmt::Display for ParseEditionError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "invalid edition: {:?}", self.invalid_input) + } +} + +impl std::error::Error for ParseEditionError {} + +#[derive(Debug)] +pub struct CyclicDependenciesError; + +#[cfg(test)] +mod tests { + use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; + + #[test] + fn detect_cyclic_dependency_indirect() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate3 = graph.add_crate_root( + FileId(3u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); + assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); + assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err()); + } + + #[test] + fn detect_cyclic_dependency_direct() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + 
assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); + assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err()); + } + + #[test] + fn it_works() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate3 = graph.add_crate_root( + FileId(3u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); + assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); + } + + #[test] + fn dashes_are_normalized() { + let mut graph = CrateGraph::default(); + let crate1 = graph.add_crate_root( + FileId(1u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + let crate2 = graph.add_crate_root( + FileId(2u32), + Edition2018, + None, + CfgOptions::default(), + Env::default(), + Default::default(), + ); + assert!(graph + .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) + .is_ok()); + assert_eq!( + graph[crate1].dependencies, + vec![Dependency { + crate_id: crate2, + name: CrateName::new("crate_name_with_dashes").unwrap() + }] + ); + } +} diff --git a/crates/base_db/src/lib.rs b/crates/base_db/src/lib.rs new file mode 100644 index 000000000..811057251 --- /dev/null +++ b/crates/base_db/src/lib.rs @@ -0,0 +1,167 @@ +//! base_db defines basic database traits. The concrete DB is defined by ra_ide. +mod cancellation; +mod input; +pub mod fixture; + +use std::{panic, sync::Arc}; + +use rustc_hash::FxHashSet; +use syntax::{ast, Parse, SourceFile, TextRange, TextSize}; + +pub use crate::{ + cancellation::Canceled, + input::{ + CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, ProcMacroId, + SourceRoot, SourceRootId, + }, +}; +pub use salsa; +pub use vfs::{file_set::FileSet, VfsPath}; + +#[macro_export] +macro_rules! impl_intern_key { + ($name:ident) => { + impl $crate::salsa::InternKey for $name { + fn from_intern_id(v: $crate::salsa::InternId) -> Self { + $name(v) + } + fn as_intern_id(&self) -> $crate::salsa::InternId { + self.0 + } + } + }; +} + +pub trait Upcast { + fn upcast(&self) -> &T; +} + +pub trait CheckCanceled { + /// Aborts current query if there are pending changes. + /// + /// rust-analyzer needs to be able to answer semantic questions about the + /// code while the code is being modified. A common problem is that a + /// long-running query is being calculated when a new change arrives. + /// + /// We can't just apply the change immediately: this will cause the pending + /// query to see inconsistent state (it will observe an absence of + /// repeatable read). So what we do is we **cancel** all pending queries + /// before applying the change. + /// + /// We implement cancellation by panicking with a special value and catching + /// it on the API boundary. Salsa explicitly supports this use-case. 
+    fn check_canceled(&self);
+
+    fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
+    where
+        Self: Sized + panic::RefUnwindSafe,
+        F: FnOnce(&Self) -> T + panic::UnwindSafe,
+    {
+        panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() {
+            Ok(canceled) => *canceled,
+            Err(payload) => panic::resume_unwind(payload),
+        })
+    }
+}
+
+impl<T: salsa::Database> CheckCanceled for T {
+    fn check_canceled(&self) {
+        if self.salsa_runtime().is_current_revision_canceled() {
+            Canceled::throw()
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct FilePosition {
+    pub file_id: FileId,
+    pub offset: TextSize,
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub struct FileRange {
+    pub file_id: FileId,
+    pub range: TextRange,
+}
+
+pub const DEFAULT_LRU_CAP: usize = 128;
+
+pub trait FileLoader {
+    /// Text of the file.
+    fn file_text(&self, file_id: FileId) -> Arc<String>;
+    /// Note that we intentionally accept a `&str` and not a `&Path` here. This
+    /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such,
+    /// so the input is guaranteed to be utf-8 string. One might be tempted to
+    /// introduce some kind of "utf-8 path with / separators", but that's a bad idea. Behold
+    /// `#[path = "C://no/way"]`
+    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
+    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
+}
+
+/// Database which stores all significant input facts: source code and project
+/// model. Everything else in rust-analyzer is derived from these queries.
+#[salsa::query_group(SourceDatabaseStorage)]
+pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
+    // Parses the file into the syntax tree.
+    #[salsa::invoke(parse_query)]
+    fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
+
+    /// The crate graph.
+    #[salsa::input]
+    fn crate_graph(&self) -> Arc<CrateGraph>;
+}
+
+fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+    let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
+    let text = db.file_text(file_id);
+    SourceFile::parse(&*text)
+}
+
+/// We don't want to give HIR knowledge of source roots, hence we extract these
+/// methods into a separate DB.
+#[salsa::query_group(SourceDatabaseExtStorage)]
+pub trait SourceDatabaseExt: SourceDatabase {
+    #[salsa::input]
+    fn file_text(&self, file_id: FileId) -> Arc<String>;
+    /// Path to a file, relative to the root of its source root.
+    /// Source root of the file.
+    #[salsa::input]
+    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
+    /// Contents of the source root.
+    #[salsa::input]
+    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
+
+    fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
+}
+
+fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
+    let graph = db.crate_graph();
+    let res = graph
+        .iter()
+        .filter(|&krate| {
+            let root_file = graph[krate].root_file_id;
+            db.file_source_root(root_file) == id
+        })
+        .collect::<FxHashSet<_>>();
+    Arc::new(res)
+}
+
+/// Silly workaround for cyclic deps between the traits
+pub struct FileLoaderDelegate<T>(pub T);
+
+impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
+    fn file_text(&self, file_id: FileId) -> Arc<String> {
+        SourceDatabaseExt::file_text(self.0, file_id)
+    }
+    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+        // FIXME: this *somehow* should be platform agnostic...
+        let source_root = self.0.file_source_root(anchor);
+        let source_root = self.0.source_root(source_root);
+        source_root.file_set.resolve_path(anchor, path)
+    }
+
+    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+        let source_root = self.0.file_source_root(file_id);
+        self.0.source_root_crates(source_root)
+    }
+}
--
cgit v1.2.3
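The `CrateGraph` added in `input.rs` is the central piece of input this patch introduces. As a rough usage sketch (not part of the commit; the crate names, file ids and the `cfg` dependency are invented for illustration), a consumer of `base_db` could assemble a two-crate graph the same way the in-tree tests above do:

```rust
// Sketch only: mirrors the tests in crates/base_db/src/input.rs and assumes
// `base_db` and `cfg` are available as path dependencies of the caller.
use base_db::{CrateGraph, CrateName, Edition, Env, FileId};
use cfg::CfgOptions;

fn main() {
    let mut graph = CrateGraph::default();

    // Each crate is identified by the FileId of its root module; names live
    // on dependency edges, not on the crates themselves.
    let app = graph.add_crate_root(
        FileId(1u32),
        Edition::Edition2018,
        None, // display name
        CfgOptions::default(),
        Env::default(),
        Default::default(), // no proc macros
    );
    let util = graph.add_crate_root(
        FileId(2u32),
        Edition::Edition2018,
        None,
        CfgOptions::default(),
        Env::default(),
        Default::default(),
    );

    // `util` is the name under which the second crate is known to `app`.
    // Dashes are rejected by `CrateName::new`; use `normalize_dashes` for them.
    graph.add_dep(app, CrateName::new("util").unwrap(), util).unwrap();

    // The reverse edge would form a cycle, so `add_dep` refuses it.
    assert!(graph.add_dep(util, CrateName::new("app").unwrap(), app).is_err());

    // Transitive dependencies exclude the starting crate itself.
    assert_eq!(graph.transitive_deps(app).collect::<Vec<_>>(), vec![util]);
}
```

This is the same construction that `fixture::with_files` performs internally when a test fixture declares crates with `//- /lib.rs crate:foo deps:bar` metadata.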