Diffstat (limited to 'crates/base_db')
 crates/base_db/Cargo.toml          |  21
 crates/base_db/src/cancellation.rs |  48
 crates/base_db/src/fixture.rs      | 228
 crates/base_db/src/input.rs        | 453
 crates/base_db/src/lib.rs          | 167
 5 files changed, 917 insertions, 0 deletions
diff --git a/crates/base_db/Cargo.toml b/crates/base_db/Cargo.toml
new file mode 100644
index 000000000..7347d7528
--- /dev/null
+++ b/crates/base_db/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "base_db"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+authors = ["rust-analyzer developers"]
+edition = "2018"
+
+[lib]
+doctest = false
+
+[dependencies]
+salsa = "0.15.2"
+rustc-hash = "1.1.0"
+
+syntax = { path = "../syntax" }
+cfg = { path = "../cfg" }
+profile = { path = "../profile" }
+tt = { path = "../tt" }
+test_utils = { path = "../test_utils" }
+vfs = { path = "../vfs" }
+stdx = { path = "../stdx" }
diff --git a/crates/base_db/src/cancellation.rs b/crates/base_db/src/cancellation.rs
new file mode 100644
index 000000000..7420a1976
--- /dev/null
+++ b/crates/base_db/src/cancellation.rs
@@ -0,0 +1,48 @@
+//! Utility types to support cancellation.
+//!
+//! In a typical IDE use-case, requests and modifications happen concurrently, as
+//! in the following scenario:
+//!
+//! * user types a character,
+//! * a syntax highlighting process is started,
+//! * user types the next character, while syntax highlighting *is still in
+//!   progress*.
+//!
+//! In this situation, we want to react to the modification as quickly as possible.
+//! At the same time, in-progress results are not very interesting, because they
+//! are invalidated by the edit anyway. So, we first cancel all in-flight
+//! requests, and then apply the modification knowing that it won't interfere with
+//! any background processing (this bit is handled by salsa, see the
+//! `CheckCanceled::check_canceled` method).
+
+/// An "error" signifying that the operation was canceled.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct Canceled {
+    _private: (),
+}
+
+impl Canceled {
+    pub(crate) fn new() -> Canceled {
+        Canceled { _private: () }
+    }
+
+    pub fn throw() -> ! {
+        // We use resume and not panic here to avoid running the panic
+        // hook (that is, to avoid collecting and printing backtrace).
+        std::panic::resume_unwind(Box::new(Canceled::new()))
+    }
+}
+
+impl std::fmt::Display for Canceled {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        fmt.write_str("canceled")
+    }
+}
+
+impl std::fmt::Debug for Canceled {
+    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(fmt, "Canceled")
+    }
+}
+
+impl std::error::Error for Canceled {}
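
To make the cancel-and-catch handshake described in the module comment concrete, here is a minimal standalone sketch of the pattern, not code from this diff: the `Canceled` stand-in mirrors the type above, the `step == 42` trigger is arbitrary, and in rust-analyzer the throwing side is `check_canceled` while the catching side is `catch_canceled` in `lib.rs`.

```rust
use std::panic;

// Stand-in that mirrors the `Canceled` type defined in cancellation.rs.
#[derive(Debug)]
struct Canceled;

fn main() {
    // A long-running computation unwinds with `Canceled` instead of returning an
    // error; `resume_unwind` skips the panic hook, so no backtrace is printed.
    let result = panic::catch_unwind(|| {
        for step in 0..1_000 {
            if step == 42 {
                // In rust-analyzer this is `db.check_canceled()` noticing a pending change.
                panic::resume_unwind(Box::new(Canceled));
            }
        }
    });

    // At the API boundary the payload is downcast: a `Canceled` payload means
    // "the request became obsolete"; anything else is a genuine panic and is re-raised.
    match result {
        Ok(()) => println!("query finished"),
        Err(payload) => match payload.downcast::<Canceled>() {
            Ok(_) => println!("query was canceled"),
            Err(other) => panic::resume_unwind(other),
        },
    }
}
```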
diff --git a/crates/base_db/src/fixture.rs b/crates/base_db/src/fixture.rs
new file mode 100644
index 000000000..5ff8ead0e
--- /dev/null
+++ b/crates/base_db/src/fixture.rs
@@ -0,0 +1,228 @@
+//! Fixtures are strings containing rust source code with optional metadata.
+//! A fixture without metadata is parsed into a single source file.
+//! Use this to test functionality local to one file.
+//!
+//! Simple example:
+//! ```
+//! r#"
+//! fn main() {
+//!     println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata can be added to a fixture after a `//-` comment.
+//! The basic form is specifying filenames,
+//! which is also how to define multiple files in a single test fixture.
+//!
+//! Example using two files in the same crate:
+//! ```
+//! "
+//! //- /main.rs
+//! mod foo;
+//! fn main() {
+//!     foo::bar();
+//! }
+//!
+//! //- /foo.rs
+//! pub fn bar() {}
+//! "
+//! ```
+//!
+//! Example using two crates with one file each, with one crate depending on the other:
+//! ```
+//! r#"
+//! //- /main.rs crate:a deps:b
+//! fn main() {
+//!     b::foo();
+//! }
+//! //- /lib.rs crate:b
+//! pub fn foo() {
+//!     println!("Hello World")
+//! }
+//! "#
+//! ```
+//!
+//! Metadata allows specifying all settings and variables
+//! that are available in a real rust project:
+//! - crate names via `crate:cratename`
+//! - dependencies via `deps:dep1,dep2`
+//! - configuration settings via `cfg:dbg=false,opt_level=2`
+//! - environment variables via `env:PATH=/bin,RUST_LOG=debug`
+//!
+//! Example using all available metadata:
+//! ```
+//! "
+//! //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo
+//! fn insert_source_code_here() {}
+//! "
+//! ```
+use std::{str::FromStr, sync::Arc};
+
+use cfg::CfgOptions;
+use rustc_hash::FxHashMap;
+use test_utils::{extract_range_or_offset, Fixture, RangeOrOffset, CURSOR_MARKER};
+use vfs::{file_set::FileSet, VfsPath};
+
+use crate::{
+    input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, SourceDatabaseExt,
+    SourceRoot, SourceRootId,
+};
+
+pub const WORKSPACE: SourceRootId = SourceRootId(0);
+
+pub trait WithFixture: Default + SourceDatabaseExt + 'static {
+    fn with_single_file(text: &str) -> (Self, FileId) {
+        let mut db = Self::default();
+        let (_, files) = with_files(&mut db, text);
+        assert_eq!(files.len(), 1);
+        (db, files[0])
+    }
+
+    fn with_files(ra_fixture: &str) -> Self {
+        let mut db = Self::default();
+        let (pos, _) = with_files(&mut db, ra_fixture);
+        assert!(pos.is_none());
+        db
+    }
+
+    fn with_position(ra_fixture: &str) -> (Self, FilePosition) {
+        let (db, file_id, range_or_offset) = Self::with_range_or_offset(ra_fixture);
+        let offset = match range_or_offset {
+            RangeOrOffset::Range(_) => panic!(),
+            RangeOrOffset::Offset(it) => it,
+        };
+        (db, FilePosition { file_id, offset })
+    }
+
+    fn with_range_or_offset(ra_fixture: &str) -> (Self, FileId, RangeOrOffset) {
+        let mut db = Self::default();
+        let (pos, _) = with_files(&mut db, ra_fixture);
+        let (file_id, range_or_offset) = pos.unwrap();
+        (db, file_id, range_or_offset)
+    }
+
+    fn test_crate(&self) -> CrateId {
+        let crate_graph = self.crate_graph();
+        let mut it = crate_graph.iter();
+        let res = it.next().unwrap();
+        assert!(it.next().is_none());
+        res
+    }
+}
+
+impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {}
+
+fn with_files(
+    db: &mut dyn SourceDatabaseExt,
+    fixture: &str,
+) -> (Option<(FileId, RangeOrOffset)>, Vec<FileId>) {
+    let fixture = Fixture::parse(fixture);
+
+    let mut files = Vec::new();
+    let mut crate_graph = CrateGraph::default();
+    let mut crates = FxHashMap::default();
+    let mut crate_deps = Vec::new();
+    let mut default_crate_root: Option<FileId> = None;
+
+    let mut file_set = FileSet::default();
+    let source_root_id = WORKSPACE;
+    let source_root_prefix = "/".to_string();
+    let mut file_id = FileId(0);
+
+    let mut file_position = None;
+
+    for entry in fixture {
+        let text = if entry.text.contains(CURSOR_MARKER) {
+            let (range_or_offset, text) = extract_range_or_offset(&entry.text);
+            assert!(file_position.is_none());
+            file_position = Some((file_id, range_or_offset));
+            text.to_string()
+        } else {
+            entry.text.clone()
+        };
+
+        let meta = FileMeta::from(entry);
+        assert!(meta.path.starts_with(&source_root_prefix));
+
+        if let Some(krate) = meta.krate {
+            let crate_id = crate_graph.add_crate_root(
+                file_id,
+                meta.edition,
+                Some(krate.clone()),
+                meta.cfg,
+                meta.env,
+                Default::default(),
+            );
+            let crate_name = CrateName::new(&krate).unwrap();
+            let prev = crates.insert(crate_name.clone(), crate_id);
+            assert!(prev.is_none());
+            for dep in meta.deps {
+                let dep = CrateName::new(&dep).unwrap();
+                crate_deps.push((crate_name.clone(), dep))
+            }
+        } else if meta.path == "/main.rs" || meta.path == "/lib.rs" {
+            assert!(default_crate_root.is_none());
+            default_crate_root = Some(file_id);
+        }
+
+        db.set_file_text(file_id, Arc::new(text));
+        db.set_file_source_root(file_id, source_root_id);
+        let path = VfsPath::new_virtual_path(meta.path);
+        file_set.insert(file_id, path.into());
+        files.push(file_id);
+        file_id.0 += 1;
+    }
+
+    if crates.is_empty() {
+        let crate_root = default_crate_root.unwrap();
+        crate_graph.add_crate_root(
+            crate_root,
+            Edition::Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+    } else {
+        for (from, to) in crate_deps {
+            let from_id = crates[&from];
+            let to_id = crates[&to];
+            crate_graph.add_dep(from_id, CrateName::new(&to).unwrap(), to_id).unwrap();
+        }
+    }
+
+    db.set_source_root(source_root_id, Arc::new(SourceRoot::new_local(file_set)));
+    db.set_crate_graph(Arc::new(crate_graph));
+
+    (file_position, files)
+}
+
+struct FileMeta {
+    path: String,
+    krate: Option<String>,
+    deps: Vec<String>,
+    cfg: CfgOptions,
+    edition: Edition,
+    env: Env,
+}
+
+impl From<Fixture> for FileMeta {
+    fn from(f: Fixture) -> FileMeta {
+        let mut cfg = CfgOptions::default();
+        f.cfg_atoms.iter().for_each(|it| cfg.insert_atom(it.into()));
+        f.cfg_key_values.iter().for_each(|(k, v)| cfg.insert_key_value(k.into(), v.into()));
+
+        FileMeta {
+            path: f.path,
+            krate: f.krate,
+            deps: f.deps,
+            cfg,
+            edition: f
+                .edition
+                .as_ref()
+                .map_or(Edition::Edition2018, |v| Edition::from_str(&v).unwrap()),
+            env: f.env.into_iter().collect(),
+        }
+    }
+}
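
The `//-` meta lines documented at the top of this file are parsed by `test_utils::Fixture::parse`, whose implementation is outside this diff. Purely as an illustration of the format (a toy sketch, not the real parser), the path, `crate:` and `deps:` fields of a meta line can be split out like this; `cfg:` and `env:` entries follow the same `key:value` scheme.

```rust
// Toy illustration of the `//- ` meta-line format; the real parsing lives in
// `test_utils::Fixture::parse`, which is not part of this diff.
fn parse_meta(line: &str) -> (String, Option<String>, Vec<String>) {
    let rest = line.trim_start_matches("//-").trim();
    let mut path = String::new();
    let mut krate = None;
    let mut deps = Vec::new();
    for (i, component) in rest.split_whitespace().enumerate() {
        if i == 0 {
            // The first token is always the virtual file path.
            path = component.to_string();
        } else if let Some(name) = component.strip_prefix("crate:") {
            krate = Some(name.to_string());
        } else if let Some(list) = component.strip_prefix("deps:") {
            deps.extend(list.split(',').map(str::to_string));
        }
        // `cfg:` and `env:` entries would be handled analogously.
    }
    (path, krate, deps)
}

fn main() {
    let (path, krate, deps) = parse_meta("//- /main.rs crate:a deps:b,c");
    assert_eq!(path, "/main.rs");
    assert_eq!(krate.as_deref(), Some("a"));
    assert_eq!(deps, vec!["b".to_string(), "c".to_string()]);
    println!("parsed: {} {:?} {:?}", path, krate, deps);
}
```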
diff --git a/crates/base_db/src/input.rs b/crates/base_db/src/input.rs
new file mode 100644
index 000000000..f3d65cdf0
--- /dev/null
+++ b/crates/base_db/src/input.rs
@@ -0,0 +1,453 @@
+//! This module specifies the input to rust-analyzer. In some sense, this is
+//! **the** most important module, because all other fancy stuff is strictly
+//! derived from this input.
+//!
+//! Note that neither this module, nor any other part of the analyzer's core does
+//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
+//! actual IO is done and lowered to input.
+
+use std::{fmt, iter::FromIterator, ops, str::FromStr, sync::Arc};
+
+use cfg::CfgOptions;
+use rustc_hash::{FxHashMap, FxHashSet};
+use syntax::SmolStr;
+use tt::TokenExpander;
+use vfs::file_set::FileSet;
+
+pub use vfs::FileId;
+
+/// Files are grouped into source roots. A source root is a directory on the
+/// file system which is watched for changes. Typically it corresponds to a
+/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
+/// the nearest enclosing source root. Paths to files are always relative to a
+/// source root, and the analyzer does not know the root path of the source root
+/// at all. So, a file from one source root can't refer to a file in another
+/// source root by path.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct SourceRootId(pub u32);
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct SourceRoot {
+    /// Sysroot or crates.io library.
+    ///
+    /// Libraries are considered mostly immutable; this assumption is used to
+    /// optimize salsa's query structure.
+    pub is_library: bool,
+    pub(crate) file_set: FileSet,
+}
+
+impl SourceRoot {
+    pub fn new_local(file_set: FileSet) -> SourceRoot {
+        SourceRoot { is_library: false, file_set }
+    }
+    pub fn new_library(file_set: FileSet) -> SourceRoot {
+        SourceRoot { is_library: true, file_set }
+    }
+    pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
+        self.file_set.iter()
+    }
+}
+
+/// `CrateGraph` is a bit of information which turns a set of text files into a
+/// number of Rust crates. Each crate is defined by the `FileId` of its root module,
+/// the set of cfg flags and the set of dependencies. Note that, due to `cfg`
+/// flags, there might be several crates for a single `FileId`! As in the Rust
+/// language proper, a crate does not have a name. Instead, names are specified
+/// on dependency edges. That is, a crate might be known under different names
+/// in different dependent crates.
+///
+/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust
+/// language proper, not a concept of the build system. In practice, we get
+/// `CrateGraph` by lowering `cargo metadata` output.
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub struct CrateGraph {
+    arena: FxHashMap<CrateId, CrateData>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct CrateId(pub u32);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CrateName(SmolStr);
+
+impl CrateName {
+    /// Creates a crate name, checking for dashes in the string provided.
+    /// Dashes are not allowed in crate names,
+    /// so the input string is returned as an `Err` in that case.
+    pub fn new(name: &str) -> Result<CrateName, &str> {
+        if name.contains('-') {
+            Err(name)
+        } else {
+            Ok(Self(SmolStr::new(name)))
+        }
+    }
+
+    /// Creates a crate name, unconditionally replacing the dashes with underscores.
+    pub fn normalize_dashes(name: &str) -> CrateName {
+        Self(SmolStr::new(name.replace('-', "_")))
+    }
+}
+
+impl fmt::Display for CrateName {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
+impl ops::Deref for CrateName {
+    type Target = str;
+    fn deref(&self) -> &Self::Target {
+        &*self.0
+    }
+}
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub struct ProcMacroId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct ProcMacro {
+    pub name: SmolStr,
+    pub expander: Arc<dyn TokenExpander>,
+}
+
+impl Eq for ProcMacro {}
+impl PartialEq for ProcMacro {
+    fn eq(&self, other: &ProcMacro) -> bool {
+        self.name == other.name && Arc::ptr_eq(&self.expander, &other.expander)
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct CrateData {
+    pub root_file_id: FileId,
+    pub edition: Edition,
+    /// The name to display to the end user.
+    /// The actual crate name can be different in a particular dependent crate,
+    /// or may even be missing in some cases, such as a dummy crate for a code snippet.
+    pub display_name: Option<String>,
+    pub cfg_options: CfgOptions,
+    pub env: Env,
+    pub dependencies: Vec<Dependency>,
+    pub proc_macro: Vec<ProcMacro>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum Edition {
+    Edition2018,
+    Edition2015,
+}
+
+#[derive(Default, Debug, Clone, PartialEq, Eq)]
+pub struct Env {
+    entries: FxHashMap<String, String>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Dependency {
+    pub crate_id: CrateId,
+    pub name: CrateName,
+}
+
+impl CrateGraph {
+    pub fn add_crate_root(
+        &mut self,
+        file_id: FileId,
+        edition: Edition,
+        display_name: Option<String>,
+        cfg_options: CfgOptions,
+        env: Env,
+        proc_macro: Vec<(SmolStr, Arc<dyn tt::TokenExpander>)>,
+    ) -> CrateId {
+        let proc_macro =
+            proc_macro.into_iter().map(|(name, it)| ProcMacro { name, expander: it }).collect();
+
+        let data = CrateData {
+            root_file_id: file_id,
+            edition,
+            display_name,
+            cfg_options,
+            env,
+            proc_macro,
+            dependencies: Vec::new(),
+        };
+        let crate_id = CrateId(self.arena.len() as u32);
+        let prev = self.arena.insert(crate_id, data);
+        assert!(prev.is_none());
+        crate_id
+    }
+
+    pub fn add_dep(
+        &mut self,
+        from: CrateId,
+        name: CrateName,
+        to: CrateId,
+    ) -> Result<(), CyclicDependenciesError> {
+        if self.dfs_find(from, to, &mut FxHashSet::default()) {
+            return Err(CyclicDependenciesError);
+        }
+        self.arena.get_mut(&from).unwrap().add_dep(name, to);
+        Ok(())
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.arena.is_empty()
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
+        self.arena.keys().copied()
+    }
+
+    /// Returns an iterator over all transitive dependencies of the given crate.
+    pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> + '_ {
+        let mut worklist = vec![of];
+        let mut deps = FxHashSet::default();
+
+        while let Some(krate) = worklist.pop() {
+            if !deps.insert(krate) {
+                continue;
+            }
+
+            worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
+        }
+
+        deps.remove(&of);
+        deps.into_iter()
+    }
+
+    // FIXME: this only finds one crate with the given root; we could have multiple
+    pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
+        let (&crate_id, _) =
+            self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?;
+        Some(crate_id)
+    }
+
+    /// Extends this crate graph by adding a complete disjoint second crate
+    /// graph.
+    ///
+    /// The ids of the crates in the `other` graph are shifted by the return
+    /// amount.
+    pub fn extend(&mut self, other: CrateGraph) -> u32 {
+        let start = self.arena.len() as u32;
+        self.arena.extend(other.arena.into_iter().map(|(id, mut data)| {
+            let new_id = id.shift(start);
+            for dep in &mut data.dependencies {
+                dep.crate_id = dep.crate_id.shift(start);
+            }
+            (new_id, data)
+        }));
+        start
+    }
+
+    fn dfs_find(&self, target: CrateId, from: CrateId, visited: &mut FxHashSet<CrateId>) -> bool {
+        if !visited.insert(from) {
+            return false;
+        }
+
+        if target == from {
+            return true;
+        }
+
+        for dep in &self[from].dependencies {
+            let crate_id = dep.crate_id;
+            if self.dfs_find(target, crate_id, visited) {
+                return true;
+            }
+        }
+        false
+    }
+}
+
+impl ops::Index<CrateId> for CrateGraph {
+    type Output = CrateData;
+    fn index(&self, crate_id: CrateId) -> &CrateData {
+        &self.arena[&crate_id]
+    }
+}
+
+impl CrateId {
+    pub fn shift(self, amount: u32) -> CrateId {
+        CrateId(self.0 + amount)
+    }
+}
+
+impl CrateData {
+    fn add_dep(&mut self, name: CrateName, crate_id: CrateId) {
+        self.dependencies.push(Dependency { name, crate_id })
+    }
+}
+
+impl FromStr for Edition {
+    type Err = ParseEditionError;
+
+    fn from_str(s: &str) -> Result<Self, Self::Err> {
+        let res = match s {
+            "2015" => Edition::Edition2015,
+            "2018" => Edition::Edition2018,
+            _ => return Err(ParseEditionError { invalid_input: s.to_string() }),
+        };
+        Ok(res)
+    }
+}
+
+impl fmt::Display for Edition {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str(match self {
+            Edition::Edition2015 => "2015",
+            Edition::Edition2018 => "2018",
+        })
+    }
+}
+
+impl FromIterator<(String, String)> for Env {
+    fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
+        Env { entries: FromIterator::from_iter(iter) }
+    }
+}
+
+impl Env {
+    pub fn set(&mut self, env: &str, value: String) {
+        self.entries.insert(env.to_owned(), value);
+    }
+
+    pub fn get(&self, env: &str) -> Option<String> {
+        self.entries.get(env).cloned()
+    }
+}
+
+#[derive(Debug)]
+pub struct ParseEditionError {
+    invalid_input: String,
+}
+
+impl fmt::Display for ParseEditionError {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "invalid edition: {:?}", self.invalid_input)
+    }
+}
+
+impl std::error::Error for ParseEditionError {}
+
+#[derive(Debug)]
+pub struct CyclicDependenciesError;
+
+#[cfg(test)]
+mod tests {
+    use super::{CfgOptions, CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId};
+
+    #[test]
+    fn detect_cyclic_dependency_indirect() {
+        let mut graph = CrateGraph::default();
+        let crate1 = graph.add_crate_root(
+            FileId(1u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        let crate2 = graph.add_crate_root(
+            FileId(2u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        let crate3 = graph.add_crate_root(
+            FileId(3u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
+        assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
+        assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err());
+    }
+
+    #[test]
+    fn detect_cyclic_dependency_direct() {
+        let mut graph = CrateGraph::default();
+        let crate1 = graph.add_crate_root(
+            FileId(1u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        let crate2 = graph.add_crate_root(
+            FileId(2u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
+        assert!(graph.add_dep(crate2, CrateName::new("crate2").unwrap(), crate2).is_err());
+    }
+
+    #[test]
+    fn it_works() {
+        let mut graph = CrateGraph::default();
+        let crate1 = graph.add_crate_root(
+            FileId(1u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        let crate2 = graph.add_crate_root(
+            FileId(2u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        let crate3 = graph.add_crate_root(
+            FileId(3u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
+        assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
+    }
+
+    #[test]
+    fn dashes_are_normalized() {
+        let mut graph = CrateGraph::default();
+        let crate1 = graph.add_crate_root(
+            FileId(1u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        let crate2 = graph.add_crate_root(
+            FileId(2u32),
+            Edition2018,
+            None,
+            CfgOptions::default(),
+            Env::default(),
+            Default::default(),
+        );
+        assert!(graph
+            .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
+            .is_ok());
+        assert_eq!(
+            graph[crate1].dependencies,
+            vec![Dependency {
+                crate_id: crate2,
+                name: CrateName::new("crate_name_with_dashes").unwrap()
+            }]
+        );
+    }
+}
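
The cycle check behind `add_dep` is worth spelling out: adding an edge `from -> to` is rejected whenever `from` is already reachable from `to`, which is exactly what `dfs_find` computes and what the tests above exercise. Below is a standalone sketch of the same reachability check over a plain adjacency map; the helper names are hypothetical and this is not the `CrateGraph` API itself.

```rust
use std::collections::{HashMap, HashSet};

// Same shape as `CrateGraph::dfs_find`: is `target` reachable from `from`?
fn reachable(
    graph: &HashMap<u32, Vec<u32>>,
    target: u32,
    from: u32,
    visited: &mut HashSet<u32>,
) -> bool {
    if !visited.insert(from) {
        return false; // already explored this node
    }
    if from == target {
        return true;
    }
    if let Some(deps) = graph.get(&from) {
        for &next in deps {
            if reachable(graph, target, next, visited) {
                return true;
            }
        }
    }
    false
}

fn main() {
    // Existing edges: 1 -> 2 -> 3. Adding 3 -> 1 would close a cycle.
    let mut graph: HashMap<u32, Vec<u32>> = HashMap::new();
    graph.insert(1, vec![2]);
    graph.insert(2, vec![3]);

    // `add_dep(from = 3, _, to = 1)` checks whether 3 is reachable from 1;
    // it is, so the edge is rejected with `CyclicDependenciesError`.
    let creates_cycle = reachable(&graph, 3, 1, &mut HashSet::new());
    assert!(creates_cycle);
    println!("edge 3 -> 1 rejected: {}", creates_cycle);
}
```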
diff --git a/crates/base_db/src/lib.rs b/crates/base_db/src/lib.rs
new file mode 100644
index 000000000..ee3415850
--- /dev/null
+++ b/crates/base_db/src/lib.rs
@@ -0,0 +1,167 @@
+//! base_db defines basic database traits. The concrete DB is defined by ide.
+mod cancellation;
+mod input;
+pub mod fixture;
+
+use std::{panic, sync::Arc};
+
+use rustc_hash::FxHashSet;
+use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
+
+pub use crate::{
+    cancellation::Canceled,
+    input::{
+        CrateData, CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, ProcMacroId,
+        SourceRoot, SourceRootId,
+    },
+};
+pub use salsa;
+pub use vfs::{file_set::FileSet, VfsPath};
+
+#[macro_export]
+macro_rules! impl_intern_key {
+    ($name:ident) => {
+        impl $crate::salsa::InternKey for $name {
+            fn from_intern_id(v: $crate::salsa::InternId) -> Self {
+                $name(v)
+            }
+            fn as_intern_id(&self) -> $crate::salsa::InternId {
+                self.0
+            }
+        }
+    };
+}
+
+pub trait Upcast<T: ?Sized> {
+    fn upcast(&self) -> &T;
+}
+
+pub trait CheckCanceled {
+    /// Aborts the current query if there are pending changes.
+    ///
+    /// rust-analyzer needs to be able to answer semantic questions about the
+    /// code while the code is being modified. A common problem is that a
+    /// long-running query is being calculated when a new change arrives.
+    ///
+    /// We can't just apply the change immediately: this will cause the pending
+    /// query to see inconsistent state (it would observe a lack of repeatable
+    /// reads). So what we do is we **cancel** all pending queries before
+    /// applying the change.
+    ///
+    /// We implement cancellation by panicking with a special value and catching
+    /// it on the API boundary. Salsa explicitly supports this use-case.
+    fn check_canceled(&self);
+
+    fn catch_canceled<F, T>(&self, f: F) -> Result<T, Canceled>
+    where
+        Self: Sized + panic::RefUnwindSafe,
+        F: FnOnce(&Self) -> T + panic::UnwindSafe,
+    {
+        panic::catch_unwind(|| f(self)).map_err(|err| match err.downcast::<Canceled>() {
+            Ok(canceled) => *canceled,
+            Err(payload) => panic::resume_unwind(payload),
+        })
+    }
+}
+
+impl<T: salsa::Database> CheckCanceled for T {
+    fn check_canceled(&self) {
+        if self.salsa_runtime().is_current_revision_canceled() {
+            Canceled::throw()
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct FilePosition {
+    pub file_id: FileId,
+    pub offset: TextSize,
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub struct FileRange {
+    pub file_id: FileId,
+    pub range: TextRange,
+}
+
+pub const DEFAULT_LRU_CAP: usize = 128;
+
+pub trait FileLoader {
+    /// Text of the file.
+    fn file_text(&self, file_id: FileId) -> Arc<String>;
+    /// Note that we intentionally accept a `&str` and not a `&Path` here. This
+    /// method exists to handle `#[path = "/some/path.rs"] mod foo;` and such,
+    /// so the input is guaranteed to be a utf-8 string. One might be tempted to
+    /// introduce some kind of "utf-8 path with / separators", but that's a bad
+    /// idea. Behold `#[path = "C://no/way"]`.
+    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId>;
+    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
+}
+
+/// Database which stores all significant input facts: source code and project
+/// model. Everything else in rust-analyzer is derived from these queries.
+#[salsa::query_group(SourceDatabaseStorage)]
+pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug {
+    // Parses the file into the syntax tree.
+    #[salsa::invoke(parse_query)]
+    fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;
+
+    /// The crate graph.
+    #[salsa::input]
+    fn crate_graph(&self) -> Arc<CrateGraph>;
+}
+
+fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
+    let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
+    let text = db.file_text(file_id);
+    SourceFile::parse(&*text)
+}
+
+/// We don't want to give HIR knowledge of source roots, hence we extract these
+/// methods into a separate DB.
+#[salsa::query_group(SourceDatabaseExtStorage)]
+pub trait SourceDatabaseExt: SourceDatabase {
+    #[salsa::input]
+    fn file_text(&self, file_id: FileId) -> Arc<String>;
+    /// Source root of the file.
+    #[salsa::input]
+    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
+    /// Contents of the source root.
+    #[salsa::input]
+    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;
+
+    fn source_root_crates(&self, id: SourceRootId) -> Arc<FxHashSet<CrateId>>;
+}
+
+fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<FxHashSet<CrateId>> {
+    let graph = db.crate_graph();
+    let res = graph
+        .iter()
+        .filter(|&krate| {
+            let root_file = graph[krate].root_file_id;
+            db.file_source_root(root_file) == id
+        })
+        .collect::<FxHashSet<_>>();
+    Arc::new(res)
+}
+
+/// Silly workaround for cyclic deps between the traits
+pub struct FileLoaderDelegate<T>(pub T);
+
+impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
+    fn file_text(&self, file_id: FileId) -> Arc<String> {
+        SourceDatabaseExt::file_text(self.0, file_id)
+    }
+    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
+        // FIXME: this *somehow* should be platform agnostic...
+        let source_root = self.0.file_source_root(anchor);
+        let source_root = self.0.source_root(source_root);
+        source_root.file_set.resolve_path(anchor, path)
+    }
+
+    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
+        let source_root = self.0.file_source_root(file_id);
+        self.0.source_root_crates(source_root)
+    }
+}
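
For orientation, here is a sketch of how a downstream crate might assemble a concrete salsa database from the query groups declared above. The `TestDB` name is hypothetical, and the `#[salsa::database(...)]` / `salsa::Storage` shapes are assumed from the salsa 0.15 API pinned in Cargo.toml; rust-analyzer's real `RootDatabase` (defined outside this diff) follows the same pattern of delegating `FileLoader` to `FileLoaderDelegate`.

```rust
use std::{fmt, sync::Arc};

use base_db::{
    salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabaseExtStorage,
    SourceDatabaseStorage,
};
use rustc_hash::FxHashSet;

// Hypothetical concrete database: the query-group storages declared in lib.rs
// are plugged into one salsa database struct.
#[salsa::database(SourceDatabaseStorage, SourceDatabaseExtStorage)]
#[derive(Default)]
struct TestDB {
    storage: salsa::Storage<TestDB>,
}

impl salsa::Database for TestDB {}

// `SourceDatabase` requires `Debug`, and `salsa::Storage` is not `Debug`,
// so a manual impl is used instead of `#[derive(Debug)]`.
impl fmt::Debug for TestDB {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("TestDB").finish()
    }
}

// `FileLoader` is a supertrait of `SourceDatabase`; delegate to
// `FileLoaderDelegate`, the "silly workaround" defined above.
impl FileLoader for TestDB {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_path(&self, anchor: FileId, path: &str) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_path(anchor, path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}

#[test]
fn parse_via_fixture() {
    use base_db::{fixture::WithFixture, SourceDatabase};

    // `with_single_file` comes from the blanket `WithFixture` impl in fixture.rs.
    let (db, file_id) = TestDB::with_single_file("fn main() {}");
    let parse = db.parse(file_id);
    assert!(parse.errors().is_empty());
}
```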