31 files changed, 782 insertions, 242 deletions
diff --git a/Cargo.lock b/Cargo.lock
index b7480b8d1..b7489fda5 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1023,6 +1023,7 @@ dependencies = [ | |||
1023 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1023 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
1024 | "ra_db 0.1.0", | 1024 | "ra_db 0.1.0", |
1025 | "ra_hir 0.1.0", | 1025 | "ra_hir 0.1.0", |
1026 | "ra_ide_api 0.1.0", | ||
1026 | "ra_project_model 0.1.0", | 1027 | "ra_project_model 0.1.0", |
1027 | "ra_syntax 0.1.0", | 1028 | "ra_syntax 0.1.0", |
1028 | "ra_vfs 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", | 1029 | "ra_vfs 0.2.4 (registry+https://github.com/rust-lang/crates.io-index)", |
@@ -1079,6 +1080,7 @@ dependencies = [ | |||
1079 | "flexi_logger 0.11.5 (registry+https://github.com/rust-lang/crates.io-index)", | 1080 | "flexi_logger 0.11.5 (registry+https://github.com/rust-lang/crates.io-index)", |
1080 | "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1081 | "insta 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)", |
1081 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 1082 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", |
1083 | "lalrpop-intern 0.15.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
1082 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 1084 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
1083 | "once_cell 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", | 1085 | "once_cell 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", |
1084 | "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1086 | "parking_lot 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", |
diff --git a/crates/ra_assists/src/fill_match_arms.rs b/crates/ra_assists/src/fill_match_arms.rs
index a7de6ae9f..c7a8bce20 100644
--- a/crates/ra_assists/src/fill_match_arms.rs
+++ b/crates/ra_assists/src/fill_match_arms.rs
@@ -22,7 +22,7 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As | |||
22 | let expr = match_expr.expr()?; | 22 | let expr = match_expr.expr()?; |
23 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None); | 23 | let analyzer = hir::SourceAnalyzer::new(ctx.db, ctx.frange.file_id, expr.syntax(), None); |
24 | let match_expr_ty = analyzer.type_of(ctx.db, expr)?; | 24 | let match_expr_ty = analyzer.type_of(ctx.db, expr)?; |
25 | let enum_def = match_expr_ty.autoderef(ctx.db).find_map(|ty| match ty.as_adt() { | 25 | let enum_def = analyzer.autoderef(ctx.db, match_expr_ty).find_map(|ty| match ty.as_adt() { |
26 | Some((AdtDef::Enum(e), _)) => Some(e), | 26 | Some((AdtDef::Enum(e), _)) => Some(e), |
27 | _ => None, | 27 | _ => None, |
28 | })?; | 28 | })?; |
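The assist now goes through the resolver-aware `SourceAnalyzer::autoderef` added in `source_binder.rs` further down: the old inherent `Ty::autoderef` only performed built-in derefs, while the new path carries a resolver so it can also look through user `Deref` impls via the `deref` lang item.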
diff --git a/crates/ra_batch/Cargo.toml b/crates/ra_batch/Cargo.toml
index 8bf085bbf..7d8837fc3 100644
--- a/crates/ra_batch/Cargo.toml
+++ b/crates/ra_batch/Cargo.toml
@@ -11,6 +11,7 @@ rustc-hash = "1.0" | |||
11 | ra_vfs = "0.2.0" | 11 | ra_vfs = "0.2.0" |
12 | ra_syntax = { path = "../ra_syntax" } | 12 | ra_syntax = { path = "../ra_syntax" } |
13 | ra_db = { path = "../ra_db" } | 13 | ra_db = { path = "../ra_db" } |
14 | ra_ide_api = { path = "../ra_ide_api" } | ||
14 | ra_hir = { path = "../ra_hir" } | 15 | ra_hir = { path = "../ra_hir" } |
15 | ra_project_model = { path = "../ra_project_model" } | 16 | ra_project_model = { path = "../ra_project_model" } |
16 | 17 | ||
diff --git a/crates/ra_batch/src/lib.rs b/crates/ra_batch/src/lib.rs
index 96b32d9fe..43d3fb7e3 100644
--- a/crates/ra_batch/src/lib.rs
+++ b/crates/ra_batch/src/lib.rs
@@ -1,36 +1,19 @@ | |||
1 | mod vfs_filter; | 1 | mod vfs_filter; |
2 | 2 | ||
3 | use std::{sync::Arc, path::Path, collections::HashSet, error::Error}; | 3 | use std::{path::Path, collections::HashSet, error::Error}; |
4 | 4 | ||
5 | use rustc_hash::FxHashMap; | 5 | use rustc_hash::FxHashMap; |
6 | 6 | ||
7 | use ra_db::{ | 7 | use ra_db::{ |
8 | CrateGraph, FileId, SourceRoot, SourceRootId, SourceDatabase, salsa::{self, Database}, | 8 | CrateGraph, FileId, SourceRootId, |
9 | }; | 9 | }; |
10 | use ra_hir::db; | 10 | use ra_ide_api::{AnalysisHost, AnalysisChange}; |
11 | use ra_project_model::ProjectWorkspace; | 11 | use ra_project_model::{ProjectWorkspace, ProjectRoot}; |
12 | use ra_vfs::{Vfs, VfsChange}; | 12 | use ra_vfs::{Vfs, VfsChange}; |
13 | use vfs_filter::IncludeRustFiles; | 13 | use vfs_filter::IncludeRustFiles; |
14 | 14 | ||
15 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; | 15 | type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>; |
16 | 16 | ||
17 | #[salsa::database( | ||
18 | ra_db::SourceDatabaseStorage, | ||
19 | db::AstDatabaseStorage, | ||
20 | db::DefDatabaseStorage, | ||
21 | db::HirDatabaseStorage | ||
22 | )] | ||
23 | #[derive(Debug)] | ||
24 | pub struct BatchDatabase { | ||
25 | runtime: salsa::Runtime<BatchDatabase>, | ||
26 | } | ||
27 | |||
28 | impl salsa::Database for BatchDatabase { | ||
29 | fn salsa_runtime(&self) -> &salsa::Runtime<BatchDatabase> { | ||
30 | &self.runtime | ||
31 | } | ||
32 | } | ||
33 | |||
34 | fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId { | 17 | fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId { |
35 | FileId(f.0) | 18 | FileId(f.0) |
36 | } | 19 | } |
@@ -38,86 +21,87 @@ fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId { | |||
38 | SourceRootId(r.0) | 21 | SourceRootId(r.0) |
39 | } | 22 | } |
40 | 23 | ||
41 | impl BatchDatabase { | 24 | pub fn load_cargo(root: &Path) -> Result<(AnalysisHost, FxHashMap<SourceRootId, ProjectRoot>)> { |
42 | pub fn load(crate_graph: CrateGraph, vfs: &mut Vfs) -> BatchDatabase { | 25 | let root = std::env::current_dir()?.join(root); |
43 | let mut db = BatchDatabase { runtime: salsa::Runtime::default() }; | 26 | let ws = ProjectWorkspace::discover(root.as_ref())?; |
44 | let lru_cap = std::env::var("RA_LRU_CAP") | 27 | let project_roots = ws.to_roots(); |
45 | .ok() | 28 | let (mut vfs, roots) = Vfs::new(IncludeRustFiles::from_roots(project_roots.clone()).collect()); |
46 | .and_then(|it| it.parse::<usize>().ok()) | 29 | let crate_graph = ws.to_crate_graph(&mut |path: &Path| { |
47 | .unwrap_or(ra_db::DEFAULT_LRU_CAP); | 30 | let vfs_file = vfs.load(path); |
48 | db.query_mut(ra_db::ParseQuery).set_lru_capacity(lru_cap); | 31 | log::debug!("vfs file {:?} -> {:?}", path, vfs_file); |
49 | db.query_mut(ra_hir::db::ParseMacroQuery).set_lru_capacity(lru_cap); | 32 | vfs_file.map(vfs_file_to_id) |
50 | db.set_crate_graph(Arc::new(crate_graph)); | 33 | }); |
34 | log::debug!("crate graph: {:?}", crate_graph); | ||
35 | |||
36 | let source_roots = roots | ||
37 | .iter() | ||
38 | .map(|&vfs_root| { | ||
39 | let source_root_id = vfs_root_to_id(vfs_root); | ||
40 | let project_root = project_roots | ||
41 | .iter() | ||
42 | .find(|it| it.path() == &vfs.root2path(vfs_root)) | ||
43 | .unwrap() | ||
44 | .clone(); | ||
45 | (source_root_id, project_root) | ||
46 | }) | ||
47 | .collect::<FxHashMap<_, _>>(); | ||
48 | let host = load(&source_roots, crate_graph, &mut vfs); | ||
49 | Ok((host, source_roots)) | ||
50 | } | ||
51 | 51 | ||
52 | // wait until Vfs has loaded all roots | 52 | pub fn load( |
53 | let receiver = vfs.task_receiver().clone(); | 53 | source_roots: &FxHashMap<SourceRootId, ProjectRoot>, |
54 | let mut roots_loaded = HashSet::new(); | 54 | crate_graph: CrateGraph, |
55 | for task in receiver { | 55 | vfs: &mut Vfs, |
56 | vfs.handle_task(task); | 56 | ) -> AnalysisHost { |
57 | let mut done = false; | 57 | let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); |
58 | for change in vfs.commit_changes() { | 58 | let mut host = AnalysisHost::new(lru_cap); |
59 | match change { | 59 | let mut analysis_change = AnalysisChange::new(); |
60 | VfsChange::AddRoot { root, files } => { | 60 | analysis_change.set_crate_graph(crate_graph); |
61 | let source_root_id = vfs_root_to_id(root); | 61 | |
62 | log::debug!( | 62 | // wait until Vfs has loaded all roots |
63 | "loaded source root {:?} with path {:?}", | 63 | let receiver = vfs.task_receiver().clone(); |
64 | source_root_id, | 64 | let mut roots_loaded = HashSet::new(); |
65 | vfs.root2path(root) | 65 | for task in receiver { |
66 | ); | 66 | vfs.handle_task(task); |
67 | let mut file_map = FxHashMap::default(); | 67 | let mut done = false; |
68 | for (vfs_file, path, text) in files { | 68 | for change in vfs.commit_changes() { |
69 | let file_id = vfs_file_to_id(vfs_file); | 69 | match change { |
70 | db.set_file_text(file_id, text); | 70 | VfsChange::AddRoot { root, files } => { |
71 | db.set_file_relative_path(file_id, path.clone()); | 71 | let source_root_id = vfs_root_to_id(root); |
72 | db.set_file_source_root(file_id, source_root_id); | 72 | let is_local = source_roots[&source_root_id].is_member(); |
73 | file_map.insert(path, file_id); | 73 | log::debug!( |
74 | } | 74 | "loaded source root {:?} with path {:?}", |
75 | let source_root = SourceRoot { files: file_map }; | 75 | source_root_id, |
76 | db.set_source_root(source_root_id, Arc::new(source_root)); | 76 | vfs.root2path(root) |
77 | roots_loaded.insert(source_root_id); | 77 | ); |
78 | if roots_loaded.len() == vfs.n_roots() { | 78 | analysis_change.add_root(source_root_id, is_local); |
79 | done = true; | 79 | |
80 | } | 80 | let mut file_map = FxHashMap::default(); |
81 | for (vfs_file, path, text) in files { | ||
82 | let file_id = vfs_file_to_id(vfs_file); | ||
83 | analysis_change.add_file(source_root_id, file_id, path.clone(), text); | ||
84 | file_map.insert(path, file_id); | ||
81 | } | 85 | } |
82 | VfsChange::AddFile { .. } | 86 | roots_loaded.insert(source_root_id); |
83 | | VfsChange::RemoveFile { .. } | 87 | if roots_loaded.len() == vfs.n_roots() { |
84 | | VfsChange::ChangeFile { .. } => { | 88 | done = true; |
85 | // We just need the first scan, so just ignore these | ||
86 | } | 89 | } |
87 | } | 90 | } |
88 | } | 91 | VfsChange::AddFile { .. } |
89 | if done { | 92 | | VfsChange::RemoveFile { .. } |
90 | break; | 93 | | VfsChange::ChangeFile { .. } => { |
94 | // We just need the first scan, so just ignore these | ||
95 | } | ||
91 | } | 96 | } |
92 | } | 97 | } |
93 | 98 | if done { | |
94 | db | 99 | break; |
100 | } | ||
95 | } | 101 | } |
96 | 102 | ||
97 | pub fn load_cargo(root: impl AsRef<Path>) -> Result<(BatchDatabase, Vec<SourceRootId>)> { | 103 | host.apply_change(analysis_change); |
98 | let root = std::env::current_dir()?.join(root); | 104 | host |
99 | let ws = ProjectWorkspace::discover(root.as_ref())?; | ||
100 | let mut roots = Vec::new(); | ||
101 | roots.push(IncludeRustFiles::member(root.clone())); | ||
102 | roots.extend(IncludeRustFiles::from_roots(ws.to_roots())); | ||
103 | let (mut vfs, roots) = Vfs::new(roots); | ||
104 | let mut load = |path: &Path| { | ||
105 | let vfs_file = vfs.load(path); | ||
106 | log::debug!("vfs file {:?} -> {:?}", path, vfs_file); | ||
107 | vfs_file.map(vfs_file_to_id) | ||
108 | }; | ||
109 | let crate_graph = ws.to_crate_graph(&mut load); | ||
110 | log::debug!("crate graph: {:?}", crate_graph); | ||
111 | |||
112 | let local_roots = roots | ||
113 | .into_iter() | ||
114 | .filter(|r| vfs.root2path(*r).starts_with(&root)) | ||
115 | .map(vfs_root_to_id) | ||
116 | .collect(); | ||
117 | |||
118 | let db = BatchDatabase::load(crate_graph, &mut vfs); | ||
119 | Ok((db, local_roots)) | ||
120 | } | ||
121 | } | 105 | } |
122 | 106 | ||
123 | #[cfg(test)] | 107 | #[cfg(test)] |
@@ -128,10 +112,10 @@ mod tests { | |||
128 | #[test] | 112 | #[test] |
129 | fn test_loading_rust_analyzer() { | 113 | fn test_loading_rust_analyzer() { |
130 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); | 114 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); |
131 | let (db, roots) = BatchDatabase::load_cargo(path).unwrap(); | 115 | let (host, roots) = load_cargo(path).unwrap(); |
132 | let mut n_crates = 0; | 116 | let mut n_crates = 0; |
133 | for root in roots { | 117 | for (root, _) in roots { |
134 | for _krate in Crate::source_root_crates(&db, root) { | 118 | for _krate in Crate::source_root_crates(host.raw_database(), root) { |
135 | n_crates += 1; | 119 | n_crates += 1; |
136 | } | 120 | } |
137 | } | 121 | } |
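For orientation, a minimal sketch of how the reworked `ra_batch` entry point is consumed (it mirrors the test above and the CLI changes below; the wrapper function and its name are illustrative, not part of this change):

use std::path::Path;

use ra_hir::Crate;

type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;

fn count_workspace_crates(path: &Path) -> Result<usize> {
    // `load_cargo` now returns an `AnalysisHost` plus a map from every
    // `SourceRootId` to its `ProjectRoot`, instead of a bespoke `BatchDatabase`.
    let (host, roots) = ra_batch::load_cargo(path)?;
    let db = host.raw_database();
    let mut n_crates = 0;
    for (root_id, project_root) in roots {
        // `ProjectRoot::is_member` distinguishes workspace members from
        // dependency roots, so only "local" crates are counted here.
        if project_root.is_member() {
            for _krate in Crate::source_root_crates(db, root_id) {
                n_crates += 1;
            }
        }
    }
    Ok(n_crates)
}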
diff --git a/crates/ra_batch/src/vfs_filter.rs b/crates/ra_batch/src/vfs_filter.rs
index dd20c1203..8552ac999 100644
--- a/crates/ra_batch/src/vfs_filter.rs
+++ b/crates/ra_batch/src/vfs_filter.rs
@@ -1,54 +1,54 @@ | |||
1 | use std::path::PathBuf; | 1 | use std::path::PathBuf; |
2 | use ra_project_model::ProjectRoot; | 2 | use ra_project_model::ProjectRoot; |
3 | use ra_vfs::{RootEntry, Filter, RelativePath}; | 3 | use ra_vfs::{RootEntry, Filter, RelativePath}; |
4 | 4 | ||
5 | /// `IncludeRustFiles` is used to convert | 5 | /// `IncludeRustFiles` is used to convert |
6 | /// from `ProjectRoot` to `RootEntry` for VFS | 6 | /// from `ProjectRoot` to `RootEntry` for VFS |
7 | pub struct IncludeRustFiles { | 7 | pub struct IncludeRustFiles { |
8 | root: ProjectRoot, | 8 | root: ProjectRoot, |
9 | } | 9 | } |
10 | 10 | ||
11 | impl IncludeRustFiles { | 11 | impl IncludeRustFiles { |
12 | pub fn from_roots<R>(roots: R) -> impl Iterator<Item = RootEntry> | 12 | pub fn from_roots<R>(roots: R) -> impl Iterator<Item = RootEntry> |
13 | where | 13 | where |
14 | R: IntoIterator<Item = ProjectRoot>, | 14 | R: IntoIterator<Item = ProjectRoot>, |
15 | { | 15 | { |
16 | roots.into_iter().map(IncludeRustFiles::from_root) | 16 | roots.into_iter().map(IncludeRustFiles::from_root) |
17 | } | 17 | } |
18 | 18 | ||
19 | pub fn from_root(root: ProjectRoot) -> RootEntry { | 19 | pub fn from_root(root: ProjectRoot) -> RootEntry { |
20 | IncludeRustFiles::from(root).into() | 20 | IncludeRustFiles::from(root).into() |
21 | } | 21 | } |
22 | 22 | ||
23 | #[allow(unused)] | 23 | #[allow(unused)] |
24 | pub fn external(path: PathBuf) -> RootEntry { | 24 | pub fn external(path: PathBuf) -> RootEntry { |
25 | IncludeRustFiles::from_root(ProjectRoot::new(path, false)) | 25 | IncludeRustFiles::from_root(ProjectRoot::new(path, false)) |
26 | } | 26 | } |
27 | 27 | ||
28 | pub fn member(path: PathBuf) -> RootEntry { | 28 | pub fn member(path: PathBuf) -> RootEntry { |
29 | IncludeRustFiles::from_root(ProjectRoot::new(path, true)) | 29 | IncludeRustFiles::from_root(ProjectRoot::new(path, true)) |
30 | } | 30 | } |
31 | } | 31 | } |
32 | 32 | ||
33 | impl Filter for IncludeRustFiles { | 33 | impl Filter for IncludeRustFiles { |
34 | fn include_dir(&self, dir_path: &RelativePath) -> bool { | 34 | fn include_dir(&self, dir_path: &RelativePath) -> bool { |
35 | self.root.include_dir(dir_path) | 35 | self.root.include_dir(dir_path) |
36 | } | 36 | } |
37 | 37 | ||
38 | fn include_file(&self, file_path: &RelativePath) -> bool { | 38 | fn include_file(&self, file_path: &RelativePath) -> bool { |
39 | self.root.include_file(file_path) | 39 | self.root.include_file(file_path) |
40 | } | 40 | } |
41 | } | 41 | } |
42 | 42 | ||
43 | impl std::convert::From<ProjectRoot> for IncludeRustFiles { | 43 | impl From<ProjectRoot> for IncludeRustFiles { |
44 | fn from(v: ProjectRoot) -> IncludeRustFiles { | 44 | fn from(v: ProjectRoot) -> IncludeRustFiles { |
45 | IncludeRustFiles { root: v } | 45 | IncludeRustFiles { root: v } |
46 | } | 46 | } |
47 | } | 47 | } |
48 | 48 | ||
49 | impl std::convert::From<IncludeRustFiles> for RootEntry { | 49 | impl From<IncludeRustFiles> for RootEntry { |
50 | fn from(v: IncludeRustFiles) -> RootEntry { | 50 | fn from(v: IncludeRustFiles) -> RootEntry { |
51 | let path = v.root.path().clone(); | 51 | let path = v.root.path().clone(); |
52 | RootEntry::new(path, Box::new(v)) | 52 | RootEntry::new(path, Box::new(v)) |
53 | } | 53 | } |
54 | } | 54 | } |
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs
new file mode 100644
index 000000000..33d472838
--- /dev/null
+++ b/crates/ra_cli/src/analysis_bench.rs
@@ -0,0 +1,92 @@ | |||
1 | use std::{ | ||
2 | path::{PathBuf, Path}, | ||
3 | time::Instant, | ||
4 | }; | ||
5 | |||
6 | use ra_db::{SourceDatabase, salsa::Database}; | ||
7 | use ra_ide_api::{AnalysisHost, Analysis, LineCol, FilePosition}; | ||
8 | |||
9 | use crate::Result; | ||
10 | |||
11 | pub(crate) enum Op { | ||
12 | Highlight { path: PathBuf }, | ||
13 | Complete { path: PathBuf, line: u32, column: u32 }, | ||
14 | } | ||
15 | |||
16 | pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> { | ||
17 | let start = Instant::now(); | ||
18 | eprint!("loading: "); | ||
19 | let (host, roots) = ra_batch::load_cargo(path)?; | ||
20 | let db = host.raw_database(); | ||
21 | eprintln!("{:?}\n", start.elapsed()); | ||
22 | |||
23 | let file_id = { | ||
24 | let path = match &op { | ||
25 | Op::Highlight { path } => path, | ||
26 | Op::Complete { path, .. } => path, | ||
27 | }; | ||
28 | let path = std::env::current_dir()?.join(path).canonicalize()?; | ||
29 | roots | ||
30 | .iter() | ||
31 | .find_map(|(source_root_id, project_root)| { | ||
32 | if project_root.is_member() { | ||
33 | for (rel_path, file_id) in &db.source_root(*source_root_id).files { | ||
34 | let abs_path = rel_path.to_path(project_root.path()); | ||
35 | if abs_path == path { | ||
36 | return Some(*file_id); | ||
37 | } | ||
38 | } | ||
39 | } | ||
40 | None | ||
41 | }) | ||
42 | .ok_or_else(|| format!("Can't find {:?}", path))? | ||
43 | }; | ||
44 | |||
45 | match op { | ||
46 | Op::Highlight { .. } => { | ||
47 | let res = do_work(&host, |analysis| { | ||
48 | analysis.diagnostics(file_id).unwrap(); | ||
49 | analysis.highlight_as_html(file_id, false).unwrap() | ||
50 | }); | ||
51 | if verbose { | ||
52 | println!("\n{}", res); | ||
53 | } | ||
54 | } | ||
55 | Op::Complete { line, column, .. } => { | ||
56 | let offset = host | ||
57 | .analysis() | ||
58 | .file_line_index(file_id) | ||
59 | .offset(LineCol { line, col_utf16: column }); | ||
60 | let file_postion = FilePosition { file_id, offset }; | ||
61 | |||
62 | let res = do_work(&host, |analysis| analysis.completions(file_postion)); | ||
63 | if verbose { | ||
64 | println!("\n{:#?}", res); | ||
65 | } | ||
66 | } | ||
67 | } | ||
68 | Ok(()) | ||
69 | } | ||
70 | |||
71 | fn do_work<F: Fn(&Analysis) -> T, T>(host: &AnalysisHost, work: F) -> T { | ||
72 | { | ||
73 | let start = Instant::now(); | ||
74 | eprint!("from scratch: "); | ||
75 | work(&host.analysis()); | ||
76 | eprintln!("{:?}", start.elapsed()); | ||
77 | } | ||
78 | { | ||
79 | let start = Instant::now(); | ||
80 | eprint!("no change: "); | ||
81 | work(&host.analysis()); | ||
82 | eprintln!("{:?}", start.elapsed()); | ||
83 | } | ||
84 | { | ||
85 | let start = Instant::now(); | ||
86 | eprint!("trivial change: "); | ||
87 | host.raw_database().salsa_runtime().next_revision(); | ||
88 | let res = work(&host.analysis()); | ||
89 | eprintln!("{:?}", start.elapsed()); | ||
90 | res | ||
91 | } | ||
92 | } | ||
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index c19976bd2..ed98fc7f6 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -1,25 +1,28 @@ | |||
1 | use std::{collections::HashSet, time::Instant, fmt::Write}; | 1 | use std::{collections::HashSet, time::Instant, fmt::Write, path::Path}; |
2 | 2 | ||
3 | use ra_db::SourceDatabase; | 3 | use ra_db::SourceDatabase; |
4 | use ra_batch::BatchDatabase; | ||
5 | use ra_hir::{Crate, ModuleDef, Ty, ImplItem, HasSource}; | 4 | use ra_hir::{Crate, ModuleDef, Ty, ImplItem, HasSource}; |
6 | use ra_syntax::AstNode; | 5 | use ra_syntax::AstNode; |
7 | 6 | ||
8 | use crate::Result; | 7 | use crate::Result; |
9 | 8 | ||
10 | pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { | 9 | pub fn run(verbose: bool, path: &Path, only: Option<&str>) -> Result<()> { |
11 | let db_load_time = Instant::now(); | 10 | let db_load_time = Instant::now(); |
12 | let (db, roots) = BatchDatabase::load_cargo(path)?; | 11 | let (host, roots) = ra_batch::load_cargo(path)?; |
12 | let db = host.raw_database(); | ||
13 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); | 13 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); |
14 | let analysis_time = Instant::now(); | 14 | let analysis_time = Instant::now(); |
15 | let mut num_crates = 0; | 15 | let mut num_crates = 0; |
16 | let mut visited_modules = HashSet::new(); | 16 | let mut visited_modules = HashSet::new(); |
17 | let mut visit_queue = Vec::new(); | 17 | let mut visit_queue = Vec::new(); |
18 | for root in roots { | 18 | for (source_root_id, project_root) in roots { |
19 | for krate in Crate::source_root_crates(&db, root) { | 19 | if project_root.is_member() { |
20 | num_crates += 1; | 20 | for krate in Crate::source_root_crates(db, source_root_id) { |
21 | let module = krate.root_module(&db).expect("crate in source root without root module"); | 21 | num_crates += 1; |
22 | visit_queue.push(module); | 22 | let module = |
23 | krate.root_module(db).expect("crate in source root without root module"); | ||
24 | visit_queue.push(module); | ||
25 | } | ||
23 | } | 26 | } |
24 | } | 27 | } |
25 | println!("Crates in this dir: {}", num_crates); | 28 | println!("Crates in this dir: {}", num_crates); |
@@ -27,17 +30,17 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { | |||
27 | let mut funcs = Vec::new(); | 30 | let mut funcs = Vec::new(); |
28 | while let Some(module) = visit_queue.pop() { | 31 | while let Some(module) = visit_queue.pop() { |
29 | if visited_modules.insert(module) { | 32 | if visited_modules.insert(module) { |
30 | visit_queue.extend(module.children(&db)); | 33 | visit_queue.extend(module.children(db)); |
31 | 34 | ||
32 | for decl in module.declarations(&db) { | 35 | for decl in module.declarations(db) { |
33 | num_decls += 1; | 36 | num_decls += 1; |
34 | if let ModuleDef::Function(f) = decl { | 37 | if let ModuleDef::Function(f) = decl { |
35 | funcs.push(f); | 38 | funcs.push(f); |
36 | } | 39 | } |
37 | } | 40 | } |
38 | 41 | ||
39 | for impl_block in module.impl_blocks(&db) { | 42 | for impl_block in module.impl_blocks(db) { |
40 | for item in impl_block.items(&db) { | 43 | for item in impl_block.items(db) { |
41 | num_decls += 1; | 44 | num_decls += 1; |
42 | if let ImplItem::Method(f) = item { | 45 | if let ImplItem::Method(f) = item { |
43 | funcs.push(f); | 46 | funcs.push(f); |
@@ -61,11 +64,11 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { | |||
61 | let mut num_exprs_unknown = 0; | 64 | let mut num_exprs_unknown = 0; |
62 | let mut num_exprs_partially_unknown = 0; | 65 | let mut num_exprs_partially_unknown = 0; |
63 | for f in funcs { | 66 | for f in funcs { |
64 | let name = f.name(&db); | 67 | let name = f.name(db); |
65 | let mut msg = format!("processing: {}", name); | 68 | let mut msg = format!("processing: {}", name); |
66 | if verbose { | 69 | if verbose { |
67 | let src = f.source(&db); | 70 | let src = f.source(db); |
68 | let original_file = src.file_id.original_file(&db); | 71 | let original_file = src.file_id.original_file(db); |
69 | let path = db.file_relative_path(original_file); | 72 | let path = db.file_relative_path(original_file); |
70 | let syntax_range = src.ast.syntax().range(); | 73 | let syntax_range = src.ast.syntax().range(); |
71 | write!(msg, " ({:?} {})", path, syntax_range).unwrap(); | 74 | write!(msg, " ({:?} {})", path, syntax_range).unwrap(); |
@@ -76,8 +79,8 @@ pub fn run(verbose: bool, path: &str, only: Option<&str>) -> Result<()> { | |||
76 | continue; | 79 | continue; |
77 | } | 80 | } |
78 | } | 81 | } |
79 | let body = f.body(&db); | 82 | let body = f.body(db); |
80 | let inference_result = f.infer(&db); | 83 | let inference_result = f.infer(db); |
81 | for (expr_id, _) in body.exprs() { | 84 | for (expr_id, _) in body.exprs() { |
82 | let ty = &inference_result[expr_id]; | 85 | let ty = &inference_result[expr_id]; |
83 | num_exprs += 1; | 86 | num_exprs += 1; |
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 1db98aec1..5adf8b096 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -1,4 +1,5 @@ | |||
1 | mod analysis_stats; | 1 | mod analysis_stats; |
2 | mod analysis_bench; | ||
2 | 3 | ||
3 | use std::{io::Read, error::Error}; | 4 | use std::{io::Read, error::Error}; |
4 | 5 | ||
@@ -26,6 +27,27 @@ fn main() -> Result<()> { | |||
26 | .arg(Arg::with_name("only").short("o").takes_value(true)) | 27 | .arg(Arg::with_name("only").short("o").takes_value(true)) |
27 | .arg(Arg::with_name("path")), | 28 | .arg(Arg::with_name("path")), |
28 | ) | 29 | ) |
30 | .subcommand( | ||
31 | SubCommand::with_name("analysis-bench") | ||
32 | .arg(Arg::with_name("verbose").short("v").long("verbose")) | ||
33 | .arg( | ||
34 | Arg::with_name("highlight") | ||
35 | .long("highlight") | ||
36 | .takes_value(true) | ||
37 | .conflicts_with("complete") | ||
38 | .value_name("PATH") | ||
39 | .help("highlight this file"), | ||
40 | ) | ||
41 | .arg( | ||
42 | Arg::with_name("complete") | ||
43 | .long("complete") | ||
44 | .takes_value(true) | ||
45 | .conflicts_with("highlight") | ||
46 | .value_name("PATH:LINE:COLUMN") | ||
47 | .help("compute completions at this location"), | ||
48 | ) | ||
49 | .arg(Arg::with_name("path").value_name("PATH").help("project to analyze")), | ||
50 | ) | ||
29 | .get_matches(); | 51 | .get_matches(); |
30 | match matches.subcommand() { | 52 | match matches.subcommand() { |
31 | ("parse", Some(matches)) => { | 53 | ("parse", Some(matches)) => { |
@@ -51,7 +73,25 @@ fn main() -> Result<()> { | |||
51 | let verbose = matches.is_present("verbose"); | 73 | let verbose = matches.is_present("verbose"); |
52 | let path = matches.value_of("path").unwrap_or(""); | 74 | let path = matches.value_of("path").unwrap_or(""); |
53 | let only = matches.value_of("only"); | 75 | let only = matches.value_of("only"); |
54 | analysis_stats::run(verbose, path, only)?; | 76 | analysis_stats::run(verbose, path.as_ref(), only)?; |
77 | } | ||
78 | ("analysis-bench", Some(matches)) => { | ||
79 | let verbose = matches.is_present("verbose"); | ||
80 | let path = matches.value_of("path").unwrap_or(""); | ||
81 | let op = if let Some(path) = matches.value_of("highlight") { | ||
82 | analysis_bench::Op::Highlight { path: path.into() } | ||
83 | } else if let Some(path_line_col) = matches.value_of("complete") { | ||
84 | let (path_line, column) = rsplit_at_char(path_line_col, ':')?; | ||
85 | let (path, line) = rsplit_at_char(path_line, ':')?; | ||
86 | analysis_bench::Op::Complete { | ||
87 | path: path.into(), | ||
88 | line: line.parse()?, | ||
89 | column: column.parse()?, | ||
90 | } | ||
91 | } else { | ||
92 | panic!("either --highlight or --complete must be set") | ||
93 | }; | ||
94 | analysis_bench::run(verbose, path.as_ref(), op)?; | ||
55 | } | 95 | } |
56 | _ => unreachable!(), | 96 | _ => unreachable!(), |
57 | } | 97 | } |
@@ -68,3 +108,8 @@ fn read_stdin() -> Result<String> { | |||
68 | std::io::stdin().read_to_string(&mut buff)?; | 108 | std::io::stdin().read_to_string(&mut buff)?; |
69 | Ok(buff) | 109 | Ok(buff) |
70 | } | 110 | } |
111 | |||
112 | fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> { | ||
113 | let idx = s.rfind(c).ok_or_else(|| format!("no `{}` in {}", c, s))?; | ||
114 | Ok((&s[..idx], &s[idx + 1..])) | ||
115 | } | ||
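The new subcommand is driven entirely by the clap definition above: for example, `analysis-bench --highlight <FILE> <PROJECT>` times highlighting of one file, and `analysis-bench --complete <FILE>:<LINE>:<COLUMN> <PROJECT>` times completions at a position, where the positional `PATH` is the project loaded through `ra_batch::load_cargo` (the placeholder arguments here are illustrative).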
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 12d849f37..aaace85e5 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -25,6 +25,7 @@ ra_prof = { path = "../ra_prof" } | |||
25 | chalk-solve = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } | 25 | chalk-solve = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
26 | chalk-rust-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } | 26 | chalk-rust-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
27 | chalk-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } | 27 | chalk-ir = { git = "https://github.com/flodiebold/chalk.git", branch = "fuel" } |
28 | lalrpop-intern = "0.15.1" | ||
28 | 29 | ||
29 | [dev-dependencies] | 30 | [dev-dependencies] |
30 | flexi_logger = "0.11.0" | 31 | flexi_logger = "0.11.0" |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 830aea1f3..6602d1220 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -779,6 +779,18 @@ impl Trait { | |||
779 | self.trait_data(db).items().to_vec() | 779 | self.trait_data(db).items().to_vec() |
780 | } | 780 | } |
781 | 781 | ||
782 | pub fn associated_type_by_name(self, db: &impl DefDatabase, name: Name) -> Option<TypeAlias> { | ||
783 | let trait_data = self.trait_data(db); | ||
784 | trait_data | ||
785 | .items() | ||
786 | .iter() | ||
787 | .filter_map(|item| match item { | ||
788 | TraitItem::TypeAlias(t) => Some(*t), | ||
789 | _ => None, | ||
790 | }) | ||
791 | .find(|t| t.name(db) == name) | ||
792 | } | ||
793 | |||
782 | pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> { | 794 | pub(crate) fn trait_data(self, db: &impl DefDatabase) -> Arc<TraitData> { |
783 | db.trait_data(self) | 795 | db.trait_data(self) |
784 | } | 796 | } |
@@ -831,8 +843,12 @@ impl TypeAlias { | |||
831 | } | 843 | } |
832 | } | 844 | } |
833 | 845 | ||
834 | pub fn type_ref(self, db: &impl DefDatabase) -> Arc<TypeRef> { | 846 | pub fn type_ref(self, db: &impl DefDatabase) -> Option<TypeRef> { |
835 | db.type_alias_ref(self) | 847 | db.type_alias_data(self).type_ref.clone() |
848 | } | ||
849 | |||
850 | pub fn name(self, db: &impl DefDatabase) -> Name { | ||
851 | db.type_alias_data(self).name.clone() | ||
836 | } | 852 | } |
837 | 853 | ||
838 | /// Builds a resolver for the type references in this type alias. | 854 | /// Builds a resolver for the type references in this type alias. |
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index d2d6f95b7..c4dd54596 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -16,9 +16,8 @@ use crate::{ | |||
16 | adt::{StructData, EnumData}, | 16 | adt::{StructData, EnumData}, |
17 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, | 17 | impl_block::{ModuleImplBlocks, ImplSourceMap, ImplBlock}, |
18 | generics::{GenericParams, GenericDef}, | 18 | generics::{GenericParams, GenericDef}, |
19 | type_ref::TypeRef, | ||
20 | traits::TraitData, | 19 | traits::TraitData, |
21 | lang_item::{LangItems, LangItemTarget}, | 20 | lang_item::{LangItems, LangItemTarget}, type_alias::TypeAliasData, |
22 | }; | 21 | }; |
23 | 22 | ||
24 | // This database has access to source code, so queries here are not really | 23 | // This database has access to source code, so queries here are not really |
@@ -113,8 +112,8 @@ pub trait DefDatabase: SourceDatabase { | |||
113 | #[salsa::invoke(crate::FnSignature::fn_signature_query)] | 112 | #[salsa::invoke(crate::FnSignature::fn_signature_query)] |
114 | fn fn_signature(&self, func: Function) -> Arc<FnSignature>; | 113 | fn fn_signature(&self, func: Function) -> Arc<FnSignature>; |
115 | 114 | ||
116 | #[salsa::invoke(crate::type_alias::type_alias_ref_query)] | 115 | #[salsa::invoke(crate::type_alias::type_alias_data_query)] |
117 | fn type_alias_ref(&self, typ: TypeAlias) -> Arc<TypeRef>; | 116 | fn type_alias_data(&self, typ: TypeAlias) -> Arc<TypeAliasData>; |
118 | 117 | ||
119 | #[salsa::invoke(crate::ConstSignature::const_signature_query)] | 118 | #[salsa::invoke(crate::ConstSignature::const_signature_query)] |
120 | fn const_signature(&self, konst: Const) -> Arc<ConstSignature>; | 119 | fn const_signature(&self, konst: Const) -> Arc<ConstSignature>; |
@@ -185,6 +184,13 @@ pub trait HirDatabase: DefDatabase + AstDatabase { | |||
185 | krate: Crate, | 184 | krate: Crate, |
186 | goal: crate::ty::Canonical<crate::ty::TraitRef>, | 185 | goal: crate::ty::Canonical<crate::ty::TraitRef>, |
187 | ) -> Option<crate::ty::traits::Solution>; | 186 | ) -> Option<crate::ty::traits::Solution>; |
187 | |||
188 | #[salsa::invoke(crate::ty::traits::normalize_query)] | ||
189 | fn normalize( | ||
190 | &self, | ||
191 | krate: Crate, | ||
192 | goal: crate::ty::Canonical<crate::ty::ProjectionPredicate>, | ||
193 | ) -> Option<crate::ty::traits::Solution>; | ||
188 | } | 194 | } |
189 | 195 | ||
190 | #[test] | 196 | #[test] |
diff --git a/crates/ra_hir/src/lang_item.rs b/crates/ra_hir/src/lang_item.rs
index cdc9182d6..18ac0fcf9 100644
--- a/crates/ra_hir/src/lang_item.rs
+++ b/crates/ra_hir/src/lang_item.rs
@@ -1,10 +1,11 @@ | |||
1 | use std::sync::Arc; | 1 | use std::sync::Arc; |
2 | use rustc_hash::FxHashMap; | 2 | use rustc_hash::FxHashMap; |
3 | 3 | ||
4 | use ra_syntax::{SmolStr, ast::AttrsOwner}; | 4 | use ra_syntax::{SmolStr, TreeArc, ast::AttrsOwner}; |
5 | 5 | ||
6 | use crate::{ | 6 | use crate::{ |
7 | Crate, DefDatabase, Enum, Function, HirDatabase, ImplBlock, Module, Static, Struct, Trait, AstDatabase, | 7 | Crate, DefDatabase, Enum, Function, HirDatabase, ImplBlock, Module, |
8 | Static, Struct, Trait, ModuleDef, AstDatabase, HasSource | ||
8 | }; | 9 | }; |
9 | 10 | ||
10 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] | 11 | #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] |
@@ -87,23 +88,51 @@ impl LangItems { | |||
87 | let source = module.definition_source(db).ast; | 88 | let source = module.definition_source(db).ast; |
88 | for (impl_id, _) in impl_blocks.impls.iter() { | 89 | for (impl_id, _) in impl_blocks.impls.iter() { |
89 | let impl_block = source_map.get(&source, impl_id); | 90 | let impl_block = source_map.get(&source, impl_id); |
90 | let lang_item_name = impl_block | 91 | if let Some(lang_item_name) = lang_item_name(&*impl_block) { |
91 | .attrs() | ||
92 | .filter_map(|a| a.as_key_value()) | ||
93 | .filter(|(key, _)| key == "lang") | ||
94 | .map(|(_, val)| val) | ||
95 | .nth(0); | ||
96 | if let Some(lang_item_name) = lang_item_name { | ||
97 | let imp = ImplBlock::from_id(*module, impl_id); | 92 | let imp = ImplBlock::from_id(*module, impl_id); |
98 | self.items.entry(lang_item_name).or_insert_with(|| LangItemTarget::ImplBlock(imp)); | 93 | self.items.entry(lang_item_name).or_insert_with(|| LangItemTarget::ImplBlock(imp)); |
99 | } | 94 | } |
100 | } | 95 | } |
101 | 96 | ||
102 | // FIXME we should look for the other lang item targets (traits, structs, ...) | 97 | for def in module.declarations(db) { |
98 | match def { | ||
99 | ModuleDef::Trait(trait_) => { | ||
100 | self.collect_lang_item(db, trait_, LangItemTarget::Trait) | ||
101 | } | ||
102 | ModuleDef::Enum(e) => self.collect_lang_item(db, e, LangItemTarget::Enum), | ||
103 | ModuleDef::Struct(s) => self.collect_lang_item(db, s, LangItemTarget::Struct), | ||
104 | ModuleDef::Function(f) => self.collect_lang_item(db, f, LangItemTarget::Function), | ||
105 | ModuleDef::Static(s) => self.collect_lang_item(db, s, LangItemTarget::Static), | ||
106 | _ => {} | ||
107 | } | ||
108 | } | ||
103 | 109 | ||
104 | // Look for lang items in the children | 110 | // Look for lang items in the children |
105 | for child in module.children(db) { | 111 | for child in module.children(db) { |
106 | self.collect_lang_items_recursive(db, &child); | 112 | self.collect_lang_items_recursive(db, &child); |
107 | } | 113 | } |
108 | } | 114 | } |
115 | |||
116 | fn collect_lang_item<T, N>( | ||
117 | &mut self, | ||
118 | db: &(impl DefDatabase + AstDatabase), | ||
119 | item: T, | ||
120 | constructor: fn(T) -> LangItemTarget, | ||
121 | ) where | ||
122 | T: Copy + HasSource<Ast = TreeArc<N>>, | ||
123 | N: AttrsOwner, | ||
124 | { | ||
125 | let node = item.source(db).ast; | ||
126 | if let Some(lang_item_name) = lang_item_name(&*node) { | ||
127 | self.items.entry(lang_item_name).or_insert(constructor(item)); | ||
128 | } | ||
129 | } | ||
130 | } | ||
131 | |||
132 | fn lang_item_name<T: AttrsOwner>(node: &T) -> Option<SmolStr> { | ||
133 | node.attrs() | ||
134 | .filter_map(|a| a.as_key_value()) | ||
135 | .filter(|(key, _)| key == "lang") | ||
136 | .map(|(_, val)| val) | ||
137 | .nth(0) | ||
109 | } | 138 | } |
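Lang items are now collected not only from impl blocks but also from traits, enums, structs, functions, and statics declared in each module; in particular, this is what lets `db.lang_item(krate, "deref".into())` in `ty/autoderef.rs` below find a trait annotated with `#[lang = "deref"]`.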
diff --git a/crates/ra_hir/src/name.rs b/crates/ra_hir/src/name.rs
index e9003e00b..ba17958eb 100644
--- a/crates/ra_hir/src/name.rs
+++ b/crates/ra_hir/src/name.rs
@@ -46,6 +46,11 @@ impl Name { | |||
46 | Name::new(idx.to_string().into()) | 46 | Name::new(idx.to_string().into()) |
47 | } | 47 | } |
48 | 48 | ||
49 | // Needed for Deref | ||
50 | pub(crate) fn target() -> Name { | ||
51 | Name::new("Target".into()) | ||
52 | } | ||
53 | |||
49 | // There's should be no way to extract a string out of `Name`: `Name` in the | 54 | // There's should be no way to extract a string out of `Name`: `Name` in the |
50 | // future, `Name` will include hygiene information, and you can't encode | 55 | // future, `Name` will include hygiene information, and you can't encode |
51 | // hygiene into a String. | 56 | // hygiene into a String. |
diff --git a/crates/ra_hir/src/source_binder.rs b/crates/ra_hir/src/source_binder.rs
index 4f9e8c5a9..08e86844d 100644
--- a/crates/ra_hir/src/source_binder.rs
+++ b/crates/ra_hir/src/source_binder.rs
@@ -369,6 +369,17 @@ impl SourceAnalyzer { | |||
369 | ) | 369 | ) |
370 | } | 370 | } |
371 | 371 | ||
372 | pub fn autoderef<'a>( | ||
373 | &'a self, | ||
374 | db: &'a impl HirDatabase, | ||
375 | ty: Ty, | ||
376 | ) -> impl Iterator<Item = Ty> + 'a { | ||
377 | // There should be no inference vars in types passed here | ||
378 | // FIXME check that? | ||
379 | let canonical = crate::ty::Canonical { value: ty, num_vars: 0 }; | ||
380 | crate::ty::autoderef(db, &self.resolver, canonical).map(|canonical| canonical.value) | ||
381 | } | ||
382 | |||
372 | #[cfg(test)] | 383 | #[cfg(test)] |
373 | pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> { | 384 | pub(crate) fn body_source_map(&self) -> Arc<BodySourceMap> { |
374 | self.body_source_map.clone().unwrap() | 385 | self.body_source_map.clone().unwrap() |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index 4a37e0268..842d49e1f 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -16,12 +16,14 @@ use std::sync::Arc; | |||
16 | use std::ops::Deref; | 16 | use std::ops::Deref; |
17 | use std::{fmt, mem}; | 17 | use std::{fmt, mem}; |
18 | 18 | ||
19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams}; | 19 | use crate::{Name, AdtDef, type_ref::Mutability, db::HirDatabase, Trait, GenericParams, TypeAlias}; |
20 | use display::{HirDisplay, HirFormatter}; | 20 | use display::{HirDisplay, HirFormatter}; |
21 | 21 | ||
22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates, generic_defaults}; | 22 | pub(crate) use lower::{TypableDef, type_for_def, type_for_field, callable_item_sig, generic_predicates, generic_defaults}; |
23 | pub(crate) use infer::{infer_query, InferenceResult, InferTy}; | 23 | pub(crate) use infer::{infer_query, InferenceResult, InferTy}; |
24 | pub use lower::CallableDef; | 24 | pub use lower::CallableDef; |
25 | pub(crate) use autoderef::autoderef; | ||
26 | pub(crate) use traits::ProjectionPredicate; | ||
25 | 27 | ||
26 | /// A type constructor or type name: this might be something like the primitive | 28 | /// A type constructor or type name: this might be something like the primitive |
27 | /// type `bool`, a struct like `Vec`, or things like function pointers or | 29 | /// type `bool`, a struct like `Vec`, or things like function pointers or |
@@ -100,6 +102,15 @@ pub struct ApplicationTy { | |||
100 | pub parameters: Substs, | 102 | pub parameters: Substs, |
101 | } | 103 | } |
102 | 104 | ||
105 | /// A "projection" type corresponds to an (unnormalized) | ||
106 | /// projection like `<P0 as Trait<P1..Pn>>::Foo`. Note that the | ||
107 | /// trait and all its parameters are fully known. | ||
108 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | ||
109 | pub struct ProjectionTy { | ||
110 | pub associated_ty: TypeAlias, | ||
111 | pub parameters: Substs, | ||
112 | } | ||
113 | |||
103 | /// A type. | 114 | /// A type. |
104 | /// | 115 | /// |
105 | /// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents | 116 | /// See also the `TyKind` enum in rustc (librustc/ty/sty.rs), which represents |
@@ -216,8 +227,8 @@ impl Deref for Substs { | |||
216 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] | 227 | #[derive(Clone, PartialEq, Eq, Debug, Hash)] |
217 | pub struct TraitRef { | 228 | pub struct TraitRef { |
218 | /// FIXME name? | 229 | /// FIXME name? |
219 | trait_: Trait, | 230 | pub trait_: Trait, |
220 | substs: Substs, | 231 | pub substs: Substs, |
221 | } | 232 | } |
222 | 233 | ||
223 | impl TraitRef { | 234 | impl TraitRef { |
@@ -464,6 +475,17 @@ impl Ty { | |||
464 | _ => None, | 475 | _ => None, |
465 | } | 476 | } |
466 | } | 477 | } |
478 | |||
479 | /// Shifts up `Ty::Bound` vars by `n`. | ||
480 | pub fn shift_bound_vars(self, n: i32) -> Ty { | ||
481 | self.fold(&mut |ty| match ty { | ||
482 | Ty::Bound(idx) => { | ||
483 | assert!(idx as i32 >= -n); | ||
484 | Ty::Bound((idx as i32 + n) as u32) | ||
485 | } | ||
486 | ty => ty, | ||
487 | }) | ||
488 | } | ||
467 | } | 489 | } |
468 | 490 | ||
469 | impl HirDisplay for &Ty { | 491 | impl HirDisplay for &Ty { |
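`shift_bound_vars` simply renumbers `Ty::Bound` indices, so for instance `Ty::Bound(0).shift_bound_vars(1)` yields `Ty::Bound(1)`; `deref_by_trait` in `ty/autoderef.rs` below uses it to make room for the fresh bound variable that stands for the not-yet-known `Deref::Target` type.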
diff --git a/crates/ra_hir/src/ty/autoderef.rs b/crates/ra_hir/src/ty/autoderef.rs
index a442a856c..1f443d49b 100644
--- a/crates/ra_hir/src/ty/autoderef.rs
+++ b/crates/ra_hir/src/ty/autoderef.rs
@@ -5,17 +5,88 @@ | |||
5 | 5 | ||
6 | use std::iter::successors; | 6 | use std::iter::successors; |
7 | 7 | ||
8 | use crate::HirDatabase; | 8 | use log::{info, warn}; |
9 | use super::Ty; | ||
10 | 9 | ||
11 | impl Ty { | 10 | use crate::{HirDatabase, Name, Resolver, HasGenericParams}; |
12 | /// Iterates over the possible derefs of `ty`. | 11 | use super::{traits::Solution, Ty, Canonical}; |
13 | pub fn autoderef<'a>(self, db: &'a impl HirDatabase) -> impl Iterator<Item = Ty> + 'a { | 12 | |
14 | successors(Some(self), move |ty| ty.autoderef_step(db)) | 13 | const AUTODEREF_RECURSION_LIMIT: usize = 10; |
14 | |||
15 | pub(crate) fn autoderef<'a>( | ||
16 | db: &'a impl HirDatabase, | ||
17 | resolver: &'a Resolver, | ||
18 | ty: Canonical<Ty>, | ||
19 | ) -> impl Iterator<Item = Canonical<Ty>> + 'a { | ||
20 | successors(Some(ty), move |ty| deref(db, resolver, ty)).take(AUTODEREF_RECURSION_LIMIT) | ||
21 | } | ||
22 | |||
23 | pub(crate) fn deref( | ||
24 | db: &impl HirDatabase, | ||
25 | resolver: &Resolver, | ||
26 | ty: &Canonical<Ty>, | ||
27 | ) -> Option<Canonical<Ty>> { | ||
28 | if let Some(derefed) = ty.value.builtin_deref() { | ||
29 | Some(Canonical { value: derefed, num_vars: ty.num_vars }) | ||
30 | } else { | ||
31 | deref_by_trait(db, resolver, ty) | ||
32 | } | ||
33 | } | ||
34 | |||
35 | fn deref_by_trait( | ||
36 | db: &impl HirDatabase, | ||
37 | resolver: &Resolver, | ||
38 | ty: &Canonical<Ty>, | ||
39 | ) -> Option<Canonical<Ty>> { | ||
40 | let krate = resolver.krate()?; | ||
41 | let deref_trait = match db.lang_item(krate, "deref".into())? { | ||
42 | crate::lang_item::LangItemTarget::Trait(t) => t, | ||
43 | _ => return None, | ||
44 | }; | ||
45 | let target = deref_trait.associated_type_by_name(db, Name::target())?; | ||
46 | |||
47 | if target.generic_params(db).count_params_including_parent() != 1 { | ||
48 | // the Target type + Deref trait should only have one generic parameter, | ||
49 | // namely Deref's Self type | ||
50 | return None; | ||
15 | } | 51 | } |
16 | 52 | ||
17 | fn autoderef_step(&self, _db: &impl HirDatabase) -> Option<Ty> { | 53 | // FIXME make the Canonical handling nicer |
18 | // FIXME Deref::deref | 54 | |
19 | self.builtin_deref() | 55 | let projection = super::traits::ProjectionPredicate { |
56 | ty: Ty::Bound(0), | ||
57 | projection_ty: super::ProjectionTy { | ||
58 | associated_ty: target, | ||
59 | parameters: vec![ty.value.clone().shift_bound_vars(1)].into(), | ||
60 | }, | ||
61 | }; | ||
62 | |||
63 | let canonical = super::Canonical { num_vars: 1 + ty.num_vars, value: projection }; | ||
64 | |||
65 | let solution = db.normalize(krate, canonical)?; | ||
66 | |||
67 | match &solution { | ||
68 | Solution::Unique(vars) => { | ||
69 | // FIXME: vars may contain solutions for any inference variables | ||
70 | // that happened to be inside ty. To correctly handle these, we | ||
71 | // would have to pass the solution up to the inference context, but | ||
72 | // that requires a larger refactoring (especially if the deref | ||
73 | // happens during method resolution). So for the moment, we just | ||
74 | // check that we're not in the situation where we would actually | ||
75 | // need to handle the values of the additional variables, i.e. | ||
76 | // they're just being 'passed through'. In the 'standard' case where | ||
77 | // we have `impl<T> Deref for Foo<T> { Target = T }`, that should be | ||
78 | // the case. | ||
79 | for i in 1..vars.0.num_vars { | ||
80 | if vars.0.value[i] != Ty::Bound((i - 1) as u32) { | ||
81 | warn!("complex solution for derefing {:?}: {:?}, ignoring", ty, solution); | ||
82 | return None; | ||
83 | } | ||
84 | } | ||
85 | Some(Canonical { value: vars.0.value[0].clone(), num_vars: vars.0.num_vars }) | ||
86 | } | ||
87 | Solution::Ambig(_) => { | ||
88 | info!("Ambiguous solution for derefing {:?}: {:?}", ty, solution); | ||
89 | None | ||
90 | } | ||
20 | } | 91 | } |
21 | } | 92 | } |
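Put together, for a receiver such as `Arc<S>` the new `deref_by_trait` path builds the canonical goal "normalize `<Arc<S> as Deref>::Target` to the fresh variable `Ty::Bound(0)`" (bound variables already present in the receiver are shifted up by one so they do not collide with it), hands the goal to the new `normalize` query, and accepts only a unique solution whose extra variables are plain pass-throughs. A minimal source-level illustration, condensed from the `deref_trait` test added further down:

#[lang = "deref"]
trait Deref {
    type Target;
    fn deref(&self) -> &Self::Target;
}

struct Arc<T>;
impl<T> Deref for Arc<T> {
    type Target = T;
}

struct S;

fn test(s: Arc<S>) {
    // `*s` is now inferred as `S`: chalk normalizes `<Arc<S> as Deref>::Target` to `S`.
    let _derefed = *s;
}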
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs
index e150d7fd8..1ee40c70a 100644
--- a/crates/ra_hir/src/ty/infer.rs
+++ b/crates/ra_hir/src/ty/infer.rs
@@ -46,7 +46,7 @@ use crate::{ | |||
46 | use super::{ | 46 | use super::{ |
47 | Ty, TypableDef, Substs, primitive, op, ApplicationTy, TypeCtor, CallableDef, TraitRef, | 47 | Ty, TypableDef, Substs, primitive, op, ApplicationTy, TypeCtor, CallableDef, TraitRef, |
48 | traits::{Solution, Obligation, Guidance}, | 48 | traits::{Solution, Obligation, Guidance}, |
49 | method_resolution, | 49 | method_resolution, autoderef, |
50 | }; | 50 | }; |
51 | 51 | ||
52 | mod unify; | 52 | mod unify; |
@@ -1074,25 +1074,27 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1074 | } | 1074 | } |
1075 | Expr::Field { expr, name } => { | 1075 | Expr::Field { expr, name } => { |
1076 | let receiver_ty = self.infer_expr(*expr, &Expectation::none()); | 1076 | let receiver_ty = self.infer_expr(*expr, &Expectation::none()); |
1077 | let ty = receiver_ty | 1077 | let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty); |
1078 | .autoderef(self.db) | 1078 | let ty = autoderef::autoderef( |
1079 | .find_map(|derefed_ty| match derefed_ty { | 1079 | self.db, |
1080 | Ty::Apply(a_ty) => match a_ty.ctor { | 1080 | &self.resolver.clone(), |
1081 | TypeCtor::Tuple { .. } => { | 1081 | canonicalized.value.clone(), |
1082 | let i = name.to_string().parse::<usize>().ok(); | 1082 | ) |
1083 | i.and_then(|i| a_ty.parameters.0.get(i).cloned()) | 1083 | .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) { |
1084 | } | 1084 | Ty::Apply(a_ty) => match a_ty.ctor { |
1085 | TypeCtor::Adt(AdtDef::Struct(s)) => { | 1085 | TypeCtor::Tuple { .. } => { |
1086 | s.field(self.db, name).map(|field| { | 1086 | let i = name.to_string().parse::<usize>().ok(); |
1087 | self.write_field_resolution(tgt_expr, field); | 1087 | i.and_then(|i| a_ty.parameters.0.get(i).cloned()) |
1088 | field.ty(self.db).subst(&a_ty.parameters) | 1088 | } |
1089 | }) | 1089 | TypeCtor::Adt(AdtDef::Struct(s)) => s.field(self.db, name).map(|field| { |
1090 | } | 1090 | self.write_field_resolution(tgt_expr, field); |
1091 | _ => None, | 1091 | field.ty(self.db).subst(&a_ty.parameters) |
1092 | }, | 1092 | }), |
1093 | _ => None, | 1093 | _ => None, |
1094 | }) | 1094 | }, |
1095 | .unwrap_or(Ty::Unknown); | 1095 | _ => None, |
1096 | }) | ||
1097 | .unwrap_or(Ty::Unknown); | ||
1096 | self.insert_type_vars(ty) | 1098 | self.insert_type_vars(ty) |
1097 | } | 1099 | } |
1098 | Expr::Try { expr } => { | 1100 | Expr::Try { expr } => { |
@@ -1124,10 +1126,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
1124 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | 1126 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); |
1125 | match op { | 1127 | match op { |
1126 | UnaryOp::Deref => { | 1128 | UnaryOp::Deref => { |
1127 | if let Some(derefed_ty) = inner_ty.builtin_deref() { | 1129 | let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty); |
1128 | derefed_ty | 1130 | if let Some(derefed_ty) = |
1131 | autoderef::deref(self.db, &self.resolver, &canonicalized.value) | ||
1132 | { | ||
1133 | canonicalized.decanonicalize_ty(derefed_ty.value) | ||
1129 | } else { | 1134 | } else { |
1130 | // FIXME Deref::deref | ||
1131 | Ty::Unknown | 1135 | Ty::Unknown |
1132 | } | 1136 | } |
1133 | } | 1137 | } |
diff --git a/crates/ra_hir/src/ty/lower.rs b/crates/ra_hir/src/ty/lower.rs
index 26c213a41..300616a53 100644
--- a/crates/ra_hir/src/ty/lower.rs
+++ b/crates/ra_hir/src/ty/lower.rs
@@ -460,7 +460,7 @@ fn type_for_type_alias(db: &impl HirDatabase, t: TypeAlias) -> Ty { | |||
460 | let resolver = t.resolver(db); | 460 | let resolver = t.resolver(db); |
461 | let type_ref = t.type_ref(db); | 461 | let type_ref = t.type_ref(db); |
462 | let substs = Substs::identity(&generics); | 462 | let substs = Substs::identity(&generics); |
463 | let inner = Ty::from_hir(db, &resolver, &type_ref); | 463 | let inner = Ty::from_hir(db, &resolver, &type_ref.unwrap_or(TypeRef::Error)); |
464 | inner.subst(&substs) | 464 | inner.subst(&substs) |
465 | } | 465 | } |
466 | 466 | ||
diff --git a/crates/ra_hir/src/ty/method_resolution.rs b/crates/ra_hir/src/ty/method_resolution.rs
index 646e58aa9..ad26d591c 100644
--- a/crates/ra_hir/src/ty/method_resolution.rs
+++ b/crates/ra_hir/src/ty/method_resolution.rs
@@ -16,7 +16,7 @@ use crate::{ | |||
16 | generics::HasGenericParams, | 16 | generics::HasGenericParams, |
17 | ty::primitive::{UncertainIntTy, UncertainFloatTy} | 17 | ty::primitive::{UncertainIntTy, UncertainFloatTy} |
18 | }; | 18 | }; |
19 | use super::{TraitRef, Canonical}; | 19 | use super::{TraitRef, Canonical, autoderef}; |
20 | 20 | ||
21 | /// This is used as a key for indexing impls. | 21 | /// This is used as a key for indexing impls. |
22 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] | 22 | #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] |
@@ -162,8 +162,7 @@ pub(crate) fn iterate_method_candidates<T>( | |||
162 | // rustc does an autoderef and then autoref again). | 162 | // rustc does an autoderef and then autoref again). |
163 | 163 | ||
164 | let krate = resolver.krate()?; | 164 | let krate = resolver.krate()?; |
165 | for derefed_ty in ty.value.clone().autoderef(db) { | 165 | for derefed_ty in autoderef::autoderef(db, resolver, ty.clone()) { |
166 | let derefed_ty = Canonical { value: derefed_ty, num_vars: ty.num_vars }; | ||
167 | if let Some(result) = iterate_inherent_methods(&derefed_ty, db, name, krate, &mut callback) | 166 | if let Some(result) = iterate_inherent_methods(&derefed_ty, db, name, krate, &mut callback) |
168 | { | 167 | { |
169 | return Some(result); | 168 | return Some(result); |
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index 54b2a8c16..0fe7805e2 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -2737,6 +2737,90 @@ fn main() { | |||
2737 | assert_eq!(t, "Foo"); | 2737 | assert_eq!(t, "Foo"); |
2738 | } | 2738 | } |
2739 | 2739 | ||
2740 | #[test] | ||
2741 | fn deref_trait() { | ||
2742 | let t = type_at( | ||
2743 | r#" | ||
2744 | //- /main.rs | ||
2745 | #[lang = "deref"] | ||
2746 | trait Deref { | ||
2747 | type Target; | ||
2748 | fn deref(&self) -> &Self::Target; | ||
2749 | } | ||
2750 | |||
2751 | struct Arc<T>; | ||
2752 | impl<T> Deref for Arc<T> { | ||
2753 | type Target = T; | ||
2754 | } | ||
2755 | |||
2756 | struct S; | ||
2757 | impl S { | ||
2758 | fn foo(&self) -> u128 {} | ||
2759 | } | ||
2760 | |||
2761 | fn test(s: Arc<S>) { | ||
2762 | (*s, s.foo())<|> | ||
2763 | } | ||
2764 | "#, | ||
2765 | ); | ||
2766 | assert_eq!(t, "(S, u128)"); | ||
2767 | } | ||
2768 | |||
2769 | #[test] | ||
2770 | fn deref_trait_with_inference_var() { | ||
2771 | let t = type_at( | ||
2772 | r#" | ||
2773 | //- /main.rs | ||
2774 | #[lang = "deref"] | ||
2775 | trait Deref { | ||
2776 | type Target; | ||
2777 | fn deref(&self) -> &Self::Target; | ||
2778 | } | ||
2779 | |||
2780 | struct Arc<T>; | ||
2781 | fn new_arc<T>() -> Arc<T> {} | ||
2782 | impl<T> Deref for Arc<T> { | ||
2783 | type Target = T; | ||
2784 | } | ||
2785 | |||
2786 | struct S; | ||
2787 | fn foo(a: Arc<S>) {} | ||
2788 | |||
2789 | fn test() { | ||
2790 | let a = new_arc(); | ||
2791 | let b = (*a)<|>; | ||
2792 | foo(a); | ||
2793 | } | ||
2794 | "#, | ||
2795 | ); | ||
2796 | assert_eq!(t, "S"); | ||
2797 | } | ||
2798 | |||
2799 | #[test] | ||
2800 | fn deref_trait_infinite_recursion() { | ||
2801 | let t = type_at( | ||
2802 | r#" | ||
2803 | //- /main.rs | ||
2804 | #[lang = "deref"] | ||
2805 | trait Deref { | ||
2806 | type Target; | ||
2807 | fn deref(&self) -> &Self::Target; | ||
2808 | } | ||
2809 | |||
2810 | struct S; | ||
2811 | |||
2812 | impl Deref for S { | ||
2813 | type Target = S; | ||
2814 | } | ||
2815 | |||
2816 | fn test(s: S) { | ||
2817 | s.foo()<|>; | ||
2818 | } | ||
2819 | "#, | ||
2820 | ); | ||
2821 | assert_eq!(t, "{unknown}"); | ||
2822 | } | ||
2823 | |||
2740 | fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { | 2824 | fn type_at_pos(db: &MockDatabase, pos: FilePosition) -> String { |
2741 | let file = db.parse(pos.file_id).ok().unwrap(); | 2825 | let file = db.parse(pos.file_id).ok().unwrap(); |
2742 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); | 2826 | let expr = algo::find_node_at_offset::<ast::Expr>(file.syntax(), pos.offset).unwrap(); |
diff --git a/crates/ra_hir/src/ty/traits.rs b/crates/ra_hir/src/ty/traits.rs
index fda7f9c04..9a6349d4b 100644
--- a/crates/ra_hir/src/ty/traits.rs
+++ b/crates/ra_hir/src/ty/traits.rs
@@ -8,7 +8,7 @@ use chalk_ir::cast::Cast; | |||
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | 9 | ||
10 | use crate::{Crate, Trait, db::HirDatabase, ImplBlock}; | 10 | use crate::{Crate, Trait, db::HirDatabase, ImplBlock}; |
11 | use super::{TraitRef, Ty, Canonical}; | 11 | use super::{TraitRef, Ty, Canonical, ProjectionTy}; |
12 | 12 | ||
13 | use self::chalk::{ToChalk, from_chalk}; | 13 | use self::chalk::{ToChalk, from_chalk}; |
14 | 14 | ||
@@ -75,6 +75,13 @@ pub enum Obligation { | |||
75 | /// Prove that a certain type implements a trait (the type is the `Self` type | 75 | /// Prove that a certain type implements a trait (the type is the `Self` type |
76 | /// parameter to the `TraitRef`). | 76 | /// parameter to the `TraitRef`). |
77 | Trait(TraitRef), | 77 | Trait(TraitRef), |
78 | // Projection(ProjectionPredicate), | ||
79 | } | ||
80 | |||
81 | #[derive(Clone, Debug, PartialEq, Eq, Hash)] | ||
82 | pub struct ProjectionPredicate { | ||
83 | pub projection_ty: ProjectionTy, | ||
84 | pub ty: Ty, | ||
78 | } | 85 | } |
79 | 86 | ||
80 | /// Check using Chalk whether trait is implemented for given parameters including `Self` type. | 87 | /// Check using Chalk whether trait is implemented for given parameters including `Self` type. |
@@ -98,6 +105,30 @@ pub(crate) fn implements_query( | |||
98 | solution.map(|solution| solution_from_chalk(db, solution)) | 105 | solution.map(|solution| solution_from_chalk(db, solution)) |
99 | } | 106 | } |
100 | 107 | ||
108 | pub(crate) fn normalize_query( | ||
109 | db: &impl HirDatabase, | ||
110 | krate: Crate, | ||
111 | projection: Canonical<ProjectionPredicate>, | ||
112 | ) -> Option<Solution> { | ||
113 | let goal: chalk_ir::Goal = chalk_ir::Normalize { | ||
114 | projection: projection.value.projection_ty.to_chalk(db), | ||
115 | ty: projection.value.ty.to_chalk(db), | ||
116 | } | ||
117 | .cast(); | ||
118 | debug!("goal: {:?}", goal); | ||
119 | // FIXME unify with `implements` | ||
120 | let env = chalk_ir::Environment::new(); | ||
121 | let in_env = chalk_ir::InEnvironment::new(&env, goal); | ||
122 | let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); | ||
123 | let canonical = | ||
124 | chalk_ir::Canonical { value: in_env, binders: vec![parameter; projection.num_vars] }; | ||
125 | // We currently don't deal with universes (I think / hope they're not yet | ||
126 | // relevant for our use cases?) | ||
127 | let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 }; | ||
128 | let solution = solve(db, krate, &u_canonical); | ||
129 | solution.map(|solution| solution_from_chalk(db, solution)) | ||
130 | } | ||
131 | |||
101 | fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) -> Solution { | 132 | fn solution_from_chalk(db: &impl HirDatabase, solution: chalk_solve::Solution) -> Solution { |
102 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution>| { | 133 | let convert_subst = |subst: chalk_ir::Canonical<chalk_ir::Substitution>| { |
103 | let value = subst | 134 | let value = subst |
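`normalize_query` wraps a canonicalized `ProjectionPredicate` ("this projection equals this type") into a chalk `Normalize` goal and hands it to the solver. In source-level terms, normalization is the step that turns a projection like `<S as Trait>::Assoc` into the concrete type an impl assigns to it. The plain-Rust sketch below (made-up `Pointer`/`Inner` names, no solver involved) shows the relationship being asked about:

```rust
trait Pointer {
    type Target;
}

struct S;
struct Inner;

impl Pointer for S {
    type Target = Inner;
}

// Normalizing `<S as Pointer>::Target` yields `Inner`; the function below only
// type-checks because the compiler performs exactly that normalization when it
// matches the returned `Inner` value against the written return type.
fn normalized(_: S) -> <S as Pointer>::Target {
    Inner
}

fn main() {
    let _inner: Inner = normalized(S);
}
```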
diff --git a/crates/ra_hir/src/ty/traits/chalk.rs b/crates/ra_hir/src/ty/traits/chalk.rs index 1e4806db0..5105588ee 100644 --- a/crates/ra_hir/src/ty/traits/chalk.rs +++ b/crates/ra_hir/src/ty/traits/chalk.rs | |||
@@ -3,7 +3,7 @@ use std::sync::Arc; | |||
3 | 3 | ||
4 | use log::debug; | 4 | use log::debug; |
5 | 5 | ||
6 | use chalk_ir::{TypeId, ImplId, TypeKindId, ProjectionTy, Parameter, Identifier, cast::Cast, PlaceholderIndex, UniverseIndex, TypeName}; | 6 | use chalk_ir::{TypeId, ImplId, TypeKindId, Parameter, Identifier, cast::Cast, PlaceholderIndex, UniverseIndex, TypeName}; |
7 | use chalk_rust_ir::{AssociatedTyDatum, TraitDatum, StructDatum, ImplDatum}; | 7 | use chalk_rust_ir::{AssociatedTyDatum, TraitDatum, StructDatum, ImplDatum}; |
8 | 8 | ||
9 | use test_utils::tested_by; | 9 | use test_utils::tested_by; |
@@ -12,9 +12,9 @@ use ra_db::salsa::{InternId, InternKey}; | |||
12 | use crate::{ | 12 | use crate::{ |
13 | Trait, HasGenericParams, ImplBlock, | 13 | Trait, HasGenericParams, ImplBlock, |
14 | db::HirDatabase, | 14 | db::HirDatabase, |
15 | ty::{TraitRef, Ty, ApplicationTy, TypeCtor, Substs, GenericPredicate, CallableDef}, | 15 | ty::{TraitRef, Ty, ApplicationTy, TypeCtor, Substs, GenericPredicate, CallableDef, ProjectionTy}, |
16 | ty::display::HirDisplay, | 16 | ty::display::HirDisplay, |
17 | generics::GenericDef, | 17 | generics::GenericDef, TypeAlias, ImplItem, |
18 | }; | 18 | }; |
19 | use super::ChalkContext; | 19 | use super::ChalkContext; |
20 | 20 | ||
@@ -156,6 +156,18 @@ impl ToChalk for ImplBlock { | |||
156 | } | 156 | } |
157 | } | 157 | } |
158 | 158 | ||
159 | impl ToChalk for TypeAlias { | ||
160 | type Chalk = chalk_ir::TypeId; | ||
161 | |||
162 | fn to_chalk(self, _db: &impl HirDatabase) -> chalk_ir::TypeId { | ||
163 | self.id.into() | ||
164 | } | ||
165 | |||
166 | fn from_chalk(_db: &impl HirDatabase, impl_id: chalk_ir::TypeId) -> TypeAlias { | ||
167 | TypeAlias { id: impl_id.into() } | ||
168 | } | ||
169 | } | ||
170 | |||
159 | impl ToChalk for GenericPredicate { | 171 | impl ToChalk for GenericPredicate { |
160 | type Chalk = chalk_ir::QuantifiedWhereClause; | 172 | type Chalk = chalk_ir::QuantifiedWhereClause; |
161 | 173 | ||
@@ -183,6 +195,24 @@ impl ToChalk for GenericPredicate { | |||
183 | } | 195 | } |
184 | } | 196 | } |
185 | 197 | ||
198 | impl ToChalk for ProjectionTy { | ||
199 | type Chalk = chalk_ir::ProjectionTy; | ||
200 | |||
201 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::ProjectionTy { | ||
202 | chalk_ir::ProjectionTy { | ||
203 | associated_ty_id: self.associated_ty.to_chalk(db), | ||
204 | parameters: self.parameters.to_chalk(db), | ||
205 | } | ||
206 | } | ||
207 | |||
208 | fn from_chalk(db: &impl HirDatabase, projection_ty: chalk_ir::ProjectionTy) -> ProjectionTy { | ||
209 | ProjectionTy { | ||
210 | associated_ty: from_chalk(db, projection_ty.associated_ty_id), | ||
211 | parameters: from_chalk(db, projection_ty.parameters), | ||
212 | } | ||
213 | } | ||
214 | } | ||
215 | |||
186 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { | 216 | fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { |
187 | chalk_ir::Binders { | 217 | chalk_ir::Binders { |
188 | value, | 218 | value, |
@@ -225,8 +255,29 @@ impl<'a, DB> chalk_solve::RustIrDatabase for ChalkContext<'a, DB> | |||
225 | where | 255 | where |
226 | DB: HirDatabase, | 256 | DB: HirDatabase, |
227 | { | 257 | { |
228 | fn associated_ty_data(&self, _ty: TypeId) -> Arc<AssociatedTyDatum> { | 258 | fn associated_ty_data(&self, id: TypeId) -> Arc<AssociatedTyDatum> { |
229 | unimplemented!() | 259 | debug!("associated_ty_data {:?}", id); |
260 | let type_alias: TypeAlias = from_chalk(self.db, id); | ||
261 | let trait_ = match type_alias.container(self.db) { | ||
262 | Some(crate::Container::Trait(t)) => t, | ||
263 | _ => panic!("associated type not in trait"), | ||
264 | }; | ||
265 | let generic_params = type_alias.generic_params(self.db); | ||
266 | let parameter_kinds = generic_params | ||
267 | .params_including_parent() | ||
268 | .into_iter() | ||
269 | .map(|p| chalk_ir::ParameterKind::Ty(lalrpop_intern::intern(&p.name.to_string()))) | ||
270 | .collect(); | ||
271 | let datum = AssociatedTyDatum { | ||
272 | trait_id: trait_.to_chalk(self.db), | ||
273 | id, | ||
274 | name: lalrpop_intern::intern(&type_alias.name(self.db).to_string()), | ||
275 | parameter_kinds, | ||
276 | // FIXME add bounds and where clauses | ||
277 | bounds: vec![], | ||
278 | where_clauses: vec![], | ||
279 | }; | ||
280 | Arc::new(datum) | ||
230 | } | 281 | } |
231 | fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum> { | 282 | fn trait_datum(&self, trait_id: chalk_ir::TraitId) -> Arc<TraitDatum> { |
232 | debug!("trait_datum {:?}", trait_id); | 283 | debug!("trait_datum {:?}", trait_id); |
@@ -260,7 +311,15 @@ where | |||
260 | fundamental: false, | 311 | fundamental: false, |
261 | }; | 312 | }; |
262 | let where_clauses = convert_where_clauses(self.db, trait_.into(), &bound_vars); | 313 | let where_clauses = convert_where_clauses(self.db, trait_.into(), &bound_vars); |
263 | let associated_ty_ids = Vec::new(); // FIXME add associated tys | 314 | let associated_ty_ids = trait_ |
315 | .items(self.db) | ||
316 | .into_iter() | ||
317 | .filter_map(|trait_item| match trait_item { | ||
318 | crate::traits::TraitItem::TypeAlias(type_alias) => Some(type_alias), | ||
319 | _ => None, | ||
320 | }) | ||
321 | .map(|type_alias| type_alias.to_chalk(self.db)) | ||
322 | .collect(); | ||
264 | let trait_datum_bound = | 323 | let trait_datum_bound = |
265 | chalk_rust_ir::TraitDatumBound { trait_ref, where_clauses, flags, associated_ty_ids }; | 324 | chalk_rust_ir::TraitDatumBound { trait_ref, where_clauses, flags, associated_ty_ids }; |
266 | let trait_datum = TraitDatum { binders: make_binders(trait_datum_bound, bound_vars.len()) }; | 325 | let trait_datum = TraitDatum { binders: make_binders(trait_datum_bound, bound_vars.len()) }; |
@@ -359,7 +418,29 @@ where | |||
359 | trait_ref.display(self.db), | 418 | trait_ref.display(self.db), |
360 | where_clauses | 419 | where_clauses |
361 | ); | 420 | ); |
421 | let trait_ = trait_ref.trait_; | ||
362 | let trait_ref = trait_ref.to_chalk(self.db); | 422 | let trait_ref = trait_ref.to_chalk(self.db); |
423 | let associated_ty_values = impl_block | ||
424 | .items(self.db) | ||
425 | .into_iter() | ||
426 | .filter_map(|item| match item { | ||
427 | ImplItem::TypeAlias(t) => Some(t), | ||
428 | _ => None, | ||
429 | }) | ||
430 | .filter_map(|t| { | ||
431 | let assoc_ty = trait_.associated_type_by_name(self.db, t.name(self.db))?; | ||
432 | let ty = self.db.type_for_def(t.into(), crate::Namespace::Types).subst(&bound_vars); | ||
433 | Some(chalk_rust_ir::AssociatedTyValue { | ||
434 | impl_id, | ||
435 | associated_ty_id: assoc_ty.to_chalk(self.db), | ||
436 | value: chalk_ir::Binders { | ||
437 | value: chalk_rust_ir::AssociatedTyValueBound { ty: ty.to_chalk(self.db) }, | ||
438 | binders: vec![], // we don't support GATs yet | ||
439 | }, | ||
440 | }) | ||
441 | }) | ||
442 | .collect(); | ||
443 | |||
363 | let impl_datum_bound = chalk_rust_ir::ImplDatumBound { | 444 | let impl_datum_bound = chalk_rust_ir::ImplDatumBound { |
364 | trait_ref: if negative { | 445 | trait_ref: if negative { |
365 | chalk_rust_ir::PolarizedTraitRef::Negative(trait_ref) | 446 | chalk_rust_ir::PolarizedTraitRef::Negative(trait_ref) |
@@ -367,9 +448,10 @@ where | |||
367 | chalk_rust_ir::PolarizedTraitRef::Positive(trait_ref) | 448 | chalk_rust_ir::PolarizedTraitRef::Positive(trait_ref) |
368 | }, | 449 | }, |
369 | where_clauses, | 450 | where_clauses, |
370 | associated_ty_values: Vec::new(), // FIXME add associated type values | 451 | associated_ty_values, |
371 | impl_type, | 452 | impl_type, |
372 | }; | 453 | }; |
454 | debug!("impl_datum: {:?}", impl_datum_bound); | ||
373 | let impl_datum = ImplDatum { binders: make_binders(impl_datum_bound, bound_vars.len()) }; | 455 | let impl_datum = ImplDatum { binders: make_binders(impl_datum_bound, bound_vars.len()) }; |
374 | Arc::new(impl_datum) | 456 | Arc::new(impl_datum) |
375 | } | 457 | } |
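The new `associated_ty_values` collection pairs each `type X = …;` item written in the impl with the associated type of the same name declared in the trait, silently skipping items that do not resolve (the `filter_map` plus `?` combination). A standalone sketch of that name-based matching, with made-up types rather than the hir API:

```rust
use std::collections::HashMap;

// Toy stand-ins: trait side maps associated-type names to ids, impl side lists
// the `type NAME = CONCRETE;` items it actually wrote.
fn associated_ty_values(
    trait_assoc_types: &HashMap<&str, u32>,
    impl_type_aliases: &[(&str, &str)],
) -> Vec<(u32, String)> {
    impl_type_aliases
        .iter()
        .filter_map(|(name, concrete)| {
            // Items whose name is not declared in the trait are skipped.
            let assoc_ty_id = trait_assoc_types.get(name)?;
            Some((*assoc_ty_id, concrete.to_string()))
        })
        .collect()
}

fn main() {
    let mut trait_assoc_types = HashMap::new();
    trait_assoc_types.insert("Target", 0);
    let impl_items = [("Target", "Inner"), ("Unrelated", "u32")];
    assert_eq!(
        associated_ty_values(&trait_assoc_types, &impl_items),
        vec![(0, "Inner".to_string())]
    );
}
```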
@@ -405,7 +487,7 @@ where | |||
405 | } | 487 | } |
406 | fn split_projection<'p>( | 488 | fn split_projection<'p>( |
407 | &self, | 489 | &self, |
408 | projection: &'p ProjectionTy, | 490 | projection: &'p chalk_ir::ProjectionTy, |
409 | ) -> (Arc<AssociatedTyDatum>, &'p [Parameter], &'p [Parameter]) { | 491 | ) -> (Arc<AssociatedTyDatum>, &'p [Parameter], &'p [Parameter]) { |
410 | debug!("split_projection {:?}", projection); | 492 | debug!("split_projection {:?}", projection); |
411 | unimplemented!() | 493 | unimplemented!() |
@@ -440,6 +522,18 @@ impl From<crate::ids::TraitId> for chalk_ir::TraitId { | |||
440 | } | 522 | } |
441 | } | 523 | } |
442 | 524 | ||
525 | impl From<chalk_ir::TypeId> for crate::ids::TypeAliasId { | ||
526 | fn from(type_id: chalk_ir::TypeId) -> Self { | ||
527 | id_from_chalk(type_id.0) | ||
528 | } | ||
529 | } | ||
530 | |||
531 | impl From<crate::ids::TypeAliasId> for chalk_ir::TypeId { | ||
532 | fn from(type_id: crate::ids::TypeAliasId) -> Self { | ||
533 | chalk_ir::TypeId(id_to_chalk(type_id)) | ||
534 | } | ||
535 | } | ||
536 | |||
443 | impl From<chalk_ir::StructId> for crate::ids::TypeCtorId { | 537 | impl From<chalk_ir::StructId> for crate::ids::TypeCtorId { |
444 | fn from(struct_id: chalk_ir::StructId) -> Self { | 538 | fn from(struct_id: chalk_ir::StructId) -> Self { |
445 | id_from_chalk(struct_id.0) | 539 | id_from_chalk(struct_id.0) |
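The `From` impls added for `chalk_ir::TypeId` follow the same pattern as the existing `TraitId`/`StructId` conversions: both sides are thin newtypes around an interned integer, so converting is just moving the raw value across. A self-contained sketch of that round trip, with stand-in types rather than the real `id_to_chalk`/`id_from_chalk` helpers:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct TypeAliasId(u32); // stand-in for a salsa intern id

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct ChalkTypeId(usize); // stand-in for chalk_ir::TypeId

impl From<TypeAliasId> for ChalkTypeId {
    fn from(id: TypeAliasId) -> Self {
        ChalkTypeId(id.0 as usize)
    }
}

impl From<ChalkTypeId> for TypeAliasId {
    fn from(id: ChalkTypeId) -> Self {
        TypeAliasId(id.0 as u32)
    }
}

fn main() {
    // Converting to the chalk side and back must be lossless.
    let id = TypeAliasId(42);
    let chalk: ChalkTypeId = id.into();
    let back: TypeAliasId = chalk.into();
    assert_eq!(id, back);
}
```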
diff --git a/crates/ra_hir/src/type_alias.rs b/crates/ra_hir/src/type_alias.rs index 87b9caa8a..eada37274 100644 --- a/crates/ra_hir/src/type_alias.rs +++ b/crates/ra_hir/src/type_alias.rs | |||
@@ -2,12 +2,22 @@ | |||
2 | 2 | ||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use crate::{TypeAlias, DefDatabase, AstDatabase, HasSource, type_ref::TypeRef}; | 5 | use ra_syntax::ast::NameOwner; |
6 | 6 | ||
7 | pub(crate) fn type_alias_ref_query( | 7 | use crate::{TypeAlias, db::{DefDatabase, AstDatabase}, type_ref::TypeRef, name::{Name, AsName}, HasSource}; |
8 | |||
9 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
10 | pub struct TypeAliasData { | ||
11 | pub(crate) name: Name, | ||
12 | pub(crate) type_ref: Option<TypeRef>, | ||
13 | } | ||
14 | |||
15 | pub(crate) fn type_alias_data_query( | ||
8 | db: &(impl DefDatabase + AstDatabase), | 16 | db: &(impl DefDatabase + AstDatabase), |
9 | typ: TypeAlias, | 17 | typ: TypeAlias, |
10 | ) -> Arc<TypeRef> { | 18 | ) -> Arc<TypeAliasData> { |
11 | let node = typ.source(db).ast; | 19 | let node = typ.source(db).ast; |
12 | Arc::new(TypeRef::from_ast_opt(node.type_ref())) | 20 | let name = node.name().map_or_else(Name::missing, |n| n.as_name()); |
21 | let type_ref = node.type_ref().map(TypeRef::from_ast); | ||
22 | Arc::new(TypeAliasData { name, type_ref }) | ||
13 | } | 23 | } |
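The query now returns a `TypeAliasData` carrying both the alias name and the (optional) aliased type: the chalk integration above needs the name to match an impl's `type Target = …;` against the trait's declaration, while inference still needs the type itself. A toy sketch of what the query produces, using `String` stand-ins for `Name` and `TypeRef` rather than the real hir types:

```rust
#[derive(Debug, Clone, PartialEq, Eq)]
struct TypeAliasData {
    name: String,             // `Name` in the real code
    type_ref: Option<String>, // `Option<TypeRef>`; `None` for a bare `type Item;`
}

// Toy parser for `type NAME = TY;` / `type NAME;`, standing in for the AST walk.
fn type_alias_data(declaration: &str) -> TypeAliasData {
    let body = declaration.trim().trim_start_matches("type").trim().trim_end_matches(';');
    let mut parts = body.splitn(2, '=');
    TypeAliasData {
        name: parts.next().unwrap_or("[missing name]").trim().to_string(),
        type_ref: parts.next().map(|ty| ty.trim().to_string()),
    }
}

fn main() {
    assert_eq!(
        type_alias_data("type Target = S;"),
        TypeAliasData { name: "Target".into(), type_ref: Some("S".into()) }
    );
    assert_eq!(
        type_alias_data("type Item;"),
        TypeAliasData { name: "Item".into(), type_ref: None }
    );
}
```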
diff --git a/crates/ra_ide_api/src/completion/complete_dot.rs b/crates/ra_ide_api/src/completion/complete_dot.rs index 0822a0e7e..f26fd06b3 100644 --- a/crates/ra_ide_api/src/completion/complete_dot.rs +++ b/crates/ra_ide_api/src/completion/complete_dot.rs | |||
@@ -15,7 +15,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) { | |||
15 | } | 15 | } |
16 | 16 | ||
17 | fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) { | 17 | fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) { |
18 | for receiver in receiver.autoderef(ctx.db) { | 18 | for receiver in ctx.analyzer.autoderef(ctx.db, receiver) { |
19 | if let Ty::Apply(a_ty) = receiver { | 19 | if let Ty::Apply(a_ty) = receiver { |
20 | match a_ty.ctor { | 20 | match a_ty.ctor { |
21 | TypeCtor::Adt(AdtDef::Struct(s)) => { | 21 | TypeCtor::Adt(AdtDef::Struct(s)) => { |
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs index 0f638b170..6f5164e0b 100644 --- a/crates/ra_ide_api/src/goto_type_definition.rs +++ b/crates/ra_ide_api/src/goto_type_definition.rs | |||
@@ -30,7 +30,7 @@ pub(crate) fn goto_type_definition( | |||
30 | return None; | 30 | return None; |
31 | }; | 31 | }; |
32 | 32 | ||
33 | let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt().map(|adt| adt.0))?; | 33 | let adt_def = analyzer.autoderef(db, ty).find_map(|ty| ty.as_adt().map(|adt| adt.0))?; |
34 | 34 | ||
35 | let nav = NavigationTarget::from_adt_def(db, adt_def); | 35 | let nav = NavigationTarget::from_adt_def(db, adt_def); |
36 | Some(RangeInfo::new(node.range(), vec![nav])) | 36 | Some(RangeInfo::new(node.range(), vec![nav])) |
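Both `complete_dot` and `goto_type_definition` now call `analyzer.autoderef(db, ty)` instead of `ty.autoderef(db)`: going through the `SourceAnalyzer` gives autoderef access to the trait environment, so user-written `Deref` impls can be followed rather than only built-in references. Plain Rust showing the kind of chain the IDE features now have to see through (not rust-analyzer code):

```rust
use std::ops::Deref;

struct Inner { field: u32 }
struct Wrapper(Inner);

impl Deref for Wrapper {
    type Target = Inner;
    fn deref(&self) -> &Inner {
        &self.0
    }
}

fn main() {
    let wrapper = Wrapper(Inner { field: 1 });
    // `.field` is declared on `Inner`; the compiler (and dot-completion, and
    // go-to-type-definition) reaches it by auto-dereferencing through the
    // user-written `Deref` impl.
    println!("{}", wrapper.field);
}
```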
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index 8741e736f..e61d5627e 100644 --- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -276,6 +276,9 @@ impl AnalysisHost { | |||
276 | pub fn collect_garbage(&mut self) { | 276 | pub fn collect_garbage(&mut self) { |
277 | self.db.collect_garbage(); | 277 | self.db.collect_garbage(); |
278 | } | 278 | } |
279 | pub fn raw_database(&self) -> &(impl hir::db::HirDatabase + salsa::Database) { | ||
280 | &self.db | ||
281 | } | ||
279 | } | 282 | } |
280 | 283 | ||
281 | /// Analysis is a snapshot of a world state at a moment in time. It is the main | 284 | /// Analysis is a snapshot of a world state at a moment in time. It is the main |
diff --git a/crates/ra_ide_api/src/line_index.rs b/crates/ra_ide_api/src/line_index.rs index 087dfafed..a53cf9ee0 100644 --- a/crates/ra_ide_api/src/line_index.rs +++ b/crates/ra_ide_api/src/line_index.rs | |||
@@ -10,7 +10,9 @@ pub struct LineIndex { | |||
10 | 10 | ||
11 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] | 11 | #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] |
12 | pub struct LineCol { | 12 | pub struct LineCol { |
13 | /// Zero-based | ||
13 | pub line: u32, | 14 | pub line: u32, |
15 | /// Zero-based | ||
14 | pub col_utf16: u32, | 16 | pub col_utf16: u32, |
15 | } | 17 | } |
16 | 18 | ||
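The new doc comments make the convention explicit: `LineCol` is zero-based, so the first character of a file is line 0, column 0 (the same convention LSP uses, while editors display one-based positions). A small standalone illustration; the helper here is a naive assumption that counts chars, whereas the real `col_utf16` field counts UTF-16 code units:

```rust
// Naive zero-based line/column lookup for ASCII-ish text (illustration only).
fn line_col(text: &str, offset: usize) -> (u32, u32) {
    let before = &text[..offset];
    let line = before.matches('\n').count() as u32;
    let col = before.rsplit('\n').next().unwrap().chars().count() as u32;
    (line, col)
}

fn main() {
    let text = "fn main() {\n    let x = 1;\n}\n";
    assert_eq!(line_col(text, 0), (0, 0)); // start of file: line 0, column 0
    let offset = text.find("let").unwrap();
    assert_eq!(line_col(text, offset), (1, 4)); // second line, fifth column, zero-based
}
```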
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs index 7ed35c24a..4aadb5ea8 100644 --- a/crates/ra_lsp_server/src/main.rs +++ b/crates/ra_lsp_server/src/main.rs | |||
@@ -17,7 +17,7 @@ fn main() -> Result<()> { | |||
17 | Err(_) => ra_prof::Filter::disabled(), | 17 | Err(_) => ra_prof::Filter::disabled(), |
18 | }); | 18 | }); |
19 | log::info!("lifecycle: server started"); | 19 | log::info!("lifecycle: server started"); |
20 | match ::std::panic::catch_unwind(main_inner) { | 20 | match std::panic::catch_unwind(main_inner) { |
21 | Ok(res) => { | 21 | Ok(res) => { |
22 | log::info!("lifecycle: terminating process with {:?}", res); | 22 | log::info!("lifecycle: terminating process with {:?}", res); |
23 | res | 23 | res |
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index a3af153f1..42156bea6 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs | |||
@@ -37,6 +37,7 @@ pub enum ProjectWorkspace { | |||
37 | /// `ProjectRoot` describes a workspace root folder. | 37 | /// `ProjectRoot` describes a workspace root folder. |
38 | /// Which may be an external dependency, or a member of | 38 | /// Which may be an external dependency, or a member of |
39 | /// the current workspace. | 39 | /// the current workspace. |
40 | #[derive(Clone)] | ||
40 | pub struct ProjectRoot { | 41 | pub struct ProjectRoot { |
41 | /// Path to the root folder | 42 | /// Path to the root folder |
42 | path: PathBuf, | 43 | path: PathBuf, |
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 17763809d..d88671d45 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs | |||
@@ -229,8 +229,12 @@ pub enum LiteralKind { | |||
229 | 229 | ||
230 | impl ast::Literal { | 230 | impl ast::Literal { |
231 | pub fn token(&self) -> SyntaxToken { | 231 | pub fn token(&self) -> SyntaxToken { |
232 | match self.syntax().first_child_or_token().unwrap() { | 232 | let elem = self |
233 | SyntaxElement::Token(token) => token, | 233 | .syntax() |
234 | .children_with_tokens() | ||
235 | .find(|e| e.kind() != ATTR && !e.kind().is_trivia()); | ||
236 | match elem { | ||
237 | Some(SyntaxElement::Token(token)) => token, | ||
234 | _ => unreachable!(), | 238 | _ => unreachable!(), |
235 | } | 239 | } |
236 | } | 240 | } |
@@ -268,6 +272,13 @@ impl ast::Literal { | |||
268 | } | 272 | } |
269 | } | 273 | } |
270 | 274 | ||
275 | #[test] | ||
276 | fn test_literal_with_attr() { | ||
277 | let parse = ast::SourceFile::parse(r#"const _: &str = { #[attr] "Hello" };"#); | ||
278 | let lit = parse.tree.syntax().descendants().find_map(ast::Literal::cast).unwrap(); | ||
279 | assert_eq!(lit.token().text(), r#""Hello""#); | ||
280 | } | ||
281 | |||
271 | impl ast::NamedField { | 282 | impl ast::NamedField { |
272 | pub fn parent_struct_lit(&self) -> &ast::StructLit { | 283 | pub fn parent_struct_lit(&self) -> &ast::StructLit { |
273 | self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap() | 284 | self.syntax().ancestors().find_map(ast::StructLit::cast).unwrap() |
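The old `ast::Literal::token()` assumed the literal node's first child was the literal token itself; with an attribute in front, as in the `#[attr] "Hello"` test above, that no longer holds, so the new code takes the first child that is neither an attribute nor trivia. A toy sketch of that lookup with a made-up `Kind` enum rather than ra_syntax's `SyntaxKind`:

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Kind {
    Attr,
    Whitespace,
    Comment,
    StringLit,
}

// First child that is neither an attribute nor trivia (whitespace / comments).
fn literal_token(children: &[Kind]) -> Option<Kind> {
    children
        .iter()
        .copied()
        .find(|&k| !matches!(k, Kind::Attr | Kind::Whitespace | Kind::Comment))
}

fn main() {
    // `#[attr] "Hello"` roughly lowers to: ATTR, WHITESPACE, STRING
    let children = [Kind::Attr, Kind::Whitespace, Kind::StringLit];
    assert_eq!(literal_token(&children), Some(Kind::StringLit));
}
```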
diff --git a/docs/dev/README.md b/docs/dev/README.md index d34ff96c8..3dc37e86e 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -147,3 +147,16 @@ RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more tha | |||
147 | ``` | 147 | ``` |
148 | 148 | ||
149 | In particular, I have `export RA_PROFILE='*>10'` in my shell profile. | 149 | In particular, I have `export RA_PROFILE='*>10'` in my shell profile. |
150 | |||
151 | To measure the time of from-scratch analysis, use something like this: | ||
152 | |||
153 | ``` | ||
154 | $ cargo run --release -p ra_cli -- analysis-stats ../chalk/ | ||
155 | ``` | ||
156 | |||
157 | To measure the time of incremental analysis, use either of these: | ||
158 | |||
159 | ``` | ||
160 | $ cargo run --release -p ra_cli -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs | ||
161 | $ cargo run --release -p ra_cli -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0 | ||
162 | ``` | ||