42 files changed, 1536 insertions, 1356 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 965d5aaa0..0826b056a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -891,17 +891,14 @@ version = "0.1.0"
891 | name = "ra_assists" | 891 | name = "ra_assists" |
892 | version = "0.1.0" | 892 | version = "0.1.0" |
893 | dependencies = [ | 893 | dependencies = [ |
894 | "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", | ||
895 | "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 894 | "format-buf 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)", |
896 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | 895 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", |
897 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", | 896 | "join_to_string 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", |
898 | "once_cell 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
899 | "ra_db 0.1.0", | 897 | "ra_db 0.1.0", |
900 | "ra_fmt 0.1.0", | 898 | "ra_fmt 0.1.0", |
901 | "ra_hir 0.1.0", | 899 | "ra_hir 0.1.0", |
902 | "ra_syntax 0.1.0", | 900 | "ra_syntax 0.1.0", |
903 | "ra_text_edit 0.1.0", | 901 | "ra_text_edit 0.1.0", |
904 | "rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", | ||
905 | "rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", | 902 | "rustc_lexer 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", |
906 | "test_utils 0.1.0", | 903 | "test_utils 0.1.0", |
907 | ] | 904 | ] |
@@ -1031,8 +1028,6 @@ dependencies = [
1031 | "lsp-server 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1028 | "lsp-server 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1032 | "lsp-types 0.61.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1029 | "lsp-types 0.61.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1033 | "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", | 1030 | "parking_lot 0.9.0 (registry+https://github.com/rust-lang/crates.io-index)", |
1034 | "ra_cfg 0.1.0", | ||
1035 | "ra_db 0.1.0", | ||
1036 | "ra_ide_api 0.1.0", | 1031 | "ra_ide_api 0.1.0", |
1037 | "ra_prof 0.1.0", | 1032 | "ra_prof 0.1.0", |
1038 | "ra_project_model 0.1.0", | 1033 | "ra_project_model 0.1.0", |
@@ -1053,7 +1048,6 @@ dependencies = [
1053 | name = "ra_mbe" | 1048 | name = "ra_mbe" |
1054 | version = "0.1.0" | 1049 | version = "0.1.0" |
1055 | dependencies = [ | 1050 | dependencies = [ |
1056 | "itertools 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", | ||
1057 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", | 1051 | "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", |
1058 | "ra_parser 0.1.0", | 1052 | "ra_parser 0.1.0", |
1059 | "ra_syntax 0.1.0", | 1053 | "ra_syntax 0.1.0", |
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml
index d3b6aeb36..beebccbd9 100644
--- a/crates/ra_assists/Cargo.toml
+++ b/crates/ra_assists/Cargo.toml
@@ -6,11 +6,8 @@ authors = ["rust-analyzer developers"]
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | format-buf = "1.0.0" | 8 | format-buf = "1.0.0" |
9 | once_cell = "1.0.1" | ||
10 | join_to_string = "0.1.3" | 9 | join_to_string = "0.1.3" |
11 | itertools = "0.8.0" | 10 | itertools = "0.8.0" |
12 | arrayvec = "0.4.10" | ||
13 | rustc-hash = "1.0.1" | ||
14 | rustc_lexer = "0.1.0" | 11 | rustc_lexer = "0.1.0" |
15 | 12 | ||
16 | ra_syntax = { path = "../ra_syntax" } | 13 | ra_syntax = { path = "../ra_syntax" } |
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index 189cad7d0..e270c5d60 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -138,6 +138,7 @@ impl AssistBuilder {
138 | 138 | ||
139 | /// Replaces specified `node` of text with a given string, reindenting the | 139 | /// Replaces specified `node` of text with a given string, reindenting the |
140 | /// string to maintain `node`'s existing indent. | 140 | /// string to maintain `node`'s existing indent. |
141 | // FIXME: remove in favor of ra_syntax::edit::IndentLevel::increase_indent | ||
141 | pub(crate) fn replace_node_and_indent( | 142 | pub(crate) fn replace_node_and_indent( |
142 | &mut self, | 143 | &mut self, |
143 | node: &SyntaxNode, | 144 | node: &SyntaxNode, |
diff --git a/crates/ra_assists/src/assists/fill_match_arms.rs b/crates/ra_assists/src/assists/fill_match_arms.rs
index 7335cce09..e3f30b5de 100644
--- a/crates/ra_assists/src/assists/fill_match_arms.rs
+++ b/crates/ra_assists/src/assists/fill_match_arms.rs
@@ -3,7 +3,7 @@
3 | use std::iter; | 3 | use std::iter; |
4 | 4 | ||
5 | use hir::{db::HirDatabase, Adt, HasSource}; | 5 | use hir::{db::HirDatabase, Adt, HasSource}; |
6 | use ra_syntax::ast::{self, make, AstNode, NameOwner}; | 6 | use ra_syntax::ast::{self, edit::IndentLevel, make, AstNode, NameOwner}; |
7 | 7 | ||
8 | use crate::{Assist, AssistCtx, AssistId}; | 8 | use crate::{Assist, AssistCtx, AssistId}; |
9 | 9 | ||
@@ -30,15 +30,19 @@ pub(crate) fn fill_match_arms(mut ctx: AssistCtx<impl HirDatabase>) -> Option<As
30 | let variant_list = enum_def.variant_list()?; | 30 | let variant_list = enum_def.variant_list()?; |
31 | 31 | ||
32 | ctx.add_action(AssistId("fill_match_arms"), "fill match arms", |edit| { | 32 | ctx.add_action(AssistId("fill_match_arms"), "fill match arms", |edit| { |
33 | let variants = variant_list.variants(); | 33 | let indent_level = IndentLevel::from_node(match_arm_list.syntax()); |
34 | let arms = variants | 34 | |
35 | .filter_map(build_pat) | 35 | let new_arm_list = { |
36 | .map(|pat| make::match_arm(iter::once(pat), make::expr_unit())); | 36 | let variants = variant_list.variants(); |
37 | let new_arm_list = make::match_arm_list(arms); | 37 | let arms = variants |
38 | .filter_map(build_pat) | ||
39 | .map(|pat| make::match_arm(iter::once(pat), make::expr_unit())); | ||
40 | indent_level.increase_indent(make::match_arm_list(arms)) | ||
41 | }; | ||
38 | 42 | ||
39 | edit.target(match_expr.syntax().text_range()); | 43 | edit.target(match_expr.syntax().text_range()); |
40 | edit.set_cursor(expr.syntax().text_range().start()); | 44 | edit.set_cursor(expr.syntax().text_range().start()); |
41 | edit.replace_node_and_indent(match_arm_list.syntax(), new_arm_list.syntax().text()); | 45 | edit.replace_ast(match_arm_list, new_arm_list); |
42 | }); | 46 | }); |
43 | 47 | ||
44 | ctx.build() | 48 | ctx.build() |
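Note on the fill_match_arms change above: the assist now builds the new arm list with `make::match_arm_list(arms)`, reindents it via `IndentLevel::from_node(...)` / `increase_indent(...)`, and splices it back in with `edit.replace_ast`, instead of relying on the textual `replace_node_and_indent` helper. As a rough, string-level illustration (a hypothetical analogue, not the actual ra_syntax implementation) of what "increasing the indent" of a freshly generated node amounts to:

```rust
/// Hypothetical string-level analogue of `IndentLevel::increase_indent`:
/// each line break inside the generated node gets `level` extra units of
/// indentation so the arm list lines up with the surrounding `match`.
fn increase_indent(level: usize, node_text: &str) -> String {
    let indent = "    ".repeat(level);
    node_text.replace('\n', &format!("\n{indent}"))
}

fn main() {
    // What `make::match_arm_list` conceptually produces, indented from column 0.
    let generated = "{\n    En::A => (),\n    En::B => (),\n}";
    // Reindent by one level so the list can replace an arm list nested one level deep.
    println!("{}", increase_indent(1, generated));
}
```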
diff --git a/crates/ra_assists/src/assists/move_bounds.rs b/crates/ra_assists/src/assists/move_bounds.rs
index f791d22b0..d2444b6b9 100644
--- a/crates/ra_assists/src/assists/move_bounds.rs
+++ b/crates/ra_assists/src/assists/move_bounds.rs
@@ -18,7 +18,7 @@ pub(crate) fn move_bounds_to_where_clause(mut ctx: AssistCtx<impl HirDatabase>)
18 | } | 18 | } |
19 | 19 | ||
20 | let parent = type_param_list.syntax().parent()?; | 20 | let parent = type_param_list.syntax().parent()?; |
21 | if parent.children_with_tokens().find(|it| it.kind() == WHERE_CLAUSE).is_some() { | 21 | if parent.children_with_tokens().any(|it| it.kind() == WHERE_CLAUSE) { |
22 | return None; | 22 | return None; |
23 | } | 23 | } |
24 | 24 | ||
diff --git a/crates/ra_batch/src/lib.rs b/crates/ra_batch/src/lib.rs
index 602beb439..df49eb13d 100644
--- a/crates/ra_batch/src/lib.rs
+++ b/crates/ra_batch/src/lib.rs
@@ -141,14 +141,8 @@ mod tests {
141 | #[test] | 141 | #[test] |
142 | fn test_loading_rust_analyzer() { | 142 | fn test_loading_rust_analyzer() { |
143 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); | 143 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); |
144 | let (host, roots) = load_cargo(path).unwrap(); | 144 | let (host, _roots) = load_cargo(path).unwrap(); |
145 | let mut n_crates = 0; | 145 | let n_crates = Crate::all(host.raw_database()).len(); |
146 | for (root, _) in roots { | ||
147 | for _krate in Crate::source_root_crates(host.raw_database(), root) { | ||
148 | n_crates += 1; | ||
149 | } | ||
150 | } | ||
151 | |||
152 | // RA has quite a few crates, but the exact count doesn't matter | 146 | // RA has quite a few crates, but the exact count doesn't matter |
153 | assert!(n_crates > 20); | 147 | assert!(n_crates > 20); |
154 | } | 148 | } |
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs
index 727f1e62b..8bbe5d9e8 100644
--- a/crates/ra_cli/src/analysis_bench.rs
+++ b/crates/ra_cli/src/analysis_bench.rs
@@ -8,7 +8,7 @@ use std::{
8 | 8 | ||
9 | use ra_db::{ | 9 | use ra_db::{ |
10 | salsa::{Database, Durability}, | 10 | salsa::{Database, Durability}, |
11 | FileId, SourceDatabase, | 11 | FileId, SourceDatabaseExt, |
12 | }; | 12 | }; |
13 | use ra_ide_api::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol}; | 13 | use ra_ide_api::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol}; |
14 | 14 | ||
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index a8a110bd9..35c867dce 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -2,7 +2,7 @@
2 | 2 | ||
3 | use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; | 3 | use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; |
4 | 4 | ||
5 | use ra_db::SourceDatabase; | 5 | use ra_db::SourceDatabaseExt; |
6 | use ra_hir::{AssocItem, Crate, HasBodySource, HasSource, HirDisplay, ModuleDef, Ty, TypeWalk}; | 6 | use ra_hir::{AssocItem, Crate, HasBodySource, HasSource, HirDisplay, ModuleDef, Ty, TypeWalk}; |
7 | use ra_syntax::AstNode; | 7 | use ra_syntax::AstNode; |
8 | 8 | ||
@@ -22,16 +22,29 @@ pub fn run(
22 | let mut num_crates = 0; | 22 | let mut num_crates = 0; |
23 | let mut visited_modules = HashSet::new(); | 23 | let mut visited_modules = HashSet::new(); |
24 | let mut visit_queue = Vec::new(); | 24 | let mut visit_queue = Vec::new(); |
25 | for (source_root_id, project_root) in roots { | 25 | |
26 | if project_root.is_member() { | 26 | let members = roots |
27 | for krate in Crate::source_root_crates(db, source_root_id) { | 27 | .into_iter() |
28 | num_crates += 1; | 28 | .filter_map( |
29 | let module = | 29 | |(source_root_id, project_root)| { |
30 | krate.root_module(db).expect("crate in source root without root module"); | 30 | if project_root.is_member() { |
31 | visit_queue.push(module); | 31 | Some(source_root_id) |
32 | } | 32 | } else { |
33 | None | ||
34 | } | ||
35 | }, | ||
36 | ) | ||
37 | .collect::<HashSet<_>>(); | ||
38 | |||
39 | for krate in Crate::all(db) { | ||
40 | let module = krate.root_module(db).expect("crate without root module"); | ||
41 | let file_id = module.definition_source(db).file_id; | ||
42 | if members.contains(&db.file_source_root(file_id.original_file(db))) { | ||
43 | num_crates += 1; | ||
44 | visit_queue.push(module); | ||
33 | } | 45 | } |
34 | } | 46 | } |
47 | |||
35 | println!("Crates in this dir: {}", num_crates); | 48 | println!("Crates in this dir: {}", num_crates); |
36 | let mut num_decls = 0; | 49 | let mut num_decls = 0; |
37 | let mut funcs = Vec::new(); | 50 | let mut funcs = Vec::new(); |
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index cae51b02c..eafa95921 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -57,7 +57,7 @@ impl SourceRoot {
57 | pub fn walk(&self) -> impl Iterator<Item = FileId> + '_ { | 57 | pub fn walk(&self) -> impl Iterator<Item = FileId> + '_ { |
58 | self.files.values().copied() | 58 | self.files.values().copied() |
59 | } | 59 | } |
60 | pub(crate) fn file_by_relative_path(&self, path: &RelativePath) -> Option<FileId> { | 60 | pub fn file_by_relative_path(&self, path: &RelativePath) -> Option<FileId> { |
61 | self.files.get(path).copied() | 61 | self.files.get(path).copied() |
62 | } | 62 | } |
63 | } | 63 | } |
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 4d3a9c036..fc5d6d396 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -64,21 +64,39 @@ pub struct FileRange {
64 | 64 | ||
65 | pub const DEFAULT_LRU_CAP: usize = 128; | 65 | pub const DEFAULT_LRU_CAP: usize = 128; |
66 | 66 | ||
67 | /// Database which stores all significant input facts: source code and project | 67 | pub trait FileLoader { |
68 | /// model. Everything else in rust-analyzer is derived from these queries. | ||
69 | #[salsa::query_group(SourceDatabaseStorage)] | ||
70 | pub trait SourceDatabase: CheckCanceled + std::fmt::Debug { | ||
71 | /// Text of the file. | 68 | /// Text of the file. |
72 | #[salsa::input] | ||
73 | fn file_text(&self, file_id: FileId) -> Arc<String>; | 69 | fn file_text(&self, file_id: FileId) -> Arc<String>; |
74 | |||
75 | #[salsa::transparent] | ||
76 | fn resolve_relative_path(&self, anchor: FileId, relative_path: &RelativePath) | 70 | fn resolve_relative_path(&self, anchor: FileId, relative_path: &RelativePath) |
77 | -> Option<FileId>; | 71 | -> Option<FileId>; |
72 | fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>>; | ||
73 | } | ||
78 | 74 | ||
75 | /// Database which stores all significant input facts: source code and project | ||
76 | /// model. Everything else in rust-analyzer is derived from these queries. | ||
77 | #[salsa::query_group(SourceDatabaseStorage)] | ||
78 | pub trait SourceDatabase: CheckCanceled + FileLoader + std::fmt::Debug { | ||
79 | // Parses the file into the syntax tree. | 79 | // Parses the file into the syntax tree. |
80 | #[salsa::invoke(parse_query)] | 80 | #[salsa::invoke(parse_query)] |
81 | fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>; | 81 | fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>; |
82 | |||
83 | /// The crate graph. | ||
84 | #[salsa::input] | ||
85 | fn crate_graph(&self) -> Arc<CrateGraph>; | ||
86 | } | ||
87 | |||
88 | fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { | ||
89 | let _p = profile("parse_query"); | ||
90 | let text = db.file_text(file_id); | ||
91 | SourceFile::parse(&*text) | ||
92 | } | ||
93 | |||
94 | /// We don't want to give HIR knowledge of source roots, hence we extract these | ||
95 | /// methods into a separate DB. | ||
96 | #[salsa::query_group(SourceDatabaseExtStorage)] | ||
97 | pub trait SourceDatabaseExt: SourceDatabase { | ||
98 | #[salsa::input] | ||
99 | fn file_text(&self, file_id: FileId) -> Arc<String>; | ||
82 | /// Path to a file, relative to the root of its source root. | 100 | /// Path to a file, relative to the root of its source root. |
83 | #[salsa::input] | 101 | #[salsa::input] |
84 | fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; | 102 | fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; |
@@ -88,40 +106,48 @@ pub trait SourceDatabase: CheckCanceled + std::fmt::Debug {
88 | /// Contents of the source root. | 106 | /// Contents of the source root. |
89 | #[salsa::input] | 107 | #[salsa::input] |
90 | fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>; | 108 | fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>; |
91 | fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>; | ||
92 | /// The crate graph. | ||
93 | #[salsa::input] | ||
94 | fn crate_graph(&self) -> Arc<CrateGraph>; | ||
95 | } | ||
96 | 109 | ||
97 | fn resolve_relative_path( | 110 | fn source_root_crates(&self, id: SourceRootId) -> Arc<Vec<CrateId>>; |
98 | db: &impl SourceDatabase, | ||
99 | anchor: FileId, | ||
100 | relative_path: &RelativePath, | ||
101 | ) -> Option<FileId> { | ||
102 | let path = { | ||
103 | let mut path = db.file_relative_path(anchor); | ||
104 | // Workaround for relative path API: turn `lib.rs` into ``. | ||
105 | if !path.pop() { | ||
106 | path = RelativePathBuf::default(); | ||
107 | } | ||
108 | path.push(relative_path); | ||
109 | path.normalize() | ||
110 | }; | ||
111 | let source_root = db.file_source_root(anchor); | ||
112 | let source_root = db.source_root(source_root); | ||
113 | source_root.file_by_relative_path(&path) | ||
114 | } | 111 | } |
115 | 112 | ||
116 | fn source_root_crates(db: &impl SourceDatabase, id: SourceRootId) -> Arc<Vec<CrateId>> { | 113 | fn source_root_crates( |
114 | db: &(impl SourceDatabaseExt + SourceDatabase), | ||
115 | id: SourceRootId, | ||
116 | ) -> Arc<Vec<CrateId>> { | ||
117 | let root = db.source_root(id); | 117 | let root = db.source_root(id); |
118 | let graph = db.crate_graph(); | 118 | let graph = db.crate_graph(); |
119 | let res = root.walk().filter_map(|it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>(); | 119 | let res = root.walk().filter_map(|it| graph.crate_id_for_crate_root(it)).collect::<Vec<_>>(); |
120 | Arc::new(res) | 120 | Arc::new(res) |
121 | } | 121 | } |
122 | 122 | ||
123 | fn parse_query(db: &impl SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> { | 123 | /// Silly workaround for cyclic deps between the traits |
124 | let _p = profile("parse_query"); | 124 | pub struct FileLoaderDelegate<T>(pub T); |
125 | let text = db.file_text(file_id); | 125 | |
126 | SourceFile::parse(&*text) | 126 | impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> { |
127 | fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
128 | SourceDatabaseExt::file_text(self.0, file_id) | ||
129 | } | ||
130 | fn resolve_relative_path( | ||
131 | &self, | ||
132 | anchor: FileId, | ||
133 | relative_path: &RelativePath, | ||
134 | ) -> Option<FileId> { | ||
135 | let path = { | ||
136 | let mut path = self.0.file_relative_path(anchor); | ||
137 | // Workaround for relative path API: turn `lib.rs` into ``. | ||
138 | if !path.pop() { | ||
139 | path = RelativePathBuf::default(); | ||
140 | } | ||
141 | path.push(relative_path); | ||
142 | path.normalize() | ||
143 | }; | ||
144 | let source_root = self.0.file_source_root(anchor); | ||
145 | let source_root = self.0.source_root(source_root); | ||
146 | source_root.file_by_relative_path(&path) | ||
147 | } | ||
148 | |||
149 | fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { | ||
150 | let source_root = self.0.file_source_root(file_id); | ||
151 | self.0.source_root_crates(source_root) | ||
152 | } | ||
127 | } | 153 | } |
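The hunk above splits file and source-root access out of `SourceDatabase` into the new `FileLoader` trait plus `SourceDatabaseExt`, with `FileLoaderDelegate` as the bridge around the cyclic-dependency problem. A concrete database is then expected to implement `FileLoader` by forwarding to the delegate; a minimal sketch of the intended wiring, assuming a hypothetical `MyDatabase` that already carries `ra_db::SourceDatabaseExtStorage` (the `MockDatabase` impl later in this diff follows the same pattern):

```rust
use std::sync::Arc;

use ra_db::{CrateId, FileId, FileLoader, FileLoaderDelegate};
use relative_path::RelativePath;

// `MyDatabase` is a placeholder: any salsa database that includes
// `ra_db::SourceDatabaseExtStorage` and therefore implements `SourceDatabaseExt`.
impl FileLoader for MyDatabase {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        // Forward to the `SourceDatabaseExt::file_text` input query.
        FileLoaderDelegate(self).file_text(file_id)
    }
    fn resolve_relative_path(
        &self,
        anchor: FileId,
        relative_path: &RelativePath,
    ) -> Option<FileId> {
        FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path)
    }
    fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
        // Maps a file to the crates of its source root via `source_root_crates`.
        FileLoaderDelegate(self).relevant_crates(file_id)
    }
}
```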
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 8055a07db..8eb3c577d 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -5,7 +5,7 @@ pub(crate) mod docs;
5 | 5 | ||
6 | use std::sync::Arc; | 6 | use std::sync::Arc; |
7 | 7 | ||
8 | use ra_db::{CrateId, Edition, FileId, SourceRootId}; | 8 | use ra_db::{CrateId, Edition, FileId}; |
9 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; | 9 | use ra_syntax::ast::{self, NameOwner, TypeAscriptionOwner}; |
10 | 10 | ||
11 | use crate::{ | 11 | use crate::{ |
@@ -76,10 +76,8 @@ impl Crate {
76 | crate_graph.edition(self.crate_id) | 76 | crate_graph.edition(self.crate_id) |
77 | } | 77 | } |
78 | 78 | ||
79 | // FIXME: should this be in source_binder? | 79 | pub fn all(db: &impl DefDatabase) -> Vec<Crate> { |
80 | pub fn source_root_crates(db: &impl DefDatabase, source_root: SourceRootId) -> Vec<Crate> { | 80 | db.crate_graph().iter().map(|crate_id| Crate { crate_id }).collect() |
81 | let crate_ids = db.source_root_crates(source_root); | ||
82 | crate_ids.iter().map(|&crate_id| Crate { crate_id }).collect() | ||
83 | } | 81 | } |
84 | } | 82 | } |
85 | 83 | ||
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index 73d7d6fb6..489a3b19c 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -111,37 +111,37 @@ pub trait DefDatabase: InternDatabase + HirDebugDatabase {
111 | #[salsa::invoke(CrateDefMap::crate_def_map_query)] | 111 | #[salsa::invoke(CrateDefMap::crate_def_map_query)] |
112 | fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>; | 112 | fn crate_def_map(&self, krate: Crate) -> Arc<CrateDefMap>; |
113 | 113 | ||
114 | #[salsa::invoke(crate::impl_block::impls_in_module_with_source_map_query)] | 114 | #[salsa::invoke(ModuleImplBlocks::impls_in_module_with_source_map_query)] |
115 | fn impls_in_module_with_source_map( | 115 | fn impls_in_module_with_source_map( |
116 | &self, | 116 | &self, |
117 | module: Module, | 117 | module: Module, |
118 | ) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>); | 118 | ) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>); |
119 | 119 | ||
120 | #[salsa::invoke(crate::impl_block::impls_in_module)] | 120 | #[salsa::invoke(ModuleImplBlocks::impls_in_module_query)] |
121 | fn impls_in_module(&self, module: Module) -> Arc<ModuleImplBlocks>; | 121 | fn impls_in_module(&self, module: Module) -> Arc<ModuleImplBlocks>; |
122 | 122 | ||
123 | #[salsa::invoke(crate::generics::GenericParams::generic_params_query)] | 123 | #[salsa::invoke(crate::generics::GenericParams::generic_params_query)] |
124 | fn generic_params(&self, def: GenericDef) -> Arc<GenericParams>; | 124 | fn generic_params(&self, def: GenericDef) -> Arc<GenericParams>; |
125 | 125 | ||
126 | #[salsa::invoke(crate::FnData::fn_data_query)] | 126 | #[salsa::invoke(FnData::fn_data_query)] |
127 | fn fn_data(&self, func: Function) -> Arc<FnData>; | 127 | fn fn_data(&self, func: Function) -> Arc<FnData>; |
128 | 128 | ||
129 | #[salsa::invoke(crate::type_alias::type_alias_data_query)] | 129 | #[salsa::invoke(TypeAliasData::type_alias_data_query)] |
130 | fn type_alias_data(&self, typ: TypeAlias) -> Arc<TypeAliasData>; | 130 | fn type_alias_data(&self, typ: TypeAlias) -> Arc<TypeAliasData>; |
131 | 131 | ||
132 | #[salsa::invoke(crate::ConstData::const_data_query)] | 132 | #[salsa::invoke(ConstData::const_data_query)] |
133 | fn const_data(&self, konst: Const) -> Arc<ConstData>; | 133 | fn const_data(&self, konst: Const) -> Arc<ConstData>; |
134 | 134 | ||
135 | #[salsa::invoke(crate::ConstData::static_data_query)] | 135 | #[salsa::invoke(ConstData::static_data_query)] |
136 | fn static_data(&self, konst: Static) -> Arc<ConstData>; | 136 | fn static_data(&self, konst: Static) -> Arc<ConstData>; |
137 | 137 | ||
138 | #[salsa::invoke(crate::lang_item::LangItems::module_lang_items_query)] | 138 | #[salsa::invoke(LangItems::module_lang_items_query)] |
139 | fn module_lang_items(&self, module: Module) -> Option<Arc<LangItems>>; | 139 | fn module_lang_items(&self, module: Module) -> Option<Arc<LangItems>>; |
140 | 140 | ||
141 | #[salsa::invoke(crate::lang_item::LangItems::crate_lang_items_query)] | 141 | #[salsa::invoke(LangItems::crate_lang_items_query)] |
142 | fn crate_lang_items(&self, krate: Crate) -> Arc<LangItems>; | 142 | fn crate_lang_items(&self, krate: Crate) -> Arc<LangItems>; |
143 | 143 | ||
144 | #[salsa::invoke(crate::lang_item::LangItems::lang_item_query)] | 144 | #[salsa::invoke(LangItems::lang_item_query)] |
145 | fn lang_item(&self, start_crate: Crate, item: SmolStr) -> Option<LangItemTarget>; | 145 | fn lang_item(&self, start_crate: Crate, item: SmolStr) -> Option<LangItemTarget>; |
146 | 146 | ||
147 | #[salsa::invoke(crate::code_model::docs::documentation_query)] | 147 | #[salsa::invoke(crate::code_model::docs::documentation_query)] |
diff --git a/crates/ra_hir/src/from_source.rs b/crates/ra_hir/src/from_source.rs
index a012f33f7..f80d8eb5f 100644
--- a/crates/ra_hir/src/from_source.rs
+++ b/crates/ra_hir/src/from_source.rs
@@ -189,14 +189,14 @@ impl Module {
189 | ModuleSource::SourceFile(_) => None, | 189 | ModuleSource::SourceFile(_) => None, |
190 | }; | 190 | }; |
191 | 191 | ||
192 | let source_root_id = db.file_source_root(src.file_id.original_file(db)); | 192 | db.relevant_crates(src.file_id.original_file(db)) |
193 | db.source_root_crates(source_root_id).iter().map(|&crate_id| Crate { crate_id }).find_map( | 193 | .iter() |
194 | |krate| { | 194 | .map(|&crate_id| Crate { crate_id }) |
195 | .find_map(|krate| { | ||
195 | let def_map = db.crate_def_map(krate); | 196 | let def_map = db.crate_def_map(krate); |
196 | let module_id = def_map.find_module_by_source(src.file_id, decl_id)?; | 197 | let module_id = def_map.find_module_by_source(src.file_id, decl_id)?; |
197 | Some(Module { krate, module_id }) | 198 | Some(Module { krate, module_id }) |
198 | }, | 199 | }) |
199 | ) | ||
200 | } | 200 | } |
201 | } | 201 | } |
202 | 202 | ||
diff --git a/crates/ra_hir/src/ids.rs b/crates/ra_hir/src/ids.rs
index 85b022744..499dcafea 100644
--- a/crates/ra_hir/src/ids.rs
+++ b/crates/ra_hir/src/ids.rs
@@ -85,11 +85,7 @@ impl HirFileId {
85 | // Note: | 85 | // Note: |
86 | // The final goal we would like to make all parse_macro success, | 86 | // The final goal we would like to make all parse_macro success, |
87 | // such that the following log will not call anyway. | 87 | // such that the following log will not call anyway. |
88 | log::warn!( | 88 | log::warn!("fail on macro_parse: (reason: {})", err,); |
89 | "fail on macro_parse: (reason: {}) {}", | ||
90 | err, | ||
91 | macro_call_id.debug_dump(db) | ||
92 | ); | ||
93 | }) | 89 | }) |
94 | .ok()?; | 90 | .ok()?; |
95 | match macro_file.macro_file_kind { | 91 | match macro_file.macro_file_kind { |
@@ -367,35 +363,6 @@ impl AstItemDef<ast::TypeAliasDef> for TypeAliasId {
367 | } | 363 | } |
368 | } | 364 | } |
369 | 365 | ||
370 | impl MacroCallId { | ||
371 | pub fn debug_dump(self, db: &impl AstDatabase) -> String { | ||
372 | let loc = self.loc(db); | ||
373 | let node = loc.ast_id.to_node(db); | ||
374 | let syntax_str = { | ||
375 | let mut res = String::new(); | ||
376 | node.syntax().text().for_each_chunk(|chunk| { | ||
377 | if !res.is_empty() { | ||
378 | res.push(' ') | ||
379 | } | ||
380 | res.push_str(chunk) | ||
381 | }); | ||
382 | res | ||
383 | }; | ||
384 | |||
385 | // dump the file name | ||
386 | let file_id: HirFileId = self.loc(db).ast_id.file_id(); | ||
387 | let original = file_id.original_file(db); | ||
388 | let macro_rules = db.macro_def(loc.def); | ||
389 | |||
390 | format!( | ||
391 | "macro call [file: {:?}] : {}\nhas rules: {}", | ||
392 | db.file_relative_path(original), | ||
393 | syntax_str, | ||
394 | macro_rules.is_some() | ||
395 | ) | ||
396 | } | ||
397 | } | ||
398 | |||
399 | /// This exists just for Chalk, because Chalk just has a single `StructId` where | 366 | /// This exists just for Chalk, because Chalk just has a single `StructId` where |
400 | /// we have different kinds of ADTs, primitive types and special type | 367 | /// we have different kinds of ADTs, primitive types and special type |
401 | /// constructors like tuples and function pointers. | 368 | /// constructors like tuples and function pointers. |
diff --git a/crates/ra_hir/src/impl_block.rs b/crates/ra_hir/src/impl_block.rs
index 55dfc393b..33ef87563 100644
--- a/crates/ra_hir/src/impl_block.rs
+++ b/crates/ra_hir/src/impl_block.rs
@@ -176,6 +176,25 @@ pub struct ModuleImplBlocks {
176 | } | 176 | } |
177 | 177 | ||
178 | impl ModuleImplBlocks { | 178 | impl ModuleImplBlocks { |
179 | pub(crate) fn impls_in_module_with_source_map_query( | ||
180 | db: &(impl DefDatabase + AstDatabase), | ||
181 | module: Module, | ||
182 | ) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>) { | ||
183 | let mut source_map = ImplSourceMap::default(); | ||
184 | let crate_graph = db.crate_graph(); | ||
185 | let cfg_options = crate_graph.cfg_options(module.krate.crate_id()); | ||
186 | |||
187 | let result = ModuleImplBlocks::collect(db, cfg_options, module, &mut source_map); | ||
188 | (Arc::new(result), Arc::new(source_map)) | ||
189 | } | ||
190 | |||
191 | pub(crate) fn impls_in_module_query( | ||
192 | db: &impl DefDatabase, | ||
193 | module: Module, | ||
194 | ) -> Arc<ModuleImplBlocks> { | ||
195 | db.impls_in_module_with_source_map(module).0 | ||
196 | } | ||
197 | |||
179 | fn collect( | 198 | fn collect( |
180 | db: &(impl DefDatabase + AstDatabase), | 199 | db: &(impl DefDatabase + AstDatabase), |
181 | cfg_options: &CfgOptions, | 200 | cfg_options: &CfgOptions, |
@@ -264,19 +283,3 @@ impl ModuleImplBlocks {
264 | } | 283 | } |
265 | } | 284 | } |
266 | } | 285 | } |
267 | |||
268 | pub(crate) fn impls_in_module_with_source_map_query( | ||
269 | db: &(impl DefDatabase + AstDatabase), | ||
270 | module: Module, | ||
271 | ) -> (Arc<ModuleImplBlocks>, Arc<ImplSourceMap>) { | ||
272 | let mut source_map = ImplSourceMap::default(); | ||
273 | let crate_graph = db.crate_graph(); | ||
274 | let cfg_options = crate_graph.cfg_options(module.krate.crate_id()); | ||
275 | |||
276 | let result = ModuleImplBlocks::collect(db, cfg_options, module, &mut source_map); | ||
277 | (Arc::new(result), Arc::new(source_map)) | ||
278 | } | ||
279 | |||
280 | pub(crate) fn impls_in_module(db: &impl DefDatabase, module: Module) -> Arc<ModuleImplBlocks> { | ||
281 | db.impls_in_module_with_source_map(module).0 | ||
282 | } | ||
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index 9cbd9a8ae..ca261e8f5 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -51,6 +51,7 @@ mod lang_item;
51 | mod generics; | 51 | mod generics; |
52 | mod resolve; | 52 | mod resolve; |
53 | pub mod diagnostics; | 53 | pub mod diagnostics; |
54 | mod util; | ||
54 | 55 | ||
55 | mod code_model; | 56 | mod code_model; |
56 | 57 | ||
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
index 827424983..0b278deb3 100644
--- a/crates/ra_hir/src/mock.rs
+++ b/crates/ra_hir/src/mock.rs
@@ -5,10 +5,10 @@ use std::{panic, sync::Arc};
5 | use parking_lot::Mutex; | 5 | use parking_lot::Mutex; |
6 | use ra_cfg::CfgOptions; | 6 | use ra_cfg::CfgOptions; |
7 | use ra_db::{ | 7 | use ra_db::{ |
8 | salsa, CrateGraph, CrateId, Edition, FileId, FilePosition, SourceDatabase, SourceRoot, | 8 | salsa, CrateGraph, CrateId, Edition, FileId, FileLoader, FileLoaderDelegate, FilePosition, |
9 | SourceRootId, | 9 | SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, |
10 | }; | 10 | }; |
11 | use relative_path::RelativePathBuf; | 11 | use relative_path::{RelativePath, RelativePathBuf}; |
12 | use rustc_hash::FxHashMap; | 12 | use rustc_hash::FxHashMap; |
13 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; | 13 | use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; |
14 | 14 | ||
@@ -17,6 +17,7 @@ use crate::{db, debug::HirDebugHelper, diagnostics::DiagnosticSink};
17 | pub const WORKSPACE: SourceRootId = SourceRootId(0); | 17 | pub const WORKSPACE: SourceRootId = SourceRootId(0); |
18 | 18 | ||
19 | #[salsa::database( | 19 | #[salsa::database( |
20 | ra_db::SourceDatabaseExtStorage, | ||
20 | ra_db::SourceDatabaseStorage, | 21 | ra_db::SourceDatabaseStorage, |
21 | db::InternDatabaseStorage, | 22 | db::InternDatabaseStorage, |
22 | db::AstDatabaseStorage, | 23 | db::AstDatabaseStorage, |
@@ -34,6 +35,22 @@ pub struct MockDatabase {
34 | 35 | ||
35 | impl panic::RefUnwindSafe for MockDatabase {} | 36 | impl panic::RefUnwindSafe for MockDatabase {} |
36 | 37 | ||
38 | impl FileLoader for MockDatabase { | ||
39 | fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
40 | FileLoaderDelegate(self).file_text(file_id) | ||
41 | } | ||
42 | fn resolve_relative_path( | ||
43 | &self, | ||
44 | anchor: FileId, | ||
45 | relative_path: &RelativePath, | ||
46 | ) -> Option<FileId> { | ||
47 | FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path) | ||
48 | } | ||
49 | fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { | ||
50 | FileLoaderDelegate(self).relevant_crates(file_id) | ||
51 | } | ||
52 | } | ||
53 | |||
37 | impl HirDebugHelper for MockDatabase { | 54 | impl HirDebugHelper for MockDatabase { |
38 | fn crate_name(&self, krate: CrateId) -> Option<String> { | 55 | fn crate_name(&self, krate: CrateId) -> Option<String> { |
39 | self.crate_names.get(&krate).cloned() | 56 | self.crate_names.get(&krate).cloned() |
diff --git a/crates/ra_hir/src/nameres/tests/incremental.rs b/crates/ra_hir/src/nameres/tests/incremental.rs
index c41862a0b..af9c39760 100644
--- a/crates/ra_hir/src/nameres/tests/incremental.rs
+++ b/crates/ra_hir/src/nameres/tests/incremental.rs
@@ -2,7 +2,7 @@ use super::*;
2 | 2 | ||
3 | use std::sync::Arc; | 3 | use std::sync::Arc; |
4 | 4 | ||
5 | use ra_db::SourceDatabase; | 5 | use ra_db::{SourceDatabase, SourceDatabaseExt}; |
6 | 6 | ||
7 | fn check_def_map_is_not_recomputed(initial: &str, file_change: &str) { | 7 | fn check_def_map_is_not_recomputed(initial: &str, file_change: &str) { |
8 | let (mut db, pos) = MockDatabase::with_position(initial); | 8 | let (mut db, pos) = MockDatabase::with_position(initial); |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index d161735e8..cc9746f6d 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -17,8 +17,8 @@ use std::sync::Arc;
17 | use std::{fmt, iter, mem}; | 17 | use std::{fmt, iter, mem}; |
18 | 18 | ||
19 | use crate::{ | 19 | use crate::{ |
20 | db::HirDatabase, expr::ExprId, type_ref::Mutability, Adt, Crate, DefWithBody, GenericParams, | 20 | db::HirDatabase, expr::ExprId, type_ref::Mutability, util::make_mut_slice, Adt, Crate, |
21 | HasGenericParams, Name, Trait, TypeAlias, | 21 | DefWithBody, GenericParams, HasGenericParams, Name, Trait, TypeAlias, |
22 | }; | 22 | }; |
23 | use display::{HirDisplay, HirFormatter}; | 23 | use display::{HirDisplay, HirFormatter}; |
24 | 24 | ||
@@ -308,12 +308,9 @@ impl Substs {
308 | } | 308 | } |
309 | 309 | ||
310 | pub fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | 310 | pub fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { |
311 | // Without an Arc::make_mut_slice, we can't avoid the clone here: | 311 | for t in make_mut_slice(&mut self.0) { |
312 | let mut v: Vec<_> = self.0.iter().cloned().collect(); | ||
313 | for t in &mut v { | ||
314 | t.walk_mut(f); | 312 | t.walk_mut(f); |
315 | } | 313 | } |
316 | self.0 = v.into(); | ||
317 | } | 314 | } |
318 | 315 | ||
319 | pub fn as_single(&self) -> &Ty { | 316 | pub fn as_single(&self) -> &Ty { |
@@ -330,8 +327,7 @@ impl Substs {
330 | .params_including_parent() | 327 | .params_including_parent() |
331 | .into_iter() | 328 | .into_iter() |
332 | .map(|p| Ty::Param { idx: p.idx, name: p.name.clone() }) | 329 | .map(|p| Ty::Param { idx: p.idx, name: p.name.clone() }) |
333 | .collect::<Vec<_>>() | 330 | .collect(), |
334 | .into(), | ||
335 | ) | 331 | ) |
336 | } | 332 | } |
337 | 333 | ||
@@ -342,8 +338,7 @@ impl Substs {
342 | .params_including_parent() | 338 | .params_including_parent() |
343 | .into_iter() | 339 | .into_iter() |
344 | .map(|p| Ty::Bound(p.idx)) | 340 | .map(|p| Ty::Bound(p.idx)) |
345 | .collect::<Vec<_>>() | 341 | .collect(), |
346 | .into(), | ||
347 | ) | 342 | ) |
348 | } | 343 | } |
349 | 344 | ||
@@ -541,12 +536,9 @@ impl TypeWalk for FnSig {
541 | } | 536 | } |
542 | 537 | ||
543 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { | 538 | fn walk_mut(&mut self, f: &mut impl FnMut(&mut Ty)) { |
544 | // Without an Arc::make_mut_slice, we can't avoid the clone here: | 539 | for t in make_mut_slice(&mut self.params_and_return) { |
545 | let mut v: Vec<_> = self.params_and_return.iter().cloned().collect(); | ||
546 | for t in &mut v { | ||
547 | t.walk_mut(f); | 540 | t.walk_mut(f); |
548 | } | 541 | } |
549 | self.params_and_return = v.into(); | ||
550 | } | 542 | } |
551 | } | 543 | } |
552 | 544 | ||
@@ -756,11 +748,9 @@ impl TypeWalk for Ty {
756 | p_ty.parameters.walk_mut(f); | 748 | p_ty.parameters.walk_mut(f); |
757 | } | 749 | } |
758 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { | 750 | Ty::Dyn(predicates) | Ty::Opaque(predicates) => { |
759 | let mut v: Vec<_> = predicates.iter().cloned().collect(); | 751 | for p in make_mut_slice(predicates) { |
760 | for p in &mut v { | ||
761 | p.walk_mut(f); | 752 | p.walk_mut(f); |
762 | } | 753 | } |
763 | *predicates = v.into(); | ||
764 | } | 754 | } |
765 | Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} | 755 | Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} |
766 | } | 756 | } |
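The `walk_mut` implementations above stop cloning into a temporary `Vec` and instead call `make_mut_slice` from the newly added `util` module. That helper's body is not part of the hunks shown here; a plausible sketch, assuming it behaves like `Arc::make_mut` specialised to `Arc<[T]>` (clone the elements only when the slice is shared), would be:

```rust
use std::sync::Arc;

/// Hypothetical sketch of `crate::util::make_mut_slice`: clone-on-write
/// access to an `Arc<[T]>`. If we hold the only reference, hand out the
/// existing slice; otherwise copy the elements into a fresh allocation first.
pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
    if Arc::get_mut(a).is_none() {
        *a = a.iter().cloned().collect();
    }
    Arc::get_mut(a).unwrap()
}
```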
diff --git a/crates/ra_hir/src/ty/infer.rs b/crates/ra_hir/src/ty/infer.rs
index a69f04ff1..cb28fc6bc 100644
--- a/crates/ra_hir/src/ty/infer.rs
+++ b/crates/ra_hir/src/ty/infer.rs
@@ -14,7 +14,6 @@
14 | //! the `ena` crate, which is extracted from rustc. | 14 | //! the `ena` crate, which is extracted from rustc. |
15 | 15 | ||
16 | use std::borrow::Cow; | 16 | use std::borrow::Cow; |
17 | use std::iter::{repeat, repeat_with}; | ||
18 | use std::mem; | 17 | use std::mem; |
19 | use std::ops::Index; | 18 | use std::ops::Index; |
20 | use std::sync::Arc; | 19 | use std::sync::Arc; |
@@ -27,33 +26,39 @@ use ra_prof::profile;
27 | use test_utils::tested_by; | 26 | use test_utils::tested_by; |
28 | 27 | ||
29 | use super::{ | 28 | use super::{ |
30 | autoderef, lower, method_resolution, op, primitive, | 29 | lower, primitive, |
31 | traits::{Guidance, Obligation, ProjectionPredicate, Solution}, | 30 | traits::{Guidance, Obligation, ProjectionPredicate, Solution}, |
32 | ApplicationTy, CallableDef, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, | 31 | ApplicationTy, InEnvironment, ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypableDef, |
33 | Ty, TypableDef, TypeCtor, TypeWalk, | 32 | TypeCtor, TypeWalk, |
34 | }; | 33 | }; |
35 | use crate::{ | 34 | use crate::{ |
36 | adt::VariantDef, | 35 | adt::VariantDef, |
37 | code_model::TypeAlias, | 36 | code_model::TypeAlias, |
38 | db::HirDatabase, | 37 | db::HirDatabase, |
39 | diagnostics::DiagnosticSink, | 38 | diagnostics::DiagnosticSink, |
40 | expr::{ | 39 | expr::{BindingAnnotation, Body, ExprId, PatId}, |
41 | self, Array, BinaryOp, BindingAnnotation, Body, Expr, ExprId, Literal, Pat, PatId, | ||
42 | RecordFieldPat, Statement, UnaryOp, | ||
43 | }, | ||
44 | generics::{GenericParams, HasGenericParams}, | ||
45 | lang_item::LangItemTarget, | ||
46 | name, | 40 | name, |
47 | nameres::Namespace, | 41 | path::known, |
48 | path::{known, GenericArg, GenericArgs}, | ||
49 | resolve::{Resolver, TypeNs}, | 42 | resolve::{Resolver, TypeNs}, |
50 | ty::infer::diagnostics::InferenceDiagnostic, | 43 | ty::infer::diagnostics::InferenceDiagnostic, |
51 | type_ref::{Mutability, TypeRef}, | 44 | type_ref::{Mutability, TypeRef}, |
52 | Adt, AssocItem, ConstData, DefWithBody, FnData, Function, HasBody, Name, Path, StructField, | 45 | Adt, AssocItem, ConstData, DefWithBody, FnData, Function, HasBody, Path, StructField, |
53 | }; | 46 | }; |
54 | 47 | ||
48 | macro_rules! ty_app { | ||
49 | ($ctor:pat, $param:pat) => { | ||
50 | crate::ty::Ty::Apply(crate::ty::ApplicationTy { ctor: $ctor, parameters: $param }) | ||
51 | }; | ||
52 | ($ctor:pat) => { | ||
53 | ty_app!($ctor, _) | ||
54 | }; | ||
55 | } | ||
56 | |||
55 | mod unify; | 57 | mod unify; |
56 | mod path; | 58 | mod path; |
59 | mod expr; | ||
60 | mod pat; | ||
61 | mod coerce; | ||
57 | 62 | ||
58 | /// The entry point of type inference. | 63 | /// The entry point of type inference. |
59 | pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> { | 64 | pub fn infer_query(db: &impl HirDatabase, def: DefWithBody) -> Arc<InferenceResult> { |
@@ -197,15 +202,6 @@ struct InferenceContext<'a, D: HirDatabase> {
197 | coerce_unsized_map: FxHashMap<(TypeCtor, TypeCtor), usize>, | 202 | coerce_unsized_map: FxHashMap<(TypeCtor, TypeCtor), usize>, |
198 | } | 203 | } |
199 | 204 | ||
200 | macro_rules! ty_app { | ||
201 | ($ctor:pat, $param:pat) => { | ||
202 | Ty::Apply(ApplicationTy { ctor: $ctor, parameters: $param }) | ||
203 | }; | ||
204 | ($ctor:pat) => { | ||
205 | ty_app!($ctor, _) | ||
206 | }; | ||
207 | } | ||
208 | |||
209 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | 205 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { |
210 | fn new(db: &'a D, body: Arc<Body>, resolver: Resolver) -> Self { | 206 | fn new(db: &'a D, body: Arc<Body>, resolver: Resolver) -> Self { |
211 | InferenceContext { | 207 | InferenceContext { |
@@ -221,45 +217,6 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
221 | } | 217 | } |
222 | } | 218 | } |
223 | 219 | ||
224 | fn init_coerce_unsized_map( | ||
225 | db: &'a D, | ||
226 | resolver: &Resolver, | ||
227 | ) -> FxHashMap<(TypeCtor, TypeCtor), usize> { | ||
228 | let krate = resolver.krate().unwrap(); | ||
229 | let impls = match db.lang_item(krate, "coerce_unsized".into()) { | ||
230 | Some(LangItemTarget::Trait(trait_)) => db.impls_for_trait(krate, trait_), | ||
231 | _ => return FxHashMap::default(), | ||
232 | }; | ||
233 | |||
234 | impls | ||
235 | .iter() | ||
236 | .filter_map(|impl_block| { | ||
237 | // `CoerseUnsized` has one generic parameter for the target type. | ||
238 | let trait_ref = impl_block.target_trait_ref(db)?; | ||
239 | let cur_from_ty = trait_ref.substs.0.get(0)?; | ||
240 | let cur_to_ty = trait_ref.substs.0.get(1)?; | ||
241 | |||
242 | match (&cur_from_ty, cur_to_ty) { | ||
243 | (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => { | ||
244 | // FIXME: We return the first non-equal bound as the type parameter to coerce to unsized type. | ||
245 | // This works for smart-pointer-like coercion, which covers all impls from std. | ||
246 | st1.iter().zip(st2.iter()).enumerate().find_map(|(i, (ty1, ty2))| { | ||
247 | match (ty1, ty2) { | ||
248 | (Ty::Param { idx: p1, .. }, Ty::Param { idx: p2, .. }) | ||
249 | if p1 != p2 => | ||
250 | { | ||
251 | Some(((*ctor1, *ctor2), i)) | ||
252 | } | ||
253 | _ => None, | ||
254 | } | ||
255 | }) | ||
256 | } | ||
257 | _ => None, | ||
258 | } | ||
259 | }) | ||
260 | .collect() | ||
261 | } | ||
262 | |||
263 | fn resolve_all(mut self) -> InferenceResult { | 220 | fn resolve_all(mut self) -> InferenceResult { |
264 | // FIXME resolve obligations as well (use Guidance if necessary) | 221 | // FIXME resolve obligations as well (use Guidance if necessary) |
265 | let mut result = mem::replace(&mut self.result, InferenceResult::default()); | 222 | let mut result = mem::replace(&mut self.result, InferenceResult::default()); |
@@ -595,1080 +552,6 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
595 | } | 552 | } |
596 | } | 553 | } |
597 | 554 | ||
598 | fn infer_tuple_struct_pat( | ||
599 | &mut self, | ||
600 | path: Option<&Path>, | ||
601 | subpats: &[PatId], | ||
602 | expected: &Ty, | ||
603 | default_bm: BindingMode, | ||
604 | ) -> Ty { | ||
605 | let (ty, def) = self.resolve_variant(path); | ||
606 | |||
607 | self.unify(&ty, expected); | ||
608 | |||
609 | let substs = ty.substs().unwrap_or_else(Substs::empty); | ||
610 | |||
611 | for (i, &subpat) in subpats.iter().enumerate() { | ||
612 | let expected_ty = def | ||
613 | .and_then(|d| d.field(self.db, &Name::new_tuple_field(i))) | ||
614 | .map_or(Ty::Unknown, |field| field.ty(self.db)) | ||
615 | .subst(&substs); | ||
616 | let expected_ty = self.normalize_associated_types_in(expected_ty); | ||
617 | self.infer_pat(subpat, &expected_ty, default_bm); | ||
618 | } | ||
619 | |||
620 | ty | ||
621 | } | ||
622 | |||
623 | fn infer_record_pat( | ||
624 | &mut self, | ||
625 | path: Option<&Path>, | ||
626 | subpats: &[RecordFieldPat], | ||
627 | expected: &Ty, | ||
628 | default_bm: BindingMode, | ||
629 | id: PatId, | ||
630 | ) -> Ty { | ||
631 | let (ty, def) = self.resolve_variant(path); | ||
632 | if let Some(variant) = def { | ||
633 | self.write_variant_resolution(id.into(), variant); | ||
634 | } | ||
635 | |||
636 | self.unify(&ty, expected); | ||
637 | |||
638 | let substs = ty.substs().unwrap_or_else(Substs::empty); | ||
639 | |||
640 | for subpat in subpats { | ||
641 | let matching_field = def.and_then(|it| it.field(self.db, &subpat.name)); | ||
642 | let expected_ty = | ||
643 | matching_field.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs); | ||
644 | let expected_ty = self.normalize_associated_types_in(expected_ty); | ||
645 | self.infer_pat(subpat.pat, &expected_ty, default_bm); | ||
646 | } | ||
647 | |||
648 | ty | ||
649 | } | ||
650 | |||
651 | fn infer_pat(&mut self, pat: PatId, mut expected: &Ty, mut default_bm: BindingMode) -> Ty { | ||
652 | let body = Arc::clone(&self.body); // avoid borrow checker problem | ||
653 | |||
654 | let is_non_ref_pat = match &body[pat] { | ||
655 | Pat::Tuple(..) | ||
656 | | Pat::TupleStruct { .. } | ||
657 | | Pat::Record { .. } | ||
658 | | Pat::Range { .. } | ||
659 | | Pat::Slice { .. } => true, | ||
660 | // FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented. | ||
661 | Pat::Path(..) | Pat::Lit(..) => true, | ||
662 | Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Missing => false, | ||
663 | }; | ||
664 | if is_non_ref_pat { | ||
665 | while let Some((inner, mutability)) = expected.as_reference() { | ||
666 | expected = inner; | ||
667 | default_bm = match default_bm { | ||
668 | BindingMode::Move => BindingMode::Ref(mutability), | ||
669 | BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared), | ||
670 | BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability), | ||
671 | } | ||
672 | } | ||
673 | } else if let Pat::Ref { .. } = &body[pat] { | ||
674 | tested_by!(match_ergonomics_ref); | ||
675 | // When you encounter a `&pat` pattern, reset to Move. | ||
676 | // This is so that `w` is by value: `let (_, &w) = &(1, &2);` | ||
677 | default_bm = BindingMode::Move; | ||
678 | } | ||
679 | |||
680 | // Lose mutability. | ||
681 | let default_bm = default_bm; | ||
682 | let expected = expected; | ||
683 | |||
684 | let ty = match &body[pat] { | ||
685 | Pat::Tuple(ref args) => { | ||
686 | let expectations = match expected.as_tuple() { | ||
687 | Some(parameters) => &*parameters.0, | ||
688 | _ => &[], | ||
689 | }; | ||
690 | let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown)); | ||
691 | |||
692 | let inner_tys = args | ||
693 | .iter() | ||
694 | .zip(expectations_iter) | ||
695 | .map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm)) | ||
696 | .collect(); | ||
697 | |||
698 | Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys)) | ||
699 | } | ||
700 | Pat::Ref { pat, mutability } => { | ||
701 | let expectation = match expected.as_reference() { | ||
702 | Some((inner_ty, exp_mut)) => { | ||
703 | if *mutability != exp_mut { | ||
704 | // FIXME: emit type error? | ||
705 | } | ||
706 | inner_ty | ||
707 | } | ||
708 | _ => &Ty::Unknown, | ||
709 | }; | ||
710 | let subty = self.infer_pat(*pat, expectation, default_bm); | ||
711 | Ty::apply_one(TypeCtor::Ref(*mutability), subty) | ||
712 | } | ||
713 | Pat::TupleStruct { path: p, args: subpats } => { | ||
714 | self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm) | ||
715 | } | ||
716 | Pat::Record { path: p, args: fields } => { | ||
717 | self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat) | ||
718 | } | ||
719 | Pat::Path(path) => { | ||
720 | // FIXME use correct resolver for the surrounding expression | ||
721 | let resolver = self.resolver.clone(); | ||
722 | self.infer_path(&resolver, &path, pat.into()).unwrap_or(Ty::Unknown) | ||
723 | } | ||
724 | Pat::Bind { mode, name: _, subpat } => { | ||
725 | let mode = if mode == &BindingAnnotation::Unannotated { | ||
726 | default_bm | ||
727 | } else { | ||
728 | BindingMode::convert(*mode) | ||
729 | }; | ||
730 | let inner_ty = if let Some(subpat) = subpat { | ||
731 | self.infer_pat(*subpat, expected, default_bm) | ||
732 | } else { | ||
733 | expected.clone() | ||
734 | }; | ||
735 | let inner_ty = self.insert_type_vars_shallow(inner_ty); | ||
736 | |||
737 | let bound_ty = match mode { | ||
738 | BindingMode::Ref(mutability) => { | ||
739 | Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone()) | ||
740 | } | ||
741 | BindingMode::Move => inner_ty.clone(), | ||
742 | }; | ||
743 | let bound_ty = self.resolve_ty_as_possible(&mut vec![], bound_ty); | ||
744 | self.write_pat_ty(pat, bound_ty); | ||
745 | return inner_ty; | ||
746 | } | ||
747 | _ => Ty::Unknown, | ||
748 | }; | ||
749 | // use a new type variable if we got Ty::Unknown here | ||
750 | let ty = self.insert_type_vars_shallow(ty); | ||
751 | self.unify(&ty, expected); | ||
752 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
753 | self.write_pat_ty(pat, ty.clone()); | ||
754 | ty | ||
755 | } | ||
756 | |||
757 | fn substs_for_method_call( | ||
758 | &mut self, | ||
759 | def_generics: Option<Arc<GenericParams>>, | ||
760 | generic_args: Option<&GenericArgs>, | ||
761 | receiver_ty: &Ty, | ||
762 | ) -> Substs { | ||
763 | let (parent_param_count, param_count) = | ||
764 | def_generics.as_ref().map_or((0, 0), |g| (g.count_parent_params(), g.params.len())); | ||
765 | let mut substs = Vec::with_capacity(parent_param_count + param_count); | ||
766 | // Parent arguments are unknown, except for the receiver type | ||
767 | if let Some(parent_generics) = def_generics.and_then(|p| p.parent_params.clone()) { | ||
768 | for param in &parent_generics.params { | ||
769 | if param.name == name::SELF_TYPE { | ||
770 | substs.push(receiver_ty.clone()); | ||
771 | } else { | ||
772 | substs.push(Ty::Unknown); | ||
773 | } | ||
774 | } | ||
775 | } | ||
776 | // handle provided type arguments | ||
777 | if let Some(generic_args) = generic_args { | ||
778 | // if args are provided, it should be all of them, but we can't rely on that | ||
779 | for arg in generic_args.args.iter().take(param_count) { | ||
780 | match arg { | ||
781 | GenericArg::Type(type_ref) => { | ||
782 | let ty = self.make_ty(type_ref); | ||
783 | substs.push(ty); | ||
784 | } | ||
785 | } | ||
786 | } | ||
787 | }; | ||
788 | let supplied_params = substs.len(); | ||
789 | for _ in supplied_params..parent_param_count + param_count { | ||
790 | substs.push(Ty::Unknown); | ||
791 | } | ||
792 | assert_eq!(substs.len(), parent_param_count + param_count); | ||
793 | Substs(substs.into()) | ||
794 | } | ||
795 | |||
796 | fn register_obligations_for_call(&mut self, callable_ty: &Ty) { | ||
797 | if let Ty::Apply(a_ty) = callable_ty { | ||
798 | if let TypeCtor::FnDef(def) = a_ty.ctor { | ||
799 | let generic_predicates = self.db.generic_predicates(def.into()); | ||
800 | for predicate in generic_predicates.iter() { | ||
801 | let predicate = predicate.clone().subst(&a_ty.parameters); | ||
802 | if let Some(obligation) = Obligation::from_predicate(predicate) { | ||
803 | self.obligations.push(obligation); | ||
804 | } | ||
805 | } | ||
806 | // add obligation for trait implementation, if this is a trait method | ||
807 | match def { | ||
808 | CallableDef::Function(f) => { | ||
809 | if let Some(trait_) = f.parent_trait(self.db) { | ||
810 | // construct a TraitDef | ||
811 | let substs = a_ty.parameters.prefix( | ||
812 | trait_.generic_params(self.db).count_params_including_parent(), | ||
813 | ); | ||
814 | self.obligations.push(Obligation::Trait(TraitRef { trait_, substs })); | ||
815 | } | ||
816 | } | ||
817 | CallableDef::Struct(_) | CallableDef::EnumVariant(_) => {} | ||
818 | } | ||
819 | } | ||
820 | } | ||
821 | } | ||
822 | |||
823 | fn infer_method_call( | ||
824 | &mut self, | ||
825 | tgt_expr: ExprId, | ||
826 | receiver: ExprId, | ||
827 | args: &[ExprId], | ||
828 | method_name: &Name, | ||
829 | generic_args: Option<&GenericArgs>, | ||
830 | ) -> Ty { | ||
831 | let receiver_ty = self.infer_expr(receiver, &Expectation::none()); | ||
832 | let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone()); | ||
833 | let resolved = method_resolution::lookup_method( | ||
834 | &canonicalized_receiver.value, | ||
835 | self.db, | ||
836 | method_name, | ||
837 | &self.resolver, | ||
838 | ); | ||
839 | let (derefed_receiver_ty, method_ty, def_generics) = match resolved { | ||
840 | Some((ty, func)) => { | ||
841 | let ty = canonicalized_receiver.decanonicalize_ty(ty); | ||
842 | self.write_method_resolution(tgt_expr, func); | ||
843 | ( | ||
844 | ty, | ||
845 | self.db.type_for_def(func.into(), Namespace::Values), | ||
846 | Some(func.generic_params(self.db)), | ||
847 | ) | ||
848 | } | ||
849 | None => (receiver_ty, Ty::Unknown, None), | ||
850 | }; | ||
851 | let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); | ||
852 | let method_ty = method_ty.apply_substs(substs); | ||
853 | let method_ty = self.insert_type_vars(method_ty); | ||
854 | self.register_obligations_for_call(&method_ty); | ||
855 | let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { | ||
856 | Some(sig) => { | ||
857 | if !sig.params().is_empty() { | ||
858 | (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone()) | ||
859 | } else { | ||
860 | (Ty::Unknown, Vec::new(), sig.ret().clone()) | ||
861 | } | ||
862 | } | ||
863 | None => (Ty::Unknown, Vec::new(), Ty::Unknown), | ||
864 | }; | ||
865 | // Apply autoref so the below unification works correctly | ||
866 | // FIXME: return correct autorefs from lookup_method | ||
867 | let actual_receiver_ty = match expected_receiver_ty.as_reference() { | ||
868 | Some((_, mutability)) => Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty), | ||
869 | _ => derefed_receiver_ty, | ||
870 | }; | ||
871 | self.unify(&expected_receiver_ty, &actual_receiver_ty); | ||
872 | |||
873 | self.check_call_arguments(args, ¶m_tys); | ||
874 | let ret_ty = self.normalize_associated_types_in(ret_ty); | ||
875 | ret_ty | ||
876 | } | ||
877 | |||
878 | /// Infer type of expression with possibly implicit coerce to the expected type. | ||
879 | /// Return the type after possible coercion. | ||
880 | fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty { | ||
881 | let ty = self.infer_expr_inner(expr, &expected); | ||
882 | let ty = if !self.coerce(&ty, &expected.ty) { | ||
883 | self.result | ||
884 | .type_mismatches | ||
885 | .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }); | ||
886 | // Return actual type when type mismatch. | ||
887 | // This is needed for diagnostic when return type mismatch. | ||
888 | ty | ||
889 | } else if expected.ty == Ty::Unknown { | ||
890 | ty | ||
891 | } else { | ||
892 | expected.ty.clone() | ||
893 | }; | ||
894 | |||
895 | self.resolve_ty_as_possible(&mut vec![], ty) | ||
896 | } | ||
897 | |||
898 | /// Merge two types from different branches, with possible implicit coerce. | ||
899 | /// | ||
900 | /// Note that it is only possible that one type are coerced to another. | ||
901 | /// Coercing both types to another least upper bound type is not possible in rustc, | ||
902 | /// which will simply result in "incompatible types" error. | ||
903 | fn coerce_merge_branch<'t>(&mut self, ty1: &Ty, ty2: &Ty) -> Ty { | ||
904 | if self.coerce(ty1, ty2) { | ||
905 | ty2.clone() | ||
906 | } else if self.coerce(ty2, ty1) { | ||
907 | ty1.clone() | ||
908 | } else { | ||
909 | tested_by!(coerce_merge_fail_fallback); | ||
910 | // For incompatible types, we use the latter one as result | ||
911 | // to be better recovery for `if` without `else`. | ||
912 | ty2.clone() | ||
913 | } | ||
914 | } | ||
915 | |||
916 | /// Unify two types, but may coerce the first one to the second one | ||
917 | /// using "implicit coercion rules" if needed. | ||
918 | /// | ||
919 | /// See: https://doc.rust-lang.org/nomicon/coercions.html | ||
920 | fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool { | ||
921 | let from_ty = self.resolve_ty_shallow(from_ty).into_owned(); | ||
922 | let to_ty = self.resolve_ty_shallow(to_ty); | ||
923 | self.coerce_inner(from_ty, &to_ty) | ||
924 | } | ||
925 | |||
926 | fn coerce_inner(&mut self, mut from_ty: Ty, to_ty: &Ty) -> bool { | ||
927 | match (&from_ty, to_ty) { | ||
928 | // Never type will make type variable to fallback to Never Type instead of Unknown. | ||
929 | (ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => { | ||
930 | let var = self.new_maybe_never_type_var(); | ||
931 | self.var_unification_table.union_value(*tv, TypeVarValue::Known(var)); | ||
932 | return true; | ||
933 | } | ||
934 | (ty_app!(TypeCtor::Never), _) => return true, | ||
935 | |||
936 | // Trivial cases, this should go after `never` check to | ||
937 | // avoid infer result type to be never | ||
938 | _ => { | ||
939 | if self.unify_inner_trivial(&from_ty, &to_ty) { | ||
940 | return true; | ||
941 | } | ||
942 | } | ||
943 | } | ||
944 | |||
945 | // Pointer weakening and function to pointer | ||
946 | match (&mut from_ty, to_ty) { | ||
947 | // `*mut T`, `&mut T, `&T`` -> `*const T` | ||
948 | // `&mut T` -> `&T` | ||
949 | // `&mut T` -> `*mut T` | ||
950 | (ty_app!(c1@TypeCtor::RawPtr(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared))) | ||
951 | | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared))) | ||
952 | | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::Ref(Mutability::Shared))) | ||
953 | | (ty_app!(c1@TypeCtor::Ref(Mutability::Mut)), ty_app!(c2@TypeCtor::RawPtr(_))) => { | ||
954 | *c1 = *c2; | ||
955 | } | ||
956 | |||
957 | // Illegal mutability conversion | ||
958 | ( | ||
959 | ty_app!(TypeCtor::RawPtr(Mutability::Shared)), | ||
960 | ty_app!(TypeCtor::RawPtr(Mutability::Mut)), | ||
961 | ) | ||
962 | | ( | ||
963 | ty_app!(TypeCtor::Ref(Mutability::Shared)), | ||
964 | ty_app!(TypeCtor::Ref(Mutability::Mut)), | ||
965 | ) => return false, | ||
966 | |||
967 | // `{function_type}` -> `fn()` | ||
968 | (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnPtr { .. })) => { | ||
969 | match from_ty.callable_sig(self.db) { | ||
970 | None => return false, | ||
971 | Some(sig) => { | ||
972 | let num_args = sig.params_and_return.len() as u16 - 1; | ||
973 | from_ty = | ||
974 | Ty::apply(TypeCtor::FnPtr { num_args }, Substs(sig.params_and_return)); | ||
975 | } | ||
976 | } | ||
977 | } | ||
978 | |||
979 | _ => {} | ||
980 | } | ||
981 | |||
982 | if let Some(ret) = self.try_coerce_unsized(&from_ty, &to_ty) { | ||
983 | return ret; | ||
984 | } | ||
985 | |||
986 | // Auto-deref if we cannot coerce | ||
987 | match (&from_ty, to_ty) { | ||
988 | // FIXME: DerefMut | ||
989 | (ty_app!(TypeCtor::Ref(_), st1), ty_app!(TypeCtor::Ref(_), st2)) => { | ||
990 | self.unify_autoderef_behind_ref(&st1[0], &st2[0]) | ||
991 | } | ||
992 | |||
993 | // Otherwise, normal unify | ||
994 | _ => self.unify(&from_ty, to_ty), | ||
995 | } | ||
996 | } | ||
997 | |||
998 | /// Coerce a type using `from_ty: CoerceUnsized<to_ty>` | ||
999 | /// | ||
1000 | /// See: https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html | ||
1001 | fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> Option<bool> { | ||
1002 | let (ctor1, st1, ctor2, st2) = match (from_ty, to_ty) { | ||
1003 | (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => (ctor1, st1, ctor2, st2), | ||
1004 | _ => return None, | ||
1005 | }; | ||
1006 | |||
1007 | let coerce_generic_index = *self.coerce_unsized_map.get(&(*ctor1, *ctor2))?; | ||
1008 | |||
1009 | // Check `Unsize` first | ||
1010 | match self.check_unsize_and_coerce( | ||
1011 | st1.0.get(coerce_generic_index)?, | ||
1012 | st2.0.get(coerce_generic_index)?, | ||
1013 | 0, | ||
1014 | ) { | ||
1015 | Some(true) => {} | ||
1016 | ret => return ret, | ||
1017 | } | ||
1018 | |||
1019 | let ret = st1 | ||
1020 | .iter() | ||
1021 | .zip(st2.iter()) | ||
1022 | .enumerate() | ||
1023 | .filter(|&(idx, _)| idx != coerce_generic_index) | ||
1024 | .all(|(_, (ty1, ty2))| self.unify(ty1, ty2)); | ||
1025 | |||
1026 | Some(ret) | ||
1027 | } | ||
1028 | |||
1029 | /// Check if `from_ty: Unsize<to_ty>`, and coerce to `to_ty` if it holds. | ||
1030 | /// | ||
1031 | /// It should not be directly called. It is only used by `try_coerce_unsized`. | ||
1032 | /// | ||
1033 | /// See: https://doc.rust-lang.org/nightly/std/marker/trait.Unsize.html | ||
1034 | fn check_unsize_and_coerce(&mut self, from_ty: &Ty, to_ty: &Ty, depth: usize) -> Option<bool> { | ||
1035 | if depth > 1000 { | ||
1036 | panic!("Infinite recursion in coercion"); | ||
1037 | } | ||
1038 | |||
1039 | match (&from_ty, &to_ty) { | ||
1040 | // `[T; N]` -> `[T]` | ||
1041 | (ty_app!(TypeCtor::Array, st1), ty_app!(TypeCtor::Slice, st2)) => { | ||
1042 | Some(self.unify(&st1[0], &st2[0])) | ||
1043 | } | ||
1044 | |||
1045 | // `T` -> `dyn Trait` when `T: Trait` | ||
1046 | (_, Ty::Dyn(_)) => { | ||
1047 | // FIXME: Check predicates | ||
1048 | Some(true) | ||
1049 | } | ||
1050 | |||
1051 | // `(..., T)` -> `(..., U)` when `T: Unsize<U>` | ||
1052 | ( | ||
1053 | ty_app!(TypeCtor::Tuple { cardinality: len1 }, st1), | ||
1054 | ty_app!(TypeCtor::Tuple { cardinality: len2 }, st2), | ||
1055 | ) => { | ||
1056 | if len1 != len2 || *len1 == 0 { | ||
1057 | return None; | ||
1058 | } | ||
1059 | |||
1060 | match self.check_unsize_and_coerce( | ||
1061 | st1.last().unwrap(), | ||
1062 | st2.last().unwrap(), | ||
1063 | depth + 1, | ||
1064 | ) { | ||
1065 | Some(true) => {} | ||
1066 | ret => return ret, | ||
1067 | } | ||
1068 | |||
1069 | let ret = st1[..st1.len() - 1] | ||
1070 | .iter() | ||
1071 | .zip(&st2[..st2.len() - 1]) | ||
1072 | .all(|(ty1, ty2)| self.unify(ty1, ty2)); | ||
1073 | |||
1074 | Some(ret) | ||
1075 | } | ||
1076 | |||
1077 | // Foo<..., T, ...> is Unsize<Foo<..., U, ...>> if: | ||
1078 | // - T: Unsize<U> | ||
1079 | // - Foo is a struct | ||
1080 | // - Only the last field of Foo has a type involving T | ||
1081 | // - T is not part of the type of any other fields | ||
1082 | // - Bar<T>: Unsize<Bar<U>>, if the last field of Foo has type Bar<T> | ||
1083 | ( | ||
1084 | ty_app!(TypeCtor::Adt(Adt::Struct(struct1)), st1), | ||
1085 | ty_app!(TypeCtor::Adt(Adt::Struct(struct2)), st2), | ||
1086 | ) if struct1 == struct2 => { | ||
1087 | let fields = struct1.fields(self.db); | ||
1088 | let (last_field, prev_fields) = fields.split_last()?; | ||
1089 | |||
1090 | // Get the generic parameter involved in the last field. | ||
1091 | let unsize_generic_index = { | ||
1092 | let mut index = None; | ||
1093 | let mut multiple_param = false; | ||
1094 | last_field.ty(self.db).walk(&mut |ty| match ty { | ||
1095 | &Ty::Param { idx, .. } => { | ||
1096 | if index.is_none() { | ||
1097 | index = Some(idx); | ||
1098 | } else if Some(idx) != index { | ||
1099 | multiple_param = true; | ||
1100 | } | ||
1101 | } | ||
1102 | _ => {} | ||
1103 | }); | ||
1104 | |||
1105 | if multiple_param { | ||
1106 | return None; | ||
1107 | } | ||
1108 | index? | ||
1109 | }; | ||
1110 | |||
1111 | // Check other fields do not involve it. | ||
1112 | let mut multiple_used = false; | ||
1113 | prev_fields.iter().for_each(|field| { | ||
1114 | field.ty(self.db).walk(&mut |ty| match ty { | ||
1115 | &Ty::Param { idx, .. } if idx == unsize_generic_index => { | ||
1116 | multiple_used = true | ||
1117 | } | ||
1118 | _ => {} | ||
1119 | }) | ||
1120 | }); | ||
1121 | if multiple_used { | ||
1122 | return None; | ||
1123 | } | ||
1124 | |||
1125 | let unsize_generic_index = unsize_generic_index as usize; | ||
1126 | |||
1127 | // Check `Unsize` first | ||
1128 | match self.check_unsize_and_coerce( | ||
1129 | st1.get(unsize_generic_index)?, | ||
1130 | st2.get(unsize_generic_index)?, | ||
1131 | depth + 1, | ||
1132 | ) { | ||
1133 | Some(true) => {} | ||
1134 | ret => return ret, | ||
1135 | } | ||
1136 | |||
1137 | // Then unify other parameters | ||
1138 | let ret = st1 | ||
1139 | .iter() | ||
1140 | .zip(st2.iter()) | ||
1141 | .enumerate() | ||
1142 | .filter(|&(idx, _)| idx != unsize_generic_index) | ||
1143 | .all(|(_, (ty1, ty2))| self.unify(ty1, ty2)); | ||
1144 | |||
1145 | Some(ret) | ||
1146 | } | ||
1147 | |||
1148 | _ => None, | ||
1149 | } | ||
1150 | } | ||
1151 | |||
1152 | /// Unify `from_ty` to `to_ty` with optional auto Deref | ||
1153 | /// | ||
1154 | /// Note that the outer reference has already been stripped from the parameters. | ||
1155 | fn unify_autoderef_behind_ref(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool { | ||
1156 | let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone()); | ||
1157 | let to_ty = self.resolve_ty_shallow(&to_ty); | ||
1158 | // FIXME: Auto DerefMut | ||
1159 | for derefed_ty in | ||
1160 | autoderef::autoderef(self.db, &self.resolver.clone(), canonicalized.value.clone()) | ||
1161 | { | ||
1162 | let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value); | ||
1163 | match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) { | ||
1164 | // Stop when constructor matches. | ||
1165 | (ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => { | ||
1166 | // It will not recurse to `coerce`. | ||
1167 | return self.unify_substs(st1, st2, 0); | ||
1168 | } | ||
1169 | _ => {} | ||
1170 | } | ||
1171 | } | ||
1172 | |||
1173 | false | ||
1174 | } | ||
1175 | |||
1176 | fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { | ||
1177 | let ty = self.infer_expr_inner(tgt_expr, expected); | ||
1178 | let could_unify = self.unify(&ty, &expected.ty); | ||
1179 | if !could_unify { | ||
1180 | self.result.type_mismatches.insert( | ||
1181 | tgt_expr, | ||
1182 | TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, | ||
1183 | ); | ||
1184 | } | ||
1185 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
1186 | ty | ||
1187 | } | ||
1188 | |||
1189 | fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { | ||
1190 | let body = Arc::clone(&self.body); // avoid borrow checker problem | ||
1191 | let ty = match &body[tgt_expr] { | ||
1192 | Expr::Missing => Ty::Unknown, | ||
1193 | Expr::If { condition, then_branch, else_branch } => { | ||
1194 | // if let is desugared to match, so this is always simple if | ||
1195 | self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); | ||
1196 | |||
1197 | let then_ty = self.infer_expr_inner(*then_branch, &expected); | ||
1198 | let else_ty = match else_branch { | ||
1199 | Some(else_branch) => self.infer_expr_inner(*else_branch, &expected), | ||
1200 | None => Ty::unit(), | ||
1201 | }; | ||
1202 | |||
1203 | self.coerce_merge_branch(&then_ty, &else_ty) | ||
1204 | } | ||
1205 | Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), | ||
1206 | Expr::TryBlock { body } => { | ||
1207 | let _inner = self.infer_expr(*body, expected); | ||
1208 | // FIXME should be std::result::Result<{inner}, _> | ||
1209 | Ty::Unknown | ||
1210 | } | ||
1211 | Expr::Loop { body } => { | ||
1212 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | ||
1213 | // FIXME handle break with value | ||
1214 | Ty::simple(TypeCtor::Never) | ||
1215 | } | ||
1216 | Expr::While { condition, body } => { | ||
1217 | // while let is desugared to a match loop, so this is always simple while | ||
1218 | self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); | ||
1219 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | ||
1220 | Ty::unit() | ||
1221 | } | ||
1222 | Expr::For { iterable, body, pat } => { | ||
1223 | let iterable_ty = self.infer_expr(*iterable, &Expectation::none()); | ||
1224 | |||
1225 | let pat_ty = match self.resolve_into_iter_item() { | ||
1226 | Some(into_iter_item_alias) => { | ||
1227 | let pat_ty = self.new_type_var(); | ||
1228 | let projection = ProjectionPredicate { | ||
1229 | ty: pat_ty.clone(), | ||
1230 | projection_ty: ProjectionTy { | ||
1231 | associated_ty: into_iter_item_alias, | ||
1232 | parameters: Substs::single(iterable_ty), | ||
1233 | }, | ||
1234 | }; | ||
1235 | self.obligations.push(Obligation::Projection(projection)); | ||
1236 | self.resolve_ty_as_possible(&mut vec![], pat_ty) | ||
1237 | } | ||
1238 | None => Ty::Unknown, | ||
1239 | }; | ||
1240 | |||
1241 | self.infer_pat(*pat, &pat_ty, BindingMode::default()); | ||
1242 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | ||
1243 | Ty::unit() | ||
1244 | } | ||
1245 | Expr::Lambda { body, args, arg_types } => { | ||
1246 | assert_eq!(args.len(), arg_types.len()); | ||
1247 | |||
1248 | let mut sig_tys = Vec::new(); | ||
1249 | |||
1250 | for (arg_pat, arg_type) in args.iter().zip(arg_types.iter()) { | ||
1251 | let expected = if let Some(type_ref) = arg_type { | ||
1252 | self.make_ty(type_ref) | ||
1253 | } else { | ||
1254 | Ty::Unknown | ||
1255 | }; | ||
1256 | let arg_ty = self.infer_pat(*arg_pat, &expected, BindingMode::default()); | ||
1257 | sig_tys.push(arg_ty); | ||
1258 | } | ||
1259 | |||
1260 | // add return type | ||
1261 | let ret_ty = self.new_type_var(); | ||
1262 | sig_tys.push(ret_ty.clone()); | ||
1263 | let sig_ty = Ty::apply( | ||
1264 | TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 }, | ||
1265 | Substs(sig_tys.into()), | ||
1266 | ); | ||
1267 | let closure_ty = Ty::apply_one( | ||
1268 | TypeCtor::Closure { def: self.body.owner(), expr: tgt_expr }, | ||
1269 | sig_ty, | ||
1270 | ); | ||
1271 | |||
1272 | // Eagerly try to relate the closure type with the expected | ||
1273 | // type, otherwise we often won't have enough information to | ||
1274 | // infer the body. | ||
1275 | self.coerce(&closure_ty, &expected.ty); | ||
1276 | |||
1277 | self.infer_expr(*body, &Expectation::has_type(ret_ty)); | ||
1278 | closure_ty | ||
1279 | } | ||
1280 | Expr::Call { callee, args } => { | ||
1281 | let callee_ty = self.infer_expr(*callee, &Expectation::none()); | ||
1282 | let (param_tys, ret_ty) = match callee_ty.callable_sig(self.db) { | ||
1283 | Some(sig) => (sig.params().to_vec(), sig.ret().clone()), | ||
1284 | None => { | ||
1285 | // Not callable | ||
1286 | // FIXME: report an error | ||
1287 | (Vec::new(), Ty::Unknown) | ||
1288 | } | ||
1289 | }; | ||
1290 | self.register_obligations_for_call(&callee_ty); | ||
1291 | self.check_call_arguments(args, ¶m_tys); | ||
1292 | let ret_ty = self.normalize_associated_types_in(ret_ty); | ||
1293 | ret_ty | ||
1294 | } | ||
1295 | Expr::MethodCall { receiver, args, method_name, generic_args } => self | ||
1296 | .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()), | ||
1297 | Expr::Match { expr, arms } => { | ||
1298 | let input_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1299 | |||
1300 | let mut result_ty = self.new_maybe_never_type_var(); | ||
1301 | |||
1302 | for arm in arms { | ||
1303 | for &pat in &arm.pats { | ||
1304 | let _pat_ty = self.infer_pat(pat, &input_ty, BindingMode::default()); | ||
1305 | } | ||
1306 | if let Some(guard_expr) = arm.guard { | ||
1307 | self.infer_expr( | ||
1308 | guard_expr, | ||
1309 | &Expectation::has_type(Ty::simple(TypeCtor::Bool)), | ||
1310 | ); | ||
1311 | } | ||
1312 | |||
1313 | let arm_ty = self.infer_expr_inner(arm.expr, &expected); | ||
1314 | result_ty = self.coerce_merge_branch(&result_ty, &arm_ty); | ||
1315 | } | ||
1316 | |||
1317 | result_ty | ||
1318 | } | ||
1319 | Expr::Path(p) => { | ||
1320 | // FIXME this could be more efficient... | ||
1321 | let resolver = expr::resolver_for_expr(self.body.clone(), self.db, tgt_expr); | ||
1322 | self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) | ||
1323 | } | ||
1324 | Expr::Continue => Ty::simple(TypeCtor::Never), | ||
1325 | Expr::Break { expr } => { | ||
1326 | if let Some(expr) = expr { | ||
1327 | // FIXME handle break with value | ||
1328 | self.infer_expr(*expr, &Expectation::none()); | ||
1329 | } | ||
1330 | Ty::simple(TypeCtor::Never) | ||
1331 | } | ||
1332 | Expr::Return { expr } => { | ||
1333 | if let Some(expr) = expr { | ||
1334 | self.infer_expr(*expr, &Expectation::has_type(self.return_ty.clone())); | ||
1335 | } | ||
1336 | Ty::simple(TypeCtor::Never) | ||
1337 | } | ||
1338 | Expr::RecordLit { path, fields, spread } => { | ||
1339 | let (ty, def_id) = self.resolve_variant(path.as_ref()); | ||
1340 | if let Some(variant) = def_id { | ||
1341 | self.write_variant_resolution(tgt_expr.into(), variant); | ||
1342 | } | ||
1343 | |||
1344 | self.unify(&ty, &expected.ty); | ||
1345 | |||
1346 | let substs = ty.substs().unwrap_or_else(Substs::empty); | ||
1347 | for (field_idx, field) in fields.iter().enumerate() { | ||
1348 | let field_ty = def_id | ||
1349 | .and_then(|it| match it.field(self.db, &field.name) { | ||
1350 | Some(field) => Some(field), | ||
1351 | None => { | ||
1352 | self.push_diagnostic(InferenceDiagnostic::NoSuchField { | ||
1353 | expr: tgt_expr, | ||
1354 | field: field_idx, | ||
1355 | }); | ||
1356 | None | ||
1357 | } | ||
1358 | }) | ||
1359 | .map_or(Ty::Unknown, |field| field.ty(self.db)) | ||
1360 | .subst(&substs); | ||
1361 | self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); | ||
1362 | } | ||
1363 | if let Some(expr) = spread { | ||
1364 | self.infer_expr(*expr, &Expectation::has_type(ty.clone())); | ||
1365 | } | ||
1366 | ty | ||
1367 | } | ||
1368 | Expr::Field { expr, name } => { | ||
1369 | let receiver_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1370 | let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty); | ||
1371 | let ty = autoderef::autoderef( | ||
1372 | self.db, | ||
1373 | &self.resolver.clone(), | ||
1374 | canonicalized.value.clone(), | ||
1375 | ) | ||
1376 | .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) { | ||
1377 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
1378 | TypeCtor::Tuple { .. } => name | ||
1379 | .as_tuple_index() | ||
1380 | .and_then(|idx| a_ty.parameters.0.get(idx).cloned()), | ||
1381 | TypeCtor::Adt(Adt::Struct(s)) => s.field(self.db, name).map(|field| { | ||
1382 | self.write_field_resolution(tgt_expr, field); | ||
1383 | field.ty(self.db).subst(&a_ty.parameters) | ||
1384 | }), | ||
1385 | _ => None, | ||
1386 | }, | ||
1387 | _ => None, | ||
1388 | }) | ||
1389 | .unwrap_or(Ty::Unknown); | ||
1390 | let ty = self.insert_type_vars(ty); | ||
1391 | self.normalize_associated_types_in(ty) | ||
1392 | } | ||
1393 | Expr::Await { expr } => { | ||
1394 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1395 | let ty = match self.resolve_future_future_output() { | ||
1396 | Some(future_future_output_alias) => { | ||
1397 | let ty = self.new_type_var(); | ||
1398 | let projection = ProjectionPredicate { | ||
1399 | ty: ty.clone(), | ||
1400 | projection_ty: ProjectionTy { | ||
1401 | associated_ty: future_future_output_alias, | ||
1402 | parameters: Substs::single(inner_ty), | ||
1403 | }, | ||
1404 | }; | ||
1405 | self.obligations.push(Obligation::Projection(projection)); | ||
1406 | self.resolve_ty_as_possible(&mut vec![], ty) | ||
1407 | } | ||
1408 | None => Ty::Unknown, | ||
1409 | }; | ||
1410 | ty | ||
1411 | } | ||
1412 | Expr::Try { expr } => { | ||
1413 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1414 | let ty = match self.resolve_ops_try_ok() { | ||
1415 | Some(ops_try_ok_alias) => { | ||
1416 | let ty = self.new_type_var(); | ||
1417 | let projection = ProjectionPredicate { | ||
1418 | ty: ty.clone(), | ||
1419 | projection_ty: ProjectionTy { | ||
1420 | associated_ty: ops_try_ok_alias, | ||
1421 | parameters: Substs::single(inner_ty), | ||
1422 | }, | ||
1423 | }; | ||
1424 | self.obligations.push(Obligation::Projection(projection)); | ||
1425 | self.resolve_ty_as_possible(&mut vec![], ty) | ||
1426 | } | ||
1427 | None => Ty::Unknown, | ||
1428 | }; | ||
1429 | ty | ||
1430 | } | ||
1431 | Expr::Cast { expr, type_ref } => { | ||
1432 | let _inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1433 | let cast_ty = self.make_ty(type_ref); | ||
1434 | // FIXME check the cast... | ||
1435 | cast_ty | ||
1436 | } | ||
1437 | Expr::Ref { expr, mutability } => { | ||
1438 | let expectation = | ||
1439 | if let Some((exp_inner, exp_mutability)) = &expected.ty.as_reference() { | ||
1440 | if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared { | ||
1441 | // FIXME: throw type error - expected mut reference but found shared ref, | ||
1442 | // which cannot be coerced | ||
1443 | } | ||
1444 | Expectation::has_type(Ty::clone(exp_inner)) | ||
1445 | } else { | ||
1446 | Expectation::none() | ||
1447 | }; | ||
1448 | // FIXME reference coercions etc. | ||
1449 | let inner_ty = self.infer_expr(*expr, &expectation); | ||
1450 | Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) | ||
1451 | } | ||
1452 | Expr::Box { expr } => { | ||
1453 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1454 | if let Some(box_) = self.resolve_boxed_box() { | ||
1455 | Ty::apply_one(TypeCtor::Adt(box_), inner_ty) | ||
1456 | } else { | ||
1457 | Ty::Unknown | ||
1458 | } | ||
1459 | } | ||
1460 | Expr::UnaryOp { expr, op } => { | ||
1461 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
1462 | match op { | ||
1463 | UnaryOp::Deref => { | ||
1464 | let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty); | ||
1465 | if let Some(derefed_ty) = | ||
1466 | autoderef::deref(self.db, &self.resolver, &canonicalized.value) | ||
1467 | { | ||
1468 | canonicalized.decanonicalize_ty(derefed_ty.value) | ||
1469 | } else { | ||
1470 | Ty::Unknown | ||
1471 | } | ||
1472 | } | ||
1473 | UnaryOp::Neg => { | ||
1474 | match &inner_ty { | ||
1475 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
1476 | TypeCtor::Int(primitive::UncertainIntTy::Unknown) | ||
1477 | | TypeCtor::Int(primitive::UncertainIntTy::Known( | ||
1478 | primitive::IntTy { | ||
1479 | signedness: primitive::Signedness::Signed, | ||
1480 | .. | ||
1481 | }, | ||
1482 | )) | ||
1483 | | TypeCtor::Float(..) => inner_ty, | ||
1484 | _ => Ty::Unknown, | ||
1485 | }, | ||
1486 | Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => { | ||
1487 | inner_ty | ||
1488 | } | ||
1489 | // FIXME: resolve ops::Neg trait | ||
1490 | _ => Ty::Unknown, | ||
1491 | } | ||
1492 | } | ||
1493 | UnaryOp::Not => { | ||
1494 | match &inner_ty { | ||
1495 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
1496 | TypeCtor::Bool | TypeCtor::Int(_) => inner_ty, | ||
1497 | _ => Ty::Unknown, | ||
1498 | }, | ||
1499 | Ty::Infer(InferTy::IntVar(..)) => inner_ty, | ||
1500 | // FIXME: resolve ops::Not trait for inner_ty | ||
1501 | _ => Ty::Unknown, | ||
1502 | } | ||
1503 | } | ||
1504 | } | ||
1505 | } | ||
1506 | Expr::BinaryOp { lhs, rhs, op } => match op { | ||
1507 | Some(op) => { | ||
1508 | let lhs_expectation = match op { | ||
1509 | BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)), | ||
1510 | _ => Expectation::none(), | ||
1511 | }; | ||
1512 | let lhs_ty = self.infer_expr(*lhs, &lhs_expectation); | ||
1513 | // FIXME: find implementation of trait corresponding to operation | ||
1514 | // symbol and resolve associated `Output` type | ||
1515 | let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty); | ||
1516 | let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation)); | ||
1517 | |||
1518 | // FIXME: similar as above, return ty is often associated trait type | ||
1519 | op::binary_op_return_ty(*op, rhs_ty) | ||
1520 | } | ||
1521 | _ => Ty::Unknown, | ||
1522 | }, | ||
1523 | Expr::Index { base, index } => { | ||
1524 | let _base_ty = self.infer_expr(*base, &Expectation::none()); | ||
1525 | let _index_ty = self.infer_expr(*index, &Expectation::none()); | ||
1526 | // FIXME: use `std::ops::Index::Output` to figure out the real return type | ||
1527 | Ty::Unknown | ||
1528 | } | ||
1529 | Expr::Tuple { exprs } => { | ||
1530 | let mut tys = match &expected.ty { | ||
1531 | ty_app!(TypeCtor::Tuple { .. }, st) => st | ||
1532 | .iter() | ||
1533 | .cloned() | ||
1534 | .chain(repeat_with(|| self.new_type_var())) | ||
1535 | .take(exprs.len()) | ||
1536 | .collect::<Vec<_>>(), | ||
1537 | _ => (0..exprs.len()).map(|_| self.new_type_var()).collect(), | ||
1538 | }; | ||
1539 | |||
1540 | for (expr, ty) in exprs.iter().zip(tys.iter_mut()) { | ||
1541 | self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone())); | ||
1542 | } | ||
1543 | |||
1544 | Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into())) | ||
1545 | } | ||
1546 | Expr::Array(array) => { | ||
1547 | let elem_ty = match &expected.ty { | ||
1548 | ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => { | ||
1549 | st.as_single().clone() | ||
1550 | } | ||
1551 | _ => self.new_type_var(), | ||
1552 | }; | ||
1553 | |||
1554 | match array { | ||
1555 | Array::ElementList(items) => { | ||
1556 | for expr in items.iter() { | ||
1557 | self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); | ||
1558 | } | ||
1559 | } | ||
1560 | Array::Repeat { initializer, repeat } => { | ||
1561 | self.infer_expr_coerce( | ||
1562 | *initializer, | ||
1563 | &Expectation::has_type(elem_ty.clone()), | ||
1564 | ); | ||
1565 | self.infer_expr( | ||
1566 | *repeat, | ||
1567 | &Expectation::has_type(Ty::simple(TypeCtor::Int( | ||
1568 | primitive::UncertainIntTy::Known(primitive::IntTy::usize()), | ||
1569 | ))), | ||
1570 | ); | ||
1571 | } | ||
1572 | } | ||
1573 | |||
1574 | Ty::apply_one(TypeCtor::Array, elem_ty) | ||
1575 | } | ||
1576 | Expr::Literal(lit) => match lit { | ||
1577 | Literal::Bool(..) => Ty::simple(TypeCtor::Bool), | ||
1578 | Literal::String(..) => { | ||
1579 | Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str)) | ||
1580 | } | ||
1581 | Literal::ByteString(..) => { | ||
1582 | let byte_type = Ty::simple(TypeCtor::Int(primitive::UncertainIntTy::Known( | ||
1583 | primitive::IntTy::u8(), | ||
1584 | ))); | ||
1585 | let slice_type = Ty::apply_one(TypeCtor::Slice, byte_type); | ||
1586 | Ty::apply_one(TypeCtor::Ref(Mutability::Shared), slice_type) | ||
1587 | } | ||
1588 | Literal::Char(..) => Ty::simple(TypeCtor::Char), | ||
1589 | Literal::Int(_v, ty) => Ty::simple(TypeCtor::Int(*ty)), | ||
1590 | Literal::Float(_v, ty) => Ty::simple(TypeCtor::Float(*ty)), | ||
1591 | }, | ||
1592 | }; | ||
1593 | // use a new type variable if we got Ty::Unknown here | ||
1594 | let ty = self.insert_type_vars_shallow(ty); | ||
1595 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
1596 | self.write_expr_ty(tgt_expr, ty.clone()); | ||
1597 | ty | ||
1598 | } | ||
1599 | |||
1600 | fn infer_block( | ||
1601 | &mut self, | ||
1602 | statements: &[Statement], | ||
1603 | tail: Option<ExprId>, | ||
1604 | expected: &Expectation, | ||
1605 | ) -> Ty { | ||
1606 | let mut diverges = false; | ||
1607 | for stmt in statements { | ||
1608 | match stmt { | ||
1609 | Statement::Let { pat, type_ref, initializer } => { | ||
1610 | let decl_ty = | ||
1611 | type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown); | ||
1612 | |||
1613 | // Always use the declared type when specified | ||
1614 | let mut ty = decl_ty.clone(); | ||
1615 | |||
1616 | if let Some(expr) = initializer { | ||
1617 | let actual_ty = | ||
1618 | self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone())); | ||
1619 | if decl_ty == Ty::Unknown { | ||
1620 | ty = actual_ty; | ||
1621 | } | ||
1622 | } | ||
1623 | |||
1624 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
1625 | self.infer_pat(*pat, &ty, BindingMode::default()); | ||
1626 | } | ||
1627 | Statement::Expr(expr) => { | ||
1628 | if let ty_app!(TypeCtor::Never) = self.infer_expr(*expr, &Expectation::none()) { | ||
1629 | diverges = true; | ||
1630 | } | ||
1631 | } | ||
1632 | } | ||
1633 | } | ||
1634 | |||
1635 | let ty = if let Some(expr) = tail { | ||
1636 | self.infer_expr_coerce(expr, expected) | ||
1637 | } else { | ||
1638 | self.coerce(&Ty::unit(), &expected.ty); | ||
1639 | Ty::unit() | ||
1640 | }; | ||
1641 | if diverges { | ||
1642 | Ty::simple(TypeCtor::Never) | ||
1643 | } else { | ||
1644 | ty | ||
1645 | } | ||
1646 | } | ||
1647 | |||
1648 | fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) { | ||
1649 | // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- | ||
1650 | // We do this in a pretty awful way: first we type-check any arguments | ||
1651 | // that are not closures, then we type-check the closures. This is so | ||
1652 | // that we have more information about the types of arguments when we | ||
1653 | // type-check the functions. This isn't really the right way to do this. | ||
1654 | for &check_closures in &[false, true] { | ||
1655 | let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown)); | ||
1656 | for (&arg, param_ty) in args.iter().zip(param_iter) { | ||
1657 | let is_closure = match &self.body[arg] { | ||
1658 | Expr::Lambda { .. } => true, | ||
1659 | _ => false, | ||
1660 | }; | ||
1661 | |||
1662 | if is_closure != check_closures { | ||
1663 | continue; | ||
1664 | } | ||
1665 | |||
1666 | let param_ty = self.normalize_associated_types_in(param_ty); | ||
1667 | self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); | ||
1668 | } | ||
1669 | } | ||
1670 | } | ||
1671 | |||
1672 | fn collect_const(&mut self, data: &ConstData) { | 555 | fn collect_const(&mut self, data: &ConstData) { |
1673 | self.return_ty = self.make_ty(data.type_ref()); | 556 | self.return_ty = self.make_ty(data.type_ref()); |
1674 | } | 557 | } |
diff --git a/crates/ra_hir/src/ty/infer/coerce.rs b/crates/ra_hir/src/ty/infer/coerce.rs new file mode 100644 index 000000000..0429a9866 --- /dev/null +++ b/crates/ra_hir/src/ty/infer/coerce.rs | |||
@@ -0,0 +1,336 @@ | |||
1 | //! Coercion logic. Coercions are certain type conversions that can implicitly | ||
2 | //! happen in certain places, e.g. weakening `&mut` to `&` or deref coercions | ||
3 | //! like going from `&Vec<T>` to `&[T]`. | ||
4 | //! | ||
5 | //! See: https://doc.rust-lang.org/nomicon/coercions.html | ||
6 | |||
7 | use rustc_hash::FxHashMap; | ||
8 | |||
9 | use test_utils::tested_by; | ||
10 | |||
11 | use super::{InferTy, InferenceContext, TypeVarValue}; | ||
12 | use crate::{ | ||
13 | db::HirDatabase, | ||
14 | lang_item::LangItemTarget, | ||
15 | resolve::Resolver, | ||
16 | ty::{autoderef, Substs, Ty, TypeCtor, TypeWalk}, | ||
17 | type_ref::Mutability, | ||
18 | Adt, | ||
19 | }; | ||
20 | |||
21 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | ||
22 | /// Unify two types, possibly coercing the first one to the second one | ||
23 | /// using the "implicit coercion rules" if needed. | ||
24 | pub(super) fn coerce(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool { | ||
25 | let from_ty = self.resolve_ty_shallow(from_ty).into_owned(); | ||
26 | let to_ty = self.resolve_ty_shallow(to_ty); | ||
27 | self.coerce_inner(from_ty, &to_ty) | ||
28 | } | ||
29 | |||
30 | /// Merge two types from different branches, with a possible implicit coercion. | ||
31 | /// | ||
32 | /// Note that only one of the types can be coerced to the other. | ||
33 | /// Coercing both types to a common least upper bound is not possible in rustc, | ||
34 | /// which simply reports an "incompatible types" error instead. | ||
35 | pub(super) fn coerce_merge_branch<'t>(&mut self, ty1: &Ty, ty2: &Ty) -> Ty { | ||
36 | if self.coerce(ty1, ty2) { | ||
37 | ty2.clone() | ||
38 | } else if self.coerce(ty2, ty1) { | ||
39 | ty1.clone() | ||
40 | } else { | ||
41 | tested_by!(coerce_merge_fail_fallback); | ||
42 | // For incompatible types, we use the latter one as the result | ||
43 | // to give better recovery for `if` without `else`. | ||
44 | ty2.clone() | ||
45 | } | ||
46 | } | ||
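In surface Rust, `coerce_merge_branch` is what lets two branches have different but compatible types, as long as one branch's type coerces into the other's. A small sketch relying only on the standard `&mut T` -> `&T` coercion:

    fn choose<'a>(flag: bool, a: &'a mut i32, b: &'a i32) -> &'a i32 {
        // The `then` branch has type `&mut i32` and the `else` branch `&i32`;
        // the mutable reference coerces to a shared one, so the merged type
        // of the whole `if` is `&i32`.
        if flag { a } else { b }
    }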
47 | |||
48 | pub(super) fn init_coerce_unsized_map( | ||
49 | db: &'a D, | ||
50 | resolver: &Resolver, | ||
51 | ) -> FxHashMap<(TypeCtor, TypeCtor), usize> { | ||
52 | let krate = resolver.krate().unwrap(); | ||
53 | let impls = match db.lang_item(krate, "coerce_unsized".into()) { | ||
54 | Some(LangItemTarget::Trait(trait_)) => db.impls_for_trait(krate, trait_), | ||
55 | _ => return FxHashMap::default(), | ||
56 | }; | ||
57 | |||
58 | impls | ||
59 | .iter() | ||
60 | .filter_map(|impl_block| { | ||
61 | // `CoerceUnsized` has one generic parameter for the target type. | ||
62 | let trait_ref = impl_block.target_trait_ref(db)?; | ||
63 | let cur_from_ty = trait_ref.substs.0.get(0)?; | ||
64 | let cur_to_ty = trait_ref.substs.0.get(1)?; | ||
65 | |||
66 | match (&cur_from_ty, cur_to_ty) { | ||
67 | (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => { | ||
68 | // FIXME: We return the first non-equal bound as the type parameter to coerce to the unsized type. | ||
69 | // This works for smart-pointer-like coercions, which cover all impls from std. | ||
70 | st1.iter().zip(st2.iter()).enumerate().find_map(|(i, (ty1, ty2))| { | ||
71 | match (ty1, ty2) { | ||
72 | (Ty::Param { idx: p1, .. }, Ty::Param { idx: p2, .. }) | ||
73 | if p1 != p2 => | ||
74 | { | ||
75 | Some(((*ctor1, *ctor2), i)) | ||
76 | } | ||
77 | _ => None, | ||
78 | } | ||
79 | }) | ||
80 | } | ||
81 | _ => None, | ||
82 | } | ||
83 | }) | ||
84 | .collect() | ||
85 | } | ||
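The map built here keys the source and target type constructors of each `CoerceUnsized` impl to the index of the generic parameter that actually changes. For the standard smart pointers the relevant impls are roughly of the shape sketched in the comment below (simplified; the real bounds live in the standard library), and the runnable part shows the user-visible effect:

    use std::fmt::Debug;
    use std::rc::Rc;

    // Roughly: impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
    // Only the pointee parameter differs between the two sides, so the map
    // records index 0 for the (Rc, Rc) constructor pair.
    fn main() {
        let concrete: Rc<i32> = Rc::new(5);
        let erased: Rc<dyn Debug> = concrete; // unsizing coercion via that impl
        println!("{:?}", erased);
    }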
86 | |||
87 | fn coerce_inner(&mut self, mut from_ty: Ty, to_ty: &Ty) -> bool { | ||
88 | match (&from_ty, to_ty) { | ||
89 | // The never type makes the type variable fall back to the never type instead of Unknown. | ||
90 | (ty_app!(TypeCtor::Never), Ty::Infer(InferTy::TypeVar(tv))) => { | ||
91 | let var = self.new_maybe_never_type_var(); | ||
92 | self.var_unification_table.union_value(*tv, TypeVarValue::Known(var)); | ||
93 | return true; | ||
94 | } | ||
95 | (ty_app!(TypeCtor::Never), _) => return true, | ||
96 | |||
97 | // Trivial cases; this should go after the `never` check to | ||
98 | // avoid inferring the result type as never | ||
99 | _ => { | ||
100 | if self.unify_inner_trivial(&from_ty, &to_ty) { | ||
101 | return true; | ||
102 | } | ||
103 | } | ||
104 | } | ||
105 | |||
106 | // Pointer weakening and function to pointer | ||
107 | match (&mut from_ty, to_ty) { | ||
108 | // `*mut T`, `&mut T`, `&T` -> `*const T` | ||
109 | // `&mut T` -> `&T` | ||
110 | // `&mut T` -> `*mut T` | ||
111 | (ty_app!(c1@TypeCtor::RawPtr(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared))) | ||
112 | | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::RawPtr(Mutability::Shared))) | ||
113 | | (ty_app!(c1@TypeCtor::Ref(_)), ty_app!(c2@TypeCtor::Ref(Mutability::Shared))) | ||
114 | | (ty_app!(c1@TypeCtor::Ref(Mutability::Mut)), ty_app!(c2@TypeCtor::RawPtr(_))) => { | ||
115 | *c1 = *c2; | ||
116 | } | ||
117 | |||
118 | // Illegal mutability conversion | ||
119 | ( | ||
120 | ty_app!(TypeCtor::RawPtr(Mutability::Shared)), | ||
121 | ty_app!(TypeCtor::RawPtr(Mutability::Mut)), | ||
122 | ) | ||
123 | | ( | ||
124 | ty_app!(TypeCtor::Ref(Mutability::Shared)), | ||
125 | ty_app!(TypeCtor::Ref(Mutability::Mut)), | ||
126 | ) => return false, | ||
127 | |||
128 | // `{function_type}` -> `fn()` | ||
129 | (ty_app!(TypeCtor::FnDef(_)), ty_app!(TypeCtor::FnPtr { .. })) => { | ||
130 | match from_ty.callable_sig(self.db) { | ||
131 | None => return false, | ||
132 | Some(sig) => { | ||
133 | let num_args = sig.params_and_return.len() as u16 - 1; | ||
134 | from_ty = | ||
135 | Ty::apply(TypeCtor::FnPtr { num_args }, Substs(sig.params_and_return)); | ||
136 | } | ||
137 | } | ||
138 | } | ||
139 | |||
140 | _ => {} | ||
141 | } | ||
142 | |||
143 | if let Some(ret) = self.try_coerce_unsized(&from_ty, &to_ty) { | ||
144 | return ret; | ||
145 | } | ||
146 | |||
147 | // Auto-deref if we cannot coerce | ||
148 | match (&from_ty, to_ty) { | ||
149 | // FIXME: DerefMut | ||
150 | (ty_app!(TypeCtor::Ref(_), st1), ty_app!(TypeCtor::Ref(_), st2)) => { | ||
151 | self.unify_autoderef_behind_ref(&st1[0], &st2[0]) | ||
152 | } | ||
153 | |||
154 | // Otherwise, normal unify | ||
155 | _ => self.unify(&from_ty, to_ty), | ||
156 | } | ||
157 | } | ||
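Concretely, the weakening and function-pointer rules above accept code like the following (a plain sketch of the surface-level coercions, not taken from this crate):

    fn add_one(x: i32) -> i32 {
        x + 1
    }

    fn main() {
        let mut n = 0i32;

        // `&mut i32` weakens to `&i32`.
        let shared: &i32 = &mut n;
        assert_eq!(*shared, 0);

        // `&mut i32` -> `*mut i32`, then `*mut i32` -> `*const i32`.
        let raw_mut: *mut i32 = &mut n;
        let raw_const: *const i32 = raw_mut;

        // A function item type coerces to the corresponding `fn` pointer.
        let f: fn(i32) -> i32 = add_one;
        assert_eq!(f(unsafe { *raw_const }), 1);
    }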
158 | |||
159 | /// Coerce a type using `from_ty: CoerceUnsized<to_ty>` | ||
160 | /// | ||
161 | /// See: https://doc.rust-lang.org/nightly/std/marker/trait.CoerceUnsized.html | ||
162 | fn try_coerce_unsized(&mut self, from_ty: &Ty, to_ty: &Ty) -> Option<bool> { | ||
163 | let (ctor1, st1, ctor2, st2) = match (from_ty, to_ty) { | ||
164 | (ty_app!(ctor1, st1), ty_app!(ctor2, st2)) => (ctor1, st1, ctor2, st2), | ||
165 | _ => return None, | ||
166 | }; | ||
167 | |||
168 | let coerce_generic_index = *self.coerce_unsized_map.get(&(*ctor1, *ctor2))?; | ||
169 | |||
170 | // Check `Unsize` first | ||
171 | match self.check_unsize_and_coerce( | ||
172 | st1.0.get(coerce_generic_index)?, | ||
173 | st2.0.get(coerce_generic_index)?, | ||
174 | 0, | ||
175 | ) { | ||
176 | Some(true) => {} | ||
177 | ret => return ret, | ||
178 | } | ||
179 | |||
180 | let ret = st1 | ||
181 | .iter() | ||
182 | .zip(st2.iter()) | ||
183 | .enumerate() | ||
184 | .filter(|&(idx, _)| idx != coerce_generic_index) | ||
185 | .all(|(_, (ty1, ty2))| self.unify(ty1, ty2)); | ||
186 | |||
187 | Some(ret) | ||
188 | } | ||
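`try_coerce_unsized` is what accepts assignments such as the ones below: the constructor pair is looked up in the map, the recorded parameter is checked for `Unsize`, and the remaining parameters must simply unify. A plain sketch:

    use std::fmt::Display;

    fn main() {
        // `Box<[i32; 3]>` -> `Box<[i32]>`: only the pointee parameter changes,
        // and it is the one checked via `Unsize`.
        let boxed: Box<[i32]> = Box::new([1, 2, 3]);

        // `&&str` -> `&dyn Display`: unsizing to a trait object behind a reference.
        let shown: &dyn Display = &"hello";

        println!("{:?} / {}", boxed, shown);
    }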
189 | |||
190 | /// Check if `from_ty: Unsize<to_ty>`, and coerce to `to_ty` if it holds. | ||
191 | /// | ||
192 | /// It should not be directly called. It is only used by `try_coerce_unsized`. | ||
193 | /// | ||
194 | /// See: https://doc.rust-lang.org/nightly/std/marker/trait.Unsize.html | ||
195 | fn check_unsize_and_coerce(&mut self, from_ty: &Ty, to_ty: &Ty, depth: usize) -> Option<bool> { | ||
196 | if depth > 1000 { | ||
197 | panic!("Infinite recursion in coercion"); | ||
198 | } | ||
199 | |||
200 | match (&from_ty, &to_ty) { | ||
201 | // `[T; N]` -> `[T]` | ||
202 | (ty_app!(TypeCtor::Array, st1), ty_app!(TypeCtor::Slice, st2)) => { | ||
203 | Some(self.unify(&st1[0], &st2[0])) | ||
204 | } | ||
205 | |||
206 | // `T` -> `dyn Trait` when `T: Trait` | ||
207 | (_, Ty::Dyn(_)) => { | ||
208 | // FIXME: Check predicates | ||
209 | Some(true) | ||
210 | } | ||
211 | |||
212 | // `(..., T)` -> `(..., U)` when `T: Unsize<U>` | ||
213 | ( | ||
214 | ty_app!(TypeCtor::Tuple { cardinality: len1 }, st1), | ||
215 | ty_app!(TypeCtor::Tuple { cardinality: len2 }, st2), | ||
216 | ) => { | ||
217 | if len1 != len2 || *len1 == 0 { | ||
218 | return None; | ||
219 | } | ||
220 | |||
221 | match self.check_unsize_and_coerce( | ||
222 | st1.last().unwrap(), | ||
223 | st2.last().unwrap(), | ||
224 | depth + 1, | ||
225 | ) { | ||
226 | Some(true) => {} | ||
227 | ret => return ret, | ||
228 | } | ||
229 | |||
230 | let ret = st1[..st1.len() - 1] | ||
231 | .iter() | ||
232 | .zip(&st2[..st2.len() - 1]) | ||
233 | .all(|(ty1, ty2)| self.unify(ty1, ty2)); | ||
234 | |||
235 | Some(ret) | ||
236 | } | ||
237 | |||
238 | // Foo<..., T, ...> is Unsize<Foo<..., U, ...>> if: | ||
239 | // - T: Unsize<U> | ||
240 | // - Foo is a struct | ||
241 | // - Only the last field of Foo has a type involving T | ||
242 | // - T is not part of the type of any other fields | ||
243 | // - Bar<T>: Unsize<Bar<U>>, if the last field of Foo has type Bar<T> | ||
244 | ( | ||
245 | ty_app!(TypeCtor::Adt(Adt::Struct(struct1)), st1), | ||
246 | ty_app!(TypeCtor::Adt(Adt::Struct(struct2)), st2), | ||
247 | ) if struct1 == struct2 => { | ||
248 | let fields = struct1.fields(self.db); | ||
249 | let (last_field, prev_fields) = fields.split_last()?; | ||
250 | |||
251 | // Get the generic parameter involved in the last field. | ||
252 | let unsize_generic_index = { | ||
253 | let mut index = None; | ||
254 | let mut multiple_param = false; | ||
255 | last_field.ty(self.db).walk(&mut |ty| match ty { | ||
256 | &Ty::Param { idx, .. } => { | ||
257 | if index.is_none() { | ||
258 | index = Some(idx); | ||
259 | } else if Some(idx) != index { | ||
260 | multiple_param = true; | ||
261 | } | ||
262 | } | ||
263 | _ => {} | ||
264 | }); | ||
265 | |||
266 | if multiple_param { | ||
267 | return None; | ||
268 | } | ||
269 | index? | ||
270 | }; | ||
271 | |||
272 | // Check other fields do not involve it. | ||
273 | let mut multiple_used = false; | ||
274 | prev_fields.iter().for_each(|field| { | ||
275 | field.ty(self.db).walk(&mut |ty| match ty { | ||
276 | &Ty::Param { idx, .. } if idx == unsize_generic_index => { | ||
277 | multiple_used = true | ||
278 | } | ||
279 | _ => {} | ||
280 | }) | ||
281 | }); | ||
282 | if multiple_used { | ||
283 | return None; | ||
284 | } | ||
285 | |||
286 | let unsize_generic_index = unsize_generic_index as usize; | ||
287 | |||
288 | // Check `Unsize` first | ||
289 | match self.check_unsize_and_coerce( | ||
290 | st1.get(unsize_generic_index)?, | ||
291 | st2.get(unsize_generic_index)?, | ||
292 | depth + 1, | ||
293 | ) { | ||
294 | Some(true) => {} | ||
295 | ret => return ret, | ||
296 | } | ||
297 | |||
298 | // Then unify other parameters | ||
299 | let ret = st1 | ||
300 | .iter() | ||
301 | .zip(st2.iter()) | ||
302 | .enumerate() | ||
303 | .filter(|&(idx, _)| idx != unsize_generic_index) | ||
304 | .all(|(_, (ty1, ty2))| self.unify(ty1, ty2)); | ||
305 | |||
306 | Some(ret) | ||
307 | } | ||
308 | |||
309 | _ => None, | ||
310 | } | ||
311 | } | ||
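The struct case above is what lets custom smart-pointer-like types unsize when only their last field mentions the relevant parameter. A sketch with a hypothetical `Wrapper` type that satisfies those conditions:

    // Only the last field mentions `T`, so `Wrapper<T>: Unsize<Wrapper<U>>`
    // whenever `T: Unsize<U>` -- e.g. `Wrapper<[i32; 2]>` to `Wrapper<[i32]>`.
    struct Wrapper<T: ?Sized> {
        tag: u8,
        value: T,
    }

    fn main() {
        let concrete: Box<Wrapper<[i32; 2]>> = Box::new(Wrapper { tag: 1, value: [1, 2] });
        let erased: Box<Wrapper<[i32]>> = concrete;
        println!("tag={} len={}", erased.tag, erased.value.len());
    }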
312 | |||
313 | /// Unify `from_ty` to `to_ty` with optional auto Deref | ||
314 | /// | ||
315 | /// Note that the outer reference has already been stripped from the parameters. | ||
316 | fn unify_autoderef_behind_ref(&mut self, from_ty: &Ty, to_ty: &Ty) -> bool { | ||
317 | let canonicalized = self.canonicalizer().canonicalize_ty(from_ty.clone()); | ||
318 | let to_ty = self.resolve_ty_shallow(&to_ty); | ||
319 | // FIXME: Auto DerefMut | ||
320 | for derefed_ty in | ||
321 | autoderef::autoderef(self.db, &self.resolver.clone(), canonicalized.value.clone()) | ||
322 | { | ||
323 | let derefed_ty = canonicalized.decanonicalize_ty(derefed_ty.value); | ||
324 | match (&*self.resolve_ty_shallow(&derefed_ty), &*to_ty) { | ||
325 | // Stop when constructor matches. | ||
326 | (ty_app!(from_ctor, st1), ty_app!(to_ctor, st2)) if from_ctor == to_ctor => { | ||
327 | // It will not recurse to `coerce`. | ||
328 | return self.unify_substs(st1, st2, 0); | ||
329 | } | ||
330 | _ => {} | ||
331 | } | ||
332 | } | ||
333 | |||
334 | false | ||
335 | } | ||
336 | } | ||
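`unify_autoderef_behind_ref` backs deref coercions like the following, where the pointee of the source reference is auto-dereferenced until its constructor matches the target's:

    fn main() {
        let owned_string = String::from("hello");
        let owned_vec = vec![1, 2, 3];

        // `&String` -> `&str` and `&Vec<i32>` -> `&[i32]`: the pointee is
        // dereferenced (via `Deref`) until the type constructors line up.
        let s: &str = &owned_string;
        let xs: &[i32] = &owned_vec;

        assert_eq!(s.len(), 5);
        assert_eq!(xs.len(), 3);
    }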
diff --git a/crates/ra_hir/src/ty/infer/expr.rs b/crates/ra_hir/src/ty/infer/expr.rs new file mode 100644 index 000000000..f8807c742 --- /dev/null +++ b/crates/ra_hir/src/ty/infer/expr.rs | |||
@@ -0,0 +1,658 @@ | |||
1 | //! Type inference for expressions. | ||
2 | |||
3 | use std::iter::{repeat, repeat_with}; | ||
4 | use std::sync::Arc; | ||
5 | |||
6 | use super::{BindingMode, Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch}; | ||
7 | use crate::{ | ||
8 | db::HirDatabase, | ||
9 | expr::{self, Array, BinaryOp, Expr, ExprId, Literal, Statement, UnaryOp}, | ||
10 | generics::{GenericParams, HasGenericParams}, | ||
11 | name, | ||
12 | nameres::Namespace, | ||
13 | path::{GenericArg, GenericArgs}, | ||
14 | ty::{ | ||
15 | autoderef, method_resolution, op, primitive, CallableDef, InferTy, Mutability, Obligation, | ||
16 | ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, TypeCtor, TypeWalk, | ||
17 | }, | ||
18 | Adt, Name, | ||
19 | }; | ||
20 | |||
21 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | ||
22 | pub(super) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { | ||
23 | let ty = self.infer_expr_inner(tgt_expr, expected); | ||
24 | let could_unify = self.unify(&ty, &expected.ty); | ||
25 | if !could_unify { | ||
26 | self.result.type_mismatches.insert( | ||
27 | tgt_expr, | ||
28 | TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, | ||
29 | ); | ||
30 | } | ||
31 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
32 | ty | ||
33 | } | ||
34 | |||
35 | /// Infer the type of an expression, with a possible implicit coercion to the expected type. | ||
36 | /// Return the type after possible coercion. | ||
37 | fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty { | ||
38 | let ty = self.infer_expr_inner(expr, &expected); | ||
39 | let ty = if !self.coerce(&ty, &expected.ty) { | ||
40 | self.result | ||
41 | .type_mismatches | ||
42 | .insert(expr, TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }); | ||
43 | // Return the actual type on a type mismatch. | ||
44 | // This is needed for diagnostics when the return type does not match. | ||
45 | ty | ||
46 | } else if expected.ty == Ty::Unknown { | ||
47 | ty | ||
48 | } else { | ||
49 | expected.ty.clone() | ||
50 | }; | ||
51 | |||
52 | self.resolve_ty_as_possible(&mut vec![], ty) | ||
53 | } | ||
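`infer_expr_coerce` is used wherever an expression meets a concrete expectation: `let` initializers with an ascribed type, record fields, block tails, and so on. A user-level sketch of the coercions it admits (ordinary Rust, nothing specific to this crate):

    use std::fmt::Debug;

    fn boxed(n: i32) -> Box<dyn Debug> {
        // The tail expression has type `Box<i32>`; it is coerced to the
        // expected `Box<dyn Debug>` rather than required to unify with it.
        Box::new(n)
    }

    fn main() {
        // A `let` with an ascribed type coerces its initializer the same way:
        // `&[i32; 2]` becomes `&[i32]`.
        let slice: &[i32] = &[1, 2];
        println!("{:?} {:?}", boxed(3), slice);
    }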
54 | |||
55 | fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty { | ||
56 | let body = Arc::clone(&self.body); // avoid borrow checker problem | ||
57 | let ty = match &body[tgt_expr] { | ||
58 | Expr::Missing => Ty::Unknown, | ||
59 | Expr::If { condition, then_branch, else_branch } => { | ||
60 | // if let is desugared to match, so this is always simple if | ||
61 | self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); | ||
62 | |||
63 | let then_ty = self.infer_expr_inner(*then_branch, &expected); | ||
64 | let else_ty = match else_branch { | ||
65 | Some(else_branch) => self.infer_expr_inner(*else_branch, &expected), | ||
66 | None => Ty::unit(), | ||
67 | }; | ||
68 | |||
69 | self.coerce_merge_branch(&then_ty, &else_ty) | ||
70 | } | ||
71 | Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), | ||
72 | Expr::TryBlock { body } => { | ||
73 | let _inner = self.infer_expr(*body, expected); | ||
74 | // FIXME should be std::result::Result<{inner}, _> | ||
75 | Ty::Unknown | ||
76 | } | ||
77 | Expr::Loop { body } => { | ||
78 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | ||
79 | // FIXME handle break with value | ||
80 | Ty::simple(TypeCtor::Never) | ||
81 | } | ||
82 | Expr::While { condition, body } => { | ||
83 | // while let is desugared to a match loop, so this is always simple while | ||
84 | self.infer_expr(*condition, &Expectation::has_type(Ty::simple(TypeCtor::Bool))); | ||
85 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | ||
86 | Ty::unit() | ||
87 | } | ||
88 | Expr::For { iterable, body, pat } => { | ||
89 | let iterable_ty = self.infer_expr(*iterable, &Expectation::none()); | ||
90 | |||
91 | let pat_ty = match self.resolve_into_iter_item() { | ||
92 | Some(into_iter_item_alias) => { | ||
93 | let pat_ty = self.new_type_var(); | ||
94 | let projection = ProjectionPredicate { | ||
95 | ty: pat_ty.clone(), | ||
96 | projection_ty: ProjectionTy { | ||
97 | associated_ty: into_iter_item_alias, | ||
98 | parameters: Substs::single(iterable_ty), | ||
99 | }, | ||
100 | }; | ||
101 | self.obligations.push(Obligation::Projection(projection)); | ||
102 | self.resolve_ty_as_possible(&mut vec![], pat_ty) | ||
103 | } | ||
104 | None => Ty::Unknown, | ||
105 | }; | ||
106 | |||
107 | self.infer_pat(*pat, &pat_ty, BindingMode::default()); | ||
108 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | ||
109 | Ty::unit() | ||
110 | } | ||
111 | Expr::Lambda { body, args, arg_types } => { | ||
112 | assert_eq!(args.len(), arg_types.len()); | ||
113 | |||
114 | let mut sig_tys = Vec::new(); | ||
115 | |||
116 | for (arg_pat, arg_type) in args.iter().zip(arg_types.iter()) { | ||
117 | let expected = if let Some(type_ref) = arg_type { | ||
118 | self.make_ty(type_ref) | ||
119 | } else { | ||
120 | Ty::Unknown | ||
121 | }; | ||
122 | let arg_ty = self.infer_pat(*arg_pat, &expected, BindingMode::default()); | ||
123 | sig_tys.push(arg_ty); | ||
124 | } | ||
125 | |||
126 | // add return type | ||
127 | let ret_ty = self.new_type_var(); | ||
128 | sig_tys.push(ret_ty.clone()); | ||
129 | let sig_ty = Ty::apply( | ||
130 | TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 }, | ||
131 | Substs(sig_tys.into()), | ||
132 | ); | ||
133 | let closure_ty = Ty::apply_one( | ||
134 | TypeCtor::Closure { def: self.body.owner(), expr: tgt_expr }, | ||
135 | sig_ty, | ||
136 | ); | ||
137 | |||
138 | // Eagerly try to relate the closure type with the expected | ||
139 | // type, otherwise we often won't have enough information to | ||
140 | // infer the body. | ||
141 | self.coerce(&closure_ty, &expected.ty); | ||
142 | |||
143 | self.infer_expr(*body, &Expectation::has_type(ret_ty)); | ||
144 | closure_ty | ||
145 | } | ||
146 | Expr::Call { callee, args } => { | ||
147 | let callee_ty = self.infer_expr(*callee, &Expectation::none()); | ||
148 | let (param_tys, ret_ty) = match callee_ty.callable_sig(self.db) { | ||
149 | Some(sig) => (sig.params().to_vec(), sig.ret().clone()), | ||
150 | None => { | ||
151 | // Not callable | ||
152 | // FIXME: report an error | ||
153 | (Vec::new(), Ty::Unknown) | ||
154 | } | ||
155 | }; | ||
156 | self.register_obligations_for_call(&callee_ty); | ||
157 | self.check_call_arguments(args, ¶m_tys); | ||
158 | let ret_ty = self.normalize_associated_types_in(ret_ty); | ||
159 | ret_ty | ||
160 | } | ||
161 | Expr::MethodCall { receiver, args, method_name, generic_args } => self | ||
162 | .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()), | ||
163 | Expr::Match { expr, arms } => { | ||
164 | let input_ty = self.infer_expr(*expr, &Expectation::none()); | ||
165 | |||
166 | let mut result_ty = self.new_maybe_never_type_var(); | ||
167 | |||
168 | for arm in arms { | ||
169 | for &pat in &arm.pats { | ||
170 | let _pat_ty = self.infer_pat(pat, &input_ty, BindingMode::default()); | ||
171 | } | ||
172 | if let Some(guard_expr) = arm.guard { | ||
173 | self.infer_expr( | ||
174 | guard_expr, | ||
175 | &Expectation::has_type(Ty::simple(TypeCtor::Bool)), | ||
176 | ); | ||
177 | } | ||
178 | |||
179 | let arm_ty = self.infer_expr_inner(arm.expr, &expected); | ||
180 | result_ty = self.coerce_merge_branch(&result_ty, &arm_ty); | ||
181 | } | ||
182 | |||
183 | result_ty | ||
184 | } | ||
185 | Expr::Path(p) => { | ||
186 | // FIXME this could be more efficient... | ||
187 | let resolver = expr::resolver_for_expr(self.body.clone(), self.db, tgt_expr); | ||
188 | self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) | ||
189 | } | ||
190 | Expr::Continue => Ty::simple(TypeCtor::Never), | ||
191 | Expr::Break { expr } => { | ||
192 | if let Some(expr) = expr { | ||
193 | // FIXME handle break with value | ||
194 | self.infer_expr(*expr, &Expectation::none()); | ||
195 | } | ||
196 | Ty::simple(TypeCtor::Never) | ||
197 | } | ||
198 | Expr::Return { expr } => { | ||
199 | if let Some(expr) = expr { | ||
200 | self.infer_expr(*expr, &Expectation::has_type(self.return_ty.clone())); | ||
201 | } | ||
202 | Ty::simple(TypeCtor::Never) | ||
203 | } | ||
204 | Expr::RecordLit { path, fields, spread } => { | ||
205 | let (ty, def_id) = self.resolve_variant(path.as_ref()); | ||
206 | if let Some(variant) = def_id { | ||
207 | self.write_variant_resolution(tgt_expr.into(), variant); | ||
208 | } | ||
209 | |||
210 | self.unify(&ty, &expected.ty); | ||
211 | |||
212 | let substs = ty.substs().unwrap_or_else(Substs::empty); | ||
213 | for (field_idx, field) in fields.iter().enumerate() { | ||
214 | let field_ty = def_id | ||
215 | .and_then(|it| match it.field(self.db, &field.name) { | ||
216 | Some(field) => Some(field), | ||
217 | None => { | ||
218 | self.push_diagnostic(InferenceDiagnostic::NoSuchField { | ||
219 | expr: tgt_expr, | ||
220 | field: field_idx, | ||
221 | }); | ||
222 | None | ||
223 | } | ||
224 | }) | ||
225 | .map_or(Ty::Unknown, |field| field.ty(self.db)) | ||
226 | .subst(&substs); | ||
227 | self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty)); | ||
228 | } | ||
229 | if let Some(expr) = spread { | ||
230 | self.infer_expr(*expr, &Expectation::has_type(ty.clone())); | ||
231 | } | ||
232 | ty | ||
233 | } | ||
234 | Expr::Field { expr, name } => { | ||
235 | let receiver_ty = self.infer_expr(*expr, &Expectation::none()); | ||
236 | let canonicalized = self.canonicalizer().canonicalize_ty(receiver_ty); | ||
237 | let ty = autoderef::autoderef( | ||
238 | self.db, | ||
239 | &self.resolver.clone(), | ||
240 | canonicalized.value.clone(), | ||
241 | ) | ||
242 | .find_map(|derefed_ty| match canonicalized.decanonicalize_ty(derefed_ty.value) { | ||
243 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
244 | TypeCtor::Tuple { .. } => name | ||
245 | .as_tuple_index() | ||
246 | .and_then(|idx| a_ty.parameters.0.get(idx).cloned()), | ||
247 | TypeCtor::Adt(Adt::Struct(s)) => s.field(self.db, name).map(|field| { | ||
248 | self.write_field_resolution(tgt_expr, field); | ||
249 | field.ty(self.db).subst(&a_ty.parameters) | ||
250 | }), | ||
251 | _ => None, | ||
252 | }, | ||
253 | _ => None, | ||
254 | }) | ||
255 | .unwrap_or(Ty::Unknown); | ||
256 | let ty = self.insert_type_vars(ty); | ||
257 | self.normalize_associated_types_in(ty) | ||
258 | } | ||
259 | Expr::Await { expr } => { | ||
260 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
261 | let ty = match self.resolve_future_future_output() { | ||
262 | Some(future_future_output_alias) => { | ||
263 | let ty = self.new_type_var(); | ||
264 | let projection = ProjectionPredicate { | ||
265 | ty: ty.clone(), | ||
266 | projection_ty: ProjectionTy { | ||
267 | associated_ty: future_future_output_alias, | ||
268 | parameters: Substs::single(inner_ty), | ||
269 | }, | ||
270 | }; | ||
271 | self.obligations.push(Obligation::Projection(projection)); | ||
272 | self.resolve_ty_as_possible(&mut vec![], ty) | ||
273 | } | ||
274 | None => Ty::Unknown, | ||
275 | }; | ||
276 | ty | ||
277 | } | ||
278 | Expr::Try { expr } => { | ||
279 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
280 | let ty = match self.resolve_ops_try_ok() { | ||
281 | Some(ops_try_ok_alias) => { | ||
282 | let ty = self.new_type_var(); | ||
283 | let projection = ProjectionPredicate { | ||
284 | ty: ty.clone(), | ||
285 | projection_ty: ProjectionTy { | ||
286 | associated_ty: ops_try_ok_alias, | ||
287 | parameters: Substs::single(inner_ty), | ||
288 | }, | ||
289 | }; | ||
290 | self.obligations.push(Obligation::Projection(projection)); | ||
291 | self.resolve_ty_as_possible(&mut vec![], ty) | ||
292 | } | ||
293 | None => Ty::Unknown, | ||
294 | }; | ||
295 | ty | ||
296 | } | ||
297 | Expr::Cast { expr, type_ref } => { | ||
298 | let _inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
299 | let cast_ty = self.make_ty(type_ref); | ||
300 | // FIXME check the cast... | ||
301 | cast_ty | ||
302 | } | ||
303 | Expr::Ref { expr, mutability } => { | ||
304 | let expectation = | ||
305 | if let Some((exp_inner, exp_mutability)) = &expected.ty.as_reference() { | ||
306 | if *exp_mutability == Mutability::Mut && *mutability == Mutability::Shared { | ||
307 | // FIXME: throw type error - expected mut reference but found shared ref, | ||
308 | // which cannot be coerced | ||
309 | } | ||
310 | Expectation::has_type(Ty::clone(exp_inner)) | ||
311 | } else { | ||
312 | Expectation::none() | ||
313 | }; | ||
314 | // FIXME reference coercions etc. | ||
315 | let inner_ty = self.infer_expr(*expr, &expectation); | ||
316 | Ty::apply_one(TypeCtor::Ref(*mutability), inner_ty) | ||
317 | } | ||
318 | Expr::Box { expr } => { | ||
319 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
320 | if let Some(box_) = self.resolve_boxed_box() { | ||
321 | Ty::apply_one(TypeCtor::Adt(box_), inner_ty) | ||
322 | } else { | ||
323 | Ty::Unknown | ||
324 | } | ||
325 | } | ||
326 | Expr::UnaryOp { expr, op } => { | ||
327 | let inner_ty = self.infer_expr(*expr, &Expectation::none()); | ||
328 | match op { | ||
329 | UnaryOp::Deref => { | ||
330 | let canonicalized = self.canonicalizer().canonicalize_ty(inner_ty); | ||
331 | if let Some(derefed_ty) = | ||
332 | autoderef::deref(self.db, &self.resolver, &canonicalized.value) | ||
333 | { | ||
334 | canonicalized.decanonicalize_ty(derefed_ty.value) | ||
335 | } else { | ||
336 | Ty::Unknown | ||
337 | } | ||
338 | } | ||
339 | UnaryOp::Neg => { | ||
340 | match &inner_ty { | ||
341 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
342 | TypeCtor::Int(primitive::UncertainIntTy::Unknown) | ||
343 | | TypeCtor::Int(primitive::UncertainIntTy::Known( | ||
344 | primitive::IntTy { | ||
345 | signedness: primitive::Signedness::Signed, | ||
346 | .. | ||
347 | }, | ||
348 | )) | ||
349 | | TypeCtor::Float(..) => inner_ty, | ||
350 | _ => Ty::Unknown, | ||
351 | }, | ||
352 | Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => { | ||
353 | inner_ty | ||
354 | } | ||
355 | // FIXME: resolve ops::Neg trait | ||
356 | _ => Ty::Unknown, | ||
357 | } | ||
358 | } | ||
359 | UnaryOp::Not => { | ||
360 | match &inner_ty { | ||
361 | Ty::Apply(a_ty) => match a_ty.ctor { | ||
362 | TypeCtor::Bool | TypeCtor::Int(_) => inner_ty, | ||
363 | _ => Ty::Unknown, | ||
364 | }, | ||
365 | Ty::Infer(InferTy::IntVar(..)) => inner_ty, | ||
366 | // FIXME: resolve ops::Not trait for inner_ty | ||
367 | _ => Ty::Unknown, | ||
368 | } | ||
369 | } | ||
370 | } | ||
371 | } | ||
372 | Expr::BinaryOp { lhs, rhs, op } => match op { | ||
373 | Some(op) => { | ||
374 | let lhs_expectation = match op { | ||
375 | BinaryOp::LogicOp(..) => Expectation::has_type(Ty::simple(TypeCtor::Bool)), | ||
376 | _ => Expectation::none(), | ||
377 | }; | ||
378 | let lhs_ty = self.infer_expr(*lhs, &lhs_expectation); | ||
379 | // FIXME: find implementation of trait corresponding to operation | ||
380 | // symbol and resolve associated `Output` type | ||
381 | let rhs_expectation = op::binary_op_rhs_expectation(*op, lhs_ty); | ||
382 | let rhs_ty = self.infer_expr(*rhs, &Expectation::has_type(rhs_expectation)); | ||
383 | |||
384 | // FIXME: similar as above, return ty is often associated trait type | ||
385 | op::binary_op_return_ty(*op, rhs_ty) | ||
386 | } | ||
387 | _ => Ty::Unknown, | ||
388 | }, | ||
389 | Expr::Index { base, index } => { | ||
390 | let _base_ty = self.infer_expr(*base, &Expectation::none()); | ||
391 | let _index_ty = self.infer_expr(*index, &Expectation::none()); | ||
392 | // FIXME: use `std::ops::Index::Output` to figure out the real return type | ||
393 | Ty::Unknown | ||
394 | } | ||
395 | Expr::Tuple { exprs } => { | ||
396 | let mut tys = match &expected.ty { | ||
397 | ty_app!(TypeCtor::Tuple { .. }, st) => st | ||
398 | .iter() | ||
399 | .cloned() | ||
400 | .chain(repeat_with(|| self.new_type_var())) | ||
401 | .take(exprs.len()) | ||
402 | .collect::<Vec<_>>(), | ||
403 | _ => (0..exprs.len()).map(|_| self.new_type_var()).collect(), | ||
404 | }; | ||
405 | |||
406 | for (expr, ty) in exprs.iter().zip(tys.iter_mut()) { | ||
407 | self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone())); | ||
408 | } | ||
409 | |||
410 | Ty::apply(TypeCtor::Tuple { cardinality: tys.len() as u16 }, Substs(tys.into())) | ||
411 | } | ||
412 | Expr::Array(array) => { | ||
413 | let elem_ty = match &expected.ty { | ||
414 | ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => { | ||
415 | st.as_single().clone() | ||
416 | } | ||
417 | _ => self.new_type_var(), | ||
418 | }; | ||
419 | |||
420 | match array { | ||
421 | Array::ElementList(items) => { | ||
422 | for expr in items.iter() { | ||
423 | self.infer_expr_coerce(*expr, &Expectation::has_type(elem_ty.clone())); | ||
424 | } | ||
425 | } | ||
426 | Array::Repeat { initializer, repeat } => { | ||
427 | self.infer_expr_coerce( | ||
428 | *initializer, | ||
429 | &Expectation::has_type(elem_ty.clone()), | ||
430 | ); | ||
431 | self.infer_expr( | ||
432 | *repeat, | ||
433 | &Expectation::has_type(Ty::simple(TypeCtor::Int( | ||
434 | primitive::UncertainIntTy::Known(primitive::IntTy::usize()), | ||
435 | ))), | ||
436 | ); | ||
437 | } | ||
438 | } | ||
439 | |||
440 | Ty::apply_one(TypeCtor::Array, elem_ty) | ||
441 | } | ||
442 | Expr::Literal(lit) => match lit { | ||
443 | Literal::Bool(..) => Ty::simple(TypeCtor::Bool), | ||
444 | Literal::String(..) => { | ||
445 | Ty::apply_one(TypeCtor::Ref(Mutability::Shared), Ty::simple(TypeCtor::Str)) | ||
446 | } | ||
447 | Literal::ByteString(..) => { | ||
448 | let byte_type = Ty::simple(TypeCtor::Int(primitive::UncertainIntTy::Known( | ||
449 | primitive::IntTy::u8(), | ||
450 | ))); | ||
451 | let slice_type = Ty::apply_one(TypeCtor::Slice, byte_type); | ||
452 | Ty::apply_one(TypeCtor::Ref(Mutability::Shared), slice_type) | ||
453 | } | ||
454 | Literal::Char(..) => Ty::simple(TypeCtor::Char), | ||
455 | Literal::Int(_v, ty) => Ty::simple(TypeCtor::Int(*ty)), | ||
456 | Literal::Float(_v, ty) => Ty::simple(TypeCtor::Float(*ty)), | ||
457 | }, | ||
458 | }; | ||
459 | // use a new type variable if we got Ty::Unknown here | ||
460 | let ty = self.insert_type_vars_shallow(ty); | ||
461 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
462 | self.write_expr_ty(tgt_expr, ty.clone()); | ||
463 | ty | ||
464 | } | ||
465 | |||
466 | fn infer_block( | ||
467 | &mut self, | ||
468 | statements: &[Statement], | ||
469 | tail: Option<ExprId>, | ||
470 | expected: &Expectation, | ||
471 | ) -> Ty { | ||
472 | let mut diverges = false; | ||
473 | for stmt in statements { | ||
474 | match stmt { | ||
475 | Statement::Let { pat, type_ref, initializer } => { | ||
476 | let decl_ty = | ||
477 | type_ref.as_ref().map(|tr| self.make_ty(tr)).unwrap_or(Ty::Unknown); | ||
478 | |||
479 | // Always use the declared type when specified | ||
480 | let mut ty = decl_ty.clone(); | ||
481 | |||
482 | if let Some(expr) = initializer { | ||
483 | let actual_ty = | ||
484 | self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone())); | ||
485 | if decl_ty == Ty::Unknown { | ||
486 | ty = actual_ty; | ||
487 | } | ||
488 | } | ||
489 | |||
490 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
491 | self.infer_pat(*pat, &ty, BindingMode::default()); | ||
492 | } | ||
493 | Statement::Expr(expr) => { | ||
494 | if let ty_app!(TypeCtor::Never) = self.infer_expr(*expr, &Expectation::none()) { | ||
495 | diverges = true; | ||
496 | } | ||
497 | } | ||
498 | } | ||
499 | } | ||
500 | |||
501 | let ty = if let Some(expr) = tail { | ||
502 | self.infer_expr_coerce(expr, expected) | ||
503 | } else { | ||
504 | self.coerce(&Ty::unit(), &expected.ty); | ||
505 | Ty::unit() | ||
506 | }; | ||
507 | if diverges { | ||
508 | Ty::simple(TypeCtor::Never) | ||
509 | } else { | ||
510 | ty | ||
511 | } | ||
512 | } | ||
513 | |||
514 | fn infer_method_call( | ||
515 | &mut self, | ||
516 | tgt_expr: ExprId, | ||
517 | receiver: ExprId, | ||
518 | args: &[ExprId], | ||
519 | method_name: &Name, | ||
520 | generic_args: Option<&GenericArgs>, | ||
521 | ) -> Ty { | ||
522 | let receiver_ty = self.infer_expr(receiver, &Expectation::none()); | ||
523 | let canonicalized_receiver = self.canonicalizer().canonicalize_ty(receiver_ty.clone()); | ||
524 | let resolved = method_resolution::lookup_method( | ||
525 | &canonicalized_receiver.value, | ||
526 | self.db, | ||
527 | method_name, | ||
528 | &self.resolver, | ||
529 | ); | ||
530 | let (derefed_receiver_ty, method_ty, def_generics) = match resolved { | ||
531 | Some((ty, func)) => { | ||
532 | let ty = canonicalized_receiver.decanonicalize_ty(ty); | ||
533 | self.write_method_resolution(tgt_expr, func); | ||
534 | ( | ||
535 | ty, | ||
536 | self.db.type_for_def(func.into(), Namespace::Values), | ||
537 | Some(func.generic_params(self.db)), | ||
538 | ) | ||
539 | } | ||
540 | None => (receiver_ty, Ty::Unknown, None), | ||
541 | }; | ||
542 | let substs = self.substs_for_method_call(def_generics, generic_args, &derefed_receiver_ty); | ||
543 | let method_ty = method_ty.apply_substs(substs); | ||
544 | let method_ty = self.insert_type_vars(method_ty); | ||
545 | self.register_obligations_for_call(&method_ty); | ||
546 | let (expected_receiver_ty, param_tys, ret_ty) = match method_ty.callable_sig(self.db) { | ||
547 | Some(sig) => { | ||
548 | if !sig.params().is_empty() { | ||
549 | (sig.params()[0].clone(), sig.params()[1..].to_vec(), sig.ret().clone()) | ||
550 | } else { | ||
551 | (Ty::Unknown, Vec::new(), sig.ret().clone()) | ||
552 | } | ||
553 | } | ||
554 | None => (Ty::Unknown, Vec::new(), Ty::Unknown), | ||
555 | }; | ||
556 | // Apply autoref so the below unification works correctly | ||
557 | // FIXME: return correct autorefs from lookup_method | ||
558 | let actual_receiver_ty = match expected_receiver_ty.as_reference() { | ||
559 | Some((_, mutability)) => Ty::apply_one(TypeCtor::Ref(mutability), derefed_receiver_ty), | ||
560 | _ => derefed_receiver_ty, | ||
561 | }; | ||
562 | self.unify(&expected_receiver_ty, &actual_receiver_ty); | ||
563 | |||
564 | self.check_call_arguments(args, ¶m_tys); | ||
565 | let ret_ty = self.normalize_associated_types_in(ret_ty); | ||
566 | ret_ty | ||
567 | } | ||
568 | |||
569 | fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) { | ||
570 | // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 -- | ||
571 | // We do this in a pretty awful way: first we type-check any arguments | ||
572 | // that are not closures, then we type-check the closures. This is so | ||
573 | // that we have more information about the types of arguments when we | ||
574 | // type-check the functions. This isn't really the right way to do this. | ||
575 | for &check_closures in &[false, true] { | ||
576 | let param_iter = param_tys.iter().cloned().chain(repeat(Ty::Unknown)); | ||
577 | for (&arg, param_ty) in args.iter().zip(param_iter) { | ||
578 | let is_closure = match &self.body[arg] { | ||
579 | Expr::Lambda { .. } => true, | ||
580 | _ => false, | ||
581 | }; | ||
582 | |||
583 | if is_closure != check_closures { | ||
584 | continue; | ||
585 | } | ||
586 | |||
587 | let param_ty = self.normalize_associated_types_in(param_ty); | ||
588 | self.infer_expr_coerce(arg, &Expectation::has_type(param_ty.clone())); | ||
589 | } | ||
590 | } | ||
591 | } | ||
592 | |||
593 | fn substs_for_method_call( | ||
594 | &mut self, | ||
595 | def_generics: Option<Arc<GenericParams>>, | ||
596 | generic_args: Option<&GenericArgs>, | ||
597 | receiver_ty: &Ty, | ||
598 | ) -> Substs { | ||
599 | let (parent_param_count, param_count) = | ||
600 | def_generics.as_ref().map_or((0, 0), |g| (g.count_parent_params(), g.params.len())); | ||
601 | let mut substs = Vec::with_capacity(parent_param_count + param_count); | ||
602 | // Parent arguments are unknown, except for the receiver type | ||
603 | if let Some(parent_generics) = def_generics.and_then(|p| p.parent_params.clone()) { | ||
604 | for param in &parent_generics.params { | ||
605 | if param.name == name::SELF_TYPE { | ||
606 | substs.push(receiver_ty.clone()); | ||
607 | } else { | ||
608 | substs.push(Ty::Unknown); | ||
609 | } | ||
610 | } | ||
611 | } | ||
612 | // handle provided type arguments | ||
613 | if let Some(generic_args) = generic_args { | ||
614 | // if args are provided, it should be all of them, but we can't rely on that | ||
615 | for arg in generic_args.args.iter().take(param_count) { | ||
616 | match arg { | ||
617 | GenericArg::Type(type_ref) => { | ||
618 | let ty = self.make_ty(type_ref); | ||
619 | substs.push(ty); | ||
620 | } | ||
621 | } | ||
622 | } | ||
623 | }; | ||
624 | let supplied_params = substs.len(); | ||
625 | for _ in supplied_params..parent_param_count + param_count { | ||
626 | substs.push(Ty::Unknown); | ||
627 | } | ||
628 | assert_eq!(substs.len(), parent_param_count + param_count); | ||
629 | Substs(substs.into()) | ||
630 | } | ||
631 | |||
632 | fn register_obligations_for_call(&mut self, callable_ty: &Ty) { | ||
633 | if let Ty::Apply(a_ty) = callable_ty { | ||
634 | if let TypeCtor::FnDef(def) = a_ty.ctor { | ||
635 | let generic_predicates = self.db.generic_predicates(def.into()); | ||
636 | for predicate in generic_predicates.iter() { | ||
637 | let predicate = predicate.clone().subst(&a_ty.parameters); | ||
638 | if let Some(obligation) = Obligation::from_predicate(predicate) { | ||
639 | self.obligations.push(obligation); | ||
640 | } | ||
641 | } | ||
642 | // add obligation for trait implementation, if this is a trait method | ||
643 | match def { | ||
644 | CallableDef::Function(f) => { | ||
645 | if let Some(trait_) = f.parent_trait(self.db) { | ||
646 | // construct a TraitDef | ||
647 | let substs = a_ty.parameters.prefix( | ||
648 | trait_.generic_params(self.db).count_params_including_parent(), | ||
649 | ); | ||
650 | self.obligations.push(Obligation::Trait(TraitRef { trait_, substs })); | ||
651 | } | ||
652 | } | ||
653 | CallableDef::Struct(_) | CallableDef::EnumVariant(_) => {} | ||
654 | } | ||
655 | } | ||
656 | } | ||
657 | } | ||
658 | } | ||
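
The comment in `check_call_arguments` above (quoted from rustc) describes a two-pass order: arguments that are not closures are type-checked first, so that closure arguments can later be checked with more type information already available. A minimal standalone sketch of just that ordering, using simplified stand-in types rather than the rust-analyzer `InferenceContext` API:

// Simplified stand-ins; not the rust-analyzer expression types.
#[derive(Debug)]
enum Arg {
    Closure(&'static str),
    Plain(&'static str),
}

fn check_args_two_pass(args: &[Arg]) -> Vec<&'static str> {
    let mut order = Vec::new();
    // First pass: everything that is not a closure; second pass: closures only.
    for &check_closures in &[false, true] {
        for arg in args {
            let is_closure = matches!(arg, Arg::Closure(_));
            if is_closure != check_closures {
                continue;
            }
            order.push(match arg {
                Arg::Closure(name) | Arg::Plain(name) => *name,
            });
        }
    }
    order
}

fn main() {
    let args = [Arg::Plain("a"), Arg::Closure("f"), Arg::Plain("b")];
    // Non-closure arguments are visited before the closure: ["a", "b", "f"].
    assert_eq!(check_args_two_pass(&args), vec!["a", "b", "f"]);
}

The real code interleaves this ordering with coercion and normalization of the parameter types; the sketch only shows why the outer `&[false, true]` loop exists.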
diff --git a/crates/ra_hir/src/ty/infer/pat.rs b/crates/ra_hir/src/ty/infer/pat.rs new file mode 100644 index 000000000..c125ddfbc --- /dev/null +++ b/crates/ra_hir/src/ty/infer/pat.rs | |||
@@ -0,0 +1,180 @@ | |||
1 | //! Type inference for patterns. | ||
2 | |||
3 | use std::iter::repeat; | ||
4 | use std::sync::Arc; | ||
5 | |||
6 | use test_utils::tested_by; | ||
7 | |||
8 | use super::{BindingMode, InferenceContext}; | ||
9 | use crate::{ | ||
10 | db::HirDatabase, | ||
11 | expr::{BindingAnnotation, Pat, PatId, RecordFieldPat}, | ||
12 | ty::{Mutability, Substs, Ty, TypeCtor, TypeWalk}, | ||
13 | Name, Path, | ||
14 | }; | ||
15 | |||
16 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | ||
17 | fn infer_tuple_struct_pat( | ||
18 | &mut self, | ||
19 | path: Option<&Path>, | ||
20 | subpats: &[PatId], | ||
21 | expected: &Ty, | ||
22 | default_bm: BindingMode, | ||
23 | ) -> Ty { | ||
24 | let (ty, def) = self.resolve_variant(path); | ||
25 | |||
26 | self.unify(&ty, expected); | ||
27 | |||
28 | let substs = ty.substs().unwrap_or_else(Substs::empty); | ||
29 | |||
30 | for (i, &subpat) in subpats.iter().enumerate() { | ||
31 | let expected_ty = def | ||
32 | .and_then(|d| d.field(self.db, &Name::new_tuple_field(i))) | ||
33 | .map_or(Ty::Unknown, |field| field.ty(self.db)) | ||
34 | .subst(&substs); | ||
35 | let expected_ty = self.normalize_associated_types_in(expected_ty); | ||
36 | self.infer_pat(subpat, &expected_ty, default_bm); | ||
37 | } | ||
38 | |||
39 | ty | ||
40 | } | ||
41 | |||
42 | fn infer_record_pat( | ||
43 | &mut self, | ||
44 | path: Option<&Path>, | ||
45 | subpats: &[RecordFieldPat], | ||
46 | expected: &Ty, | ||
47 | default_bm: BindingMode, | ||
48 | id: PatId, | ||
49 | ) -> Ty { | ||
50 | let (ty, def) = self.resolve_variant(path); | ||
51 | if let Some(variant) = def { | ||
52 | self.write_variant_resolution(id.into(), variant); | ||
53 | } | ||
54 | |||
55 | self.unify(&ty, expected); | ||
56 | |||
57 | let substs = ty.substs().unwrap_or_else(Substs::empty); | ||
58 | |||
59 | for subpat in subpats { | ||
60 | let matching_field = def.and_then(|it| it.field(self.db, &subpat.name)); | ||
61 | let expected_ty = | ||
62 | matching_field.map_or(Ty::Unknown, |field| field.ty(self.db)).subst(&substs); | ||
63 | let expected_ty = self.normalize_associated_types_in(expected_ty); | ||
64 | self.infer_pat(subpat.pat, &expected_ty, default_bm); | ||
65 | } | ||
66 | |||
67 | ty | ||
68 | } | ||
69 | |||
70 | pub(super) fn infer_pat( | ||
71 | &mut self, | ||
72 | pat: PatId, | ||
73 | mut expected: &Ty, | ||
74 | mut default_bm: BindingMode, | ||
75 | ) -> Ty { | ||
76 | let body = Arc::clone(&self.body); // avoid borrow checker problem | ||
77 | |||
78 | let is_non_ref_pat = match &body[pat] { | ||
79 | Pat::Tuple(..) | ||
80 | | Pat::TupleStruct { .. } | ||
81 | | Pat::Record { .. } | ||
82 | | Pat::Range { .. } | ||
83 | | Pat::Slice { .. } => true, | ||
84 | // FIXME: Path/Lit might actually evaluate to ref, but inference is unimplemented. | ||
85 | Pat::Path(..) | Pat::Lit(..) => true, | ||
86 | Pat::Wild | Pat::Bind { .. } | Pat::Ref { .. } | Pat::Missing => false, | ||
87 | }; | ||
88 | if is_non_ref_pat { | ||
89 | while let Some((inner, mutability)) = expected.as_reference() { | ||
90 | expected = inner; | ||
91 | default_bm = match default_bm { | ||
92 | BindingMode::Move => BindingMode::Ref(mutability), | ||
93 | BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared), | ||
94 | BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(mutability), | ||
95 | } | ||
96 | } | ||
97 | } else if let Pat::Ref { .. } = &body[pat] { | ||
98 | tested_by!(match_ergonomics_ref); | ||
99 | // When you encounter a `&pat` pattern, reset to Move. | ||
100 | // This is so that `w` is by value: `let (_, &w) = &(1, &2);` | ||
101 | default_bm = BindingMode::Move; | ||
102 | } | ||
103 | |||
104 | // Lose mutability. | ||
105 | let default_bm = default_bm; | ||
106 | let expected = expected; | ||
107 | |||
108 | let ty = match &body[pat] { | ||
109 | Pat::Tuple(ref args) => { | ||
110 | let expectations = match expected.as_tuple() { | ||
111 | Some(parameters) => &*parameters.0, | ||
112 | _ => &[], | ||
113 | }; | ||
114 | let expectations_iter = expectations.iter().chain(repeat(&Ty::Unknown)); | ||
115 | |||
116 | let inner_tys = args | ||
117 | .iter() | ||
118 | .zip(expectations_iter) | ||
119 | .map(|(&pat, ty)| self.infer_pat(pat, ty, default_bm)) | ||
120 | .collect(); | ||
121 | |||
122 | Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys)) | ||
123 | } | ||
124 | Pat::Ref { pat, mutability } => { | ||
125 | let expectation = match expected.as_reference() { | ||
126 | Some((inner_ty, exp_mut)) => { | ||
127 | if *mutability != exp_mut { | ||
128 | // FIXME: emit type error? | ||
129 | } | ||
130 | inner_ty | ||
131 | } | ||
132 | _ => &Ty::Unknown, | ||
133 | }; | ||
134 | let subty = self.infer_pat(*pat, expectation, default_bm); | ||
135 | Ty::apply_one(TypeCtor::Ref(*mutability), subty) | ||
136 | } | ||
137 | Pat::TupleStruct { path: p, args: subpats } => { | ||
138 | self.infer_tuple_struct_pat(p.as_ref(), subpats, expected, default_bm) | ||
139 | } | ||
140 | Pat::Record { path: p, args: fields } => { | ||
141 | self.infer_record_pat(p.as_ref(), fields, expected, default_bm, pat) | ||
142 | } | ||
143 | Pat::Path(path) => { | ||
144 | // FIXME use correct resolver for the surrounding expression | ||
145 | let resolver = self.resolver.clone(); | ||
146 | self.infer_path(&resolver, &path, pat.into()).unwrap_or(Ty::Unknown) | ||
147 | } | ||
148 | Pat::Bind { mode, name: _, subpat } => { | ||
149 | let mode = if mode == &BindingAnnotation::Unannotated { | ||
150 | default_bm | ||
151 | } else { | ||
152 | BindingMode::convert(*mode) | ||
153 | }; | ||
154 | let inner_ty = if let Some(subpat) = subpat { | ||
155 | self.infer_pat(*subpat, expected, default_bm) | ||
156 | } else { | ||
157 | expected.clone() | ||
158 | }; | ||
159 | let inner_ty = self.insert_type_vars_shallow(inner_ty); | ||
160 | |||
161 | let bound_ty = match mode { | ||
162 | BindingMode::Ref(mutability) => { | ||
163 | Ty::apply_one(TypeCtor::Ref(mutability), inner_ty.clone()) | ||
164 | } | ||
165 | BindingMode::Move => inner_ty.clone(), | ||
166 | }; | ||
167 | let bound_ty = self.resolve_ty_as_possible(&mut vec![], bound_ty); | ||
168 | self.write_pat_ty(pat, bound_ty); | ||
169 | return inner_ty; | ||
170 | } | ||
171 | _ => Ty::Unknown, | ||
172 | }; | ||
173 | // use a new type variable if we got Ty::Unknown here | ||
174 | let ty = self.insert_type_vars_shallow(ty); | ||
175 | self.unify(&ty, expected); | ||
176 | let ty = self.resolve_ty_as_possible(&mut vec![], ty); | ||
177 | self.write_pat_ty(pat, ty.clone()); | ||
178 | ty | ||
179 | } | ||
180 | } | ||
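
The head of `infer_pat` above implements match ergonomics: while a non-reference pattern is matched against a reference type, the expected type is peeled one layer at a time and the default binding mode shifts towards by-reference. A self-contained sketch of that state transition alone, with simplified stand-ins for the hir `Ty` and `BindingMode` types:

// Simplified stand-ins for the hir types.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability {
    Shared,
    Mut,
}

#[derive(Clone, Copy, PartialEq, Debug)]
enum BindingMode {
    Move,
    Ref(Mutability),
}

#[derive(Debug)]
enum Ty {
    Ref(Mutability, Box<Ty>),
    Tuple(Vec<Ty>),
}

/// Peel reference layers off `expected`, adjusting the default binding mode
/// the same way the loop in `infer_pat` does for non-reference patterns.
fn peel_refs(mut expected: &Ty, mut default_bm: BindingMode) -> (&Ty, BindingMode) {
    while let Ty::Ref(mutability, inner) = expected {
        expected = &**inner;
        default_bm = match default_bm {
            BindingMode::Move => BindingMode::Ref(*mutability),
            BindingMode::Ref(Mutability::Shared) => BindingMode::Ref(Mutability::Shared),
            BindingMode::Ref(Mutability::Mut) => BindingMode::Ref(*mutability),
        };
    }
    (expected, default_bm)
}

fn main() {
    // Matching a tuple pattern against `&(T, U)`: bindings become by shared reference.
    let expected = Ty::Ref(Mutability::Shared, Box::new(Ty::Tuple(Vec::new())));
    let (_inner, bm) = peel_refs(&expected, BindingMode::Move);
    assert_eq!(bm, BindingMode::Ref(Mutability::Shared));
}

This is why `let (_, &w) = &(1, &2);` binds `w` by value, as the comment in the diff notes: the explicit `&` pattern resets the mode back to `Move`.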
diff --git a/crates/ra_hir/src/ty/infer/unify.rs b/crates/ra_hir/src/ty/infer/unify.rs index d161aa6b3..014c7981f 100644 --- a/crates/ra_hir/src/ty/infer/unify.rs +++ b/crates/ra_hir/src/ty/infer/unify.rs | |||
@@ -6,6 +6,7 @@ use crate::ty::{ | |||
6 | Canonical, InEnvironment, InferTy, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, | 6 | Canonical, InEnvironment, InferTy, ProjectionPredicate, ProjectionTy, Substs, TraitRef, Ty, |
7 | TypeWalk, | 7 | TypeWalk, |
8 | }; | 8 | }; |
9 | use crate::util::make_mut_slice; | ||
9 | 10 | ||
10 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | 11 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { |
11 | pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b, D> | 12 | pub(super) fn canonicalizer<'b>(&'b mut self) -> Canonicalizer<'a, 'b, D> |
@@ -74,10 +75,11 @@ where | |||
74 | }) | 75 | }) |
75 | } | 76 | } |
76 | 77 | ||
77 | fn do_canonicalize_trait_ref(&mut self, trait_ref: TraitRef) -> TraitRef { | 78 | fn do_canonicalize_trait_ref(&mut self, mut trait_ref: TraitRef) -> TraitRef { |
78 | let substs = | 79 | for ty in make_mut_slice(&mut trait_ref.substs.0) { |
79 | trait_ref.substs.iter().map(|ty| self.do_canonicalize_ty(ty.clone())).collect(); | 80 | *ty = self.do_canonicalize_ty(ty.clone()); |
80 | TraitRef { trait_: trait_ref.trait_, substs: Substs(substs) } | 81 | } |
82 | trait_ref | ||
81 | } | 83 | } |
82 | 84 | ||
83 | fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { | 85 | fn into_canonicalized<T>(self, result: T) -> Canonicalized<T> { |
@@ -87,10 +89,11 @@ where | |||
87 | } | 89 | } |
88 | } | 90 | } |
89 | 91 | ||
90 | fn do_canonicalize_projection_ty(&mut self, projection_ty: ProjectionTy) -> ProjectionTy { | 92 | fn do_canonicalize_projection_ty(&mut self, mut projection_ty: ProjectionTy) -> ProjectionTy { |
91 | let params = | 93 | for ty in make_mut_slice(&mut projection_ty.parameters.0) { |
92 | projection_ty.parameters.iter().map(|ty| self.do_canonicalize_ty(ty.clone())).collect(); | 94 | *ty = self.do_canonicalize_ty(ty.clone()); |
93 | ProjectionTy { associated_ty: projection_ty.associated_ty, parameters: Substs(params) } | 95 | } |
96 | projection_ty | ||
94 | } | 97 | } |
95 | 98 | ||
96 | fn do_canonicalize_projection_predicate( | 99 | fn do_canonicalize_projection_predicate( |
diff --git a/crates/ra_hir/src/ty/lower.rs b/crates/ra_hir/src/ty/lower.rs index a604c02e2..366556134 100644 --- a/crates/ra_hir/src/ty/lower.rs +++ b/crates/ra_hir/src/ty/lower.rs | |||
@@ -22,6 +22,7 @@ use crate::{ | |||
22 | resolve::{Resolver, TypeNs}, | 22 | resolve::{Resolver, TypeNs}, |
23 | ty::Adt, | 23 | ty::Adt, |
24 | type_ref::{TypeBound, TypeRef}, | 24 | type_ref::{TypeBound, TypeRef}, |
25 | util::make_mut_slice, | ||
25 | BuiltinType, Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, | 26 | BuiltinType, Const, Enum, EnumVariant, Function, ModuleDef, Path, Static, Struct, StructField, |
26 | Trait, TypeAlias, Union, | 27 | Trait, TypeAlias, Union, |
27 | }; | 28 | }; |
@@ -31,11 +32,11 @@ impl Ty { | |||
31 | match type_ref { | 32 | match type_ref { |
32 | TypeRef::Never => Ty::simple(TypeCtor::Never), | 33 | TypeRef::Never => Ty::simple(TypeCtor::Never), |
33 | TypeRef::Tuple(inner) => { | 34 | TypeRef::Tuple(inner) => { |
34 | let inner_tys = | 35 | let inner_tys: Arc<[Ty]> = |
35 | inner.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect::<Vec<_>>(); | 36 | inner.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect(); |
36 | Ty::apply( | 37 | Ty::apply( |
37 | TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, | 38 | TypeCtor::Tuple { cardinality: inner_tys.len() as u16 }, |
38 | Substs(inner_tys.into()), | 39 | Substs(inner_tys), |
39 | ) | 40 | ) |
40 | } | 41 | } |
41 | TypeRef::Path(path) => Ty::from_hir_path(db, resolver, path), | 42 | TypeRef::Path(path) => Ty::from_hir_path(db, resolver, path), |
@@ -57,9 +58,7 @@ impl Ty { | |||
57 | } | 58 | } |
58 | TypeRef::Placeholder => Ty::Unknown, | 59 | TypeRef::Placeholder => Ty::Unknown, |
59 | TypeRef::Fn(params) => { | 60 | TypeRef::Fn(params) => { |
60 | let inner_tys = | 61 | let sig = Substs(params.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect()); |
61 | params.iter().map(|tr| Ty::from_hir(db, resolver, tr)).collect::<Vec<_>>(); | ||
62 | let sig = Substs(inner_tys.into()); | ||
63 | Ty::apply(TypeCtor::FnPtr { num_args: sig.len() as u16 - 1 }, sig) | 62 | Ty::apply(TypeCtor::FnPtr { num_args: sig.len() as u16 - 1 }, sig) |
64 | } | 63 | } |
65 | TypeRef::DynTrait(bounds) => { | 64 | TypeRef::DynTrait(bounds) => { |
@@ -69,8 +68,8 @@ impl Ty { | |||
69 | .flat_map(|b| { | 68 | .flat_map(|b| { |
70 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) | 69 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) |
71 | }) | 70 | }) |
72 | .collect::<Vec<_>>(); | 71 | .collect(); |
73 | Ty::Dyn(predicates.into()) | 72 | Ty::Dyn(predicates) |
74 | } | 73 | } |
75 | TypeRef::ImplTrait(bounds) => { | 74 | TypeRef::ImplTrait(bounds) => { |
76 | let self_ty = Ty::Bound(0); | 75 | let self_ty = Ty::Bound(0); |
@@ -79,8 +78,8 @@ impl Ty { | |||
79 | .flat_map(|b| { | 78 | .flat_map(|b| { |
80 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) | 79 | GenericPredicate::from_type_bound(db, resolver, b, self_ty.clone()) |
81 | }) | 80 | }) |
82 | .collect::<Vec<_>>(); | 81 | .collect(); |
83 | Ty::Opaque(predicates.into()) | 82 | Ty::Opaque(predicates) |
84 | } | 83 | } |
85 | TypeRef::Error => Ty::Unknown, | 84 | TypeRef::Error => Ty::Unknown, |
86 | } | 85 | } |
@@ -392,10 +391,7 @@ impl TraitRef { | |||
392 | ) -> Self { | 391 | ) -> Self { |
393 | let mut substs = TraitRef::substs_from_path(db, resolver, segment, resolved); | 392 | let mut substs = TraitRef::substs_from_path(db, resolver, segment, resolved); |
394 | if let Some(self_ty) = explicit_self_ty { | 393 | if let Some(self_ty) = explicit_self_ty { |
395 | // FIXME this could be nicer | 394 | make_mut_slice(&mut substs.0)[0] = self_ty; |
396 | let mut substs_vec = substs.0.to_vec(); | ||
397 | substs_vec[0] = self_ty; | ||
398 | substs.0 = substs_vec.into(); | ||
399 | } | 395 | } |
400 | TraitRef { trait_: resolved, substs } | 396 | TraitRef { trait_: resolved, substs } |
401 | } | 397 | } |
@@ -558,13 +554,12 @@ pub(crate) fn generic_predicates_for_param_query( | |||
558 | param_idx: u32, | 554 | param_idx: u32, |
559 | ) -> Arc<[GenericPredicate]> { | 555 | ) -> Arc<[GenericPredicate]> { |
560 | let resolver = def.resolver(db); | 556 | let resolver = def.resolver(db); |
561 | let predicates = resolver | 557 | resolver |
562 | .where_predicates_in_scope() | 558 | .where_predicates_in_scope() |
563 | // we have to filter out all other predicates *first*, before attempting to lower them | 559 | // we have to filter out all other predicates *first*, before attempting to lower them |
564 | .filter(|pred| Ty::from_hir_only_param(db, &resolver, &pred.type_ref) == Some(param_idx)) | 560 | .filter(|pred| Ty::from_hir_only_param(db, &resolver, &pred.type_ref) == Some(param_idx)) |
565 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 561 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) |
566 | .collect::<Vec<_>>(); | 562 | .collect() |
567 | predicates.into() | ||
568 | } | 563 | } |
569 | 564 | ||
570 | pub(crate) fn trait_env( | 565 | pub(crate) fn trait_env( |
@@ -585,11 +580,10 @@ pub(crate) fn generic_predicates_query( | |||
585 | def: GenericDef, | 580 | def: GenericDef, |
586 | ) -> Arc<[GenericPredicate]> { | 581 | ) -> Arc<[GenericPredicate]> { |
587 | let resolver = def.resolver(db); | 582 | let resolver = def.resolver(db); |
588 | let predicates = resolver | 583 | resolver |
589 | .where_predicates_in_scope() | 584 | .where_predicates_in_scope() |
590 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) | 585 | .flat_map(|pred| GenericPredicate::from_where_predicate(db, &resolver, pred)) |
591 | .collect::<Vec<_>>(); | 586 | .collect() |
592 | predicates.into() | ||
593 | } | 587 | } |
594 | 588 | ||
595 | /// Resolve the default type params from generics | 589 | /// Resolve the default type params from generics |
@@ -603,9 +597,9 @@ pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDef) -> | |||
603 | .map(|p| { | 597 | .map(|p| { |
604 | p.default.as_ref().map_or(Ty::Unknown, |path| Ty::from_hir_path(db, &resolver, path)) | 598 | p.default.as_ref().map_or(Ty::Unknown, |path| Ty::from_hir_path(db, &resolver, path)) |
605 | }) | 599 | }) |
606 | .collect::<Vec<_>>(); | 600 | .collect(); |
607 | 601 | ||
608 | Substs(defaults.into()) | 602 | Substs(defaults) |
609 | } | 603 | } |
610 | 604 | ||
611 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { | 605 | fn fn_sig_for_fn(db: &impl HirDatabase, def: Function) -> FnSig { |
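
Several of the `lower.rs` changes above drop an intermediate `collect::<Vec<_>>()` followed by `.into()`. That works because `Arc<[T]>` implements `FromIterator`, so an iterator can be collected straight into a shared slice. A small illustration outside the rust-analyzer code base:

use std::sync::Arc;

fn main() {
    let words = ["never", "tuple", "path"];

    // Two-step form: build a Vec, then convert it into Arc<[String]>.
    let via_vec: Arc<[String]> =
        words.iter().map(|s| s.to_string()).collect::<Vec<_>>().into();

    // One-step form used in the diff: collect directly into Arc<[String]>.
    let direct: Arc<[String]> = words.iter().map(|s| s.to_string()).collect();

    assert_eq!(via_vec, direct);
    assert_eq!(direct.len(), 3);
}

Both forms allocate a buffer and then move it behind the `Arc`; the one-step version is simply shorter and makes the target type explicit at the binding.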
diff --git a/crates/ra_hir/src/ty/traits.rs b/crates/ra_hir/src/ty/traits.rs index b0f67ae50..0cb5c3798 100644 --- a/crates/ra_hir/src/ty/traits.rs +++ b/crates/ra_hir/src/ty/traits.rs | |||
@@ -89,7 +89,7 @@ pub(crate) fn impls_for_trait_query( | |||
89 | } | 89 | } |
90 | let crate_impl_blocks = db.impls_in_crate(krate); | 90 | let crate_impl_blocks = db.impls_in_crate(krate); |
91 | impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(trait_)); | 91 | impls.extend(crate_impl_blocks.lookup_impl_blocks_for_trait(trait_)); |
92 | impls.into_iter().collect::<Vec<_>>().into() | 92 | impls.into_iter().collect() |
93 | } | 93 | } |
94 | 94 | ||
95 | /// A set of clauses that we assume to be true. E.g. if we are inside this function: | 95 | /// A set of clauses that we assume to be true. E.g. if we are inside this function: |
diff --git a/crates/ra_hir/src/ty/traits/chalk.rs b/crates/ra_hir/src/ty/traits/chalk.rs index 9168de709..00aaf65d9 100644 --- a/crates/ra_hir/src/ty/traits/chalk.rs +++ b/crates/ra_hir/src/ty/traits/chalk.rs | |||
@@ -126,8 +126,7 @@ impl ToChalk for Substs { | |||
126 | chalk_ir::Parameter(chalk_ir::ParameterKind::Ty(ty)) => from_chalk(db, ty), | 126 | chalk_ir::Parameter(chalk_ir::ParameterKind::Ty(ty)) => from_chalk(db, ty), |
127 | chalk_ir::Parameter(chalk_ir::ParameterKind::Lifetime(_)) => unimplemented!(), | 127 | chalk_ir::Parameter(chalk_ir::ParameterKind::Lifetime(_)) => unimplemented!(), |
128 | }) | 128 | }) |
129 | .collect::<Vec<_>>() | 129 | .collect(); |
130 | .into(); | ||
131 | Substs(tys) | 130 | Substs(tys) |
132 | } | 131 | } |
133 | } | 132 | } |
diff --git a/crates/ra_hir/src/type_alias.rs b/crates/ra_hir/src/type_alias.rs index 3b38c4740..674a46102 100644 --- a/crates/ra_hir/src/type_alias.rs +++ b/crates/ra_hir/src/type_alias.rs | |||
@@ -17,12 +17,14 @@ pub struct TypeAliasData { | |||
17 | pub(crate) type_ref: Option<TypeRef>, | 17 | pub(crate) type_ref: Option<TypeRef>, |
18 | } | 18 | } |
19 | 19 | ||
20 | pub(crate) fn type_alias_data_query( | 20 | impl TypeAliasData { |
21 | db: &(impl DefDatabase + AstDatabase), | 21 | pub(crate) fn type_alias_data_query( |
22 | typ: TypeAlias, | 22 | db: &(impl DefDatabase + AstDatabase), |
23 | ) -> Arc<TypeAliasData> { | 23 | typ: TypeAlias, |
24 | let node = typ.source(db).ast; | 24 | ) -> Arc<TypeAliasData> { |
25 | let name = node.name().map_or_else(Name::missing, |n| n.as_name()); | 25 | let node = typ.source(db).ast; |
26 | let type_ref = node.type_ref().map(TypeRef::from_ast); | 26 | let name = node.name().map_or_else(Name::missing, |n| n.as_name()); |
27 | Arc::new(TypeAliasData { name, type_ref }) | 27 | let type_ref = node.type_ref().map(TypeRef::from_ast); |
28 | Arc::new(TypeAliasData { name, type_ref }) | ||
29 | } | ||
28 | } | 30 | } |
diff --git a/crates/ra_hir/src/util.rs b/crates/ra_hir/src/util.rs new file mode 100644 index 000000000..0095ee45d --- /dev/null +++ b/crates/ra_hir/src/util.rs | |||
@@ -0,0 +1,12 @@ | |||
1 | //! Internal utility functions. | ||
2 | |||
3 | use std::sync::Arc; | ||
4 | |||
5 | /// Helper for mutating `Arc<[T]>` (i.e. `Arc::make_mut` for Arc slices). | ||
6 | /// The underlying values are cloned if there are other strong references. | ||
7 | pub(crate) fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] { | ||
8 | if Arc::get_mut(a).is_none() { | ||
9 | *a = a.iter().cloned().collect(); | ||
10 | } | ||
11 | Arc::get_mut(a).unwrap() | ||
12 | } | ||
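
The new `make_mut_slice` helper gives copy-on-write access to an `Arc<[T]>`: if the slice has no other strong references it is handed out for in-place mutation, otherwise the elements are cloned first, mirroring what `Arc::make_mut` does for sized values. A short usage sketch of the same helper in isolation:

use std::sync::Arc;

/// Same shape as the helper added above: clone the slice only if it is shared.
fn make_mut_slice<T: Clone>(a: &mut Arc<[T]>) -> &mut [T] {
    if Arc::get_mut(a).is_none() {
        *a = a.iter().cloned().collect();
    }
    Arc::get_mut(a).unwrap()
}

fn main() {
    let mut substs: Arc<[u32]> = Arc::from(vec![1, 2, 3]);
    let shared = Arc::clone(&substs);

    // `substs` is shared, so the data is cloned before handing out `&mut [u32]`.
    make_mut_slice(&mut substs)[0] = 99;

    assert_eq!(&substs[..], &[99, 2, 3]);
    assert_eq!(&shared[..], &[1, 2, 3]); // the other handle is untouched
}

In the canonicalizer and in `TraitRef::from_path` this replaces rebuilding a whole `Substs` just to change a few elements.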
diff --git a/crates/ra_ide_api/src/change.rs b/crates/ra_ide_api/src/change.rs index 09913787b..050249c0e 100644 --- a/crates/ra_ide_api/src/change.rs +++ b/crates/ra_ide_api/src/change.rs | |||
@@ -4,7 +4,7 @@ use std::{fmt, sync::Arc, time}; | |||
4 | 4 | ||
5 | use ra_db::{ | 5 | use ra_db::{ |
6 | salsa::{Database, Durability, SweepStrategy}, | 6 | salsa::{Database, Durability, SweepStrategy}, |
7 | CrateGraph, CrateId, FileId, SourceDatabase, SourceRoot, SourceRootId, | 7 | CrateGraph, CrateId, FileId, SourceDatabase, SourceDatabaseExt, SourceRoot, SourceRootId, |
8 | }; | 8 | }; |
9 | use ra_prof::{memory_usage, profile, Bytes}; | 9 | use ra_prof::{memory_usage, profile, Bytes}; |
10 | use ra_syntax::SourceFile; | 10 | use ra_syntax::SourceFile; |
diff --git a/crates/ra_ide_api/src/db.rs b/crates/ra_ide_api/src/db.rs index ea0714add..bbf04bcf7 100644 --- a/crates/ra_ide_api/src/db.rs +++ b/crates/ra_ide_api/src/db.rs | |||
@@ -4,8 +4,10 @@ use std::sync::Arc; | |||
4 | 4 | ||
5 | use ra_db::{ | 5 | use ra_db::{ |
6 | salsa::{self, Database, Durability}, | 6 | salsa::{self, Database, Durability}, |
7 | Canceled, CheckCanceled, CrateId, FileId, SourceDatabase, SourceRootId, | 7 | Canceled, CheckCanceled, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, |
8 | SourceDatabaseExt, SourceRootId, | ||
8 | }; | 9 | }; |
10 | use relative_path::RelativePath; | ||
9 | use rustc_hash::FxHashMap; | 11 | use rustc_hash::FxHashMap; |
10 | 12 | ||
11 | use crate::{ | 13 | use crate::{ |
@@ -15,6 +17,7 @@ use crate::{ | |||
15 | 17 | ||
16 | #[salsa::database( | 18 | #[salsa::database( |
17 | ra_db::SourceDatabaseStorage, | 19 | ra_db::SourceDatabaseStorage, |
20 | ra_db::SourceDatabaseExtStorage, | ||
18 | LineIndexDatabaseStorage, | 21 | LineIndexDatabaseStorage, |
19 | symbol_index::SymbolsDatabaseStorage, | 22 | symbol_index::SymbolsDatabaseStorage, |
20 | hir::db::InternDatabaseStorage, | 23 | hir::db::InternDatabaseStorage, |
@@ -31,6 +34,22 @@ pub(crate) struct RootDatabase { | |||
31 | pub(crate) last_gc_check: crate::wasm_shims::Instant, | 34 | pub(crate) last_gc_check: crate::wasm_shims::Instant, |
32 | } | 35 | } |
33 | 36 | ||
37 | impl FileLoader for RootDatabase { | ||
38 | fn file_text(&self, file_id: FileId) -> Arc<String> { | ||
39 | FileLoaderDelegate(self).file_text(file_id) | ||
40 | } | ||
41 | fn resolve_relative_path( | ||
42 | &self, | ||
43 | anchor: FileId, | ||
44 | relative_path: &RelativePath, | ||
45 | ) -> Option<FileId> { | ||
46 | FileLoaderDelegate(self).resolve_relative_path(anchor, relative_path) | ||
47 | } | ||
48 | fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { | ||
49 | FileLoaderDelegate(self).relevant_crates(file_id) | ||
50 | } | ||
51 | } | ||
52 | |||
34 | impl hir::debug::HirDebugHelper for RootDatabase { | 53 | impl hir::debug::HirDebugHelper for RootDatabase { |
35 | fn crate_name(&self, krate: CrateId) -> Option<String> { | 54 | fn crate_name(&self, krate: CrateId) -> Option<String> { |
36 | self.debug_data.crate_names.get(&krate).cloned() | 55 | self.debug_data.crate_names.get(&krate).cloned() |
@@ -104,7 +123,7 @@ pub(crate) trait LineIndexDatabase: ra_db::SourceDatabase + CheckCanceled { | |||
104 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; | 123 | fn line_index(&self, file_id: FileId) -> Arc<LineIndex>; |
105 | } | 124 | } |
106 | 125 | ||
107 | fn line_index(db: &impl ra_db::SourceDatabase, file_id: FileId) -> Arc<LineIndex> { | 126 | fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> { |
108 | let text = db.file_text(file_id); | 127 | let text = db.file_text(file_id); |
109 | Arc::new(LineIndex::new(&*text)) | 128 | Arc::new(LineIndex::new(&*text)) |
110 | } | 129 | } |
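
The `FileLoader` impl added to `RootDatabase` above forwards every method to `FileLoaderDelegate(self)`, so the shared behaviour lives in one place. A hedged sketch of that newtype-delegation pattern follows; the trait and types here are hypothetical stand-ins for illustration, not the actual `ra_db` definitions:

use std::sync::Arc;

// Hypothetical stand-ins, only to show the shape of the pattern.
#[derive(Clone, Copy)]
struct FileId(u32);

trait FileLoader {
    fn file_text(&self, file_id: FileId) -> Arc<String>;
}

// The delegate wraps a reference to any database and provides the default behaviour once.
struct FileLoaderDelegate<Db>(Db);

impl<'a, Db> FileLoaderDelegate<&'a Db> {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        let _db: &Db = self.0; // a real delegate would query this database
        Arc::new(format!("contents of file {}", file_id.0))
    }
}

struct RootDatabase;

// The concrete database forwards each trait method to the delegate, as in the diff.
impl FileLoader for RootDatabase {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        FileLoaderDelegate(self).file_text(file_id)
    }
}

fn main() {
    let db = RootDatabase;
    assert_eq!(&*db.file_text(FileId(1)), "contents of file 1");
}

The forwarding boilerplate is small, and it keeps the trait object-safe view (`FileLoader`) separate from the salsa query storage on the concrete database.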
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs index 65f061443..8743a3a79 100644 --- a/crates/ra_ide_api/src/diagnostics.rs +++ b/crates/ra_ide_api/src/diagnostics.rs | |||
@@ -4,7 +4,7 @@ use std::cell::RefCell; | |||
4 | 4 | ||
5 | use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; | 5 | use hir::diagnostics::{AstDiagnostic, Diagnostic as _, DiagnosticSink}; |
6 | use itertools::Itertools; | 6 | use itertools::Itertools; |
7 | use ra_db::SourceDatabase; | 7 | use ra_db::{SourceDatabase, SourceDatabaseExt}; |
8 | use ra_prof::profile; | 8 | use ra_prof::profile; |
9 | use ra_syntax::{ | 9 | use ra_syntax::{ |
10 | algo, | 10 | algo, |
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs index 2d92fe1c5..f7fd42f65 100644 --- a/crates/ra_ide_api/src/lib.rs +++ b/crates/ra_ide_api/src/lib.rs | |||
@@ -52,7 +52,7 @@ use std::sync::Arc; | |||
52 | use ra_cfg::CfgOptions; | 52 | use ra_cfg::CfgOptions; |
53 | use ra_db::{ | 53 | use ra_db::{ |
54 | salsa::{self, ParallelDatabase}, | 54 | salsa::{self, ParallelDatabase}, |
55 | CheckCanceled, SourceDatabase, | 55 | CheckCanceled, FileLoader, SourceDatabase, |
56 | }; | 56 | }; |
57 | use ra_syntax::{SourceFile, TextRange, TextUnit}; | 57 | use ra_syntax::{SourceFile, TextRange, TextUnit}; |
58 | use ra_text_edit::TextEdit; | 58 | use ra_text_edit::TextEdit; |
@@ -289,10 +289,14 @@ impl AnalysisHost { | |||
289 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { | 289 | pub fn per_query_memory_usage(&mut self) -> Vec<(String, ra_prof::Bytes)> { |
290 | self.db.per_query_memory_usage() | 290 | self.db.per_query_memory_usage() |
291 | } | 291 | } |
292 | pub fn raw_database(&self) -> &(impl hir::db::HirDatabase + salsa::Database) { | 292 | pub fn raw_database( |
293 | &self, | ||
294 | ) -> &(impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { | ||
293 | &self.db | 295 | &self.db |
294 | } | 296 | } |
295 | pub fn raw_database_mut(&mut self) -> &mut (impl hir::db::HirDatabase + salsa::Database) { | 297 | pub fn raw_database_mut( |
298 | &mut self, | ||
299 | ) -> &mut (impl hir::db::HirDatabase + salsa::Database + ra_db::SourceDatabaseExt) { | ||
296 | &mut self.db | 300 | &mut self.db |
297 | } | 301 | } |
298 | } | 302 | } |
diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs index c95c47bf1..4247c6d90 100644 --- a/crates/ra_ide_api/src/references.rs +++ b/crates/ra_ide_api/src/references.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use hir::{Either, ModuleSource}; | 3 | use hir::{Either, ModuleSource}; |
4 | use ra_db::SourceDatabase; | 4 | use ra_db::{SourceDatabase, SourceDatabaseExt}; |
5 | use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SourceFile, SyntaxNode}; | 5 | use ra_syntax::{algo::find_node_at_offset, ast, AstNode, SourceFile, SyntaxNode}; |
6 | use relative_path::{RelativePath, RelativePathBuf}; | 6 | use relative_path::{RelativePath, RelativePathBuf}; |
7 | 7 | ||
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs index 797e9926f..5729eb5b3 100644 --- a/crates/ra_ide_api/src/symbol_index.rs +++ b/crates/ra_ide_api/src/symbol_index.rs | |||
@@ -29,7 +29,7 @@ use std::{ | |||
29 | use fst::{self, Streamer}; | 29 | use fst::{self, Streamer}; |
30 | use ra_db::{ | 30 | use ra_db::{ |
31 | salsa::{self, ParallelDatabase}, | 31 | salsa::{self, ParallelDatabase}, |
32 | SourceDatabase, SourceRootId, | 32 | SourceDatabaseExt, SourceRootId, |
33 | }; | 33 | }; |
34 | use ra_syntax::{ | 34 | use ra_syntax::{ |
35 | ast::{self, NameOwner}, | 35 | ast::{self, NameOwner}, |
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index aedc55a95..46a0f958c 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml | |||
@@ -18,8 +18,6 @@ parking_lot = "0.9.0" | |||
18 | jod-thread = "0.1.0" | 18 | jod-thread = "0.1.0" |
19 | ra_vfs = "0.4.0" | 19 | ra_vfs = "0.4.0" |
20 | ra_syntax = { path = "../ra_syntax" } | 20 | ra_syntax = { path = "../ra_syntax" } |
21 | ra_db = { path = "../ra_db" } | ||
22 | ra_cfg = { path = "../ra_cfg" } | ||
23 | ra_text_edit = { path = "../ra_text_edit" } | 21 | ra_text_edit = { path = "../ra_text_edit" } |
24 | ra_ide_api = { path = "../ra_ide_api" } | 22 | ra_ide_api = { path = "../ra_ide_api" } |
25 | lsp-server = "0.2.0" | 23 | lsp-server = "0.2.0" |
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml index b058dde91..e8ef2457b 100644 --- a/crates/ra_mbe/Cargo.toml +++ b/crates/ra_mbe/Cargo.toml | |||
@@ -8,7 +8,6 @@ authors = ["rust-analyzer developers"] | |||
8 | ra_syntax = { path = "../ra_syntax" } | 8 | ra_syntax = { path = "../ra_syntax" } |
9 | ra_parser = { path = "../ra_parser" } | 9 | ra_parser = { path = "../ra_parser" } |
10 | tt = { path = "../ra_tt", package = "ra_tt" } | 10 | tt = { path = "../ra_tt", package = "ra_tt" } |
11 | itertools = "0.8.0" | ||
12 | rustc-hash = "1.0.0" | 11 | rustc-hash = "1.0.0" |
13 | smallvec = "0.6.9" | 12 | smallvec = "0.6.9" |
14 | log = "0.4.5" | 13 | log = "0.4.5" |
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index 9bc85404a..68c594202 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -15,6 +15,7 @@ rustc-hash = "1.0.1" | |||
15 | arrayvec = "0.4.10" | 15 | arrayvec = "0.4.10" |
16 | once_cell = "1.2.0" | 16 | once_cell = "1.2.0" |
17 | 17 | ||
18 | # This crate transitively depends on `smol_str` via `rowan`. | ||
18 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here | 19 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here |
19 | # to reduce number of compilations | 20 | # to reduce number of compilations |
20 | smol_str = { version = "0.1.12", features = ["serde"] } | 21 | smol_str = { version = "0.1.12", features = ["serde"] } |
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs index 03f3b5fbb..ea92284b8 100644 --- a/crates/ra_syntax/src/ast/edit.rs +++ b/crates/ra_syntax/src/ast/edit.rs | |||
@@ -15,7 +15,7 @@ use crate::{ | |||
15 | }, | 15 | }, |
16 | AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, | 16 | AstToken, Direction, InsertPosition, SmolStr, SyntaxElement, |
17 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, | 17 | SyntaxKind::{ATTR, COMMENT, WHITESPACE}, |
18 | SyntaxNode, T, | 18 | SyntaxNode, SyntaxToken, T, |
19 | }; | 19 | }; |
20 | 20 | ||
21 | impl ast::FnDef { | 21 | impl ast::FnDef { |
@@ -231,12 +231,64 @@ pub fn replace_descendants<N: AstNode, D: AstNode>( | |||
231 | N::cast(new_syntax).unwrap() | 231 | N::cast(new_syntax).unwrap() |
232 | } | 232 | } |
233 | 233 | ||
234 | // Note this is copy-pasted from fmt. It seems like fmt should be a separate | 234 | #[derive(Debug, Clone, Copy)] |
235 | // crate, but basic tree building should be this crate. However, tree building | 235 | pub struct IndentLevel(pub u8); |
236 | // might want to call into fmt... | 236 | |
237 | impl From<u8> for IndentLevel { | ||
238 | fn from(level: u8) -> IndentLevel { | ||
239 | IndentLevel(level) | ||
240 | } | ||
241 | } | ||
242 | |||
243 | impl IndentLevel { | ||
244 | pub fn from_node(node: &SyntaxNode) -> IndentLevel { | ||
245 | let first_token = match node.first_token() { | ||
246 | Some(it) => it, | ||
247 | None => return IndentLevel(0), | ||
248 | }; | ||
249 | for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) { | ||
250 | let text = ws.syntax().text(); | ||
251 | if let Some(pos) = text.rfind('\n') { | ||
252 | let level = text[pos + 1..].chars().count() / 4; | ||
253 | return IndentLevel(level as u8); | ||
254 | } | ||
255 | } | ||
256 | IndentLevel(0) | ||
257 | } | ||
258 | |||
259 | pub fn increase_indent<N: AstNode>(self, node: N) -> N { | ||
260 | N::cast(self._increase_indent(node.syntax().clone())).unwrap() | ||
261 | } | ||
262 | |||
263 | fn _increase_indent(self, node: SyntaxNode) -> SyntaxNode { | ||
264 | let replacements: FxHashMap<SyntaxElement, SyntaxElement> = node | ||
265 | .descendants_with_tokens() | ||
266 | .filter_map(|el| el.into_token()) | ||
267 | .filter_map(ast::Whitespace::cast) | ||
268 | .filter(|ws| { | ||
269 | let text = ws.syntax().text(); | ||
270 | text.contains('\n') | ||
271 | }) | ||
272 | .map(|ws| { | ||
273 | ( | ||
274 | ws.syntax().clone().into(), | ||
275 | make::tokens::whitespace(&format!( | ||
276 | "{}{:width$}", | ||
277 | ws.syntax().text(), | ||
278 | "", | ||
279 | width = self.0 as usize * 4 | ||
280 | )) | ||
281 | .into(), | ||
282 | ) | ||
283 | }) | ||
284 | .collect(); | ||
285 | algo::replace_descendants(&node, &replacements) | ||
286 | } | ||
287 | } | ||
288 | |||
289 | // FIXME: replace usages with IndentLevel above | ||
237 | fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { | 290 | fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { |
238 | let prev_tokens = std::iter::successors(node.first_token(), |token| token.prev_token()); | 291 | for token in prev_tokens(node.first_token()?) { |
239 | for token in prev_tokens { | ||
240 | if let Some(ws) = ast::Whitespace::cast(token.clone()) { | 292 | if let Some(ws) = ast::Whitespace::cast(token.clone()) { |
241 | let ws_text = ws.text(); | 293 | let ws_text = ws.text(); |
242 | if let Some(pos) = ws_text.rfind('\n') { | 294 | if let Some(pos) = ws_text.rfind('\n') { |
@@ -250,6 +302,10 @@ fn leading_indent(node: &SyntaxNode) -> Option<SmolStr> { | |||
250 | None | 302 | None |
251 | } | 303 | } |
252 | 304 | ||
305 | fn prev_tokens(token: SyntaxToken) -> impl Iterator<Item = SyntaxToken> { | ||
306 | iter::successors(Some(token), |token| token.prev_token()) | ||
307 | } | ||
308 | |||
253 | #[must_use] | 309 | #[must_use] |
254 | fn insert_children<N: AstNode>( | 310 | fn insert_children<N: AstNode>( |
255 | parent: &N, | 311 | parent: &N, |
@@ -269,3 +325,26 @@ fn replace_children<N: AstNode>( | |||
269 | let new_syntax = algo::replace_children(parent.syntax(), to_replace, &mut to_insert); | 325 | let new_syntax = algo::replace_children(parent.syntax(), to_replace, &mut to_insert); |
270 | N::cast(new_syntax).unwrap() | 326 | N::cast(new_syntax).unwrap() |
271 | } | 327 | } |
328 | |||
329 | #[test] | ||
330 | fn test_increase_indent() { | ||
331 | let arm_list = { | ||
332 | let arm = make::match_arm(iter::once(make::placeholder_pat().into()), make::expr_unit()); | ||
333 | make::match_arm_list(vec![arm.clone(), arm].into_iter()) | ||
334 | }; | ||
335 | assert_eq!( | ||
336 | arm_list.syntax().to_string(), | ||
337 | "{ | ||
338 | _ => (), | ||
339 | _ => (), | ||
340 | }" | ||
341 | ); | ||
342 | let indented = IndentLevel(2).increase_indent(arm_list); | ||
343 | assert_eq!( | ||
344 | indented.syntax().to_string(), | ||
345 | "{ | ||
346 | _ => (), | ||
347 | _ => (), | ||
348 | }" | ||
349 | ); | ||
350 | } | ||
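
`IndentLevel::from_node` above derives the indent level from the whitespace token that precedes a node: the text after the last newline is measured and divided by four, matching rust-analyzer's 4-space indentation. A simplified sketch of that computation on plain strings, outside the syntax-tree API:

/// Indent level of the line a node starts on, assuming 4-space indentation,
/// given the whitespace text that immediately precedes the node.
fn indent_level_from_whitespace(ws_text: &str) -> u8 {
    match ws_text.rfind('\n') {
        Some(pos) => (ws_text[pos + 1..].chars().count() / 4) as u8,
        None => 0,
    }
}

fn main() {
    assert_eq!(indent_level_from_whitespace("\n"), 0);
    assert_eq!(indent_level_from_whitespace("\n    "), 1);
    assert_eq!(indent_level_from_whitespace("\n\n        "), 2);
    // No newline at all: the node is not at the start of a line.
    assert_eq!(indent_level_from_whitespace(" "), 0);
}

`increase_indent` then does the reverse: it appends `level * 4` spaces to every whitespace token that contains a newline, which is what the test above exercises.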
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index 287a40bee..143835172 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -160,6 +160,12 @@ pub mod tokens { | |||
160 | .unwrap() | 160 | .unwrap() |
161 | } | 161 | } |
162 | 162 | ||
163 | pub fn whitespace(text: &str) -> SyntaxToken { | ||
164 | assert!(text.trim().is_empty()); | ||
165 | let sf = SourceFile::parse(text).ok().unwrap(); | ||
166 | sf.syntax().first_child_or_token().unwrap().into_token().unwrap() | ||
167 | } | ||
168 | |||
163 | pub fn single_newline() -> SyntaxToken { | 169 | pub fn single_newline() -> SyntaxToken { |
164 | SOURCE_FILE | 170 | SOURCE_FILE |
165 | .tree() | 171 | .tree() |
diff --git a/crates/ra_tt/Cargo.toml b/crates/ra_tt/Cargo.toml index 3328d312f..3fcc7f085 100644 --- a/crates/ra_tt/Cargo.toml +++ b/crates/ra_tt/Cargo.toml | |||
@@ -5,4 +5,6 @@ version = "0.1.0" | |||
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | smol_str = "0.1.9" | 8 | # ideally, `serde` should be enabled by `ra_lsp_server`, but we enable it here |
9 | # to reduce number of compilations | ||
10 | smol_str = { version = "0.1.12", features = ["serde"] } | ||
diff --git a/docs/dev/README.md b/docs/dev/README.md index 0db3e731e..5e18e4ffe 100644 --- a/docs/dev/README.md +++ b/docs/dev/README.md | |||
@@ -14,7 +14,7 @@ To learn more about how rust-analyzer works, see | |||
14 | 14 | ||
15 | We also publish rustdoc docs to pages: | 15 | We also publish rustdoc docs to pages: |
16 | 16 | ||
17 | https://rust-analyzer.github.io/rust-analyzer/ra_ide_api/index.html | 17 | https://rust-analyzer.github.io/rust-analyzer/api-docs/ra_ide_api/ |
18 | 18 | ||
19 | Various organizational and process issues are discussed in this document. | 19 | Various organizational and process issues are discussed in this document. |
20 | 20 | ||