author | bors[bot] <bors[bot]@users.noreply.github.com> | 2018-12-24 14:40:11 +0000
committer | bors[bot] <bors[bot]@users.noreply.github.com> | 2018-12-24 14:40:11 +0000
commit | 67e768466ff2e2611eead0f30b2e9c4083c80c20 (patch)
tree | 8984028019837c91131fc30f60eecf8c2a457368
parent | abe09eb5edfe8f4c58baa16140acbd414635836f (diff)
parent | 4befde1eee5b1e2b7ddc9bf764b77f82b792c318 (diff)
Merge #327
327: Beginnings of type inference r=flodiebold a=flodiebold
I was a bit bored, so I thought I'd try to start implementing the type system and see how far I'd get :wink: This is obviously still extremely WIP, with only very basic stuff working, but I thought I'd post it now to get some feedback on whether this approach makes sense at all.
There's no user-visible effect yet, but the type inference has tests similar to the ones for the parser. My next step will probably be to implement struct types, after which this could be used to complete struct fields.
I realize this may all get thrown away when/if the compiler query system becomes usable, but I feel like there are lots of IDE features that could be implemented with somewhat-working type inference in the meantime :smile:
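A minimal sketch (not part of the patch) of how the new `Analysis::type_of` entry point could be driven from an IDE action such as "show type of selection". The surrounding setup and the exact re-export paths are assumptions:

```rust
use ra_analysis::{Analysis, Cancelable, FileId};
use ra_syntax::TextRange;

// Hypothetical helper: returns the inferred type of the expression covering
// `range`, rendered as a string (e.g. "u32" or "fn() -> u32"), or None when
// the selection is not inside a function or nothing was inferred for it.
fn type_at_selection(
    analysis: &Analysis,
    file_id: FileId,
    range: TextRange,
) -> Cancelable<Option<String>> {
    // Internally, `type_of` finds the enclosing fn, runs the `infer` query on
    // it, and looks the covering node up in the resulting `InferenceResult`.
    analysis.type_of(file_id, range)
}
```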
Co-authored-by: Florian Diebold <[email protected]>
-rw-r--r-- | Cargo.lock | 1
-rw-r--r-- | crates/ra_analysis/src/db.rs | 2
-rw-r--r-- | crates/ra_analysis/src/imp.rs | 20
-rw-r--r-- | crates/ra_analysis/src/lib.rs | 3
-rw-r--r-- | crates/ra_hir/Cargo.toml | 3
-rw-r--r-- | crates/ra_hir/src/db.rs | 11
-rw-r--r-- | crates/ra_hir/src/function.rs | 18
-rw-r--r-- | crates/ra_hir/src/lib.rs | 20
-rw-r--r-- | crates/ra_hir/src/mock.rs | 13
-rw-r--r-- | crates/ra_hir/src/module.rs | 1
-rw-r--r-- | crates/ra_hir/src/module/nameres.rs | 4
-rw-r--r-- | crates/ra_hir/src/query_definitions.rs | 12
-rw-r--r-- | crates/ra_hir/src/ty.rs | 601
-rw-r--r-- | crates/ra_hir/src/ty/primitive.rs | 130
-rw-r--r-- | crates/ra_hir/src/ty/tests.rs | 134
-rw-r--r-- | crates/ra_hir/src/ty/tests/data/0001_basics.txt | 13
-rw-r--r-- | crates/ra_hir/src/ty/tests/data/0002_let.txt | 7
-rw-r--r-- | crates/ra_hir/src/ty/tests/data/0003_paths.txt | 9
-rw-r--r-- | crates/ra_syntax/src/ast/generated.rs | 60
-rw-r--r-- | crates/ra_syntax/src/grammar.ron | 19
-rw-r--r-- | crates/ra_syntax/tests/test.rs | 157
-rw-r--r-- | crates/test_utils/src/lib.rs | 99
22 files changed, 1189 insertions, 148 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 5bf946b34..51cf1825d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -695,6 +695,7 @@ name = "ra_hir"
695 | version = "0.1.0" | 695 | version = "0.1.0" |
696 | dependencies = [ | 696 | dependencies = [ |
697 | "arrayvec 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", | 697 | "arrayvec 0.4.9 (registry+https://github.com/rust-lang/crates.io-index)", |
698 | "flexi_logger 0.10.3 (registry+https://github.com/rust-lang/crates.io-index)", | ||
698 | "id-arena 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", | 699 | "id-arena 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", |
699 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", | 700 | "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", |
700 | "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", | 701 | "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", |
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
index 94729d296..780a84291 100644
--- a/crates/ra_analysis/src/db.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -93,6 +93,8 @@ salsa::database_storage! {
93 | fn item_map() for hir::db::ItemMapQuery; | 93 | fn item_map() for hir::db::ItemMapQuery; |
94 | fn fn_syntax() for hir::db::FnSyntaxQuery; | 94 | fn fn_syntax() for hir::db::FnSyntaxQuery; |
95 | fn submodules() for hir::db::SubmodulesQuery; | 95 | fn submodules() for hir::db::SubmodulesQuery; |
96 | fn infer() for hir::db::InferQuery; | ||
97 | fn type_for_def() for hir::db::TypeForDefQuery; | ||
96 | } | 98 | } |
97 | } | 99 | } |
98 | } | 100 | } |
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index b01382808..40996bfd7 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -5,7 +5,8 @@ use std::{
5 | 5 | ||
6 | use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit}; | 6 | use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit}; |
7 | use ra_syntax::{ | 7 | use ra_syntax::{ |
8 | ast::{self, ArgListOwner, Expr, NameOwner}, | 8 | ast::{self, ArgListOwner, Expr, NameOwner, FnDef}, |
9 | algo::find_covering_node, | ||
9 | AstNode, SourceFileNode, | 10 | AstNode, SourceFileNode, |
10 | SyntaxKind::*, | 11 | SyntaxKind::*, |
11 | SyntaxNodeRef, TextRange, TextUnit, | 12 | SyntaxNodeRef, TextRange, TextUnit, |
@@ -510,6 +511,23 @@ impl AnalysisImpl {
510 | Ok(None) | 511 | Ok(None) |
511 | } | 512 | } |
512 | 513 | ||
514 | pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<String>> { | ||
515 | let file = self.db.source_file(file_id); | ||
516 | let syntax = file.syntax(); | ||
517 | let node = find_covering_node(syntax, range); | ||
518 | let parent_fn = node.ancestors().filter_map(FnDef::cast).next(); | ||
519 | let parent_fn = if let Some(p) = parent_fn { | ||
520 | p | ||
521 | } else { | ||
522 | return Ok(None); | ||
523 | }; | ||
524 | let function = ctry!(source_binder::function_from_source( | ||
525 | &*self.db, file_id, parent_fn | ||
526 | )?); | ||
527 | let infer = function.infer(&*self.db)?; | ||
528 | Ok(infer.type_of_node(node).map(|t| t.to_string())) | ||
529 | } | ||
530 | |||
513 | fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> { | 531 | fn index_resolve(&self, name_ref: ast::NameRef) -> Cancelable<Vec<(FileId, FileSymbol)>> { |
514 | let name = name_ref.text(); | 532 | let name = name_ref.text(); |
515 | let mut query = Query::new(name.to_string()); | 533 | let mut query = Query::new(name.to_string()); |
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index 85df9c089..830898140 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -366,6 +366,9 @@ impl Analysis {
366 | ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> { | 366 | ) -> Cancelable<Option<(FnSignatureInfo, Option<usize>)>> { |
367 | self.imp.resolve_callable(position) | 367 | self.imp.resolve_callable(position) |
368 | } | 368 | } |
369 | pub fn type_of(&self, file_id: FileId, range: TextRange) -> Cancelable<Option<String>> { | ||
370 | self.imp.type_of(file_id, range) | ||
371 | } | ||
369 | } | 372 | } |
370 | 373 | ||
371 | pub struct LibraryData { | 374 | pub struct LibraryData { |
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 61650cee9..594176337 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -16,3 +16,6 @@ ra_syntax = { path = "../ra_syntax" }
16 | ra_editor = { path = "../ra_editor" } | 16 | ra_editor = { path = "../ra_editor" } |
17 | ra_db = { path = "../ra_db" } | 17 | ra_db = { path = "../ra_db" } |
18 | test_utils = { path = "../test_utils" } | 18 | test_utils = { path = "../test_utils" } |
19 | |||
20 | [dev-dependencies] | ||
21 | flexi_logger = "0.10.0" | ||
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index 62cf9ab17..d94f75857 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -14,6 +14,7 @@ use crate::{
14 | function::FnId, | 14 | function::FnId, |
15 | module::{ModuleId, ModuleTree, ModuleSource, | 15 | module::{ModuleId, ModuleTree, ModuleSource, |
16 | nameres::{ItemMap, InputModuleItems}}, | 16 | nameres::{ItemMap, InputModuleItems}}, |
17 | ty::{InferenceResult, Ty}, | ||
17 | }; | 18 | }; |
18 | 19 | ||
19 | salsa::query_group! { | 20 | salsa::query_group! { |
@@ -30,6 +31,16 @@ pub trait HirDatabase: SyntaxDatabase
30 | use fn query_definitions::fn_syntax; | 31 | use fn query_definitions::fn_syntax; |
31 | } | 32 | } |
32 | 33 | ||
34 | fn infer(fn_id: FnId) -> Cancelable<Arc<InferenceResult>> { | ||
35 | type InferQuery; | ||
36 | use fn query_definitions::infer; | ||
37 | } | ||
38 | |||
39 | fn type_for_def(def_id: DefId) -> Cancelable<Ty> { | ||
40 | type TypeForDefQuery; | ||
41 | use fn query_definitions::type_for_def; | ||
42 | } | ||
43 | |||
33 | fn file_items(file_id: FileId) -> Arc<SourceFileItems> { | 44 | fn file_items(file_id: FileId) -> Arc<SourceFileItems> { |
34 | type SourceFileItemsQuery; | 45 | type SourceFileItemsQuery; |
35 | use fn query_definitions::file_items; | 46 | use fn query_definitions::file_items; |
diff --git a/crates/ra_hir/src/function.rs b/crates/ra_hir/src/function.rs
index 2925beb16..d36477b48 100644
--- a/crates/ra_hir/src/function.rs
+++ b/crates/ra_hir/src/function.rs
@@ -5,12 +5,13 @@ use std::{
5 | sync::Arc, | 5 | sync::Arc, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use ra_db::Cancelable; | ||
8 | use ra_syntax::{ | 9 | use ra_syntax::{ |
9 | TextRange, TextUnit, | 10 | TextRange, TextUnit, |
10 | ast::{self, AstNode, DocCommentsOwner, NameOwner}, | 11 | ast::{self, AstNode, DocCommentsOwner, NameOwner}, |
11 | }; | 12 | }; |
12 | 13 | ||
13 | use crate::{ DefId, HirDatabase }; | 14 | use crate::{ DefId, HirDatabase, ty::InferenceResult, Module }; |
14 | 15 | ||
15 | pub use self::scope::FnScopes; | 16 | pub use self::scope::FnScopes; |
16 | 17 | ||
@@ -18,7 +19,7 @@ pub use self::scope::FnScopes;
18 | pub struct FnId(pub(crate) DefId); | 19 | pub struct FnId(pub(crate) DefId); |
19 | 20 | ||
20 | pub struct Function { | 21 | pub struct Function { |
21 | fn_id: FnId, | 22 | pub(crate) fn_id: FnId, |
22 | } | 23 | } |
23 | 24 | ||
24 | impl Function { | 25 | impl Function { |
@@ -27,6 +28,10 @@ impl Function {
27 | Function { fn_id } | 28 | Function { fn_id } |
28 | } | 29 | } |
29 | 30 | ||
31 | pub fn syntax(&self, db: &impl HirDatabase) -> ast::FnDefNode { | ||
32 | db.fn_syntax(self.fn_id) | ||
33 | } | ||
34 | |||
30 | pub fn scopes(&self, db: &impl HirDatabase) -> Arc<FnScopes> { | 35 | pub fn scopes(&self, db: &impl HirDatabase) -> Arc<FnScopes> { |
31 | db.fn_scopes(self.fn_id) | 36 | db.fn_scopes(self.fn_id) |
32 | } | 37 | } |
@@ -35,6 +40,15 @@ impl Function {
35 | let syntax = db.fn_syntax(self.fn_id); | 40 | let syntax = db.fn_syntax(self.fn_id); |
36 | FnSignatureInfo::new(syntax.borrowed()) | 41 | FnSignatureInfo::new(syntax.borrowed()) |
37 | } | 42 | } |
43 | |||
44 | pub fn infer(&self, db: &impl HirDatabase) -> Cancelable<Arc<InferenceResult>> { | ||
45 | db.infer(self.fn_id) | ||
46 | } | ||
47 | |||
48 | pub fn module(&self, db: &impl HirDatabase) -> Cancelable<Module> { | ||
49 | let loc = self.fn_id.0.loc(db); | ||
50 | Module::new(db, loc.source_root_id, loc.module_id) | ||
51 | } | ||
38 | } | 52 | } |
39 | 53 | ||
40 | #[derive(Debug, Clone)] | 54 | #[derive(Debug, Clone)] |
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index f56214b47..a0d99a84d 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -25,10 +25,11 @@ pub mod source_binder;
25 | mod krate; | 25 | mod krate; |
26 | mod module; | 26 | mod module; |
27 | mod function; | 27 | mod function; |
28 | mod ty; | ||
28 | 29 | ||
29 | use std::ops::Index; | 30 | use std::ops::Index; |
30 | 31 | ||
31 | use ra_syntax::{SyntaxNodeRef, SyntaxNode}; | 32 | use ra_syntax::{SyntaxNodeRef, SyntaxNode, SyntaxKind}; |
32 | use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable}; | 33 | use ra_db::{LocationIntener, SourceRootId, FileId, Cancelable}; |
33 | 34 | ||
34 | use crate::{ | 35 | use crate::{ |
@@ -66,6 +67,23 @@ pub struct DefLoc {
66 | source_item_id: SourceItemId, | 67 | source_item_id: SourceItemId, |
67 | } | 68 | } |
68 | 69 | ||
70 | impl DefKind { | ||
71 | pub(crate) fn for_syntax_kind(kind: SyntaxKind) -> Option<DefKind> { | ||
72 | match kind { | ||
73 | SyntaxKind::FN_DEF => Some(DefKind::Function), | ||
74 | SyntaxKind::MODULE => Some(DefKind::Module), | ||
75 | // These define items, but don't have their own DefKinds yet: | ||
76 | SyntaxKind::STRUCT_DEF => Some(DefKind::Item), | ||
77 | SyntaxKind::ENUM_DEF => Some(DefKind::Item), | ||
78 | SyntaxKind::TRAIT_DEF => Some(DefKind::Item), | ||
79 | SyntaxKind::TYPE_DEF => Some(DefKind::Item), | ||
80 | SyntaxKind::CONST_DEF => Some(DefKind::Item), | ||
81 | SyntaxKind::STATIC_DEF => Some(DefKind::Item), | ||
82 | _ => None, | ||
83 | } | ||
84 | } | ||
85 | } | ||
86 | |||
69 | impl DefId { | 87 | impl DefId { |
70 | pub(crate) fn loc(self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefLoc { | 88 | pub(crate) fn loc(self, db: &impl AsRef<LocationIntener<DefLoc, DefId>>) -> DefLoc { |
71 | db.as_ref().id2loc(self) | 89 | db.as_ref().id2loc(self) |
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
index 9423e6571..b5a997170 100644
--- a/crates/ra_hir/src/mock.rs
+++ b/crates/ra_hir/src/mock.rs
@@ -8,7 +8,7 @@ use test_utils::{parse_fixture, CURSOR_MARKER, extract_offset};
8 | 8 | ||
9 | use crate::{db, DefId, DefLoc}; | 9 | use crate::{db, DefId, DefLoc}; |
10 | 10 | ||
11 | const WORKSPACE: SourceRootId = SourceRootId(0); | 11 | pub const WORKSPACE: SourceRootId = SourceRootId(0); |
12 | 12 | ||
13 | #[derive(Debug)] | 13 | #[derive(Debug)] |
14 | pub(crate) struct MockDatabase { | 14 | pub(crate) struct MockDatabase { |
@@ -24,6 +24,15 @@ impl MockDatabase {
24 | (db, source_root) | 24 | (db, source_root) |
25 | } | 25 | } |
26 | 26 | ||
27 | pub(crate) fn with_single_file(text: &str) -> (MockDatabase, SourceRoot, FileId) { | ||
28 | let mut db = MockDatabase::default(); | ||
29 | let mut source_root = SourceRoot::default(); | ||
30 | let file_id = db.add_file(&mut source_root, "/main.rs", text); | ||
31 | db.query_mut(ra_db::SourceRootQuery) | ||
32 | .set(WORKSPACE, Arc::new(source_root.clone())); | ||
33 | (db, source_root, file_id) | ||
34 | } | ||
35 | |||
27 | pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) { | 36 | pub(crate) fn with_position(fixture: &str) -> (MockDatabase, FilePosition) { |
28 | let (db, _, position) = MockDatabase::from_fixture(fixture); | 37 | let (db, _, position) = MockDatabase::from_fixture(fixture); |
29 | let position = position.expect("expected a marker ( <|> )"); | 38 | let position = position.expect("expected a marker ( <|> )"); |
@@ -182,6 +191,8 @@ salsa::database_storage! {
182 | fn item_map() for db::ItemMapQuery; | 191 | fn item_map() for db::ItemMapQuery; |
183 | fn fn_syntax() for db::FnSyntaxQuery; | 192 | fn fn_syntax() for db::FnSyntaxQuery; |
184 | fn submodules() for db::SubmodulesQuery; | 193 | fn submodules() for db::SubmodulesQuery; |
194 | fn infer() for db::InferQuery; | ||
195 | fn type_for_def() for db::TypeForDefQuery; | ||
185 | } | 196 | } |
186 | } | 197 | } |
187 | } | 198 | } |
diff --git a/crates/ra_hir/src/module.rs b/crates/ra_hir/src/module.rs
index cd31e8cfe..891119953 100644
--- a/crates/ra_hir/src/module.rs
+++ b/crates/ra_hir/src/module.rs
@@ -2,6 +2,7 @@ pub(super) mod imp;
2 | pub(super) mod nameres; | 2 | pub(super) mod nameres; |
3 | 3 | ||
4 | use std::sync::Arc; | 4 | use std::sync::Arc; |
5 | use log; | ||
5 | 6 | ||
6 | use ra_syntax::{ | 7 | use ra_syntax::{ |
7 | algo::generate, | 8 | algo::generate, |
diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs
index 39e891cda..0b152a406 100644
--- a/crates/ra_hir/src/module/nameres.rs
+++ b/crates/ra_hir/src/module/nameres.rs
@@ -272,13 +272,13 @@ where
272 | } | 272 | } |
273 | } | 273 | } |
274 | } | 274 | } |
275 | // Populate explicitelly declared items, except modules | 275 | // Populate explicitly declared items, except modules |
276 | for item in input.items.iter() { | 276 | for item in input.items.iter() { |
277 | if item.kind == MODULE { | 277 | if item.kind == MODULE { |
278 | continue; | 278 | continue; |
279 | } | 279 | } |
280 | let def_loc = DefLoc { | 280 | let def_loc = DefLoc { |
281 | kind: DefKind::Item, | 281 | kind: DefKind::for_syntax_kind(item.kind).unwrap_or(DefKind::Item), |
282 | source_root_id: self.source_root, | 282 | source_root_id: self.source_root, |
283 | module_id, | 283 | module_id, |
284 | source_item_id: SourceItemId { | 284 | source_item_id: SourceItemId { |
diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs
index efaeb1525..b654af920 100644
--- a/crates/ra_hir/src/query_definitions.rs
+++ b/crates/ra_hir/src/query_definitions.rs
@@ -11,7 +11,7 @@ use ra_syntax::{
11 | use ra_db::{SourceRootId, FileId, Cancelable,}; | 11 | use ra_db::{SourceRootId, FileId, Cancelable,}; |
12 | 12 | ||
13 | use crate::{ | 13 | use crate::{ |
14 | SourceFileItems, SourceItemId, DefKind, | 14 | SourceFileItems, SourceItemId, DefKind, Function, DefId, |
15 | db::HirDatabase, | 15 | db::HirDatabase, |
16 | function::{FnScopes, FnId}, | 16 | function::{FnScopes, FnId}, |
17 | module::{ | 17 | module::{ |
@@ -19,6 +19,7 @@ use crate::{
19 | imp::Submodule, | 19 | imp::Submodule, |
20 | nameres::{InputModuleItems, ItemMap, Resolver}, | 20 | nameres::{InputModuleItems, ItemMap, Resolver}, |
21 | }, | 21 | }, |
22 | ty::{self, InferenceResult, Ty} | ||
22 | }; | 23 | }; |
23 | 24 | ||
24 | /// Resolve `FnId` to the corresponding `SyntaxNode` | 25 | /// Resolve `FnId` to the corresponding `SyntaxNode` |
@@ -35,6 +36,15 @@ pub(super) fn fn_scopes(db: &impl HirDatabase, fn_id: FnId) -> Arc<FnScopes> {
35 | Arc::new(res) | 36 | Arc::new(res) |
36 | } | 37 | } |
37 | 38 | ||
39 | pub(super) fn infer(db: &impl HirDatabase, fn_id: FnId) -> Cancelable<Arc<InferenceResult>> { | ||
40 | let function = Function { fn_id }; | ||
41 | ty::infer(db, function).map(Arc::new) | ||
42 | } | ||
43 | |||
44 | pub(super) fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> { | ||
45 | ty::type_for_def(db, def_id) | ||
46 | } | ||
47 | |||
38 | pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> { | 48 | pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> { |
39 | let mut res = SourceFileItems::new(file_id); | 49 | let mut res = SourceFileItems::new(file_id); |
40 | let source_file = db.source_file(file_id); | 50 | let source_file = db.source_file(file_id); |
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
new file mode 100644
index 000000000..c759d4c8b
--- /dev/null
+++ b/crates/ra_hir/src/ty.rs
@@ -0,0 +1,601 @@
1 | mod primitive; | ||
2 | #[cfg(test)] | ||
3 | mod tests; | ||
4 | |||
5 | use std::sync::Arc; | ||
6 | use std::fmt; | ||
7 | |||
8 | use log; | ||
9 | use rustc_hash::{FxHashMap}; | ||
10 | |||
11 | use ra_db::{LocalSyntaxPtr, Cancelable}; | ||
12 | use ra_syntax::{ | ||
13 | SmolStr, | ||
14 | ast::{self, AstNode, LoopBodyOwner, ArgListOwner}, | ||
15 | SyntaxNodeRef | ||
16 | }; | ||
17 | |||
18 | use crate::{Def, DefId, FnScopes, Module, Function, Path, db::HirDatabase}; | ||
19 | |||
20 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] | ||
21 | pub enum Ty { | ||
22 | /// The primitive boolean type. Written as `bool`. | ||
23 | Bool, | ||
24 | |||
25 | /// The primitive character type; holds a Unicode scalar value | ||
26 | /// (a non-surrogate code point). Written as `char`. | ||
27 | Char, | ||
28 | |||
29 | /// A primitive signed integer type. For example, `i32`. | ||
30 | Int(primitive::IntTy), | ||
31 | |||
32 | /// A primitive unsigned integer type. For example, `u32`. | ||
33 | Uint(primitive::UintTy), | ||
34 | |||
35 | /// A primitive floating-point type. For example, `f64`. | ||
36 | Float(primitive::FloatTy), | ||
37 | |||
38 | // Structures, enumerations and unions. | ||
39 | // Adt(AdtDef, Substs), | ||
40 | /// The pointee of a string slice. Written as `str`. | ||
41 | Str, | ||
42 | |||
43 | // An array with the given length. Written as `[T; n]`. | ||
44 | // Array(Ty, ty::Const), | ||
45 | /// The pointee of an array slice. Written as `[T]`. | ||
46 | Slice(TyRef), | ||
47 | |||
48 | // A raw pointer. Written as `*mut T` or `*const T` | ||
49 | // RawPtr(TypeAndMut<'tcx>), | ||
50 | |||
51 | // A reference; a pointer with an associated lifetime. Written as | ||
52 | // `&'a mut T` or `&'a T`. | ||
53 | // Ref(Ty<'tcx>, hir::Mutability), | ||
54 | /// A pointer to a function. Written as `fn() -> i32`. | ||
55 | /// | ||
56 | /// For example the type of `bar` here: | ||
57 | /// | ||
58 | /// ```rust | ||
59 | /// fn foo() -> i32 { 1 } | ||
60 | /// let bar: fn() -> i32 = foo; | ||
61 | /// ``` | ||
62 | FnPtr(Arc<FnSig>), | ||
63 | |||
64 | // A trait, defined with `dyn trait`. | ||
65 | // Dynamic(), | ||
66 | /// The anonymous type of a closure. Used to represent the type of | ||
67 | /// `|a| a`. | ||
68 | // Closure(DefId, ClosureSubsts<'tcx>), | ||
69 | |||
70 | /// The anonymous type of a generator. Used to represent the type of | ||
71 | /// `|a| yield a`. | ||
72 | // Generator(DefId, GeneratorSubsts<'tcx>, hir::GeneratorMovability), | ||
73 | |||
74 | /// A type representing the types stored inside a generator. | ||
75 | /// This should only appear in GeneratorInteriors. | ||
76 | // GeneratorWitness(Binder<&'tcx List<Ty<'tcx>>>), | ||
77 | |||
78 | /// The never type `!` | ||
79 | Never, | ||
80 | |||
81 | /// A tuple type. For example, `(i32, bool)`. | ||
82 | Tuple(Vec<Ty>), | ||
83 | |||
84 | // The projection of an associated type. For example, | ||
85 | // `<T as Trait<..>>::N`. | ||
86 | // Projection(ProjectionTy), | ||
87 | |||
88 | // Opaque (`impl Trait`) type found in a return type. | ||
89 | // The `DefId` comes either from | ||
90 | // * the `impl Trait` ast::Ty node, | ||
91 | // * or the `existential type` declaration | ||
92 | // The substitutions are for the generics of the function in question. | ||
93 | // Opaque(DefId, Substs), | ||
94 | |||
95 | // A type parameter; for example, `T` in `fn f<T>(x: T) {} | ||
96 | // Param(ParamTy), | ||
97 | |||
98 | // A placeholder type - universally quantified higher-ranked type. | ||
99 | // Placeholder(ty::PlaceholderType), | ||
100 | |||
101 | // A type variable used during type checking. | ||
102 | // Infer(InferTy), | ||
103 | /// A placeholder for a type which could not be computed; this is | ||
104 | /// propagated to avoid useless error messages. | ||
105 | Unknown, | ||
106 | } | ||
107 | |||
108 | type TyRef = Arc<Ty>; | ||
109 | |||
110 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] | ||
111 | pub struct FnSig { | ||
112 | input: Vec<Ty>, | ||
113 | output: Ty, | ||
114 | } | ||
115 | |||
116 | impl Ty { | ||
117 | pub fn new(_db: &impl HirDatabase, node: ast::TypeRef) -> Cancelable<Self> { | ||
118 | use ra_syntax::ast::TypeRef::*; | ||
119 | Ok(match node { | ||
120 | ParenType(_inner) => Ty::Unknown, // TODO | ||
121 | TupleType(_inner) => Ty::Unknown, // TODO | ||
122 | NeverType(..) => Ty::Never, | ||
123 | PathType(inner) => { | ||
124 | let path = if let Some(p) = inner.path() { | ||
125 | p | ||
126 | } else { | ||
127 | return Ok(Ty::Unknown); | ||
128 | }; | ||
129 | if path.qualifier().is_none() { | ||
130 | let name = path | ||
131 | .segment() | ||
132 | .and_then(|s| s.name_ref()) | ||
133 | .map(|n| n.text()) | ||
134 | .unwrap_or(SmolStr::new("")); | ||
135 | if let Some(int_ty) = primitive::IntTy::from_string(&name) { | ||
136 | Ty::Int(int_ty) | ||
137 | } else if let Some(uint_ty) = primitive::UintTy::from_string(&name) { | ||
138 | Ty::Uint(uint_ty) | ||
139 | } else if let Some(float_ty) = primitive::FloatTy::from_string(&name) { | ||
140 | Ty::Float(float_ty) | ||
141 | } else { | ||
142 | // TODO | ||
143 | Ty::Unknown | ||
144 | } | ||
145 | } else { | ||
146 | // TODO | ||
147 | Ty::Unknown | ||
148 | } | ||
149 | } | ||
150 | PointerType(_inner) => Ty::Unknown, // TODO | ||
151 | ArrayType(_inner) => Ty::Unknown, // TODO | ||
152 | SliceType(_inner) => Ty::Unknown, // TODO | ||
153 | ReferenceType(_inner) => Ty::Unknown, // TODO | ||
154 | PlaceholderType(_inner) => Ty::Unknown, // TODO | ||
155 | FnPointerType(_inner) => Ty::Unknown, // TODO | ||
156 | ForType(_inner) => Ty::Unknown, // TODO | ||
157 | ImplTraitType(_inner) => Ty::Unknown, // TODO | ||
158 | DynTraitType(_inner) => Ty::Unknown, // TODO | ||
159 | }) | ||
160 | } | ||
161 | |||
162 | pub fn unit() -> Self { | ||
163 | Ty::Tuple(Vec::new()) | ||
164 | } | ||
165 | } | ||
166 | |||
167 | impl fmt::Display for Ty { | ||
168 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
169 | match self { | ||
170 | Ty::Bool => write!(f, "bool"), | ||
171 | Ty::Char => write!(f, "char"), | ||
172 | Ty::Int(t) => write!(f, "{}", t.ty_to_string()), | ||
173 | Ty::Uint(t) => write!(f, "{}", t.ty_to_string()), | ||
174 | Ty::Float(t) => write!(f, "{}", t.ty_to_string()), | ||
175 | Ty::Str => write!(f, "str"), | ||
176 | Ty::Slice(t) => write!(f, "[{}]", t), | ||
177 | Ty::Never => write!(f, "!"), | ||
178 | Ty::Tuple(ts) => { | ||
179 | write!(f, "(")?; | ||
180 | for t in ts { | ||
181 | write!(f, "{},", t)?; | ||
182 | } | ||
183 | write!(f, ")") | ||
184 | } | ||
185 | Ty::FnPtr(sig) => { | ||
186 | write!(f, "fn(")?; | ||
187 | for t in &sig.input { | ||
188 | write!(f, "{},", t)?; | ||
189 | } | ||
190 | write!(f, ") -> {}", sig.output) | ||
191 | } | ||
192 | Ty::Unknown => write!(f, "[unknown]"), | ||
193 | } | ||
194 | } | ||
195 | } | ||
196 | |||
197 | pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable<Ty> { | ||
198 | let syntax = f.syntax(db); | ||
199 | let node = syntax.borrowed(); | ||
200 | // TODO we ignore type parameters for now | ||
201 | let input = node | ||
202 | .param_list() | ||
203 | .map(|pl| { | ||
204 | pl.params() | ||
205 | .map(|p| { | ||
206 | p.type_ref() | ||
207 | .map(|t| Ty::new(db, t)) | ||
208 | .unwrap_or(Ok(Ty::Unknown)) | ||
209 | }) | ||
210 | .collect() | ||
211 | }) | ||
212 | .unwrap_or_else(|| Ok(Vec::new()))?; | ||
213 | let output = node | ||
214 | .ret_type() | ||
215 | .and_then(|rt| rt.type_ref()) | ||
216 | .map(|t| Ty::new(db, t)) | ||
217 | .unwrap_or(Ok(Ty::Unknown))?; | ||
218 | let sig = FnSig { input, output }; | ||
219 | Ok(Ty::FnPtr(Arc::new(sig))) | ||
220 | } | ||
221 | |||
222 | // TODO this should probably be per namespace (i.e. types vs. values), since for | ||
223 | // a tuple struct `struct Foo(Bar)`, Foo has function type as a value, but | ||
224 | // defines the struct type Foo when used in the type namespace. rustc has a | ||
225 | // separate DefId for the constructor, but with the current DefId approach, that | ||
226 | // seems complicated. | ||
227 | pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> { | ||
228 | let def = def_id.resolve(db)?; | ||
229 | match def { | ||
230 | Def::Module(..) => { | ||
231 | log::debug!("trying to get type for module {:?}", def_id); | ||
232 | Ok(Ty::Unknown) | ||
233 | } | ||
234 | Def::Function(f) => type_for_fn(db, f), | ||
235 | Def::Item => { | ||
236 | log::debug!("trying to get type for item of unknown type {:?}", def_id); | ||
237 | Ok(Ty::Unknown) | ||
238 | } | ||
239 | } | ||
240 | } | ||
241 | |||
242 | #[derive(Clone, PartialEq, Eq, Debug)] | ||
243 | pub struct InferenceResult { | ||
244 | type_of: FxHashMap<LocalSyntaxPtr, Ty>, | ||
245 | } | ||
246 | |||
247 | impl InferenceResult { | ||
248 | pub fn type_of_node(&self, node: SyntaxNodeRef) -> Option<Ty> { | ||
249 | self.type_of.get(&LocalSyntaxPtr::new(node)).cloned() | ||
250 | } | ||
251 | } | ||
252 | |||
253 | #[derive(Clone, Debug)] | ||
254 | pub struct InferenceContext<'a, D: HirDatabase> { | ||
255 | db: &'a D, | ||
256 | scopes: Arc<FnScopes>, | ||
257 | module: Module, | ||
258 | // TODO unification tables... | ||
259 | type_of: FxHashMap<LocalSyntaxPtr, Ty>, | ||
260 | } | ||
261 | |||
262 | impl<'a, D: HirDatabase> InferenceContext<'a, D> { | ||
263 | fn new(db: &'a D, scopes: Arc<FnScopes>, module: Module) -> Self { | ||
264 | InferenceContext { | ||
265 | type_of: FxHashMap::default(), | ||
266 | db, | ||
267 | scopes, | ||
268 | module, | ||
269 | } | ||
270 | } | ||
271 | |||
272 | fn write_ty(&mut self, node: SyntaxNodeRef, ty: Ty) { | ||
273 | self.type_of.insert(LocalSyntaxPtr::new(node), ty); | ||
274 | } | ||
275 | |||
276 | fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> Option<Ty> { | ||
277 | if *ty1 == Ty::Unknown { | ||
278 | return Some(ty2.clone()); | ||
279 | } | ||
280 | if *ty2 == Ty::Unknown { | ||
281 | return Some(ty1.clone()); | ||
282 | } | ||
283 | if ty1 == ty2 { | ||
284 | return Some(ty1.clone()); | ||
285 | } | ||
286 | // TODO implement actual unification | ||
287 | return None; | ||
288 | } | ||
289 | |||
290 | fn unify_with_coercion(&mut self, ty1: &Ty, ty2: &Ty) -> Option<Ty> { | ||
291 | // TODO implement coercion | ||
292 | self.unify(ty1, ty2) | ||
293 | } | ||
294 | |||
295 | fn infer_path_expr(&mut self, expr: ast::PathExpr) -> Cancelable<Option<Ty>> { | ||
296 | let ast_path = ctry!(expr.path()); | ||
297 | let path = ctry!(Path::from_ast(ast_path)); | ||
298 | if path.is_ident() { | ||
299 | // resolve locally | ||
300 | let name = ctry!(ast_path.segment().and_then(|s| s.name_ref())); | ||
301 | if let Some(scope_entry) = self.scopes.resolve_local_name(name) { | ||
302 | let ty = ctry!(self.type_of.get(&scope_entry.ptr())); | ||
303 | return Ok(Some(ty.clone())); | ||
304 | }; | ||
305 | }; | ||
306 | |||
307 | // resolve in module | ||
308 | let resolved = ctry!(self.module.resolve_path(self.db, path)?); | ||
309 | let ty = self.db.type_for_def(resolved)?; | ||
310 | // TODO we will need to add type variables for type parameters etc. here | ||
311 | Ok(Some(ty)) | ||
312 | } | ||
313 | |||
314 | fn infer_expr(&mut self, expr: ast::Expr) -> Cancelable<Ty> { | ||
315 | let ty = match expr { | ||
316 | ast::Expr::IfExpr(e) => { | ||
317 | if let Some(condition) = e.condition() { | ||
318 | if let Some(e) = condition.expr() { | ||
319 | // TODO if no pat, this should be bool | ||
320 | self.infer_expr(e)?; | ||
321 | } | ||
322 | // TODO write type for pat | ||
323 | }; | ||
324 | let if_ty = if let Some(block) = e.then_branch() { | ||
325 | self.infer_block(block)? | ||
326 | } else { | ||
327 | Ty::Unknown | ||
328 | }; | ||
329 | let else_ty = if let Some(block) = e.else_branch() { | ||
330 | self.infer_block(block)? | ||
331 | } else { | ||
332 | Ty::Unknown | ||
333 | }; | ||
334 | if let Some(ty) = self.unify(&if_ty, &else_ty) { | ||
335 | ty | ||
336 | } else { | ||
337 | // TODO report diagnostic | ||
338 | Ty::Unknown | ||
339 | } | ||
340 | } | ||
341 | ast::Expr::BlockExpr(e) => { | ||
342 | if let Some(block) = e.block() { | ||
343 | self.infer_block(block)? | ||
344 | } else { | ||
345 | Ty::Unknown | ||
346 | } | ||
347 | } | ||
348 | ast::Expr::LoopExpr(e) => { | ||
349 | if let Some(block) = e.loop_body() { | ||
350 | self.infer_block(block)?; | ||
351 | }; | ||
352 | // TODO never, or the type of the break param | ||
353 | Ty::Unknown | ||
354 | } | ||
355 | ast::Expr::WhileExpr(e) => { | ||
356 | if let Some(condition) = e.condition() { | ||
357 | if let Some(e) = condition.expr() { | ||
358 | // TODO if no pat, this should be bool | ||
359 | self.infer_expr(e)?; | ||
360 | } | ||
361 | // TODO write type for pat | ||
362 | }; | ||
363 | if let Some(block) = e.loop_body() { | ||
364 | // TODO | ||
365 | self.infer_block(block)?; | ||
366 | }; | ||
367 | // TODO always unit? | ||
368 | Ty::Unknown | ||
369 | } | ||
370 | ast::Expr::ForExpr(e) => { | ||
371 | if let Some(expr) = e.iterable() { | ||
372 | self.infer_expr(expr)?; | ||
373 | } | ||
374 | if let Some(_pat) = e.pat() { | ||
375 | // TODO write type for pat | ||
376 | } | ||
377 | if let Some(block) = e.loop_body() { | ||
378 | self.infer_block(block)?; | ||
379 | } | ||
380 | // TODO always unit? | ||
381 | Ty::Unknown | ||
382 | } | ||
383 | ast::Expr::LambdaExpr(e) => { | ||
384 | let _body_ty = if let Some(body) = e.body() { | ||
385 | self.infer_expr(body)? | ||
386 | } else { | ||
387 | Ty::Unknown | ||
388 | }; | ||
389 | Ty::Unknown | ||
390 | } | ||
391 | ast::Expr::CallExpr(e) => { | ||
392 | let callee_ty = if let Some(e) = e.expr() { | ||
393 | self.infer_expr(e)? | ||
394 | } else { | ||
395 | Ty::Unknown | ||
396 | }; | ||
397 | if let Some(arg_list) = e.arg_list() { | ||
398 | for arg in arg_list.args() { | ||
399 | // TODO unify / expect argument type | ||
400 | self.infer_expr(arg)?; | ||
401 | } | ||
402 | } | ||
403 | match callee_ty { | ||
404 | Ty::FnPtr(sig) => sig.output.clone(), | ||
405 | _ => { | ||
406 | // not callable | ||
407 | // TODO report an error? | ||
408 | Ty::Unknown | ||
409 | } | ||
410 | } | ||
411 | } | ||
412 | ast::Expr::MethodCallExpr(e) => { | ||
413 | let _receiver_ty = if let Some(e) = e.expr() { | ||
414 | self.infer_expr(e)? | ||
415 | } else { | ||
416 | Ty::Unknown | ||
417 | }; | ||
418 | if let Some(arg_list) = e.arg_list() { | ||
419 | for arg in arg_list.args() { | ||
420 | // TODO unify / expect argument type | ||
421 | self.infer_expr(arg)?; | ||
422 | } | ||
423 | } | ||
424 | Ty::Unknown | ||
425 | } | ||
426 | ast::Expr::MatchExpr(e) => { | ||
427 | let _ty = if let Some(match_expr) = e.expr() { | ||
428 | self.infer_expr(match_expr)? | ||
429 | } else { | ||
430 | Ty::Unknown | ||
431 | }; | ||
432 | if let Some(match_arm_list) = e.match_arm_list() { | ||
433 | for arm in match_arm_list.arms() { | ||
434 | // TODO type the bindings in pat | ||
435 | // TODO type the guard | ||
436 | let _ty = if let Some(e) = arm.expr() { | ||
437 | self.infer_expr(e)? | ||
438 | } else { | ||
439 | Ty::Unknown | ||
440 | }; | ||
441 | } | ||
442 | // TODO unify all the match arm types | ||
443 | Ty::Unknown | ||
444 | } else { | ||
445 | Ty::Unknown | ||
446 | } | ||
447 | } | ||
448 | ast::Expr::TupleExpr(_e) => Ty::Unknown, | ||
449 | ast::Expr::ArrayExpr(_e) => Ty::Unknown, | ||
450 | ast::Expr::PathExpr(e) => self.infer_path_expr(e)?.unwrap_or(Ty::Unknown), | ||
451 | ast::Expr::ContinueExpr(_e) => Ty::Never, | ||
452 | ast::Expr::BreakExpr(_e) => Ty::Never, | ||
453 | ast::Expr::ParenExpr(e) => { | ||
454 | if let Some(e) = e.expr() { | ||
455 | self.infer_expr(e)? | ||
456 | } else { | ||
457 | Ty::Unknown | ||
458 | } | ||
459 | } | ||
460 | ast::Expr::Label(_e) => Ty::Unknown, | ||
461 | ast::Expr::ReturnExpr(e) => { | ||
462 | if let Some(e) = e.expr() { | ||
463 | // TODO unify with return type | ||
464 | self.infer_expr(e)?; | ||
465 | }; | ||
466 | Ty::Never | ||
467 | } | ||
468 | ast::Expr::MatchArmList(_) | ast::Expr::MatchArm(_) | ast::Expr::MatchGuard(_) => { | ||
469 | // Can this even occur outside of a match expression? | ||
470 | Ty::Unknown | ||
471 | } | ||
472 | ast::Expr::StructLit(_e) => Ty::Unknown, | ||
473 | ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => { | ||
474 | // Can this even occur outside of a struct literal? | ||
475 | Ty::Unknown | ||
476 | } | ||
477 | ast::Expr::IndexExpr(_e) => Ty::Unknown, | ||
478 | ast::Expr::FieldExpr(_e) => Ty::Unknown, | ||
479 | ast::Expr::TryExpr(e) => { | ||
480 | let _inner_ty = if let Some(e) = e.expr() { | ||
481 | self.infer_expr(e)? | ||
482 | } else { | ||
483 | Ty::Unknown | ||
484 | }; | ||
485 | Ty::Unknown | ||
486 | } | ||
487 | ast::Expr::CastExpr(e) => { | ||
488 | let _inner_ty = if let Some(e) = e.expr() { | ||
489 | self.infer_expr(e)? | ||
490 | } else { | ||
491 | Ty::Unknown | ||
492 | }; | ||
493 | let cast_ty = e | ||
494 | .type_ref() | ||
495 | .map(|t| Ty::new(self.db, t)) | ||
496 | .unwrap_or(Ok(Ty::Unknown))?; | ||
497 | // TODO do the coercion... | ||
498 | cast_ty | ||
499 | } | ||
500 | ast::Expr::RefExpr(e) => { | ||
501 | let _inner_ty = if let Some(e) = e.expr() { | ||
502 | self.infer_expr(e)? | ||
503 | } else { | ||
504 | Ty::Unknown | ||
505 | }; | ||
506 | Ty::Unknown | ||
507 | } | ||
508 | ast::Expr::PrefixExpr(e) => { | ||
509 | let _inner_ty = if let Some(e) = e.expr() { | ||
510 | self.infer_expr(e)? | ||
511 | } else { | ||
512 | Ty::Unknown | ||
513 | }; | ||
514 | Ty::Unknown | ||
515 | } | ||
516 | ast::Expr::RangeExpr(_e) => Ty::Unknown, | ||
517 | ast::Expr::BinExpr(_e) => Ty::Unknown, | ||
518 | ast::Expr::Literal(_e) => Ty::Unknown, | ||
519 | }; | ||
520 | self.write_ty(expr.syntax(), ty.clone()); | ||
521 | Ok(ty) | ||
522 | } | ||
523 | |||
524 | fn infer_block(&mut self, node: ast::Block) -> Cancelable<Ty> { | ||
525 | for stmt in node.statements() { | ||
526 | match stmt { | ||
527 | ast::Stmt::LetStmt(stmt) => { | ||
528 | let decl_ty = if let Some(type_ref) = stmt.type_ref() { | ||
529 | Ty::new(self.db, type_ref)? | ||
530 | } else { | ||
531 | Ty::Unknown | ||
532 | }; | ||
533 | let ty = if let Some(expr) = stmt.initializer() { | ||
534 | // TODO pass expectation | ||
535 | let expr_ty = self.infer_expr(expr)?; | ||
536 | self.unify_with_coercion(&expr_ty, &decl_ty) | ||
537 | .unwrap_or(decl_ty) | ||
538 | } else { | ||
539 | decl_ty | ||
540 | }; | ||
541 | |||
542 | if let Some(pat) = stmt.pat() { | ||
543 | self.write_ty(pat.syntax(), ty); | ||
544 | }; | ||
545 | } | ||
546 | ast::Stmt::ExprStmt(expr_stmt) => { | ||
547 | if let Some(expr) = expr_stmt.expr() { | ||
548 | self.infer_expr(expr)?; | ||
549 | } | ||
550 | } | ||
551 | } | ||
552 | } | ||
553 | let ty = if let Some(expr) = node.expr() { | ||
554 | self.infer_expr(expr)? | ||
555 | } else { | ||
556 | Ty::unit() | ||
557 | }; | ||
558 | self.write_ty(node.syntax(), ty.clone()); | ||
559 | Ok(ty) | ||
560 | } | ||
561 | } | ||
562 | |||
563 | pub fn infer(db: &impl HirDatabase, function: Function) -> Cancelable<InferenceResult> { | ||
564 | let scopes = function.scopes(db); | ||
565 | let module = function.module(db)?; | ||
566 | let mut ctx = InferenceContext::new(db, scopes, module); | ||
567 | |||
568 | let syntax = function.syntax(db); | ||
569 | let node = syntax.borrowed(); | ||
570 | |||
571 | if let Some(param_list) = node.param_list() { | ||
572 | for param in param_list.params() { | ||
573 | let pat = if let Some(pat) = param.pat() { | ||
574 | pat | ||
575 | } else { | ||
576 | continue; | ||
577 | }; | ||
578 | if let Some(type_ref) = param.type_ref() { | ||
579 | let ty = Ty::new(db, type_ref)?; | ||
580 | ctx.type_of.insert(LocalSyntaxPtr::new(pat.syntax()), ty); | ||
581 | } else { | ||
582 | // TODO self param | ||
583 | ctx.type_of | ||
584 | .insert(LocalSyntaxPtr::new(pat.syntax()), Ty::Unknown); | ||
585 | }; | ||
586 | } | ||
587 | } | ||
588 | |||
589 | // TODO get Ty for node.ret_type() and pass that to infer_block as expectation | ||
590 | // (see Expectation in rustc_typeck) | ||
591 | |||
592 | if let Some(block) = node.body() { | ||
593 | ctx.infer_block(block)?; | ||
594 | } | ||
595 | |||
596 | // TODO 'resolve' the types: replace inference variables by their inferred results | ||
597 | |||
598 | Ok(InferenceResult { | ||
599 | type_of: ctx.type_of, | ||
600 | }) | ||
601 | } | ||
diff --git a/crates/ra_hir/src/ty/primitive.rs b/crates/ra_hir/src/ty/primitive.rs
new file mode 100644
index 000000000..ad79b17e4
--- /dev/null
+++ b/crates/ra_hir/src/ty/primitive.rs
@@ -0,0 +1,130 @@
1 | use std::fmt; | ||
2 | |||
3 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] | ||
4 | pub enum IntTy { | ||
5 | Isize, | ||
6 | I8, | ||
7 | I16, | ||
8 | I32, | ||
9 | I64, | ||
10 | I128, | ||
11 | } | ||
12 | |||
13 | impl fmt::Debug for IntTy { | ||
14 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
15 | fmt::Display::fmt(self, f) | ||
16 | } | ||
17 | } | ||
18 | |||
19 | impl fmt::Display for IntTy { | ||
20 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
21 | write!(f, "{}", self.ty_to_string()) | ||
22 | } | ||
23 | } | ||
24 | |||
25 | impl IntTy { | ||
26 | pub fn ty_to_string(&self) -> &'static str { | ||
27 | match *self { | ||
28 | IntTy::Isize => "isize", | ||
29 | IntTy::I8 => "i8", | ||
30 | IntTy::I16 => "i16", | ||
31 | IntTy::I32 => "i32", | ||
32 | IntTy::I64 => "i64", | ||
33 | IntTy::I128 => "i128", | ||
34 | } | ||
35 | } | ||
36 | |||
37 | pub fn from_string(s: &str) -> Option<IntTy> { | ||
38 | match s { | ||
39 | "isize" => Some(IntTy::Isize), | ||
40 | "i8" => Some(IntTy::I8), | ||
41 | "i16" => Some(IntTy::I16), | ||
42 | "i32" => Some(IntTy::I32), | ||
43 | "i64" => Some(IntTy::I64), | ||
44 | "i128" => Some(IntTy::I128), | ||
45 | _ => None, | ||
46 | } | ||
47 | } | ||
48 | } | ||
49 | |||
50 | #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Copy)] | ||
51 | pub enum UintTy { | ||
52 | Usize, | ||
53 | U8, | ||
54 | U16, | ||
55 | U32, | ||
56 | U64, | ||
57 | U128, | ||
58 | } | ||
59 | |||
60 | impl UintTy { | ||
61 | pub fn ty_to_string(&self) -> &'static str { | ||
62 | match *self { | ||
63 | UintTy::Usize => "usize", | ||
64 | UintTy::U8 => "u8", | ||
65 | UintTy::U16 => "u16", | ||
66 | UintTy::U32 => "u32", | ||
67 | UintTy::U64 => "u64", | ||
68 | UintTy::U128 => "u128", | ||
69 | } | ||
70 | } | ||
71 | |||
72 | pub fn from_string(s: &str) -> Option<UintTy> { | ||
73 | match s { | ||
74 | "usize" => Some(UintTy::Usize), | ||
75 | "u8" => Some(UintTy::U8), | ||
76 | "u16" => Some(UintTy::U16), | ||
77 | "u32" => Some(UintTy::U32), | ||
78 | "u64" => Some(UintTy::U64), | ||
79 | "u128" => Some(UintTy::U128), | ||
80 | _ => None, | ||
81 | } | ||
82 | } | ||
83 | } | ||
84 | |||
85 | impl fmt::Debug for UintTy { | ||
86 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
87 | fmt::Display::fmt(self, f) | ||
88 | } | ||
89 | } | ||
90 | |||
91 | impl fmt::Display for UintTy { | ||
92 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
93 | write!(f, "{}", self.ty_to_string()) | ||
94 | } | ||
95 | } | ||
96 | |||
97 | #[derive(Clone, PartialEq, Eq, Hash, Copy, PartialOrd, Ord)] | ||
98 | pub enum FloatTy { | ||
99 | F32, | ||
100 | F64, | ||
101 | } | ||
102 | |||
103 | impl fmt::Debug for FloatTy { | ||
104 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
105 | fmt::Display::fmt(self, f) | ||
106 | } | ||
107 | } | ||
108 | |||
109 | impl fmt::Display for FloatTy { | ||
110 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
111 | write!(f, "{}", self.ty_to_string()) | ||
112 | } | ||
113 | } | ||
114 | |||
115 | impl FloatTy { | ||
116 | pub fn ty_to_string(self) -> &'static str { | ||
117 | match self { | ||
118 | FloatTy::F32 => "f32", | ||
119 | FloatTy::F64 => "f64", | ||
120 | } | ||
121 | } | ||
122 | |||
123 | pub fn from_string(s: &str) -> Option<FloatTy> { | ||
124 | match s { | ||
125 | "f32" => Some(FloatTy::F32), | ||
126 | "f64" => Some(FloatTy::F64), | ||
127 | _ => None, | ||
128 | } | ||
129 | } | ||
130 | } | ||
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
new file mode 100644
index 000000000..b6c02cd80
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -0,0 +1,134 @@
1 | use std::fmt::Write; | ||
2 | use std::path::{PathBuf, Path}; | ||
3 | use std::fs; | ||
4 | |||
5 | use ra_db::{SyntaxDatabase}; | ||
6 | use ra_syntax::ast::{self, AstNode}; | ||
7 | use test_utils::{project_dir, assert_eq_text, read_text}; | ||
8 | |||
9 | use crate::{ | ||
10 | source_binder, | ||
11 | mock::MockDatabase, | ||
12 | }; | ||
13 | |||
14 | // These tests compare the inference results for all expressions in a file | ||
15 | // against snapshots of the current results. If you change something and these | ||
16 | // tests fail expectedly, you can update the comparison files by deleting them | ||
17 | // and running the tests again. Similarly, to add a new test, just write the | ||
18 | // test here in the same pattern and it will automatically write the snapshot. | ||
19 | |||
20 | #[test] | ||
21 | fn infer_basics() { | ||
22 | check_inference( | ||
23 | r#" | ||
24 | fn test(a: u32, b: isize, c: !, d: &str) { | ||
25 | a; | ||
26 | b; | ||
27 | c; | ||
28 | d; | ||
29 | 1usize; | ||
30 | 1isize; | ||
31 | "test"; | ||
32 | 1.0f32; | ||
33 | }"#, | ||
34 | "0001_basics.txt", | ||
35 | ); | ||
36 | } | ||
37 | |||
38 | #[test] | ||
39 | fn infer_let() { | ||
40 | check_inference( | ||
41 | r#" | ||
42 | fn test() { | ||
43 | let a = 1isize; | ||
44 | let b: usize = 1; | ||
45 | let c = b; | ||
46 | } | ||
47 | }"#, | ||
48 | "0002_let.txt", | ||
49 | ); | ||
50 | } | ||
51 | |||
52 | #[test] | ||
53 | fn infer_paths() { | ||
54 | check_inference( | ||
55 | r#" | ||
56 | fn a() -> u32 { 1 } | ||
57 | |||
58 | mod b { | ||
59 | fn c() -> u32 { 1 } | ||
60 | } | ||
61 | |||
62 | fn test() { | ||
63 | a(); | ||
64 | b::c(); | ||
65 | } | ||
66 | }"#, | ||
67 | "0003_paths.txt", | ||
68 | ); | ||
69 | } | ||
70 | |||
71 | fn infer(content: &str) -> String { | ||
72 | let (db, _, file_id) = MockDatabase::with_single_file(content); | ||
73 | let source_file = db.source_file(file_id); | ||
74 | let mut acc = String::new(); | ||
75 | for fn_def in source_file | ||
76 | .syntax() | ||
77 | .descendants() | ||
78 | .filter_map(ast::FnDef::cast) | ||
79 | { | ||
80 | let func = source_binder::function_from_source(&db, file_id, fn_def) | ||
81 | .unwrap() | ||
82 | .unwrap(); | ||
83 | let inference_result = func.infer(&db).unwrap(); | ||
84 | for (syntax_ptr, ty) in &inference_result.type_of { | ||
85 | let node = syntax_ptr.resolve(&source_file); | ||
86 | write!( | ||
87 | acc, | ||
88 | "{} '{}': {}\n", | ||
89 | syntax_ptr.range(), | ||
90 | ellipsize(node.text().to_string().replace("\n", " "), 15), | ||
91 | ty | ||
92 | ) | ||
93 | .unwrap(); | ||
94 | } | ||
95 | } | ||
96 | acc | ||
97 | } | ||
98 | |||
99 | fn check_inference(content: &str, data_file: impl AsRef<Path>) { | ||
100 | let data_file_path = test_data_dir().join(data_file); | ||
101 | let result = infer(content); | ||
102 | |||
103 | if !data_file_path.exists() { | ||
104 | println!("File with expected result doesn't exist, creating...\n"); | ||
105 | println!("{}\n{}", content, result); | ||
106 | fs::write(&data_file_path, &result).unwrap(); | ||
107 | panic!("File {:?} with expected result was created", data_file_path); | ||
108 | } | ||
109 | |||
110 | let expected = read_text(&data_file_path); | ||
111 | assert_eq_text!(&expected, &result); | ||
112 | } | ||
113 | |||
114 | fn ellipsize(mut text: String, max_len: usize) -> String { | ||
115 | if text.len() <= max_len { | ||
116 | return text; | ||
117 | } | ||
118 | let ellipsis = "..."; | ||
119 | let e_len = ellipsis.len(); | ||
120 | let mut prefix_len = (max_len - e_len) / 2; | ||
121 | while !text.is_char_boundary(prefix_len) { | ||
122 | prefix_len += 1; | ||
123 | } | ||
124 | let mut suffix_len = max_len - e_len - prefix_len; | ||
125 | while !text.is_char_boundary(text.len() - suffix_len) { | ||
126 | suffix_len += 1; | ||
127 | } | ||
128 | text.replace_range(prefix_len..text.len() - suffix_len, ellipsis); | ||
129 | text | ||
130 | } | ||
131 | |||
132 | fn test_data_dir() -> PathBuf { | ||
133 | project_dir().join("crates/ra_hir/src/ty/tests/data") | ||
134 | } | ||
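A sketch of the snapshot workflow described at the top of `ty/tests.rs`: a new inference test is just another fixture plus a data-file name, and a missing `.txt` file is written on the first run so it can be reviewed and committed. The test name, fixture, and file name below are hypothetical, not part of this patch:

```rust
#[test]
fn infer_block_tail() {
    // Hypothetical extra test following the pattern above; on first run it
    // would create crates/ra_hir/src/ty/tests/data/0004_block_tail.txt,
    // print the generated snapshot, and panic so it can be inspected.
    check_inference(
        r#"
fn test() {
    let a = { 1isize };
    a;
}"#,
        "0004_block_tail.txt",
    );
}
```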
diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.txt b/crates/ra_hir/src/ty/tests/data/0001_basics.txt
new file mode 100644
index 000000000..0c46f243a
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0001_basics.txt
@@ -0,0 +1,13 @@
1 | [33; 34) 'd': [unknown] | ||
2 | [88; 94) '1isize': [unknown] | ||
3 | [48; 49) 'a': u32 | ||
4 | [55; 56) 'b': isize | ||
5 | [112; 118) '1.0f32': [unknown] | ||
6 | [76; 82) '1usize': [unknown] | ||
7 | [9; 10) 'a': u32 | ||
8 | [27; 28) 'c': ! | ||
9 | [62; 63) 'c': ! | ||
10 | [17; 18) 'b': isize | ||
11 | [100; 106) '"test"': [unknown] | ||
12 | [42; 121) '{ ...f32; }': () | ||
13 | [69; 70) 'd': [unknown] | ||
diff --git a/crates/ra_hir/src/ty/tests/data/0002_let.txt b/crates/ra_hir/src/ty/tests/data/0002_let.txt
new file mode 100644
index 000000000..2d0d1f57b
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0002_let.txt
@@ -0,0 +1,7 @@
1 | [21; 22) 'a': [unknown] | ||
2 | [52; 53) '1': [unknown] | ||
3 | [11; 71) '{ ...= b; }': () | ||
4 | [63; 64) 'c': usize | ||
5 | [25; 31) '1isize': [unknown] | ||
6 | [41; 42) 'b': usize | ||
7 | [67; 68) 'b': usize | ||
diff --git a/crates/ra_hir/src/ty/tests/data/0003_paths.txt b/crates/ra_hir/src/ty/tests/data/0003_paths.txt
new file mode 100644
index 000000000..dcb5456ae
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0003_paths.txt
@@ -0,0 +1,9 @@
1 | [15; 20) '{ 1 }': [unknown] | ||
2 | [17; 18) '1': [unknown] | ||
3 | [50; 51) '1': [unknown] | ||
4 | [48; 53) '{ 1 }': [unknown] | ||
5 | [82; 88) 'b::c()': u32 | ||
6 | [67; 91) '{ ...c(); }': () | ||
7 | [73; 74) 'a': fn() -> u32 | ||
8 | [73; 76) 'a()': u32 | ||
9 | [82; 86) 'b::c': fn() -> u32 | ||
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index bf056131e..c73533861 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -523,7 +523,15 @@ impl<R: TreeRoot<RaTypes>> CastExprNode<R> {
523 | } | 523 | } |
524 | 524 | ||
525 | 525 | ||
526 | impl<'a> CastExpr<'a> {} | 526 | impl<'a> CastExpr<'a> { |
527 | pub fn expr(self) -> Option<Expr<'a>> { | ||
528 | super::child_opt(self) | ||
529 | } | ||
530 | |||
531 | pub fn type_ref(self) -> Option<TypeRef<'a>> { | ||
532 | super::child_opt(self) | ||
533 | } | ||
534 | } | ||
527 | 535 | ||
528 | // Char | 536 | // Char |
529 | #[derive(Debug, Clone, Copy,)] | 537 | #[derive(Debug, Clone, Copy,)] |
@@ -1553,6 +1561,10 @@ impl<'a> LetStmt<'a> {
1553 | super::child_opt(self) | 1561 | super::child_opt(self) |
1554 | } | 1562 | } |
1555 | 1563 | ||
1564 | pub fn type_ref(self) -> Option<TypeRef<'a>> { | ||
1565 | super::child_opt(self) | ||
1566 | } | ||
1567 | |||
1556 | pub fn initializer(self) -> Option<Expr<'a>> { | 1568 | pub fn initializer(self) -> Option<Expr<'a>> { |
1557 | super::child_opt(self) | 1569 | super::child_opt(self) |
1558 | } | 1570 | } |
@@ -2312,6 +2324,10 @@ impl<'a> Param<'a> {
2312 | pub fn pat(self) -> Option<Pat<'a>> { | 2324 | pub fn pat(self) -> Option<Pat<'a>> { |
2313 | super::child_opt(self) | 2325 | super::child_opt(self) |
2314 | } | 2326 | } |
2327 | |||
2328 | pub fn type_ref(self) -> Option<TypeRef<'a>> { | ||
2329 | super::child_opt(self) | ||
2330 | } | ||
2315 | } | 2331 | } |
2316 | 2332 | ||
2317 | // ParamList | 2333 | // ParamList |
@@ -2394,7 +2410,11 @@ impl<R: TreeRoot<RaTypes>> ParenExprNode<R> {
2394 | } | 2410 | } |
2395 | 2411 | ||
2396 | 2412 | ||
2397 | impl<'a> ParenExpr<'a> {} | 2413 | impl<'a> ParenExpr<'a> { |
2414 | pub fn expr(self) -> Option<Expr<'a>> { | ||
2415 | super::child_opt(self) | ||
2416 | } | ||
2417 | } | ||
2398 | 2418 | ||
2399 | // ParenType | 2419 | // ParenType |
2400 | #[derive(Debug, Clone, Copy,)] | 2420 | #[derive(Debug, Clone, Copy,)] |
@@ -2681,7 +2701,11 @@ impl<R: TreeRoot<RaTypes>> PathTypeNode<R> {
2681 | } | 2701 | } |
2682 | 2702 | ||
2683 | 2703 | ||
2684 | impl<'a> PathType<'a> {} | 2704 | impl<'a> PathType<'a> { |
2705 | pub fn path(self) -> Option<Path<'a>> { | ||
2706 | super::child_opt(self) | ||
2707 | } | ||
2708 | } | ||
2685 | 2709 | ||
2686 | // PlaceholderPat | 2710 | // PlaceholderPat |
2687 | #[derive(Debug, Clone, Copy,)] | 2711 | #[derive(Debug, Clone, Copy,)] |
@@ -2829,7 +2853,11 @@ impl<R: TreeRoot<RaTypes>> PrefixExprNode<R> {
2829 | } | 2853 | } |
2830 | 2854 | ||
2831 | 2855 | ||
2832 | impl<'a> PrefixExpr<'a> {} | 2856 | impl<'a> PrefixExpr<'a> { |
2857 | pub fn expr(self) -> Option<Expr<'a>> { | ||
2858 | super::child_opt(self) | ||
2859 | } | ||
2860 | } | ||
2833 | 2861 | ||
2834 | // RangeExpr | 2862 | // RangeExpr |
2835 | #[derive(Debug, Clone, Copy,)] | 2863 | #[derive(Debug, Clone, Copy,)] |
@@ -2940,7 +2968,11 @@ impl<R: TreeRoot<RaTypes>> RefExprNode<R> {
2940 | } | 2968 | } |
2941 | 2969 | ||
2942 | 2970 | ||
2943 | impl<'a> RefExpr<'a> {} | 2971 | impl<'a> RefExpr<'a> { |
2972 | pub fn expr(self) -> Option<Expr<'a>> { | ||
2973 | super::child_opt(self) | ||
2974 | } | ||
2975 | } | ||
2944 | 2976 | ||
2945 | // RefPat | 2977 | // RefPat |
2946 | #[derive(Debug, Clone, Copy,)] | 2978 | #[derive(Debug, Clone, Copy,)] |
@@ -3051,7 +3083,11 @@ impl<R: TreeRoot<RaTypes>> RetTypeNode<R> {
3051 | } | 3083 | } |
3052 | 3084 | ||
3053 | 3085 | ||
3054 | impl<'a> RetType<'a> {} | 3086 | impl<'a> RetType<'a> { |
3087 | pub fn type_ref(self) -> Option<TypeRef<'a>> { | ||
3088 | super::child_opt(self) | ||
3089 | } | ||
3090 | } | ||
3055 | 3091 | ||
3056 | // ReturnExpr | 3092 | // ReturnExpr |
3057 | #[derive(Debug, Clone, Copy,)] | 3093 | #[derive(Debug, Clone, Copy,)] |
@@ -3088,7 +3124,11 @@ impl<R: TreeRoot<RaTypes>> ReturnExprNode<R> {
3088 | } | 3124 | } |
3089 | 3125 | ||
3090 | 3126 | ||
3091 | impl<'a> ReturnExpr<'a> {} | 3127 | impl<'a> ReturnExpr<'a> { |
3128 | pub fn expr(self) -> Option<Expr<'a>> { | ||
3129 | super::child_opt(self) | ||
3130 | } | ||
3131 | } | ||
3092 | 3132 | ||
3093 | // SelfParam | 3133 | // SelfParam |
3094 | #[derive(Debug, Clone, Copy,)] | 3134 | #[derive(Debug, Clone, Copy,)] |
@@ -3578,7 +3618,11 @@ impl<R: TreeRoot<RaTypes>> TryExprNode<R> {
3578 | } | 3618 | } |
3579 | 3619 | ||
3580 | 3620 | ||
3581 | impl<'a> TryExpr<'a> {} | 3621 | impl<'a> TryExpr<'a> { |
3622 | pub fn expr(self) -> Option<Expr<'a>> { | ||
3623 | super::child_opt(self) | ||
3624 | } | ||
3625 | } | ||
3582 | 3626 | ||
3583 | // TupleExpr | 3627 | // TupleExpr |
3584 | #[derive(Debug, Clone, Copy,)] | 3628 | #[derive(Debug, Clone, Copy,)] |
diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron
index eed67637e..e3b9032a0 100644
--- a/crates/ra_syntax/src/grammar.ron
+++ b/crates/ra_syntax/src/grammar.ron
@@ -254,7 +254,7 @@ Grammar(
254 | ], | 254 | ], |
255 | options: [ "ParamList", ["body", "Block"], "RetType" ], | 255 | options: [ "ParamList", ["body", "Block"], "RetType" ], |
256 | ), | 256 | ), |
257 | "RetType": (), | 257 | "RetType": (options: ["TypeRef"]), |
258 | "StructDef": ( | 258 | "StructDef": ( |
259 | traits: [ | 259 | traits: [ |
260 | "NameOwner", | 260 | "NameOwner", |
@@ -304,7 +304,7 @@ Grammar(
304 | "ParenType": (), | 304 | "ParenType": (), |
305 | "TupleType": (), | 305 | "TupleType": (), |
306 | "NeverType": (), | 306 | "NeverType": (), |
307 | "PathType": (), | 307 | "PathType": (options: ["Path"]), |
308 | "PointerType": (), | 308 | "PointerType": (), |
309 | "ArrayType": (), | 309 | "ArrayType": (), |
310 | "SliceType": (), | 310 | "SliceType": (), |
@@ -346,7 +346,7 @@ Grammar(
346 | 346 | ||
347 | "TupleExpr": (), | 347 | "TupleExpr": (), |
348 | "ArrayExpr": (), | 348 | "ArrayExpr": (), |
349 | "ParenExpr": (), | 349 | "ParenExpr": (options: ["Expr"]), |
350 | "PathExpr": (options: ["Path"]), | 350 | "PathExpr": (options: ["Path"]), |
351 | "LambdaExpr": ( | 351 | "LambdaExpr": ( |
352 | options: [ | 352 | options: [ |
@@ -377,7 +377,7 @@ Grammar(
377 | "BlockExpr": ( | 377 | "BlockExpr": ( |
378 | options: [ "Block" ] | 378 | options: [ "Block" ] |
379 | ), | 379 | ), |
380 | "ReturnExpr": (), | 380 | "ReturnExpr": (options: ["Expr"]), |
381 | "MatchExpr": ( | 381 | "MatchExpr": ( |
382 | options: [ "Expr", "MatchArmList" ], | 382 | options: [ "Expr", "MatchArmList" ], |
383 | ), | 383 | ), |
@@ -405,10 +405,10 @@ Grammar(
405 | ), | 405 | ), |
406 | "IndexExpr": (), | 406 | "IndexExpr": (), |
407 | "FieldExpr": (), | 407 | "FieldExpr": (), |
408 | "TryExpr": (), | 408 | "TryExpr": (options: ["Expr"]), |
409 | "CastExpr": (), | 409 | "CastExpr": (options: ["Expr", "TypeRef"]), |
410 | "RefExpr": (), | 410 | "RefExpr": (options: ["Expr"]), |
411 | "PrefixExpr": (), | 411 | "PrefixExpr": (options: ["Expr"]), |
412 | "RangeExpr": (), | 412 | "RangeExpr": (), |
413 | "BinExpr": (), | 413 | "BinExpr": (), |
414 | "String": (), | 414 | "String": (), |
@@ -499,6 +499,7 @@ Grammar(
499 | ), | 499 | ), |
500 | "LetStmt": ( options: [ | 500 | "LetStmt": ( options: [ |
501 | ["pat", "Pat"], | 501 | ["pat", "Pat"], |
502 | ["type_ref", "TypeRef"], | ||
502 | ["initializer", "Expr"], | 503 | ["initializer", "Expr"], |
503 | ]), | 504 | ]), |
504 | "Condition": ( | 505 | "Condition": ( |
@@ -521,7 +522,7 @@ Grammar(
521 | ), | 522 | ), |
522 | "SelfParam": (), | 523 | "SelfParam": (), |
523 | "Param": ( | 524 | "Param": ( |
524 | options: [ "Pat" ], | 525 | options: [ "Pat", "TypeRef" ], |
525 | ), | 526 | ), |
526 | "UseItem": ( | 527 | "UseItem": ( |
527 | options: [ "UseTree" ] | 528 | options: [ "UseTree" ] |
diff --git a/crates/ra_syntax/tests/test.rs b/crates/ra_syntax/tests/test.rs index 4266864bd..2235dc401 100644 --- a/crates/ra_syntax/tests/test.rs +++ b/crates/ra_syntax/tests/test.rs | |||
@@ -1,14 +1,13 @@ | |||
1 | extern crate ra_syntax; | 1 | extern crate ra_syntax; |
2 | #[macro_use] | ||
3 | extern crate test_utils; | 2 | extern crate test_utils; |
4 | extern crate walkdir; | 3 | extern crate walkdir; |
5 | 4 | ||
6 | use std::{ | 5 | use std::{ |
7 | fmt::Write, | 6 | fmt::Write, |
8 | fs, | 7 | path::{PathBuf, Component}, |
9 | path::{Path, PathBuf, Component}, | ||
10 | }; | 8 | }; |
11 | 9 | ||
10 | use test_utils::{project_dir, dir_tests, read_text, collect_tests}; | ||
12 | use ra_syntax::{ | 11 | use ra_syntax::{ |
13 | utils::{check_fuzz_invariants, dump_tree}, | 12 | utils::{check_fuzz_invariants, dump_tree}, |
14 | SourceFileNode, | 13 | SourceFileNode, |
@@ -16,7 +15,7 @@ use ra_syntax::{ | |||
16 | 15 | ||
17 | #[test] | 16 | #[test] |
18 | fn lexer_tests() { | 17 | fn lexer_tests() { |
19 | dir_tests(&["lexer"], |text, _| { | 18 | dir_tests(&test_data_dir(), &["lexer"], |text, _| { |
20 | let tokens = ra_syntax::tokenize(text); | 19 | let tokens = ra_syntax::tokenize(text); |
21 | dump_tokens(&tokens, text) | 20 | dump_tokens(&tokens, text) |
22 | }) | 21 | }) |
@@ -24,33 +23,41 @@ fn lexer_tests() { | |||
24 | 23 | ||
25 | #[test] | 24 | #[test] |
26 | fn parser_tests() { | 25 | fn parser_tests() { |
27 | dir_tests(&["parser/inline/ok", "parser/ok"], |text, path| { | 26 | dir_tests( |
28 | let file = SourceFileNode::parse(text); | 27 | &test_data_dir(), |
29 | let errors = file.errors(); | 28 | &["parser/inline/ok", "parser/ok"], |
30 | assert_eq!( | 29 | |text, path| { |
31 | &*errors, | 30 | let file = SourceFileNode::parse(text); |
32 | &[] as &[ra_syntax::SyntaxError], | 31 | let errors = file.errors(); |
33 | "There should be no errors in the file {:?}", | 32 | assert_eq!( |
34 | path.display() | 33 | &*errors, |
35 | ); | 34 | &[] as &[ra_syntax::SyntaxError], |
36 | dump_tree(file.syntax()) | 35 | "There should be no errors in the file {:?}", |
37 | }); | 36 | path.display() |
38 | dir_tests(&["parser/err", "parser/inline/err"], |text, path| { | 37 | ); |
39 | let file = SourceFileNode::parse(text); | 38 | dump_tree(file.syntax()) |
40 | let errors = file.errors(); | 39 | }, |
41 | assert_ne!( | 40 | ); |
42 | &*errors, | 41 | dir_tests( |
43 | &[] as &[ra_syntax::SyntaxError], | 42 | &test_data_dir(), |
44 | "There should be errors in the file {:?}", | 43 | &["parser/err", "parser/inline/err"], |
45 | path.display() | 44 | |text, path| { |
46 | ); | 45 | let file = SourceFileNode::parse(text); |
47 | dump_tree(file.syntax()) | 46 | let errors = file.errors(); |
48 | }); | 47 | assert_ne!( |
48 | &*errors, | ||
49 | &[] as &[ra_syntax::SyntaxError], | ||
50 | "There should be errors in the file {:?}", | ||
51 | path.display() | ||
52 | ); | ||
53 | dump_tree(file.syntax()) | ||
54 | }, | ||
55 | ); | ||
49 | } | 56 | } |
50 | 57 | ||
51 | #[test] | 58 | #[test] |
52 | fn parser_fuzz_tests() { | 59 | fn parser_fuzz_tests() { |
53 | for (_, text) in collect_tests(&["parser/fuzz-failures"]) { | 60 | for (_, text) in collect_tests(&test_data_dir(), &["parser/fuzz-failures"]) { |
54 | check_fuzz_invariants(&text) | 61 | check_fuzz_invariants(&text) |
55 | } | 62 | } |
56 | } | 63 | } |
@@ -92,102 +99,6 @@ fn self_hosting_parsing() { | |||
92 | "self_hosting_parsing found too few files - is it running in the right directory?" | 99 | "self_hosting_parsing found too few files - is it running in the right directory?" |
93 | ) | 100 | ) |
94 | } | 101 | } |
95 | /// Read file and normalize newlines. | ||
96 | /// | ||
97 | /// `rustc` seems to always normalize `\r\n` newlines to `\n`: | ||
98 | /// | ||
99 | /// ``` | ||
100 | /// let s = " | ||
101 | /// "; | ||
102 | /// assert_eq!(s.as_bytes(), &[10]); | ||
103 | /// ``` | ||
104 | /// | ||
105 | /// so this should always be correct. | ||
106 | fn read_text(path: &Path) -> String { | ||
107 | fs::read_to_string(path) | ||
108 | .expect(&format!("File at {:?} should be valid", path)) | ||
109 | .replace("\r\n", "\n") | ||
110 | } | ||
111 | |||
112 | fn dir_tests<F>(paths: &[&str], f: F) | ||
113 | where | ||
114 | F: Fn(&str, &Path) -> String, | ||
115 | { | ||
116 | for (path, input_code) in collect_tests(paths) { | ||
117 | let parse_tree = f(&input_code, &path); | ||
118 | let path = path.with_extension("txt"); | ||
119 | if !path.exists() { | ||
120 | println!("\nfile: {}", path.display()); | ||
121 | println!("No .txt file with expected result, creating...\n"); | ||
122 | println!("{}\n{}", input_code, parse_tree); | ||
123 | fs::write(&path, &parse_tree).unwrap(); | ||
124 | panic!("No expected result") | ||
125 | } | ||
126 | let expected = read_text(&path); | ||
127 | let expected = expected.as_str(); | ||
128 | let parse_tree = parse_tree.as_str(); | ||
129 | assert_equal_text(expected, parse_tree, &path); | ||
130 | } | ||
131 | } | ||
132 | |||
133 | const REWRITE: bool = false; | ||
134 | |||
135 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | ||
136 | if expected == actual { | ||
137 | return; | ||
138 | } | ||
139 | let dir = project_dir(); | ||
140 | let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path); | ||
141 | if expected.trim() == actual.trim() { | ||
142 | println!("whitespace difference, rewriting"); | ||
143 | println!("file: {}\n", pretty_path.display()); | ||
144 | fs::write(path, actual).unwrap(); | ||
145 | return; | ||
146 | } | ||
147 | if REWRITE { | ||
148 | println!("rewriting {}", pretty_path.display()); | ||
149 | fs::write(path, actual).unwrap(); | ||
150 | return; | ||
151 | } | ||
152 | assert_eq_text!(expected, actual, "file: {}", pretty_path.display()); | ||
153 | } | ||
154 | |||
155 | fn collect_tests(paths: &[&str]) -> Vec<(PathBuf, String)> { | ||
156 | paths | ||
157 | .iter() | ||
158 | .flat_map(|path| { | ||
159 | let path = test_data_dir().join(path); | ||
160 | test_from_dir(&path).into_iter() | ||
161 | }) | ||
162 | .map(|path| { | ||
163 | let text = read_text(&path); | ||
164 | (path, text) | ||
165 | }) | ||
166 | .collect() | ||
167 | } | ||
168 | |||
169 | fn test_from_dir(dir: &Path) -> Vec<PathBuf> { | ||
170 | let mut acc = Vec::new(); | ||
171 | for file in fs::read_dir(&dir).unwrap() { | ||
172 | let file = file.unwrap(); | ||
173 | let path = file.path(); | ||
174 | if path.extension().unwrap_or_default() == "rs" { | ||
175 | acc.push(path); | ||
176 | } | ||
177 | } | ||
178 | acc.sort(); | ||
179 | acc | ||
180 | } | ||
181 | |||
182 | fn project_dir() -> PathBuf { | ||
183 | let dir = env!("CARGO_MANIFEST_DIR"); | ||
184 | PathBuf::from(dir) | ||
185 | .parent() | ||
186 | .unwrap() | ||
187 | .parent() | ||
188 | .unwrap() | ||
189 | .to_owned() | ||
190 | } | ||
191 | 102 | ||
192 | fn test_data_dir() -> PathBuf { | 103 | fn test_data_dir() -> PathBuf { |
193 | project_dir().join("crates/ra_syntax/tests/data") | 104 | project_dir().join("crates/ra_syntax/tests/data") |
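The net effect of this hunk is a relocation rather than a behavior change: the data-driven harness (`dir_tests`, `collect_tests`, `read_text`, `project_dir`, `assert_equal_text`) leaves ra_syntax's test file and moves into the shared test_utils crate (next file), with the test data directory now passed in explicitly so other crates can point the same harness at their own fixtures. A hedged sketch of a caller in some other crate — the crate name and fixture path below are made up for illustration; only the `dir_tests`/`project_dir` signatures come from this diff:

    use std::path::PathBuf;

    use test_utils::{dir_tests, project_dir};

    fn my_test_data_dir() -> PathBuf {
        // Hypothetical fixture location for the illustrative crate.
        project_dir().join("crates/some_crate/tests/data")
    }

    #[test]
    fn data_driven_tests() {
        dir_tests(&my_test_data_dir(), &["some_suite"], |text, _path| {
            // Whatever textual dump should be compared against the .txt fixture.
            format!("input was {} bytes\n", text.len())
        });
    }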
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index beb936c61..012b1d0b4 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs | |||
@@ -1,4 +1,6 @@ | |||
1 | use std::fmt; | 1 | use std::fmt; |
2 | use std::fs; | ||
3 | use std::path::{Path, PathBuf}; | ||
2 | 4 | ||
3 | use itertools::Itertools; | 5 | use itertools::Itertools; |
4 | use text_unit::{TextRange, TextUnit}; | 6 | use text_unit::{TextRange, TextUnit}; |
@@ -262,3 +264,100 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a | |||
262 | _ => Some((expected, actual)), | 264 | _ => Some((expected, actual)), |
263 | } | 265 | } |
264 | } | 266 | } |
267 | |||
268 | pub fn dir_tests<F>(test_data_dir: &Path, paths: &[&str], f: F) | ||
269 | where | ||
270 | F: Fn(&str, &Path) -> String, | ||
271 | { | ||
272 | for (path, input_code) in collect_tests(test_data_dir, paths) { | ||
273 | let parse_tree = f(&input_code, &path); | ||
274 | let path = path.with_extension("txt"); | ||
275 | if !path.exists() { | ||
276 | println!("\nfile: {}", path.display()); | ||
277 | println!("No .txt file with expected result, creating...\n"); | ||
278 | println!("{}\n{}", input_code, parse_tree); | ||
279 | fs::write(&path, &parse_tree).unwrap(); | ||
280 | panic!("No expected result") | ||
281 | } | ||
282 | let expected = read_text(&path); | ||
283 | let expected = expected.as_str(); | ||
284 | let parse_tree = parse_tree.as_str(); | ||
285 | assert_equal_text(expected, parse_tree, &path); | ||
286 | } | ||
287 | } | ||
288 | |||
289 | pub fn collect_tests(test_data_dir: &Path, paths: &[&str]) -> Vec<(PathBuf, String)> { | ||
290 | paths | ||
291 | .iter() | ||
292 | .flat_map(|path| { | ||
293 | let path = test_data_dir.to_owned().join(path); | ||
294 | test_from_dir(&path).into_iter() | ||
295 | }) | ||
296 | .map(|path| { | ||
297 | let text = read_text(&path); | ||
298 | (path, text) | ||
299 | }) | ||
300 | .collect() | ||
301 | } | ||
302 | |||
303 | fn test_from_dir(dir: &Path) -> Vec<PathBuf> { | ||
304 | let mut acc = Vec::new(); | ||
305 | for file in fs::read_dir(&dir).unwrap() { | ||
306 | let file = file.unwrap(); | ||
307 | let path = file.path(); | ||
308 | if path.extension().unwrap_or_default() == "rs" { | ||
309 | acc.push(path); | ||
310 | } | ||
311 | } | ||
312 | acc.sort(); | ||
313 | acc | ||
314 | } | ||
315 | |||
316 | pub fn project_dir() -> PathBuf { | ||
317 | let dir = env!("CARGO_MANIFEST_DIR"); | ||
318 | PathBuf::from(dir) | ||
319 | .parent() | ||
320 | .unwrap() | ||
321 | .parent() | ||
322 | .unwrap() | ||
323 | .to_owned() | ||
324 | } | ||
325 | |||
326 | /// Read file and normalize newlines. | ||
327 | /// | ||
328 | /// `rustc` seems to always normalize `\r\n` newlines to `\n`: | ||
329 | /// | ||
330 | /// ``` | ||
331 | /// let s = " | ||
332 | /// "; | ||
333 | /// assert_eq!(s.as_bytes(), &[10]); | ||
334 | /// ``` | ||
335 | /// | ||
336 | /// so this should always be correct. | ||
337 | pub fn read_text(path: &Path) -> String { | ||
338 | fs::read_to_string(path) | ||
339 | .expect(&format!("File at {:?} should be valid", path)) | ||
340 | .replace("\r\n", "\n") | ||
341 | } | ||
342 | |||
343 | const REWRITE: bool = false; | ||
344 | |||
345 | fn assert_equal_text(expected: &str, actual: &str, path: &Path) { | ||
346 | if expected == actual { | ||
347 | return; | ||
348 | } | ||
349 | let dir = project_dir(); | ||
350 | let pretty_path = path.strip_prefix(&dir).unwrap_or_else(|_| path); | ||
351 | if expected.trim() == actual.trim() { | ||
352 | println!("whitespace difference, rewriting"); | ||
353 | println!("file: {}\n", pretty_path.display()); | ||
354 | fs::write(path, actual).unwrap(); | ||
355 | return; | ||
356 | } | ||
357 | if REWRITE { | ||
358 | println!("rewriting {}", pretty_path.display()); | ||
359 | fs::write(path, actual).unwrap(); | ||
360 | return; | ||
361 | } | ||
362 | assert_eq_text!(expected, actual, "file: {}", pretty_path.display()); | ||
363 | } | ||
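Worth noting for anyone updating fixtures with the relocated helpers: `dir_tests` writes a missing `.txt` file and then panics so the run fails loudly, `assert_equal_text` silently rewrites fixtures that differ only in whitespace, and the `REWRITE` constant is the escape hatch for intentional output changes. The usual workflow (an assumption about how the flag is meant to be used, not documented in this diff) would be:

    // In crates/test_utils/src/lib.rs, temporarily:
    const REWRITE: bool = true; // rerun the tests to regenerate every .txt fixture
    // ...then flip it back to `false` before committing.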