-rw-r--r--  Cargo.lock | 1
-rw-r--r--  crates/ra_analysis/src/completion.rs | 8
-rw-r--r--  crates/ra_analysis/src/completion/complete_dot.rs | 98
-rw-r--r--  crates/ra_analysis/src/completion/complete_path.rs | 2
-rw-r--r--  crates/ra_analysis/src/completion/completion_context.rs | 54
-rw-r--r--  crates/ra_analysis/src/completion/completion_item.rs | 36
-rw-r--r--  crates/ra_analysis/src/db.rs | 3
-rw-r--r--  crates/ra_analysis/src/imp.rs | 61
-rw-r--r--  crates/ra_analysis/src/lib.rs | 5
-rw-r--r--  crates/ra_db/Cargo.toml | 1
-rw-r--r--  crates/ra_db/src/cancelation.rs | 85
-rw-r--r--  crates/ra_db/src/lib.rs | 18
-rw-r--r--  crates/ra_editor/src/code_actions.rs | 7
-rw-r--r--  crates/ra_editor/src/lib.rs | 90
-rw-r--r--  crates/ra_editor/src/typing.rs | 27
-rw-r--r--  crates/ra_hir/src/adt.rs | 194
-rw-r--r--  crates/ra_hir/src/db.rs | 17
-rw-r--r--  crates/ra_hir/src/function.rs | 3
-rw-r--r--  crates/ra_hir/src/lib.rs | 47
-rw-r--r--  crates/ra_hir/src/mock.rs | 3
-rw-r--r--  crates/ra_hir/src/module.rs | 44
-rw-r--r--  crates/ra_hir/src/module/nameres.rs | 154
-rw-r--r--  crates/ra_hir/src/module/nameres/tests.rs | 6
-rw-r--r--  crates/ra_hir/src/path.rs | 4
-rw-r--r--  crates/ra_hir/src/query_definitions.rs | 29
-rw-r--r--  crates/ra_hir/src/ty.rs | 448
-rw-r--r--  crates/ra_hir/src/ty/tests.rs | 45
-rw-r--r--  crates/ra_hir/src/ty/tests/data/0001_basics.txt | 4
-rw-r--r--  crates/ra_hir/src/ty/tests/data/0004_struct.txt | 16
-rw-r--r--  crates/ra_hir/src/ty/tests/data/0005_refs.txt | 23
-rw-r--r--  crates/ra_hir/src/type_ref.rs | 110
-rw-r--r--  crates/ra_lsp_server/src/caps.rs | 2
-rw-r--r--  crates/ra_lsp_server/src/conv.rs | 3
-rw-r--r--  crates/ra_lsp_server/src/main_loop.rs | 2
-rw-r--r--  crates/ra_lsp_server/tests/heavy_tests/main.rs | 4
-rw-r--r--  crates/ra_syntax/src/ast.rs | 70
-rw-r--r--  crates/ra_syntax/src/ast/generated.rs | 321
-rw-r--r--  crates/ra_syntax/src/grammar.ron | 38
-rw-r--r--  crates/ra_syntax/src/grammar/expressions.rs | 16
-rw-r--r--  crates/ra_syntax/src/string_lexing.rs | 10
-rw-r--r--  crates/ra_syntax/src/string_lexing/byte.rs | 51
-rw-r--r--  crates/ra_syntax/src/string_lexing/byte_string.rs | 51
-rw-r--r--  crates/ra_syntax/src/string_lexing/char.rs | 176
-rw-r--r--  crates/ra_syntax/src/string_lexing/parser.rs | 92
-rw-r--r--  crates/ra_syntax/src/string_lexing/string.rs | 197
-rw-r--r--  crates/ra_syntax/src/utils.rs | 7
-rw-r--r--  crates/ra_syntax/src/validation/byte.rs | 14
-rw-r--r--  crates/ra_syntax/src/validation/byte_string.rs | 13
-rw-r--r--  crates/ra_syntax/src/validation/char.rs | 14
-rw-r--r--  crates/ra_syntax/src/validation/string.rs | 24
-rw-r--r--  crates/ra_syntax/src/yellow/syntax_error.rs | 2
-rw-r--r--  crates/ra_syntax/tests/data/parser/err/0029_field_completion.rs | 3
-rw-r--r--  crates/ra_syntax/tests/data/parser/err/0029_field_completion.txt | 35
-rw-r--r--  crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.rs | 6
-rw-r--r--  crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt | 73
-rw-r--r--  crates/tools/src/lib.rs | 2
56 files changed, 2079 insertions, 790 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 127d7772a..0d417c024 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -708,6 +708,7 @@ dependencies = [
708name = "ra_db" 708name = "ra_db"
709version = "0.1.0" 709version = "0.1.0"
710dependencies = [ 710dependencies = [
711 "backtrace 0.3.13 (registry+https://github.com/rust-lang/crates.io-index)",
711 "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", 712 "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
712 "ra_editor 0.1.0", 713 "ra_editor 0.1.0",
713 "ra_syntax 0.1.0", 714 "ra_syntax 0.1.0",
diff --git a/crates/ra_analysis/src/completion.rs b/crates/ra_analysis/src/completion.rs
index d742d6295..fe580700f 100644
--- a/crates/ra_analysis/src/completion.rs
+++ b/crates/ra_analysis/src/completion.rs
@@ -1,6 +1,7 @@
1mod completion_item; 1mod completion_item;
2mod completion_context; 2mod completion_context;
3 3
4mod complete_dot;
4mod complete_fn_param; 5mod complete_fn_param;
5mod complete_keyword; 6mod complete_keyword;
6mod complete_snippet; 7mod complete_snippet;
@@ -20,13 +21,13 @@ use crate::{
20 21
21pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind}; 22pub use crate::completion::completion_item::{CompletionItem, InsertText, CompletionItemKind};
22 23
23/// Main entry point for copmletion. We run comletion as a two-phase process. 24/// Main entry point for completion. We run completion as a two-phase process.
24/// 25///
25/// First, we look at the position and collect a so-called `CompletionContext. 26/// First, we look at the position and collect a so-called `CompletionContext.
26/// This is a somewhat messy process, because, during completion, syntax tree is 27/// This is a somewhat messy process, because, during completion, syntax tree is
27/// incomplete and can look readlly weired. 28/// incomplete and can look really weird.
28/// 29///
29/// Once the context is collected, we run a series of completion routines whihc 30/// Once the context is collected, we run a series of completion routines which
30/// look at the context and produce completion items. 31/// look at the context and produce completion items.
31pub(crate) fn completions( 32pub(crate) fn completions(
32 db: &db::RootDatabase, 33 db: &db::RootDatabase,
@@ -43,6 +44,7 @@ pub(crate) fn completions(
43 complete_snippet::complete_item_snippet(&mut acc, &ctx); 44 complete_snippet::complete_item_snippet(&mut acc, &ctx);
44 complete_path::complete_path(&mut acc, &ctx)?; 45 complete_path::complete_path(&mut acc, &ctx)?;
45 complete_scope::complete_scope(&mut acc, &ctx)?; 46 complete_scope::complete_scope(&mut acc, &ctx)?;
47 complete_dot::complete_dot(&mut acc, &ctx)?;
46 48
47 Ok(Some(acc)) 49 Ok(Some(acc))
48} 50}
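
Every routine wired into the list above has the same shape: it takes the CompletionContext computed in phase one plus the shared accumulator, and pushes items into it. Below is a minimal sketch of such a routine, mirroring the complete_dot signature added by this commit; the routine name and the pushed item are made up for illustration.

use crate::Cancelable;
use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind};

pub(super) fn complete_something(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> {
    // Phase one already classified the position; routines only consult its flags.
    if ctx.is_new_item {
        CompletionItem::new(CompletionKind::Reference, "example_item".to_string())
            .kind(CompletionItemKind::Keyword)
            .add_to(acc);
    }
    Ok(())
}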
diff --git a/crates/ra_analysis/src/completion/complete_dot.rs b/crates/ra_analysis/src/completion/complete_dot.rs
new file mode 100644
index 000000000..93d657576
--- /dev/null
+++ b/crates/ra_analysis/src/completion/complete_dot.rs
@@ -0,0 +1,98 @@
1use ra_syntax::ast::AstNode;
2use hir::{Ty, Def};
3
4use crate::Cancelable;
5use crate::completion::{CompletionContext, Completions, CompletionKind, CompletionItem, CompletionItemKind};
6
7/// Complete dot accesses, i.e. fields or methods (currently only fields).
8pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) -> Cancelable<()> {
9 let module = if let Some(module) = &ctx.module {
10 module
11 } else {
12 return Ok(());
13 };
14 let function = if let Some(fn_def) = ctx.enclosing_fn {
15 hir::source_binder::function_from_module(ctx.db, module, fn_def)
16 } else {
17 return Ok(());
18 };
19 let receiver = if let Some(receiver) = ctx.dot_receiver {
20 receiver
21 } else {
22 return Ok(());
23 };
24 let infer_result = function.infer(ctx.db)?;
25 let receiver_ty = if let Some(ty) = infer_result.type_of_node(receiver.syntax()) {
26 ty
27 } else {
28 return Ok(());
29 };
30 if !ctx.is_method_call {
31 complete_fields(acc, ctx, receiver_ty)?;
32 }
33 Ok(())
34}
35
36fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: Ty) -> Cancelable<()> {
37 // TODO: autoderef etc.
38 match receiver {
39 Ty::Adt { def_id, .. } => {
40 match def_id.resolve(ctx.db)? {
41 Def::Struct(s) => {
42 let variant_data = s.variant_data(ctx.db)?;
43 for field in variant_data.fields() {
44 CompletionItem::new(CompletionKind::Reference, field.name().to_string())
45 .kind(CompletionItemKind::Field)
46 .add_to(acc);
47 }
48 }
49 // TODO unions
50 _ => {}
51 }
52 }
53 Ty::Tuple(fields) => {
54 for (i, _ty) in fields.iter().enumerate() {
55 CompletionItem::new(CompletionKind::Reference, i.to_string())
56 .kind(CompletionItemKind::Field)
57 .add_to(acc);
58 }
59 }
60 _ => {}
61 };
62 Ok(())
63}
64
65#[cfg(test)]
66mod tests {
67 use crate::completion::*;
68
69 fn check_ref_completion(code: &str, expected_completions: &str) {
70 check_completion(code, expected_completions, CompletionKind::Reference);
71 }
72
73 #[test]
74 fn test_struct_field_completion() {
75 check_ref_completion(
76 r"
77 struct A { the_field: u32 }
78 fn foo(a: A) {
79 a.<|>
80 }
81 ",
82 r#"the_field"#,
83 );
84 }
85
86 #[test]
87 fn test_no_struct_field_completion_for_method_call() {
88 check_ref_completion(
89 r"
90 struct A { the_field: u32 }
91 fn foo(a: A) {
92 a.<|>()
93 }
94 ",
95 r#""#,
96 );
97 }
98}
diff --git a/crates/ra_analysis/src/completion/complete_path.rs b/crates/ra_analysis/src/completion/complete_path.rs
index ad4d68a33..aaa2c7cee 100644
--- a/crates/ra_analysis/src/completion/complete_path.rs
+++ b/crates/ra_analysis/src/completion/complete_path.rs
@@ -8,7 +8,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) -> C
8 (Some(path), Some(module)) => (path.clone(), module), 8 (Some(path), Some(module)) => (path.clone(), module),
9 _ => return Ok(()), 9 _ => return Ok(()),
10 }; 10 };
11 let def_id = match module.resolve_path(ctx.db, path)? { 11 let def_id = match module.resolve_path(ctx.db, &path)?.take_types() {
12 Some(it) => it, 12 Some(it) => it,
13 None => return Ok(()), 13 None => return Ok(()),
14 }; 14 };
diff --git a/crates/ra_analysis/src/completion/completion_context.rs b/crates/ra_analysis/src/completion/completion_context.rs
index 064fbc6f7..978772fd4 100644
--- a/crates/ra_analysis/src/completion/completion_context.rs
+++ b/crates/ra_analysis/src/completion/completion_context.rs
@@ -1,12 +1,13 @@
1use ra_editor::find_node_at_offset; 1use ra_editor::find_node_at_offset;
2use ra_text_edit::AtomTextEdit; 2use ra_text_edit::AtomTextEdit;
3use ra_syntax::{ 3use ra_syntax::{
4 algo::find_leaf_at_offset, 4 algo::{find_leaf_at_offset, find_covering_node},
5 ast, 5 ast,
6 AstNode, 6 AstNode,
7 SyntaxNodeRef, 7 SyntaxNodeRef,
8 SourceFileNode, 8 SourceFileNode,
9 TextUnit, 9 TextUnit,
10 TextRange,
10 SyntaxKind::*, 11 SyntaxKind::*,
11}; 12};
12use hir::source_binder; 13use hir::source_binder;
@@ -31,6 +32,10 @@ pub(super) struct CompletionContext<'a> {
31 pub(super) is_stmt: bool, 32 pub(super) is_stmt: bool,
32 /// Something is typed at the "top" level, in module or impl/trait. 33 /// Something is typed at the "top" level, in module or impl/trait.
33 pub(super) is_new_item: bool, 34 pub(super) is_new_item: bool,
35 /// The receiver if this is a field or method access, i.e. writing something.<|>
36 pub(super) dot_receiver: Option<ast::Expr<'a>>,
37 /// If this is a method call in particular, i.e. the () are already there.
38 pub(super) is_method_call: bool,
34} 39}
35 40
36impl<'a> CompletionContext<'a> { 41impl<'a> CompletionContext<'a> {
@@ -54,12 +59,14 @@ impl<'a> CompletionContext<'a> {
54 after_if: false, 59 after_if: false,
55 is_stmt: false, 60 is_stmt: false,
56 is_new_item: false, 61 is_new_item: false,
62 dot_receiver: None,
63 is_method_call: false,
57 }; 64 };
58 ctx.fill(original_file, position.offset); 65 ctx.fill(original_file, position.offset);
59 Ok(Some(ctx)) 66 Ok(Some(ctx))
60 } 67 }
61 68
62 fn fill(&mut self, original_file: &SourceFileNode, offset: TextUnit) { 69 fn fill(&mut self, original_file: &'a SourceFileNode, offset: TextUnit) {
63 // Insert a fake ident to get a valid parse tree. We will use this file 70 // Insert a fake ident to get a valid parse tree. We will use this file
64 // to determine context, though the original_file will be used for 71 // to determine context, though the original_file will be used for
65 // actual completion. 72 // actual completion.
@@ -76,7 +83,7 @@ impl<'a> CompletionContext<'a> {
76 self.is_param = true; 83 self.is_param = true;
77 return; 84 return;
78 } 85 }
79 self.classify_name_ref(&file, name_ref); 86 self.classify_name_ref(original_file, name_ref);
80 } 87 }
81 88
82 // Otherwise, see if this is a declaration. We can use heuristics to 89 // Otherwise, see if this is a declaration. We can use heuristics to
@@ -88,7 +95,7 @@ impl<'a> CompletionContext<'a> {
88 } 95 }
89 } 96 }
90 } 97 }
91 fn classify_name_ref(&mut self, file: &SourceFileNode, name_ref: ast::NameRef) { 98 fn classify_name_ref(&mut self, original_file: &'a SourceFileNode, name_ref: ast::NameRef) {
92 let name_range = name_ref.syntax().range(); 99 let name_range = name_ref.syntax().range();
93 let top_node = name_ref 100 let top_node = name_ref
94 .syntax() 101 .syntax()
@@ -105,6 +112,12 @@ impl<'a> CompletionContext<'a> {
105 _ => (), 112 _ => (),
106 } 113 }
107 114
115 self.enclosing_fn = self
116 .leaf
117 .ancestors()
118 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
119 .find_map(ast::FnDef::cast);
120
108 let parent = match name_ref.syntax().parent() { 121 let parent = match name_ref.syntax().parent() {
109 Some(it) => it, 122 Some(it) => it,
110 None => return, 123 None => return,
@@ -120,11 +133,6 @@ impl<'a> CompletionContext<'a> {
120 } 133 }
121 if path.qualifier().is_none() { 134 if path.qualifier().is_none() {
122 self.is_trivial_path = true; 135 self.is_trivial_path = true;
123 self.enclosing_fn = self
124 .leaf
125 .ancestors()
126 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
127 .find_map(ast::FnDef::cast);
128 136
129 self.is_stmt = match name_ref 137 self.is_stmt = match name_ref
130 .syntax() 138 .syntax()
@@ -137,7 +145,9 @@ impl<'a> CompletionContext<'a> {
137 }; 145 };
138 146
139 if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) { 147 if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
140 if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) { 148 if let Some(if_expr) =
149 find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off)
150 {
141 if if_expr.syntax().range().end() < name_ref.syntax().range().start() { 151 if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
142 self.after_if = true; 152 self.after_if = true;
143 } 153 }
@@ -145,9 +155,33 @@ impl<'a> CompletionContext<'a> {
145 } 155 }
146 } 156 }
147 } 157 }
158 if let Some(field_expr) = ast::FieldExpr::cast(parent) {
159 // The receiver comes before the point of insertion of the fake
160 // ident, so it should have the same range in the non-modified file
161 self.dot_receiver = field_expr
162 .expr()
163 .map(|e| e.syntax().range())
164 .and_then(|r| find_node_with_range(original_file.syntax(), r));
165 }
166 if let Some(method_call_expr) = ast::MethodCallExpr::cast(parent) {
167 // As above
168 self.dot_receiver = method_call_expr
169 .expr()
170 .map(|e| e.syntax().range())
171 .and_then(|r| find_node_with_range(original_file.syntax(), r));
172 self.is_method_call = true;
173 }
148 } 174 }
149} 175}
150 176
177fn find_node_with_range<'a, N: AstNode<'a>>(
178 syntax: SyntaxNodeRef<'a>,
179 range: TextRange,
180) -> Option<N> {
181 let node = find_covering_node(syntax, range);
182 node.ancestors().find_map(N::cast)
183}
184
151fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool { 185fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
152 match node.ancestors().filter_map(N::cast).next() { 186 match node.ancestors().filter_map(N::cast).next() {
153 None => false, 187 None => false,
diff --git a/crates/ra_analysis/src/completion/completion_item.rs b/crates/ra_analysis/src/completion/completion_item.rs
index 911f08468..c9f9f495d 100644
--- a/crates/ra_analysis/src/completion/completion_item.rs
+++ b/crates/ra_analysis/src/completion/completion_item.rs
@@ -1,5 +1,7 @@
1use crate::db; 1use crate::db;
2 2
3use hir::PerNs;
4
3/// `CompletionItem` describes a single completion variant in the editor pop-up. 5/// `CompletionItem` describes a single completion variant in the editor pop-up.
4/// It is basically a POD with various properties. To construct a 6/// It is basically a POD with various properties. To construct a
5/// `CompletionItem`, use `new` method and the `Builder` struct. 7/// `CompletionItem`, use `new` method and the `Builder` struct.
@@ -25,7 +27,10 @@ pub enum CompletionItemKind {
25 Keyword, 27 Keyword,
26 Module, 28 Module,
27 Function, 29 Function,
30 Struct,
31 Enum,
28 Binding, 32 Binding,
33 Field,
29} 34}
30 35
31#[derive(Debug, PartialEq, Eq)] 36#[derive(Debug, PartialEq, Eq)]
@@ -117,16 +122,27 @@ impl Builder {
117 db: &db::RootDatabase, 122 db: &db::RootDatabase,
118 resolution: &hir::Resolution, 123 resolution: &hir::Resolution,
119 ) -> Builder { 124 ) -> Builder {
120 if let Some(def_id) = resolution.def_id { 125 let resolved = resolution.def_id.and_then(|d| d.resolve(db).ok());
121 if let Ok(def) = def_id.resolve(db) { 126 let kind = match resolved {
122 let kind = match def { 127 PerNs {
123 hir::Def::Module(..) => CompletionItemKind::Module, 128 types: Some(hir::Def::Module(..)),
124 hir::Def::Function(..) => CompletionItemKind::Function, 129 ..
125 _ => return self, 130 } => CompletionItemKind::Module,
126 }; 131 PerNs {
127 self.kind = Some(kind); 132 types: Some(hir::Def::Struct(..)),
128 } 133 ..
129 } 134 } => CompletionItemKind::Struct,
135 PerNs {
136 types: Some(hir::Def::Enum(..)),
137 ..
138 } => CompletionItemKind::Enum,
139 PerNs {
140 values: Some(hir::Def::Function(..)),
141 ..
142 } => CompletionItemKind::Function,
143 _ => return self,
144 };
145 self.kind = Some(kind);
130 self 146 self
131 } 147 }
132} 148}
diff --git a/crates/ra_analysis/src/db.rs b/crates/ra_analysis/src/db.rs
index 780a84291..036e284bf 100644
--- a/crates/ra_analysis/src/db.rs
+++ b/crates/ra_analysis/src/db.rs
@@ -95,6 +95,9 @@ salsa::database_storage! {
95 fn submodules() for hir::db::SubmodulesQuery; 95 fn submodules() for hir::db::SubmodulesQuery;
96 fn infer() for hir::db::InferQuery; 96 fn infer() for hir::db::InferQuery;
97 fn type_for_def() for hir::db::TypeForDefQuery; 97 fn type_for_def() for hir::db::TypeForDefQuery;
98 fn type_for_field() for hir::db::TypeForFieldQuery;
99 fn struct_data() for hir::db::StructDataQuery;
100 fn enum_data() for hir::db::EnumDataQuery;
98 } 101 }
99 } 102 }
100} 103}
diff --git a/crates/ra_analysis/src/imp.rs b/crates/ra_analysis/src/imp.rs
index a547c5a20..38a5c1a7d 100644
--- a/crates/ra_analysis/src/imp.rs
+++ b/crates/ra_analysis/src/imp.rs
@@ -3,31 +3,32 @@ use std::{
3 sync::Arc, 3 sync::Arc,
4}; 4};
5 5
6use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit, Severity};
7use ra_syntax::{
8 ast::{self, ArgListOwner, Expr, NameOwner, FnDef},
9 algo::find_covering_node,
10 AstNode, SourceFileNode,
11 SyntaxKind::*,
12 SyntaxNodeRef, TextRange, TextUnit,
13};
14use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase};
15use rayon::prelude::*; 6use rayon::prelude::*;
16use salsa::{Database, ParallelDatabase}; 7use salsa::{Database, ParallelDatabase};
8
17use hir::{ 9use hir::{
18 self, 10 self,
19 source_binder,
20 FnSignatureInfo, 11 FnSignatureInfo,
21 Problem, 12 Problem,
13 source_binder,
14};
15use ra_db::{FilesDatabase, SourceRoot, SourceRootId, SyntaxDatabase};
16use ra_editor::{self, FileSymbol, find_node_at_offset, LineIndex, LocalEdit, Severity};
17use ra_syntax::{
18 algo::find_covering_node,
19 ast::{self, ArgListOwner, Expr, FnDef, NameOwner},
20 AstNode, SourceFileNode,
21 SyntaxKind::*,
22 SyntaxNodeRef, TextRange, TextUnit,
22}; 23};
23 24
24use crate::{ 25use crate::{
25 completion::{completions, CompletionItem}, 26 AnalysisChange,
26 db, 27 Cancelable,
27 symbol_index::{SymbolIndex, SymbolsDatabase, LibrarySymbolsQuery}, 28 completion::{CompletionItem, completions},
28 AnalysisChange, RootChange, Cancelable, CrateId, Diagnostic, FileId, 29 CrateId, db, Diagnostic, FileId, FilePosition, FileSystemEdit,
29 FileSystemEdit, FilePosition, Query, SourceChange, SourceFileEdit, 30 Query, ReferenceResolution, RootChange, SourceChange, SourceFileEdit,
30 ReferenceResolution, 31 symbol_index::{LibrarySymbolsQuery, SymbolIndex, SymbolsDatabase},
31}; 32};
32 33
33#[derive(Debug, Default)] 34#[derive(Debug, Default)]
@@ -366,7 +367,7 @@ impl AnalysisImpl {
366 range: d.range, 367 range: d.range,
367 message: d.msg, 368 message: d.msg,
368 severity: d.severity, 369 severity: d.severity,
369 fix: None, 370 fix: d.fix.map(|fix| SourceChange::from_local_edit(file_id, fix)),
370 }) 371 })
371 .collect::<Vec<_>>(); 372 .collect::<Vec<_>>();
372 if let Some(m) = source_binder::module_from_file_id(&*self.db, file_id)? { 373 if let Some(m) = source_binder::module_from_file_id(&*self.db, file_id)? {
@@ -425,25 +426,15 @@ impl AnalysisImpl {
425 let file = self.file_syntax(file_id); 426 let file = self.file_syntax(file_id);
426 let offset = range.start(); 427 let offset = range.start();
427 let actions = vec![ 428 let actions = vec![
428 ( 429 ra_editor::flip_comma(&file, offset).map(|f| f()),
429 "flip comma", 430 ra_editor::add_derive(&file, offset).map(|f| f()),
430 ra_editor::flip_comma(&file, offset).map(|f| f()), 431 ra_editor::add_impl(&file, offset).map(|f| f()),
431 ), 432 ra_editor::make_pub_crate(&file, offset).map(|f| f()),
432 ( 433 ra_editor::introduce_variable(&file, range).map(|f| f()),
433 "add `#[derive]`",
434 ra_editor::add_derive(&file, offset).map(|f| f()),
435 ),
436 ("add impl", ra_editor::add_impl(&file, offset).map(|f| f())),
437 (
438 "introduce variable",
439 ra_editor::introduce_variable(&file, range).map(|f| f()),
440 ),
441 ]; 434 ];
442 actions 435 actions
443 .into_iter() 436 .into_iter()
444 .filter_map(|(name, local_edit)| { 437 .filter_map(|local_edit| Some(SourceChange::from_local_edit(file_id, local_edit?)))
445 Some(SourceChange::from_local_edit(file_id, name, local_edit?))
446 })
447 .collect() 438 .collect()
448 } 439 }
449 440
@@ -541,13 +532,13 @@ impl AnalysisImpl {
541} 532}
542 533
543impl SourceChange { 534impl SourceChange {
544 pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange { 535 pub(crate) fn from_local_edit(file_id: FileId, edit: LocalEdit) -> SourceChange {
545 let file_edit = SourceFileEdit { 536 let file_edit = SourceFileEdit {
546 file_id, 537 file_id,
547 edit: edit.edit, 538 edit: edit.edit,
548 }; 539 };
549 SourceChange { 540 SourceChange {
550 label: label.to_string(), 541 label: edit.label,
551 source_file_edits: vec![file_edit], 542 source_file_edits: vec![file_edit],
552 file_system_edits: vec![], 543 file_system_edits: vec![],
553 cursor_position: edit 544 cursor_position: edit
diff --git a/crates/ra_analysis/src/lib.rs b/crates/ra_analysis/src/lib.rs
index a029f66b4..476d1b438 100644
--- a/crates/ra_analysis/src/lib.rs
+++ b/crates/ra_analysis/src/lib.rs
@@ -288,19 +288,18 @@ impl Analysis {
288 } 288 }
289 pub fn join_lines(&self, file_id: FileId, range: TextRange) -> SourceChange { 289 pub fn join_lines(&self, file_id: FileId, range: TextRange) -> SourceChange {
290 let file = self.imp.file_syntax(file_id); 290 let file = self.imp.file_syntax(file_id);
291 SourceChange::from_local_edit(file_id, "join lines", ra_editor::join_lines(&file, range)) 291 SourceChange::from_local_edit(file_id, ra_editor::join_lines(&file, range))
292 } 292 }
293 pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> { 293 pub fn on_enter(&self, position: FilePosition) -> Option<SourceChange> {
294 let file = self.imp.file_syntax(position.file_id); 294 let file = self.imp.file_syntax(position.file_id);
295 let edit = ra_editor::on_enter(&file, position.offset)?; 295 let edit = ra_editor::on_enter(&file, position.offset)?;
296 let res = SourceChange::from_local_edit(position.file_id, "on enter", edit); 296 let res = SourceChange::from_local_edit(position.file_id, edit);
297 Some(res) 297 Some(res)
298 } 298 }
299 pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { 299 pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> {
300 let file = self.imp.file_syntax(position.file_id); 300 let file = self.imp.file_syntax(position.file_id);
301 Some(SourceChange::from_local_edit( 301 Some(SourceChange::from_local_edit(
302 position.file_id, 302 position.file_id,
303 "add semicolon",
304 ra_editor::on_eq_typed(&file, position.offset)?, 303 ra_editor::on_eq_typed(&file, position.offset)?,
305 )) 304 ))
306 } 305 }
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml
index f316c0ab2..4be32b5f3 100644
--- a/crates/ra_db/Cargo.toml
+++ b/crates/ra_db/Cargo.toml
@@ -5,6 +5,7 @@ version = "0.1.0"
5authors = ["Aleksey Kladov <[email protected]>"] 5authors = ["Aleksey Kladov <[email protected]>"]
6 6
7[dependencies] 7[dependencies]
8backtrace = "0.3.1"
8relative-path = "0.4.0" 9relative-path = "0.4.0"
9salsa = "0.8.0" 10salsa = "0.8.0"
10rustc-hash = "1.0" 11rustc-hash = "1.0"
diff --git a/crates/ra_db/src/cancelation.rs b/crates/ra_db/src/cancelation.rs
new file mode 100644
index 000000000..73444b015
--- /dev/null
+++ b/crates/ra_db/src/cancelation.rs
@@ -0,0 +1,85 @@
1//! Utility types to support cancellation.
2//!
3//! In a typical IDE use-case, requests and modification happen concurrently, as
4//! in the following scenario:
5//!
6//! * user types a character,
7//! * a syntax highlighting process is started
8//! * user types next character, while syntax highlighting *is still in
9//! progress*.
10//!
11//! In this situation, we want to react to modification as quickly as possible.
12//! At the same time, in-progress results are not very interesting, because they
13//! are invalidated by the edit anyway. So, we first cancel all in-flight
14//! requests, and then apply modification knowing that it won't interfere with
15//! any background processing (this bit is handled by salsa, see
16//! `BaseDatabase::check_canceled` method).
17
18use std::{
19 cmp,
20 hash::{Hash, Hasher},
21 sync::Arc,
22};
23
24use backtrace::Backtrace;
25use parking_lot::Mutex;
26
27/// An "error" signifying that the operation was canceled.
28#[derive(Clone)]
29pub struct Canceled {
30 backtrace: Arc<Mutex<Backtrace>>,
31}
32
33pub type Cancelable<T> = Result<T, Canceled>;
34
35impl Canceled {
36 pub(crate) fn new() -> Canceled {
37 let bt = Backtrace::new_unresolved();
38 Canceled {
39 backtrace: Arc::new(Mutex::new(bt)),
40 }
41 }
42}
43
44impl std::fmt::Display for Canceled {
45 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
46 fmt.write_str("canceled")
47 }
48}
49
50impl std::fmt::Debug for Canceled {
51 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
52 let mut bt = self.backtrace.lock();
53 let bt: &mut Backtrace = &mut *bt;
54 bt.resolve();
55 write!(fmt, "canceled at:\n{:?}", bt)
56 }
57}
58
59impl std::error::Error for Canceled {}
60
61impl PartialEq for Canceled {
62 fn eq(&self, _: &Canceled) -> bool {
63 true
64 }
65}
66
67impl Eq for Canceled {}
68
69impl Hash for Canceled {
70 fn hash<H: Hasher>(&self, hasher: &mut H) {
71 ().hash(hasher)
72 }
73}
74
75impl cmp::Ord for Canceled {
76 fn cmp(&self, _: &Canceled) -> cmp::Ordering {
77 cmp::Ordering::Equal
78 }
79}
80
81impl cmp::PartialOrd for Canceled {
82 fn partial_cmp(&self, other: &Canceled) -> Option<cmp::Ordering> {
83 Some(self.cmp(other))
84 }
85}
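
The module docs above describe the intended flow: cancel in-flight requests first, then apply the edit. Here is a minimal sketch of the consumer side, assuming only what this diff introduces (Cancelable, Canceled, and BaseDatabase::check_canceled below); summarize_files and analyze_one are hypothetical helpers, not part of the commit.

use ra_db::{BaseDatabase, Cancelable, FileId};

// Hypothetical long-running computation: it re-checks for cancellation between
// units of work, so an incoming edit makes it return Err(Canceled) promptly
// instead of blocking the write.
fn summarize_files(db: &impl BaseDatabase, files: &[FileId]) -> Cancelable<usize> {
    let mut total = 0;
    for &file in files {
        db.check_canceled()?; // becomes Err(Canceled::new()) once the current revision is canceled
        total += analyze_one(db, file)?;
    }
    Ok(total)
}

// Stand-in for real per-file work; anything returning Cancelable composes with `?`.
fn analyze_one(db: &impl BaseDatabase, _file: FileId) -> Cancelable<usize> {
    db.check_canceled()?;
    Ok(1)
}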
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index 78f2cbf12..1f7c9187b 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -1,27 +1,17 @@
1//! ra_db defines basic database traits. Concrete DB is defined by ra_analysis. 1//! ra_db defines basic database traits. Concrete DB is defined by ra_analysis.
2mod cancelation;
2mod syntax_ptr; 3mod syntax_ptr;
3mod input; 4mod input;
4mod loc2id; 5mod loc2id;
5pub mod mock; 6pub mod mock;
6 7
7use std::sync::Arc; 8use std::sync::Arc;
9
8use ra_editor::LineIndex; 10use ra_editor::LineIndex;
9use ra_syntax::{TextUnit, SourceFileNode}; 11use ra_syntax::{TextUnit, SourceFileNode};
10 12
11#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
12pub struct Canceled;
13
14pub type Cancelable<T> = Result<T, Canceled>;
15
16impl std::fmt::Display for Canceled {
17 fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
18 fmt.write_str("canceled")
19 }
20}
21
22impl std::error::Error for Canceled {}
23
24pub use crate::{ 13pub use crate::{
14 cancelation::{Canceled, Cancelable},
25 syntax_ptr::LocalSyntaxPtr, 15 syntax_ptr::LocalSyntaxPtr,
26 input::{ 16 input::{
27 FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph, 17 FilesDatabase, FileId, CrateId, SourceRoot, SourceRootId, CrateGraph,
@@ -48,7 +38,7 @@ macro_rules! impl_numeric_id {
48pub trait BaseDatabase: salsa::Database { 38pub trait BaseDatabase: salsa::Database {
49 fn check_canceled(&self) -> Cancelable<()> { 39 fn check_canceled(&self) -> Cancelable<()> {
50 if self.salsa_runtime().is_current_revision_canceled() { 40 if self.salsa_runtime().is_current_revision_canceled() {
51 Err(Canceled) 41 Err(Canceled::new())
52 } else { 42 } else {
53 Ok(()) 43 Ok(())
54 } 44 }
diff --git a/crates/ra_editor/src/code_actions.rs b/crates/ra_editor/src/code_actions.rs
index 1d78cb7e8..7615f37a6 100644
--- a/crates/ra_editor/src/code_actions.rs
+++ b/crates/ra_editor/src/code_actions.rs
@@ -12,6 +12,7 @@ use crate::{find_node_at_offset, TextEdit, TextEditBuilder};
12 12
13#[derive(Debug)] 13#[derive(Debug)]
14pub struct LocalEdit { 14pub struct LocalEdit {
15 pub label: String,
15 pub edit: TextEdit, 16 pub edit: TextEdit,
16 pub cursor_position: Option<TextUnit>, 17 pub cursor_position: Option<TextUnit>,
17} 18}
@@ -30,6 +31,7 @@ pub fn flip_comma<'a>(
30 edit.replace(prev.range(), next.text().to_string()); 31 edit.replace(prev.range(), next.text().to_string());
31 edit.replace(next.range(), prev.text().to_string()); 32 edit.replace(next.range(), prev.text().to_string());
32 LocalEdit { 33 LocalEdit {
34 label: "flip comma".to_string(),
33 edit: edit.finish(), 35 edit: edit.finish(),
34 cursor_position: None, 36 cursor_position: None,
35 } 37 }
@@ -58,6 +60,7 @@ pub fn add_derive<'a>(
58 Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'), 60 Some(tt) => tt.syntax().range().end() - TextUnit::of_char(')'),
59 }; 61 };
60 LocalEdit { 62 LocalEdit {
63 label: "add `#[derive]`".to_string(),
61 edit: edit.finish(), 64 edit: edit.finish(),
62 cursor_position: Some(offset), 65 cursor_position: Some(offset),
63 } 66 }
@@ -109,6 +112,7 @@ pub fn add_impl<'a>(
109 buf.push_str("\n}"); 112 buf.push_str("\n}");
110 edit.insert(start_offset, buf); 113 edit.insert(start_offset, buf);
111 LocalEdit { 114 LocalEdit {
115 label: "add impl".to_string(),
112 edit: edit.finish(), 116 edit: edit.finish(),
113 cursor_position: Some(offset), 117 cursor_position: Some(offset),
114 } 118 }
@@ -148,6 +152,7 @@ pub fn introduce_variable<'a>(
148 } 152 }
149 let cursor_position = anchor_stmt.range().start() + TextUnit::of_str("let "); 153 let cursor_position = anchor_stmt.range().start() + TextUnit::of_str("let ");
150 LocalEdit { 154 LocalEdit {
155 label: "introduce variable".to_string(),
151 edit: edit.finish(), 156 edit: edit.finish(),
152 cursor_position: Some(cursor_position), 157 cursor_position: Some(cursor_position),
153 } 158 }
@@ -194,6 +199,7 @@ pub fn make_pub_crate<'a>(
194 || parent.children().any(|child| child.kind() == VISIBILITY) 199 || parent.children().any(|child| child.kind() == VISIBILITY)
195 { 200 {
196 return LocalEdit { 201 return LocalEdit {
202 label: "make pub crate".to_string(),
197 edit: edit.finish(), 203 edit: edit.finish(),
198 cursor_position: Some(offset), 204 cursor_position: Some(offset),
199 }; 205 };
@@ -201,6 +207,7 @@ pub fn make_pub_crate<'a>(
201 207
202 edit.insert(node_start, "pub(crate) ".to_string()); 208 edit.insert(node_start, "pub(crate) ".to_string());
203 LocalEdit { 209 LocalEdit {
210 label: "make pub crate".to_string(),
204 edit: edit.finish(), 211 edit: edit.finish(),
205 cursor_position: Some(node_start), 212 cursor_position: Some(node_start),
206 } 213 }
diff --git a/crates/ra_editor/src/lib.rs b/crates/ra_editor/src/lib.rs
index 619497f0b..d9b89155b 100644
--- a/crates/ra_editor/src/lib.rs
+++ b/crates/ra_editor/src/lib.rs
@@ -26,6 +26,7 @@ use ra_syntax::{
26 SyntaxKind::{self, *}, 26 SyntaxKind::{self, *},
27 SyntaxNodeRef, TextRange, TextUnit, 27 SyntaxNodeRef, TextRange, TextUnit,
28}; 28};
29use itertools::Itertools;
29 30
30#[derive(Debug)] 31#[derive(Debug)]
31pub struct HighlightedRange { 32pub struct HighlightedRange {
@@ -44,6 +45,7 @@ pub struct Diagnostic {
44 pub range: TextRange, 45 pub range: TextRange,
45 pub msg: String, 46 pub msg: String,
46 pub severity: Severity, 47 pub severity: Severity,
48 pub fix: Option<LocalEdit>,
47} 49}
48 50
49#[derive(Debug)] 51#[derive(Debug)]
@@ -113,6 +115,7 @@ pub fn diagnostics(file: &SourceFileNode) -> Vec<Diagnostic> {
113 range: location_to_range(err.location()), 115 range: location_to_range(err.location()),
114 msg: format!("Syntax Error: {}", err), 116 msg: format!("Syntax Error: {}", err),
115 severity: Severity::Error, 117 severity: Severity::Error,
118 fix: None,
116 }) 119 })
117 .collect(); 120 .collect();
118 121
@@ -126,11 +129,28 @@ fn check_unnecessary_braces_in_use_statement(file: &SourceFileNode) -> Vec<Diagn
126 let mut diagnostics = Vec::new(); 129 let mut diagnostics = Vec::new();
127 for node in file.syntax().descendants() { 130 for node in file.syntax().descendants() {
128 if let Some(use_tree_list) = ast::UseTreeList::cast(node) { 131 if let Some(use_tree_list) = ast::UseTreeList::cast(node) {
129 if use_tree_list.use_trees().count() <= 1 { 132 if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
133 let range = use_tree_list.syntax().range();
134 let edit = text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
135 single_use_tree,
136 )
137 .unwrap_or_else(|| {
138 let to_replace = single_use_tree.syntax().text().to_string();
139 let mut edit_builder = TextEditBuilder::new();
140 edit_builder.delete(range);
141 edit_builder.insert(range.start(), to_replace);
142 edit_builder.finish()
143 });
144
130 diagnostics.push(Diagnostic { 145 diagnostics.push(Diagnostic {
131 range: use_tree_list.syntax().range(), 146 range: range,
132 msg: format!("Unnecessary braces in use statement"), 147 msg: format!("Unnecessary braces in use statement"),
133 severity: Severity::WeakWarning, 148 severity: Severity::WeakWarning,
149 fix: Some(LocalEdit {
150 label: "Remove unnecessary braces".to_string(),
151 edit: edit,
152 cursor_position: None,
153 }),
134 }) 154 })
135 } 155 }
136 } 156 }
@@ -139,6 +159,28 @@ fn check_unnecessary_braces_in_use_statement(file: &SourceFileNode) -> Vec<Diagn
139 diagnostics 159 diagnostics
140} 160}
141 161
162fn text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(
163 single_use_tree: ast::UseTree,
164) -> Option<TextEdit> {
165 let use_tree_list_node = single_use_tree.syntax().parent()?;
166 if single_use_tree
167 .path()?
168 .segment()?
169 .syntax()
170 .first_child()?
171 .kind()
172 == SyntaxKind::SELF_KW
173 {
174 let start = use_tree_list_node.prev_sibling()?.range().start();
175 let end = use_tree_list_node.range().end();
176 let range = TextRange::from_to(start, end);
177 let mut edit_builder = TextEditBuilder::new();
178 edit_builder.delete(range);
179 return Some(edit_builder.finish());
180 }
181 None
182}
183
142pub fn syntax_tree(file: &SourceFileNode) -> String { 184pub fn syntax_tree(file: &SourceFileNode) -> String {
143 ::ra_syntax::utils::dump_tree(file.syntax()) 185 ::ra_syntax::utils::dump_tree(file.syntax())
144} 186}
@@ -175,8 +217,9 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>(
175 217
176#[cfg(test)] 218#[cfg(test)]
177mod tests { 219mod tests {
220 use crate::test_utils::{add_cursor, assert_eq_dbg, assert_eq_text, extract_offset};
221
178 use super::*; 222 use super::*;
179 use crate::test_utils::{add_cursor, assert_eq_dbg, extract_offset, assert_eq_text};
180 223
181 #[test] 224 #[test]
182 fn test_highlighting() { 225 fn test_highlighting() {
@@ -240,22 +283,31 @@ fn test_foo() {}
240 283
241 #[test] 284 #[test]
242 fn test_check_unnecessary_braces_in_use_statement() { 285 fn test_check_unnecessary_braces_in_use_statement() {
243 let file = SourceFileNode::parse( 286 fn check_not_applicable(code: &str) {
244 r#" 287 let file = SourceFileNode::parse(code);
245use a; 288 let diagnostics = check_unnecessary_braces_in_use_statement(&file);
246use {b}; 289 assert!(diagnostics.is_empty());
247use a::{c}; 290 }
248use a::{c, d::e}; 291
249use a::{c, d::{e}}; 292 fn check_apply(before: &str, after: &str) {
250fn main() {} 293 let file = SourceFileNode::parse(before);
251"#, 294 let diagnostic = check_unnecessary_braces_in_use_statement(&file)
295 .pop()
296 .unwrap_or_else(|| panic!("no diagnostics for:\n{}\n", before));
297 let fix = diagnostic.fix.unwrap();
298 let actual = fix.edit.apply(&before);
299 assert_eq_text!(after, &actual);
300 }
301
302 check_not_applicable(
303 "
304 use a;
305 use a::{c, d::e};
306 ",
252 ); 307 );
253 let diagnostics = check_unnecessary_braces_in_use_statement(&file); 308 check_apply("use {b};", "use b;");
254 assert_eq_dbg( 309 check_apply("use a::{c};", "use a::c;");
255 r#"[Diagnostic { range: [12; 15), msg: "Unnecessary braces in use statement", severity: WeakWarning }, 310 check_apply("use a::{self};", "use a;");
256 Diagnostic { range: [24; 27), msg: "Unnecessary braces in use statement", severity: WeakWarning }, 311 check_apply("use a::{c, d::{e}};", "use a::{c, d::e};");
257 Diagnostic { range: [61; 64), msg: "Unnecessary braces in use statement", severity: WeakWarning }]"#,
258 &diagnostics,
259 )
260 } 312 }
261} 313}
diff --git a/crates/ra_editor/src/typing.rs b/crates/ra_editor/src/typing.rs
index 5e412bcfa..21d068a7b 100644
--- a/crates/ra_editor/src/typing.rs
+++ b/crates/ra_editor/src/typing.rs
@@ -9,6 +9,7 @@ use ra_syntax::{
9 SyntaxNodeRef, TextRange, TextUnit, 9 SyntaxNodeRef, TextRange, TextUnit,
10}; 10};
11use ra_text_edit::text_utils::contains_offset_nonstrict; 11use ra_text_edit::text_utils::contains_offset_nonstrict;
12use itertools::Itertools;
12 13
13use crate::{find_node_at_offset, TextEditBuilder, LocalEdit}; 14use crate::{find_node_at_offset, TextEditBuilder, LocalEdit};
14 15
@@ -19,6 +20,7 @@ pub fn join_lines(file: &SourceFileNode, range: TextRange) -> LocalEdit {
19 let pos = match text.find('\n') { 20 let pos = match text.find('\n') {
20 None => { 21 None => {
21 return LocalEdit { 22 return LocalEdit {
23 label: "join lines".to_string(),
22 edit: TextEditBuilder::new().finish(), 24 edit: TextEditBuilder::new().finish(),
23 cursor_position: None, 25 cursor_position: None,
24 }; 26 };
@@ -51,6 +53,7 @@ pub fn join_lines(file: &SourceFileNode, range: TextRange) -> LocalEdit {
51 } 53 }
52 54
53 LocalEdit { 55 LocalEdit {
56 label: "join lines".to_string(),
54 edit: edit.finish(), 57 edit: edit.finish(),
55 cursor_position: None, 58 cursor_position: None,
56 } 59 }
@@ -76,6 +79,7 @@ pub fn on_enter(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
76 let mut edit = TextEditBuilder::new(); 79 let mut edit = TextEditBuilder::new();
77 edit.insert(offset, inserted); 80 edit.insert(offset, inserted);
78 Some(LocalEdit { 81 Some(LocalEdit {
82 label: "on enter".to_string(),
79 edit: edit.finish(), 83 edit: edit.finish(),
80 cursor_position: Some(cursor_position), 84 cursor_position: Some(cursor_position),
81 }) 85 })
@@ -126,6 +130,7 @@ pub fn on_eq_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit>
126 let mut edit = TextEditBuilder::new(); 130 let mut edit = TextEditBuilder::new();
127 edit.insert(offset, ";".to_string()); 131 edit.insert(offset, ";".to_string());
128 Some(LocalEdit { 132 Some(LocalEdit {
133 label: "add semicolon".to_string(),
129 edit: edit.finish(), 134 edit: edit.finish(),
130 cursor_position: None, 135 cursor_position: None,
131 }) 136 })
@@ -240,7 +245,7 @@ fn single_expr(block: ast::Block) -> Option<ast::Expr> {
240 245
241fn join_single_use_tree(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Option<()> { 246fn join_single_use_tree(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Option<()> {
242 let use_tree_list = ast::UseTreeList::cast(node.parent()?)?; 247 let use_tree_list = ast::UseTreeList::cast(node.parent()?)?;
243 let tree = single_use_tree(use_tree_list)?; 248 let (tree,) = use_tree_list.use_trees().collect_tuple()?;
244 edit.replace( 249 edit.replace(
245 use_tree_list.syntax().range(), 250 use_tree_list.syntax().range(),
246 tree.syntax().text().to_string(), 251 tree.syntax().text().to_string(),
@@ -248,26 +253,6 @@ fn join_single_use_tree(edit: &mut TextEditBuilder, node: SyntaxNodeRef) -> Opti
248 Some(()) 253 Some(())
249} 254}
250 255
251fn single_use_tree(tree_list: ast::UseTreeList) -> Option<ast::UseTree> {
252 let mut res = None;
253 for child in tree_list.syntax().children() {
254 if let Some(tree) = ast::UseTree::cast(child) {
255 if tree.syntax().text().contains('\n') {
256 return None;
257 }
258 if mem::replace(&mut res, Some(tree)).is_some() {
259 return None;
260 }
261 } else {
262 match child.kind() {
263 WHITESPACE | L_CURLY | R_CURLY | COMMA => (),
264 _ => return None,
265 }
266 }
267 }
268 res
269}
270
271fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str { 256fn compute_ws(left: SyntaxNodeRef, right: SyntaxNodeRef) -> &'static str {
272 match left.kind() { 257 match left.kind() {
273 L_PAREN | L_BRACK => return "", 258 L_PAREN | L_BRACK => return "",
diff --git a/crates/ra_hir/src/adt.rs b/crates/ra_hir/src/adt.rs
new file mode 100644
index 000000000..65c461148
--- /dev/null
+++ b/crates/ra_hir/src/adt.rs
@@ -0,0 +1,194 @@
1use std::sync::Arc;
2
3use ra_syntax::{SmolStr, ast::{self, NameOwner, StructFlavor}};
4
5use crate::{
6 DefId, Cancelable,
7 db::{HirDatabase},
8 type_ref::TypeRef,
9};
10
11pub struct Struct {
12 def_id: DefId,
13}
14
15impl Struct {
16 pub(crate) fn new(def_id: DefId) -> Self {
17 Struct { def_id }
18 }
19
20 pub fn def_id(&self) -> DefId {
21 self.def_id
22 }
23
24 pub fn variant_data(&self, db: &impl HirDatabase) -> Cancelable<Arc<VariantData>> {
25 Ok(db.struct_data(self.def_id)?.variant_data.clone())
26 }
27
28 pub fn struct_data(&self, db: &impl HirDatabase) -> Cancelable<Arc<StructData>> {
29 Ok(db.struct_data(self.def_id)?)
30 }
31
32 pub fn name(&self, db: &impl HirDatabase) -> Cancelable<Option<SmolStr>> {
33 Ok(db.struct_data(self.def_id)?.name.clone())
34 }
35}
36
37#[derive(Debug, Clone, PartialEq, Eq)]
38pub struct StructData {
39 name: Option<SmolStr>,
40 variant_data: Arc<VariantData>,
41}
42
43impl StructData {
44 pub(crate) fn new(struct_def: ast::StructDef) -> StructData {
45 let name = struct_def.name().map(|n| n.text());
46 let variant_data = VariantData::new(struct_def.flavor());
47 let variant_data = Arc::new(variant_data);
48 StructData { name, variant_data }
49 }
50
51 pub fn name(&self) -> Option<&SmolStr> {
52 self.name.as_ref()
53 }
54
55 pub fn variant_data(&self) -> &Arc<VariantData> {
56 &self.variant_data
57 }
58}
59
60pub struct Enum {
61 def_id: DefId,
62}
63
64impl Enum {
65 pub(crate) fn new(def_id: DefId) -> Self {
66 Enum { def_id }
67 }
68
69 pub fn def_id(&self) -> DefId {
70 self.def_id
71 }
72
73 pub fn name(&self, db: &impl HirDatabase) -> Cancelable<Option<SmolStr>> {
74 Ok(db.enum_data(self.def_id)?.name.clone())
75 }
76}
77
78#[derive(Debug, Clone, PartialEq, Eq)]
79pub struct EnumData {
80 name: Option<SmolStr>,
81 variants: Vec<(SmolStr, Arc<VariantData>)>,
82}
83
84impl EnumData {
85 pub(crate) fn new(enum_def: ast::EnumDef) -> Self {
86 let name = enum_def.name().map(|n| n.text());
87 let variants = if let Some(evl) = enum_def.variant_list() {
88 evl.variants()
89 .map(|v| {
90 (
91 v.name()
92 .map(|n| n.text())
93 .unwrap_or_else(|| SmolStr::new("[error]")),
94 Arc::new(VariantData::new(v.flavor())),
95 )
96 })
97 .collect()
98 } else {
99 Vec::new()
100 };
101 EnumData { name, variants }
102 }
103}
104
105/// A single field of an enum variant or struct
106#[derive(Debug, Clone, PartialEq, Eq)]
107pub struct StructField {
108 name: SmolStr,
109 type_ref: TypeRef,
110}
111
112impl StructField {
113 pub fn name(&self) -> SmolStr {
114 self.name.clone()
115 }
116 pub fn type_ref(&self) -> &TypeRef {
117 &self.type_ref
118 }
119}
120
121/// Fields of an enum variant or struct
122#[derive(Debug, Clone, PartialEq, Eq)]
123pub enum VariantData {
124 Struct(Vec<StructField>),
125 Tuple(Vec<StructField>),
126 Unit,
127}
128
129impl VariantData {
130 pub fn new(flavor: StructFlavor) -> Self {
131 match flavor {
132 StructFlavor::Tuple(fl) => {
133 let fields = fl
134 .fields()
135 .enumerate()
136 .map(|(i, fd)| StructField {
137 name: SmolStr::new(i.to_string()),
138 type_ref: TypeRef::from_ast_opt(fd.type_ref()),
139 })
140 .collect();
141 VariantData::Tuple(fields)
142 }
143 StructFlavor::Named(fl) => {
144 let fields = fl
145 .fields()
146 .map(|fd| StructField {
147 name: fd
148 .name()
149 .map(|n| n.text())
150 .unwrap_or_else(|| SmolStr::new("[error]")),
151 type_ref: TypeRef::from_ast_opt(fd.type_ref()),
152 })
153 .collect();
154 VariantData::Struct(fields)
155 }
156 StructFlavor::Unit => VariantData::Unit,
157 }
158 }
159
160 pub(crate) fn get_field_type_ref(&self, field_name: &str) -> Option<&TypeRef> {
161 self.fields()
162 .iter()
163 .find(|f| f.name == field_name)
164 .map(|f| &f.type_ref)
165 }
166
167 pub fn fields(&self) -> &[StructField] {
168 match *self {
169 VariantData::Struct(ref fields) | VariantData::Tuple(ref fields) => fields,
170 _ => &[],
171 }
172 }
173 pub fn is_struct(&self) -> bool {
174 if let VariantData::Struct(..) = *self {
175 true
176 } else {
177 false
178 }
179 }
180 pub fn is_tuple(&self) -> bool {
181 if let VariantData::Tuple(..) = *self {
182 true
183 } else {
184 false
185 }
186 }
187 pub fn is_unit(&self) -> bool {
188 if let VariantData::Unit = *self {
189 true
190 } else {
191 false
192 }
193 }
194}
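
For a concrete picture of the three flavors handled by VariantData::new above (an illustration based on the code in this file, not an excerpt from the commit):

// struct Named { the_field: u32 }  ->  VariantData::Struct, one StructField named "the_field"
// struct Pair(u32, String);        ->  VariantData::Tuple, fields named "0" and "1" by position
// struct Marker;                   ->  VariantData::Unit, fields() returns an empty slice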
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index d94f75857..e7f9afa77 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -1,6 +1,7 @@
1use std::sync::Arc; 1use std::sync::Arc;
2 2
3use ra_syntax::{ 3use ra_syntax::{
4 SmolStr,
4 SyntaxNode, 5 SyntaxNode,
5 ast::FnDefNode, 6 ast::FnDefNode,
6}; 7};
@@ -15,6 +16,7 @@ use crate::{
15 module::{ModuleId, ModuleTree, ModuleSource, 16 module::{ModuleId, ModuleTree, ModuleSource,
16 nameres::{ItemMap, InputModuleItems}}, 17 nameres::{ItemMap, InputModuleItems}},
17 ty::{InferenceResult, Ty}, 18 ty::{InferenceResult, Ty},
19 adt::{StructData, EnumData},
18}; 20};
19 21
20salsa::query_group! { 22salsa::query_group! {
@@ -31,6 +33,16 @@ pub trait HirDatabase: SyntaxDatabase
31 use fn query_definitions::fn_syntax; 33 use fn query_definitions::fn_syntax;
32 } 34 }
33 35
36 fn struct_data(def_id: DefId) -> Cancelable<Arc<StructData>> {
37 type StructDataQuery;
38 use fn query_definitions::struct_data;
39 }
40
41 fn enum_data(def_id: DefId) -> Cancelable<Arc<EnumData>> {
42 type EnumDataQuery;
43 use fn query_definitions::enum_data;
44 }
45
34 fn infer(fn_id: FnId) -> Cancelable<Arc<InferenceResult>> { 46 fn infer(fn_id: FnId) -> Cancelable<Arc<InferenceResult>> {
35 type InferQuery; 47 type InferQuery;
36 use fn query_definitions::infer; 48 use fn query_definitions::infer;
@@ -41,6 +53,11 @@ pub trait HirDatabase: SyntaxDatabase
41 use fn query_definitions::type_for_def; 53 use fn query_definitions::type_for_def;
42 } 54 }
43 55
56 fn type_for_field(def_id: DefId, field: SmolStr) -> Cancelable<Ty> {
57 type TypeForFieldQuery;
58 use fn query_definitions::type_for_field;
59 }
60
44 fn file_items(file_id: FileId) -> Arc<SourceFileItems> { 61 fn file_items(file_id: FileId) -> Arc<SourceFileItems> {
45 type SourceFileItemsQuery; 62 type SourceFileItemsQuery;
46 use fn query_definitions::file_items; 63 use fn query_definitions::file_items;
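
The new type_for_field query gives completion and inference a single way to go from a struct's DefId plus a field name to that field's Ty, with salsa memoizing the result per (DefId, field) pair. A caller-side sketch, assuming only the query signature declared above; lookup_field_ty is a made-up wrapper name.

use ra_syntax::SmolStr;
use crate::{db::HirDatabase, Cancelable, DefId, Ty};

fn lookup_field_ty(db: &impl HirDatabase, struct_def: DefId, field: &str) -> Cancelable<Ty> {
    // Repeated lookups during inference or dot-completion hit the memoized query.
    db.type_for_field(struct_def, SmolStr::new(field))
}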
diff --git a/crates/ra_hir/src/function.rs b/crates/ra_hir/src/function.rs
index d36477b48..01f0f3a66 100644
--- a/crates/ra_hir/src/function.rs
+++ b/crates/ra_hir/src/function.rs
@@ -46,8 +46,7 @@ impl Function {
46 } 46 }
47 47
48 pub fn module(&self, db: &impl HirDatabase) -> Cancelable<Module> { 48 pub fn module(&self, db: &impl HirDatabase) -> Cancelable<Module> {
49 let loc = self.fn_id.0.loc(db); 49 self.fn_id.0.module(db)
50 Module::new(db, loc.source_root_id, loc.module_id)
51 } 50 }
52} 51}
53 52
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index a0d99a84d..f1cc0ccd0 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -25,6 +25,8 @@ pub mod source_binder;
25mod krate; 25mod krate;
26mod module; 26mod module;
27mod function; 27mod function;
28mod adt;
29mod type_ref;
28mod ty; 30mod ty;
29 31
30use std::ops::Index; 32use std::ops::Index;
@@ -40,8 +42,10 @@ use crate::{
40pub use self::{ 42pub use self::{
41 path::{Path, PathKind}, 43 path::{Path, PathKind},
42 krate::Crate, 44 krate::Crate,
43 module::{Module, ModuleId, Problem, nameres::ItemMap, ModuleScope, Resolution}, 45 module::{Module, ModuleId, Problem, nameres::{ItemMap, PerNs, Namespace}, ModuleScope, Resolution},
44 function::{Function, FnScopes}, 46 function::{Function, FnScopes},
47 adt::{Struct, Enum},
48 ty::Ty,
45}; 49};
46 50
47pub use self::function::FnSignatureInfo; 51pub use self::function::FnSignatureInfo;
@@ -56,7 +60,11 @@ ra_db::impl_numeric_id!(DefId);
56pub(crate) enum DefKind { 60pub(crate) enum DefKind {
57 Module, 61 Module,
58 Function, 62 Function,
63 Struct,
64 Enum,
59 Item, 65 Item,
66
67 StructCtor,
60} 68}
61 69
62#[derive(Clone, Debug, PartialEq, Eq, Hash)] 70#[derive(Clone, Debug, PartialEq, Eq, Hash)]
@@ -68,18 +76,18 @@ pub struct DefLoc {
68} 76}
69 77
70impl DefKind { 78impl DefKind {
71 pub(crate) fn for_syntax_kind(kind: SyntaxKind) -> Option<DefKind> { 79 pub(crate) fn for_syntax_kind(kind: SyntaxKind) -> PerNs<DefKind> {
72 match kind { 80 match kind {
73 SyntaxKind::FN_DEF => Some(DefKind::Function), 81 SyntaxKind::FN_DEF => PerNs::values(DefKind::Function),
74 SyntaxKind::MODULE => Some(DefKind::Module), 82 SyntaxKind::MODULE => PerNs::types(DefKind::Module),
83 SyntaxKind::STRUCT_DEF => PerNs::both(DefKind::Struct, DefKind::StructCtor),
84 SyntaxKind::ENUM_DEF => PerNs::types(DefKind::Enum),
75 // These define items, but don't have their own DefKinds yet: 85 // These define items, but don't have their own DefKinds yet:
76 SyntaxKind::STRUCT_DEF => Some(DefKind::Item), 86 SyntaxKind::TRAIT_DEF => PerNs::types(DefKind::Item),
77 SyntaxKind::ENUM_DEF => Some(DefKind::Item), 87 SyntaxKind::TYPE_DEF => PerNs::types(DefKind::Item),
78 SyntaxKind::TRAIT_DEF => Some(DefKind::Item), 88 SyntaxKind::CONST_DEF => PerNs::values(DefKind::Item),
79 SyntaxKind::TYPE_DEF => Some(DefKind::Item), 89 SyntaxKind::STATIC_DEF => PerNs::values(DefKind::Item),
80 SyntaxKind::CONST_DEF => Some(DefKind::Item), 90 _ => PerNs::none(),
81 SyntaxKind::STATIC_DEF => Some(DefKind::Item),
82 _ => None,
83 } 91 }
84 } 92 }
85} 93}
@@ -99,6 +107,8 @@ impl DefLoc {
99pub enum Def { 107pub enum Def {
100 Module(Module), 108 Module(Module),
101 Function(Function), 109 Function(Function),
110 Struct(Struct),
111 Enum(Enum),
102 Item, 112 Item,
103} 113}
104 114
@@ -114,10 +124,25 @@ impl DefId {
114 let function = Function::new(self); 124 let function = Function::new(self);
115 Def::Function(function) 125 Def::Function(function)
116 } 126 }
127 DefKind::Struct => {
128 let struct_def = Struct::new(self);
129 Def::Struct(struct_def)
130 }
131 DefKind::Enum => {
132 let enum_def = Enum::new(self);
133 Def::Enum(enum_def)
134 }
135 DefKind::StructCtor => Def::Item,
117 DefKind::Item => Def::Item, 136 DefKind::Item => Def::Item,
118 }; 137 };
119 Ok(res) 138 Ok(res)
120 } 139 }
140
141 /// For a module, returns that module; for any other def, returns the containing module.
142 pub fn module(self, db: &impl HirDatabase) -> Cancelable<Module> {
143 let loc = self.loc(db);
144 Module::new(db, loc.source_root_id, loc.module_id)
145 }
121} 146}
122 147
123/// Identifier of item within a specific file. This is stable over reparses, so 148/// Identifier of item within a specific file. This is stable over reparses, so
diff --git a/crates/ra_hir/src/mock.rs b/crates/ra_hir/src/mock.rs
index b5a997170..f6882cb77 100644
--- a/crates/ra_hir/src/mock.rs
+++ b/crates/ra_hir/src/mock.rs
@@ -193,6 +193,9 @@ salsa::database_storage! {
193 fn submodules() for db::SubmodulesQuery; 193 fn submodules() for db::SubmodulesQuery;
194 fn infer() for db::InferQuery; 194 fn infer() for db::InferQuery;
195 fn type_for_def() for db::TypeForDefQuery; 195 fn type_for_def() for db::TypeForDefQuery;
196 fn type_for_field() for db::TypeForFieldQuery;
197 fn struct_data() for db::StructDataQuery;
198 fn enum_data() for db::EnumDataQuery;
196 } 199 }
197 } 200 }
198} 201}
diff --git a/crates/ra_hir/src/module.rs b/crates/ra_hir/src/module.rs
index 891119953..b9d36f01f 100644
--- a/crates/ra_hir/src/module.rs
+++ b/crates/ra_hir/src/module.rs
@@ -17,7 +17,7 @@ use crate::{
17 arena::{Arena, Id}, 17 arena::{Arena, Id},
18}; 18};
19 19
20pub use self::nameres::{ModuleScope, Resolution}; 20pub use self::nameres::{ModuleScope, Resolution, Namespace, PerNs};
21 21
22/// `Module` is API entry point to get all the information 22/// `Module` is API entry point to get all the information
23/// about a particular module. 23/// about a particular module.
@@ -115,16 +115,29 @@ impl Module {
115 Ok(res) 115 Ok(res)
116 } 116 }
117 117
118 pub fn resolve_path(&self, db: &impl HirDatabase, path: Path) -> Cancelable<Option<DefId>> { 118 pub fn resolve_path(&self, db: &impl HirDatabase, path: &Path) -> Cancelable<PerNs<DefId>> {
119 let mut curr = match path.kind { 119 let mut curr_per_ns = PerNs::types(
120 PathKind::Crate => self.crate_root(), 120 match path.kind {
121 PathKind::Self_ | PathKind::Plain => self.clone(), 121 PathKind::Crate => self.crate_root(),
122 PathKind::Super => ctry!(self.parent()), 122 PathKind::Self_ | PathKind::Plain => self.clone(),
123 } 123 PathKind::Super => {
124 .def_id(db); 124 if let Some(p) = self.parent() {
125 p
126 } else {
127 return Ok(PerNs::none());
128 }
129 }
130 }
131 .def_id(db),
132 );
125 133
126 let segments = path.segments; 134 let segments = &path.segments;
127 for name in segments.iter() { 135 for name in segments.iter() {
136 let curr = if let Some(r) = curr_per_ns.as_ref().take(Namespace::Types) {
137 r
138 } else {
139 return Ok(PerNs::none());
140 };
128 let module = match curr.loc(db) { 141 let module = match curr.loc(db) {
129 DefLoc { 142 DefLoc {
130 kind: DefKind::Module, 143 kind: DefKind::Module,
@@ -132,12 +145,17 @@ impl Module {
132 module_id, 145 module_id,
133 .. 146 ..
134 } => Module::new(db, source_root_id, module_id)?, 147 } => Module::new(db, source_root_id, module_id)?,
135 _ => return Ok(None), 148 // TODO here would be the place to handle enum variants...
149 _ => return Ok(PerNs::none()),
136 }; 150 };
137 let scope = module.scope(db)?; 151 let scope = module.scope(db)?;
138 curr = ctry!(ctry!(scope.get(&name)).def_id); 152 curr_per_ns = if let Some(r) = scope.get(&name) {
153 r.def_id
154 } else {
155 return Ok(PerNs::none());
156 };
139 } 157 }
140 Ok(Some(curr)) 158 Ok(curr_per_ns)
141 } 159 }
142 160
143 pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> { 161 pub fn problems(&self, db: &impl HirDatabase) -> Vec<(SyntaxNode, Problem)> {
@@ -145,7 +163,7 @@ impl Module {
145 } 163 }
146} 164}
147 165
148/// Phisically, rust source is organized as a set of files, but logically it is 166/// Physically, rust source is organized as a set of files, but logically it is
149/// organized as a tree of modules. Usually, a single file corresponds to a 167/// organized as a tree of modules. Usually, a single file corresponds to a
150/// single module, but it is not nessary the case. 168/// single module, but it is not nessary the case.
151/// 169///
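
Callers adapt to the new resolve_path by choosing a namespace from the returned PerNs, as the complete_path change earlier in this diff does with take_types. A minimal caller sketch assuming only the items shown in this commit; resolve_type_def is a made-up helper name.

use crate::{db::HirDatabase, Cancelable, Def, Module, Path};

fn resolve_type_def(db: &impl HirDatabase, module: &Module, path: &Path) -> Cancelable<Option<Def>> {
    // Pick the *types* namespace; the same name may resolve differently
    // (or not at all) in the values namespace.
    let def_id = match module.resolve_path(db, path)?.take_types() {
        Some(it) => it,
        None => return Ok(None),
    };
    Ok(Some(def_id.resolve(db)?))
}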
diff --git a/crates/ra_hir/src/module/nameres.rs b/crates/ra_hir/src/module/nameres.rs
index 0b152a406..98cd225dd 100644
--- a/crates/ra_hir/src/module/nameres.rs
+++ b/crates/ra_hir/src/module/nameres.rs
@@ -118,22 +118,96 @@ enum ImportKind {
118#[derive(Debug, Clone, PartialEq, Eq)] 118#[derive(Debug, Clone, PartialEq, Eq)]
119pub struct Resolution { 119pub struct Resolution {
120 /// None for unresolved 120 /// None for unresolved
121 pub def_id: Option<DefId>, 121 pub def_id: PerNs<DefId>,
122 /// ident by whitch this is imported into local scope. 122 /// ident by whitch this is imported into local scope.
123 pub import: Option<NamedImport>, 123 pub import: Option<NamedImport>,
124} 124}
125 125
126// #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)] 126#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
127// enum Namespace { 127pub enum Namespace {
128// Types, 128 Types,
129// Values, 129 Values,
130// } 130}
131
132#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
133pub struct PerNs<T> {
134 pub types: Option<T>,
135 pub values: Option<T>,
136}
137
138impl<T> PerNs<T> {
139 pub fn none() -> PerNs<T> {
140 PerNs {
141 types: None,
142 values: None,
143 }
144 }
145
146 pub fn values(t: T) -> PerNs<T> {
147 PerNs {
148 types: None,
149 values: Some(t),
150 }
151 }
152
153 pub fn types(t: T) -> PerNs<T> {
154 PerNs {
155 types: Some(t),
156 values: None,
157 }
158 }
159
160 pub fn both(types: T, values: T) -> PerNs<T> {
161 PerNs {
162 types: Some(types),
163 values: Some(values),
164 }
165 }
166
167 pub fn is_none(&self) -> bool {
168 self.types.is_none() && self.values.is_none()
169 }
170
171 pub fn take(self, namespace: Namespace) -> Option<T> {
172 match namespace {
173 Namespace::Types => self.types,
174 Namespace::Values => self.values,
175 }
176 }
177
178 pub fn take_types(self) -> Option<T> {
179 self.types
180 }
181
182 pub fn take_values(self) -> Option<T> {
183 self.values
184 }
131 185
132// #[derive(Debug)] 186 pub fn get(&self, namespace: Namespace) -> Option<&T> {
133// struct PerNs<T> { 187 self.as_ref().take(namespace)
134// types: Option<T>, 188 }
135// values: Option<T>, 189
136// } 190 pub fn as_ref(&self) -> PerNs<&T> {
191 PerNs {
192 types: self.types.as_ref(),
193 values: self.values.as_ref(),
194 }
195 }
196
197 pub fn and_then<U>(self, f: impl Fn(T) -> Option<U>) -> PerNs<U> {
198 PerNs {
199 types: self.types.and_then(&f),
200 values: self.values.and_then(&f),
201 }
202 }
203
204 pub fn map<U>(self, f: impl Fn(T) -> U) -> PerNs<U> {
205 PerNs {
206 types: self.types.map(&f),
207 values: self.values.map(&f),
208 }
209 }
210}
137 211
138impl InputModuleItems { 212impl InputModuleItems {
139 pub(crate) fn new<'a>( 213 pub(crate) fn new<'a>(
@@ -254,7 +328,7 @@ where
254 for dep in krate.dependencies(self.db) { 328 for dep in krate.dependencies(self.db) {
255 if let Some(module) = dep.krate.root_module(self.db)? { 329 if let Some(module) = dep.krate.root_module(self.db)? {
256 let def_id = module.def_id(self.db); 330 let def_id = module.def_id(self.db);
257 self.add_module_item(&mut module_items, dep.name, def_id); 331 self.add_module_item(&mut module_items, dep.name, PerNs::types(def_id));
258 } 332 }
259 } 333 }
260 }; 334 };
@@ -265,7 +339,7 @@ where
265 module_items.items.insert( 339 module_items.items.insert(
266 name.clone(), 340 name.clone(),
267 Resolution { 341 Resolution {
268 def_id: None, 342 def_id: PerNs::none(),
269 import: Some(import), 343 import: Some(import),
270 }, 344 },
271 ); 345 );
@@ -277,18 +351,23 @@ where
277 if item.kind == MODULE { 351 if item.kind == MODULE {
278 continue; 352 continue;
279 } 353 }
280 let def_loc = DefLoc { 354 // depending on the item kind, the location can define something in
281 kind: DefKind::for_syntax_kind(item.kind).unwrap_or(DefKind::Item), 355 // the values namespace, the types namespace, or both
282 source_root_id: self.source_root, 356 let kind = DefKind::for_syntax_kind(item.kind);
283 module_id, 357 let def_id = kind.map(|k| {
284 source_item_id: SourceItemId { 358 let def_loc = DefLoc {
285 file_id, 359 kind: k,
286 item_id: Some(item.id), 360 source_root_id: self.source_root,
287 }, 361 module_id,
288 }; 362 source_item_id: SourceItemId {
289 let def_id = def_loc.id(self.db); 363 file_id,
364 item_id: Some(item.id),
365 },
366 };
367 def_loc.id(self.db)
368 });
290 let resolution = Resolution { 369 let resolution = Resolution {
291 def_id: Some(def_id), 370 def_id,
292 import: None, 371 import: None,
293 }; 372 };
294 module_items.items.insert(item.name.clone(), resolution); 373 module_items.items.insert(item.name.clone(), resolution);
@@ -303,16 +382,16 @@ where
303 source_item_id: module_id.source(&self.module_tree).0, 382 source_item_id: module_id.source(&self.module_tree).0,
304 }; 383 };
305 let def_id = def_loc.id(self.db); 384 let def_id = def_loc.id(self.db);
306 self.add_module_item(&mut module_items, name, def_id); 385 self.add_module_item(&mut module_items, name, PerNs::types(def_id));
307 } 386 }
308 387
309 self.result.per_module.insert(module_id, module_items); 388 self.result.per_module.insert(module_id, module_items);
310 Ok(()) 389 Ok(())
311 } 390 }
312 391
313 fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, def_id: DefId) { 392 fn add_module_item(&self, module_items: &mut ModuleScope, name: SmolStr, def_id: PerNs<DefId>) {
314 let resolution = Resolution { 393 let resolution = Resolution {
315 def_id: Some(def_id), 394 def_id,
316 import: None, 395 import: None,
317 }; 396 };
318 module_items.items.insert(name, resolution); 397 module_items.items.insert(name, resolution);
@@ -347,15 +426,17 @@ where
347 let is_last = i == import.path.segments.len() - 1; 426 let is_last = i == import.path.segments.len() - 1;
348 427
349 let def_id = match self.result.per_module[&curr].items.get(name) { 428 let def_id = match self.result.per_module[&curr].items.get(name) {
350 None => return Ok(()), 429 Some(res) if !res.def_id.is_none() => res.def_id,
351 Some(res) => match res.def_id { 430 _ => return Ok(()),
352 Some(it) => it,
353 None => return Ok(()),
354 },
355 }; 431 };
356 432
357 if !is_last { 433 if !is_last {
358 curr = match def_id.loc(self.db) { 434 let type_def_id = if let Some(d) = def_id.take(Namespace::Types) {
435 d
436 } else {
437 return Ok(());
438 };
439 curr = match type_def_id.loc(self.db) {
359 DefLoc { 440 DefLoc {
360 kind: DefKind::Module, 441 kind: DefKind::Module,
361 module_id: target_module_id, 442 module_id: target_module_id,
@@ -370,10 +451,11 @@ where
370 segments: import.path.segments[i + 1..].iter().cloned().collect(), 451 segments: import.path.segments[i + 1..].iter().cloned().collect(),
371 kind: PathKind::Crate, 452 kind: PathKind::Crate,
372 }; 453 };
373 if let Some(def_id) = module.resolve_path(self.db, path)? { 454 let def_id = module.resolve_path(self.db, &path)?;
455 if !def_id.is_none() {
374 self.update(module_id, |items| { 456 self.update(module_id, |items| {
375 let res = Resolution { 457 let res = Resolution {
376 def_id: Some(def_id), 458 def_id: def_id,
377 import: Some(ptr), 459 import: Some(ptr),
378 }; 460 };
379 items.items.insert(name.clone(), res); 461 items.items.insert(name.clone(), res);
@@ -387,7 +469,7 @@ where
387 } else { 469 } else {
388 self.update(module_id, |items| { 470 self.update(module_id, |items| {
389 let res = Resolution { 471 let res = Resolution {
390 def_id: Some(def_id), 472 def_id: def_id,
391 import: Some(ptr), 473 import: Some(ptr),
392 }; 474 };
393 items.items.insert(name.clone(), res); 475 items.items.insert(name.clone(), res);
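
The core addition in nameres.rs is PerNs<T>: a resolution is now recorded separately for the type and value namespaces, so one name can resolve to a type, a value, or both (for example a tuple struct, which the old comment removed from ty.rs notes has a function type as a value while also defining the struct type). Below is a minimal self-contained sketch of the pattern, mirroring the definition above rather than importing the crate; the DefId strings are illustrative only.

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum Namespace {
    Types,
    Values,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct PerNs<T> {
    types: Option<T>,
    values: Option<T>,
}

impl<T> PerNs<T> {
    fn none() -> PerNs<T> {
        PerNs { types: None, values: None }
    }
    fn types(t: T) -> PerNs<T> {
        PerNs { types: Some(t), values: None }
    }
    fn both(types: T, values: T) -> PerNs<T> {
        PerNs { types: Some(types), values: Some(values) }
    }
    fn take(self, namespace: Namespace) -> Option<T> {
        match namespace {
            Namespace::Types => self.types,
            Namespace::Values => self.values,
        }
    }
}

fn main() {
    // A module only occupies the type namespace...
    let module_res: PerNs<&str> = PerNs::types("DefId(mod foo)");
    // ...while a tuple struct defines both a type and a constructor value.
    let struct_res: PerNs<&str> = PerNs::both("DefId(struct Foo)", "DefId(Foo ctor)");

    assert_eq!(module_res.take(Namespace::Values), None);
    assert_eq!(struct_res.take(Namespace::Types), Some("DefId(struct Foo)"));
    assert!(PerNs::<&str>::none().take(Namespace::Values).is_none());
}
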
diff --git a/crates/ra_hir/src/module/nameres/tests.rs b/crates/ra_hir/src/module/nameres/tests.rs
index 3e29c3954..03ea5c1d6 100644
--- a/crates/ra_hir/src/module/nameres/tests.rs
+++ b/crates/ra_hir/src/module/nameres/tests.rs
@@ -40,7 +40,7 @@ fn item_map_smoke_test() {
40 ); 40 );
41 let name = SmolStr::from("Baz"); 41 let name = SmolStr::from("Baz");
42 let resolution = &item_map.per_module[&module_id].items[&name]; 42 let resolution = &item_map.per_module[&module_id].items[&name];
43 assert!(resolution.def_id.is_some()); 43 assert!(resolution.def_id.take_types().is_some());
44} 44}
45 45
46#[test] 46#[test]
@@ -59,7 +59,7 @@ fn test_self() {
59 ); 59 );
60 let name = SmolStr::from("Baz"); 60 let name = SmolStr::from("Baz");
61 let resolution = &item_map.per_module[&module_id].items[&name]; 61 let resolution = &item_map.per_module[&module_id].items[&name];
62 assert!(resolution.def_id.is_some()); 62 assert!(resolution.def_id.take_types().is_some());
63} 63}
64 64
65#[test] 65#[test]
@@ -92,7 +92,7 @@ fn item_map_across_crates() {
92 92
93 let name = SmolStr::from("Baz"); 93 let name = SmolStr::from("Baz");
94 let resolution = &item_map.per_module[&module_id].items[&name]; 94 let resolution = &item_map.per_module[&module_id].items[&name];
95 assert!(resolution.def_id.is_some()); 95 assert!(resolution.def_id.take_types().is_some());
96} 96}
97 97
98#[test] 98#[test]
diff --git a/crates/ra_hir/src/path.rs b/crates/ra_hir/src/path.rs
index e04d00900..0b260072c 100644
--- a/crates/ra_hir/src/path.rs
+++ b/crates/ra_hir/src/path.rs
@@ -1,12 +1,12 @@
1use ra_syntax::{SmolStr, ast, AstNode, TextRange}; 1use ra_syntax::{SmolStr, ast, AstNode, TextRange};
2 2
3#[derive(Debug, Clone, PartialEq, Eq)] 3#[derive(Debug, Clone, PartialEq, Eq, Hash)]
4pub struct Path { 4pub struct Path {
5 pub kind: PathKind, 5 pub kind: PathKind,
6 pub segments: Vec<SmolStr>, 6 pub segments: Vec<SmolStr>,
7} 7}
8 8
9#[derive(Debug, Clone, Copy, PartialEq, Eq)] 9#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
10pub enum PathKind { 10pub enum PathKind {
11 Plain, 11 Plain,
12 Self_, 12 Self_,
diff --git a/crates/ra_hir/src/query_definitions.rs b/crates/ra_hir/src/query_definitions.rs
index b654af920..4a7958a12 100644
--- a/crates/ra_hir/src/query_definitions.rs
+++ b/crates/ra_hir/src/query_definitions.rs
@@ -19,7 +19,8 @@ use crate::{
19 imp::Submodule, 19 imp::Submodule,
20 nameres::{InputModuleItems, ItemMap, Resolver}, 20 nameres::{InputModuleItems, ItemMap, Resolver},
21 }, 21 },
22 ty::{self, InferenceResult, Ty} 22 ty::{self, InferenceResult, Ty},
23 adt::{StructData, EnumData},
23}; 24};
24 25
25/// Resolve `FnId` to the corresponding `SyntaxNode` 26/// Resolve `FnId` to the corresponding `SyntaxNode`
@@ -45,6 +46,32 @@ pub(super) fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<T
45 ty::type_for_def(db, def_id) 46 ty::type_for_def(db, def_id)
46} 47}
47 48
49pub(super) fn type_for_field(
50 db: &impl HirDatabase,
51 def_id: DefId,
52 field: SmolStr,
53) -> Cancelable<Ty> {
54 ty::type_for_field(db, def_id, field)
55}
56
57pub(super) fn struct_data(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Arc<StructData>> {
58 let def_loc = def_id.loc(db);
59 assert!(def_loc.kind == DefKind::Struct);
60 let syntax = db.file_item(def_loc.source_item_id);
61 let struct_def =
62 ast::StructDef::cast(syntax.borrowed()).expect("struct def should point to StructDef node");
63 Ok(Arc::new(StructData::new(struct_def.borrowed())))
64}
65
66pub(super) fn enum_data(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Arc<EnumData>> {
67 let def_loc = def_id.loc(db);
68 assert!(def_loc.kind == DefKind::Enum);
69 let syntax = db.file_item(def_loc.source_item_id);
70 let enum_def =
71 ast::EnumDef::cast(syntax.borrowed()).expect("enum def should point to EnumDef node");
72 Ok(Arc::new(EnumData::new(enum_def.borrowed())))
73}
74
48pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> { 75pub(super) fn file_items(db: &impl HirDatabase, file_id: FileId) -> Arc<SourceFileItems> {
49 let mut res = SourceFileItems::new(file_id); 76 let mut res = SourceFileItems::new(file_id);
50 let source_file = db.source_file(file_id); 77 let source_file = db.source_file(file_id);
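
Both new queries above follow the same shape: look up the item's location, assert the expected DefKind, cast the stored syntax node to the matching AST type, and cache the lowered data behind an Arc. A toy standalone version of that shape follows; everything in it is a stand-in (a plain string replaces the syntax lookup and AST cast), not the crate's API.

use std::sync::Arc;

#[derive(Debug, PartialEq)]
enum DefKind {
    Struct,
    Enum,
}

#[derive(Debug)]
struct StructData {
    name: String,
}

// Stand-in for the real query: assert the kind, "cast" the source, build the
// data once, and hand it out behind an Arc.
fn struct_data(kind: DefKind, source: &str) -> Arc<StructData> {
    assert_eq!(kind, DefKind::Struct, "struct_data called on a non-struct def");
    let name = source
        .trim_start_matches("struct ")
        .trim_end_matches(';')
        .trim()
        .to_string();
    Arc::new(StructData { name })
}

fn main() {
    let data = struct_data(DefKind::Struct, "struct Foo;");
    assert_eq!(data.name, "Foo");
}
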
diff --git a/crates/ra_hir/src/ty.rs b/crates/ra_hir/src/ty.rs
index c759d4c8b..67b523c2c 100644
--- a/crates/ra_hir/src/ty.rs
+++ b/crates/ra_hir/src/ty.rs
@@ -11,13 +11,18 @@ use rustc_hash::{FxHashMap};
11use ra_db::{LocalSyntaxPtr, Cancelable}; 11use ra_db::{LocalSyntaxPtr, Cancelable};
12use ra_syntax::{ 12use ra_syntax::{
13 SmolStr, 13 SmolStr,
14 ast::{self, AstNode, LoopBodyOwner, ArgListOwner}, 14 ast::{self, AstNode, LoopBodyOwner, ArgListOwner, PrefixOp},
15 SyntaxNodeRef 15 SyntaxNodeRef
16}; 16};
17 17
18use crate::{Def, DefId, FnScopes, Module, Function, Path, db::HirDatabase}; 18use crate::{
19 Def, DefId, FnScopes, Module, Function, Struct, Enum, Path,
20 db::HirDatabase,
21 adt::VariantData,
22 type_ref::{TypeRef, Mutability},
23};
19 24
20#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 25#[derive(Clone, PartialEq, Eq, Hash, Debug)]
21pub enum Ty { 26pub enum Ty {
22 /// The primitive boolean type. Written as `bool`. 27 /// The primitive boolean type. Written as `bool`.
23 Bool, 28 Bool,
@@ -35,8 +40,15 @@ pub enum Ty {
35 /// A primitive floating-point type. For example, `f64`. 40 /// A primitive floating-point type. For example, `f64`.
36 Float(primitive::FloatTy), 41 Float(primitive::FloatTy),
37 42
38 // Structures, enumerations and unions. 43 /// Structures, enumerations and unions.
39 // Adt(AdtDef, Substs), 44 Adt {
45 /// The DefId of the struct/enum.
46 def_id: DefId,
47 /// The name, for displaying.
48 name: SmolStr,
49 // later we'll need generic substitutions here
50 },
51
40 /// The pointee of a string slice. Written as `str`. 52 /// The pointee of a string slice. Written as `str`.
41 Str, 53 Str,
42 54
@@ -45,12 +57,13 @@ pub enum Ty {
45 /// The pointee of an array slice. Written as `[T]`. 57 /// The pointee of an array slice. Written as `[T]`.
46 Slice(TyRef), 58 Slice(TyRef),
47 59
48 // A raw pointer. Written as `*mut T` or `*const T` 60 /// A raw pointer. Written as `*mut T` or `*const T`
49 // RawPtr(TypeAndMut<'tcx>), 61 RawPtr(TyRef, Mutability),
62
63 /// A reference; a pointer with an associated lifetime. Written as
64 /// `&'a mut T` or `&'a T`.
65 Ref(TyRef, Mutability),
50 66
51 // A reference; a pointer with an associated lifetime. Written as
52 // `&'a mut T` or `&'a T`.
53 // Ref(Ty<'tcx>, hir::Mutability),
54 /// A pointer to a function. Written as `fn() -> i32`. 67 /// A pointer to a function. Written as `fn() -> i32`.
55 /// 68 ///
56 /// For example the type of `bar` here: 69 /// For example the type of `bar` here:
@@ -107,58 +120,104 @@ pub enum Ty {
107 120
108type TyRef = Arc<Ty>; 121type TyRef = Arc<Ty>;
109 122
110#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 123#[derive(Clone, PartialEq, Eq, Hash, Debug)]
111pub struct FnSig { 124pub struct FnSig {
112 input: Vec<Ty>, 125 input: Vec<Ty>,
113 output: Ty, 126 output: Ty,
114} 127}
115 128
116impl Ty { 129impl Ty {
117 pub fn new(_db: &impl HirDatabase, node: ast::TypeRef) -> Cancelable<Self> { 130 pub(crate) fn from_hir(
118 use ra_syntax::ast::TypeRef::*; 131 db: &impl HirDatabase,
119 Ok(match node { 132 module: &Module,
120 ParenType(_inner) => Ty::Unknown, // TODO 133 type_ref: &TypeRef,
121 TupleType(_inner) => Ty::Unknown, // TODO 134 ) -> Cancelable<Self> {
122 NeverType(..) => Ty::Never, 135 Ok(match type_ref {
123 PathType(inner) => { 136 TypeRef::Never => Ty::Never,
124 let path = if let Some(p) = inner.path() { 137 TypeRef::Tuple(inner) => {
125 p 138 let inner_tys = inner
126 } else { 139 .iter()
127 return Ok(Ty::Unknown); 140 .map(|tr| Ty::from_hir(db, module, tr))
141 .collect::<Cancelable<_>>()?;
142 Ty::Tuple(inner_tys)
143 }
144 TypeRef::Path(path) => Ty::from_hir_path(db, module, path)?,
145 TypeRef::RawPtr(inner, mutability) => {
146 let inner_ty = Ty::from_hir(db, module, inner)?;
147 Ty::RawPtr(Arc::new(inner_ty), *mutability)
148 }
149 TypeRef::Array(_inner) => Ty::Unknown, // TODO
150 TypeRef::Slice(inner) => {
151 let inner_ty = Ty::from_hir(db, module, inner)?;
152 Ty::Slice(Arc::new(inner_ty))
153 }
154 TypeRef::Reference(inner, mutability) => {
155 let inner_ty = Ty::from_hir(db, module, inner)?;
156 Ty::Ref(Arc::new(inner_ty), *mutability)
157 }
158 TypeRef::Placeholder => Ty::Unknown, // TODO
159 TypeRef::Fn(params) => {
160 let mut inner_tys = params
161 .iter()
162 .map(|tr| Ty::from_hir(db, module, tr))
163 .collect::<Cancelable<Vec<_>>>()?;
164 let return_ty = inner_tys
165 .pop()
166 .expect("TypeRef::Fn should always have at least return type");
167 let sig = FnSig {
168 input: inner_tys,
169 output: return_ty,
128 }; 170 };
129 if path.qualifier().is_none() { 171 Ty::FnPtr(Arc::new(sig))
130 let name = path
131 .segment()
132 .and_then(|s| s.name_ref())
133 .map(|n| n.text())
134 .unwrap_or(SmolStr::new(""));
135 if let Some(int_ty) = primitive::IntTy::from_string(&name) {
136 Ty::Int(int_ty)
137 } else if let Some(uint_ty) = primitive::UintTy::from_string(&name) {
138 Ty::Uint(uint_ty)
139 } else if let Some(float_ty) = primitive::FloatTy::from_string(&name) {
140 Ty::Float(float_ty)
141 } else {
142 // TODO
143 Ty::Unknown
144 }
145 } else {
146 // TODO
147 Ty::Unknown
148 }
149 } 172 }
150 PointerType(_inner) => Ty::Unknown, // TODO 173 TypeRef::Error => Ty::Unknown,
151 ArrayType(_inner) => Ty::Unknown, // TODO
152 SliceType(_inner) => Ty::Unknown, // TODO
153 ReferenceType(_inner) => Ty::Unknown, // TODO
154 PlaceholderType(_inner) => Ty::Unknown, // TODO
155 FnPointerType(_inner) => Ty::Unknown, // TODO
156 ForType(_inner) => Ty::Unknown, // TODO
157 ImplTraitType(_inner) => Ty::Unknown, // TODO
158 DynTraitType(_inner) => Ty::Unknown, // TODO
159 }) 174 })
160 } 175 }
161 176
177 pub(crate) fn from_hir_path(
178 db: &impl HirDatabase,
179 module: &Module,
180 path: &Path,
181 ) -> Cancelable<Self> {
182 if path.is_ident() {
183 let name = &path.segments[0];
184 if let Some(int_ty) = primitive::IntTy::from_string(&name) {
185 return Ok(Ty::Int(int_ty));
186 } else if let Some(uint_ty) = primitive::UintTy::from_string(&name) {
187 return Ok(Ty::Uint(uint_ty));
188 } else if let Some(float_ty) = primitive::FloatTy::from_string(&name) {
189 return Ok(Ty::Float(float_ty));
190 }
191 }
192
193 // Resolve in module (in type namespace)
194 let resolved = if let Some(r) = module.resolve_path(db, path)?.take_types() {
195 r
196 } else {
197 return Ok(Ty::Unknown);
198 };
199 let ty = db.type_for_def(resolved)?;
200 Ok(ty)
201 }
202
203 // TODO: These should not be necessary long-term, since everything will work on HIR
204 pub(crate) fn from_ast_opt(
205 db: &impl HirDatabase,
206 module: &Module,
207 node: Option<ast::TypeRef>,
208 ) -> Cancelable<Self> {
209 node.map(|n| Ty::from_ast(db, module, n))
210 .unwrap_or(Ok(Ty::Unknown))
211 }
212
213 pub(crate) fn from_ast(
214 db: &impl HirDatabase,
215 module: &Module,
216 node: ast::TypeRef,
217 ) -> Cancelable<Self> {
218 Ty::from_hir(db, module, &TypeRef::from_ast(node))
219 }
220
162 pub fn unit() -> Self { 221 pub fn unit() -> Self {
163 Ty::Tuple(Vec::new()) 222 Ty::Tuple(Vec::new())
164 } 223 }
@@ -174,6 +233,8 @@ impl fmt::Display for Ty {
174 Ty::Float(t) => write!(f, "{}", t.ty_to_string()), 233 Ty::Float(t) => write!(f, "{}", t.ty_to_string()),
175 Ty::Str => write!(f, "str"), 234 Ty::Str => write!(f, "str"),
176 Ty::Slice(t) => write!(f, "[{}]", t), 235 Ty::Slice(t) => write!(f, "[{}]", t),
236 Ty::RawPtr(t, m) => write!(f, "*{}{}", m.as_keyword_for_ptr(), t),
237 Ty::Ref(t, m) => write!(f, "&{}{}", m.as_keyword_for_ref(), t),
177 Ty::Never => write!(f, "!"), 238 Ty::Never => write!(f, "!"),
178 Ty::Tuple(ts) => { 239 Ty::Tuple(ts) => {
179 write!(f, "(")?; 240 write!(f, "(")?;
@@ -189,6 +250,7 @@ impl fmt::Display for Ty {
189 } 250 }
190 write!(f, ") -> {}", sig.output) 251 write!(f, ") -> {}", sig.output)
191 } 252 }
253 Ty::Adt { name, .. } => write!(f, "{}", name),
192 Ty::Unknown => write!(f, "[unknown]"), 254 Ty::Unknown => write!(f, "[unknown]"),
193 } 255 }
194 } 256 }
@@ -196,34 +258,40 @@ impl fmt::Display for Ty {
196 258
197pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable<Ty> { 259pub fn type_for_fn(db: &impl HirDatabase, f: Function) -> Cancelable<Ty> {
198 let syntax = f.syntax(db); 260 let syntax = f.syntax(db);
261 let module = f.module(db)?;
199 let node = syntax.borrowed(); 262 let node = syntax.borrowed();
200 // TODO we ignore type parameters for now 263 // TODO we ignore type parameters for now
201 let input = node 264 let input = node
202 .param_list() 265 .param_list()
203 .map(|pl| { 266 .map(|pl| {
204 pl.params() 267 pl.params()
205 .map(|p| { 268 .map(|p| Ty::from_ast_opt(db, &module, p.type_ref()))
206 p.type_ref()
207 .map(|t| Ty::new(db, t))
208 .unwrap_or(Ok(Ty::Unknown))
209 })
210 .collect() 269 .collect()
211 }) 270 })
212 .unwrap_or_else(|| Ok(Vec::new()))?; 271 .unwrap_or_else(|| Ok(Vec::new()))?;
213 let output = node 272 let output = Ty::from_ast_opt(db, &module, node.ret_type().and_then(|rt| rt.type_ref()))?;
214 .ret_type()
215 .and_then(|rt| rt.type_ref())
216 .map(|t| Ty::new(db, t))
217 .unwrap_or(Ok(Ty::Unknown))?;
218 let sig = FnSig { input, output }; 273 let sig = FnSig { input, output };
219 Ok(Ty::FnPtr(Arc::new(sig))) 274 Ok(Ty::FnPtr(Arc::new(sig)))
220} 275}
221 276
222// TODO this should probably be per namespace (i.e. types vs. values), since for 277pub fn type_for_struct(db: &impl HirDatabase, s: Struct) -> Cancelable<Ty> {
223// a tuple struct `struct Foo(Bar)`, Foo has function type as a value, but 278 Ok(Ty::Adt {
224// defines the struct type Foo when used in the type namespace. rustc has a 279 def_id: s.def_id(),
225// separate DefId for the constructor, but with the current DefId approach, that 280 name: s
226// seems complicated. 281 .name(db)?
282 .unwrap_or_else(|| SmolStr::new("[unnamed struct]")),
283 })
284}
285
286pub fn type_for_enum(db: &impl HirDatabase, s: Enum) -> Cancelable<Ty> {
287 Ok(Ty::Adt {
288 def_id: s.def_id(),
289 name: s
290 .name(db)?
291 .unwrap_or_else(|| SmolStr::new("[unnamed enum]")),
292 })
293}
294
227pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> { 295pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> {
228 let def = def_id.resolve(db)?; 296 let def = def_id.resolve(db)?;
229 match def { 297 match def {
@@ -232,6 +300,8 @@ pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> {
232 Ok(Ty::Unknown) 300 Ok(Ty::Unknown)
233 } 301 }
234 Def::Function(f) => type_for_fn(db, f), 302 Def::Function(f) => type_for_fn(db, f),
303 Def::Struct(s) => type_for_struct(db, s),
304 Def::Enum(e) => type_for_enum(db, e),
235 Def::Item => { 305 Def::Item => {
236 log::debug!("trying to get type for item of unknown type {:?}", def_id); 306 log::debug!("trying to get type for item of unknown type {:?}", def_id);
237 Ok(Ty::Unknown) 307 Ok(Ty::Unknown)
@@ -239,6 +309,33 @@ pub fn type_for_def(db: &impl HirDatabase, def_id: DefId) -> Cancelable<Ty> {
239 } 309 }
240} 310}
241 311
312pub(super) fn type_for_field(
313 db: &impl HirDatabase,
314 def_id: DefId,
315 field: SmolStr,
316) -> Cancelable<Ty> {
317 let def = def_id.resolve(db)?;
318 let variant_data = match def {
319 Def::Struct(s) => {
320 let variant_data = s.variant_data(db)?;
321 variant_data
322 }
323 // TODO: unions
324 // TODO: enum variants
325 _ => panic!(
326 "trying to get type for field in non-struct/variant {:?}",
327 def_id
328 ),
329 };
330 let module = def_id.module(db)?;
331 let type_ref = if let Some(tr) = variant_data.get_field_type_ref(&field) {
332 tr
333 } else {
334 return Ok(Ty::Unknown);
335 };
336 Ty::from_hir(db, &module, &type_ref)
337}
338
242#[derive(Clone, PartialEq, Eq, Debug)] 339#[derive(Clone, PartialEq, Eq, Debug)]
243pub struct InferenceResult { 340pub struct InferenceResult {
244 type_of: FxHashMap<LocalSyntaxPtr, Ty>, 341 type_of: FxHashMap<LocalSyntaxPtr, Ty>,
@@ -305,32 +402,54 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
305 }; 402 };
306 403
307 // resolve in module 404 // resolve in module
308 let resolved = ctry!(self.module.resolve_path(self.db, path)?); 405 let resolved = ctry!(self.module.resolve_path(self.db, &path)?.take_values());
309 let ty = self.db.type_for_def(resolved)?; 406 let ty = self.db.type_for_def(resolved)?;
310 // TODO we will need to add type variables for type parameters etc. here 407 // TODO we will need to add type variables for type parameters etc. here
311 Ok(Some(ty)) 408 Ok(Some(ty))
312 } 409 }
313 410
411 fn resolve_variant(
412 &self,
413 path: Option<ast::Path>,
414 ) -> Cancelable<(Ty, Option<Arc<VariantData>>)> {
415 let path = if let Some(path) = path.and_then(Path::from_ast) {
416 path
417 } else {
418 return Ok((Ty::Unknown, None));
419 };
420 let def_id = if let Some(def_id) = self.module.resolve_path(self.db, &path)?.take_types() {
421 def_id
422 } else {
423 return Ok((Ty::Unknown, None));
424 };
425 Ok(match def_id.resolve(self.db)? {
426 Def::Struct(s) => {
427 let struct_data = self.db.struct_data(def_id)?;
428 let ty = type_for_struct(self.db, s)?;
429 (ty, Some(struct_data.variant_data().clone()))
430 }
431 _ => (Ty::Unknown, None),
432 })
433 }
434
435 fn infer_expr_opt(&mut self, expr: Option<ast::Expr>) -> Cancelable<Ty> {
436 if let Some(e) = expr {
437 self.infer_expr(e)
438 } else {
439 Ok(Ty::Unknown)
440 }
441 }
442
314 fn infer_expr(&mut self, expr: ast::Expr) -> Cancelable<Ty> { 443 fn infer_expr(&mut self, expr: ast::Expr) -> Cancelable<Ty> {
315 let ty = match expr { 444 let ty = match expr {
316 ast::Expr::IfExpr(e) => { 445 ast::Expr::IfExpr(e) => {
317 if let Some(condition) = e.condition() { 446 if let Some(condition) = e.condition() {
318 if let Some(e) = condition.expr() { 447 // TODO if no pat, this should be bool
319 // TODO if no pat, this should be bool 448 self.infer_expr_opt(condition.expr())?;
320 self.infer_expr(e)?;
321 }
322 // TODO write type for pat 449 // TODO write type for pat
323 }; 450 };
324 let if_ty = if let Some(block) = e.then_branch() { 451 let if_ty = self.infer_block_opt(e.then_branch())?;
325 self.infer_block(block)? 452 let else_ty = self.infer_block_opt(e.else_branch())?;
326 } else {
327 Ty::Unknown
328 };
329 let else_ty = if let Some(block) = e.else_branch() {
330 self.infer_block(block)?
331 } else {
332 Ty::Unknown
333 };
334 if let Some(ty) = self.unify(&if_ty, &else_ty) { 453 if let Some(ty) = self.unify(&if_ty, &else_ty) {
335 ty 454 ty
336 } else { 455 } else {
@@ -338,62 +457,37 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
338 Ty::Unknown 457 Ty::Unknown
339 } 458 }
340 } 459 }
341 ast::Expr::BlockExpr(e) => { 460 ast::Expr::BlockExpr(e) => self.infer_block_opt(e.block())?,
342 if let Some(block) = e.block() {
343 self.infer_block(block)?
344 } else {
345 Ty::Unknown
346 }
347 }
348 ast::Expr::LoopExpr(e) => { 461 ast::Expr::LoopExpr(e) => {
349 if let Some(block) = e.loop_body() { 462 self.infer_block_opt(e.loop_body())?;
350 self.infer_block(block)?;
351 };
352 // TODO never, or the type of the break param 463 // TODO never, or the type of the break param
353 Ty::Unknown 464 Ty::Unknown
354 } 465 }
355 ast::Expr::WhileExpr(e) => { 466 ast::Expr::WhileExpr(e) => {
356 if let Some(condition) = e.condition() { 467 if let Some(condition) = e.condition() {
357 if let Some(e) = condition.expr() { 468 // TODO if no pat, this should be bool
358 // TODO if no pat, this should be bool 469 self.infer_expr_opt(condition.expr())?;
359 self.infer_expr(e)?;
360 }
361 // TODO write type for pat 470 // TODO write type for pat
362 }; 471 };
363 if let Some(block) = e.loop_body() { 472 self.infer_block_opt(e.loop_body())?;
364 // TODO
365 self.infer_block(block)?;
366 };
367 // TODO always unit? 473 // TODO always unit?
368 Ty::Unknown 474 Ty::Unknown
369 } 475 }
370 ast::Expr::ForExpr(e) => { 476 ast::Expr::ForExpr(e) => {
371 if let Some(expr) = e.iterable() { 477 let _iterable_ty = self.infer_expr_opt(e.iterable());
372 self.infer_expr(expr)?;
373 }
374 if let Some(_pat) = e.pat() { 478 if let Some(_pat) = e.pat() {
375 // TODO write type for pat 479 // TODO write type for pat
376 } 480 }
377 if let Some(block) = e.loop_body() { 481 self.infer_block_opt(e.loop_body())?;
378 self.infer_block(block)?;
379 }
380 // TODO always unit? 482 // TODO always unit?
381 Ty::Unknown 483 Ty::Unknown
382 } 484 }
383 ast::Expr::LambdaExpr(e) => { 485 ast::Expr::LambdaExpr(e) => {
384 let _body_ty = if let Some(body) = e.body() { 486 let _body_ty = self.infer_expr_opt(e.body())?;
385 self.infer_expr(body)?
386 } else {
387 Ty::Unknown
388 };
389 Ty::Unknown 487 Ty::Unknown
390 } 488 }
391 ast::Expr::CallExpr(e) => { 489 ast::Expr::CallExpr(e) => {
392 let callee_ty = if let Some(e) = e.expr() { 490 let callee_ty = self.infer_expr_opt(e.expr())?;
393 self.infer_expr(e)?
394 } else {
395 Ty::Unknown
396 };
397 if let Some(arg_list) = e.arg_list() { 491 if let Some(arg_list) = e.arg_list() {
398 for arg in arg_list.args() { 492 for arg in arg_list.args() {
399 // TODO unify / expect argument type 493 // TODO unify / expect argument type
@@ -410,11 +504,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
410 } 504 }
411 } 505 }
412 ast::Expr::MethodCallExpr(e) => { 506 ast::Expr::MethodCallExpr(e) => {
413 let _receiver_ty = if let Some(e) = e.expr() { 507 let _receiver_ty = self.infer_expr_opt(e.expr())?;
414 self.infer_expr(e)?
415 } else {
416 Ty::Unknown
417 };
418 if let Some(arg_list) = e.arg_list() { 508 if let Some(arg_list) = e.arg_list() {
419 for arg in arg_list.args() { 509 for arg in arg_list.args() {
420 // TODO unify / expect argument type 510 // TODO unify / expect argument type
@@ -424,20 +514,12 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
424 Ty::Unknown 514 Ty::Unknown
425 } 515 }
426 ast::Expr::MatchExpr(e) => { 516 ast::Expr::MatchExpr(e) => {
427 let _ty = if let Some(match_expr) = e.expr() { 517 let _ty = self.infer_expr_opt(e.expr())?;
428 self.infer_expr(match_expr)?
429 } else {
430 Ty::Unknown
431 };
432 if let Some(match_arm_list) = e.match_arm_list() { 518 if let Some(match_arm_list) = e.match_arm_list() {
433 for arm in match_arm_list.arms() { 519 for arm in match_arm_list.arms() {
434 // TODO type the bindings in pat 520 // TODO type the bindings in pat
435 // TODO type the guard 521 // TODO type the guard
436 let _ty = if let Some(e) = arm.expr() { 522 let _ty = self.infer_expr_opt(arm.expr())?;
437 self.infer_expr(e)?
438 } else {
439 Ty::Unknown
440 };
441 } 523 }
442 // TODO unify all the match arm types 524 // TODO unify all the match arm types
443 Ty::Unknown 525 Ty::Unknown
@@ -450,68 +532,78 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
450 ast::Expr::PathExpr(e) => self.infer_path_expr(e)?.unwrap_or(Ty::Unknown), 532 ast::Expr::PathExpr(e) => self.infer_path_expr(e)?.unwrap_or(Ty::Unknown),
451 ast::Expr::ContinueExpr(_e) => Ty::Never, 533 ast::Expr::ContinueExpr(_e) => Ty::Never,
452 ast::Expr::BreakExpr(_e) => Ty::Never, 534 ast::Expr::BreakExpr(_e) => Ty::Never,
453 ast::Expr::ParenExpr(e) => { 535 ast::Expr::ParenExpr(e) => self.infer_expr_opt(e.expr())?,
454 if let Some(e) = e.expr() {
455 self.infer_expr(e)?
456 } else {
457 Ty::Unknown
458 }
459 }
460 ast::Expr::Label(_e) => Ty::Unknown, 536 ast::Expr::Label(_e) => Ty::Unknown,
461 ast::Expr::ReturnExpr(e) => { 537 ast::Expr::ReturnExpr(e) => {
462 if let Some(e) = e.expr() { 538 self.infer_expr_opt(e.expr())?;
463 // TODO unify with return type
464 self.infer_expr(e)?;
465 };
466 Ty::Never 539 Ty::Never
467 } 540 }
468 ast::Expr::MatchArmList(_) | ast::Expr::MatchArm(_) | ast::Expr::MatchGuard(_) => { 541 ast::Expr::MatchArmList(_) | ast::Expr::MatchArm(_) | ast::Expr::MatchGuard(_) => {
469 // Can this even occur outside of a match expression? 542 // Can this even occur outside of a match expression?
470 Ty::Unknown 543 Ty::Unknown
471 } 544 }
472 ast::Expr::StructLit(_e) => Ty::Unknown, 545 ast::Expr::StructLit(e) => {
546 let (ty, _variant_data) = self.resolve_variant(e.path())?;
547 if let Some(nfl) = e.named_field_list() {
548 for field in nfl.fields() {
549 // TODO unify with / expect field type
550 self.infer_expr_opt(field.expr())?;
551 }
552 }
553 ty
554 }
473 ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => { 555 ast::Expr::NamedFieldList(_) | ast::Expr::NamedField(_) => {
474 // Can this even occur outside of a struct literal? 556 // Can this even occur outside of a struct literal?
475 Ty::Unknown 557 Ty::Unknown
476 } 558 }
477 ast::Expr::IndexExpr(_e) => Ty::Unknown, 559 ast::Expr::IndexExpr(_e) => Ty::Unknown,
478 ast::Expr::FieldExpr(_e) => Ty::Unknown, 560 ast::Expr::FieldExpr(e) => {
479 ast::Expr::TryExpr(e) => { 561 let receiver_ty = self.infer_expr_opt(e.expr())?;
480 let _inner_ty = if let Some(e) = e.expr() { 562 if let Some(nr) = e.name_ref() {
481 self.infer_expr(e)? 563 let text = nr.text();
564 match receiver_ty {
565 Ty::Tuple(fields) => {
566 let i = text.parse::<usize>().ok();
567 i.and_then(|i| fields.get(i).cloned())
568 .unwrap_or(Ty::Unknown)
569 }
570 Ty::Adt { def_id, .. } => self.db.type_for_field(def_id, text)?,
571 _ => Ty::Unknown,
572 }
482 } else { 573 } else {
483 Ty::Unknown 574 Ty::Unknown
484 }; 575 }
576 }
577 ast::Expr::TryExpr(e) => {
578 let _inner_ty = self.infer_expr_opt(e.expr())?;
485 Ty::Unknown 579 Ty::Unknown
486 } 580 }
487 ast::Expr::CastExpr(e) => { 581 ast::Expr::CastExpr(e) => {
488 let _inner_ty = if let Some(e) = e.expr() { 582 let _inner_ty = self.infer_expr_opt(e.expr())?;
489 self.infer_expr(e)? 583 let cast_ty = Ty::from_ast_opt(self.db, &self.module, e.type_ref())?;
490 } else {
491 Ty::Unknown
492 };
493 let cast_ty = e
494 .type_ref()
495 .map(|t| Ty::new(self.db, t))
496 .unwrap_or(Ok(Ty::Unknown))?;
497 // TODO do the coercion... 584 // TODO do the coercion...
498 cast_ty 585 cast_ty
499 } 586 }
500 ast::Expr::RefExpr(e) => { 587 ast::Expr::RefExpr(e) => {
501 let _inner_ty = if let Some(e) = e.expr() { 588 let inner_ty = self.infer_expr_opt(e.expr())?;
502 self.infer_expr(e)? 589 let m = Mutability::from_mutable(e.is_mut());
503 } else { 590 // TODO reference coercions etc.
504 Ty::Unknown 591 Ty::Ref(Arc::new(inner_ty), m)
505 };
506 Ty::Unknown
507 } 592 }
508 ast::Expr::PrefixExpr(e) => { 593 ast::Expr::PrefixExpr(e) => {
509 let _inner_ty = if let Some(e) = e.expr() { 594 let inner_ty = self.infer_expr_opt(e.expr())?;
510 self.infer_expr(e)? 595 match e.op() {
511 } else { 596 Some(PrefixOp::Deref) => {
512 Ty::Unknown 597 match inner_ty {
513 }; 598 // builtin deref:
514 Ty::Unknown 599 Ty::Ref(ref_inner, _) => (*ref_inner).clone(),
600 Ty::RawPtr(ptr_inner, _) => (*ptr_inner).clone(),
601 // TODO Deref::deref
602 _ => Ty::Unknown,
603 }
604 }
605 _ => Ty::Unknown,
606 }
515 } 607 }
516 ast::Expr::RangeExpr(_e) => Ty::Unknown, 608 ast::Expr::RangeExpr(_e) => Ty::Unknown,
517 ast::Expr::BinExpr(_e) => Ty::Unknown, 609 ast::Expr::BinExpr(_e) => Ty::Unknown,
@@ -521,15 +613,19 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
521 Ok(ty) 613 Ok(ty)
522 } 614 }
523 615
616 fn infer_block_opt(&mut self, node: Option<ast::Block>) -> Cancelable<Ty> {
617 if let Some(b) = node {
618 self.infer_block(b)
619 } else {
620 Ok(Ty::Unknown)
621 }
622 }
623
524 fn infer_block(&mut self, node: ast::Block) -> Cancelable<Ty> { 624 fn infer_block(&mut self, node: ast::Block) -> Cancelable<Ty> {
525 for stmt in node.statements() { 625 for stmt in node.statements() {
526 match stmt { 626 match stmt {
527 ast::Stmt::LetStmt(stmt) => { 627 ast::Stmt::LetStmt(stmt) => {
528 let decl_ty = if let Some(type_ref) = stmt.type_ref() { 628 let decl_ty = Ty::from_ast_opt(self.db, &self.module, stmt.type_ref())?;
529 Ty::new(self.db, type_ref)?
530 } else {
531 Ty::Unknown
532 };
533 let ty = if let Some(expr) = stmt.initializer() { 629 let ty = if let Some(expr) = stmt.initializer() {
534 // TODO pass expectation 630 // TODO pass expectation
535 let expr_ty = self.infer_expr(expr)?; 631 let expr_ty = self.infer_expr(expr)?;
@@ -544,9 +640,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
544 }; 640 };
545 } 641 }
546 ast::Stmt::ExprStmt(expr_stmt) => { 642 ast::Stmt::ExprStmt(expr_stmt) => {
547 if let Some(expr) = expr_stmt.expr() { 643 self.infer_expr_opt(expr_stmt.expr())?;
548 self.infer_expr(expr)?;
549 }
550 } 644 }
551 } 645 }
552 } 646 }
@@ -576,7 +670,7 @@ pub fn infer(db: &impl HirDatabase, function: Function) -> Cancelable<InferenceR
576 continue; 670 continue;
577 }; 671 };
578 if let Some(type_ref) = param.type_ref() { 672 if let Some(type_ref) = param.type_ref() {
579 let ty = Ty::new(db, type_ref)?; 673 let ty = Ty::from_ast(db, &ctx.module, type_ref)?;
580 ctx.type_of.insert(LocalSyntaxPtr::new(pat.syntax()), ty); 674 ctx.type_of.insert(LocalSyntaxPtr::new(pat.syntax()), ty);
581 } else { 675 } else {
582 // TODO self param 676 // TODO self param
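
Two parts of the inference change above are easy to isolate: a field access on a tuple parses the field name as an index, and a field access on an ADT looks the field up by name (in the real code this goes through the type_for_field query). A rough standalone sketch with simplified stand-in types instead of the crate's Ty:

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Uint,
    Bool,
    Tuple(Vec<Ty>),
    Adt { fields: Vec<(String, Ty)> },
    Unknown,
}

// Field access: tuples are indexed by parsing the field name ("0", "1", ...),
// ADTs are looked up by field name; anything else stays unknown for now.
fn infer_field(receiver: &Ty, name: &str) -> Ty {
    match receiver {
        Ty::Tuple(fields) => name
            .parse::<usize>()
            .ok()
            .and_then(|i| fields.get(i).cloned())
            .unwrap_or(Ty::Unknown),
        Ty::Adt { fields } => fields
            .iter()
            .find(|(field, _)| field.as_str() == name)
            .map(|(_, ty)| ty.clone())
            .unwrap_or(Ty::Unknown),
        _ => Ty::Unknown,
    }
}

fn main() {
    let pair = Ty::Tuple(vec![Ty::Uint, Ty::Bool]);
    assert_eq!(infer_field(&pair, "1"), Ty::Bool);

    let strukt = Ty::Adt { fields: vec![("b".to_string(), Ty::Uint)] };
    assert_eq!(infer_field(&strukt, "b"), Ty::Uint);
    assert_eq!(infer_field(&strukt, "missing"), Ty::Unknown);
}
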
diff --git a/crates/ra_hir/src/ty/tests.rs b/crates/ra_hir/src/ty/tests.rs
index b6c02cd80..a76925b58 100644
--- a/crates/ra_hir/src/ty/tests.rs
+++ b/crates/ra_hir/src/ty/tests.rs
@@ -68,6 +68,51 @@ fn test() {
68 ); 68 );
69} 69}
70 70
71#[test]
72fn infer_struct() {
73 check_inference(
74 r#"
75struct A {
76 b: B,
77 c: C,
78}
79struct B;
80struct C(usize);
81
82fn test() {
83 let c = C(1);
84 B;
85 let a: A = A { b: B, c: C(1) };
86 a.b;
87 a.c;
88}
89"#,
90 "0004_struct.txt",
91 );
92}
93
94#[test]
95fn infer_refs_and_ptrs() {
96 check_inference(
97 r#"
98fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
99 a;
100 *a;
101 &a;
102 &mut a;
103 b;
104 *b;
105 &b;
106 c;
107 *c;
108 d;
109 *d;
110}
111"#,
112 "0005_refs.txt",
113 );
114}
115
71fn infer(content: &str) -> String { 116fn infer(content: &str) -> String {
72 let (db, _, file_id) = MockDatabase::with_single_file(content); 117 let (db, _, file_id) = MockDatabase::with_single_file(content);
73 let source_file = db.source_file(file_id); 118 let source_file = db.source_file(file_id);
diff --git a/crates/ra_hir/src/ty/tests/data/0001_basics.txt b/crates/ra_hir/src/ty/tests/data/0001_basics.txt
index 0c46f243a..212e92e00 100644
--- a/crates/ra_hir/src/ty/tests/data/0001_basics.txt
+++ b/crates/ra_hir/src/ty/tests/data/0001_basics.txt
@@ -1,4 +1,4 @@
1[33; 34) 'd': [unknown] 1[33; 34) 'd': &[unknown]
2[88; 94) '1isize': [unknown] 2[88; 94) '1isize': [unknown]
3[48; 49) 'a': u32 3[48; 49) 'a': u32
4[55; 56) 'b': isize 4[55; 56) 'b': isize
@@ -10,4 +10,4 @@
10[17; 18) 'b': isize 10[17; 18) 'b': isize
11[100; 106) '"test"': [unknown] 11[100; 106) '"test"': [unknown]
12[42; 121) '{ ...f32; }': () 12[42; 121) '{ ...f32; }': ()
13[69; 70) 'd': [unknown] 13[69; 70) 'd': &[unknown]
diff --git a/crates/ra_hir/src/ty/tests/data/0004_struct.txt b/crates/ra_hir/src/ty/tests/data/0004_struct.txt
new file mode 100644
index 000000000..cc8f3665b
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0004_struct.txt
@@ -0,0 +1,16 @@
1[86; 90) 'C(1)': [unknown]
2[121; 122) 'B': [unknown]
3[86; 87) 'C': [unknown]
4[129; 130) '1': [unknown]
5[107; 108) 'a': A
6[127; 128) 'C': [unknown]
7[139; 142) 'a.b': B
8[114; 133) 'A { b:...C(1) }': A
9[148; 151) 'a.c': C
10[148; 149) 'a': A
11[139; 140) 'a': A
12[72; 154) '{ ...a.c; }': ()
13[96; 97) 'B': [unknown]
14[88; 89) '1': [unknown]
15[82; 83) 'c': [unknown]
16[127; 131) 'C(1)': [unknown]
diff --git a/crates/ra_hir/src/ty/tests/data/0005_refs.txt b/crates/ra_hir/src/ty/tests/data/0005_refs.txt
new file mode 100644
index 000000000..296e955c1
--- /dev/null
+++ b/crates/ra_hir/src/ty/tests/data/0005_refs.txt
@@ -0,0 +1,23 @@
1[115; 117) '&b': &&mut u32
2[88; 94) '&mut a': &mut &u32
3[146; 147) 'd': *mut u32
4[145; 147) '*d': u32
5[65; 66) 'a': &u32
6[46; 47) 'd': *mut u32
7[59; 150) '{ ... *d; }': ()
8[116; 117) 'b': &mut u32
9[131; 132) 'c': *const u32
10[130; 132) '*c': u32
11[72; 74) '*a': u32
12[107; 109) '*b': u32
13[108; 109) 'b': &mut u32
14[9; 10) 'a': &u32
15[18; 19) 'b': &mut u32
16[93; 94) 'a': &u32
17[100; 101) 'b': &mut u32
18[81; 82) 'a': &u32
19[80; 82) '&a': &&u32
20[73; 74) 'a': &u32
21[123; 124) 'c': *const u32
22[31; 32) 'c': *const u32
23[138; 139) 'd': *mut u32
diff --git a/crates/ra_hir/src/type_ref.rs b/crates/ra_hir/src/type_ref.rs
new file mode 100644
index 000000000..b36bb35d8
--- /dev/null
+++ b/crates/ra_hir/src/type_ref.rs
@@ -0,0 +1,110 @@
1//! HIR for references to types. Paths in these are not yet resolved. They can
2//! be directly created from an ast::TypeRef, without further queries.
3
4use ra_syntax::ast;
5
6use crate::Path;
7
8#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
9pub enum Mutability {
10 Shared,
11 Mut,
12}
13
14impl Mutability {
15 pub fn from_mutable(mutable: bool) -> Mutability {
16 if mutable {
17 Mutability::Mut
18 } else {
19 Mutability::Shared
20 }
21 }
22
23 pub fn as_keyword_for_ref(self) -> &'static str {
24 match self {
25 Mutability::Shared => "",
26 Mutability::Mut => "mut ",
27 }
28 }
29
30 pub fn as_keyword_for_ptr(self) -> &'static str {
31 match self {
32 Mutability::Shared => "const ",
33 Mutability::Mut => "mut ",
34 }
35 }
36}
37
38/// Compare ty::Ty
39#[derive(Clone, PartialEq, Eq, Hash, Debug)]
40pub enum TypeRef {
41 Never,
42 Placeholder,
43 Tuple(Vec<TypeRef>),
44 Path(Path),
45 RawPtr(Box<TypeRef>, Mutability),
46 Reference(Box<TypeRef>, Mutability),
47 Array(Box<TypeRef> /*, Expr*/),
48 Slice(Box<TypeRef>),
49 /// A fn pointer. Last element of the vector is the return type.
50 Fn(Vec<TypeRef>),
51 // For
52 // ImplTrait,
53 // DynTrait,
54 Error,
55}
56
57impl TypeRef {
58 /// Converts an `ast::TypeRef` to a `hir::TypeRef`.
59 pub(crate) fn from_ast(node: ast::TypeRef) -> Self {
60 use ra_syntax::ast::TypeRef::*;
61 match node {
62 ParenType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
63 TupleType(inner) => TypeRef::Tuple(inner.fields().map(TypeRef::from_ast).collect()),
64 NeverType(..) => TypeRef::Never,
65 PathType(inner) => inner
66 .path()
67 .and_then(Path::from_ast)
68 .map(TypeRef::Path)
69 .unwrap_or(TypeRef::Error),
70 PointerType(inner) => {
71 let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
72 let mutability = Mutability::from_mutable(inner.is_mut());
73 TypeRef::RawPtr(Box::new(inner_ty), mutability)
74 }
75 ArrayType(inner) => TypeRef::Array(Box::new(TypeRef::from_ast_opt(inner.type_ref()))),
76 SliceType(inner) => TypeRef::Slice(Box::new(TypeRef::from_ast_opt(inner.type_ref()))),
77 ReferenceType(inner) => {
78 let inner_ty = TypeRef::from_ast_opt(inner.type_ref());
79 let mutability = Mutability::from_mutable(inner.is_mut());
80 TypeRef::Reference(Box::new(inner_ty), mutability)
81 }
82 PlaceholderType(_inner) => TypeRef::Placeholder,
83 FnPointerType(inner) => {
84 let ret_ty = TypeRef::from_ast_opt(inner.ret_type().and_then(|rt| rt.type_ref()));
85 let mut params = if let Some(pl) = inner.param_list() {
86 pl.params()
87 .map(|p| p.type_ref())
88 .map(TypeRef::from_ast_opt)
89 .collect()
90 } else {
91 Vec::new()
92 };
93 params.push(ret_ty);
94 TypeRef::Fn(params)
95 }
96 // for types are close enough for our purposes to the inner type for now...
97 ForType(inner) => TypeRef::from_ast_opt(inner.type_ref()),
98 ImplTraitType(_inner) => TypeRef::Error,
99 DynTraitType(_inner) => TypeRef::Error,
100 }
101 }
102
103 pub(crate) fn from_ast_opt(node: Option<ast::TypeRef>) -> Self {
104 if let Some(node) = node {
105 TypeRef::from_ast(node)
106 } else {
107 TypeRef::Error
108 }
109 }
110}
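
A small standalone illustration of the Mutability helpers introduced in type_ref.rs and of how the Ty::Ref / Ty::RawPtr Display arms above use them; the enum is copied here in simplified form so the snippet runs on its own.

#[derive(Clone, Copy)]
enum Mutability {
    Shared,
    Mut,
}

impl Mutability {
    fn from_mutable(mutable: bool) -> Mutability {
        if mutable { Mutability::Mut } else { Mutability::Shared }
    }
    fn as_keyword_for_ref(self) -> &'static str {
        match self {
            Mutability::Shared => "",
            Mutability::Mut => "mut ",
        }
    }
    fn as_keyword_for_ptr(self) -> &'static str {
        match self {
            Mutability::Shared => "const ",
            Mutability::Mut => "mut ",
        }
    }
}

fn main() {
    // Mirrors how Ty::Ref and Ty::RawPtr are rendered in the Display impl above.
    let m = Mutability::from_mutable(true);
    assert_eq!(format!("&{}u32", m.as_keyword_for_ref()), "&mut u32");
    assert_eq!(format!("*{}u32", Mutability::Shared.as_keyword_for_ptr()), "*const u32");
}
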
diff --git a/crates/ra_lsp_server/src/caps.rs b/crates/ra_lsp_server/src/caps.rs
index 560f64989..5f7038f63 100644
--- a/crates/ra_lsp_server/src/caps.rs
+++ b/crates/ra_lsp_server/src/caps.rs
@@ -19,7 +19,7 @@ pub fn server_capabilities() -> ServerCapabilities {
19 hover_provider: Some(true), 19 hover_provider: Some(true),
20 completion_provider: Some(CompletionOptions { 20 completion_provider: Some(CompletionOptions {
21 resolve_provider: None, 21 resolve_provider: None,
22 trigger_characters: Some(vec![":".to_string()]), 22 trigger_characters: Some(vec![":".to_string(), ".".to_string()]),
23 }), 23 }),
24 signature_help_provider: Some(SignatureHelpOptions { 24 signature_help_provider: Some(SignatureHelpOptions {
25 trigger_characters: Some(vec!["(".to_string(), ",".to_string(), ")".to_string()]), 25 trigger_characters: Some(vec!["(".to_string(), ",".to_string(), ")".to_string()]),
diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs
index 5a911d9d2..d3670104e 100644
--- a/crates/ra_lsp_server/src/conv.rs
+++ b/crates/ra_lsp_server/src/conv.rs
@@ -55,7 +55,10 @@ impl Conv for CompletionItemKind {
55 CompletionItemKind::Snippet => Snippet, 55 CompletionItemKind::Snippet => Snippet,
56 CompletionItemKind::Module => Module, 56 CompletionItemKind::Module => Module,
57 CompletionItemKind::Function => Function, 57 CompletionItemKind::Function => Function,
58 CompletionItemKind::Struct => Struct,
59 CompletionItemKind::Enum => Enum,
58 CompletionItemKind::Binding => Variable, 60 CompletionItemKind::Binding => Variable,
61 CompletionItemKind::Field => Field,
59 } 62 }
60 } 63 }
61} 64}
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs
index 60f13267c..1edb9fae4 100644
--- a/crates/ra_lsp_server/src/main_loop.rs
+++ b/crates/ra_lsp_server/src/main_loop.rs
@@ -427,7 +427,7 @@ impl<'a> PoolDispatcher<'a> {
427 RawResponse::err( 427 RawResponse::err(
428 id, 428 id,
429 ErrorCode::ContentModified as i32, 429 ErrorCode::ContentModified as i32,
430 format!("content modified: {}", e), 430 format!("content modified: {:?}", e),
431 ) 431 )
432 } else { 432 } else {
433 RawResponse::err( 433 RawResponse::err(
diff --git a/crates/ra_lsp_server/tests/heavy_tests/main.rs b/crates/ra_lsp_server/tests/heavy_tests/main.rs
index 029a55d40..1f5cc5e8b 100644
--- a/crates/ra_lsp_server/tests/heavy_tests/main.rs
+++ b/crates/ra_lsp_server/tests/heavy_tests/main.rs
@@ -141,7 +141,7 @@ fn main() {}
141 server.request::<CodeActionRequest>( 141 server.request::<CodeActionRequest>(
142 CodeActionParams { 142 CodeActionParams {
143 text_document: server.doc_id("src/lib.rs"), 143 text_document: server.doc_id("src/lib.rs"),
144 range: Range::new(Position::new(0, 0), Position::new(0, 7)), 144 range: Range::new(Position::new(0, 4), Position::new(0, 7)),
145 context: empty_context(), 145 context: empty_context(),
146 }, 146 },
147 json!([ 147 json!([
@@ -168,7 +168,7 @@ fn main() {}
168 server.request::<CodeActionRequest>( 168 server.request::<CodeActionRequest>(
169 CodeActionParams { 169 CodeActionParams {
170 text_document: server.doc_id("src/lib.rs"), 170 text_document: server.doc_id("src/lib.rs"),
171 range: Range::new(Position::new(2, 0), Position::new(2, 7)), 171 range: Range::new(Position::new(2, 4), Position::new(2, 7)),
172 context: empty_context(), 172 context: empty_context(),
173 }, 173 },
174 json!([]), 174 json!([]),
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index f12479fb4..8fb6b6408 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -363,3 +363,73 @@ impl<'a, N: AstNode<'a>> Iterator for AstChildren<'a, N> {
363 } 363 }
364 } 364 }
365} 365}
366
367#[derive(Debug, Clone, PartialEq, Eq)]
368pub enum StructFlavor<'a> {
369 Tuple(PosFieldList<'a>),
370 Named(NamedFieldDefList<'a>),
371 Unit,
372}
373
374impl<'a> StructFlavor<'a> {
375 fn from_node<N: AstNode<'a>>(node: N) -> StructFlavor<'a> {
376 if let Some(nfdl) = child_opt::<_, NamedFieldDefList>(node) {
377 StructFlavor::Named(nfdl)
378 } else if let Some(pfl) = child_opt::<_, PosFieldList>(node) {
379 StructFlavor::Tuple(pfl)
380 } else {
381 StructFlavor::Unit
382 }
383 }
384}
385
386impl<'a> StructDef<'a> {
387 pub fn flavor(self) -> StructFlavor<'a> {
388 StructFlavor::from_node(self)
389 }
390}
391
392impl<'a> EnumVariant<'a> {
393 pub fn flavor(self) -> StructFlavor<'a> {
394 StructFlavor::from_node(self)
395 }
396}
397
398impl<'a> PointerType<'a> {
399 pub fn is_mut(&self) -> bool {
400 self.syntax().children().any(|n| n.kind() == MUT_KW)
401 }
402}
403
404impl<'a> ReferenceType<'a> {
405 pub fn is_mut(&self) -> bool {
406 self.syntax().children().any(|n| n.kind() == MUT_KW)
407 }
408}
409
410impl<'a> RefExpr<'a> {
411 pub fn is_mut(&self) -> bool {
412 self.syntax().children().any(|n| n.kind() == MUT_KW)
413 }
414}
415
416#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
417pub enum PrefixOp {
418 /// The `*` operator for dereferencing
419 Deref,
420 /// The `!` operator for logical inversion
421 Not,
422 /// The `-` operator for negation
423 Neg,
424}
425
426impl<'a> PrefixExpr<'a> {
427 pub fn op(&self) -> Option<PrefixOp> {
428 match self.syntax().first_child()?.kind() {
429 STAR => Some(PrefixOp::Deref),
430 EXCL => Some(PrefixOp::Not),
431 MINUS => Some(PrefixOp::Neg),
432 _ => None,
433 }
434 }
435}
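
The PrefixOp helper above classifies a prefix expression by its first token (*, !, -). A toy standalone version of the same mapping, with a plain char standing in for the crate's SyntaxKind values:

#[derive(Debug, PartialEq)]
enum PrefixOp {
    Deref, // `*expr`
    Not,   // `!expr`
    Neg,   // `-expr`
}

// The real impl matches on the kind of the expression's first child node; a
// char is used here only to keep the sketch self-contained.
fn prefix_op(first_token: char) -> Option<PrefixOp> {
    match first_token {
        '*' => Some(PrefixOp::Deref),
        '!' => Some(PrefixOp::Not),
        '-' => Some(PrefixOp::Neg),
        _ => None,
    }
}

fn main() {
    assert_eq!(prefix_op('*'), Some(PrefixOp::Deref));
    assert_eq!(prefix_op('+'), None);
}
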
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index c73533861..c22e026cf 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -131,7 +131,15 @@ impl<R: TreeRoot<RaTypes>> ArrayTypeNode<R> {
131} 131}
132 132
133 133
134impl<'a> ArrayType<'a> {} 134impl<'a> ArrayType<'a> {
135 pub fn type_ref(self) -> Option<TypeRef<'a>> {
136 super::child_opt(self)
137 }
138
139 pub fn expr(self) -> Option<Expr<'a>> {
140 super::child_opt(self)
141 }
142}
135 143
136// Attr 144// Attr
137#[derive(Debug, Clone, Copy,)] 145#[derive(Debug, Clone, Copy,)]
@@ -806,7 +814,94 @@ impl<'a> ast::NameOwner<'a> for EnumDef<'a> {}
806impl<'a> ast::TypeParamsOwner<'a> for EnumDef<'a> {} 814impl<'a> ast::TypeParamsOwner<'a> for EnumDef<'a> {}
807impl<'a> ast::AttrsOwner<'a> for EnumDef<'a> {} 815impl<'a> ast::AttrsOwner<'a> for EnumDef<'a> {}
808impl<'a> ast::DocCommentsOwner<'a> for EnumDef<'a> {} 816impl<'a> ast::DocCommentsOwner<'a> for EnumDef<'a> {}
809impl<'a> EnumDef<'a> {} 817impl<'a> EnumDef<'a> {
818 pub fn variant_list(self) -> Option<EnumVariantList<'a>> {
819 super::child_opt(self)
820 }
821}
822
823// EnumVariant
824#[derive(Debug, Clone, Copy,)]
825pub struct EnumVariantNode<R: TreeRoot<RaTypes> = OwnedRoot> {
826 pub(crate) syntax: SyntaxNode<R>,
827}
828pub type EnumVariant<'a> = EnumVariantNode<RefRoot<'a>>;
829
830impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<EnumVariantNode<R1>> for EnumVariantNode<R2> {
831 fn eq(&self, other: &EnumVariantNode<R1>) -> bool { self.syntax == other.syntax }
832}
833impl<R: TreeRoot<RaTypes>> Eq for EnumVariantNode<R> {}
834impl<R: TreeRoot<RaTypes>> Hash for EnumVariantNode<R> {
835 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
836}
837
838impl<'a> AstNode<'a> for EnumVariant<'a> {
839 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
840 match syntax.kind() {
841 ENUM_VARIANT => Some(EnumVariant { syntax }),
842 _ => None,
843 }
844 }
845 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
846}
847
848impl<R: TreeRoot<RaTypes>> EnumVariantNode<R> {
849 pub fn borrowed(&self) -> EnumVariant {
850 EnumVariantNode { syntax: self.syntax.borrowed() }
851 }
852 pub fn owned(&self) -> EnumVariantNode {
853 EnumVariantNode { syntax: self.syntax.owned() }
854 }
855}
856
857
858impl<'a> ast::NameOwner<'a> for EnumVariant<'a> {}
859impl<'a> EnumVariant<'a> {
860 pub fn expr(self) -> Option<Expr<'a>> {
861 super::child_opt(self)
862 }
863}
864
865// EnumVariantList
866#[derive(Debug, Clone, Copy,)]
867pub struct EnumVariantListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
868 pub(crate) syntax: SyntaxNode<R>,
869}
870pub type EnumVariantList<'a> = EnumVariantListNode<RefRoot<'a>>;
871
872impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<EnumVariantListNode<R1>> for EnumVariantListNode<R2> {
873 fn eq(&self, other: &EnumVariantListNode<R1>) -> bool { self.syntax == other.syntax }
874}
875impl<R: TreeRoot<RaTypes>> Eq for EnumVariantListNode<R> {}
876impl<R: TreeRoot<RaTypes>> Hash for EnumVariantListNode<R> {
877 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
878}
879
880impl<'a> AstNode<'a> for EnumVariantList<'a> {
881 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
882 match syntax.kind() {
883 ENUM_VARIANT_LIST => Some(EnumVariantList { syntax }),
884 _ => None,
885 }
886 }
887 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
888}
889
890impl<R: TreeRoot<RaTypes>> EnumVariantListNode<R> {
891 pub fn borrowed(&self) -> EnumVariantList {
892 EnumVariantListNode { syntax: self.syntax.borrowed() }
893 }
894 pub fn owned(&self) -> EnumVariantListNode {
895 EnumVariantListNode { syntax: self.syntax.owned() }
896 }
897}
898
899
900impl<'a> EnumVariantList<'a> {
901 pub fn variants(self) -> impl Iterator<Item = EnumVariant<'a>> + 'a {
902 super::children(self)
903 }
904}
810 905
811// Expr 906// Expr
812#[derive(Debug, Clone, Copy, PartialEq, Eq)] 907#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -1036,7 +1131,15 @@ impl<R: TreeRoot<RaTypes>> FieldExprNode<R> {
1036} 1131}
1037 1132
1038 1133
1039impl<'a> FieldExpr<'a> {} 1134impl<'a> FieldExpr<'a> {
1135 pub fn expr(self) -> Option<Expr<'a>> {
1136 super::child_opt(self)
1137 }
1138
1139 pub fn name_ref(self) -> Option<NameRef<'a>> {
1140 super::child_opt(self)
1141 }
1142}
1040 1143
1041// FieldPatList 1144// FieldPatList
1042#[derive(Debug, Clone, Copy,)] 1145#[derive(Debug, Clone, Copy,)]
@@ -1163,7 +1266,15 @@ impl<R: TreeRoot<RaTypes>> FnPointerTypeNode<R> {
1163} 1266}
1164 1267
1165 1268
1166impl<'a> FnPointerType<'a> {} 1269impl<'a> FnPointerType<'a> {
1270 pub fn param_list(self) -> Option<ParamList<'a>> {
1271 super::child_opt(self)
1272 }
1273
1274 pub fn ret_type(self) -> Option<RetType<'a>> {
1275 super::child_opt(self)
1276 }
1277}
1167 1278
1168// ForExpr 1279// ForExpr
1169#[derive(Debug, Clone, Copy,)] 1280#[derive(Debug, Clone, Copy,)]
@@ -1246,7 +1357,11 @@ impl<R: TreeRoot<RaTypes>> ForTypeNode<R> {
1246} 1357}
1247 1358
1248 1359
1249impl<'a> ForType<'a> {} 1360impl<'a> ForType<'a> {
1361 pub fn type_ref(self) -> Option<TypeRef<'a>> {
1362 super::child_opt(self)
1363 }
1364}
1250 1365
1251// IfExpr 1366// IfExpr
1252#[derive(Debug, Clone, Copy,)] 1367#[derive(Debug, Clone, Copy,)]
@@ -1935,6 +2050,10 @@ impl<'a> MethodCallExpr<'a> {
1935 pub fn expr(self) -> Option<Expr<'a>> { 2050 pub fn expr(self) -> Option<Expr<'a>> {
1936 super::child_opt(self) 2051 super::child_opt(self)
1937 } 2052 }
2053
2054 pub fn name_ref(self) -> Option<NameRef<'a>> {
2055 super::child_opt(self)
2056 }
1938} 2057}
1939 2058
1940// Module 2059// Module
@@ -2142,7 +2261,15 @@ impl<R: TreeRoot<RaTypes>> NamedFieldNode<R> {
2142} 2261}
2143 2262
2144 2263
2145impl<'a> NamedField<'a> {} 2264impl<'a> NamedField<'a> {
2265 pub fn name_ref(self) -> Option<NameRef<'a>> {
2266 super::child_opt(self)
2267 }
2268
2269 pub fn expr(self) -> Option<Expr<'a>> {
2270 super::child_opt(self)
2271 }
2272}
2146 2273
2147// NamedFieldDef 2274// NamedFieldDef
2148#[derive(Debug, Clone, Copy,)] 2275#[derive(Debug, Clone, Copy,)]
@@ -2181,7 +2308,52 @@ impl<R: TreeRoot<RaTypes>> NamedFieldDefNode<R> {
2181 2308
2182impl<'a> ast::NameOwner<'a> for NamedFieldDef<'a> {} 2309impl<'a> ast::NameOwner<'a> for NamedFieldDef<'a> {}
2183impl<'a> ast::AttrsOwner<'a> for NamedFieldDef<'a> {} 2310impl<'a> ast::AttrsOwner<'a> for NamedFieldDef<'a> {}
2184impl<'a> NamedFieldDef<'a> {} 2311impl<'a> NamedFieldDef<'a> {
2312 pub fn type_ref(self) -> Option<TypeRef<'a>> {
2313 super::child_opt(self)
2314 }
2315}
2316
2317// NamedFieldDefList
2318#[derive(Debug, Clone, Copy,)]
2319pub struct NamedFieldDefListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
2320 pub(crate) syntax: SyntaxNode<R>,
2321}
2322pub type NamedFieldDefList<'a> = NamedFieldDefListNode<RefRoot<'a>>;
2323
2324impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NamedFieldDefListNode<R1>> for NamedFieldDefListNode<R2> {
2325 fn eq(&self, other: &NamedFieldDefListNode<R1>) -> bool { self.syntax == other.syntax }
2326}
2327impl<R: TreeRoot<RaTypes>> Eq for NamedFieldDefListNode<R> {}
2328impl<R: TreeRoot<RaTypes>> Hash for NamedFieldDefListNode<R> {
2329 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
2330}
2331
2332impl<'a> AstNode<'a> for NamedFieldDefList<'a> {
2333 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
2334 match syntax.kind() {
2335 NAMED_FIELD_DEF_LIST => Some(NamedFieldDefList { syntax }),
2336 _ => None,
2337 }
2338 }
2339 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
2340}
2341
2342impl<R: TreeRoot<RaTypes>> NamedFieldDefListNode<R> {
2343 pub fn borrowed(&self) -> NamedFieldDefList {
2344 NamedFieldDefListNode { syntax: self.syntax.borrowed() }
2345 }
2346 pub fn owned(&self) -> NamedFieldDefListNode {
2347 NamedFieldDefListNode { syntax: self.syntax.owned() }
2348 }
2349}
2350
2351
2352impl<'a> NamedFieldDefList<'a> {
2353 pub fn fields(self) -> impl Iterator<Item = NamedFieldDef<'a>> + 'a {
2354 super::children(self)
2355 }
2356}
2185 2357
2186// NamedFieldList 2358// NamedFieldList
2187#[derive(Debug, Clone, Copy,)] 2359#[derive(Debug, Clone, Copy,)]
@@ -2218,7 +2390,11 @@ impl<R: TreeRoot<RaTypes>> NamedFieldListNode<R> {
2218} 2390}
2219 2391
2220 2392
2221impl<'a> NamedFieldList<'a> {} 2393impl<'a> NamedFieldList<'a> {
2394 pub fn fields(self) -> impl Iterator<Item = NamedField<'a>> + 'a {
2395 super::children(self)
2396 }
2397}
2222 2398
2223// NeverType 2399// NeverType
2224#[derive(Debug, Clone, Copy,)] 2400#[derive(Debug, Clone, Copy,)]
@@ -2451,7 +2627,11 @@ impl<R: TreeRoot<RaTypes>> ParenTypeNode<R> {
2451} 2627}
2452 2628
2453 2629
2454impl<'a> ParenType<'a> {} 2630impl<'a> ParenType<'a> {
2631 pub fn type_ref(self) -> Option<TypeRef<'a>> {
2632 super::child_opt(self)
2633 }
2634}
2455 2635
2456// Pat 2636// Pat
2457#[derive(Debug, Clone, Copy, PartialEq, Eq)] 2637#[derive(Debug, Clone, Copy, PartialEq, Eq)]
@@ -2816,7 +2996,94 @@ impl<R: TreeRoot<RaTypes>> PointerTypeNode<R> {
2816} 2996}
2817 2997
2818 2998
2819impl<'a> PointerType<'a> {} 2999impl<'a> PointerType<'a> {
3000 pub fn type_ref(self) -> Option<TypeRef<'a>> {
3001 super::child_opt(self)
3002 }
3003}
3004
3005// PosField
3006#[derive(Debug, Clone, Copy,)]
3007pub struct PosFieldNode<R: TreeRoot<RaTypes> = OwnedRoot> {
3008 pub(crate) syntax: SyntaxNode<R>,
3009}
3010pub type PosField<'a> = PosFieldNode<RefRoot<'a>>;
3011
3012impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PosFieldNode<R1>> for PosFieldNode<R2> {
3013 fn eq(&self, other: &PosFieldNode<R1>) -> bool { self.syntax == other.syntax }
3014}
3015impl<R: TreeRoot<RaTypes>> Eq for PosFieldNode<R> {}
3016impl<R: TreeRoot<RaTypes>> Hash for PosFieldNode<R> {
3017 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
3018}
3019
3020impl<'a> AstNode<'a> for PosField<'a> {
3021 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
3022 match syntax.kind() {
3023 POS_FIELD => Some(PosField { syntax }),
3024 _ => None,
3025 }
3026 }
3027 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
3028}
3029
3030impl<R: TreeRoot<RaTypes>> PosFieldNode<R> {
3031 pub fn borrowed(&self) -> PosField {
3032 PosFieldNode { syntax: self.syntax.borrowed() }
3033 }
3034 pub fn owned(&self) -> PosFieldNode {
3035 PosFieldNode { syntax: self.syntax.owned() }
3036 }
3037}
3038
3039
3040impl<'a> ast::AttrsOwner<'a> for PosField<'a> {}
3041impl<'a> PosField<'a> {
3042 pub fn type_ref(self) -> Option<TypeRef<'a>> {
3043 super::child_opt(self)
3044 }
3045}
3046
3047// PosFieldList
3048#[derive(Debug, Clone, Copy,)]
3049pub struct PosFieldListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
3050 pub(crate) syntax: SyntaxNode<R>,
3051}
3052pub type PosFieldList<'a> = PosFieldListNode<RefRoot<'a>>;
3053
3054impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PosFieldListNode<R1>> for PosFieldListNode<R2> {
3055 fn eq(&self, other: &PosFieldListNode<R1>) -> bool { self.syntax == other.syntax }
3056}
3057impl<R: TreeRoot<RaTypes>> Eq for PosFieldListNode<R> {}
3058impl<R: TreeRoot<RaTypes>> Hash for PosFieldListNode<R> {
3059 fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
3060}
3061
3062impl<'a> AstNode<'a> for PosFieldList<'a> {
3063 fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
3064 match syntax.kind() {
3065 POS_FIELD_LIST => Some(PosFieldList { syntax }),
3066 _ => None,
3067 }
3068 }
3069 fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
3070}
3071
3072impl<R: TreeRoot<RaTypes>> PosFieldListNode<R> {
3073 pub fn borrowed(&self) -> PosFieldList {
3074 PosFieldListNode { syntax: self.syntax.borrowed() }
3075 }
3076 pub fn owned(&self) -> PosFieldListNode {
3077 PosFieldListNode { syntax: self.syntax.owned() }
3078 }
3079}
3080
3081
3082impl<'a> PosFieldList<'a> {
3083 pub fn fields(self) -> impl Iterator<Item = PosField<'a>> + 'a {
3084 super::children(self)
3085 }
3086}
2820 3087
2821// PrefixExpr 3088// PrefixExpr
2822#[derive(Debug, Clone, Copy,)] 3089#[derive(Debug, Clone, Copy,)]
@@ -3046,7 +3313,11 @@ impl<R: TreeRoot<RaTypes>> ReferenceTypeNode<R> {
3046} 3313}
3047 3314
3048 3315
3049impl<'a> ReferenceType<'a> {} 3316impl<'a> ReferenceType<'a> {
3317 pub fn type_ref(self) -> Option<TypeRef<'a>> {
3318 super::child_opt(self)
3319 }
3320}
3050 3321
3051// RetType 3322// RetType
3052#[derive(Debug, Clone, Copy,)] 3323#[derive(Debug, Clone, Copy,)]
@@ -3239,7 +3510,11 @@ impl<R: TreeRoot<RaTypes>> SliceTypeNode<R> {
3239} 3510}
3240 3511
3241 3512
3242impl<'a> SliceType<'a> {} 3513impl<'a> SliceType<'a> {
3514 pub fn type_ref(self) -> Option<TypeRef<'a>> {
3515 super::child_opt(self)
3516 }
3517}
3243 3518
3244// SourceFile 3519// SourceFile
3245#[derive(Debug, Clone, Copy,)] 3520#[derive(Debug, Clone, Copy,)]
@@ -3426,11 +3701,7 @@ impl<'a> ast::NameOwner<'a> for StructDef<'a> {}
3426impl<'a> ast::TypeParamsOwner<'a> for StructDef<'a> {} 3701impl<'a> ast::TypeParamsOwner<'a> for StructDef<'a> {}
3427impl<'a> ast::AttrsOwner<'a> for StructDef<'a> {} 3702impl<'a> ast::AttrsOwner<'a> for StructDef<'a> {}
3428impl<'a> ast::DocCommentsOwner<'a> for StructDef<'a> {} 3703impl<'a> ast::DocCommentsOwner<'a> for StructDef<'a> {}
3429impl<'a> StructDef<'a> { 3704impl<'a> StructDef<'a> {}
3430 pub fn fields(self) -> impl Iterator<Item = NamedFieldDef<'a>> + 'a {
3431 super::children(self)
3432 }
3433}
3434 3705
3435// StructLit 3706// StructLit
3436#[derive(Debug, Clone, Copy,)] 3707#[derive(Debug, Clone, Copy,)]
@@ -3467,7 +3738,15 @@ impl<R: TreeRoot<RaTypes>> StructLitNode<R> {
3467} 3738}
3468 3739
3469 3740
3470impl<'a> StructLit<'a> {} 3741impl<'a> StructLit<'a> {
3742 pub fn path(self) -> Option<Path<'a>> {
3743 super::child_opt(self)
3744 }
3745
3746 pub fn named_field_list(self) -> Option<NamedFieldList<'a>> {
3747 super::child_opt(self)
3748 }
3749}
3471 3750
3472// StructPat 3751// StructPat
3473#[derive(Debug, Clone, Copy,)] 3752#[derive(Debug, Clone, Copy,)]
@@ -3770,7 +4049,11 @@ impl<R: TreeRoot<RaTypes>> TupleTypeNode<R> {
3770} 4049}
3771 4050
3772 4051
3773impl<'a> TupleType<'a> {} 4052impl<'a> TupleType<'a> {
4053 pub fn fields(self) -> impl Iterator<Item = TypeRef<'a>> + 'a {
4054 super::children(self)
4055 }
4056}
3774 4057
3775// TypeDef 4058// TypeDef
3776#[derive(Debug, Clone, Copy,)] 4059#[derive(Debug, Clone, Copy,)]
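The hunks above extend the generated AST with typed accessors: each `options` entry in the grammar becomes an `Option<_>` getter (for example `NamedFieldDef::type_ref` and `StructLit::path`), while each `collections` entry becomes a child iterator (for example `NamedFieldDefList::fields`). As an editor's illustration only (not part of this patch), a downstream consumer such as type inference could compose the new accessors roughly like this; the lifetime follows the borrowed `RefRoot` nodes:

    // Hypothetical helper, shown only to demonstrate the generated signatures:
    // NamedFieldDefList::fields() yields NamedFieldDef<'a>, and
    // NamedFieldDef::type_ref() returns Option<TypeRef<'a>>.
    fn field_type_refs<'a>(field_list: NamedFieldDefList<'a>) -> Vec<Option<TypeRef<'a>>> {
        field_list.fields().map(|field| field.type_ref()).collect()
    }
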
diff --git a/crates/ra_syntax/src/grammar.ron b/crates/ra_syntax/src/grammar.ron
index e3b9032a0..4bcff4e14 100644
--- a/crates/ra_syntax/src/grammar.ron
+++ b/crates/ra_syntax/src/grammar.ron
@@ -261,18 +261,20 @@ Grammar(
261 "TypeParamsOwner", 261 "TypeParamsOwner",
262 "AttrsOwner", 262 "AttrsOwner",
263 "DocCommentsOwner" 263 "DocCommentsOwner"
264 ],
265 collections: [
266 ["fields", "NamedFieldDef"]
267 ] 264 ]
268 ), 265 ),
269 "NamedFieldDef": ( traits: ["NameOwner", "AttrsOwner"] ), 266 "NamedFieldDefList": (collections: [["fields", "NamedFieldDef"]]),
267 "NamedFieldDef": ( traits: ["NameOwner", "AttrsOwner"], options: ["TypeRef"] ),
268 "PosFieldList": (collections: [["fields", "PosField"]]),
269 "PosField": ( traits: ["AttrsOwner"], options: ["TypeRef"]),
270 "EnumDef": ( traits: [ 270 "EnumDef": ( traits: [
271 "NameOwner", 271 "NameOwner",
272 "TypeParamsOwner", 272 "TypeParamsOwner",
273 "AttrsOwner", 273 "AttrsOwner",
274 "DocCommentsOwner" 274 "DocCommentsOwner"
275 ] ), 275 ], options: [["variant_list", "EnumVariantList"]] ),
276 "EnumVariantList": ( collections: [["variants", "EnumVariant"]] ),
277 "EnumVariant": ( traits: ["NameOwner"], options: ["Expr"] ),
276 "TraitDef": ( traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner"] ), 278 "TraitDef": ( traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner"] ),
277 "Module": ( 279 "Module": (
278 traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner" ], 280 traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner" ],
@@ -301,17 +303,17 @@ Grammar(
301 ] ), 303 ] ),
302 "ImplItem": (), 304 "ImplItem": (),
303 305
304 "ParenType": (), 306 "ParenType": (options: ["TypeRef"]),
305 "TupleType": (), 307 "TupleType": ( collections: [["fields", "TypeRef"]] ),
306 "NeverType": (), 308 "NeverType": (),
307 "PathType": (options: ["Path"]), 309 "PathType": (options: ["Path"]),
308 "PointerType": (), 310 "PointerType": (options: ["TypeRef"]),
309 "ArrayType": (), 311 "ArrayType": ( options: ["TypeRef", "Expr"] ),
310 "SliceType": (), 312 "SliceType": ( options: ["TypeRef"] ),
311 "ReferenceType": (), 313 "ReferenceType": (options: ["TypeRef"]),
312 "PlaceholderType": (), 314 "PlaceholderType": (),
313 "FnPointerType": (), 315 "FnPointerType": (options: ["ParamList", "RetType"]),
314 "ForType": (), 316 "ForType": (options: ["TypeRef"]),
315 "ImplTraitType": (), 317 "ImplTraitType": (),
316 "DynTraitType": (), 318 "DynTraitType": (),
317 319
@@ -392,19 +394,19 @@ Grammar(
392 collections: [ [ "pats", "Pat" ] ] 394 collections: [ [ "pats", "Pat" ] ]
393 ), 395 ),
394 "MatchGuard": (), 396 "MatchGuard": (),
395 "StructLit": (), 397 "StructLit": (options: ["Path", "NamedFieldList"]),
396 "NamedFieldList": (), 398 "NamedFieldList": (collections: [ ["fields", "NamedField"] ]),
397 "NamedField": (), 399 "NamedField": (options: ["NameRef", "Expr"]),
398 "CallExpr": ( 400 "CallExpr": (
399 traits: ["ArgListOwner"], 401 traits: ["ArgListOwner"],
400 options: [ "Expr" ], 402 options: [ "Expr" ],
401 ), 403 ),
402 "MethodCallExpr": ( 404 "MethodCallExpr": (
403 traits: ["ArgListOwner"], 405 traits: ["ArgListOwner"],
404 options: [ "Expr" ], 406 options: [ "Expr", "NameRef" ],
405 ), 407 ),
406 "IndexExpr": (), 408 "IndexExpr": (),
407 "FieldExpr": (), 409 "FieldExpr": (options: ["Expr", "NameRef"]),
408 "TryExpr": (options: ["Expr"]), 410 "TryExpr": (options: ["Expr"]),
409 "CastExpr": (options: ["Expr", "TypeRef"]), 411 "CastExpr": (options: ["Expr", "TypeRef"]),
410 "RefExpr": (options: ["Expr"]), 412 "RefExpr": (options: ["Expr"]),
diff --git a/crates/ra_syntax/src/grammar/expressions.rs b/crates/ra_syntax/src/grammar/expressions.rs
index da78d85a2..2d1f17491 100644
--- a/crates/ra_syntax/src/grammar/expressions.rs
+++ b/crates/ra_syntax/src/grammar/expressions.rs
@@ -283,14 +283,10 @@ fn postfix_expr(
283 // } 283 // }
284 L_PAREN if allow_calls => call_expr(p, lhs), 284 L_PAREN if allow_calls => call_expr(p, lhs),
285 L_BRACK if allow_calls => index_expr(p, lhs), 285 L_BRACK if allow_calls => index_expr(p, lhs),
286 DOT if p.nth(1) == IDENT => { 286 DOT if p.nth(1) == IDENT && (p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON) => {
287 if p.nth(2) == L_PAREN || p.nth(2) == COLONCOLON { 287 method_call_expr(p, lhs)
288 method_call_expr(p, lhs)
289 } else {
290 field_expr(p, lhs)
291 }
292 } 288 }
293 DOT if p.nth(1) == INT_NUMBER => field_expr(p, lhs), 289 DOT => field_expr(p, lhs),
294 // test postfix_range 290 // test postfix_range
295 // fn foo() { let x = 1..; } 291 // fn foo() { let x = 1..; }
296 DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => { 292 DOTDOT | DOTDOTEQ if !EXPR_FIRST.contains(p.nth(1)) => {
@@ -355,13 +351,15 @@ fn method_call_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
355// x.0.bar; 351// x.0.bar;
356// } 352// }
357fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker { 353fn field_expr(p: &mut Parser, lhs: CompletedMarker) -> CompletedMarker {
358 assert!(p.at(DOT) && (p.nth(1) == IDENT || p.nth(1) == INT_NUMBER)); 354 assert!(p.at(DOT));
359 let m = lhs.precede(p); 355 let m = lhs.precede(p);
360 p.bump(); 356 p.bump();
361 if p.at(IDENT) { 357 if p.at(IDENT) {
362 name_ref(p) 358 name_ref(p)
363 } else { 359 } else if p.at(INT_NUMBER) {
364 p.bump() 360 p.bump()
361 } else {
362 p.error("expected field name or number")
365 } 363 }
366 m.complete(p, FIELD_EXPR) 364 m.complete(p, FIELD_EXPR)
367} 365}
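In short, after a `.` the parser now commits to a method call only when it sees `IDENT (` or `IDENT ::`; every other dot suffix falls through to `field_expr`, which additionally recovers from a missing field name instead of asserting. That is what lets the incomplete `a.` in the new 0029_field_completion test parse as a FIELD_EXPR with an "expected field name or number" error. A standalone sketch of the lookahead decision (editor's illustration, not code from this patch):

    // Mirrors the new DOT dispatch in postfix_expr(); token kinds are simplified.
    #[derive(PartialEq, Debug, Clone, Copy)]
    enum Tok { Ident, IntNumber, LParen, ColonColon, Other, Eof }

    #[derive(PartialEq, Debug)]
    enum DotSuffix { MethodCall, FieldExpr }

    fn classify_dot_suffix(nth1: Tok, nth2: Tok) -> DotSuffix {
        if nth1 == Tok::Ident && (nth2 == Tok::LParen || nth2 == Tok::ColonColon) {
            DotSuffix::MethodCall
        } else {
            // IDENT without `(`/`::`, INT_NUMBER, or nothing at all:
            // field_expr handles it, emitting an error in the last case.
            DotSuffix::FieldExpr
        }
    }

    fn main() {
        assert_eq!(classify_dot_suffix(Tok::Ident, Tok::LParen), DotSuffix::MethodCall);   // x.foo(..)
        assert_eq!(classify_dot_suffix(Tok::Ident, Tok::Other), DotSuffix::FieldExpr);     // x.foo
        assert_eq!(classify_dot_suffix(Tok::IntNumber, Tok::Other), DotSuffix::FieldExpr); // x.0
        assert_eq!(classify_dot_suffix(Tok::Eof, Tok::Eof), DotSuffix::FieldExpr);         // x.  (recovered)
    }
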
diff --git a/crates/ra_syntax/src/string_lexing.rs b/crates/ra_syntax/src/string_lexing.rs
index 94853331f..349733f3f 100644
--- a/crates/ra_syntax/src/string_lexing.rs
+++ b/crates/ra_syntax/src/string_lexing.rs
@@ -1,13 +1,7 @@
1mod parser; 1mod parser;
2mod byte;
3mod byte_string;
4mod char;
5mod string; 2mod string;
6 3
7pub use self::{ 4pub use self::{
8 byte::parse_byte_literal, 5 parser::{StringComponent, StringComponentKind},
9 byte_string::parse_byte_string_literal, 6 string::{parse_string_literal, parse_char_literal, parse_byte_literal, parse_byte_string_literal},
10 char::parse_char_literal,
11 parser::{CharComponent, CharComponentKind, StringComponent, StringComponentKind},
12 string::parse_string_literal,
13}; 7};
diff --git a/crates/ra_syntax/src/string_lexing/byte.rs b/crates/ra_syntax/src/string_lexing/byte.rs
deleted file mode 100644
index b3228d6ca..000000000
--- a/crates/ra_syntax/src/string_lexing/byte.rs
+++ /dev/null
@@ -1,51 +0,0 @@
1use super::parser::Parser;
2use super::CharComponent;
3
4pub fn parse_byte_literal(src: &str) -> ByteComponentIterator {
5 ByteComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct ByteComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for ByteComponentIterator<'a> {
17 type Item = CharComponent;
18 fn next(&mut self) -> Option<CharComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == 'b',
22 "Byte literal should start with a `b`"
23 );
24
25 assert!(
26 self.parser.advance() == '\'',
27 "Byte literal should start with a `b`, followed by a quote"
28 );
29 }
30
31 if let Some(component) = self.parser.parse_char_component() {
32 return Some(component);
33 }
34
35 // We get here when there are no char components left to parse
36 if self.parser.peek() == Some('\'') {
37 self.parser.advance();
38 self.has_closing_quote = true;
39 }
40
41 assert!(
42 self.parser.peek() == None,
43 "byte literal should leave no unparsed input: src = {:?}, pos = {}, length = {}",
44 self.parser.src,
45 self.parser.pos,
46 self.parser.src.len()
47 );
48
49 None
50 }
51}
diff --git a/crates/ra_syntax/src/string_lexing/byte_string.rs b/crates/ra_syntax/src/string_lexing/byte_string.rs
deleted file mode 100644
index a6056159b..000000000
--- a/crates/ra_syntax/src/string_lexing/byte_string.rs
+++ /dev/null
@@ -1,51 +0,0 @@
1use super::parser::Parser;
2use super::StringComponent;
3
4pub fn parse_byte_string_literal(src: &str) -> ByteStringComponentIterator {
5 ByteStringComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct ByteStringComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for ByteStringComponentIterator<'a> {
17 type Item = StringComponent;
18 fn next(&mut self) -> Option<StringComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == 'b',
22 "byte string literal should start with a `b`"
23 );
24
25 assert!(
26 self.parser.advance() == '"',
27 "byte string literal should start with a `b`, followed by double quotes"
28 );
29 }
30
31 if let Some(component) = self.parser.parse_string_component() {
32 return Some(component);
33 }
34
35 // We get here when there are no char components left to parse
36 if self.parser.peek() == Some('"') {
37 self.parser.advance();
38 self.has_closing_quote = true;
39 }
40
41 assert!(
42 self.parser.peek() == None,
43 "byte string literal should leave no unparsed input: src = {:?}, pos = {}, length = {}",
44 self.parser.src,
45 self.parser.pos,
46 self.parser.src.len()
47 );
48
49 None
50 }
51}
diff --git a/crates/ra_syntax/src/string_lexing/char.rs b/crates/ra_syntax/src/string_lexing/char.rs
deleted file mode 100644
index e01813176..000000000
--- a/crates/ra_syntax/src/string_lexing/char.rs
+++ /dev/null
@@ -1,176 +0,0 @@
1use super::parser::Parser;
2use super::CharComponent;
3
4pub fn parse_char_literal(src: &str) -> CharComponentIterator {
5 CharComponentIterator {
6 parser: Parser::new(src),
7 has_closing_quote: false,
8 }
9}
10
11pub struct CharComponentIterator<'a> {
12 parser: Parser<'a>,
13 pub has_closing_quote: bool,
14}
15
16impl<'a> Iterator for CharComponentIterator<'a> {
17 type Item = CharComponent;
18 fn next(&mut self) -> Option<CharComponent> {
19 if self.parser.pos == 0 {
20 assert!(
21 self.parser.advance() == '\'',
22 "char literal should start with a quote"
23 );
24 }
25
26 if let Some(component) = self.parser.parse_char_component() {
27 return Some(component);
28 }
29
30 // We get here when there are no char components left to parse
31 if self.parser.peek() == Some('\'') {
32 self.parser.advance();
33 self.has_closing_quote = true;
34 }
35
36 assert!(
37 self.parser.peek() == None,
38 "char literal should leave no unparsed input: src = {:?}, pos = {}, length = {}",
39 self.parser.src,
40 self.parser.pos,
41 self.parser.src.len()
42 );
43
44 None
45 }
46}
47
48#[cfg(test)]
49mod tests {
50 use rowan::TextRange;
51 use crate::string_lexing::{
52 CharComponent,
53 CharComponentKind::*,
54};
55
56 fn parse(src: &str) -> (bool, Vec<CharComponent>) {
57 let component_iterator = &mut super::parse_char_literal(src);
58 let components: Vec<_> = component_iterator.collect();
59 (component_iterator.has_closing_quote, components)
60 }
61
62 fn unclosed_char_component(src: &str) -> CharComponent {
63 let (has_closing_quote, components) = parse(src);
64 assert!(!has_closing_quote, "char should not have closing quote");
65 assert!(components.len() == 1);
66 components[0].clone()
67 }
68
69 fn closed_char_component(src: &str) -> CharComponent {
70 let (has_closing_quote, components) = parse(src);
71 assert!(has_closing_quote, "char should have closing quote");
72 assert!(
73 components.len() == 1,
74 "Literal: {}\nComponents: {:#?}",
75 src,
76 components
77 );
78 components[0].clone()
79 }
80
81 fn closed_char_components(src: &str) -> Vec<CharComponent> {
82 let (has_closing_quote, components) = parse(src);
83 assert!(has_closing_quote, "char should have closing quote");
84 components
85 }
86
87 fn range_closed(src: &str) -> TextRange {
88 TextRange::from_to(1.into(), (src.len() as u32 - 1).into())
89 }
90
91 fn range_unclosed(src: &str) -> TextRange {
92 TextRange::from_to(1.into(), (src.len() as u32).into())
93 }
94
95 #[test]
96 fn test_unicode_escapes() {
97 let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", "{}", ""];
98 for escape in unicode_escapes {
99 let escape_sequence = format!(r"'\u{}'", escape);
100 let component = closed_char_component(&escape_sequence);
101 let expected_range = range_closed(&escape_sequence);
102 assert_eq!(component.kind, UnicodeEscape);
103 assert_eq!(component.range, expected_range);
104 }
105 }
106
107 #[test]
108 fn test_unicode_escapes_unclosed() {
109 let unicode_escapes = &["{DEAD", "{BEEF", "{FF"];
110 for escape in unicode_escapes {
111 let escape_sequence = format!(r"'\u{}'", escape);
112 let component = unclosed_char_component(&escape_sequence);
113 let expected_range = range_unclosed(&escape_sequence);
114 assert_eq!(component.kind, UnicodeEscape);
115 assert_eq!(component.range, expected_range);
116 }
117 }
118
119 #[test]
120 fn test_empty_char() {
121 let (has_closing_quote, components) = parse("''");
122 assert!(has_closing_quote, "char should have closing quote");
123 assert!(components.len() == 0);
124 }
125
126 #[test]
127 fn test_unclosed_char() {
128 let component = unclosed_char_component("'a");
129 assert!(component.kind == CodePoint);
130 assert!(component.range == TextRange::from_to(1.into(), 2.into()));
131 }
132
133 #[test]
134 fn test_digit_escapes() {
135 let literals = &[r"", r"5", r"55"];
136
137 for literal in literals {
138 let lit_text = format!(r"'\x{}'", literal);
139 let component = closed_char_component(&lit_text);
140 assert!(component.kind == AsciiCodeEscape);
141 assert!(component.range == range_closed(&lit_text));
142 }
143
144 // More than 2 digits starts a new codepoint
145 let components = closed_char_components(r"'\x555'");
146 assert!(components.len() == 2);
147 assert!(components[1].kind == CodePoint);
148 }
149
150 #[test]
151 fn test_ascii_escapes() {
152 let literals = &[
153 r"\'", "\\\"", // equivalent to \"
154 r"\n", r"\r", r"\t", r"\\", r"\0",
155 ];
156
157 for literal in literals {
158 let lit_text = format!("'{}'", literal);
159 let component = closed_char_component(&lit_text);
160 assert!(component.kind == AsciiEscape);
161 assert!(component.range == range_closed(&lit_text));
162 }
163 }
164
165 #[test]
166 fn test_no_escapes() {
167 let literals = &['"', 'n', 'r', 't', '0', 'x', 'u'];
168
169 for &literal in literals {
170 let lit_text = format!("'{}'", literal);
171 let component = closed_char_component(&lit_text);
172 assert!(component.kind == CodePoint);
173 assert!(component.range == range_closed(&lit_text));
174 }
175 }
176}
diff --git a/crates/ra_syntax/src/string_lexing/parser.rs b/crates/ra_syntax/src/string_lexing/parser.rs
index 4a6d5bc93..14c6015c2 100644
--- a/crates/ra_syntax/src/string_lexing/parser.rs
+++ b/crates/ra_syntax/src/string_lexing/parser.rs
@@ -1,15 +1,16 @@
1use rowan::{TextRange, TextUnit}; 1use rowan::{TextRange, TextUnit};
2 2
3use self::CharComponentKind::*; 3use self::StringComponentKind::*;
4 4
5pub struct Parser<'a> { 5pub struct Parser<'a> {
6 pub(super) quote: u8,
6 pub(super) src: &'a str, 7 pub(super) src: &'a str,
7 pub(super) pos: usize, 8 pub(super) pos: usize,
8} 9}
9 10
10impl<'a> Parser<'a> { 11impl<'a> Parser<'a> {
11 pub fn new(src: &'a str) -> Parser<'a> { 12 pub fn new(src: &'a str, quote: u8) -> Parser<'a> {
12 Parser { src, pos: 0 } 13 Parser { quote, src, pos: 0 }
13 } 14 }
14 15
15 // Utility methods 16 // Utility methods
@@ -42,7 +43,7 @@ impl<'a> Parser<'a> {
42 43
43 // Char parsing methods 44 // Char parsing methods
44 45
45 fn parse_unicode_escape(&mut self, start: TextUnit) -> CharComponent { 46 fn parse_unicode_escape(&mut self, start: TextUnit) -> StringComponent {
46 match self.peek() { 47 match self.peek() {
47 Some('{') => { 48 Some('{') => {
48 self.advance(); 49 self.advance();
@@ -56,16 +57,16 @@ impl<'a> Parser<'a> {
56 } 57 }
57 58
58 let end = self.get_pos(); 59 let end = self.get_pos();
59 CharComponent::new(TextRange::from_to(start, end), UnicodeEscape) 60 StringComponent::new(TextRange::from_to(start, end), UnicodeEscape)
60 } 61 }
61 Some(_) | None => { 62 Some(_) | None => {
62 let end = self.get_pos(); 63 let end = self.get_pos();
63 CharComponent::new(TextRange::from_to(start, end), UnicodeEscape) 64 StringComponent::new(TextRange::from_to(start, end), UnicodeEscape)
64 } 65 }
65 } 66 }
66 } 67 }
67 68
68 fn parse_ascii_code_escape(&mut self, start: TextUnit) -> CharComponent { 69 fn parse_ascii_code_escape(&mut self, start: TextUnit) -> StringComponent {
69 let code_start = self.get_pos(); 70 let code_start = self.get_pos();
70 while let Some(next) = self.peek() { 71 while let Some(next) = self.peek() {
71 if next == '\'' || (self.get_pos() - code_start == 2.into()) { 72 if next == '\'' || (self.get_pos() - code_start == 2.into()) {
@@ -76,12 +77,12 @@ impl<'a> Parser<'a> {
76 } 77 }
77 78
78 let end = self.get_pos(); 79 let end = self.get_pos();
79 CharComponent::new(TextRange::from_to(start, end), AsciiCodeEscape) 80 StringComponent::new(TextRange::from_to(start, end), AsciiCodeEscape)
80 } 81 }
81 82
82 fn parse_escape(&mut self, start: TextUnit) -> CharComponent { 83 fn parse_escape(&mut self, start: TextUnit) -> StringComponent {
83 if self.peek().is_none() { 84 if self.peek().is_none() {
84 return CharComponent::new(TextRange::from_to(start, start), AsciiEscape); 85 return StringComponent::new(TextRange::from_to(start, start), AsciiEscape);
85 } 86 }
86 87
87 let next = self.advance(); 88 let next = self.advance();
@@ -90,29 +91,7 @@ impl<'a> Parser<'a> {
90 match next { 91 match next {
91 'x' => self.parse_ascii_code_escape(start), 92 'x' => self.parse_ascii_code_escape(start),
92 'u' => self.parse_unicode_escape(start), 93 'u' => self.parse_unicode_escape(start),
93 _ => CharComponent::new(range, AsciiEscape), 94 _ => StringComponent::new(range, AsciiEscape),
94 }
95 }
96
97 pub fn parse_char_component(&mut self) -> Option<CharComponent> {
98 let next = self.peek()?;
99
100 // Ignore character close
101 if next == '\'' {
102 return None;
103 }
104
105 let start = self.get_pos();
106 self.advance();
107
108 if next == '\\' {
109 Some(self.parse_escape(start))
110 } else {
111 let end = self.get_pos();
112 Some(CharComponent::new(
113 TextRange::from_to(start, end),
114 CodePoint,
115 ))
116 } 95 }
117 } 96 }
118 97
@@ -131,11 +110,11 @@ impl<'a> Parser<'a> {
131 } 110 }
132 } 111 }
133 112
134 pub fn parse_string_component(&mut self) -> Option<StringComponent> { 113 pub fn parse_component(&mut self) -> Option<StringComponent> {
135 let next = self.peek()?; 114 let next = self.peek()?;
136 115
137 // Ignore string close 116 // Ignore string close
138 if next == '"' { 117 if next == self.quote as char {
139 return None; 118 return None;
140 } 119 }
141 120
@@ -145,21 +124,31 @@ impl<'a> Parser<'a> {
145 if next == '\\' { 124 if next == '\\' {
146 // Strings can use `\` to ignore newlines, so we first try to parse one of those 125 // Strings can use `\` to ignore newlines, so we first try to parse one of those
147 // before falling back to parsing char escapes 126 // before falling back to parsing char escapes
148 self.parse_ignore_newline(start).or_else(|| { 127 if self.quote == b'"' {
149 let char_component = self.parse_escape(start); 128 if let Some(component) = self.parse_ignore_newline(start) {
150 Some(StringComponent::new( 129 return Some(component);
151 char_component.range, 130 }
152 StringComponentKind::Char(char_component.kind), 131 }
153 )) 132
154 }) 133 Some(self.parse_escape(start))
155 } else { 134 } else {
156 let end = self.get_pos(); 135 let end = self.get_pos();
157 Some(StringComponent::new( 136 Some(StringComponent::new(
158 TextRange::from_to(start, end), 137 TextRange::from_to(start, end),
159 StringComponentKind::Char(CodePoint), 138 CodePoint,
160 )) 139 ))
161 } 140 }
162 } 141 }
142
143 pub fn parse_suffix(&mut self) -> Option<TextRange> {
144 let start = self.get_pos();
145 let _ = self.peek()?;
146 while let Some(_) = self.peek() {
147 self.advance();
148 }
149 let end = self.get_pos();
150 Some(TextRange::from_to(start, end))
151 }
163} 152}
164 153
165#[derive(Debug, Eq, PartialEq, Clone)] 154#[derive(Debug, Eq, PartialEq, Clone)]
@@ -177,23 +166,6 @@ impl StringComponent {
177#[derive(Debug, Eq, PartialEq, Clone)] 166#[derive(Debug, Eq, PartialEq, Clone)]
178pub enum StringComponentKind { 167pub enum StringComponentKind {
179 IgnoreNewline, 168 IgnoreNewline,
180 Char(CharComponentKind),
181}
182
183#[derive(Debug, Eq, PartialEq, Clone)]
184pub struct CharComponent {
185 pub range: TextRange,
186 pub kind: CharComponentKind,
187}
188
189impl CharComponent {
190 fn new(range: TextRange, kind: CharComponentKind) -> CharComponent {
191 CharComponent { range, kind }
192 }
193}
194
195#[derive(Debug, Eq, PartialEq, Clone)]
196pub enum CharComponentKind {
197 CodePoint, 169 CodePoint,
198 AsciiEscape, 170 AsciiEscape,
199 AsciiCodeEscape, 171 AsciiCodeEscape,
diff --git a/crates/ra_syntax/src/string_lexing/string.rs b/crates/ra_syntax/src/string_lexing/string.rs
index d8351e9af..064f08544 100644
--- a/crates/ra_syntax/src/string_lexing/string.rs
+++ b/crates/ra_syntax/src/string_lexing/string.rs
@@ -1,41 +1,92 @@
1use super::parser::Parser; 1use crate::{
2use super::StringComponent; 2 TextRange,
3 string_lexing::{
4 parser::Parser,
5 StringComponent,
6}};
3 7
4pub fn parse_string_literal(src: &str) -> StringComponentIterator { 8pub fn parse_string_literal(src: &str) -> StringComponentIterator {
5 StringComponentIterator { 9 StringComponentIterator {
6 parser: Parser::new(src), 10 parser: Parser::new(src, b'"'),
7 has_closing_quote: false, 11 has_closing_quote: false,
12 suffix: None,
13 prefix: None,
14 quote: b'"',
15 }
16}
17
18pub fn parse_byte_string_literal(src: &str) -> StringComponentIterator {
19 StringComponentIterator {
20 parser: Parser::new(src, b'"'),
21 has_closing_quote: false,
22 suffix: None,
23 prefix: Some(b'b'),
24 quote: b'"',
25 }
26}
27
28pub fn parse_char_literal(src: &str) -> StringComponentIterator {
29 StringComponentIterator {
30 parser: Parser::new(src, b'\''),
31 has_closing_quote: false,
32 suffix: None,
33 prefix: None,
34 quote: b'\'',
35 }
36}
37
38pub fn parse_byte_literal(src: &str) -> StringComponentIterator {
39 StringComponentIterator {
40 parser: Parser::new(src, b'\''),
41 has_closing_quote: false,
42 suffix: None,
43 prefix: Some(b'b'),
44 quote: b'\'',
8 } 45 }
9} 46}
10 47
11pub struct StringComponentIterator<'a> { 48pub struct StringComponentIterator<'a> {
12 parser: Parser<'a>, 49 parser: Parser<'a>,
13 pub has_closing_quote: bool, 50 pub has_closing_quote: bool,
51 pub suffix: Option<TextRange>,
52 prefix: Option<u8>,
53 quote: u8,
14} 54}
15 55
16impl<'a> Iterator for StringComponentIterator<'a> { 56impl<'a> Iterator for StringComponentIterator<'a> {
17 type Item = StringComponent; 57 type Item = StringComponent;
18 fn next(&mut self) -> Option<StringComponent> { 58 fn next(&mut self) -> Option<StringComponent> {
19 if self.parser.pos == 0 { 59 if self.parser.pos == 0 {
60 if let Some(prefix) = self.prefix {
61 assert!(
62 self.parser.advance() == prefix as char,
63 "literal should start with a {:?}",
64 prefix as char,
65 );
66 }
20 assert!( 67 assert!(
21 self.parser.advance() == '"', 68 self.parser.advance() == self.quote as char,
22 "string literal should start with double quotes" 69 "literal should start with a {:?}",
70 self.quote as char,
23 ); 71 );
24 } 72 }
25 73
26 if let Some(component) = self.parser.parse_string_component() { 74 if let Some(component) = self.parser.parse_component() {
27 return Some(component); 75 return Some(component);
28 } 76 }
29 77
30 // We get here when there are no char components left to parse 78 // We get here when there are no char components left to parse
31 if self.parser.peek() == Some('"') { 79 if self.parser.peek() == Some(self.quote as char) {
32 self.parser.advance(); 80 self.parser.advance();
33 self.has_closing_quote = true; 81 self.has_closing_quote = true;
82 if let Some(range) = self.parser.parse_suffix() {
83 self.suffix = Some(range);
84 }
34 } 85 }
35 86
36 assert!( 87 assert!(
37 self.parser.peek() == None, 88 self.parser.peek() == None,
38 "string literal should leave no unparsed input: src = {:?}, pos = {}, length = {}", 89 "literal should leave no unparsed input: src = {:?}, pos = {}, length = {}",
39 self.parser.src, 90 self.parser.src,
40 self.parser.pos, 91 self.parser.pos,
41 self.parser.src.len() 92 self.parser.src.len()
@@ -44,3 +95,133 @@ impl<'a> Iterator for StringComponentIterator<'a> {
44 None 95 None
45 } 96 }
46} 97}
98
99#[cfg(test)]
100mod tests {
101 use rowan::TextRange;
102 use crate::string_lexing::{
103 StringComponent,
104 StringComponentKind::*,
105};
106
107 fn parse(src: &str) -> (bool, Vec<StringComponent>) {
108 let component_iterator = &mut super::parse_char_literal(src);
109 let components: Vec<_> = component_iterator.collect();
110 (component_iterator.has_closing_quote, components)
111 }
112
113 fn unclosed_char_component(src: &str) -> StringComponent {
114 let (has_closing_quote, components) = parse(src);
115 assert!(!has_closing_quote, "char should not have closing quote");
116 assert!(components.len() == 1);
117 components[0].clone()
118 }
119
120 fn closed_char_component(src: &str) -> StringComponent {
121 let (has_closing_quote, components) = parse(src);
122 assert!(has_closing_quote, "char should have closing quote");
123 assert!(
124 components.len() == 1,
125 "Literal: {}\nComponents: {:#?}",
126 src,
127 components
128 );
129 components[0].clone()
130 }
131
132 fn closed_char_components(src: &str) -> Vec<StringComponent> {
133 let (has_closing_quote, components) = parse(src);
134 assert!(has_closing_quote, "char should have closing quote");
135 components
136 }
137
138 fn range_closed(src: &str) -> TextRange {
139 TextRange::from_to(1.into(), (src.len() as u32 - 1).into())
140 }
141
142 fn range_unclosed(src: &str) -> TextRange {
143 TextRange::from_to(1.into(), (src.len() as u32).into())
144 }
145
146 #[test]
147 fn test_unicode_escapes() {
148 let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", "{}", ""];
149 for escape in unicode_escapes {
150 let escape_sequence = format!(r"'\u{}'", escape);
151 let component = closed_char_component(&escape_sequence);
152 let expected_range = range_closed(&escape_sequence);
153 assert_eq!(component.kind, UnicodeEscape);
154 assert_eq!(component.range, expected_range);
155 }
156 }
157
158 #[test]
159 fn test_unicode_escapes_unclosed() {
160 let unicode_escapes = &["{DEAD", "{BEEF", "{FF"];
161 for escape in unicode_escapes {
162 let escape_sequence = format!(r"'\u{}'", escape);
163 let component = unclosed_char_component(&escape_sequence);
164 let expected_range = range_unclosed(&escape_sequence);
165 assert_eq!(component.kind, UnicodeEscape);
166 assert_eq!(component.range, expected_range);
167 }
168 }
169
170 #[test]
171 fn test_empty_char() {
172 let (has_closing_quote, components) = parse("''");
173 assert!(has_closing_quote, "char should have closing quote");
174 assert!(components.len() == 0);
175 }
176
177 #[test]
178 fn test_unclosed_char() {
179 let component = unclosed_char_component("'a");
180 assert!(component.kind == CodePoint);
181 assert!(component.range == TextRange::from_to(1.into(), 2.into()));
182 }
183
184 #[test]
185 fn test_digit_escapes() {
186 let literals = &[r"", r"5", r"55"];
187
188 for literal in literals {
189 let lit_text = format!(r"'\x{}'", literal);
190 let component = closed_char_component(&lit_text);
191 assert!(component.kind == AsciiCodeEscape);
192 assert!(component.range == range_closed(&lit_text));
193 }
194
195 // More than 2 digits starts a new codepoint
196 let components = closed_char_components(r"'\x555'");
197 assert!(components.len() == 2);
198 assert!(components[1].kind == CodePoint);
199 }
200
201 #[test]
202 fn test_ascii_escapes() {
203 let literals = &[
204 r"\'", "\\\"", // equivalent to \"
205 r"\n", r"\r", r"\t", r"\\", r"\0",
206 ];
207
208 for literal in literals {
209 let lit_text = format!("'{}'", literal);
210 let component = closed_char_component(&lit_text);
211 assert!(component.kind == AsciiEscape);
212 assert!(component.range == range_closed(&lit_text));
213 }
214 }
215
216 #[test]
217 fn test_no_escapes() {
218 let literals = &['"', 'n', 'r', 't', '0', 'x', 'u'];
219
220 for &literal in literals {
221 let lit_text = format!("'{}'", literal);
222 let component = closed_char_component(&lit_text);
223 assert!(component.kind == CodePoint);
224 assert!(component.range == range_closed(&lit_text));
225 }
226 }
227}
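The four literal kinds now share one iterator: the constructors above differ only in the quote character, the optional `b` prefix, and the quote passed to the shared Parser. After the closing quote, parse_suffix captures any trailing characters (such as the `u32` in `"hi"u32`) into the public `suffix` field for the validators to flag. A test-style sketch of how the suffix field is expected to read, written in the style of the module's own tests (editor's illustration, not part of this patch; assumes it sits inside the same test module):

    #[test]
    fn test_string_suffix() {
        let mut components = super::parse_string_literal(r#""hi"u32"#);
        let _parsed: Vec<_> = components.by_ref().collect();
        assert!(components.has_closing_quote);
        assert!(components.suffix.is_some()); // covers the trailing `u32`
    }
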
diff --git a/crates/ra_syntax/src/utils.rs b/crates/ra_syntax/src/utils.rs
index 5bbdf80bb..0a2b6afbc 100644
--- a/crates/ra_syntax/src/utils.rs
+++ b/crates/ra_syntax/src/utils.rs
@@ -1,10 +1,13 @@
1use crate::{SourceFileNode, SyntaxKind, SyntaxNodeRef, WalkEvent}; 1use crate::{SourceFileNode, SyntaxKind, SyntaxNodeRef, WalkEvent, AstNode};
2use std::fmt::Write; 2use std::fmt::Write;
3use std::str; 3use std::str;
4 4
5/// Parse a file and create a string representation of the resulting parse tree. 5/// Parse a file and create a string representation of the resulting parse tree.
6pub fn dump_tree(syntax: SyntaxNodeRef) -> String { 6pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
7 let mut errors: Vec<_> = syntax.root_data().to_vec(); 7 let mut errors: Vec<_> = match syntax.ancestors().find_map(SourceFileNode::cast) {
8 Some(file) => file.owned().errors(),
9 None => syntax.root_data().to_vec(),
10 };
8 errors.sort_by_key(|e| e.offset()); 11 errors.sort_by_key(|e| e.offset());
9 let mut err_pos = 0; 12 let mut err_pos = 0;
10 let mut level = 0; 13 let mut level = 0;
diff --git a/crates/ra_syntax/src/validation/byte.rs b/crates/ra_syntax/src/validation/byte.rs
index 43c0d7edd..d0897eeed 100644
--- a/crates/ra_syntax/src/validation/byte.rs
+++ b/crates/ra_syntax/src/validation/byte.rs
@@ -2,7 +2,7 @@
2 2
3use crate::{ 3use crate::{
4 ast::{self, AstNode}, 4 ast::{self, AstNode},
5 string_lexing::{self, CharComponentKind}, 5 string_lexing::{self, StringComponentKind},
6 TextRange, 6 TextRange,
7 validation::char, 7 validation::char,
8 yellow::{ 8 yellow::{
@@ -27,6 +27,13 @@ pub(super) fn validate_byte_node(node: ast::Byte, errors: &mut Vec<SyntaxError>)
27 errors.push(SyntaxError::new(UnclosedByte, literal_range)); 27 errors.push(SyntaxError::new(UnclosedByte, literal_range));
28 } 28 }
29 29
30 if let Some(range) = components.suffix {
31 errors.push(SyntaxError::new(
32 InvalidSuffix,
33 range + literal_range.start(),
34 ));
35 }
36
30 if len == 0 { 37 if len == 0 {
31 errors.push(SyntaxError::new(EmptyByte, literal_range)); 38 errors.push(SyntaxError::new(EmptyByte, literal_range));
32 } 39 }
@@ -38,11 +45,11 @@ pub(super) fn validate_byte_node(node: ast::Byte, errors: &mut Vec<SyntaxError>)
38 45
39pub(super) fn validate_byte_component( 46pub(super) fn validate_byte_component(
40 text: &str, 47 text: &str,
41 kind: CharComponentKind, 48 kind: StringComponentKind,
42 range: TextRange, 49 range: TextRange,
43 errors: &mut Vec<SyntaxError>, 50 errors: &mut Vec<SyntaxError>,
44) { 51) {
45 use self::CharComponentKind::*; 52 use self::StringComponentKind::*;
46 match kind { 53 match kind {
47 AsciiEscape => validate_byte_escape(text, range, errors), 54 AsciiEscape => validate_byte_escape(text, range, errors),
48 AsciiCodeEscape => validate_byte_code_escape(text, range, errors), 55 AsciiCodeEscape => validate_byte_code_escape(text, range, errors),
@@ -63,6 +70,7 @@ pub(super) fn validate_byte_component(
63 errors.push(SyntaxError::new(ByteOutOfRange, range)); 70 errors.push(SyntaxError::new(ByteOutOfRange, range));
64 } 71 }
65 } 72 }
73 IgnoreNewline => { /* always valid */ }
66 } 74 }
67} 75}
68 76
diff --git a/crates/ra_syntax/src/validation/byte_string.rs b/crates/ra_syntax/src/validation/byte_string.rs
index 7b830e97c..f7a4fb156 100644
--- a/crates/ra_syntax/src/validation/byte_string.rs
+++ b/crates/ra_syntax/src/validation/byte_string.rs
@@ -17,21 +17,28 @@ pub(crate) fn validate_byte_string_node(node: ast::ByteString, errors: &mut Vec<
17 let range = component.range + literal_range.start(); 17 let range = component.range + literal_range.start();
18 18
19 match component.kind { 19 match component.kind {
20 StringComponentKind::Char(kind) => { 20 StringComponentKind::IgnoreNewline => { /* always valid */ }
21 _ => {
21 // Chars must escape \t, \n and \r codepoints, but strings don't 22 // Chars must escape \t, \n and \r codepoints, but strings don't
22 let text = &literal_text[component.range]; 23 let text = &literal_text[component.range];
23 match text { 24 match text {
24 "\t" | "\n" | "\r" => { /* always valid */ } 25 "\t" | "\n" | "\r" => { /* always valid */ }
25 _ => byte::validate_byte_component(text, kind, range, errors), 26 _ => byte::validate_byte_component(text, component.kind, range, errors),
26 } 27 }
27 } 28 }
28 StringComponentKind::IgnoreNewline => { /* always valid */ }
29 } 29 }
30 } 30 }
31 31
32 if !components.has_closing_quote { 32 if !components.has_closing_quote {
33 errors.push(SyntaxError::new(UnclosedString, literal_range)); 33 errors.push(SyntaxError::new(UnclosedString, literal_range));
34 } 34 }
35
36 if let Some(range) = components.suffix {
37 errors.push(SyntaxError::new(
38 InvalidSuffix,
39 range + literal_range.start(),
40 ));
41 }
35} 42}
36 43
37#[cfg(test)] 44#[cfg(test)]
diff --git a/crates/ra_syntax/src/validation/char.rs b/crates/ra_syntax/src/validation/char.rs
index 4728c85e6..19cd3830f 100644
--- a/crates/ra_syntax/src/validation/char.rs
+++ b/crates/ra_syntax/src/validation/char.rs
@@ -6,7 +6,7 @@ use arrayvec::ArrayString;
6 6
7use crate::{ 7use crate::{
8 ast::{self, AstNode}, 8 ast::{self, AstNode},
9 string_lexing::{self, CharComponentKind}, 9 string_lexing::{self, StringComponentKind},
10 TextRange, 10 TextRange,
11 yellow::{ 11 yellow::{
12 SyntaxError, 12 SyntaxError,
@@ -30,6 +30,13 @@ pub(super) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>)
30 errors.push(SyntaxError::new(UnclosedChar, literal_range)); 30 errors.push(SyntaxError::new(UnclosedChar, literal_range));
31 } 31 }
32 32
33 if let Some(range) = components.suffix {
34 errors.push(SyntaxError::new(
35 InvalidSuffix,
36 range + literal_range.start(),
37 ));
38 }
39
33 if len == 0 { 40 if len == 0 {
34 errors.push(SyntaxError::new(EmptyChar, literal_range)); 41 errors.push(SyntaxError::new(EmptyChar, literal_range));
35 } 42 }
@@ -41,12 +48,12 @@ pub(super) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>)
41 48
42pub(super) fn validate_char_component( 49pub(super) fn validate_char_component(
43 text: &str, 50 text: &str,
44 kind: CharComponentKind, 51 kind: StringComponentKind,
45 range: TextRange, 52 range: TextRange,
46 errors: &mut Vec<SyntaxError>, 53 errors: &mut Vec<SyntaxError>,
47) { 54) {
48 // Validate escapes 55 // Validate escapes
49 use self::CharComponentKind::*; 56 use self::StringComponentKind::*;
50 match kind { 57 match kind {
51 AsciiEscape => validate_ascii_escape(text, range, errors), 58 AsciiEscape => validate_ascii_escape(text, range, errors),
52 AsciiCodeEscape => validate_ascii_code_escape(text, range, errors), 59 AsciiCodeEscape => validate_ascii_code_escape(text, range, errors),
@@ -57,6 +64,7 @@ pub(super) fn validate_char_component(
57 errors.push(SyntaxError::new(UnescapedCodepoint, range)); 64 errors.push(SyntaxError::new(UnescapedCodepoint, range));
58 } 65 }
59 } 66 }
67 StringComponentKind::IgnoreNewline => { /* always valid */ }
60 } 68 }
61} 69}
62 70
diff --git a/crates/ra_syntax/src/validation/string.rs b/crates/ra_syntax/src/validation/string.rs
index 089879d15..1371bb1f0 100644
--- a/crates/ra_syntax/src/validation/string.rs
+++ b/crates/ra_syntax/src/validation/string.rs
@@ -1,6 +1,6 @@
1use crate::{ 1use crate::{
2 ast::{self, AstNode}, 2 ast::{self, AstNode},
3 string_lexing::{self, StringComponentKind}, 3 string_lexing,
4 yellow::{ 4 yellow::{
5 SyntaxError, 5 SyntaxError,
6 SyntaxErrorKind::*, 6 SyntaxErrorKind::*,
@@ -16,22 +16,24 @@ pub(crate) fn validate_string_node(node: ast::String, errors: &mut Vec<SyntaxErr
16 for component in &mut components { 16 for component in &mut components {
17 let range = component.range + literal_range.start(); 17 let range = component.range + literal_range.start();
18 18
19 match component.kind { 19 // Chars must escape \t, \n and \r codepoints, but strings don't
20 StringComponentKind::Char(kind) => { 20 let text = &literal_text[component.range];
21 // Chars must escape \t, \n and \r codepoints, but strings don't 21 match text {
22 let text = &literal_text[component.range]; 22 "\t" | "\n" | "\r" => { /* always valid */ }
23 match text { 23 _ => char::validate_char_component(text, component.kind, range, errors),
24 "\t" | "\n" | "\r" => { /* always valid */ }
25 _ => char::validate_char_component(text, kind, range, errors),
26 }
27 }
28 StringComponentKind::IgnoreNewline => { /* always valid */ }
29 } 24 }
30 } 25 }
31 26
32 if !components.has_closing_quote { 27 if !components.has_closing_quote {
33 errors.push(SyntaxError::new(UnclosedString, literal_range)); 28 errors.push(SyntaxError::new(UnclosedString, literal_range));
34 } 29 }
30
31 if let Some(range) = components.suffix {
32 errors.push(SyntaxError::new(
33 InvalidSuffix,
34 range + literal_range.start(),
35 ));
36 }
35} 37}
36 38
37#[cfg(test)] 39#[cfg(test)]
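All four validators report the new InvalidSuffix error the same way: component and suffix ranges come back relative to the literal's own text, so they are shifted by the literal's start offset before being attached as errors. A tiny worked example with hypothetical offsets (editor's illustration, not part of the patch):

    use rowan::{TextRange, TextUnit};

    fn example_suffix_error_range() -> TextRange {
        // Suppose the literal `"hi"u32` starts at file offset 24 (made-up number);
        // its suffix occupies [4; 7) relative to the literal text.
        let literal_start = TextUnit::from(24u32);
        let suffix = TextRange::from_to(4.into(), 7.into());
        suffix + literal_start // [28; 31): the range reported with InvalidSuffix
    }
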
diff --git a/crates/ra_syntax/src/yellow/syntax_error.rs b/crates/ra_syntax/src/yellow/syntax_error.rs
index c32ee650d..534f3511e 100644
--- a/crates/ra_syntax/src/yellow/syntax_error.rs
+++ b/crates/ra_syntax/src/yellow/syntax_error.rs
@@ -93,6 +93,7 @@ pub enum SyntaxErrorKind {
93 OverlongUnicodeEscape, 93 OverlongUnicodeEscape,
94 UnicodeEscapeOutOfRange, 94 UnicodeEscapeOutOfRange,
95 UnclosedString, 95 UnclosedString,
96 InvalidSuffix,
96} 97}
97 98
98#[derive(Debug, Clone, PartialEq, Eq, Hash)] 99#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -134,6 +135,7 @@ impl fmt::Display for SyntaxErrorKind {
134 } 135 }
135 UnicodeEscapeOutOfRange => write!(f, "Unicode escape code should be at most 0x10FFFF"), 136 UnicodeEscapeOutOfRange => write!(f, "Unicode escape code should be at most 0x10FFFF"),
136 UnclosedString => write!(f, "Unclosed string literal"), 137 UnclosedString => write!(f, "Unclosed string literal"),
138 InvalidSuffix => write!(f, "Invalid literal suffix"),
137 ParseError(msg) => write!(f, "{}", msg.0), 139 ParseError(msg) => write!(f, "{}", msg.0),
138 } 140 }
139 } 141 }
diff --git a/crates/ra_syntax/tests/data/parser/err/0029_field_completion.rs b/crates/ra_syntax/tests/data/parser/err/0029_field_completion.rs
new file mode 100644
index 000000000..a7cdc17bb
--- /dev/null
+++ b/crates/ra_syntax/tests/data/parser/err/0029_field_completion.rs
@@ -0,0 +1,3 @@
1fn foo(a: A) {
2 a.
3}
diff --git a/crates/ra_syntax/tests/data/parser/err/0029_field_completion.txt b/crates/ra_syntax/tests/data/parser/err/0029_field_completion.txt
new file mode 100644
index 000000000..fd2a3f37b
--- /dev/null
+++ b/crates/ra_syntax/tests/data/parser/err/0029_field_completion.txt
@@ -0,0 +1,35 @@
1SOURCE_FILE@[0; 24)
2 FN_DEF@[0; 23)
3 FN_KW@[0; 2)
4 WHITESPACE@[2; 3)
5 NAME@[3; 6)
6 IDENT@[3; 6) "foo"
7 PARAM_LIST@[6; 12)
8 L_PAREN@[6; 7)
9 PARAM@[7; 11)
10 BIND_PAT@[7; 8)
11 NAME@[7; 8)
12 IDENT@[7; 8) "a"
13 COLON@[8; 9)
14 WHITESPACE@[9; 10)
15 PATH_TYPE@[10; 11)
16 PATH@[10; 11)
17 PATH_SEGMENT@[10; 11)
18 NAME_REF@[10; 11)
19 IDENT@[10; 11) "A"
20 R_PAREN@[11; 12)
21 WHITESPACE@[12; 13)
22 BLOCK@[13; 23)
23 L_CURLY@[13; 14)
24 WHITESPACE@[14; 19)
25 FIELD_EXPR@[19; 21)
26 PATH_EXPR@[19; 20)
27 PATH@[19; 20)
28 PATH_SEGMENT@[19; 20)
29 NAME_REF@[19; 20)
30 IDENT@[19; 20) "a"
31 DOT@[20; 21)
32 err: `expected field name or number`
33 WHITESPACE@[21; 22)
34 R_CURLY@[22; 23)
35 WHITESPACE@[23; 24)
diff --git a/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.rs b/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.rs
new file mode 100644
index 000000000..261aad1fb
--- /dev/null
+++ b/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.rs
@@ -0,0 +1,6 @@
1fn main() {
2 let _ = 'c'u32;
3 let _ = "string"invalid;
4 let _ = b'b'_suff;
5 let _ = b"bs"invalid;
6}
diff --git a/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt b/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt
new file mode 100644
index 000000000..dc10e9ee7
--- /dev/null
+++ b/crates/ra_syntax/tests/data/parser/err/0030_string_suffixes.txt
@@ -0,0 +1,73 @@
1SOURCE_FILE@[0; 112)
2 FN_DEF@[0; 111)
3 FN_KW@[0; 2)
4 WHITESPACE@[2; 3)
5 NAME@[3; 7)
6 IDENT@[3; 7) "main"
7 PARAM_LIST@[7; 9)
8 L_PAREN@[7; 8)
9 R_PAREN@[8; 9)
10 WHITESPACE@[9; 10)
11 BLOCK@[10; 111)
12 L_CURLY@[10; 11)
13 WHITESPACE@[11; 16)
14 LET_STMT@[16; 27)
15 LET_KW@[16; 19)
16 WHITESPACE@[19; 20)
17 PLACEHOLDER_PAT@[20; 21)
18 UNDERSCORE@[20; 21)
19 WHITESPACE@[21; 22)
20 EQ@[22; 23)
21 WHITESPACE@[23; 24)
22 LITERAL@[24; 27)
23 CHAR@[24; 27)
24 err: `expected SEMI`
25 EXPR_STMT@[27; 31)
26 PATH_EXPR@[27; 30)
27 PATH@[27; 30)
28 PATH_SEGMENT@[27; 30)
29 NAME_REF@[27; 30)
30 IDENT@[27; 30) "u32"
31 SEMI@[30; 31)
32 WHITESPACE@[31; 36)
33 LET_STMT@[36; 60)
34 LET_KW@[36; 39)
35 WHITESPACE@[39; 40)
36 PLACEHOLDER_PAT@[40; 41)
37 UNDERSCORE@[40; 41)
38 WHITESPACE@[41; 42)
39 EQ@[42; 43)
40 WHITESPACE@[43; 44)
41 LITERAL@[44; 59)
42 STRING@[44; 59)
43 err: `Invalid literal suffix`
44 SEMI@[59; 60)
45 WHITESPACE@[60; 65)
46 LET_STMT@[65; 83)
47 LET_KW@[65; 68)
48 WHITESPACE@[68; 69)
49 PLACEHOLDER_PAT@[69; 70)
50 UNDERSCORE@[69; 70)
51 WHITESPACE@[70; 71)
52 EQ@[71; 72)
53 WHITESPACE@[72; 73)
54 LITERAL@[73; 82)
55 BYTE@[73; 82)
56 err: `Invalid literal suffix`
57 SEMI@[82; 83)
58 WHITESPACE@[83; 88)
59 LET_STMT@[88; 109)
60 LET_KW@[88; 91)
61 WHITESPACE@[91; 92)
62 PLACEHOLDER_PAT@[92; 93)
63 UNDERSCORE@[92; 93)
64 WHITESPACE@[93; 94)
65 EQ@[94; 95)
66 WHITESPACE@[95; 96)
67 LITERAL@[96; 108)
68 BYTE_STRING@[96; 108)
69 err: `Invalid literal suffix`
70 SEMI@[108; 109)
71 WHITESPACE@[109; 110)
72 R_CURLY@[110; 111)
73 WHITESPACE@[111; 112)
diff --git a/crates/tools/src/lib.rs b/crates/tools/src/lib.rs
index 2795afe0b..6f96b8120 100644
--- a/crates/tools/src/lib.rs
+++ b/crates/tools/src/lib.rs
@@ -29,7 +29,7 @@ pub fn collect_tests(s: &str) -> Vec<(usize, Test)> {
29 let prefix = "// "; 29 let prefix = "// ";
30 let comment_blocks = s 30 let comment_blocks = s
31 .lines() 31 .lines()
32 .map(str::trim_left) 32 .map(str::trim_start)
33 .enumerate() 33 .enumerate()
34 .group_by(|(_idx, line)| line.starts_with(prefix)); 34 .group_by(|(_idx, line)| line.starts_with(prefix));
35 35