-rw-r--r--  Cargo.lock | 16
-rw-r--r--  crates/ra_assists/src/handlers/add_from_impl_for_enum.rs | 2
-rw-r--r--  crates/ra_assists/src/handlers/introduce_variable.rs | 2
-rw-r--r--  crates/ra_assists/src/handlers/merge_imports.rs | 2
-rw-r--r--  crates/ra_assists/src/handlers/split_import.rs | 7
-rw-r--r--  crates/ra_db/src/fixture.rs | 2
-rw-r--r--  crates/ra_db/src/input.rs | 2
-rw-r--r--  crates/ra_hir/src/semantics.rs | 4
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs | 11
-rw-r--r--  crates/ra_hir_def/src/body/lower.rs | 8
-rw-r--r--  crates/ra_hir_def/src/body/scope.rs | 4
-rw-r--r--  crates/ra_hir_expand/src/ast_id_map.rs | 2
-rw-r--r--  crates/ra_hir_expand/src/builtin_macro.rs | 8
-rw-r--r--  crates/ra_hir_ty/Cargo.toml | 6
-rw-r--r--  crates/ra_hir_ty/src/db.rs | 7
-rw-r--r--  crates/ra_hir_ty/src/infer.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/infer/pat.rs | 6
-rw-r--r--  crates/ra_hir_ty/src/tests/patterns.rs | 26
-rw-r--r--  crates/ra_hir_ty/src/traits.rs | 2
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk.rs | 145
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk/tls.rs | 33
-rw-r--r--  crates/ra_ide/src/extend_selection.rs | 2
-rw-r--r--  crates/ra_ide/src/goto_definition.rs | 28
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html | 6
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html | 6
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs | 29
-rw-r--r--  crates/ra_ide/src/syntax_highlighting/tags.rs | 2
-rw-r--r--  crates/ra_ide_db/src/defs.rs | 9
-rw-r--r--  crates/ra_ide_db/src/marks.rs | 1
-rw-r--r--  crates/ra_mbe/src/mbe_expander/matcher.rs | 6
-rw-r--r--  crates/ra_mbe/src/subtree_source.rs | 15
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 2
-rw-r--r--  crates/ra_mbe/src/tests.rs | 30
-rw-r--r--  crates/ra_mbe/src/tt_iter.rs | 8
-rw-r--r--  crates/ra_proc_macro/src/lib.rs | 17
-rw-r--r--  crates/ra_proc_macro/src/msg.rs | 37
-rw-r--r--  crates/ra_proc_macro/src/process.rs | 118
-rw-r--r--  crates/ra_proc_macro/src/rpc.rs | 18
-rw-r--r--  crates/ra_proc_macro_srv/src/cli.rs | 58
-rw-r--r--  crates/ra_proc_macro_srv/src/dylib.rs | 103
-rw-r--r--  crates/ra_proc_macro_srv/src/lib.rs | 36
-rw-r--r--  crates/ra_proc_macro_srv/src/rustc_server.rs | 2
-rw-r--r--  crates/ra_proc_macro_srv/src/tests/utils.rs | 2
-rw-r--r--  crates/ra_project_model/src/cargo_workspace.rs | 3
-rw-r--r--  crates/ra_syntax/src/algo.rs | 8
-rw-r--r--  crates/ra_syntax/src/ast/edit.rs | 6
-rw-r--r--  crates/rust-analyzer/src/bin/args.rs | 16
-rw-r--r--  crates/rust-analyzer/src/bin/main.rs | 4
-rw-r--r--  crates/rust-analyzer/src/cli/load_cargo.rs | 2
-rw-r--r--  crates/rust-analyzer/src/conv.rs | 5
-rw-r--r--  crates/rust-analyzer/src/semantic_tokens.rs | 3
-rw-r--r--  crates/rust-analyzer/src/world.rs | 25
-rw-r--r--  docs/user/readme.adoc | 43
-rw-r--r--  editors/code/package.json | 4
54 files changed, 595 insertions, 358 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 3826ae1c6..37455bc57 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -114,7 +114,7 @@ checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822"
114[[package]] 114[[package]]
115name = "chalk-derive" 115name = "chalk-derive"
116version = "0.1.0" 116version = "0.1.0"
117source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" 117source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
118dependencies = [ 118dependencies = [
119 "proc-macro2", 119 "proc-macro2",
120 "quote", 120 "quote",
@@ -125,7 +125,7 @@ dependencies = [
125[[package]] 125[[package]]
126name = "chalk-engine" 126name = "chalk-engine"
127version = "0.9.0" 127version = "0.9.0"
128source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" 128source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
129dependencies = [ 129dependencies = [
130 "chalk-macros", 130 "chalk-macros",
131 "rustc-hash", 131 "rustc-hash",
@@ -134,7 +134,7 @@ dependencies = [
134[[package]] 134[[package]]
135name = "chalk-ir" 135name = "chalk-ir"
136version = "0.1.0" 136version = "0.1.0"
137source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" 137source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
138dependencies = [ 138dependencies = [
139 "chalk-derive", 139 "chalk-derive",
140 "chalk-engine", 140 "chalk-engine",
@@ -144,7 +144,7 @@ dependencies = [
144[[package]] 144[[package]]
145name = "chalk-macros" 145name = "chalk-macros"
146version = "0.1.1" 146version = "0.1.1"
147source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" 147source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
148dependencies = [ 148dependencies = [
149 "lazy_static", 149 "lazy_static",
150] 150]
@@ -152,7 +152,7 @@ dependencies = [
152[[package]] 152[[package]]
153name = "chalk-rust-ir" 153name = "chalk-rust-ir"
154version = "0.1.0" 154version = "0.1.0"
155source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" 155source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
156dependencies = [ 156dependencies = [
157 "chalk-derive", 157 "chalk-derive",
158 "chalk-engine", 158 "chalk-engine",
@@ -163,7 +163,7 @@ dependencies = [
163[[package]] 163[[package]]
164name = "chalk-solve" 164name = "chalk-solve"
165version = "0.1.0" 165version = "0.1.0"
166source = "git+https://github.com/rust-lang/chalk.git?rev=28cef6ff403d403e6ad2f3d27d944e9ffac1bce8#28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" 166source = "git+https://github.com/rust-lang/chalk.git?rev=2c072cc830d04af5f10b390e6643327f85108282#2c072cc830d04af5f10b390e6643327f85108282"
167dependencies = [ 167dependencies = [
168 "chalk-derive", 168 "chalk-derive",
169 "chalk-engine", 169 "chalk-engine",
@@ -1288,9 +1288,9 @@ checksum = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84"
1288 1288
1289[[package]] 1289[[package]]
1290name = "regex" 1290name = "regex"
1291version = "1.3.6" 1291version = "1.3.7"
1292source = "registry+https://github.com/rust-lang/crates.io-index" 1292source = "registry+https://github.com/rust-lang/crates.io-index"
1293checksum = "7f6946991529684867e47d86474e3a6d0c0ab9b82d5821e314b1ede31fa3a4b3" 1293checksum = "a6020f034922e3194c711b82a627453881bc4682166cabb07134a10c26ba7692"
1294dependencies = [ 1294dependencies = [
1295 "aho-corasick", 1295 "aho-corasick",
1296 "memchr", 1296 "memchr",
diff --git a/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs b/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs
index 864373aa5..0621487e8 100644
--- a/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs
+++ b/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs
@@ -98,7 +98,7 @@ fn already_has_from_impl(
98 }; 98 };
99 let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db); 99 let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db);
100 100
101 e_ty.impls_trait(sema.db, from_trait, &[var_ty.clone()]) 101 e_ty.impls_trait(sema.db, from_trait, &[var_ty])
102} 102}
103 103
104#[cfg(test)] 104#[cfg(test)]
diff --git a/crates/ra_assists/src/handlers/introduce_variable.rs b/crates/ra_assists/src/handlers/introduce_variable.rs
index 8d0f7e922..8c09e6bcd 100644
--- a/crates/ra_assists/src/handlers/introduce_variable.rs
+++ b/crates/ra_assists/src/handlers/introduce_variable.rs
@@ -124,7 +124,7 @@ fn anchor_stmt(expr: ast::Expr) -> Option<(SyntaxNode, bool)> {
124 } 124 }
125 } 125 }
126 126
127 if ast::Stmt::cast(node.clone().into()).is_some() { 127 if ast::Stmt::cast(node.clone()).is_some() {
128 return Some((node, false)); 128 return Some((node, false));
129 } 129 }
130 130
diff --git a/crates/ra_assists/src/handlers/merge_imports.rs b/crates/ra_assists/src/handlers/merge_imports.rs
index ef0ce0586..4be1238f1 100644
--- a/crates/ra_assists/src/handlers/merge_imports.rs
+++ b/crates/ra_assists/src/handlers/merge_imports.rs
@@ -30,7 +30,7 @@ pub(crate) fn merge_imports(ctx: AssistCtx) -> Option<Assist> {
30 .filter_map(|dir| neighbor(&use_item, dir)) 30 .filter_map(|dir| neighbor(&use_item, dir))
31 .filter_map(|it| Some((it.clone(), it.use_tree()?))) 31 .filter_map(|it| Some((it.clone(), it.use_tree()?)))
32 .find_map(|(use_item, use_tree)| { 32 .find_map(|(use_item, use_tree)| {
33 Some((try_merge_trees(&tree, &use_tree)?, use_item.clone())) 33 Some((try_merge_trees(&tree, &use_tree)?, use_item))
34 })?; 34 })?;
35 35
36 rewriter.replace_ast(&tree, &merged); 36 rewriter.replace_ast(&tree, &merged);
diff --git a/crates/ra_assists/src/handlers/split_import.rs b/crates/ra_assists/src/handlers/split_import.rs
index d9244f22d..f25826796 100644
--- a/crates/ra_assists/src/handlers/split_import.rs
+++ b/crates/ra_assists/src/handlers/split_import.rs
@@ -37,7 +37,7 @@ pub(crate) fn split_import(ctx: AssistCtx) -> Option<Assist> {
37 37
38#[cfg(test)] 38#[cfg(test)]
39mod tests { 39mod tests {
40 use crate::helpers::{check_assist, check_assist_target}; 40 use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
41 41
42 use super::*; 42 use super::*;
43 43
@@ -63,4 +63,9 @@ mod tests {
63 fn split_import_target() { 63 fn split_import_target() {
64 check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::"); 64 check_assist_target(split_import, "use crate::<|>db::{RootDatabase, FileSymbol}", "::");
65 } 65 }
66
67 #[test]
68 fn issue4044() {
69 check_assist_not_applicable(split_import, "use crate::<|>:::self;")
70 }
66} 71}
diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs
index 7777ce81e..8248684ee 100644
--- a/crates/ra_db/src/fixture.rs
+++ b/crates/ra_db/src/fixture.rs
@@ -235,7 +235,7 @@ fn parse_meta(meta: &str) -> ParsedMeta {
235 "env" => { 235 "env" => {
236 for key in value.split(',') { 236 for key in value.split(',') {
237 if let Some((k, v)) = split1(key, '=') { 237 if let Some((k, v)) = split1(key, '=') {
238 env.set(k.into(), v.into()); 238 env.set(k, v.into());
239 } 239 }
240 } 240 }
241 } 241 }
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index 5ddce98c6..ab14e2d5e 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -327,7 +327,7 @@ impl ExternSource {
327 self.extern_paths.iter().find_map(|(root_path, id)| { 327 self.extern_paths.iter().find_map(|(root_path, id)| {
328 if let Ok(rel_path) = path.strip_prefix(root_path) { 328 if let Ok(rel_path) = path.strip_prefix(root_path) {
329 let rel_path = RelativePathBuf::from_path(rel_path).ok()?; 329 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
330 Some((id.clone(), rel_path)) 330 Some((*id, rel_path))
331 } else { 331 } else {
332 None 332 None
333 } 333 }
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 0b477f0e9..5d6edc45c 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -195,6 +195,10 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
195 self.analyze(field.syntax()).resolve_record_field(self.db, field) 195 self.analyze(field.syntax()).resolve_record_field(self.db, field)
196 } 196 }
197 197
198 pub fn resolve_record_field_pat(&self, field: &ast::RecordFieldPat) -> Option<StructField> {
199 self.analyze(field.syntax()).resolve_record_field_pat(self.db, field)
200 }
201
198 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> { 202 pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<MacroDef> {
199 let sa = self.analyze(macro_call.syntax()); 203 let sa = self.analyze(macro_call.syntax());
200 let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call); 204 let macro_call = self.find_file(macro_call.syntax().clone()).with_value(macro_call);
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 23af400b8..0ed6d0958 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -95,6 +95,7 @@ impl SourceAnalyzer {
95 } 95 }
96 96
97 fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> { 97 fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
98 // FIXME: macros, see `expr_id`
98 let src = InFile { file_id: self.file_id, value: pat }; 99 let src = InFile { file_id: self.file_id, value: pat };
99 self.body_source_map.as_ref()?.node_pat(src) 100 self.body_source_map.as_ref()?.node_pat(src)
100 } 101 }
@@ -167,6 +168,16 @@ impl SourceAnalyzer {
167 Some((struct_field.into(), local)) 168 Some((struct_field.into(), local))
168 } 169 }
169 170
171 pub(crate) fn resolve_record_field_pat(
172 &self,
173 _db: &dyn HirDatabase,
174 field: &ast::RecordFieldPat,
175 ) -> Option<StructField> {
176 let pat_id = self.pat_id(&field.pat()?)?;
177 let struct_field = self.infer.as_ref()?.record_field_pat_resolution(pat_id)?;
178 Some(struct_field.into())
179 }
180
170 pub(crate) fn resolve_macro_call( 181 pub(crate) fn resolve_macro_call(
171 &self, 182 &self,
172 db: &dyn HirDatabase, 183 db: &dyn HirDatabase,
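For context, a minimal illustrative snippet (the same shape as the goto-definition test added later in this commit) of what the new `resolve_record_field_pat` enables: the field name inside a record pattern now resolves to the struct field it refers to.

    struct Foo {
        spam: u32,
    }

    fn bar(foo: Foo) -> u32 {
        // `spam` in this record pattern now resolves to the field `Foo::spam`,
        // so goto-definition and highlighting work on it.
        let Foo { spam: x } = foo;
        x
    }
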
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index 82a52804d..0caedd8d8 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -473,16 +473,14 @@ impl ExprCollector<'_> {
473 self.collect_block_items(&block); 473 self.collect_block_items(&block);
474 let statements = block 474 let statements = block
475 .statements() 475 .statements()
476 .filter_map(|s| match s { 476 .map(|s| match s {
477 ast::Stmt::LetStmt(stmt) => { 477 ast::Stmt::LetStmt(stmt) => {
478 let pat = self.collect_pat_opt(stmt.pat()); 478 let pat = self.collect_pat_opt(stmt.pat());
479 let type_ref = stmt.ascribed_type().map(TypeRef::from_ast); 479 let type_ref = stmt.ascribed_type().map(TypeRef::from_ast);
480 let initializer = stmt.initializer().map(|e| self.collect_expr(e)); 480 let initializer = stmt.initializer().map(|e| self.collect_expr(e));
481 Some(Statement::Let { pat, type_ref, initializer }) 481 Statement::Let { pat, type_ref, initializer }
482 }
483 ast::Stmt::ExprStmt(stmt) => {
484 Some(Statement::Expr(self.collect_expr_opt(stmt.expr())))
485 } 482 }
483 ast::Stmt::ExprStmt(stmt) => Statement::Expr(self.collect_expr_opt(stmt.expr())),
486 }) 484 })
487 .collect(); 485 .collect();
488 let tail = block.expr().map(|e| self.collect_expr(e)); 486 let tail = block.expr().map(|e| self.collect_expr(e));
diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs
index 4d489f692..fe4137176 100644
--- a/crates/ra_hir_def/src/body/scope.rs
+++ b/crates/ra_hir_def/src/body/scope.rs
@@ -157,6 +157,10 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
157 for arm in arms { 157 for arm in arms {
158 let scope = scopes.new_scope(scope); 158 let scope = scopes.new_scope(scope);
159 scopes.add_bindings(body, scope, arm.pat); 159 scopes.add_bindings(body, scope, arm.pat);
160 if let Some(guard) = arm.guard {
161 scopes.set_scope(guard, scope);
162 compute_expr_scopes(guard, body, scopes, scope);
163 }
160 scopes.set_scope(arm.expr, scope); 164 scopes.set_scope(arm.expr, scope);
161 compute_expr_scopes(arm.expr, body, scopes, scope); 165 compute_expr_scopes(arm.expr, body, scopes, scope);
162 } 166 }
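For context, a minimal illustration (mirroring the `infer_guard` test added in `tests/patterns.rs` below): bindings introduced by a match arm's pattern are now in scope inside that arm's guard, so the guard expression is inferred correctly.

    struct S;
    impl S {
        fn foo(&self) -> bool { false }
    }

    fn main() {
        match S {
            // `s` is bound by the pattern and, with this change, visible in the guard.
            s if s.foo() => (),
        }
    }
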
diff --git a/crates/ra_hir_expand/src/ast_id_map.rs b/crates/ra_hir_expand/src/ast_id_map.rs
index a3ca302c2..d19569245 100644
--- a/crates/ra_hir_expand/src/ast_id_map.rs
+++ b/crates/ra_hir_expand/src/ast_id_map.rs
@@ -66,7 +66,7 @@ impl AstIdMap {
66 // change parent's id. This means that, say, adding a new function to a 66 // change parent's id. This means that, say, adding a new function to a
67 // trait does not change ids of top-level items, which helps caching. 67 // trait does not change ids of top-level items, which helps caching.
68 bfs(node, |it| { 68 bfs(node, |it| {
69 if let Some(module_item) = ast::ModuleItem::cast(it.clone()) { 69 if let Some(module_item) = ast::ModuleItem::cast(it) {
70 res.alloc(module_item.syntax()); 70 res.alloc(module_item.syntax());
71 } 71 }
72 }); 72 });
diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs
index f9d3787f6..3da137f2e 100644
--- a/crates/ra_hir_expand/src/builtin_macro.rs
+++ b/crates/ra_hir_expand/src/builtin_macro.rs
@@ -301,7 +301,7 @@ fn relative_file(db: &dyn AstDatabase, call_id: MacroCallId, path: &str) -> Opti
301 } 301 }
302 302
303 // Extern paths ? 303 // Extern paths ?
304 let krate = db.relevant_crates(call_site).get(0)?.clone(); 304 let krate = *db.relevant_crates(call_site).get(0)?;
305 let (extern_source_id, relative_file) = 305 let (extern_source_id, relative_file) =
306 db.crate_graph()[krate].extern_source.extern_path(path)?; 306 db.crate_graph()[krate].extern_source.extern_path(path)?;
307 307
@@ -329,7 +329,7 @@ fn include_expand(
329 329
330 // FIXME: 330 // FIXME:
331 // Handle include as expression 331 // Handle include as expression
332 let res = parse_to_token_tree(&db.file_text(file_id.into())) 332 let res = parse_to_token_tree(&db.file_text(file_id))
333 .ok_or_else(|| mbe::ExpandError::ConversionError)? 333 .ok_or_else(|| mbe::ExpandError::ConversionError)?
334 .0; 334 .0;
335 335
@@ -340,7 +340,7 @@ fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Optio
340 let call_id: MacroCallId = arg_id.into(); 340 let call_id: MacroCallId = arg_id.into();
341 let original_file = call_id.as_file().original_file(db); 341 let original_file = call_id.as_file().original_file(db);
342 342
343 let krate = db.relevant_crates(original_file).get(0)?.clone(); 343 let krate = *db.relevant_crates(original_file).get(0)?;
344 db.crate_graph()[krate].env.get(key) 344 db.crate_graph()[krate].env.get(key)
345} 345}
346 346
@@ -447,7 +447,7 @@ mod tests {
447 file_id: file_id.into(), 447 file_id: file_id.into(),
448 }; 448 };
449 449
450 let id: MacroCallId = db.intern_eager_expansion(eager.into()).into(); 450 let id: MacroCallId = db.intern_eager_expansion(eager).into();
451 id.as_file() 451 id.as_file()
452 } 452 }
453 }; 453 };
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml
index 177bdbcb0..04d3cd6a2 100644
--- a/crates/ra_hir_ty/Cargo.toml
+++ b/crates/ra_hir_ty/Cargo.toml
@@ -27,9 +27,9 @@ test_utils = { path = "../test_utils" }
27 27
28scoped-tls = "1" 28scoped-tls = "1"
29 29
30chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } 30chalk-solve = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" }
31chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } 31chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" }
32chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "28cef6ff403d403e6ad2f3d27d944e9ffac1bce8" } 32chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "2c072cc830d04af5f10b390e6643327f85108282" }
33 33
34[dev-dependencies] 34[dev-dependencies]
35insta = "0.16.0" 35insta = "0.16.0"
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs
index 33da16b48..9e5dfeab3 100644
--- a/crates/ra_hir_ty/src/db.rs
+++ b/crates/ra_hir_ty/src/db.rs
@@ -107,6 +107,13 @@ pub trait HirDatabase: DefDatabase + Upcast<dyn DefDatabase> {
107 krate: CrateId, 107 krate: CrateId,
108 goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>, 108 goal: crate::Canonical<crate::InEnvironment<crate::Obligation>>,
109 ) -> Option<crate::traits::Solution>; 109 ) -> Option<crate::traits::Solution>;
110
111 #[salsa::invoke(crate::traits::chalk::program_clauses_for_chalk_env_query)]
112 fn program_clauses_for_chalk_env(
113 &self,
114 krate: CrateId,
115 env: chalk_ir::Environment<chalk::Interner>,
116 ) -> chalk_ir::ProgramClauses<chalk::Interner>;
110} 117}
111 118
112fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { 119fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
index b6d9b3438..dfb6a435f 100644
--- a/crates/ra_hir_ty/src/infer.rs
+++ b/crates/ra_hir_ty/src/infer.rs
@@ -127,6 +127,7 @@ pub struct InferenceResult {
127 field_resolutions: FxHashMap<ExprId, StructFieldId>, 127 field_resolutions: FxHashMap<ExprId, StructFieldId>,
128 /// For each field in record literal, records the field it resolves to. 128 /// For each field in record literal, records the field it resolves to.
129 record_field_resolutions: FxHashMap<ExprId, StructFieldId>, 129 record_field_resolutions: FxHashMap<ExprId, StructFieldId>,
130 record_field_pat_resolutions: FxHashMap<PatId, StructFieldId>,
130 /// For each struct literal, records the variant it resolves to. 131 /// For each struct literal, records the variant it resolves to.
131 variant_resolutions: FxHashMap<ExprOrPatId, VariantId>, 132 variant_resolutions: FxHashMap<ExprOrPatId, VariantId>,
132 /// For each associated item record what it resolves to 133 /// For each associated item record what it resolves to
@@ -147,6 +148,9 @@ impl InferenceResult {
147 pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> { 148 pub fn record_field_resolution(&self, expr: ExprId) -> Option<StructFieldId> {
148 self.record_field_resolutions.get(&expr).copied() 149 self.record_field_resolutions.get(&expr).copied()
149 } 150 }
151 pub fn record_field_pat_resolution(&self, pat: PatId) -> Option<StructFieldId> {
152 self.record_field_pat_resolutions.get(&pat).copied()
153 }
150 pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> { 154 pub fn variant_resolution_for_expr(&self, id: ExprId) -> Option<VariantId> {
151 self.variant_resolutions.get(&id.into()).copied() 155 self.variant_resolutions.get(&id.into()).copied()
152 } 156 }
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs
index 8ec4d4ace..7c2ad4384 100644
--- a/crates/ra_hir_ty/src/infer/pat.rs
+++ b/crates/ra_hir_ty/src/infer/pat.rs
@@ -7,6 +7,7 @@ use hir_def::{
7 expr::{BindingAnnotation, Pat, PatId, RecordFieldPat}, 7 expr::{BindingAnnotation, Pat, PatId, RecordFieldPat},
8 path::Path, 8 path::Path,
9 type_ref::Mutability, 9 type_ref::Mutability,
10 StructFieldId,
10}; 11};
11use hir_expand::name::Name; 12use hir_expand::name::Name;
12use test_utils::tested_by; 13use test_utils::tested_by;
@@ -67,6 +68,11 @@ impl<'a> InferenceContext<'a> {
67 let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); 68 let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default();
68 for subpat in subpats { 69 for subpat in subpats {
69 let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); 70 let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name));
71 if let Some(local_id) = matching_field {
72 let field_def = StructFieldId { parent: def.unwrap(), local_id };
73 self.result.record_field_pat_resolutions.insert(subpat.pat, field_def);
74 }
75
70 let expected_ty = 76 let expected_ty =
71 matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs)); 77 matching_field.map_or(Ty::Unknown, |field| field_tys[field].clone().subst(&substs));
72 let expected_ty = self.normalize_associated_types_in(expected_ty); 78 let expected_ty = self.normalize_associated_types_in(expected_ty);
diff --git a/crates/ra_hir_ty/src/tests/patterns.rs b/crates/ra_hir_ty/src/tests/patterns.rs
index 07cbc521a..6ea51d5d3 100644
--- a/crates/ra_hir_ty/src/tests/patterns.rs
+++ b/crates/ra_hir_ty/src/tests/patterns.rs
@@ -455,3 +455,29 @@ fn test() {
455 "### 455 "###
456 ); 456 );
457} 457}
458
459#[test]
460fn infer_guard() {
461 assert_snapshot!(
462 infer(r#"
463struct S;
464impl S { fn foo(&self) -> bool { false } }
465
466fn main() {
467 match S {
468 s if s.foo() => (),
469 }
470}
471 "#), @"
472 [28; 32) 'self': &S
473 [42; 51) '{ false }': bool
474 [44; 49) 'false': bool
475 [65; 116) '{ ... } }': ()
476 [71; 114) 'match ... }': ()
477 [77; 78) 'S': S
478 [89; 90) 's': S
479 [94; 95) 's': S
480 [94; 101) 's.foo()': bool
481 [105; 107) '()': ()
482 ")
483}
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 05791a848..6bc6d474c 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -225,7 +225,7 @@ fn solution_from_chalk(
225 None => unimplemented!(), 225 None => unimplemented!(),
226 }) 226 })
227 .collect(); 227 .collect();
228 let result = Canonical { value, num_vars: subst.binders.len() }; 228 let result = Canonical { value, num_vars: subst.binders.len(&Interner) };
229 SolutionVariables(result) 229 SolutionVariables(result)
230 }; 230 };
231 match solution { 231 match solution {
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs
index e00a82db2..1ccb7c3b4 100644
--- a/crates/ra_hir_ty/src/traits/chalk.rs
+++ b/crates/ra_hir_ty/src/traits/chalk.rs
@@ -4,8 +4,8 @@ use std::{fmt, sync::Arc};
4use log::debug; 4use log::debug;
5 5
6use chalk_ir::{ 6use chalk_ir::{
7 cast::Cast, fold::shift::Shift, Goal, GoalData, Parameter, PlaceholderIndex, TypeName, 7 cast::Cast, fold::shift::Shift, interner::HasInterner, Goal, GoalData, Parameter,
8 UniverseIndex, 8 PlaceholderIndex, TypeName, UniverseIndex,
9}; 9};
10 10
11use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId}; 11use hir_def::{AssocContainerId, AssocItemId, GenericDefId, HasModule, Lookup, TypeAliasId};
@@ -33,8 +33,10 @@ impl chalk_ir::interner::Interner for Interner {
33 type InternedGoals = Vec<Goal<Self>>; 33 type InternedGoals = Vec<Goal<Self>>;
34 type InternedSubstitution = Vec<Parameter<Self>>; 34 type InternedSubstitution = Vec<Parameter<Self>>;
35 type InternedProgramClause = chalk_ir::ProgramClauseData<Self>; 35 type InternedProgramClause = chalk_ir::ProgramClauseData<Self>;
36 type InternedProgramClauses = Vec<chalk_ir::ProgramClause<Self>>; 36 type InternedProgramClauses = Arc<[chalk_ir::ProgramClause<Self>]>;
37 type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>; 37 type InternedQuantifiedWhereClauses = Vec<chalk_ir::QuantifiedWhereClause<Self>>;
38 type InternedParameterKinds = Vec<chalk_ir::ParameterKind<()>>;
39 type InternedCanonicalVarKinds = Vec<chalk_ir::ParameterKind<UniverseIndex>>;
38 type Identifier = TypeAliasId; 40 type Identifier = TypeAliasId;
39 type DefId = InternId; 41 type DefId = InternId;
40 42
@@ -60,6 +62,27 @@ impl chalk_ir::interner::Interner for Interner {
60 tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt))) 62 tls::with_current_program(|prog| Some(prog?.debug_alias(alias, fmt)))
61 } 63 }
62 64
65 fn debug_projection_ty(
66 proj: &chalk_ir::ProjectionTy<Interner>,
67 fmt: &mut fmt::Formatter<'_>,
68 ) -> Option<fmt::Result> {
69 tls::with_current_program(|prog| Some(prog?.debug_projection_ty(proj, fmt)))
70 }
71
72 fn debug_opaque_ty(
73 opaque_ty: &chalk_ir::OpaqueTy<Interner>,
74 fmt: &mut fmt::Formatter<'_>,
75 ) -> Option<fmt::Result> {
76 tls::with_current_program(|prog| Some(prog?.debug_opaque_ty(opaque_ty, fmt)))
77 }
78
79 fn debug_opaque_ty_id(
80 opaque_ty_id: chalk_ir::OpaqueTyId<Self>,
81 fmt: &mut fmt::Formatter<'_>,
82 ) -> Option<fmt::Result> {
83 tls::with_current_program(|prog| Some(prog?.debug_opaque_ty_id(opaque_ty_id, fmt)))
84 }
85
63 fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> { 86 fn debug_ty(ty: &chalk_ir::Ty<Interner>, fmt: &mut fmt::Formatter<'_>) -> Option<fmt::Result> {
64 tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt))) 87 tls::with_current_program(|prog| Some(prog?.debug_ty(ty, fmt)))
65 } 88 }
@@ -202,15 +225,15 @@ impl chalk_ir::interner::Interner for Interner {
202 fn intern_program_clauses( 225 fn intern_program_clauses(
203 &self, 226 &self,
204 data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>, 227 data: impl IntoIterator<Item = chalk_ir::ProgramClause<Self>>,
205 ) -> Vec<chalk_ir::ProgramClause<Self>> { 228 ) -> Arc<[chalk_ir::ProgramClause<Self>]> {
206 data.into_iter().collect() 229 data.into_iter().collect()
207 } 230 }
208 231
209 fn program_clauses_data<'a>( 232 fn program_clauses_data<'a>(
210 &self, 233 &self,
211 clauses: &'a Vec<chalk_ir::ProgramClause<Self>>, 234 clauses: &'a Arc<[chalk_ir::ProgramClause<Self>]>,
212 ) -> &'a [chalk_ir::ProgramClause<Self>] { 235 ) -> &'a [chalk_ir::ProgramClause<Self>] {
213 clauses 236 &clauses
214 } 237 }
215 238
216 fn intern_quantified_where_clauses( 239 fn intern_quantified_where_clauses(
@@ -226,6 +249,34 @@ impl chalk_ir::interner::Interner for Interner {
226 ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] { 249 ) -> &'a [chalk_ir::QuantifiedWhereClause<Self>] {
227 clauses 250 clauses
228 } 251 }
252
253 fn intern_parameter_kinds(
254 &self,
255 data: impl IntoIterator<Item = chalk_ir::ParameterKind<()>>,
256 ) -> Self::InternedParameterKinds {
257 data.into_iter().collect()
258 }
259
260 fn parameter_kinds_data<'a>(
261 &self,
262 parameter_kinds: &'a Self::InternedParameterKinds,
263 ) -> &'a [chalk_ir::ParameterKind<()>] {
264 &parameter_kinds
265 }
266
267 fn intern_canonical_var_kinds(
268 &self,
269 data: impl IntoIterator<Item = chalk_ir::ParameterKind<UniverseIndex>>,
270 ) -> Self::InternedCanonicalVarKinds {
271 data.into_iter().collect()
272 }
273
274 fn canonical_var_kinds_data<'a>(
275 &self,
276 canonical_var_kinds: &'a Self::InternedCanonicalVarKinds,
277 ) -> &'a [chalk_ir::ParameterKind<UniverseIndex>] {
278 &canonical_var_kinds
279 }
229} 280}
230 281
231impl chalk_ir::interner::HasInterner for Interner { 282impl chalk_ir::interner::HasInterner for Interner {
@@ -268,9 +319,12 @@ impl ToChalk for Ty {
268 Ty::Projection(proj_ty) => { 319 Ty::Projection(proj_ty) => {
269 let associated_ty_id = proj_ty.associated_ty.to_chalk(db); 320 let associated_ty_id = proj_ty.associated_ty.to_chalk(db);
270 let substitution = proj_ty.parameters.to_chalk(db); 321 let substitution = proj_ty.parameters.to_chalk(db);
271 chalk_ir::AliasTy { associated_ty_id, substitution } 322 chalk_ir::AliasTy::Projection(chalk_ir::ProjectionTy {
272 .cast(&Interner) 323 associated_ty_id,
273 .intern(&Interner) 324 substitution,
325 })
326 .cast(&Interner)
327 .intern(&Interner)
274 } 328 }
275 Ty::Placeholder(id) => { 329 Ty::Placeholder(id) => {
276 let interned_id = db.intern_type_param_id(id); 330 let interned_id = db.intern_type_param_id(id);
@@ -314,16 +368,17 @@ impl ToChalk for Ty {
314 ); 368 );
315 Ty::Placeholder(db.lookup_intern_type_param_id(interned_id)) 369 Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
316 } 370 }
317 chalk_ir::TyData::Alias(proj) => { 371 chalk_ir::TyData::Alias(chalk_ir::AliasTy::Projection(proj)) => {
318 let associated_ty = from_chalk(db, proj.associated_ty_id); 372 let associated_ty = from_chalk(db, proj.associated_ty_id);
319 let parameters = from_chalk(db, proj.substitution); 373 let parameters = from_chalk(db, proj.substitution);
320 Ty::Projection(ProjectionTy { associated_ty, parameters }) 374 Ty::Projection(ProjectionTy { associated_ty, parameters })
321 } 375 }
376 chalk_ir::TyData::Alias(chalk_ir::AliasTy::Opaque(_)) => unimplemented!(),
322 chalk_ir::TyData::Function(_) => unimplemented!(), 377 chalk_ir::TyData::Function(_) => unimplemented!(),
323 chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx), 378 chalk_ir::TyData::BoundVar(idx) => Ty::Bound(idx),
324 chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown, 379 chalk_ir::TyData::InferenceVar(_iv) => Ty::Unknown,
325 chalk_ir::TyData::Dyn(where_clauses) => { 380 chalk_ir::TyData::Dyn(where_clauses) => {
326 assert_eq!(where_clauses.bounds.binders.len(), 1); 381 assert_eq!(where_clauses.bounds.binders.len(&Interner), 1);
327 let predicates = where_clauses 382 let predicates = where_clauses
328 .bounds 383 .bounds
329 .skip_binders() 384 .skip_binders()
@@ -404,6 +459,7 @@ impl ToChalk for TypeCtor {
404 match type_name { 459 match type_name {
405 TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()), 460 TypeName::Struct(struct_id) => db.lookup_intern_type_ctor(struct_id.into()),
406 TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)), 461 TypeName::AssociatedType(type_id) => TypeCtor::AssociatedType(from_chalk(db, type_id)),
462 TypeName::OpaqueType(_) => unreachable!(),
407 TypeName::Error => { 463 TypeName::Error => {
408 // this should not be reached, since we don't represent TypeName::Error with TypeCtor 464 // this should not be reached, since we don't represent TypeName::Error with TypeCtor
409 unreachable!() 465 unreachable!()
@@ -460,7 +516,8 @@ impl ToChalk for GenericPredicate {
460 } 516 }
461 GenericPredicate::Projection(projection_pred) => { 517 GenericPredicate::Projection(projection_pred) => {
462 let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner); 518 let ty = projection_pred.ty.to_chalk(db).shifted_in(&Interner);
463 let alias = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner); 519 let projection = projection_pred.projection_ty.to_chalk(db).shifted_in(&Interner);
520 let alias = chalk_ir::AliasTy::Projection(projection);
464 make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0) 521 make_binders(chalk_ir::WhereClause::AliasEq(chalk_ir::AliasEq { alias, ty }), 0)
465 } 522 }
466 GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"), 523 GenericPredicate::Error => panic!("tried passing GenericPredicate::Error to Chalk"),
@@ -481,7 +538,13 @@ impl ToChalk for GenericPredicate {
481 GenericPredicate::Implemented(from_chalk(db, tr)) 538 GenericPredicate::Implemented(from_chalk(db, tr))
482 } 539 }
483 chalk_ir::WhereClause::AliasEq(projection_eq) => { 540 chalk_ir::WhereClause::AliasEq(projection_eq) => {
484 let projection_ty = from_chalk(db, projection_eq.alias); 541 let projection_ty = from_chalk(
542 db,
543 match projection_eq.alias {
544 chalk_ir::AliasTy::Projection(p) => p,
545 _ => unimplemented!(),
546 },
547 );
485 let ty = from_chalk(db, projection_eq.ty); 548 let ty = from_chalk(db, projection_eq.ty);
486 GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty }) 549 GenericPredicate::Projection(super::ProjectionPredicate { projection_ty, ty })
487 } 550 }
@@ -490,10 +553,10 @@ impl ToChalk for GenericPredicate {
490} 553}
491 554
492impl ToChalk for ProjectionTy { 555impl ToChalk for ProjectionTy {
493 type Chalk = chalk_ir::AliasTy<Interner>; 556 type Chalk = chalk_ir::ProjectionTy<Interner>;
494 557
495 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasTy<Interner> { 558 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::ProjectionTy<Interner> {
496 chalk_ir::AliasTy { 559 chalk_ir::ProjectionTy {
497 associated_ty_id: self.associated_ty.to_chalk(db), 560 associated_ty_id: self.associated_ty.to_chalk(db),
498 substitution: self.parameters.to_chalk(db), 561 substitution: self.parameters.to_chalk(db),
499 } 562 }
@@ -501,7 +564,7 @@ impl ToChalk for ProjectionTy {
501 564
502 fn from_chalk( 565 fn from_chalk(
503 db: &dyn HirDatabase, 566 db: &dyn HirDatabase,
504 projection_ty: chalk_ir::AliasTy<Interner>, 567 projection_ty: chalk_ir::ProjectionTy<Interner>,
505 ) -> ProjectionTy { 568 ) -> ProjectionTy {
506 ProjectionTy { 569 ProjectionTy {
507 associated_ty: from_chalk(db, projection_ty.associated_ty_id), 570 associated_ty: from_chalk(db, projection_ty.associated_ty_id),
@@ -514,7 +577,10 @@ impl ToChalk for super::ProjectionPredicate {
514 type Chalk = chalk_ir::AliasEq<Interner>; 577 type Chalk = chalk_ir::AliasEq<Interner>;
515 578
516 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> { 579 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::AliasEq<Interner> {
517 chalk_ir::AliasEq { alias: self.projection_ty.to_chalk(db), ty: self.ty.to_chalk(db) } 580 chalk_ir::AliasEq {
581 alias: chalk_ir::AliasTy::Projection(self.projection_ty.to_chalk(db)),
582 ty: self.ty.to_chalk(db),
583 }
518 } 584 }
519 585
520 fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self { 586 fn from_chalk(_db: &dyn HirDatabase, _normalize: chalk_ir::AliasEq<Interner>) -> Self {
@@ -540,17 +606,24 @@ impl ToChalk for Obligation {
540impl<T> ToChalk for Canonical<T> 606impl<T> ToChalk for Canonical<T>
541where 607where
542 T: ToChalk, 608 T: ToChalk,
609 T::Chalk: HasInterner<Interner = Interner>,
543{ 610{
544 type Chalk = chalk_ir::Canonical<T::Chalk>; 611 type Chalk = chalk_ir::Canonical<T::Chalk>;
545 612
546 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> { 613 fn to_chalk(self, db: &dyn HirDatabase) -> chalk_ir::Canonical<T::Chalk> {
547 let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); 614 let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT);
548 let value = self.value.to_chalk(db); 615 let value = self.value.to_chalk(db);
549 chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] } 616 chalk_ir::Canonical {
617 value,
618 binders: chalk_ir::CanonicalVarKinds::from(&Interner, vec![parameter; self.num_vars]),
619 }
550 } 620 }
551 621
552 fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { 622 fn from_chalk(db: &dyn HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> {
553 Canonical { num_vars: canonical.binders.len(), value: from_chalk(db, canonical.value) } 623 Canonical {
624 num_vars: canonical.binders.len(&Interner),
625 value: from_chalk(db, canonical.value),
626 }
554 } 627 }
555} 628}
556 629
@@ -649,9 +722,15 @@ impl ToChalk for builtin::BuiltinImplAssocTyValueData {
649 } 722 }
650} 723}
651 724
652fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T> { 725fn make_binders<T>(value: T, num_vars: usize) -> chalk_ir::Binders<T>
726where
727 T: HasInterner<Interner = Interner>,
728{
653 chalk_ir::Binders::new( 729 chalk_ir::Binders::new(
654 std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars).collect(), 730 chalk_ir::ParameterKinds::from(
731 &Interner,
732 std::iter::repeat(chalk_ir::ParameterKind::Ty(())).take(num_vars),
733 ),
655 value, 734 value,
656 ) 735 )
657} 736}
@@ -799,6 +878,28 @@ impl<'a> chalk_solve::RustIrDatabase<Interner> for ChalkContext<'a> {
799 // FIXME tell Chalk about well-known traits (here and in trait_datum) 878 // FIXME tell Chalk about well-known traits (here and in trait_datum)
800 None 879 None
801 } 880 }
881
882 fn program_clauses_for_env(
883 &self,
884 environment: &chalk_ir::Environment<Interner>,
885 ) -> chalk_ir::ProgramClauses<Interner> {
886 self.db.program_clauses_for_chalk_env(self.krate, environment.clone())
887 }
888
889 fn opaque_ty_data(
890 &self,
891 _id: chalk_ir::OpaqueTyId<Interner>,
892 ) -> Arc<chalk_rust_ir::OpaqueTyDatum<Interner>> {
893 unimplemented!()
894 }
895}
896
897pub(crate) fn program_clauses_for_chalk_env_query(
898 db: &dyn HirDatabase,
899 krate: CrateId,
900 environment: chalk_ir::Environment<Interner>,
901) -> chalk_ir::ProgramClauses<Interner> {
902 chalk_solve::program_clauses_for_env(&ChalkContext { db, krate }, &environment)
802} 903}
803 904
804pub(crate) fn associated_ty_data_query( 905pub(crate) fn associated_ty_data_query(
diff --git a/crates/ra_hir_ty/src/traits/chalk/tls.rs b/crates/ra_hir_ty/src/traits/chalk/tls.rs
index fa8e4d1ad..4867cb17e 100644
--- a/crates/ra_hir_ty/src/traits/chalk/tls.rs
+++ b/crates/ra_hir_ty/src/traits/chalk/tls.rs
@@ -121,19 +121,38 @@ impl DebugContext<'_> {
121 write!(fmt, "{}::{}", trait_data.name, type_alias_data.name) 121 write!(fmt, "{}::{}", trait_data.name, type_alias_data.name)
122 } 122 }
123 123
124 pub fn debug_opaque_ty_id(
125 &self,
126 opaque_ty_id: chalk_ir::OpaqueTyId<Interner>,
127 fmt: &mut fmt::Formatter<'_>,
128 ) -> Result<(), fmt::Error> {
129 fmt.debug_struct("OpaqueTyId").field("index", &opaque_ty_id.0).finish()
130 }
131
124 pub fn debug_alias( 132 pub fn debug_alias(
125 &self, 133 &self,
126 alias: &AliasTy<Interner>, 134 alias_ty: &AliasTy<Interner>,
135 fmt: &mut fmt::Formatter<'_>,
136 ) -> Result<(), fmt::Error> {
137 match alias_ty {
138 AliasTy::Projection(projection_ty) => self.debug_projection_ty(projection_ty, fmt),
139 AliasTy::Opaque(opaque_ty) => self.debug_opaque_ty(opaque_ty, fmt),
140 }
141 }
142
143 pub fn debug_projection_ty(
144 &self,
145 projection_ty: &chalk_ir::ProjectionTy<Interner>,
127 fmt: &mut fmt::Formatter<'_>, 146 fmt: &mut fmt::Formatter<'_>,
128 ) -> Result<(), fmt::Error> { 147 ) -> Result<(), fmt::Error> {
129 let type_alias: TypeAliasId = from_chalk(self.0, alias.associated_ty_id); 148 let type_alias: TypeAliasId = from_chalk(self.0, projection_ty.associated_ty_id);
130 let type_alias_data = self.0.type_alias_data(type_alias); 149 let type_alias_data = self.0.type_alias_data(type_alias);
131 let trait_ = match type_alias.lookup(self.0.upcast()).container { 150 let trait_ = match type_alias.lookup(self.0.upcast()).container {
132 AssocContainerId::TraitId(t) => t, 151 AssocContainerId::TraitId(t) => t,
133 _ => panic!("associated type not in trait"), 152 _ => panic!("associated type not in trait"),
134 }; 153 };
135 let trait_data = self.0.trait_data(trait_); 154 let trait_data = self.0.trait_data(trait_);
136 let params = alias.substitution.parameters(&Interner); 155 let params = projection_ty.substitution.parameters(&Interner);
137 write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?; 156 write!(fmt, "<{:?} as {}", &params[0], trait_data.name,)?;
138 if params.len() > 1 { 157 if params.len() > 1 {
139 write!( 158 write!(
@@ -145,6 +164,14 @@ impl DebugContext<'_> {
145 write!(fmt, ">::{}", type_alias_data.name) 164 write!(fmt, ">::{}", type_alias_data.name)
146 } 165 }
147 166
167 pub fn debug_opaque_ty(
168 &self,
169 opaque_ty: &chalk_ir::OpaqueTy<Interner>,
170 fmt: &mut fmt::Formatter<'_>,
171 ) -> Result<(), fmt::Error> {
172 write!(fmt, "{:?}", opaque_ty.opaque_ty_id)
173 }
174
148 pub fn debug_ty( 175 pub fn debug_ty(
149 &self, 176 &self,
150 ty: &chalk_ir::Ty<Interner>, 177 ty: &chalk_ir::Ty<Interner>,
diff --git a/crates/ra_ide/src/extend_selection.rs b/crates/ra_ide/src/extend_selection.rs
index f5a063351..753d2ef6a 100644
--- a/crates/ra_ide/src/extend_selection.rs
+++ b/crates/ra_ide/src/extend_selection.rs
@@ -96,7 +96,7 @@ fn try_extend_selection(
96 return Some(node.text_range()); 96 return Some(node.text_range());
97 } 97 }
98 98
99 let node = shallowest_node(&node.into()); 99 let node = shallowest_node(&node);
100 100
101 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) { 101 if node.parent().map(|n| list_kinds.contains(&n.kind())) == Some(true) {
102 if let Some(range) = extend_list_item(&node) { 102 if let Some(range) = extend_list_item(&node) {
diff --git a/crates/ra_ide/src/goto_definition.rs b/crates/ra_ide/src/goto_definition.rs
index 8aed94d16..9998ca5a3 100644
--- a/crates/ra_ide/src/goto_definition.rs
+++ b/crates/ra_ide/src/goto_definition.rs
@@ -62,10 +62,9 @@ pub(crate) enum ReferenceResult {
62 62
63impl ReferenceResult { 63impl ReferenceResult {
64 fn to_vec(self) -> Vec<NavigationTarget> { 64 fn to_vec(self) -> Vec<NavigationTarget> {
65 use self::ReferenceResult::*;
66 match self { 65 match self {
67 Exact(target) => vec![target], 66 ReferenceResult::Exact(target) => vec![target],
68 Approximate(vec) => vec, 67 ReferenceResult::Approximate(vec) => vec,
69 } 68 }
70 } 69 }
71} 70}
@@ -74,8 +73,6 @@ pub(crate) fn reference_definition(
74 sema: &Semantics<RootDatabase>, 73 sema: &Semantics<RootDatabase>,
75 name_ref: &ast::NameRef, 74 name_ref: &ast::NameRef,
76) -> ReferenceResult { 75) -> ReferenceResult {
77 use self::ReferenceResult::*;
78
79 let name_kind = classify_name_ref(sema, name_ref); 76 let name_kind = classify_name_ref(sema, name_ref);
80 if let Some(def) = name_kind { 77 if let Some(def) = name_kind {
81 let def = def.definition(); 78 let def = def.definition();
@@ -91,7 +88,7 @@ pub(crate) fn reference_definition(
91 .into_iter() 88 .into_iter()
92 .map(|s| s.to_nav(sema.db)) 89 .map(|s| s.to_nav(sema.db))
93 .collect(); 90 .collect();
94 Approximate(navs) 91 ReferenceResult::Approximate(navs)
95} 92}
96 93
97#[cfg(test)] 94#[cfg(test)]
@@ -399,6 +396,25 @@ mod tests {
399 } 396 }
400 397
401 #[test] 398 #[test]
399 fn goto_def_for_record_pat_fields() {
400 covers!(ra_ide_db::goto_def_for_record_field_pats);
401 check_goto(
402 r"
403 //- /lib.rs
404 struct Foo {
405 spam: u32,
406 }
407
408 fn bar(foo: Foo) -> Foo {
409 let Foo { spam<|>: _, } = foo
410 }
411 ",
412 "spam RECORD_FIELD_DEF FileId(1) [17; 26) [17; 21)",
413 "spam: u32|spam",
414 );
415 }
416
417 #[test]
402 fn goto_def_for_record_fields_macros() { 418 fn goto_def_for_record_fields_macros() {
403 check_goto( 419 check_goto(
404 r" 420 r"
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 214dcbb62..ccb1fc751 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -50,12 +50,12 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
50<span class="keyword">fn</span> <span class="function declaration">main</span>() { 50<span class="keyword">fn</span> <span class="function declaration">main</span>() {
51 <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>); 51 <span class="macro">println!</span>(<span class="string_literal">"Hello, {}!"</span>, <span class="numeric_literal">92</span>);
52 52
53 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = Vec::new(); 53 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">vec</span> = <span class="unresolved_reference">Vec</span>::<span class="unresolved_reference">new</span>();
54 <span class="keyword control">if</span> <span class="keyword">true</span> { 54 <span class="keyword control">if</span> <span class="keyword">true</span> {
55 <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>; 55 <span class="keyword">let</span> <span class="variable declaration">x</span> = <span class="numeric_literal">92</span>;
56 <span class="variable mutable">vec</span>.push(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> }); 56 <span class="variable mutable">vec</span>.<span class="unresolved_reference">push</span>(<span class="struct">Foo</span> { <span class="field">x</span>, <span class="field">y</span>: <span class="numeric_literal">1</span> });
57 } 57 }
58 <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.set_len(<span class="numeric_literal">0</span>); } 58 <span class="keyword unsafe">unsafe</span> { <span class="variable mutable">vec</span>.<span class="unresolved_reference">set_len</span>(<span class="numeric_literal">0</span>); }
59 59
60 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>; 60 <span class="keyword">let</span> <span class="keyword">mut</span> <span class="variable declaration mutable">x</span> = <span class="numeric_literal">42</span>;
61 <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>; 61 <span class="keyword">let</span> <span class="variable declaration mutable">y</span> = &<span class="keyword">mut</span> <span class="variable mutable">x</span>;
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index dddbfc0dd..3df82c45f 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -28,11 +28,11 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
28</style> 28</style>
29<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() { 29<pre><code><span class="keyword">fn</span> <span class="function declaration">main</span>() {
30 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>; 30 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span> = <span class="string_literal">"hello"</span>;
31 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string(); 31 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="2705725358298919760" style="color: hsl(17,51%,74%);">x</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.<span class="unresolved_reference">to_string</span>();
32 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.to_string(); 32 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="3365759661443752373" style="color: hsl(127,76%,66%);">y</span> = <span class="variable" data-binding-hash="8121853618659664005" style="color: hsl(261,57%,61%);">hello</span>.<span class="unresolved_reference">to_string</span>();
33 33
34 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>; 34 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span> = <span class="string_literal">"other color please!"</span>;
35 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.to_string(); 35 <span class="keyword">let</span> <span class="variable declaration" data-binding-hash="6717528807933952652" style="color: hsl(85,49%,84%);">y</span> = <span class="variable" data-binding-hash="794745962933817518" style="color: hsl(19,74%,76%);">x</span>.<span class="unresolved_reference">to_string</span>();
36} 36}
37 37
38<span class="keyword">fn</span> <span class="function declaration">bar</span>() { 38<span class="keyword">fn</span> <span class="function declaration">bar</span>() {
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 7b15b82bd..93d502875 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -239,20 +239,21 @@ fn highlight_element(
239 NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None, 239 NAME_REF if element.ancestors().any(|it| it.kind() == ATTR) => return None,
240 NAME_REF => { 240 NAME_REF => {
241 let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap(); 241 let name_ref = element.into_node().and_then(ast::NameRef::cast).unwrap();
242 let name_kind = classify_name_ref(sema, &name_ref)?; 242 match classify_name_ref(sema, &name_ref) {
243 243 Some(name_kind) => match name_kind {
244 match name_kind { 244 NameRefClass::Definition(def) => {
245 NameRefClass::Definition(def) => { 245 if let Definition::Local(local) = &def {
246 if let Definition::Local(local) = &def { 246 if let Some(name) = local.name(db) {
247 if let Some(name) = local.name(db) { 247 let shadow_count =
248 let shadow_count = 248 bindings_shadow_count.entry(name.clone()).or_default();
249 bindings_shadow_count.entry(name.clone()).or_default(); 249 binding_hash = Some(calc_binding_hash(&name, *shadow_count))
250 binding_hash = Some(calc_binding_hash(&name, *shadow_count)) 250 }
251 } 251 };
252 }; 252 highlight_name(db, def)
253 highlight_name(db, def) 253 }
254 } 254 NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(),
255 NameRefClass::FieldShorthand { .. } => HighlightTag::Field.into(), 255 },
256 None => HighlightTag::UnresolvedReference.into(),
256 } 257 }
257 } 258 }
258 259
diff --git a/crates/ra_ide/src/syntax_highlighting/tags.rs b/crates/ra_ide/src/syntax_highlighting/tags.rs
index e8b138e1a..f2c421654 100644
--- a/crates/ra_ide/src/syntax_highlighting/tags.rs
+++ b/crates/ra_ide/src/syntax_highlighting/tags.rs
@@ -38,6 +38,7 @@ pub enum HighlightTag {
38 TypeParam, 38 TypeParam,
39 Union, 39 Union,
40 Local, 40 Local,
41 UnresolvedReference,
41} 42}
42 43
43#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] 44#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
@@ -79,6 +80,7 @@ impl HighlightTag {
79 HighlightTag::TypeParam => "type_param", 80 HighlightTag::TypeParam => "type_param",
80 HighlightTag::Union => "union", 81 HighlightTag::Union => "union",
81 HighlightTag::Local => "variable", 82 HighlightTag::Local => "variable",
83 HighlightTag::UnresolvedReference => "unresolved_reference",
82 } 84 }
83 } 85 }
84} 86}
diff --git a/crates/ra_ide_db/src/defs.rs b/crates/ra_ide_db/src/defs.rs
index 49a8c74fb..785613b82 100644
--- a/crates/ra_ide_db/src/defs.rs
+++ b/crates/ra_ide_db/src/defs.rs
@@ -180,6 +180,7 @@ fn classify_name_inner(sema: &Semantics<RootDatabase>, name: &ast::Name) -> Opti
180 } 180 }
181} 181}
182 182
183#[derive(Debug)]
183pub enum NameRefClass { 184pub enum NameRefClass {
184 Definition(Definition), 185 Definition(Definition),
185 FieldShorthand { local: Local, field: Definition }, 186 FieldShorthand { local: Local, field: Definition },
@@ -229,6 +230,14 @@ pub fn classify_name_ref(
229 } 230 }
230 } 231 }
231 232
233 if let Some(record_field_pat) = ast::RecordFieldPat::cast(parent.clone()) {
234 tested_by!(goto_def_for_record_field_pats; force);
235 if let Some(field) = sema.resolve_record_field_pat(&record_field_pat) {
236 let field = Definition::StructField(field);
237 return Some(NameRefClass::Definition(field));
238 }
239 }
240
232 if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) { 241 if let Some(macro_call) = parent.ancestors().find_map(ast::MacroCall::cast) {
233 tested_by!(goto_def_for_macros; force); 242 tested_by!(goto_def_for_macros; force);
234 if let Some(macro_def) = sema.resolve_macro_call(&macro_call) { 243 if let Some(macro_def) = sema.resolve_macro_call(&macro_call) {
diff --git a/crates/ra_ide_db/src/marks.rs b/crates/ra_ide_db/src/marks.rs
index 4f0a22af0..03b4be21c 100644
--- a/crates/ra_ide_db/src/marks.rs
+++ b/crates/ra_ide_db/src/marks.rs
@@ -6,5 +6,6 @@ test_utils::marks![
6 goto_def_for_fields 6 goto_def_for_fields
7 goto_def_for_record_fields 7 goto_def_for_record_fields
8 goto_def_for_field_init_shorthand 8 goto_def_for_field_init_shorthand
9 goto_def_for_record_field_pats
9 search_filters_by_range 10 search_filters_by_range
10]; 11];
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs
index 9485c62b8..78f9efa1b 100644
--- a/crates/ra_mbe/src/mbe_expander/matcher.rs
+++ b/crates/ra_mbe/src/mbe_expander/matcher.rs
@@ -187,7 +187,11 @@ impl<'a> TtIter<'a> {
187 _ => false, 187 _ => false,
188 }, 188 },
189 Separator::Literal(lhs) => match fork.expect_literal() { 189 Separator::Literal(lhs) => match fork.expect_literal() {
190 Ok(rhs) => rhs.text == lhs.text, 190 Ok(rhs) => match rhs {
191 tt::Leaf::Literal(rhs) => rhs.text == lhs.text,
192 tt::Leaf::Ident(rhs) => rhs.text == lhs.text,
193 tt::Leaf::Punct(_) => false,
194 },
191 _ => false, 195 _ => false,
192 }, 196 },
193 Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() { 197 Separator::Puncts(lhss) => lhss.iter().all(|lhs| match fork.expect_punct() {
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs
index 46791efaa..d7866452d 100644
--- a/crates/ra_mbe/src/subtree_source.rs
+++ b/crates/ra_mbe/src/subtree_source.rs
@@ -158,20 +158,17 @@ fn convert_literal(l: &tt::Literal) -> TtToken {
158 let kind = lex_single_syntax_kind(&l.text) 158 let kind = lex_single_syntax_kind(&l.text)
159 .map(|(kind, _error)| kind) 159 .map(|(kind, _error)| kind)
160 .filter(|kind| kind.is_literal()) 160 .filter(|kind| kind.is_literal())
161 .unwrap_or_else(|| match l.text.as_ref() { 161 .unwrap_or_else(|| panic!("Fail to convert given literal {:#?}", &l));
162 "true" => T![true],
163 "false" => T![false],
164 _ => panic!("Fail to convert given literal {:#?}", &l),
165 });
166 162
167 TtToken { kind, is_joint_to_next: false, text: l.text.clone() } 163 TtToken { kind, is_joint_to_next: false, text: l.text.clone() }
168} 164}
169 165
170fn convert_ident(ident: &tt::Ident) -> TtToken { 166fn convert_ident(ident: &tt::Ident) -> TtToken {
171 let kind = if ident.text.starts_with('\'') { 167 let kind = match ident.text.as_ref() {
172 LIFETIME 168 "true" => T![true],
173 } else { 169 "false" => T![false],
174 SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT) 170 i if i.starts_with('\'') => LIFETIME,
171 _ => SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT),
175 }; 172 };
176 173
177 TtToken { kind, is_joint_to_next: false, text: ident.text.clone() } 174 TtToken { kind, is_joint_to_next: false, text: ident.text.clone() }
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 70899bc5d..2b4390eb2 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -376,7 +376,7 @@ trait TokenConvertor {
376 }; 376 };
377 } 377 }
378 let leaf: tt::Leaf = match k { 378 let leaf: tt::Leaf = match k {
379 T![true] | T![false] => make_leaf!(Literal), 379 T![true] | T![false] => make_leaf!(Ident),
380 IDENT => make_leaf!(Ident), 380 IDENT => make_leaf!(Ident),
381 k if k.is_keyword() => make_leaf!(Ident), 381 k if k.is_keyword() => make_leaf!(Ident),
382 k if k.is_literal() => make_leaf!(Literal), 382 k if k.is_literal() => make_leaf!(Literal),
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index f2a726538..100ed41f2 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1016,6 +1016,36 @@ fn test_literal() {
1016} 1016}
1017 1017
1018#[test] 1018#[test]
1019fn test_boolean_is_ident() {
1020 parse_macro(
1021 r#"
1022 macro_rules! foo {
1023 ($lit0:literal, $lit1:literal) => { const VALUE: (bool,bool) = ($lit0,$lit1); };
1024 }
1025"#,
1026 )
1027 .assert_expand(
1028 r#"foo!(true,false);"#,
1029 r#"
1030SUBTREE $
1031 IDENT const 14
1032 IDENT VALUE 15
1033 PUNCH : [alone] 16
1034 SUBTREE () 17
1035 IDENT bool 18
1036 PUNCH , [alone] 19
1037 IDENT bool 20
1038 PUNCH = [alone] 21
1039 SUBTREE () 22
1040 IDENT true 29
1041 PUNCH , [joint] 25
1042 IDENT false 31
1043 PUNCH ; [alone] 28
1044"#,
1045 );
1046}
1047
1048#[test]
1019fn test_vis() { 1049fn test_vis() {
1020 parse_macro( 1050 parse_macro(
1021 r#" 1051 r#"
diff --git a/crates/ra_mbe/src/tt_iter.rs b/crates/ra_mbe/src/tt_iter.rs
index 100184e66..46c420718 100644
--- a/crates/ra_mbe/src/tt_iter.rs
+++ b/crates/ra_mbe/src/tt_iter.rs
@@ -40,9 +40,11 @@ impl<'a> TtIter<'a> {
40 } 40 }
41 } 41 }
42 42
43 pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Literal, ()> { 43 pub(crate) fn expect_literal(&mut self) -> Result<&'a tt::Leaf, ()> {
44 match self.expect_leaf()? { 44 let it = self.expect_leaf()?;
45 tt::Leaf::Literal(it) => Ok(it), 45 match it {
46 tt::Leaf::Literal(_) => Ok(it),
47 tt::Leaf::Ident(ident) if ident.text == "true" || ident.text == "false" => Ok(it),
46 _ => Err(()), 48 _ => Err(()),
47 } 49 }
48 } 50 }
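
Taken together, the `ra_mbe` hunks above make the expander agree with rustc that `true`/`false` are ident tokens which a `$x:literal` fragment nevertheless accepts. A self-contained macro, compilable with plain rustc and mirroring the `test_boolean_is_ident` case:

    macro_rules! foo {
        // A `literal` fragment matches boolean literals even though they are lexed as idents.
        ($lit0:literal, $lit1:literal) => {
            const VALUE: (bool, bool) = ($lit0, $lit1);
        };
    }

    foo!(true, false);

    fn main() {
        assert_eq!(VALUE, (true, false));
    }
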
diff --git a/crates/ra_proc_macro/src/lib.rs b/crates/ra_proc_macro/src/lib.rs
index b200fd126..004943b9e 100644
--- a/crates/ra_proc_macro/src/lib.rs
+++ b/crates/ra_proc_macro/src/lib.rs
@@ -2,7 +2,7 @@
2//! 2//!
3//! We separate proc-macro expanding logic to an extern program to allow 3//! We separate proc-macro expanding logic to an extern program to allow
4//! different implementations (e.g. wasm or dylib loading). And this crate 4//! different implementations (e.g. wasm or dylib loading). And this crate
5//! is used to provide basic infrastructure for communication between two 5//! is used to provide basic infrastructure for communication between two
6//! processes: Client (RA itself), Server (the external program) 6//! processes: Client (RA itself), Server (the external program)
7 7
8mod rpc; 8mod rpc;
@@ -13,6 +13,7 @@ use process::{ProcMacroProcessSrv, ProcMacroProcessThread};
13use ra_tt::{SmolStr, Subtree}; 13use ra_tt::{SmolStr, Subtree};
14use std::{ 14use std::{
15 ffi::OsStr, 15 ffi::OsStr,
16 io,
16 path::{Path, PathBuf}, 17 path::{Path, PathBuf},
17 sync::Arc, 18 sync::Arc,
18}; 19};
@@ -57,14 +58,10 @@ pub struct ProcMacroClient {
57} 58}
58 59
59impl ProcMacroClient { 60impl ProcMacroClient {
60 pub fn extern_process<I, S>( 61 pub fn extern_process(
61 process_path: &Path, 62 process_path: PathBuf,
62 args: I, 63 args: impl IntoIterator<Item = impl AsRef<OsStr>>,
63 ) -> Result<ProcMacroClient, std::io::Error> 64 ) -> io::Result<ProcMacroClient> {
64 where
65 I: IntoIterator<Item = S>,
66 S: AsRef<OsStr>,
67 {
68 let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?; 65 let (thread, process) = ProcMacroProcessSrv::run(process_path, args)?;
69 Ok(ProcMacroClient { 66 Ok(ProcMacroClient {
70 kind: ProcMacroClientKind::Process { process: Arc::new(process), thread }, 67 kind: ProcMacroClientKind::Process { process: Arc::new(process), thread },
@@ -84,7 +81,7 @@ impl ProcMacroClient {
84 ProcMacroClientKind::Process { process, .. } => { 81 ProcMacroClientKind::Process { process, .. } => {
85 let macros = match process.find_proc_macros(dylib_path) { 82 let macros = match process.find_proc_macros(dylib_path) {
86 Err(err) => { 83 Err(err) => {
87 eprintln!("Fail to find proc macro. Error: {:#?}", err); 84 eprintln!("Failed to find proc macros. Error: {:#?}", err);
88 return vec![]; 85 return vec![];
89 } 86 }
90 Ok(macros) => macros, 87 Ok(macros) => macros,
diff --git a/crates/ra_proc_macro/src/msg.rs b/crates/ra_proc_macro/src/msg.rs
index aa95bcc8f..95d9b8804 100644
--- a/crates/ra_proc_macro/src/msg.rs
+++ b/crates/ra_proc_macro/src/msg.rs
@@ -1,4 +1,4 @@
1//! Defines messages for cross-process message based on `ndjson` wire protocol 1//! Defines messages for cross-process message passing based on `ndjson` wire protocol
2 2
3use std::{ 3use std::{
4 convert::TryFrom, 4 convert::TryFrom,
@@ -31,7 +31,7 @@ macro_rules! impl_try_from_response {
31 fn try_from(value: Response) -> Result<Self, Self::Error> { 31 fn try_from(value: Response) -> Result<Self, Self::Error> {
32 match value { 32 match value {
33 Response::$tag(res) => Ok(res), 33 Response::$tag(res) => Ok(res),
34 _ => Err("Fail to convert from response"), 34 _ => Err(concat!("Failed to convert response to ", stringify!($tag))),
35 } 35 }
36 } 36 }
37 } 37 }
@@ -53,18 +53,16 @@ pub enum ErrorCode {
53 ExpansionError, 53 ExpansionError,
54} 54}
55 55
56pub trait Message: Sized + Serialize + DeserializeOwned { 56pub trait Message: Serialize + DeserializeOwned {
57 fn read(r: &mut impl BufRead) -> io::Result<Option<Self>> { 57 fn read(inp: &mut impl BufRead) -> io::Result<Option<Self>> {
58 let text = match read_json(r)? { 58 Ok(match read_json(inp)? {
59 None => return Ok(None), 59 None => None,
60 Some(text) => text, 60 Some(text) => Some(serde_json::from_str(&text)?),
61 }; 61 })
62 let msg = serde_json::from_str(&text)?;
63 Ok(Some(msg))
64 } 62 }
65 fn write(self, w: &mut impl Write) -> io::Result<()> { 63 fn write(self, out: &mut impl Write) -> io::Result<()> {
66 let text = serde_json::to_string(&self)?; 64 let text = serde_json::to_string(&self)?;
67 write_json(w, &text) 65 write_json(out, &text)
68 } 66 }
69} 67}
70 68
@@ -73,15 +71,12 @@ impl Message for Response {}
73 71
74fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> { 72fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> {
75 let mut buf = String::new(); 73 let mut buf = String::new();
76 if inp.read_line(&mut buf)? == 0 { 74 inp.read_line(&mut buf)?;
77 return Ok(None); 75 buf.pop(); // Remove trailing '\n'
78 } 76 Ok(match buf.len() {
79 // Remove ending '\n' 77 0 => None,
80 let buf = &buf[..buf.len() - 1]; 78 _ => Some(buf),
81 if buf.is_empty() { 79 })
82 return Ok(None);
83 }
84 Ok(Some(buf.to_string()))
85} 80}
86 81
87fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { 82fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
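
A std-only sketch of the wire format the reshaped `read_json`/`write_json` pair implements: one JSON message per line, with an empty line (or EOF) meaning "no message". This is a simplified stand-in rather than the exact `msg` module, and the request payload in `main` is only illustrative:

    use std::io::{self, BufRead, Write};

    fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> {
        out.write_all(msg.as_bytes())?;
        out.write_all(b"\n")?;
        out.flush()
    }

    fn read_json(inp: &mut impl BufRead) -> io::Result<Option<String>> {
        let mut buf = String::new();
        inp.read_line(&mut buf)?;
        buf.pop(); // Remove trailing '\n'
        Ok(match buf.len() {
            0 => None,
            _ => Some(buf),
        })
    }

    fn main() -> io::Result<()> {
        let mut frame = Vec::new();
        write_json(&mut frame, r#"{"ListMacro":{"lib":"/path/to/libexample_proc_macro.so"}}"#)?;
        let mut reader = io::BufReader::new(frame.as_slice());
        println!("{:?}", read_json(&mut reader)?);
        Ok(())
    }
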
diff --git a/crates/ra_proc_macro/src/process.rs b/crates/ra_proc_macro/src/process.rs
index f851570bc..673f80a7a 100644
--- a/crates/ra_proc_macro/src/process.rs
+++ b/crates/ra_proc_macro/src/process.rs
@@ -9,7 +9,7 @@ use crate::rpc::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTas
9use io::{BufRead, BufReader}; 9use io::{BufRead, BufReader};
10use std::{ 10use std::{
11 convert::{TryFrom, TryInto}, 11 convert::{TryFrom, TryInto},
12 ffi::OsStr, 12 ffi::{OsStr, OsString},
13 io::{self, Write}, 13 io::{self, Write},
14 path::{Path, PathBuf}, 14 path::{Path, PathBuf},
15 process::{Child, Command, Stdio}, 15 process::{Child, Command, Stdio},
@@ -28,66 +28,11 @@ pub(crate) struct ProcMacroProcessThread {
28 handle: jod_thread::JoinHandle<()>, 28 handle: jod_thread::JoinHandle<()>,
29} 29}
30 30
31struct Task {
32 req: Request,
33 result_tx: Sender<Option<Response>>,
34}
35
36struct Process {
37 path: PathBuf,
38 child: Child,
39}
40
41impl Drop for Process {
42 fn drop(&mut self) {
43 let _ = self.child.kill();
44 }
45}
46
47impl Process {
48 fn run<I, S>(process_path: &Path, args: I) -> Result<Process, io::Error>
49 where
50 I: IntoIterator<Item = S>,
51 S: AsRef<OsStr>,
52 {
53 let child = Command::new(process_path.clone())
54 .args(args)
55 .stdin(Stdio::piped())
56 .stdout(Stdio::piped())
57 .stderr(Stdio::null())
58 .spawn()?;
59
60 Ok(Process { path: process_path.into(), child })
61 }
62
63 fn restart(&mut self) -> Result<(), io::Error> {
64 let _ = self.child.kill();
65 self.child = Command::new(self.path.clone())
66 .stdin(Stdio::piped())
67 .stdout(Stdio::piped())
68 .stderr(Stdio::null())
69 .spawn()?;
70 Ok(())
71 }
72
73 fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> {
74 let stdin = self.child.stdin.take()?;
75 let stdout = self.child.stdout.take()?;
76 let read = BufReader::new(stdout);
77
78 Some((stdin, read))
79 }
80}
81
82impl ProcMacroProcessSrv { 31impl ProcMacroProcessSrv {
83 pub fn run<I, S>( 32 pub fn run(
84 process_path: &Path, 33 process_path: PathBuf,
85 args: I, 34 args: impl IntoIterator<Item = impl AsRef<OsStr>>,
86 ) -> Result<(ProcMacroProcessThread, ProcMacroProcessSrv), io::Error> 35 ) -> io::Result<(ProcMacroProcessThread, ProcMacroProcessSrv)> {
87 where
88 I: IntoIterator<Item = S>,
89 S: AsRef<OsStr>,
90 {
91 let process = Process::run(process_path, args)?; 36 let process = Process::run(process_path, args)?;
92 37
93 let (task_tx, task_rx) = bounded(0); 38 let (task_tx, task_rx) = bounded(0);
@@ -197,11 +142,62 @@ fn client_loop(task_rx: Receiver<Task>, mut process: Process) {
197 } 142 }
198} 143}
199 144
145struct Task {
146 req: Request,
147 result_tx: Sender<Option<Response>>,
148}
149
150struct Process {
151 path: PathBuf,
152 args: Vec<OsString>,
153 child: Child,
154}
155
156impl Drop for Process {
157 fn drop(&mut self) {
158 let _ = self.child.kill();
159 }
160}
161
162impl Process {
163 fn run(
164 path: PathBuf,
165 args: impl IntoIterator<Item = impl AsRef<OsStr>>,
166 ) -> io::Result<Process> {
167 let args = args.into_iter().map(|s| s.as_ref().into()).collect();
168 let child = mk_child(&path, &args)?;
169 Ok(Process { path, args, child })
170 }
171
172 fn restart(&mut self) -> io::Result<()> {
173 let _ = self.child.kill();
174 self.child = mk_child(&self.path, &self.args)?;
175 Ok(())
176 }
177
178 fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> {
179 let stdin = self.child.stdin.take()?;
180 let stdout = self.child.stdout.take()?;
181 let read = BufReader::new(stdout);
182
183 Some((stdin, read))
184 }
185}
186
187fn mk_child(path: &Path, args: impl IntoIterator<Item = impl AsRef<OsStr>>) -> io::Result<Child> {
188 Command::new(&path)
189 .args(args)
190 .stdin(Stdio::piped())
191 .stdout(Stdio::piped())
192 .stderr(Stdio::null())
193 .spawn()
194}
195
200fn send_request( 196fn send_request(
201 mut writer: &mut impl Write, 197 mut writer: &mut impl Write,
202 mut reader: &mut impl BufRead, 198 mut reader: &mut impl BufRead,
203 req: Request, 199 req: Request,
204) -> Result<Option<Response>, io::Error> { 200) -> io::Result<Option<Response>> {
205 req.write(&mut writer)?; 201 req.write(&mut writer)?;
206 Ok(Response::read(&mut reader)?) 202 Ok(Response::read(&mut reader)?)
207} 203}
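
The hunks above move `Process` below the public API and, more importantly, keep the original `args` around so that `restart` re-spawns the child with the same command line (the previous `restart` did not pass the args again). A compilable sketch of that shape; `cat` in `main` is a stand-in command (Unix-only assumption), since the real server is `rust-analyzer proc-macro`:

    use std::{
        ffi::{OsStr, OsString},
        io::{self, BufRead, BufReader, Write},
        path::{Path, PathBuf},
        process::{Child, Command, Stdio},
    };

    struct Process {
        path: PathBuf,
        args: Vec<OsString>, // kept so a crashed child can be re-spawned identically
        child: Child,
    }

    impl Drop for Process {
        fn drop(&mut self) {
            let _ = self.child.kill();
        }
    }

    impl Process {
        fn run(path: PathBuf, args: impl IntoIterator<Item = impl AsRef<OsStr>>) -> io::Result<Process> {
            let args: Vec<OsString> = args.into_iter().map(|s| s.as_ref().into()).collect();
            let child = mk_child(&path, &args)?;
            Ok(Process { path, args, child })
        }

        fn restart(&mut self) -> io::Result<()> {
            let _ = self.child.kill();
            self.child = mk_child(&self.path, &self.args)?;
            Ok(())
        }

        fn stdio(&mut self) -> Option<(impl Write, impl BufRead)> {
            let stdin = self.child.stdin.take()?;
            let stdout = self.child.stdout.take()?;
            Some((stdin, BufReader::new(stdout)))
        }
    }

    fn mk_child(path: &Path, args: &[OsString]) -> io::Result<Child> {
        Command::new(path)
            .args(args)
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::null())
            .spawn()
    }

    fn main() -> io::Result<()> {
        // `cat` just echoes stdin, which is enough to exercise the pipes.
        let mut server = Process::run(PathBuf::from("cat"), std::iter::empty::<&str>())?;
        if let Some((mut stdin, mut stdout)) = server.stdio() {
            writeln!(stdin, "ping")?;
            drop(stdin); // close the pipe so `cat` terminates
            let mut line = String::new();
            stdout.read_line(&mut line)?;
            println!("child echoed: {}", line.trim_end());
        }
        server.restart()
    }
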
diff --git a/crates/ra_proc_macro/src/rpc.rs b/crates/ra_proc_macro/src/rpc.rs
index 66b3f55db..4ce485926 100644
--- a/crates/ra_proc_macro/src/rpc.rs
+++ b/crates/ra_proc_macro/src/rpc.rs
@@ -1,9 +1,9 @@
1//! Data struture serialization related stuffs for RPC 1//! Data structure serialization related stuff for RPC
2//! 2//!
3//! Define all necessary rpc serialization data structure, 3//! Defines all necessary rpc serialization data structures,
4//! which include ra_tt related data and some task messages. 4//! which includes `ra_tt` related data and some task messages.
5//! Although adding Serialize and Deserialize trait to ra_tt directly seem to be much easier, 5//! Although adding `Serialize` and `Deserialize` traits to `ra_tt` directly seems
6//! we deliberately duplicate the ra_tt struct with #[serde(with = "XXDef")] 6//! to be much easier, we deliberately duplicate `ra_tt` structs with `#[serde(with = "XXDef")]`
7//! for separation of code responsibility. 7//! for separation of code responsibility.
8 8
9use ra_tt::{ 9use ra_tt::{
@@ -34,15 +34,15 @@ pub struct ListMacrosResult {
34pub struct ExpansionTask { 34pub struct ExpansionTask {
35 /// Argument of macro call. 35 /// Argument of macro call.
36 /// 36 ///
37 /// In custom derive that would be a struct or enum; in attribute-like macro - underlying 37 /// In custom derive this will be a struct or enum; in attribute-like macro - underlying
38 /// item; in function-like macro - the macro body. 38 /// item; in function-like macro - the macro body.
39 #[serde(with = "SubtreeDef")] 39 #[serde(with = "SubtreeDef")]
40 pub macro_body: Subtree, 40 pub macro_body: Subtree,
41 41
42 /// Names of macros to expand. 42 /// Name of macro to expand.
43 /// 43 ///
44 /// In custom derive those are names of derived traits (`Serialize`, `Getters`, etc.). In 44 /// In custom derive this is the name of the derived trait (`Serialize`, `Getters`, etc.).
45 /// attribute-like and functiona-like macros - single name of macro itself (`show_streams`). 45 /// In attribute-like and function-like macros - single name of macro itself (`show_streams`).
46 pub macro_name: String, 46 pub macro_name: String,
47 47
48 /// Possible attributes for the attribute-like macros. 48 /// Possible attributes for the attribute-like macros.
diff --git a/crates/ra_proc_macro_srv/src/cli.rs b/crates/ra_proc_macro_srv/src/cli.rs
index c771f2b38..5f1f3ba3c 100644
--- a/crates/ra_proc_macro_srv/src/cli.rs
+++ b/crates/ra_proc_macro_srv/src/cli.rs
@@ -2,55 +2,43 @@
2 2
3use crate::{expand_task, list_macros}; 3use crate::{expand_task, list_macros};
4use ra_proc_macro::msg::{self, Message}; 4use ra_proc_macro::msg::{self, Message};
5
6use std::io; 5use std::io;
7 6
8fn read_request() -> Result<Option<msg::Request>, io::Error> {
9 let stdin = io::stdin();
10 let mut stdin = stdin.lock();
11 msg::Request::read(&mut stdin)
12}
13
14fn write_response(res: Result<msg::Response, String>) -> Result<(), io::Error> {
15 let msg: msg::Response = match res {
16 Ok(res) => res,
17 Err(err) => msg::Response::Error(msg::ResponseError {
18 code: msg::ErrorCode::ExpansionError,
19 message: err,
20 }),
21 };
22
23 let stdout = io::stdout();
24 let mut stdout = stdout.lock();
25 msg.write(&mut stdout)
26}
27
28pub fn run() { 7pub fn run() {
29 loop { 8 loop {
30 let req = match read_request() { 9 let req = match read_request() {
31 Err(err) => { 10 Err(err) => {
32 eprintln!("Read message error on ra_proc_macro_srv: {}", err.to_string()); 11 eprintln!("Read message error on ra_proc_macro_srv: {}", err);
33 continue; 12 continue;
34 } 13 }
35 Ok(None) => continue, 14 Ok(None) => continue,
36 Ok(Some(req)) => req, 15 Ok(Some(req)) => req,
37 }; 16 };
38 17
39 match req { 18 let res = match req {
40 msg::Request::ListMacro(task) => { 19 msg::Request::ListMacro(task) => Ok(msg::Response::ListMacro(list_macros(&task))),
41 if let Err(err) =
42 write_response(list_macros(&task).map(|it| msg::Response::ListMacro(it)))
43 {
44 eprintln!("Write message error on list macro: {}", err);
45 }
46 }
47 msg::Request::ExpansionMacro(task) => { 20 msg::Request::ExpansionMacro(task) => {
48 if let Err(err) = 21 expand_task(&task).map(msg::Response::ExpansionMacro)
49 write_response(expand_task(&task).map(|it| msg::Response::ExpansionMacro(it)))
50 {
51 eprintln!("Write message error on expansion macro: {}", err);
52 }
53 } 22 }
23 };
24
25 let msg = res.unwrap_or_else(|err| {
26 msg::Response::Error(msg::ResponseError {
27 code: msg::ErrorCode::ExpansionError,
28 message: err,
29 })
30 });
31
32 if let Err(err) = write_response(msg) {
33 eprintln!("Write message error: {}", err);
54 } 34 }
55 } 35 }
56} 36}
37
38fn read_request() -> io::Result<Option<msg::Request>> {
39 msg::Request::read(&mut io::stdin().lock())
40}
41
42fn write_response(msg: msg::Response) -> io::Result<()> {
43 msg.write(&mut io::stdout().lock())
44}
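
The rewritten loop above makes the invariant explicit: every request produces exactly one response, with failures funneled into a single `msg::Response::Error` before the lone `write_response` call. A std-only simulation of that shape; the request and response strings here are placeholders, not the real enums:

    use std::io::{BufRead, Write};

    fn handle(req: &str) -> Result<String, String> {
        match req {
            "list" => Ok(r#"{"ListMacro":{"macros":[]}}"#.to_string()),
            other => Err(format!("unknown request: {}", other)),
        }
    }

    fn main() {
        let input: &[u8] = b"list\nexpand\n"; // stands in for the child's stdin
        let mut output = Vec::new(); // stands in for its stdout

        for line in input.lines() {
            let req = line.unwrap();
            // Errors become a response too, so the client never waits on a missing reply.
            let res = handle(&req)
                .unwrap_or_else(|err| format!(r#"{{"Error":{{"message":"{}"}}}}"#, err));
            writeln!(output, "{}", res).unwrap();
        }

        print!("{}", String::from_utf8(output).unwrap());
    }
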
diff --git a/crates/ra_proc_macro_srv/src/dylib.rs b/crates/ra_proc_macro_srv/src/dylib.rs
index 16bd7466e..d202eb0fd 100644
--- a/crates/ra_proc_macro_srv/src/dylib.rs
+++ b/crates/ra_proc_macro_srv/src/dylib.rs
@@ -9,43 +9,37 @@ use libloading::Library;
9use memmap::Mmap; 9use memmap::Mmap;
10use ra_proc_macro::ProcMacroKind; 10use ra_proc_macro::ProcMacroKind;
11 11
12use std::io::Error as IoError; 12use std::io;
13use std::io::ErrorKind as IoErrorKind;
14 13
15const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; 14const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
16 15
17fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> IoError { 16fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
18 IoError::new(IoErrorKind::InvalidData, e) 17 io::Error::new(io::ErrorKind::InvalidData, e)
19} 18}
20 19
21fn is_derive_registrar_symbol(symbol: &str) -> bool { 20fn is_derive_registrar_symbol(symbol: &str) -> bool {
22 symbol.contains(NEW_REGISTRAR_SYMBOL) 21 symbol.contains(NEW_REGISTRAR_SYMBOL)
23} 22}
24 23
25fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> { 24fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> {
26 let file = File::open(file)?; 25 let file = File::open(file)?;
27 let buffer = unsafe { Mmap::map(&file)? }; 26 let buffer = unsafe { Mmap::map(&file)? };
28 let object = Object::parse(&buffer).map_err(invalid_data_err)?; 27 let object = Object::parse(&buffer).map_err(invalid_data_err)?;
29 28
30 match object { 29 let name = match object {
31 Object::Elf(elf) => { 30 Object::Elf(elf) => {
32 let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?; 31 let symbols = elf.dynstrtab.to_vec().map_err(invalid_data_err)?;
33 let name = 32 symbols.into_iter().find(|s| is_derive_registrar_symbol(s)).map(&str::to_owned)
34 symbols.iter().find(|s| is_derive_registrar_symbol(s)).map(|s| s.to_string());
35 Ok(name)
36 }
37 Object::PE(pe) => {
38 let name = pe
39 .exports
40 .iter()
41 .flat_map(|s| s.name)
42 .find(|s| is_derive_registrar_symbol(s))
43 .map(|s| s.to_string());
44 Ok(name)
45 } 33 }
34 Object::PE(pe) => pe
35 .exports
36 .iter()
37 .flat_map(|s| s.name)
38 .find(|s| is_derive_registrar_symbol(s))
39 .map(&str::to_owned),
46 Object::Mach(Mach::Binary(binary)) => { 40 Object::Mach(Mach::Binary(binary)) => {
47 let exports = binary.exports().map_err(invalid_data_err)?; 41 let exports = binary.exports().map_err(invalid_data_err)?;
48 let name = exports 42 exports
49 .iter() 43 .iter()
50 .map(|s| { 44 .map(|s| {
51 // In macos doc: 45 // In macos doc:
@@ -59,11 +53,11 @@ fn find_registrar_symbol(file: &Path) -> Result<Option<String>, IoError> {
59 } 53 }
60 }) 54 })
61 .find(|s| is_derive_registrar_symbol(s)) 55 .find(|s| is_derive_registrar_symbol(s))
62 .map(|s| s.to_string()); 56 .map(&str::to_owned)
63 Ok(name)
64 } 57 }
65 _ => Ok(None), 58 _ => return Ok(None),
66 } 59 };
60 return Ok(name);
67} 61}
68 62
69/// Loads dynamic library in platform dependent manner. 63/// Loads dynamic library in platform dependent manner.
@@ -93,15 +87,16 @@ fn load_library(file: &Path) -> Result<Library, libloading::Error> {
93} 87}
94 88
95struct ProcMacroLibraryLibloading { 89struct ProcMacroLibraryLibloading {
96 // Hold the dylib to prevent it for unloadeding 90 // Hold the dylib to prevent it from unloading
97 _lib: Library, 91 _lib: Library,
98 exported_macros: Vec<bridge::client::ProcMacro>, 92 exported_macros: Vec<bridge::client::ProcMacro>,
99} 93}
100 94
101impl ProcMacroLibraryLibloading { 95impl ProcMacroLibraryLibloading {
102 fn open(file: &Path) -> Result<Self, IoError> { 96 fn open(file: &Path) -> io::Result<Self> {
103 let symbol_name = find_registrar_symbol(file)? 97 let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
104 .ok_or(invalid_data_err(format!("Cannot find registrar symbol in file {:?}", file)))?; 98 invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display()))
99 })?;
105 100
106 let lib = load_library(file).map_err(invalid_data_err)?; 101 let lib = load_library(file).map_err(invalid_data_err)?;
107 let exported_macros = { 102 let exported_macros = {
@@ -121,18 +116,16 @@ pub struct Expander {
121} 116}
122 117
123impl Expander { 118impl Expander {
124 pub fn new<P: AsRef<Path>>(lib: &P) -> Result<Expander, String> { 119 pub fn new(lib: &Path) -> Result<Expander, String> {
125 let mut libs = vec![]; 120 // Some libraries for dynamic loading require canonicalized path even when it is
126 /* Some libraries for dynamic loading require canonicalized path (even when it is 121 // already absolute
127 already absolute 122 let lib = lib
128 */ 123 .canonicalize()
129 let lib = 124 .unwrap_or_else(|err| panic!("Cannot canonicalize {}: {:?}", lib.display(), err));
130 lib.as_ref().canonicalize().expect(&format!("Cannot canonicalize {:?}", lib.as_ref()));
131 125
132 let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?; 126 let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?;
133 libs.push(library);
134 127
135 Ok(Expander { libs }) 128 Ok(Expander { libs: vec![library] })
136 } 129 }
137 130
138 pub fn expand( 131 pub fn expand(
@@ -176,7 +169,6 @@ impl Expander {
176 parsed_attributes, 169 parsed_attributes,
177 parsed_body, 170 parsed_body,
178 ); 171 );
179
180 return res.map(|it| it.subtree); 172 return res.map(|it| it.subtree);
181 } 173 }
182 _ => continue, 174 _ => continue,
@@ -187,26 +179,21 @@ impl Expander {
187 Err(bridge::PanicMessage::String("Nothing to expand".to_string())) 179 Err(bridge::PanicMessage::String("Nothing to expand".to_string()))
188 } 180 }
189 181
190 pub fn list_macros(&self) -> Result<Vec<(String, ProcMacroKind)>, bridge::PanicMessage> { 182 pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
191 let mut result = vec![]; 183 self.libs
192 184 .iter()
193 for lib in &self.libs { 185 .flat_map(|it| &it.exported_macros)
194 for proc_macro in &lib.exported_macros { 186 .map(|proc_macro| match proc_macro {
195 let res = match proc_macro { 187 bridge::client::ProcMacro::CustomDerive { trait_name, .. } => {
196 bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { 188 (trait_name.to_string(), ProcMacroKind::CustomDerive)
197 (trait_name.to_string(), ProcMacroKind::CustomDerive) 189 }
198 } 190 bridge::client::ProcMacro::Bang { name, .. } => {
199 bridge::client::ProcMacro::Bang { name, .. } => { 191 (name.to_string(), ProcMacroKind::FuncLike)
200 (name.to_string(), ProcMacroKind::FuncLike) 192 }
201 } 193 bridge::client::ProcMacro::Attr { name, .. } => {
202 bridge::client::ProcMacro::Attr { name, .. } => { 194 (name.to_string(), ProcMacroKind::Attr)
203 (name.to_string(), ProcMacroKind::Attr) 195 }
204 } 196 })
205 }; 197 .collect()
206 result.push(res);
207 }
208 }
209
210 Ok(result)
211 } 198 }
212} 199}
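
For reference, once the object-file specifics (goblin parsing of ELF/PE/Mach-O export tables) are peeled away, the lookup `find_registrar_symbol` performs is a substring search over exported names. A std-only sketch with a hypothetical export table (symbol hashes made up):

    const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";

    fn is_derive_registrar_symbol(symbol: &str) -> bool {
        symbol.contains(NEW_REGISTRAR_SYMBOL)
    }

    fn main() {
        let exports = [
            "rust_metadata_example_derive",
            "__rustc_proc_macro_decls_4ac3b4d7__", // Mach-O style, leading underscore
            "_rustc_proc_macro_decls_1a2b3c4d__",  // ELF / PE style
        ];
        let registrar = exports.iter().copied().find(|s| is_derive_registrar_symbol(s));
        println!("registrar symbol: {:?}", registrar);
    }
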
diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs
index c62b0ed89..3aca859db 100644
--- a/crates/ra_proc_macro_srv/src/lib.rs
+++ b/crates/ra_proc_macro_srv/src/lib.rs
@@ -3,10 +3,10 @@
3//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code. 3//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
4//! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander. 4//! The general idea here is based on https://github.com/fedochet/rust-proc-macro-expander.
5//! 5//!
6//! But we change some several design for fitting RA needs: 6//! But we adapt it to better fit RA needs:
7//! 7//!
8//! * We use `ra_tt` for proc-macro `TokenStream` server, it is easy to manipute and interact with 8//! * We use `ra_tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with
9//! RA then proc-macro2 token stream. 9//! RA than `proc-macro2` token stream.
10//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` 10//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
11//! rustc rather than `unstable`. (Although in gerenal ABI compatibility is still an issue) 11//! rustc rather than `unstable`. (Although in gerenal ABI compatibility is still an issue)
12 12
@@ -21,36 +21,28 @@ mod dylib;
21 21
22use proc_macro::bridge::client::TokenStream; 22use proc_macro::bridge::client::TokenStream;
23use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; 23use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask};
24use std::path::Path;
24 25
25pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { 26pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> {
26 let expander = dylib::Expander::new(&task.lib) 27 let expander = create_expander(&task.lib);
27 .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
28 28
29 match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { 29 match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) {
30 Ok(expansion) => Ok(ExpansionResult { expansion }), 30 Ok(expansion) => Ok(ExpansionResult { expansion }),
31 Err(msg) => { 31 Err(msg) => {
32 let reason = format!( 32 Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg))
33 "Cannot perform expansion for {}: error {:?}!",
34 &task.macro_name,
35 msg.as_str()
36 );
37 Err(reason)
38 } 33 }
39 } 34 }
40} 35}
41 36
42pub(crate) fn list_macros(task: &ListMacrosTask) -> Result<ListMacrosResult, String> { 37pub(crate) fn list_macros(task: &ListMacrosTask) -> ListMacrosResult {
43 let expander = dylib::Expander::new(&task.lib) 38 let expander = create_expander(&task.lib);
44 .expect(&format!("Cannot expand with provided libraries: ${:?}", &task.lib));
45 39
46 match expander.list_macros() { 40 ListMacrosResult { macros: expander.list_macros() }
47 Ok(macros) => Ok(ListMacrosResult { macros }), 41}
48 Err(msg) => { 42
49 let reason = 43fn create_expander(lib: &Path) -> dylib::Expander {
50 format!("Cannot perform expansion for {:?}: error {:?}!", &task.lib, msg.as_str()); 44 dylib::Expander::new(lib)
51 Err(reason) 45 .unwrap_or_else(|err| panic!("Cannot create expander for {}: {:?}", lib.display(), err))
52 }
53 }
54} 46}
55 47
56pub mod cli; 48pub mod cli;
diff --git a/crates/ra_proc_macro_srv/src/rustc_server.rs b/crates/ra_proc_macro_srv/src/rustc_server.rs
index 9fcfdc450..f481d70b2 100644
--- a/crates/ra_proc_macro_srv/src/rustc_server.rs
+++ b/crates/ra_proc_macro_srv/src/rustc_server.rs
@@ -6,7 +6,7 @@
6//! The original idea from fedochet is using proc-macro2 as backend, 6//! The original idea from fedochet is using proc-macro2 as backend,
7//! we use ra_tt instead for better intergation with RA. 7//! we use ra_tt instead for better intergation with RA.
8//! 8//!
9//! FIXME: No span and source file informatin is implemented yet 9//! FIXME: No span and source file information is implemented yet
10 10
11use crate::proc_macro::bridge::{self, server}; 11use crate::proc_macro::bridge::{self, server};
12use ra_tt as tt; 12use ra_tt as tt;
diff --git a/crates/ra_proc_macro_srv/src/tests/utils.rs b/crates/ra_proc_macro_srv/src/tests/utils.rs
index 1ee409449..2139ec7a4 100644
--- a/crates/ra_proc_macro_srv/src/tests/utils.rs
+++ b/crates/ra_proc_macro_srv/src/tests/utils.rs
@@ -60,6 +60,6 @@ pub fn list(crate_name: &str, version: &str) -> Vec<String> {
60 let path = fixtures::dylib_path(crate_name, version); 60 let path = fixtures::dylib_path(crate_name, version);
61 let task = ListMacrosTask { lib: path }; 61 let task = ListMacrosTask { lib: path };
62 62
63 let res = list_macros(&task).unwrap(); 63 let res = list_macros(&task);
64 res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() 64 res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect()
65} 65}
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs
index b50cda06f..84008b2e3 100644
--- a/crates/ra_project_model/src/cargo_workspace.rs
+++ b/crates/ra_project_model/src/cargo_workspace.rs
@@ -303,8 +303,7 @@ pub fn load_extern_resources(
303 if message.target.kind.contains(&"proc-macro".to_string()) { 303 if message.target.kind.contains(&"proc-macro".to_string()) {
304 let package_id = message.package_id; 304 let package_id = message.package_id;
305 // Skip rmeta file 305 // Skip rmeta file
306 if let Some(filename) = 306 if let Some(filename) = message.filenames.iter().find(|name| is_dylib(name))
307 message.filenames.iter().filter(|name| is_dylib(name)).next()
308 { 307 {
309 res.proc_dylib_paths.insert(package_id, filename.clone()); 308 res.proc_dylib_paths.insert(package_id, filename.clone());
310 } 309 }
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs
index ea41bf85d..06df8495c 100644
--- a/crates/ra_syntax/src/algo.rs
+++ b/crates/ra_syntax/src/algo.rs
@@ -10,8 +10,8 @@ use ra_text_edit::TextEditBuilder;
10use rustc_hash::FxHashMap; 10use rustc_hash::FxHashMap;
11 11
12use crate::{ 12use crate::{
13 AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxNode, SyntaxNodePtr, SyntaxToken, 13 AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode, SyntaxNodePtr,
14 TextRange, TextUnit, 14 SyntaxToken, TextRange, TextUnit,
15}; 15};
16 16
17/// Returns ancestors of the node at the offset, sorted by length. This should 17/// Returns ancestors of the node at the offset, sorted by length. This should
@@ -90,6 +90,10 @@ pub fn neighbor<T: AstNode>(me: &T, direction: Direction) -> Option<T> {
90 me.syntax().siblings(direction).skip(1).find_map(T::cast) 90 me.syntax().siblings(direction).skip(1).find_map(T::cast)
91} 91}
92 92
93pub fn has_errors(node: &SyntaxNode) -> bool {
94 node.children().any(|it| it.kind() == SyntaxKind::ERROR)
95}
96
93#[derive(Debug, PartialEq, Eq, Clone, Copy)] 97#[derive(Debug, PartialEq, Eq, Clone, Copy)]
94pub enum InsertPosition<T> { 98pub enum InsertPosition<T> {
95 First, 99 First,
diff --git a/crates/ra_syntax/src/ast/edit.rs b/crates/ra_syntax/src/ast/edit.rs
index 9e5411ee5..26e4576ff 100644
--- a/crates/ra_syntax/src/ast/edit.rs
+++ b/crates/ra_syntax/src/ast/edit.rs
@@ -307,7 +307,11 @@ impl ast::UseTree {
307 307
308 fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> { 308 fn split_path_prefix(prefix: &ast::Path) -> Option<ast::Path> {
309 let parent = prefix.parent_path()?; 309 let parent = prefix.parent_path()?;
310 let mut res = make::path_unqualified(parent.segment()?); 310 let segment = parent.segment()?;
311 if algo::has_errors(segment.syntax()) {
312 return None;
313 }
314 let mut res = make::path_unqualified(segment);
311 for p in iter::successors(parent.parent_path(), |it| it.parent_path()) { 315 for p in iter::successors(parent.parent_path(), |it| it.parent_path()) {
312 res = make::path_qualified(res, p.segment()?); 316 res = make::path_qualified(res, p.segment()?);
313 } 317 }
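
`has_errors` gives `split_path_prefix` a cheap way to bail out when the trailing path segment failed to parse, so the split/merge import edits return `None` instead of producing a mangled `use` item. A small sketch of calling the new helper, assuming `ra_syntax` from this repository as a dependency:

    use ra_syntax::{algo, AstNode, SourceFile};

    fn main() {
        // An incomplete import like this one is the kind of input that previously tripped
        // up the assist.
        let parse = SourceFile::parse("use std::{collections::};");
        let file = parse.tree();

        for node in file.syntax().descendants() {
            if algo::has_errors(&node) {
                println!("{:?} contains an ERROR child", node.kind());
            }
        }
    }
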
diff --git a/crates/rust-analyzer/src/bin/args.rs b/crates/rust-analyzer/src/bin/args.rs
index 5e19253a6..b14409c39 100644
--- a/crates/rust-analyzer/src/bin/args.rs
+++ b/crates/rust-analyzer/src/bin/args.rs
@@ -84,7 +84,7 @@ impl Args {
84 if matches.contains(["-h", "--help"]) { 84 if matches.contains(["-h", "--help"]) {
85 eprintln!( 85 eprintln!(
86 "\ 86 "\
87ra-cli-parse 87rust-analyzer parse
88 88
89USAGE: 89USAGE:
90 rust-analyzer parse [FLAGS] 90 rust-analyzer parse [FLAGS]
@@ -104,7 +104,7 @@ FLAGS:
104 if matches.contains(["-h", "--help"]) { 104 if matches.contains(["-h", "--help"]) {
105 eprintln!( 105 eprintln!(
106 "\ 106 "\
107ra-cli-symbols 107rust-analyzer symbols
108 108
109USAGE: 109USAGE:
110 rust-analyzer highlight [FLAGS] 110 rust-analyzer highlight [FLAGS]
@@ -123,7 +123,7 @@ FLAGS:
123 if matches.contains(["-h", "--help"]) { 123 if matches.contains(["-h", "--help"]) {
124 eprintln!( 124 eprintln!(
125 "\ 125 "\
126ra-cli-highlight 126rust-analyzer highlight
127 127
128USAGE: 128USAGE:
129 rust-analyzer highlight [FLAGS] 129 rust-analyzer highlight [FLAGS]
@@ -143,7 +143,7 @@ FLAGS:
143 if matches.contains(["-h", "--help"]) { 143 if matches.contains(["-h", "--help"]) {
144 eprintln!( 144 eprintln!(
145 "\ 145 "\
146ra-cli-analysis-stats 146rust-analyzer analysis-stats
147 147
148USAGE: 148USAGE:
149 rust-analyzer analysis-stats [FLAGS] [OPTIONS] [PATH] 149 rust-analyzer analysis-stats [FLAGS] [OPTIONS] [PATH]
@@ -193,7 +193,7 @@ ARGS:
193 if matches.contains(["-h", "--help"]) { 193 if matches.contains(["-h", "--help"]) {
194 eprintln!( 194 eprintln!(
195 "\ 195 "\
196rust-analyzer-analysis-bench 196rust-analyzer analysis-bench
197 197
198USAGE: 198USAGE:
199 rust-analyzer analysis-bench [FLAGS] [OPTIONS] 199 rust-analyzer analysis-bench [FLAGS] [OPTIONS]
@@ -236,7 +236,7 @@ ARGS:
236 if matches.contains(["-h", "--help"]) { 236 if matches.contains(["-h", "--help"]) {
237 eprintln!( 237 eprintln!(
238 "\ 238 "\
239ra-cli-diagnostics 239rust-analyzer diagnostics
240 240
241USAGE: 241USAGE:
242 rust-analyzer diagnostics [FLAGS] [PATH] 242 rust-analyzer diagnostics [FLAGS] [PATH]
@@ -269,7 +269,7 @@ ARGS:
269 _ => { 269 _ => {
270 eprintln!( 270 eprintln!(
271 "\ 271 "\
272ra-cli 272rust-analyzer
273 273
274USAGE: 274USAGE:
275 rust-analyzer <SUBCOMMAND> 275 rust-analyzer <SUBCOMMAND>
@@ -281,6 +281,8 @@ SUBCOMMANDS:
281 analysis-bench 281 analysis-bench
282 analysis-stats 282 analysis-stats
283 highlight 283 highlight
284 diagnostics
285 proc-macro
284 parse 286 parse
285 symbols" 287 symbols"
286 ); 288 );
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs
index 28b67cfe2..e8d5dad65 100644
--- a/crates/rust-analyzer/src/bin/main.rs
+++ b/crates/rust-analyzer/src/bin/main.rs
@@ -51,7 +51,7 @@ fn main() -> Result<()> {
51 cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)? 51 cli::diagnostics(path.as_ref(), load_output_dirs, with_proc_macro, all)?
52 } 52 }
53 53
54 args::Command::ProcMacro => run_proc_macro_sv()?, 54 args::Command::ProcMacro => run_proc_macro_srv()?,
55 args::Command::RunServer => run_server()?, 55 args::Command::RunServer => run_server()?,
56 args::Command::Version => println!("rust-analyzer {}", env!("REV")), 56 args::Command::Version => println!("rust-analyzer {}", env!("REV")),
57 } 57 }
@@ -65,7 +65,7 @@ fn setup_logging() -> Result<()> {
65 Ok(()) 65 Ok(())
66} 66}
67 67
68fn run_proc_macro_sv() -> Result<()> { 68fn run_proc_macro_srv() -> Result<()> {
69 ra_proc_macro_srv::cli::run(); 69 ra_proc_macro_srv::cli::run();
70 Ok(()) 70 Ok(())
71} 71}
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index 762f776fe..d0a71120a 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -76,7 +76,7 @@ pub(crate) fn load_cargo(
76 ProcMacroClient::dummy() 76 ProcMacroClient::dummy()
77 } else { 77 } else {
78 let path = std::env::current_exe()?; 78 let path = std::env::current_exe()?;
79 ProcMacroClient::extern_process(&path, &["proc-macro"]).unwrap() 79 ProcMacroClient::extern_process(path, &["proc-macro"]).unwrap()
80 }; 80 };
81 let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client); 81 let host = load(&source_roots, ws, &mut vfs, receiver, extern_dirs, &proc_macro_client);
82 Ok((host, source_roots)) 82 Ok((host, source_roots))
diff --git a/crates/rust-analyzer/src/conv.rs b/crates/rust-analyzer/src/conv.rs
index b2b1cb625..8d2360cc8 100644
--- a/crates/rust-analyzer/src/conv.rs
+++ b/crates/rust-analyzer/src/conv.rs
@@ -24,7 +24,9 @@ use crate::{
24 world::WorldSnapshot, 24 world::WorldSnapshot,
25 Result, 25 Result,
26}; 26};
27use semantic_tokens::{ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION}; 27use semantic_tokens::{
28 ATTRIBUTE, BUILTIN_TYPE, ENUM_MEMBER, LIFETIME, TYPE_ALIAS, UNION, UNRESOLVED_REFERENCE,
29};
28 30
29pub trait Conv { 31pub trait Conv {
30 type Output; 32 type Output;
@@ -373,6 +375,7 @@ impl Conv for Highlight {
373 HighlightTag::Comment => SemanticTokenType::COMMENT, 375 HighlightTag::Comment => SemanticTokenType::COMMENT,
374 HighlightTag::Attribute => ATTRIBUTE, 376 HighlightTag::Attribute => ATTRIBUTE,
375 HighlightTag::Keyword => SemanticTokenType::KEYWORD, 377 HighlightTag::Keyword => SemanticTokenType::KEYWORD,
378 HighlightTag::UnresolvedReference => UNRESOLVED_REFERENCE,
376 }; 379 };
377 380
378 for modifier in self.modifiers.iter() { 381 for modifier in self.modifiers.iter() {
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs
index 865fa3b1c..10fe696f6 100644
--- a/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/semantic_tokens.rs
@@ -10,6 +10,8 @@ pub(crate) const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMe
10pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime"); 10pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime");
11pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias"); 11pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias");
12pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union"); 12pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union");
13pub(crate) const UNRESOLVED_REFERENCE: SemanticTokenType =
14 SemanticTokenType::new("unresolvedReference");
13 15
14pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant"); 16pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant");
15pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow"); 17pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow");
@@ -43,6 +45,7 @@ pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
43 LIFETIME, 45 LIFETIME,
44 TYPE_ALIAS, 46 TYPE_ALIAS,
45 UNION, 47 UNION,
48 UNRESOLVED_REFERENCE,
46]; 49];
47 50
48pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ 51pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
diff --git a/crates/rust-analyzer/src/world.rs b/crates/rust-analyzer/src/world.rs
index f2ad453fa..8e1744bf9 100644
--- a/crates/rust-analyzer/src/world.rs
+++ b/crates/rust-analyzer/src/world.rs
@@ -148,20 +148,17 @@ impl WorldState {
148 148
149 let proc_macro_client = match &config.proc_macro_srv { 149 let proc_macro_client = match &config.proc_macro_srv {
150 None => ProcMacroClient::dummy(), 150 None => ProcMacroClient::dummy(),
151 Some((path, args)) => { 151 Some((path, args)) => match ProcMacroClient::extern_process(path.into(), args) {
152 let path = std::path::Path::new(path); 152 Ok(it) => it,
153 match ProcMacroClient::extern_process(path, args) { 153 Err(err) => {
154 Ok(it) => it, 154 log::error!(
155 Err(err) => { 155 "Fail to run ra_proc_macro_srv from path {}, error: {:?}",
156 log::error!( 156 path,
157 "Fail to run ra_proc_macro_srv from path {}, error : {}", 157 err
158 path.to_string_lossy(), 158 );
159 err 159 ProcMacroClient::dummy()
160 );
161 ProcMacroClient::dummy()
162 }
163 } 160 }
164 } 161 },
165 }; 162 };
166 163
167 workspaces 164 workspaces
@@ -184,7 +181,7 @@ impl WorldState {
184 let mut analysis_host = AnalysisHost::new(lru_capacity); 181 let mut analysis_host = AnalysisHost::new(lru_capacity);
185 analysis_host.apply_change(change); 182 analysis_host.apply_change(change);
186 WorldState { 183 WorldState {
187 config: config, 184 config,
188 roots: folder_roots, 185 roots: folder_roots,
189 workspaces: Arc::new(workspaces), 186 workspaces: Arc::new(workspaces),
190 analysis_host, 187 analysis_host,
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
index abd126340..13ab2acc2 100644
--- a/docs/user/readme.adoc
+++ b/docs/user/readme.adoc
@@ -14,9 +14,9 @@
14// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository 14// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository
15 15
16At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time. 16At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
17This manual focuses on a specific usage of the library -- the implementation of 17This manual focuses on a specific usage of the library -- running it as part of a server that implements the
18https://microsoft.github.io/language-server-protocol/[Language Server Protocol]. 18https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
19LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process. 19The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
20 20
21To improve this document, send a pull request against 21To improve this document, send a pull request against
22https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file]. 22https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file].
@@ -26,7 +26,7 @@ https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc
26In theory, one should be able to just install the server binary and have it automatically work with any editor. 26In theory, one should be able to just install the server binary and have it automatically work with any editor.
27We are not there yet, so some editor specific setup is required. 27We are not there yet, so some editor specific setup is required.
28 28
29Additionally, rust-analyzer needs sources of the standard library. 29Additionally, rust-analyzer needs the sources of the standard library.
30If the source code is not present, rust-analyzer will attempt to install it automatically. 30If the source code is not present, rust-analyzer will attempt to install it automatically.
31 31
32To add the sources manually, run the following command: 32To add the sources manually, run the following command:
@@ -38,7 +38,7 @@ $ rustup component add rust-src
38=== VS Code 38=== VS Code
39 39
40This is the best supported editor at the moment. 40This is the best supported editor at the moment.
41rust-analyzer plugin for VS Code is maintained 41The rust-analyzer plugin for VS Code is maintained
42https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree]. 42https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree].
43 43
44You can install the latest release of the plugin from 44You can install the latest release of the plugin from
@@ -74,7 +74,7 @@ We ship nightly releases for VS Code. To help us out with testing the newest cod
74{ "rust-analyzer.updates.channel": "nightly" } 74{ "rust-analyzer.updates.channel": "nightly" }
75---- 75----
76 76
77You will be prompted to install the `nightly` extension version. Just click `Download now` and from that moment you will get automatic updates each 24 hours. 77You will be prompted to install the `nightly` extension version. Just click `Download now` and from that moment you will get automatic updates every 24 hours.
78 78
79If you don't want to be asked for `Download now` every day when the new nightly version is released add the following to your `settings.json`: 79If you don't want to be asked for `Download now` every day when the new nightly version is released add the following to your `settings.json`:
80[source,json] 80[source,json]
@@ -110,19 +110,21 @@ Here are some useful self-diagnostic commands:
110 110
111=== Language Server Binary 111=== Language Server Binary
112 112
113Other editors generally require `rust-analyzer` binary to be in `$PATH`. 113Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
114You can download the pre-built binary from 114You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analyzer` and make it executable, in addition to moving it into a directory in your `$PATH`.
115https://github.com/rust-analyzer/rust-analyzer/releases[releases] 115
116page, or you can install it from source using the following command: 116Alternatively, you can install it from source using the following command:
117 117
118[source,bash] 118[source,bash]
119---- 119----
120$ cargo xtask install --server 120$ cargo xtask install --server
121---- 121----
122 122
123If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-analyzer/rust-analyzer/issues/1811[this issue]. On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
124
123==== Arch Linux 125==== Arch Linux
124 126
125`rust-analyzer` binary can be installed from AUR (Arch User Repository): 127The `rust-analyzer` binary can be installed from AUR (Arch User Repository):
126 128
127- https://aur.archlinux.org/packages/rust-analyzer-bin[`rust-analyzer-bin`] (binary from GitHub releases) 129- https://aur.archlinux.org/packages/rust-analyzer-bin[`rust-analyzer-bin`] (binary from GitHub releases)
128- https://aur.archlinux.org/packages/rust-analyzer[`rust-analyzer`] (built from latest tagged source) 130- https://aur.archlinux.org/packages/rust-analyzer[`rust-analyzer`] (built from latest tagged source)
@@ -156,8 +158,8 @@ The are several LSP client implementations for vim:
1562. Run `:CocInstall coc-rust-analyzer` to install 1582. Run `:CocInstall coc-rust-analyzer` to install
157 https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer], 159 https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
158 this extension implements _most_ of the features supported in the VSCode extension: 160 this extension implements _most_ of the features supported in the VSCode extension:
159 * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.enableCargoWatchOnStartup` etc. 161 * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc.
160 * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc. 162 * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
161 * highlighting and inlay_hints are not implemented yet 163 * highlighting and inlay_hints are not implemented yet
162 164
163==== LanguageClient-neovim 165==== LanguageClient-neovim
@@ -183,11 +185,20 @@ Once `neovim/nvim-lsp` is installed, use `+lua require'nvim_lsp'.rust_analyzer.s
183 185
184=== Sublime Text 3 186=== Sublime Text 3
185 187
186Prerequisites: 188Prerequisites: You have installed the <<language-server-binary,`rust-analyzer` binary>>.
189
190You also need the `LSP` package. To install it:
191
1921. If you've never installed a Sublime Text package, install Package Control:
193 * Open the command palette (Win/Linux: `ctrl+shift+p`, Mac: `cmd+shift+p`)
194 * Type `Install Package Control`, press enter
1952. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter.
196
197Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. The latter means that rust-analyzer is enabled by default in Rust projects.
187 198
188`LSP` package. 199If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available.
189 200
190Invoke the command palette (`ctrl+shift+p`) and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer) 201If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<language-server-binary,section on installing the language server binary>>.
191 202
192== Usage 203== Usage
193 204
diff --git a/editors/code/package.json b/editors/code/package.json
index 5ce59e54a..79410ad10 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -517,6 +517,10 @@
517 "id": "union", 517 "id": "union",
518 "description": "Style for C-style untagged unions", 518 "description": "Style for C-style untagged unions",
519 "superType": "type" 519 "superType": "type"
520 },
521 {
522 "id": "unresolvedReference",
523 "description": "Style for names which can not be resolved due to compilation errors"
520 } 524 }
521 ], 525 ],
522 "semanticTokenModifiers": [ 526 "semanticTokenModifiers": [