-rw-r--r--  Cargo.lock  1
-rw-r--r--  crates/ra_assists/src/handlers/auto_import.rs  588
-rw-r--r--  crates/ra_hir/src/code_model.rs  2
-rw-r--r--  crates/ra_hir_def/src/resolver.rs  6
-rw-r--r--  crates/ra_hir_ty/src/display.rs  373
-rw-r--r--  crates/ra_hir_ty/src/lib.rs  383
-rw-r--r--  crates/ra_hir_ty/src/lower.rs  40
-rw-r--r--  crates/ra_hir_ty/src/marks.rs  2
-rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs  9
-rw-r--r--  crates/ra_hir_ty/src/tests/method_resolution.rs  32
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs  48
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk.rs  4
-rw-r--r--  crates/ra_lsp_server/src/main_loop/handlers.rs  2
-rw-r--r--  crates/ra_parser/src/grammar/params.rs  5
-rw-r--r--  crates/ra_project_model/Cargo.toml  2
-rw-r--r--  crates/ra_project_model/src/cargo_workspace.rs  11
-rw-r--r--  crates/ra_project_model/src/lib.rs  57
-rw-r--r--  crates/ra_project_model/src/sysroot.rs  14
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs  1
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt  26
-rw-r--r--  docs/user/README.md  280
-rw-r--r--  docs/user/readme.adoc  155
-rw-r--r--  editors/code/package-lock.json  20
-rw-r--r--  editors/code/package.json  9
-rw-r--r--  editors/code/src/client.ts  7
-rw-r--r--  editors/code/src/installation/download_file.ts  33
-rw-r--r--  editors/code/src/installation/language_server.ts  2
-rw-r--r--  xtask/src/cmd.rs  56
-rw-r--r--  xtask/src/install.rs  116
-rw-r--r--  xtask/src/lib.rs  77
-rw-r--r--  xtask/src/main.rs  3
-rw-r--r--  xtask/src/not_bash.rs  165
-rw-r--r--  xtask/src/pre_commit.rs  6
33 files changed, 1595 insertions, 940 deletions
diff --git a/Cargo.lock b/Cargo.lock
index 45804c087..f1651edaa 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1247,6 +1247,7 @@ dependencies = [
1247name = "ra_project_model" 1247name = "ra_project_model"
1248version = "0.1.0" 1248version = "0.1.0"
1249dependencies = [ 1249dependencies = [
1250 "anyhow",
1250 "cargo_metadata", 1251 "cargo_metadata",
1251 "log", 1252 "log",
1252 "ra_arena", 1253 "ra_arena",
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs
index 1fb701da5..c4aea2a06 100644
--- a/crates/ra_assists/src/handlers/auto_import.rs
+++ b/crates/ra_assists/src/handlers/auto_import.rs
@@ -1,10 +1,18 @@
1use ra_ide_db::imports_locator::ImportsLocator;
2use ra_syntax::ast::{self, AstNode};
3
4use crate::{ 1use crate::{
5 assist_ctx::{Assist, AssistCtx}, 2 assist_ctx::{Assist, AssistCtx},
6 insert_use_statement, AssistId, 3 insert_use_statement, AssistId,
7}; 4};
5use hir::{
6 db::HirDatabase, AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution,
7 SourceAnalyzer, Trait, Type,
8};
9use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
10use ra_prof::profile;
11use ra_syntax::{
12 ast::{self, AstNode},
13 SyntaxNode,
14};
15use rustc_hash::FxHashSet;
8use std::collections::BTreeSet; 16use std::collections::BTreeSet;
9 17
10// Assist: auto_import 18// Assist: auto_import
@@ -27,52 +35,24 @@ use std::collections::BTreeSet;
27// # pub mod std { pub mod collections { pub struct HashMap { } } } 35// # pub mod std { pub mod collections { pub struct HashMap { } } }
28// ``` 36// ```
29pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> { 37pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
30 let path_under_caret: ast::Path = ctx.find_node_at_offset()?; 38 let auto_import_assets = AutoImportAssets::new(&ctx)?;
31 if path_under_caret.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { 39 let proposed_imports = auto_import_assets.search_for_imports(ctx.db);
32 return None;
33 }
34
35 let module = path_under_caret.syntax().ancestors().find_map(ast::Module::cast);
36 let position = match module.and_then(|it| it.item_list()) {
37 Some(item_list) => item_list.syntax().clone(),
38 None => {
39 let current_file =
40 path_under_caret.syntax().ancestors().find_map(ast::SourceFile::cast)?;
41 current_file.syntax().clone()
42 }
43 };
44 let source_analyzer = ctx.source_analyzer(&position, None);
45 let module_with_name_to_import = source_analyzer.module()?;
46
47 let name_ref_to_import =
48 path_under_caret.syntax().descendants().find_map(ast::NameRef::cast)?;
49 if source_analyzer
50 .resolve_path(ctx.db, &name_ref_to_import.syntax().ancestors().find_map(ast::Path::cast)?)
51 .is_some()
52 {
53 return None;
54 }
55
56 let name_to_import = name_ref_to_import.syntax().to_string();
57 let proposed_imports = ImportsLocator::new(ctx.db)
58 .find_imports(&name_to_import)
59 .into_iter()
60 .filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def))
61 .filter(|use_path| !use_path.segments.is_empty())
62 .take(20)
63 .collect::<BTreeSet<_>>();
64
65 if proposed_imports.is_empty() { 40 if proposed_imports.is_empty() {
66 return None; 41 return None;
67 } 42 }
68 43
69 let mut group = ctx.add_assist_group(format!("Import {}", name_to_import)); 44 let assist_group_name = if proposed_imports.len() == 1 {
45 format!("Import `{}`", proposed_imports.iter().next().unwrap())
46 } else {
47 auto_import_assets.get_import_group_message()
48 };
49 let mut group = ctx.add_assist_group(assist_group_name);
70 for import in proposed_imports { 50 for import in proposed_imports {
71 group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| { 51 group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| {
72 edit.target(path_under_caret.syntax().text_range()); 52 edit.target(auto_import_assets.syntax_under_caret.text_range());
73 insert_use_statement( 53 insert_use_statement(
74 &position, 54 &auto_import_assets.syntax_under_caret,
75 path_under_caret.syntax(), 55 &auto_import_assets.syntax_under_caret,
76 &import, 56 &import,
77 edit.text_edit_builder(), 57 edit.text_edit_builder(),
78 ); 58 );
@@ -81,11 +61,232 @@ pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
81 group.finish() 61 group.finish()
82} 62}
83 63
64struct AutoImportAssets {
65 import_candidate: ImportCandidate,
66 module_with_name_to_import: Module,
67 syntax_under_caret: SyntaxNode,
68}
69
70impl AutoImportAssets {
71 fn new(ctx: &AssistCtx) -> Option<Self> {
72 if let Some(path_under_caret) = ctx.find_node_at_offset::<ast::Path>() {
73 Self::for_regular_path(path_under_caret, &ctx)
74 } else {
75 Self::for_method_call(ctx.find_node_at_offset()?, &ctx)
76 }
77 }
78
79 fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> {
80 let syntax_under_caret = method_call.syntax().to_owned();
81 let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
82 let module_with_name_to_import = source_analyzer.module()?;
83 Some(Self {
84 import_candidate: ImportCandidate::for_method_call(
85 &method_call,
86 &source_analyzer,
87 ctx.db,
88 )?,
89 module_with_name_to_import,
90 syntax_under_caret,
91 })
92 }
93
94 fn for_regular_path(path_under_caret: ast::Path, ctx: &AssistCtx) -> Option<Self> {
95 let syntax_under_caret = path_under_caret.syntax().to_owned();
96 if syntax_under_caret.ancestors().find_map(ast::UseItem::cast).is_some() {
97 return None;
98 }
99
100 let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
101 let module_with_name_to_import = source_analyzer.module()?;
102 Some(Self {
103 import_candidate: ImportCandidate::for_regular_path(
104 &path_under_caret,
105 &source_analyzer,
106 ctx.db,
107 )?,
108 module_with_name_to_import,
109 syntax_under_caret,
110 })
111 }
112
113 fn get_search_query(&self) -> &str {
114 match &self.import_candidate {
115 ImportCandidate::UnqualifiedName(name) => name,
116 ImportCandidate::QualifierStart(qualifier_start) => qualifier_start,
117 ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => trait_assoc_item_name,
118 ImportCandidate::TraitMethod(_, trait_method_name) => trait_method_name,
119 }
120 }
121
122 fn get_import_group_message(&self) -> String {
123 match &self.import_candidate {
124 ImportCandidate::UnqualifiedName(name) => format!("Import {}", name),
125 ImportCandidate::QualifierStart(qualifier_start) => {
126 format!("Import {}", qualifier_start)
127 }
128 ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => {
129 format!("Import a trait for item {}", trait_assoc_item_name)
130 }
131 ImportCandidate::TraitMethod(_, trait_method_name) => {
132 format!("Import a trait for method {}", trait_method_name)
133 }
134 }
135 }
136
137 fn search_for_imports(&self, db: &RootDatabase) -> BTreeSet<ModPath> {
138 let _p = profile("auto_import::search_for_imports");
139 let current_crate = self.module_with_name_to_import.krate();
140 ImportsLocator::new(db)
141 .find_imports(&self.get_search_query())
142 .into_iter()
143 .filter_map(|module_def| match &self.import_candidate {
144 ImportCandidate::TraitAssocItem(assoc_item_type, _) => {
145 let located_assoc_item = match module_def {
146 ModuleDef::Function(located_function) => located_function
147 .as_assoc_item(db)
148 .map(|assoc| assoc.container(db))
149 .and_then(Self::assoc_to_trait),
150 ModuleDef::Const(located_const) => located_const
151 .as_assoc_item(db)
152 .map(|assoc| assoc.container(db))
153 .and_then(Self::assoc_to_trait),
154 _ => None,
155 }?;
156
157 let mut trait_candidates = FxHashSet::default();
158 trait_candidates.insert(located_assoc_item.into());
159
160 assoc_item_type
161 .iterate_path_candidates(
162 db,
163 current_crate,
164 &trait_candidates,
165 None,
166 |_, assoc| Self::assoc_to_trait(assoc.container(db)),
167 )
168 .map(ModuleDef::from)
169 }
170 ImportCandidate::TraitMethod(function_callee, _) => {
171 let located_assoc_item =
172 if let ModuleDef::Function(located_function) = module_def {
173 located_function
174 .as_assoc_item(db)
175 .map(|assoc| assoc.container(db))
176 .and_then(Self::assoc_to_trait)
177 } else {
178 None
179 }?;
180
181 let mut trait_candidates = FxHashSet::default();
182 trait_candidates.insert(located_assoc_item.into());
183
184 function_callee
185 .iterate_method_candidates(
186 db,
187 current_crate,
188 &trait_candidates,
189 None,
190 |_, function| {
191 Self::assoc_to_trait(function.as_assoc_item(db)?.container(db))
192 },
193 )
194 .map(ModuleDef::from)
195 }
196 _ => Some(module_def),
197 })
198 .filter_map(|module_def| self.module_with_name_to_import.find_use_path(db, module_def))
199 .filter(|use_path| !use_path.segments.is_empty())
200 .take(20)
201 .collect::<BTreeSet<_>>()
202 }
203
204 fn assoc_to_trait(assoc: AssocItemContainer) -> Option<Trait> {
205 if let AssocItemContainer::Trait(extracted_trait) = assoc {
206 Some(extracted_trait)
207 } else {
208 None
209 }
210 }
211}
212
213#[derive(Debug)]
214enum ImportCandidate {
215 /// Simple name like 'HashMap'
216 UnqualifiedName(String),
217 /// First part of the qualified name.
218 /// For 'std::collections::HashMap', that will be 'std'.
219 QualifierStart(String),
220 /// A trait associated function (with no self parameter) or associated constant.
221 /// For 'test_mod::TestEnum::test_function', `Type` is the `test_mod::TestEnum` expression type
222 /// and `String` is the `test_function`
223 TraitAssocItem(Type, String),
224 /// A trait method with self parameter.
225 /// For 'test_enum.test_method()', `Type` is the `test_enum` expression type
226 /// and `String` is the `test_method`
227 TraitMethod(Type, String),
228}
229
230impl ImportCandidate {
231 fn for_method_call(
232 method_call: &ast::MethodCallExpr,
233 source_analyzer: &SourceAnalyzer,
234 db: &impl HirDatabase,
235 ) -> Option<Self> {
236 if source_analyzer.resolve_method_call(method_call).is_some() {
237 return None;
238 }
239 Some(Self::TraitMethod(
240 source_analyzer.type_of(db, &method_call.expr()?)?,
241 method_call.name_ref()?.syntax().to_string(),
242 ))
243 }
244
245 fn for_regular_path(
246 path_under_caret: &ast::Path,
247 source_analyzer: &SourceAnalyzer,
248 db: &impl HirDatabase,
249 ) -> Option<Self> {
250 if source_analyzer.resolve_path(db, path_under_caret).is_some() {
251 return None;
252 }
253
254 let segment = path_under_caret.segment()?;
255 if let Some(qualifier) = path_under_caret.qualifier() {
256 let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
257 let qualifier_start_path =
258 qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
259 if let Some(qualifier_start_resolution) =
260 source_analyzer.resolve_path(db, &qualifier_start_path)
261 {
262 let qualifier_resolution = if qualifier_start_path == qualifier {
263 qualifier_start_resolution
264 } else {
265 source_analyzer.resolve_path(db, &qualifier)?
266 };
267 if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution {
268 Some(ImportCandidate::TraitAssocItem(
269 assoc_item_path.ty(db),
270 segment.syntax().to_string(),
271 ))
272 } else {
273 None
274 }
275 } else {
276 Some(ImportCandidate::QualifierStart(qualifier_start.syntax().to_string()))
277 }
278 } else {
279 Some(ImportCandidate::UnqualifiedName(
280 segment.syntax().descendants().find_map(ast::NameRef::cast)?.syntax().to_string(),
281 ))
282 }
283 }
284}
285
84#[cfg(test)] 286#[cfg(test)]
85mod tests { 287mod tests {
86 use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
87
88 use super::*; 288 use super::*;
289 use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
89 290
90 #[test] 291 #[test]
91 fn applicable_when_found_an_import() { 292 fn applicable_when_found_an_import() {
@@ -290,4 +491,303 @@ mod tests {
290 ", 491 ",
291 ); 492 );
292 } 493 }
494
495 #[test]
496 fn not_applicable_for_imported_function() {
497 check_assist_not_applicable(
498 auto_import,
499 r"
500 pub mod test_mod {
501 pub fn test_function() {}
502 }
503
504 use test_mod::test_function;
505 fn main() {
506 test_function<|>
507 }
508 ",
509 );
510 }
511
512 #[test]
513 fn associated_struct_function() {
514 check_assist(
515 auto_import,
516 r"
517 mod test_mod {
518 pub struct TestStruct {}
519 impl TestStruct {
520 pub fn test_function() {}
521 }
522 }
523
524 fn main() {
525 TestStruct::test_function<|>
526 }
527 ",
528 r"
529 use test_mod::TestStruct;
530
531 mod test_mod {
532 pub struct TestStruct {}
533 impl TestStruct {
534 pub fn test_function() {}
535 }
536 }
537
538 fn main() {
539 TestStruct::test_function<|>
540 }
541 ",
542 );
543 }
544
545 #[test]
546 fn associated_struct_const() {
547 check_assist(
548 auto_import,
549 r"
550 mod test_mod {
551 pub struct TestStruct {}
552 impl TestStruct {
553 const TEST_CONST: u8 = 42;
554 }
555 }
556
557 fn main() {
558 TestStruct::TEST_CONST<|>
559 }
560 ",
561 r"
562 use test_mod::TestStruct;
563
564 mod test_mod {
565 pub struct TestStruct {}
566 impl TestStruct {
567 const TEST_CONST: u8 = 42;
568 }
569 }
570
571 fn main() {
572 TestStruct::TEST_CONST<|>
573 }
574 ",
575 );
576 }
577
578 #[test]
579 fn associated_trait_function() {
580 check_assist(
581 auto_import,
582 r"
583 mod test_mod {
584 pub trait TestTrait {
585 fn test_function();
586 }
587 pub struct TestStruct {}
588 impl TestTrait for TestStruct {
589 fn test_function() {}
590 }
591 }
592
593 fn main() {
594 test_mod::TestStruct::test_function<|>
595 }
596 ",
597 r"
598 use test_mod::TestTrait;
599
600 mod test_mod {
601 pub trait TestTrait {
602 fn test_function();
603 }
604 pub struct TestStruct {}
605 impl TestTrait for TestStruct {
606 fn test_function() {}
607 }
608 }
609
610 fn main() {
611 test_mod::TestStruct::test_function<|>
612 }
613 ",
614 );
615 }
616
617 #[test]
618 fn not_applicable_for_imported_trait_for_function() {
619 check_assist_not_applicable(
620 auto_import,
621 r"
622 mod test_mod {
623 pub trait TestTrait {
624 fn test_function();
625 }
626 pub trait TestTrait2 {
627 fn test_function();
628 }
629 pub enum TestEnum {
630 One,
631 Two,
632 }
633 impl TestTrait2 for TestEnum {
634 fn test_function() {}
635 }
636 impl TestTrait for TestEnum {
637 fn test_function() {}
638 }
639 }
640
641 use test_mod::TestTrait2;
642 fn main() {
643 test_mod::TestEnum::test_function<|>;
644 }
645 ",
646 )
647 }
648
649 #[test]
650 fn associated_trait_const() {
651 check_assist(
652 auto_import,
653 r"
654 mod test_mod {
655 pub trait TestTrait {
656 const TEST_CONST: u8;
657 }
658 pub struct TestStruct {}
659 impl TestTrait for TestStruct {
660 const TEST_CONST: u8 = 42;
661 }
662 }
663
664 fn main() {
665 test_mod::TestStruct::TEST_CONST<|>
666 }
667 ",
668 r"
669 use test_mod::TestTrait;
670
671 mod test_mod {
672 pub trait TestTrait {
673 const TEST_CONST: u8;
674 }
675 pub struct TestStruct {}
676 impl TestTrait for TestStruct {
677 const TEST_CONST: u8 = 42;
678 }
679 }
680
681 fn main() {
682 test_mod::TestStruct::TEST_CONST<|>
683 }
684 ",
685 );
686 }
687
688 #[test]
689 fn not_applicable_for_imported_trait_for_const() {
690 check_assist_not_applicable(
691 auto_import,
692 r"
693 mod test_mod {
694 pub trait TestTrait {
695 const TEST_CONST: u8;
696 }
697 pub trait TestTrait2 {
698 const TEST_CONST: f64;
699 }
700 pub enum TestEnum {
701 One,
702 Two,
703 }
704 impl TestTrait2 for TestEnum {
705 const TEST_CONST: f64 = 42.0;
706 }
707 impl TestTrait for TestEnum {
708 const TEST_CONST: u8 = 42;
709 }
710 }
711
712 use test_mod::TestTrait2;
713 fn main() {
714 test_mod::TestEnum::TEST_CONST<|>;
715 }
716 ",
717 )
718 }
719
720 #[test]
721 fn trait_method() {
722 check_assist(
723 auto_import,
724 r"
725 mod test_mod {
726 pub trait TestTrait {
727 fn test_method(&self);
728 }
729 pub struct TestStruct {}
730 impl TestTrait for TestStruct {
731 fn test_method(&self) {}
732 }
733 }
734
735 fn main() {
736 let test_struct = test_mod::TestStruct {};
737 test_struct.test_meth<|>od()
738 }
739 ",
740 r"
741 use test_mod::TestTrait;
742
743 mod test_mod {
744 pub trait TestTrait {
745 fn test_method(&self);
746 }
747 pub struct TestStruct {}
748 impl TestTrait for TestStruct {
749 fn test_method(&self) {}
750 }
751 }
752
753 fn main() {
754 let test_struct = test_mod::TestStruct {};
755 test_struct.test_meth<|>od()
756 }
757 ",
758 );
759 }
760
761 #[test]
762 fn not_applicable_for_imported_trait_for_method() {
763 check_assist_not_applicable(
764 auto_import,
765 r"
766 mod test_mod {
767 pub trait TestTrait {
768 fn test_method(&self);
769 }
770 pub trait TestTrait2 {
771 fn test_method(&self);
772 }
773 pub enum TestEnum {
774 One,
775 Two,
776 }
777 impl TestTrait2 for TestEnum {
778 fn test_method(&self) {}
779 }
780 impl TestTrait for TestEnum {
781 fn test_method(&self) {}
782 }
783 }
784
785 use test_mod::TestTrait2;
786 fn main() {
787 let one = test_mod::TestEnum::One;
788 one.test<|>_method();
789 }
790 ",
791 )
792 }
293} 793}
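The `ImportCandidate` enum introduced above classifies what sits under the caret (plain name, path qualifier start, trait associated item, or trait method) before the import search runs. A minimal, self-contained sketch of that classification idea over plain strings — the names below are illustrative only and are not rust-analyzer APIs:

// Simplified stand-in for the `ImportCandidate` classification above,
// using plain strings instead of hir types; purely illustrative.
#[derive(Debug)]
enum Candidate {
    UnqualifiedName(String), // e.g. `HashMap`
    QualifierStart(String),  // e.g. `std` in `std::collections::HashMap`
}

fn classify(path: &str) -> Candidate {
    match path.split_once("::") {
        None => Candidate::UnqualifiedName(path.to_string()),
        Some((first, _rest)) => Candidate::QualifierStart(first.to_string()),
    }
}

fn main() {
    println!("{:?}", classify("HashMap"));
    println!("{:?}", classify("std::collections::HashMap"));
}

The real code additionally resolves the qualifier to decide whether a trait import is needed, as the `TraitAssocItem` and `TraitMethod` branches above show.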
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 4fb679f6d..a56b8ab04 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -818,7 +818,7 @@ impl TypeParam {
818 } 818 }
819} 819}
820 820
821// FIXME: rename to `ImplBlock` 821// FIXME: rename from `ImplBlock` to `Impl`
822#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 822#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
823pub struct ImplBlock { 823pub struct ImplBlock {
824 pub(crate) id: ImplId, 824 pub(crate) id: ImplId,
diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs
index 05cf4646a..e2b228e80 100644
--- a/crates/ra_hir_def/src/resolver.rs
+++ b/crates/ra_hir_def/src/resolver.rs
@@ -542,11 +542,7 @@ impl Resolver {
542 542
543 fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver { 543 fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver {
544 let params = db.generic_params(def); 544 let params = db.generic_params(def);
545 if params.types.is_empty() { 545 self.push_scope(Scope::GenericParams { def, params })
546 self
547 } else {
548 self.push_scope(Scope::GenericParams { def, params })
549 }
550 } 546 }
551 547
552 fn push_impl_block_scope(self, impl_block: ImplId) -> Resolver { 548 fn push_impl_block_scope(self, impl_block: ImplId) -> Resolver {
diff --git a/crates/ra_hir_ty/src/display.rs b/crates/ra_hir_ty/src/display.rs
index d1ff85f0f..14e089cf4 100644
--- a/crates/ra_hir_ty/src/display.rs
+++ b/crates/ra_hir_ty/src/display.rs
@@ -2,7 +2,12 @@
2 2
3use std::fmt; 3use std::fmt;
4 4
5use crate::db::HirDatabase; 5use crate::{
6 db::HirDatabase, utils::generics, ApplicationTy, CallableDef, FnSig, GenericPredicate,
7 Obligation, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
8};
9use hir_def::{generics::TypeParamProvenance, AdtId, AssocContainerId, Lookup};
10use hir_expand::name::Name;
6 11
7pub struct HirFormatter<'a, 'b, DB> { 12pub struct HirFormatter<'a, 'b, DB> {
8 pub db: &'a DB, 13 pub db: &'a DB,
@@ -97,3 +102,369 @@ where
97 }) 102 })
98 } 103 }
99} 104}
105
106const TYPE_HINT_TRUNCATION: &str = "…";
107
108impl HirDisplay for &Ty {
109 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
110 HirDisplay::hir_fmt(*self, f)
111 }
112}
113
114impl HirDisplay for ApplicationTy {
115 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
116 if f.should_truncate() {
117 return write!(f, "{}", TYPE_HINT_TRUNCATION);
118 }
119
120 match self.ctor {
121 TypeCtor::Bool => write!(f, "bool")?,
122 TypeCtor::Char => write!(f, "char")?,
123 TypeCtor::Int(t) => write!(f, "{}", t)?,
124 TypeCtor::Float(t) => write!(f, "{}", t)?,
125 TypeCtor::Str => write!(f, "str")?,
126 TypeCtor::Slice => {
127 let t = self.parameters.as_single();
128 write!(f, "[{}]", t.display(f.db))?;
129 }
130 TypeCtor::Array => {
131 let t = self.parameters.as_single();
132 write!(f, "[{}; _]", t.display(f.db))?;
133 }
134 TypeCtor::RawPtr(m) => {
135 let t = self.parameters.as_single();
136 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
137 }
138 TypeCtor::Ref(m) => {
139 let t = self.parameters.as_single();
140 let ty_display = if f.omit_verbose_types() {
141 t.display_truncated(f.db, f.max_size)
142 } else {
143 t.display(f.db)
144 };
145 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
146 }
147 TypeCtor::Never => write!(f, "!")?,
148 TypeCtor::Tuple { .. } => {
149 let ts = &self.parameters;
150 if ts.len() == 1 {
151 write!(f, "({},)", ts[0].display(f.db))?;
152 } else {
153 write!(f, "(")?;
154 f.write_joined(&*ts.0, ", ")?;
155 write!(f, ")")?;
156 }
157 }
158 TypeCtor::FnPtr { .. } => {
159 let sig = FnSig::from_fn_ptr_substs(&self.parameters);
160 write!(f, "fn(")?;
161 f.write_joined(sig.params(), ", ")?;
162 write!(f, ") -> {}", sig.ret().display(f.db))?;
163 }
164 TypeCtor::FnDef(def) => {
165 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
166 let name = match def {
167 CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
168 CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
169 CallableDef::EnumVariantId(e) => {
170 let enum_data = f.db.enum_data(e.parent);
171 enum_data.variants[e.local_id].name.clone()
172 }
173 };
174 match def {
175 CallableDef::FunctionId(_) => write!(f, "fn {}", name)?,
176 CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
177 write!(f, "{}", name)?
178 }
179 }
180 if self.parameters.len() > 0 {
181 let generics = generics(f.db, def.into());
182 let (parent_params, self_param, type_params, _impl_trait_params) =
183 generics.provenance_split();
184 let total_len = parent_params + self_param + type_params;
185 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
186 if total_len > 0 {
187 write!(f, "<")?;
188 f.write_joined(&self.parameters.0[..total_len], ", ")?;
189 write!(f, ">")?;
190 }
191 }
192 write!(f, "(")?;
193 f.write_joined(sig.params(), ", ")?;
194 write!(f, ") -> {}", sig.ret().display(f.db))?;
195 }
196 TypeCtor::Adt(def_id) => {
197 let name = match def_id {
198 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
199 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
200 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
201 };
202 write!(f, "{}", name)?;
203 if self.parameters.len() > 0 {
204 write!(f, "<")?;
205
206 let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
207 let parameters_to_write = if f.omit_verbose_types() {
208 match self
209 .ctor
210 .as_generic_def()
211 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
212 .filter(|defaults| !defaults.is_empty())
213 {
214 Option::None => self.parameters.0.as_ref(),
215 Option::Some(default_parameters) => {
216 for (i, parameter) in self.parameters.iter().enumerate() {
217 match (parameter, default_parameters.get(i)) {
218 (&Ty::Unknown, _) | (_, None) => {
219 non_default_parameters.push(parameter.clone())
220 }
221 (_, Some(default_parameter))
222 if parameter != default_parameter =>
223 {
224 non_default_parameters.push(parameter.clone())
225 }
226 _ => (),
227 }
228 }
229 &non_default_parameters
230 }
231 }
232 } else {
233 self.parameters.0.as_ref()
234 };
235
236 f.write_joined(parameters_to_write, ", ")?;
237 write!(f, ">")?;
238 }
239 }
240 TypeCtor::AssociatedType(type_alias) => {
241 let trait_ = match type_alias.lookup(f.db).container {
242 AssocContainerId::TraitId(it) => it,
243 _ => panic!("not an associated type"),
244 };
245 let trait_name = f.db.trait_data(trait_).name.clone();
246 let name = f.db.type_alias_data(type_alias).name.clone();
247 write!(f, "{}::{}", trait_name, name)?;
248 if self.parameters.len() > 0 {
249 write!(f, "<")?;
250 f.write_joined(&*self.parameters.0, ", ")?;
251 write!(f, ">")?;
252 }
253 }
254 TypeCtor::Closure { .. } => {
255 let sig = self.parameters[0]
256 .callable_sig(f.db)
257 .expect("first closure parameter should contain signature");
258 let return_type_hint = sig.ret().display(f.db);
259 if sig.params().is_empty() {
260 write!(f, "|| -> {}", return_type_hint)?;
261 } else if f.omit_verbose_types() {
262 write!(f, "|{}| -> {}", TYPE_HINT_TRUNCATION, return_type_hint)?;
263 } else {
264 write!(f, "|")?;
265 f.write_joined(sig.params(), ", ")?;
266 write!(f, "| -> {}", return_type_hint)?;
267 };
268 }
269 }
270 Ok(())
271 }
272}
273
274impl HirDisplay for ProjectionTy {
275 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
276 if f.should_truncate() {
277 return write!(f, "{}", TYPE_HINT_TRUNCATION);
278 }
279
280 let trait_name = f.db.trait_data(self.trait_(f.db)).name.clone();
281 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_name,)?;
282 if self.parameters.len() > 1 {
283 write!(f, "<")?;
284 f.write_joined(&self.parameters[1..], ", ")?;
285 write!(f, ">")?;
286 }
287 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
288 Ok(())
289 }
290}
291
292impl HirDisplay for Ty {
293 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
294 if f.should_truncate() {
295 return write!(f, "{}", TYPE_HINT_TRUNCATION);
296 }
297
298 match self {
299 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
300 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
301 Ty::Placeholder(id) => {
302 let generics = generics(f.db, id.parent);
303 let param_data = &generics.params.types[id.local_id];
304 match param_data.provenance {
305 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
306 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
307 }
308 TypeParamProvenance::ArgumentImplTrait => {
309 write!(f, "impl ")?;
310 let bounds = f.db.generic_predicates_for_param(*id);
311 let substs = Substs::type_params_for_generics(&generics);
312 write_bounds_like_dyn_trait(
313 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
314 f,
315 )?;
316 }
317 }
318 }
319 Ty::Bound(idx) => write!(f, "?{}", idx)?,
320 Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
321 match self {
322 Ty::Dyn(_) => write!(f, "dyn ")?,
323 Ty::Opaque(_) => write!(f, "impl ")?,
324 _ => unreachable!(),
325 };
326 write_bounds_like_dyn_trait(&predicates, f)?;
327 }
328 Ty::Unknown => write!(f, "{{unknown}}")?,
329 Ty::Infer(..) => write!(f, "_")?,
330 }
331 Ok(())
332 }
333}
334
335fn write_bounds_like_dyn_trait(
336 predicates: &[GenericPredicate],
337 f: &mut HirFormatter<impl HirDatabase>,
338) -> fmt::Result {
339 // Note: This code is written to produce nice results (i.e.
340 // corresponding to surface Rust) for types that can occur in
341 // actual Rust. It will have weird results if the predicates
342 // aren't as expected (i.e. self types = $0, projection
343 // predicates for a certain trait come after the Implemented
344 // predicate for that trait).
345 let mut first = true;
346 let mut angle_open = false;
347 for p in predicates.iter() {
348 match p {
349 GenericPredicate::Implemented(trait_ref) => {
350 if angle_open {
351 write!(f, ">")?;
352 }
353 if !first {
354 write!(f, " + ")?;
355 }
356 // We assume that the self type is $0 (i.e. the
357 // existential) here, which is the only thing that's
358 // possible in actual Rust, and hence don't print it
359 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?;
360 if trait_ref.substs.len() > 1 {
361 write!(f, "<")?;
362 f.write_joined(&trait_ref.substs[1..], ", ")?;
363 // there might be assoc type bindings, so we leave the angle brackets open
364 angle_open = true;
365 }
366 }
367 GenericPredicate::Projection(projection_pred) => {
368 // in types in actual Rust, these will always come
369 // after the corresponding Implemented predicate
370 if angle_open {
371 write!(f, ", ")?;
372 } else {
373 write!(f, "<")?;
374 angle_open = true;
375 }
376 let name =
377 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
378 write!(f, "{} = ", name)?;
379 projection_pred.ty.hir_fmt(f)?;
380 }
381 GenericPredicate::Error => {
382 if angle_open {
383 // impl Trait<X, {error}>
384 write!(f, ", ")?;
385 } else if !first {
386 // impl Trait + {error}
387 write!(f, " + ")?;
388 }
389 p.hir_fmt(f)?;
390 }
391 }
392 first = false;
393 }
394 if angle_open {
395 write!(f, ">")?;
396 }
397 Ok(())
398}
399
400impl TraitRef {
401 fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
402 if f.should_truncate() {
403 return write!(f, "{}", TYPE_HINT_TRUNCATION);
404 }
405
406 self.substs[0].hir_fmt(f)?;
407 if use_as {
408 write!(f, " as ")?;
409 } else {
410 write!(f, ": ")?;
411 }
412 write!(f, "{}", f.db.trait_data(self.trait_).name.clone())?;
413 if self.substs.len() > 1 {
414 write!(f, "<")?;
415 f.write_joined(&self.substs[1..], ", ")?;
416 write!(f, ">")?;
417 }
418 Ok(())
419 }
420}
421
422impl HirDisplay for TraitRef {
423 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
424 self.hir_fmt_ext(f, false)
425 }
426}
427
428impl HirDisplay for &GenericPredicate {
429 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
430 HirDisplay::hir_fmt(*self, f)
431 }
432}
433
434impl HirDisplay for GenericPredicate {
435 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
436 if f.should_truncate() {
437 return write!(f, "{}", TYPE_HINT_TRUNCATION);
438 }
439
440 match self {
441 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
442 GenericPredicate::Projection(projection_pred) => {
443 write!(f, "<")?;
444 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
445 write!(
446 f,
447 ">::{} = {}",
448 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
449 projection_pred.ty.display(f.db)
450 )?;
451 }
452 GenericPredicate::Error => write!(f, "{{error}}")?,
453 }
454 Ok(())
455 }
456}
457
458impl HirDisplay for Obligation {
459 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
460 match self {
461 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
462 Obligation::Projection(proj) => write!(
463 f,
464 "Normalize({} => {})",
465 proj.projection_ty.display(f.db),
466 proj.ty.display(f.db)
467 ),
468 }
469 }
470}
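The `TypeCtor::Adt` branch moved into display.rs trims generic arguments that match their declared defaults when verbose types are omitted (the `non_default_parameters` logic above). A small standalone sketch of that trimming rule over plain strings, purely illustrative of the idea rather than the `HirFormatter` API:

// Keep only the arguments that differ from their declared defaults,
// mirroring the spirit of the `non_default_parameters` logic above.
fn trim_defaults(args: &[&str], defaults: &[Option<&str>]) -> Vec<String> {
    args.iter()
        .enumerate()
        .filter(|(i, arg)| defaults.get(*i).copied().flatten() != Some(**arg))
        .map(|(_, arg)| arg.to_string())
        .collect()
}

fn main() {
    // e.g. HashMap<String, i32, RandomState>, where RandomState is the declared default hasher
    let shown = trim_defaults(&["String", "i32", "RandomState"], &[None, None, Some("RandomState")]);
    assert_eq!(shown, vec!["String", "i32"]);
    println!("{:?}", shown);
}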
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index c5fe18c85..571579cc4 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -41,13 +41,12 @@ mod marks;
41 41
42use std::ops::Deref; 42use std::ops::Deref;
43use std::sync::Arc; 43use std::sync::Arc;
44use std::{fmt, iter, mem}; 44use std::{iter, mem};
45 45
46use hir_def::{ 46use hir_def::{
47 expr::ExprId, generics::TypeParamProvenance, type_ref::Mutability, AdtId, AssocContainerId, 47 expr::ExprId, type_ref::Mutability, AdtId, AssocContainerId, DefWithBodyId, GenericDefId,
48 DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, TypeParamId, 48 HasModule, Lookup, TraitId, TypeAliasId, TypeParamId,
49}; 49};
50use hir_expand::name::Name;
51use ra_db::{impl_intern_key, salsa, CrateId}; 50use ra_db::{impl_intern_key, salsa, CrateId};
52 51
53use crate::{ 52use crate::{
@@ -55,7 +54,7 @@ use crate::{
55 primitive::{FloatTy, IntTy, Uncertain}, 54 primitive::{FloatTy, IntTy, Uncertain},
56 utils::{generics, make_mut_slice, Generics}, 55 utils::{generics, make_mut_slice, Generics},
57}; 56};
58use display::{HirDisplay, HirFormatter}; 57use display::HirDisplay;
59 58
60pub use autoderef::autoderef; 59pub use autoderef::autoderef;
61pub use infer::{do_infer_query, InferTy, InferenceResult}; 60pub use infer::{do_infer_query, InferTy, InferenceResult};
@@ -291,7 +290,7 @@ pub enum Ty {
291 /// {}` when we're type-checking the body of that function. In this 290 /// {}` when we're type-checking the body of that function. In this
292 /// situation, we know this stands for *some* type, but don't know the exact 291 /// situation, we know this stands for *some* type, but don't know the exact
293 /// type. 292 /// type.
294 Param(TypeParamId), 293 Placeholder(TypeParamId),
295 294
296 /// A bound type variable. This is used in various places: when representing 295 /// A bound type variable. This is used in various places: when representing
297 /// some polymorphic type like the type of function `fn f<T>`, the type 296 /// some polymorphic type like the type of function `fn f<T>`, the type
@@ -365,7 +364,7 @@ impl Substs {
365 364
366 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). 365 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
367 pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs { 366 pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs {
368 Substs(generic_params.iter().map(|(id, _)| Ty::Param(id)).collect()) 367 Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect())
369 } 368 }
370 369
371 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). 370 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
@@ -813,7 +812,7 @@ impl TypeWalk for Ty {
813 p.walk(f); 812 p.walk(f);
814 } 813 }
815 } 814 }
816 Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} 815 Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
817 } 816 }
818 f(self); 817 f(self);
819 } 818 }
@@ -831,374 +830,8 @@ impl TypeWalk for Ty {
831 p.walk_mut_binders(f, binders + 1); 830 p.walk_mut_binders(f, binders + 1);
832 } 831 }
833 } 832 }
834 Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} 833 Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
835 } 834 }
836 f(self, binders); 835 f(self, binders);
837 } 836 }
838} 837}
839
840const TYPE_HINT_TRUNCATION: &str = "…";
841
842impl HirDisplay for &Ty {
843 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
844 HirDisplay::hir_fmt(*self, f)
845 }
846}
847
848impl HirDisplay for ApplicationTy {
849 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
850 if f.should_truncate() {
851 return write!(f, "{}", TYPE_HINT_TRUNCATION);
852 }
853
854 match self.ctor {
855 TypeCtor::Bool => write!(f, "bool")?,
856 TypeCtor::Char => write!(f, "char")?,
857 TypeCtor::Int(t) => write!(f, "{}", t)?,
858 TypeCtor::Float(t) => write!(f, "{}", t)?,
859 TypeCtor::Str => write!(f, "str")?,
860 TypeCtor::Slice => {
861 let t = self.parameters.as_single();
862 write!(f, "[{}]", t.display(f.db))?;
863 }
864 TypeCtor::Array => {
865 let t = self.parameters.as_single();
866 write!(f, "[{}; _]", t.display(f.db))?;
867 }
868 TypeCtor::RawPtr(m) => {
869 let t = self.parameters.as_single();
870 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
871 }
872 TypeCtor::Ref(m) => {
873 let t = self.parameters.as_single();
874 let ty_display = if f.omit_verbose_types() {
875 t.display_truncated(f.db, f.max_size)
876 } else {
877 t.display(f.db)
878 };
879 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
880 }
881 TypeCtor::Never => write!(f, "!")?,
882 TypeCtor::Tuple { .. } => {
883 let ts = &self.parameters;
884 if ts.len() == 1 {
885 write!(f, "({},)", ts[0].display(f.db))?;
886 } else {
887 write!(f, "(")?;
888 f.write_joined(&*ts.0, ", ")?;
889 write!(f, ")")?;
890 }
891 }
892 TypeCtor::FnPtr { .. } => {
893 let sig = FnSig::from_fn_ptr_substs(&self.parameters);
894 write!(f, "fn(")?;
895 f.write_joined(sig.params(), ", ")?;
896 write!(f, ") -> {}", sig.ret().display(f.db))?;
897 }
898 TypeCtor::FnDef(def) => {
899 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
900 let name = match def {
901 CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
902 CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
903 CallableDef::EnumVariantId(e) => {
904 let enum_data = f.db.enum_data(e.parent);
905 enum_data.variants[e.local_id].name.clone()
906 }
907 };
908 match def {
909 CallableDef::FunctionId(_) => write!(f, "fn {}", name)?,
910 CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
911 write!(f, "{}", name)?
912 }
913 }
914 if self.parameters.len() > 0 {
915 let generics = generics(f.db, def.into());
916 let (parent_params, self_param, type_params, _impl_trait_params) =
917 generics.provenance_split();
918 let total_len = parent_params + self_param + type_params;
919 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
920 if total_len > 0 {
921 write!(f, "<")?;
922 f.write_joined(&self.parameters.0[..total_len], ", ")?;
923 write!(f, ">")?;
924 }
925 }
926 write!(f, "(")?;
927 f.write_joined(sig.params(), ", ")?;
928 write!(f, ") -> {}", sig.ret().display(f.db))?;
929 }
930 TypeCtor::Adt(def_id) => {
931 let name = match def_id {
932 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
933 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
934 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
935 };
936 write!(f, "{}", name)?;
937 if self.parameters.len() > 0 {
938 write!(f, "<")?;
939
940 let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
941 let parameters_to_write = if f.omit_verbose_types() {
942 match self
943 .ctor
944 .as_generic_def()
945 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
946 .filter(|defaults| !defaults.is_empty())
947 {
948 Option::None => self.parameters.0.as_ref(),
949 Option::Some(default_parameters) => {
950 for (i, parameter) in self.parameters.iter().enumerate() {
951 match (parameter, default_parameters.get(i)) {
952 (&Ty::Unknown, _) | (_, None) => {
953 non_default_parameters.push(parameter.clone())
954 }
955 (_, Some(default_parameter))
956 if parameter != default_parameter =>
957 {
958 non_default_parameters.push(parameter.clone())
959 }
960 _ => (),
961 }
962 }
963 &non_default_parameters
964 }
965 }
966 } else {
967 self.parameters.0.as_ref()
968 };
969
970 f.write_joined(parameters_to_write, ", ")?;
971 write!(f, ">")?;
972 }
973 }
974 TypeCtor::AssociatedType(type_alias) => {
975 let trait_ = match type_alias.lookup(f.db).container {
976 AssocContainerId::TraitId(it) => it,
977 _ => panic!("not an associated type"),
978 };
979 let trait_name = f.db.trait_data(trait_).name.clone();
980 let name = f.db.type_alias_data(type_alias).name.clone();
981 write!(f, "{}::{}", trait_name, name)?;
982 if self.parameters.len() > 0 {
983 write!(f, "<")?;
984 f.write_joined(&*self.parameters.0, ", ")?;
985 write!(f, ">")?;
986 }
987 }
988 TypeCtor::Closure { .. } => {
989 let sig = self.parameters[0]
990 .callable_sig(f.db)
991 .expect("first closure parameter should contain signature");
992 let return_type_hint = sig.ret().display(f.db);
993 if sig.params().is_empty() {
994 write!(f, "|| -> {}", return_type_hint)?;
995 } else if f.omit_verbose_types() {
996 write!(f, "|{}| -> {}", TYPE_HINT_TRUNCATION, return_type_hint)?;
997 } else {
998 write!(f, "|")?;
999 f.write_joined(sig.params(), ", ")?;
1000 write!(f, "| -> {}", return_type_hint)?;
1001 };
1002 }
1003 }
1004 Ok(())
1005 }
1006}
1007
1008impl HirDisplay for ProjectionTy {
1009 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1010 if f.should_truncate() {
1011 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1012 }
1013
1014 let trait_name = f.db.trait_data(self.trait_(f.db)).name.clone();
1015 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_name,)?;
1016 if self.parameters.len() > 1 {
1017 write!(f, "<")?;
1018 f.write_joined(&self.parameters[1..], ", ")?;
1019 write!(f, ">")?;
1020 }
1021 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
1022 Ok(())
1023 }
1024}
1025
1026impl HirDisplay for Ty {
1027 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1028 if f.should_truncate() {
1029 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1030 }
1031
1032 match self {
1033 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
1034 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
1035 Ty::Param(id) => {
1036 let generics = generics(f.db, id.parent);
1037 let param_data = &generics.params.types[id.local_id];
1038 match param_data.provenance {
1039 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
1040 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
1041 }
1042 TypeParamProvenance::ArgumentImplTrait => {
1043 write!(f, "impl ")?;
1044 let bounds = f.db.generic_predicates_for_param(*id);
1045 let substs = Substs::type_params_for_generics(&generics);
1046 write_bounds_like_dyn_trait(
1047 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
1048 f,
1049 )?;
1050 }
1051 }
1052 }
1053 Ty::Bound(idx) => write!(f, "?{}", idx)?,
1054 Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
1055 match self {
1056 Ty::Dyn(_) => write!(f, "dyn ")?,
1057 Ty::Opaque(_) => write!(f, "impl ")?,
1058 _ => unreachable!(),
1059 };
1060 write_bounds_like_dyn_trait(&predicates, f)?;
1061 }
1062 Ty::Unknown => write!(f, "{{unknown}}")?,
1063 Ty::Infer(..) => write!(f, "_")?,
1064 }
1065 Ok(())
1066 }
1067}
1068
1069fn write_bounds_like_dyn_trait(
1070 predicates: &[GenericPredicate],
1071 f: &mut HirFormatter<impl HirDatabase>,
1072) -> fmt::Result {
1073 // Note: This code is written to produce nice results (i.e.
1074 // corresponding to surface Rust) for types that can occur in
1075 // actual Rust. It will have weird results if the predicates
1076 // aren't as expected (i.e. self types = $0, projection
1077 // predicates for a certain trait come after the Implemented
1078 // predicate for that trait).
1079 let mut first = true;
1080 let mut angle_open = false;
1081 for p in predicates.iter() {
1082 match p {
1083 GenericPredicate::Implemented(trait_ref) => {
1084 if angle_open {
1085 write!(f, ">")?;
1086 }
1087 if !first {
1088 write!(f, " + ")?;
1089 }
1090 // We assume that the self type is $0 (i.e. the
1091 // existential) here, which is the only thing that's
1092 // possible in actual Rust, and hence don't print it
1093 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?;
1094 if trait_ref.substs.len() > 1 {
1095 write!(f, "<")?;
1096 f.write_joined(&trait_ref.substs[1..], ", ")?;
1097 // there might be assoc type bindings, so we leave the angle brackets open
1098 angle_open = true;
1099 }
1100 }
1101 GenericPredicate::Projection(projection_pred) => {
1102 // in types in actual Rust, these will always come
1103 // after the corresponding Implemented predicate
1104 if angle_open {
1105 write!(f, ", ")?;
1106 } else {
1107 write!(f, "<")?;
1108 angle_open = true;
1109 }
1110 let name =
1111 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
1112 write!(f, "{} = ", name)?;
1113 projection_pred.ty.hir_fmt(f)?;
1114 }
1115 GenericPredicate::Error => {
1116 if angle_open {
1117 // impl Trait<X, {error}>
1118 write!(f, ", ")?;
1119 } else if !first {
1120 // impl Trait + {error}
1121 write!(f, " + ")?;
1122 }
1123 p.hir_fmt(f)?;
1124 }
1125 }
1126 first = false;
1127 }
1128 if angle_open {
1129 write!(f, ">")?;
1130 }
1131 Ok(())
1132}
1133
1134impl TraitRef {
1135 fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
1136 if f.should_truncate() {
1137 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1138 }
1139
1140 self.substs[0].hir_fmt(f)?;
1141 if use_as {
1142 write!(f, " as ")?;
1143 } else {
1144 write!(f, ": ")?;
1145 }
1146 write!(f, "{}", f.db.trait_data(self.trait_).name.clone())?;
1147 if self.substs.len() > 1 {
1148 write!(f, "<")?;
1149 f.write_joined(&self.substs[1..], ", ")?;
1150 write!(f, ">")?;
1151 }
1152 Ok(())
1153 }
1154}
1155
1156impl HirDisplay for TraitRef {
1157 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1158 self.hir_fmt_ext(f, false)
1159 }
1160}
1161
1162impl HirDisplay for &GenericPredicate {
1163 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1164 HirDisplay::hir_fmt(*self, f)
1165 }
1166}
1167
1168impl HirDisplay for GenericPredicate {
1169 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1170 if f.should_truncate() {
1171 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1172 }
1173
1174 match self {
1175 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
1176 GenericPredicate::Projection(projection_pred) => {
1177 write!(f, "<")?;
1178 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
1179 write!(
1180 f,
1181 ">::{} = {}",
1182 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
1183 projection_pred.ty.display(f.db)
1184 )?;
1185 }
1186 GenericPredicate::Error => write!(f, "{{error}}")?,
1187 }
1188 Ok(())
1189 }
1190}
1191
1192impl HirDisplay for Obligation {
1193 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1194 match self {
1195 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
1196 Obligation::Projection(proj) => write!(
1197 f,
1198 "Normalize({} => {})",
1199 proj.projection_ty.display(f.db),
1200 proj.ty.display(f.db)
1201 ),
1202 }
1203 }
1204}
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs
index c68c5852b..6a2aded02 100644
--- a/crates/ra_hir_ty/src/lower.rs
+++ b/crates/ra_hir_ty/src/lower.rs
@@ -14,9 +14,9 @@ use hir_def::{
14 path::{GenericArg, Path, PathSegment, PathSegments}, 14 path::{GenericArg, Path, PathSegment, PathSegments},
15 resolver::{HasResolver, Resolver, TypeNs}, 15 resolver::{HasResolver, Resolver, TypeNs},
16 type_ref::{TypeBound, TypeRef}, 16 type_ref::{TypeBound, TypeRef},
17 AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, 17 AdtId, AssocContainerId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule,
18 LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, 18 ImplId, LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId,
19 VariantId, 19 UnionId, VariantId,
20}; 20};
21use ra_arena::map::ArenaMap; 21use ra_arena::map::ArenaMap;
22use ra_db::CrateId; 22use ra_db::CrateId;
@@ -152,7 +152,7 @@ impl Ty {
152 data.provenance == TypeParamProvenance::ArgumentImplTrait 152 data.provenance == TypeParamProvenance::ArgumentImplTrait
153 }) 153 })
154 .nth(idx as usize) 154 .nth(idx as usize)
155 .map_or(Ty::Unknown, |(id, _)| Ty::Param(id)); 155 .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id));
156 param 156 param
157 } else { 157 } else {
158 Ty::Unknown 158 Ty::Unknown
@@ -270,7 +270,7 @@ impl Ty {
270 let generics = 270 let generics =
271 generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope")); 271 generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope"));
272 match ctx.type_param_mode { 272 match ctx.type_param_mode {
273 TypeParamLoweringMode::Placeholder => Ty::Param(param_id), 273 TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
274 TypeParamLoweringMode::Variable => { 274 TypeParamLoweringMode::Variable => {
275 let idx = generics.param_idx(param_id).expect("matching generics"); 275 let idx = generics.param_idx(param_id).expect("matching generics");
276 Ty::Bound(idx) 276 Ty::Bound(idx)
@@ -339,7 +339,7 @@ impl Ty {
339 None => return Ty::Unknown, // this can't actually happen 339 None => return Ty::Unknown, // this can't actually happen
340 }; 340 };
341 let param_id = match self_ty { 341 let param_id = match self_ty {
342 Ty::Param(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id, 342 Ty::Placeholder(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id,
343 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => { 343 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => {
344 let generics = generics(ctx.db, def); 344 let generics = generics(ctx.db, def);
345 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) { 345 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) {
@@ -544,7 +544,7 @@ impl GenericPredicate {
544 let generics = generics(ctx.db, generic_def); 544 let generics = generics(ctx.db, generic_def);
545 let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; 545 let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
546 match ctx.type_param_mode { 546 match ctx.type_param_mode {
547 TypeParamLoweringMode::Placeholder => Ty::Param(param_id), 547 TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
548 TypeParamLoweringMode::Variable => { 548 TypeParamLoweringMode::Variable => {
549 let idx = generics.param_idx(param_id).expect("matching generics"); 549 let idx = generics.param_idx(param_id).expect("matching generics");
550 Ty::Bound(idx) 550 Ty::Bound(idx)
@@ -672,11 +672,35 @@ impl TraitEnvironment {
672 pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { 672 pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
673 let ctx = TyLoweringContext::new(db, &resolver) 673 let ctx = TyLoweringContext::new(db, &resolver)
674 .with_type_param_mode(TypeParamLoweringMode::Placeholder); 674 .with_type_param_mode(TypeParamLoweringMode::Placeholder);
675 let predicates = resolver 675 let mut predicates = resolver
676 .where_predicates_in_scope() 676 .where_predicates_in_scope()
677 .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred)) 677 .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred))
678 .collect::<Vec<_>>(); 678 .collect::<Vec<_>>();
679 679
680 if let Some(def) = resolver.generic_def() {
681 let container: Option<AssocContainerId> = match def {
682 // FIXME: is there a function for this?
683 GenericDefId::FunctionId(f) => Some(f.lookup(db).container),
684 GenericDefId::AdtId(_) => None,
685 GenericDefId::TraitId(_) => None,
686 GenericDefId::TypeAliasId(t) => Some(t.lookup(db).container),
687 GenericDefId::ImplId(_) => None,
688 GenericDefId::EnumVariantId(_) => None,
689 GenericDefId::ConstId(c) => Some(c.lookup(db).container),
690 };
691 if let Some(AssocContainerId::TraitId(trait_id)) = container {
692 // add `Self: Trait<T1, T2, ...>` to the environment in trait
693 // function default implementations (and hypothetical code
694 // inside consts or type aliases)
695 test_utils::tested_by!(trait_self_implements_self);
696 let substs = Substs::type_params(db, trait_id);
697 let trait_ref = TraitRef { trait_: trait_id, substs };
698 let pred = GenericPredicate::Implemented(trait_ref);
699
700 predicates.push(pred);
701 }
702 }
703
680 Arc::new(TraitEnvironment { predicates }) 704 Arc::new(TraitEnvironment { predicates })
681 } 705 }
682} 706}
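The new block in `TraitEnvironment::lower` adds an implicit `Self: Trait<...>` predicate when lowering items inside a trait, so calls through `self` in default method bodies resolve. The PR's `trait_self_implements_self` tests exercise this; a minimal plain-Rust example of the pattern involved:

trait Counter {
    fn count(&self) -> i64;

    // Inside this default body, `self.count()` only type-checks because
    // `Self: Counter` is assumed; the change above makes rust-analyzer's
    // trait environment carry that assumption too.
    fn doubled(&self) -> i64 {
        self.count() * 2
    }
}

struct Once;

impl Counter for Once {
    fn count(&self) -> i64 {
        1
    }
}

fn main() {
    assert_eq!(Once.doubled(), 2);
}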
diff --git a/crates/ra_hir_ty/src/marks.rs b/crates/ra_hir_ty/src/marks.rs
index 0f754eb9c..de5cb1d6b 100644
--- a/crates/ra_hir_ty/src/marks.rs
+++ b/crates/ra_hir_ty/src/marks.rs
@@ -4,6 +4,8 @@ test_utils::marks!(
4 type_var_cycles_resolve_completely 4 type_var_cycles_resolve_completely
5 type_var_cycles_resolve_as_possible 5 type_var_cycles_resolve_as_possible
6 type_var_resolves_to_int_var 6 type_var_resolves_to_int_var
7 impl_self_type_match_without_receiver
7 match_ergonomics_ref 8 match_ergonomics_ref
8 coerce_merge_fail_fallback 9 coerce_merge_fail_fallback
10 trait_self_implements_self
9); 11);
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs
index 5283bff28..4f8c52433 100644
--- a/crates/ra_hir_ty/src/method_resolution.rs
+++ b/crates/ra_hir_ty/src/method_resolution.rs
@@ -425,6 +425,15 @@ fn iterate_inherent_methods<T>(
425 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { 425 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
426 continue; 426 continue;
427 } 427 }
428 // we have to check whether the self type unifies with the type
429 // that the impl is for. If we have a receiver type, this
430 // already happens in `is_valid_candidate` above; if not, we
431 // check it here
432 if receiver_ty.is_none() && inherent_impl_substs(db, impl_block, self_ty).is_none()
433 {
434 test_utils::tested_by!(impl_self_type_match_without_receiver);
435 continue;
436 }
428 if let Some(result) = callback(&self_ty.value, item) { 437 if let Some(result) = callback(&self_ty.value, item) {
429 return Some(result); 438 return Some(result);
430 } 439 }
diff --git a/crates/ra_hir_ty/src/tests/method_resolution.rs b/crates/ra_hir_ty/src/tests/method_resolution.rs
index 1722563aa..1f767d324 100644
--- a/crates/ra_hir_ty/src/tests/method_resolution.rs
+++ b/crates/ra_hir_ty/src/tests/method_resolution.rs
@@ -964,6 +964,38 @@ fn test() { S2.into()<|>; }
964} 964}
965 965
966#[test] 966#[test]
967fn method_resolution_overloaded_method() {
968 test_utils::covers!(impl_self_type_match_without_receiver);
969 let t = type_at(
970 r#"
971//- main.rs
972struct Wrapper<T>(T);
973struct Foo<T>(T);
974struct Bar<T>(T);
975
976impl<T> Wrapper<Foo<T>> {
977 pub fn new(foo_: T) -> Self {
978 Wrapper(Foo(foo_))
979 }
980}
981
982impl<T> Wrapper<Bar<T>> {
983 pub fn new(bar_: T) -> Self {
984 Wrapper(Bar(bar_))
985 }
986}
987
988fn main() {
989 let a = Wrapper::<Foo<f32>>::new(1.0);
990 let b = Wrapper::<Bar<f32>>::new(1.0);
991 (a, b)<|>;
992}
993"#,
994 );
995 assert_eq!(t, "(Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)")
996}
997
998#[test]
967fn method_resolution_encountering_fn_type() { 999fn method_resolution_encountering_fn_type() {
968 type_at( 1000 type_at(
969 r#" 1001 r#"
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 17611ddbf..aa2018944 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -300,6 +300,54 @@ fn test() {
300} 300}
301 301
302#[test] 302#[test]
303fn trait_default_method_self_bound_implements_trait() {
304 test_utils::covers!(trait_self_implements_self);
305 assert_snapshot!(
306 infer(r#"
307trait Trait {
308 fn foo(&self) -> i64;
309 fn bar(&self) -> {
310 let x = self.foo();
311 }
312}
313"#),
314 @r###"
315 [27; 31) 'self': &Self
316 [53; 57) 'self': &Self
317 [62; 97) '{ ... }': ()
318 [76; 77) 'x': i64
319 [80; 84) 'self': &Self
320 [80; 90) 'self.foo()': i64
321 "###
322 );
323}
324
325#[test]
326fn trait_default_method_self_bound_implements_super_trait() {
327 test_utils::covers!(trait_self_implements_self);
328 assert_snapshot!(
329 infer(r#"
330trait SuperTrait {
331 fn foo(&self) -> i64;
332}
333trait Trait: SuperTrait {
334 fn bar(&self) -> {
335 let x = self.foo();
336 }
337}
338"#),
339 @r###"
340 [32; 36) 'self': &Self
341 [86; 90) 'self': &Self
342 [95; 130) '{ ... }': ()
343 [109; 110) 'x': i64
344 [113; 117) 'self': &Self
345 [113; 123) 'self.foo()': i64
346 "###
347 );
348}
349
350#[test]
303fn infer_project_associated_type() { 351fn infer_project_associated_type() {
304 // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234 352 // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234
305 assert_snapshot!( 353 assert_snapshot!(
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs
index 4974c565b..882160fa8 100644
--- a/crates/ra_hir_ty/src/traits/chalk.rs
+++ b/crates/ra_hir_ty/src/traits/chalk.rs
@@ -142,7 +142,7 @@ impl ToChalk for Ty {
142 let substitution = proj_ty.parameters.to_chalk(db); 142 let substitution = proj_ty.parameters.to_chalk(db);
143 chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern() 143 chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern()
144 } 144 }
145 Ty::Param(id) => { 145 Ty::Placeholder(id) => {
146 let interned_id = db.intern_type_param_id(id); 146 let interned_id = db.intern_type_param_id(id);
147 PlaceholderIndex { 147 PlaceholderIndex {
148 ui: UniverseIndex::ROOT, 148 ui: UniverseIndex::ROOT,
@@ -184,7 +184,7 @@ impl ToChalk for Ty {
184 let interned_id = crate::db::GlobalTypeParamId::from_intern_id( 184 let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
185 crate::salsa::InternId::from(idx.idx), 185 crate::salsa::InternId::from(idx.idx),
186 ); 186 );
187 Ty::Param(db.lookup_intern_type_param_id(interned_id)) 187 Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
188 } 188 }
189 chalk_ir::TyData::Alias(proj) => { 189 chalk_ir::TyData::Alias(proj) => {
190 let associated_ty = from_chalk(db, proj.associated_ty_id); 190 let associated_ty = from_chalk(db, proj.associated_ty_id);
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 59c86bbfa..2e598fdcd 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -758,7 +758,7 @@ pub fn handle_code_lens(
758 // Gather runnables 758 // Gather runnables
759 for runnable in world.analysis().runnables(file_id)? { 759 for runnable in world.analysis().runnables(file_id)? {
760 let title = match &runnable.kind { 760 let title = match &runnable.kind {
761 RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => "▶️Run Test", 761 RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => "▶️\u{fe0e}Run Test",
762 RunnableKind::Bench { .. } => "Run Bench", 762 RunnableKind::Bench { .. } => "Run Bench",
763 RunnableKind::Bin => "Run", 763 RunnableKind::Bin => "Run",
764 } 764 }
diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs
index ed4f93347..272661b1d 100644
--- a/crates/ra_parser/src/grammar/params.rs
+++ b/crates/ra_parser/src/grammar/params.rs
@@ -114,8 +114,11 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
114 // test fn_pointer_param_ident_path 114 // test fn_pointer_param_ident_path
115 // type Foo = fn(Bar::Baz); 115 // type Foo = fn(Bar::Baz);
116 // type Qux = fn(baz: Bar::Baz); 116 // type Qux = fn(baz: Bar::Baz);
117
118 // test fn_pointer_unnamed_arg
119 // type Foo = fn(_: bar);
117 Flavor::FnPointer => { 120 Flavor::FnPointer => {
118 if p.at(IDENT) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) { 121 if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) {
119 patterns::pattern_single(p); 122 patterns::pattern_single(p);
120 types::ascription(p); 123 types::ascription(p);
121 } else { 124 } else {
diff --git a/crates/ra_project_model/Cargo.toml b/crates/ra_project_model/Cargo.toml
index 69edc3c66..653d5bd14 100644
--- a/crates/ra_project_model/Cargo.toml
+++ b/crates/ra_project_model/Cargo.toml
@@ -19,3 +19,5 @@ ra_cfg = { path = "../ra_cfg" }
19 19
20serde = { version = "1.0.89", features = ["derive"] } 20serde = { version = "1.0.89", features = ["derive"] }
21serde_json = "1.0.39" 21serde_json = "1.0.39"
22
23anyhow = "1.0.26"
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs
index 60cb8c1eb..22d226a74 100644
--- a/crates/ra_project_model/src/cargo_workspace.rs
+++ b/crates/ra_project_model/src/cargo_workspace.rs
@@ -2,14 +2,13 @@
2 2
3use std::path::{Path, PathBuf}; 3use std::path::{Path, PathBuf};
4 4
5use anyhow::{Context, Result};
5use cargo_metadata::{CargoOpt, MetadataCommand}; 6use cargo_metadata::{CargoOpt, MetadataCommand};
6use ra_arena::{impl_arena_id, Arena, RawId}; 7use ra_arena::{impl_arena_id, Arena, RawId};
7use ra_db::Edition; 8use ra_db::Edition;
8use rustc_hash::FxHashMap; 9use rustc_hash::FxHashMap;
9use serde::Deserialize; 10use serde::Deserialize;
10 11
11use crate::Result;
12
13/// `CargoWorkspace` represents the logical structure of, well, a Cargo 12/// `CargoWorkspace` represents the logical structure of, well, a Cargo
14/// workspace. It pretty closely mirrors `cargo metadata` output. 13/// workspace. It pretty closely mirrors `cargo metadata` output.
15/// 14///
@@ -171,7 +170,9 @@ impl CargoWorkspace {
171 if let Some(parent) = cargo_toml.parent() { 170 if let Some(parent) = cargo_toml.parent() {
172 meta.current_dir(parent); 171 meta.current_dir(parent);
173 } 172 }
174 let meta = meta.exec().map_err(|e| format!("cargo metadata failed: {}", e))?; 173 let meta = meta.exec().with_context(|| {
174 format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display())
175 })?;
175 let mut pkg_by_id = FxHashMap::default(); 176 let mut pkg_by_id = FxHashMap::default();
176 let mut packages = Arena::default(); 177 let mut packages = Arena::default();
177 let mut targets = Arena::default(); 178 let mut targets = Arena::default();
@@ -181,7 +182,9 @@ impl CargoWorkspace {
181 for meta_pkg in meta.packages { 182 for meta_pkg in meta.packages {
182 let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg; 183 let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg;
183 let is_member = ws_members.contains(&id); 184 let is_member = ws_members.contains(&id);
184 let edition = edition.parse::<Edition>()?; 185 let edition = edition
186 .parse::<Edition>()
187 .with_context(|| format!("Failed to parse edition {}", edition))?;
185 let pkg = packages.alloc(PackageData { 188 let pkg = packages.alloc(PackageData {
186 name, 189 name,
187 manifest: manifest_path, 190 manifest: manifest_path,
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index bc1d15406..250255813 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -12,6 +12,7 @@ use std::{
12 process::Command, 12 process::Command,
13}; 13};
14 14
15use anyhow::{bail, Context, Result};
15use ra_cfg::CfgOptions; 16use ra_cfg::CfgOptions;
16use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId}; 17use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId};
17use rustc_hash::FxHashMap; 18use rustc_hash::FxHashMap;
@@ -23,8 +24,6 @@ pub use crate::{
23 sysroot::Sysroot, 24 sysroot::Sysroot,
24}; 25};
25 26
26pub type Result<T> = ::std::result::Result<T, Box<dyn Error + Send + Sync>>;
27
28#[derive(Clone, PartialEq, Eq, Hash, Debug)] 27#[derive(Clone, PartialEq, Eq, Hash, Debug)]
29pub struct CargoTomlNotFoundError(pub PathBuf); 28pub struct CargoTomlNotFoundError(pub PathBuf);
30 29
@@ -81,15 +80,36 @@ impl ProjectWorkspace {
81 ) -> Result<ProjectWorkspace> { 80 ) -> Result<ProjectWorkspace> {
82 match find_rust_project_json(path) { 81 match find_rust_project_json(path) {
83 Some(json_path) => { 82 Some(json_path) => {
84 let file = File::open(json_path)?; 83 let file = File::open(&json_path)
84 .with_context(|| format!("Failed to open json file {}", json_path.display()))?;
85 let reader = BufReader::new(file); 85 let reader = BufReader::new(file);
86 Ok(ProjectWorkspace::Json { project: from_reader(reader)? }) 86 Ok(ProjectWorkspace::Json {
87 project: from_reader(reader).with_context(|| {
88 format!("Failed to deserialize json file {}", json_path.display())
89 })?,
90 })
87 } 91 }
88 None => { 92 None => {
89 let cargo_toml = find_cargo_toml(path)?; 93 let cargo_toml = find_cargo_toml(path).with_context(|| {
90 let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features)?; 94 format!("Failed to find Cargo.toml for path {}", path.display())
91 let sysroot = 95 })?;
92 if with_sysroot { Sysroot::discover(&cargo_toml)? } else { Sysroot::default() }; 96 let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features)
97 .with_context(|| {
98 format!(
99 "Failed to read Cargo metadata from Cargo.toml file {}",
100 cargo_toml.display()
101 )
102 })?;
103 let sysroot = if with_sysroot {
104 Sysroot::discover(&cargo_toml).with_context(|| {
105 format!(
106 "Failed to find sysroot for Cargo.toml file {}",
107 cargo_toml.display()
108 )
109 })?
110 } else {
111 Sysroot::default()
112 };
93 Ok(ProjectWorkspace::Cargo { cargo, sysroot }) 113 Ok(ProjectWorkspace::Cargo { cargo, sysroot })
94 } 114 }
95 } 115 }
@@ -398,16 +418,27 @@ pub fn get_rustc_cfg_options() -> CfgOptions {
398 // Some nightly-only cfgs, which are required for stdlib 418 // Some nightly-only cfgs, which are required for stdlib
399 { 419 {
400 cfg_options.insert_atom("target_thread_local".into()); 420 cfg_options.insert_atom("target_thread_local".into());
401 for &target_has_atomic in ["16", "32", "64", "8", "cas", "ptr"].iter() { 421 for &target_has_atomic in ["8", "16", "32", "64", "cas", "ptr"].iter() {
402 cfg_options.insert_key_value("target_has_atomic".into(), target_has_atomic.into()) 422 cfg_options.insert_key_value("target_has_atomic".into(), target_has_atomic.into());
423 cfg_options
424 .insert_key_value("target_has_atomic_load_store".into(), target_has_atomic.into());
403 } 425 }
404 } 426 }
405 427
406 match (|| -> Result<_> { 428 match (|| -> Result<String> {
407 // `cfg(test)` and `cfg(debug_assertion)` are handled outside, so we suppress them here. 429 // `cfg(test)` and `cfg(debug_assertion)` are handled outside, so we suppress them here.
408 let output = Command::new("rustc").args(&["--print", "cfg", "-O"]).output()?; 430 let output = Command::new("rustc")
431 .args(&["--print", "cfg", "-O"])
432 .output()
433 .context("Failed to get output from rustc --print cfg -O")?;
409 if !output.status.success() { 434 if !output.status.success() {
410 Err("failed to get rustc cfgs")?; 435 bail!(
436 "rustc --print cfg -O exited with exit code ({})",
437 output
438 .status
439 .code()
440 .map_or(String::from("no exit code"), |code| format!("{}", code))
441 );
411 } 442 }
412 Ok(String::from_utf8(output.stdout)?) 443 Ok(String::from_utf8(output.stdout)?)
413 })() { 444 })() {
diff --git a/crates/ra_project_model/src/sysroot.rs b/crates/ra_project_model/src/sysroot.rs
index a23265fc0..7b9cc899c 100644
--- a/crates/ra_project_model/src/sysroot.rs
+++ b/crates/ra_project_model/src/sysroot.rs
@@ -1,5 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use anyhow::{anyhow, bail, Context, Result};
3use std::{ 4use std::{
4 env, 5 env,
5 path::{Path, PathBuf}, 6 path::{Path, PathBuf},
@@ -8,8 +9,6 @@ use std::{
8 9
9use ra_arena::{impl_arena_id, Arena, RawId}; 10use ra_arena::{impl_arena_id, Arena, RawId};
10 11
11use crate::Result;
12
13#[derive(Default, Debug, Clone)] 12#[derive(Default, Debug, Clone)]
14pub struct Sysroot { 13pub struct Sysroot {
15 crates: Arena<SysrootCrate, SysrootCrateData>, 14 crates: Arena<SysrootCrate, SysrootCrateData>,
@@ -51,7 +50,7 @@ impl Sysroot {
51 let src = try_find_src_path(cargo_toml)?; 50 let src = try_find_src_path(cargo_toml)?;
52 51
53 if !src.exists() { 52 if !src.exists() {
54 Err(format!( 53 Err(anyhow!(
55 "can't load standard library from sysroot\n\ 54 "can't load standard library from sysroot\n\
56 {}\n\ 55 {}\n\
57 (discovered via `rustc --print sysroot`)\n\ 56 (discovered via `rustc --print sysroot`)\n\
@@ -100,9 +99,14 @@ fn try_find_src_path(cargo_toml: &Path) -> Result<PathBuf> {
100 .current_dir(cargo_toml.parent().unwrap()) 99 .current_dir(cargo_toml.parent().unwrap())
101 .args(&["--print", "sysroot"]) 100 .args(&["--print", "sysroot"])
102 .output() 101 .output()
103 .map_err(|e| format!("rustc --print sysroot failed: {}", e))?; 102 .context("rustc --print sysroot failed")?;
104 if !rustc_output.status.success() { 103 if !rustc_output.status.success() {
105 Err("failed to locate sysroot")?; 104 match rustc_output.status.code() {
105 Some(code) => {
106 bail!("failed to locate sysroot: rustc --print sysroot exited with code {}", code)
107 }
108 None => bail!("failed to locate sysroot: rustc --print sysroot terminated by signal"),
109 };
106 } 110 }
107 let stdout = String::from_utf8(rustc_output.stdout)?; 111 let stdout = String::from_utf8(rustc_output.stdout)?;
108 let sysroot_path = Path::new(stdout.trim()); 112 let sysroot_path = Path::new(stdout.trim());
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
new file mode 100644
index 000000000..1ebbe5b03
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
@@ -0,0 +1 @@
type Foo = fn(_: bar);
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt
new file mode 100644
index 000000000..52d8f21a4
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt
@@ -0,0 +1,26 @@
1SOURCE_FILE@[0; 23)
2 TYPE_ALIAS_DEF@[0; 22)
3 TYPE_KW@[0; 4) "type"
4 WHITESPACE@[4; 5) " "
5 NAME@[5; 8)
6 IDENT@[5; 8) "Foo"
7 WHITESPACE@[8; 9) " "
8 EQ@[9; 10) "="
9 WHITESPACE@[10; 11) " "
10 FN_POINTER_TYPE@[11; 21)
11 FN_KW@[11; 13) "fn"
12 PARAM_LIST@[13; 21)
13 L_PAREN@[13; 14) "("
14 PARAM@[14; 20)
15 PLACEHOLDER_PAT@[14; 15)
16 UNDERSCORE@[14; 15) "_"
17 COLON@[15; 16) ":"
18 WHITESPACE@[16; 17) " "
19 PATH_TYPE@[17; 20)
20 PATH@[17; 20)
21 PATH_SEGMENT@[17; 20)
22 NAME_REF@[17; 20)
23 IDENT@[17; 20) "bar"
24 R_PAREN@[20; 21) ")"
25 SEMI@[21; 22) ";"
26 WHITESPACE@[22; 23) "\n"
diff --git a/docs/user/README.md b/docs/user/README.md
deleted file mode 100644
index 14ca6fd64..000000000
--- a/docs/user/README.md
+++ /dev/null
@@ -1,280 +0,0 @@
1[github-releases]: https://github.com/rust-analyzer/rust-analyzer/releases
2
3The main interface to rust-analyzer is the
4[LSP](https://microsoft.github.io/language-server-protocol/) implementation. To
5install lsp server, you have three options:
6
7* **Preferred and default:** install the plugin/extension for your IDE and it will ask your permission to automatically download the latest lsp server for you from [GitHub releases][github-releases]. (See docs to find out whether this is implemented for your editor below).
8* Manually download prebuilt binaries from [GitHub releases][github-releases]
9 * `ra_lsp_server-linux` for Linux
10 * `ra_lsp_server-mac` for Mac
11 * `ra_lsp_server-windows.exe` for Windows
12* Clone the repository and build from sources
13```bash
14$ git clone [email protected]:rust-analyzer/rust-analyzer && cd rust-analyzer
15$ cargo xtask install --server # or cargo install --path ./crates/ra_lsp_server
16```
17
18This way you will get a binary named `ra_lsp_server` (with os suffix for prebuilt binaries)
19which you should be able to use with any LSP-compatible editor.
20
21We make use of custom extensions to LSP, so special client-side support is required to take full
22advantage of rust-analyzer. This repository contains support code for VS Code.
23
24Rust Analyzer needs sources of rust standard library to work, so
25you might also need to execute
26
27```
28$ rustup component add rust-src
29```
30
31See [./features.md](./features.md) document for a list of features that are available.
32
33## VS Code
34
35### Prerequisites
36
37You will need the most recent version of VS Code: we don't try to
38maintain compatibility with older versions yet.
39
40### Installation from prebuilt binaries
41
42We ship prebuilt binaries for Linux, Mac and Windows via
43[GitHub releases][github-releases].
44In order to use them you need to install the client VSCode extension.
45
46Publishing to VS Code marketplace is currently WIP. Thus, you need to manually download
47`rust-analyzer-0.1.0.vsix` file from latest [GitHub release][github-releases].
48
49After you downloaded the `.vsix` file you can install it from the terminal
50
51```
52$ code --install-extension rust-analyzer-0.1.0.vsix
53```
54
55Or open VS Code, press <kbd>Ctrl+Shift+P</kbd>, and search for the following command:
56
57<img width="500px" alt="Install from VSIX command" src="https://user-images.githubusercontent.com/36276403/74108225-c0c11d80-4b80-11ea-9b2a-0a43f09e29af.png">
58
59Press <kbd>Enter</kbd> and go to `rust-analyzer-0.1.0.vsix` file through the file explorer.
60
61Then open some Rust project and you should
62see an info message pop-up.
63
64<img height="140px" src="https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png" alt="Download now message"/>
65
66
67Click `Download now`, wait until the progress is 100% and you are ready to go.
68
69For updates you need to remove installed binary
70```
71rm -rf ${HOME}/.config/Code/User/globalStorage/matklad.rust-analyzer
72```
73
74`"Download latest language server"` command for VSCode and automatic updates detection is currently WIP.
75
76
77### Installation from sources
78
79In order to build the VS Code plugin from sources, you need to have node.js and npm with
80a minimum version of 12 installed. Please refer to
81[node.js and npm documentation](https://nodejs.org) for installation instructions.
82
83The experimental VS Code plugin can be built and installed by executing the
84following commands:
85
86```
87$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
88$ cd rust-analyzer
89$ cargo xtask install
90```
91
92After that you need to amend your `settings.json` file to explicitly specify the
93path to `ra_lsp_server` that you've just built.
94```json
95{
96 "rust-analyzer.raLspServerPath": "ra_lsp_server"
97}
98```
99This should work on all platforms, otherwise if installed `ra_lsp_server` is not available through your `$PATH` then see how to configure it [here](#setting-up-the-PATH-variable).
100
101
102The automatic installation is expected to *just work* for common cases, if it
103doesn't, report bugs!
104
105**Note** [#1831](https://github.com/rust-analyzer/rust-analyzer/issues/1831): If you are using the popular
106[Vim emulation plugin](https://github.com/VSCodeVim/Vim), you will likely
107need to turn off the `rust-analyzer.enableEnhancedTyping` setting.
108(// TODO: This configuration is no longer available, enhanced typing shoud be disabled via removing Enter key binding, [see this issue](https://github.com/rust-analyzer/rust-analyzer/issues/3051))
109
110If you have an unusual setup (for example, `code` is not in the `PATH`), you
111should adapt these manual installation instructions:
112
113```
114$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
115$ cd rust-analyzer
116$ cargo install --path ./crates/ra_lsp_server/ --force --locked
117$ cd ./editors/code
118$ npm install
119$ npm run package
120$ code --install-extension ./rust-analyzer-0.1.0.vsix
121```
122
123It's better to remove existing Rust plugins to avoid interference.
124
125Beyond basic LSP features, there are some extension commands which you can
126invoke via <kbd>Ctrl+Shift+P</kbd> or bind to a shortcut. See [./features.md](./features.md)
127for details.
128
129For updates, pull the latest changes from the master branch, run `cargo xtask install` again, and **restart** VS Code instance.
130See [microsoft/vscode#72308](https://github.com/microsoft/vscode/issues/72308) for why a full restart is needed.
131
132### VS Code Remote
133
134You can also use `rust-analyzer` with the Visual Studio Code Remote extensions
135(Remote SSH, Remote WSL, Remote Containers). In this case, however, you have to
136manually install the `.vsix` package:
137
1381. Build the extension on the remote host using the instructions above (ignore the
139 error if `code` cannot be found in your PATH: VSCode doesn't need to be installed
140 on the remote host).
1412. In Visual Studio Code open a connection to the remote host.
1423. Open the Extensions View (`View > Extensions`, keyboard shortcut: `Ctrl+Shift+X`).
1434. From the top-right kebab menu (`···`) select `Install from VSIX...`
1445. Inside the `rust-analyzer` directory find the `editors/code` subdirectory and choose
145 the `rust-analyzer-0.1.0.vsix` file.
1466. Restart Visual Studio Code and re-establish the connection to the remote host.
147
148In case of errors please make sure that `~/.cargo/bin` is in your `PATH` on the remote
149host.
150
151### Settings
152
153* `rust-analyzer.highlightingOn`: enables experimental syntax highlighting.
154 Colors can be configured via `editor.tokenColorCustomizations`.
155 As an example, [Pale Fire](https://github.com/matklad/pale-fire/) color scheme tweaks rust colors.
156* `rust-analyzer.enableEnhancedTyping`: by default, rust-analyzer intercepts the
157 `Enter` key to make it easier to continue comments. Note that it may conflict with VIM emulation plugin.
158* `rust-analyzer.raLspServerPath`: path to `ra_lsp_server` executable, when absent or `null` defaults to prebuilt binary path
159* `rust-analyzer.enableCargoWatchOnStartup`: prompt to install & enable `cargo
160 watch` for live error highlighting (note, this **does not** use rust-analyzer)
161* `rust-analyzer.excludeGlobs`: a list of glob-patterns for exclusion (see globset [docs](https://docs.rs/globset) for syntax).
162 Note: glob patterns are applied to all Cargo packages and a rooted at a package root.
163 This is not very intuitive and a limitation of a current implementation.
164* `rust-analyzer.useClientWatching`: use client provided file watching instead
165 of notify watching.
166* `rust-analyzer.cargo-watch.command`: `cargo-watch` command. (e.g: `clippy` will run as `cargo watch -x clippy` )
167* `rust-analyzer.cargo-watch.arguments`: cargo-watch check arguments.
168 (e.g: `--features="shumway,pdf"` will run as `cargo watch -x "check --features="shumway,pdf""` )
169* `rust-analyzer.cargo-watch.ignore`: list of patterns for cargo-watch to ignore (will be passed as `--ignore`)
170* `rust-analyzer.trace.server`: enables internal logging
171* `rust-analyzer.trace.cargo-watch`: enables cargo-watch logging
172* `RUST_SRC_PATH`: environment variable that overwrites the sysroot
173* `rust-analyzer.featureFlags` -- a JSON object to tweak fine-grained behavior:
174 ```jsonc
175 {
176 // Show diagnostics produced by rust-analyzer itself.
177 "lsp.diagnostics": true,
178 // Automatically insert `()` and `<>` when completing functions and types.
179 "completion.insertion.add-call-parenthesis": true,
180 // Enable completions like `.if`, `.match`, etc.
181 "completion.enable-postfix": true,
182 // Show notification when workspace is fully loaded
183 "notifications.workspace-loaded": true,
184 // Show error when no Cargo.toml was found
185 "notifications.cargo-toml-not-found": true,
186 }
187 ```
188
189
190## Emacs
191
192* install recent version of `emacs-lsp` package by following the instructions [here][emacs-lsp]
193* set `lsp-rust-server` to `'rust-analyzer`
194* run `lsp` in a Rust buffer
195* (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys
196
197[emacs-lsp]: https://github.com/emacs-lsp/lsp-mode
198
199
200## Vim and NeoVim (coc-rust-analyzer)
201
202* Install coc.nvim by following the instructions at [coc.nvim][] (nodejs required)
203* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implements _most_ of the features supported in the VSCode extension:
204 - same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
205 - same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
206 - highlighting and inlay_hints are not implemented yet
207
208[coc.nvim]: https://github.com/neoclide/coc.nvim
209[coc-rust-analyzer]: https://github.com/fannheyward/coc-rust-analyzer
210
211## Vim and NeoVim (LanguageClient-neovim)
212
213* Install LanguageClient-neovim by following the instructions [here][lang-client-neovim]
214 - The github project wiki has extra tips on configuration
215
216* Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
217
218```vim
219let g:LanguageClient_serverCommands = {
220\ 'rust': ['ra_lsp_server'],
221\ }
222```
223
224[lang-client-neovim]: https://github.com/autozimu/LanguageClient-neovim
225
226## NeoVim (nvim-lsp)
227
228NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration
229of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
230Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`.
231
232
233## Sublime Text 3
234
235Prequisites:
236
237`LSP` package.
238
239Installation:
240
241* Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
242* Type `LSP Settings` to open the LSP preferences editor
243* Add the following LSP client definition to your settings:
244
245```json
246"rust-analyzer": {
247 "command": ["ra_lsp_server"],
248 "languageId": "rust",
249 "scopes": ["source.rust"],
250 "syntaxes": [
251 "Packages/Rust/Rust.sublime-syntax",
252 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
253 ],
254 "initializationOptions": {
255 "featureFlags": {
256 }
257 },
258}
259```
260
261* You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
262
263
264<!-- Update links to this header when changing it! -->
265### Setting up the `PATH` variable
266
267On Unix systems, `rustup` adds `~/.cargo/bin` to `PATH` by modifying the shell's
268startup file. Depending on your configuration, your Desktop Environment might not
269actually load it. If you find that `rust-analyzer` only runs when starting the
270editor from the terminal, you will have to set up your `PATH` variable manually.
271
272There are a couple of ways to do that:
273
274- for Code, set `rust-analyzer.raLspServerPath` to `~/.cargo/bin` (the `~` is
275 automatically resolved by the extension)
276- copy the binary to a location that is already in `PATH`, e.g. `/usr/local/bin`
277- on Linux, use PAM to configure the `PATH` variable, by e.g. putting
278 `PATH DEFAULT=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:@{HOME}/.cargo/bin:@{HOME}/.local/bin`
279 in your `~/.pam_environment` file; note that this might interfere with other
280 defaults set by the system administrator via `/etc/environment`.
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
new file mode 100644
index 000000000..867aae975
--- /dev/null
+++ b/docs/user/readme.adoc
@@ -0,0 +1,155 @@
1= User Manual
2:toc: preamble
3:sectanchors:
4:page-layout: post
5
6
7// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository
8
9At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
10This manual focuses on a specific usage of the library -- the implementation of
11https://microsoft.github.io/language-server-protocol/[Language Server Protocol].
12LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
13
14To improve this document, send a pull request against
15https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file].
16
17== Installation
18
19In theory, one should be able to just install the server binary and have it automatically work with any editor.
20We are not there yet, so some editor specific setup is required.
21
22=== VS Code
23
24This is the best supported editor at the moment.
25The rust-analyzer plugin for VS Code is maintained
26https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree].
27
28You can install the latest release of the plugin from
29https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace].
30By default, the plugin will download the latest version of the server as well.
31
32image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[]
33
34The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`.
35
36Note that we only support the latest version of VS Code.
37
38==== Updates
39
40The extension will be updated automatically as new versions become available.
41The server update functionality is in progress.
42For the time being, the workaround is to remove the binary from `globalStorage` and to restart the extension.
43
44==== Building From Source
45
46Alternatively, both the server and the plugin can be installed from source:
47
48[source]
49----
50$ git clone https://github.com/rust-analyzer/rust-analyzer.git && cd rust-analyzer
51$ cargo xtask install
52----
53
54You'll need Cargo, nodejs and npm for this.
55To make VS Code use the freshly built server, add this to the settings:
56
57[source,json]
58----
59{ "rust-analyzer.raLspServerPath": "ra_lsp_server" }
60----
61
62Note that installing via `xtask install` does not work for VS Code Remote; instead, you'll need to install the `.vsix` manually.
63
64=== Language Server Binary
65
66Other editors generally require the `ra_lsp_server` binary to be in `$PATH`.
67You can download a pre-built binary from the
68https://github.com/rust-analyzer/rust-analyzer/releases[releases]
69page, or you can install it from source using the following command:
70
71[source,bash]
72----
73$ cargo xtask install --server
74----
75
76=== Emacs
77
78Emacs support is maintained https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[upstream].
79
801. Install a recent version of the `emacs-lsp` package by following the instructions https://github.com/emacs-lsp/lsp-mode[here].
812. Set `lsp-rust-server` to `'rust-analyzer`.
823. Run `lsp` in a Rust buffer.
834. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
84
85=== Vim
86
87There are several LSP client implementations for vim:
88
89==== coc-rust-analyzer
90
911. Install coc.nvim by following the instructions at
92 https://github.com/neoclide/coc.nvim[coc.nvim]
93 (nodejs required)
942. Run `:CocInstall coc-rust-analyzer` to install
95 https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
96 this extension implements _most_ of the features supported in the VSCode extension:
97 * same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
98 * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
99 * highlighting and inlay_hints are not implemented yet
100
101==== LanguageClient-neovim
102
1031. Install LanguageClient-neovim by following the instructions
104 https://github.com/autozimu/LanguageClient-neovim[here]
105 * The github project wiki has extra tips on configuration
106
1072. Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
108+
109[source,vim]
110----
111let g:LanguageClient_serverCommands = {
112\ 'rust': ['ra_lsp_server'],
113\ }
114----
115
116==== nvim-lsp
117
118NeoVim 0.5 (not yet released) has built-in language server support.
119For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lsp#rust_analyzer[neovim/nvim-lsp].
120Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`.
121
122=== Sublime Text 3
123
124Prerequisites:
125
126`LSP` package.
127
128Installation:
129
1301. Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
1312. Type `LSP Settings` to open the LSP preferences editor
1323. Add the following LSP client definition to your settings:
133+
134[source,json]
135----
136"rust-analyzer": {
137 "command": ["ra_lsp_server"],
138 "languageId": "rust",
139 "scopes": ["source.rust"],
140 "syntaxes": [
141 "Packages/Rust/Rust.sublime-syntax",
142 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
143 ],
144 "initializationOptions": {
145 "featureFlags": {
146 }
147 },
148}
149----
150
1514. You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
152
153== Usage
154
155See https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/features.md[features.md].
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 5c056463e..c74078735 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -1,6 +1,6 @@
1{ 1{
2 "name": "rust-analyzer", 2 "name": "rust-analyzer",
3 "version": "0.1.0", 3 "version": "0.2.0-dev",
4 "lockfileVersion": 1, 4 "lockfileVersion": 1,
5 "requires": true, 5 "requires": true,
6 "dependencies": { 6 "dependencies": {
@@ -107,9 +107,9 @@
107 "dev": true 107 "dev": true
108 }, 108 },
109 "@types/vscode": { 109 "@types/vscode": {
110 "version": "1.41.0", 110 "version": "1.42.0",
111 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.41.0.tgz", 111 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.42.0.tgz",
112 "integrity": "sha512-7SfeY5u9jgiELwxyLB3z7l6l/GbN9CqpCQGkcRlB7tKRFBxzbz2PoBfGrLxI1vRfUCIq5+hg5vtDHExwq5j3+A==", 112 "integrity": "sha512-ds6TceMsh77Fs0Mq0Vap6Y72JbGWB8Bay4DrnJlf5d9ui2RSe1wis13oQm+XhguOeH1HUfLGzaDAoupTUtgabw==",
113 "dev": true 113 "dev": true
114 }, 114 },
115 "acorn": { 115 "acorn": {
@@ -662,9 +662,9 @@
662 } 662 }
663 }, 663 },
664 "readable-stream": { 664 "readable-stream": {
665 "version": "3.4.0", 665 "version": "3.6.0",
666 "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", 666 "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
667 "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", 667 "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
668 "dev": true, 668 "dev": true,
669 "requires": { 669 "requires": {
670 "inherits": "^2.0.3", 670 "inherits": "^2.0.3",
@@ -860,9 +860,9 @@
860 "dev": true 860 "dev": true
861 }, 861 },
862 "vsce": { 862 "vsce": {
863 "version": "1.71.0", 863 "version": "1.73.0",
864 "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.71.0.tgz", 864 "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.73.0.tgz",
865 "integrity": "sha512-7k+LPC4oJYPyyxs0a5nh4A8CleQ6+2EMPiAiX/bDyN+PmwJFm2FFPqLRxdIsIWfFnkW4ZMQBf10+W62dCRd9kQ==", 865 "integrity": "sha512-6W37Ebbkj3uF3WhT+SCfRtsneRQEFcGvf/XYz+b6OAgDCj4gPurWyDVrqw/HLsbP1WflGIyUfVZ8t5M7kQp6Uw==",
866 "dev": true, 866 "dev": true,
867 "requires": { 867 "requires": {
868 "azure-devops-node-api": "^7.2.0", 868 "azure-devops-node-api": "^7.2.0",
diff --git a/editors/code/package.json b/editors/code/package.json
index 12d32cef7..a607c2148 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -5,7 +5,8 @@
5 "preview": true, 5 "preview": true,
6 "private": true, 6 "private": true,
7 "icon": "icon.png", 7 "icon": "icon.png",
8 "version": "0.1.0", 8 "//": "The real version is in release.yaml, this one just needs to be bigger",
9 "version": "0.2.0-dev",
9 "publisher": "matklad", 10 "publisher": "matklad",
10 "repository": { 11 "repository": {
11 "url": "https://github.com/rust-analyzer/rust-analyzer.git", 12 "url": "https://github.com/rust-analyzer/rust-analyzer.git",
@@ -15,7 +16,7 @@
15 "Other" 16 "Other"
16 ], 17 ],
17 "engines": { 18 "engines": {
18 "vscode": "^1.41.0" 19 "vscode": "^1.42.0"
19 }, 20 },
20 "scripts": { 21 "scripts": {
21 "vscode:prepublish": "tsc && rollup -c", 22 "vscode:prepublish": "tsc && rollup -c",
@@ -35,13 +36,13 @@
35 "@types/node": "^12.12.25", 36 "@types/node": "^12.12.25",
36 "@types/node-fetch": "^2.5.4", 37 "@types/node-fetch": "^2.5.4",
37 "@types/throttle-debounce": "^2.1.0", 38 "@types/throttle-debounce": "^2.1.0",
38 "@types/vscode": "^1.41.0", 39 "@types/vscode": "^1.42.0",
39 "rollup": "^1.31.0", 40 "rollup": "^1.31.0",
40 "tslib": "^1.10.0", 41 "tslib": "^1.10.0",
41 "tslint": "^5.20.1", 42 "tslint": "^5.20.1",
42 "typescript": "^3.7.5", 43 "typescript": "^3.7.5",
43 "typescript-formatter": "^7.2.2", 44 "typescript-formatter": "^7.2.2",
44 "vsce": "^1.71.0" 45 "vsce": "^1.73.0"
45 }, 46 },
46 "activationEvents": [ 47 "activationEvents": [
47 "onLanguage:rust", 48 "onLanguage:rust",
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 4484b2167..dcf9d0c06 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -3,6 +3,7 @@ import * as vscode from 'vscode';
3 3
4import { Config } from './config'; 4import { Config } from './config';
5import { ensureLanguageServerBinary } from './installation/language_server'; 5import { ensureLanguageServerBinary } from './installation/language_server';
6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
6 7
7export async function createClient(config: Config): Promise<null | lc.LanguageClient> { 8export async function createClient(config: Config): Promise<null | lc.LanguageClient> {
8 // '.' Is the fallback if no folder is open 9 // '.' Is the fallback if no folder is open
@@ -80,6 +81,10 @@ export async function createClient(config: Config): Promise<null | lc.LanguageCl
80 } 81 }
81 }, 82 },
82 }; 83 };
83 res.registerProposedFeatures(); 84
85 // To turn on all proposed features use: res.registerProposedFeatures();
86 // Here we want to just enable CallHierarchyFeature since it is available on stable.
87 // Note that while the CallHierarchyFeature is stable the LSP protocol is not.
88 res.registerFeature(new CallHierarchyFeature(res));
84 return res; 89 return res;
85} 90}
diff --git a/editors/code/src/installation/download_file.ts b/editors/code/src/installation/download_file.ts
index f1f9f4a25..d154f4816 100644
--- a/editors/code/src/installation/download_file.ts
+++ b/editors/code/src/installation/download_file.ts
@@ -1,7 +1,11 @@
1import fetch from "node-fetch"; 1import fetch from "node-fetch";
2import * as fs from "fs"; 2import * as fs from "fs";
3import * as stream from "stream";
4import * as util from "util";
3import { strict as assert } from "assert"; 5import { strict as assert } from "assert";
4 6
7const pipeline = util.promisify(stream.pipeline);
8
5/** 9/**
6 * Downloads file from `url` and stores it at `destFilePath` with `destFilePermissions`. 10 * Downloads file from `url` and stores it at `destFilePath` with `destFilePermissions`.
7 * `onProgress` callback is called on recieveing each chunk of bytes 11 * `onProgress` callback is called on recieveing each chunk of bytes
@@ -20,25 +24,28 @@ export async function downloadFile(
20 console.log("Error", res.status, "while downloading file from", url); 24 console.log("Error", res.status, "while downloading file from", url);
21 console.dir({ body: await res.text(), headers: res.headers }, { depth: 3 }); 25 console.dir({ body: await res.text(), headers: res.headers }, { depth: 3 });
22 26
23 throw new Error(`Got response ${res.status} when trying to download a file`); 27 throw new Error(`Got response ${res.status} when trying to download a file.`);
24 } 28 }
25 29
26 const totalBytes = Number(res.headers.get('content-length')); 30 const totalBytes = Number(res.headers.get('content-length'));
27 assert(!Number.isNaN(totalBytes), "Sanity check of content-length protocol"); 31 assert(!Number.isNaN(totalBytes), "Sanity check of content-length protocol");
28 32
33 console.log("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath);
34
29 let readBytes = 0; 35 let readBytes = 0;
36 res.body.on("data", (chunk: Buffer) => {
37 readBytes += chunk.length;
38 onProgress(readBytes, totalBytes);
39 });
30 40
31 console.log("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath); 41 const destFileStream = fs.createWriteStream(destFilePath, { mode: destFilePermissions });
42
43 await pipeline(res.body, destFileStream);
44 return new Promise<void>(resolve => {
45 destFileStream.on("close", resolve);
46 destFileStream.destroy();
32 47
33 return new Promise<void>((resolve, reject) => res.body 48 // Details on workaround: https://github.com/rust-analyzer/rust-analyzer/pull/3092#discussion_r378191131
34 .on("data", (chunk: Buffer) => { 49 // Issue at nodejs repo: https://github.com/nodejs/node/issues/31776
35 readBytes += chunk.length; 50 });
36 onProgress(readBytes, totalBytes);
37 })
38 .on("error", reject)
39 .pipe(fs
40 .createWriteStream(destFilePath, { mode: destFilePermissions })
41 .on("close", resolve)
42 )
43 );
44} 51}
diff --git a/editors/code/src/installation/language_server.ts b/editors/code/src/installation/language_server.ts
index 52c5cbe7d..4797c3f01 100644
--- a/editors/code/src/installation/language_server.ts
+++ b/editors/code/src/installation/language_server.ts
@@ -104,6 +104,8 @@ export async function ensureLanguageServerBinary(
104 `GitHub repository: ${err.message}` 104 `GitHub repository: ${err.message}`
105 ); 105 );
106 106
107 console.error(err);
108
107 dns.resolve('example.com').then( 109 dns.resolve('example.com').then(
108 addrs => console.log("DNS resolution for example.com was successful", addrs), 110 addrs => console.log("DNS resolution for example.com was successful", addrs),
109 err => { 111 err => {
diff --git a/xtask/src/cmd.rs b/xtask/src/cmd.rs
deleted file mode 100644
index 37497fb74..000000000
--- a/xtask/src/cmd.rs
+++ /dev/null
@@ -1,56 +0,0 @@
1use std::process::{Command, Output, Stdio};
2
3use anyhow::{Context, Result};
4
5use crate::project_root;
6
7pub struct Cmd<'a> {
8 pub unix: &'a str,
9 pub windows: &'a str,
10 pub work_dir: &'a str,
11}
12
13impl Cmd<'_> {
14 pub fn run(self) -> Result<()> {
15 if cfg!(windows) {
16 run(self.windows, self.work_dir)
17 } else {
18 run(self.unix, self.work_dir)
19 }
20 }
21 pub fn run_with_output(self) -> Result<String> {
22 if cfg!(windows) {
23 run_with_output(self.windows, self.work_dir)
24 } else {
25 run_with_output(self.unix, self.work_dir)
26 }
27 }
28}
29
30pub fn run(cmdline: &str, dir: &str) -> Result<()> {
31 do_run(cmdline, dir, &mut |c| {
32 c.stdout(Stdio::inherit());
33 })
34 .map(|_| ())
35}
36
37pub fn run_with_output(cmdline: &str, dir: &str) -> Result<String> {
38 let output = do_run(cmdline, dir, &mut |_| {})?;
39 let stdout = String::from_utf8(output.stdout)?;
40 let stdout = stdout.trim().to_string();
41 Ok(stdout)
42}
43
44fn do_run(cmdline: &str, dir: &str, f: &mut dyn FnMut(&mut Command)) -> Result<Output> {
45 eprintln!("\nwill run: {}", cmdline);
46 let proj_dir = project_root().join(dir);
47 let mut args = cmdline.split_whitespace();
48 let exec = args.next().unwrap();
49 let mut cmd = Command::new(exec);
50 f(cmd.args(args).current_dir(proj_dir).stderr(Stdio::inherit()));
51 let output = cmd.output().with_context(|| format!("running `{}`", cmdline))?;
52 if !output.status.success() {
53 anyhow::bail!("`{}` exited with {}", cmdline, output.status);
54 }
55 Ok(output)
56}
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 99e1eddb1..540a66130 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -2,9 +2,9 @@
2 2
3use std::{env, path::PathBuf, str}; 3use std::{env, path::PathBuf, str};
4 4
5use anyhow::{Context, Result}; 5use anyhow::{bail, format_err, Context, Result};
6 6
7use crate::cmd::{run, run_with_output, Cmd}; 7use crate::not_bash::{ls, pushd, rm, run};
8 8
9// Latest stable, feel free to send a PR if this lags behind. 9// Latest stable, feel free to send a PR if this lags behind.
10const REQUIRED_RUST_VERSION: u32 = 41; 10const REQUIRED_RUST_VERSION: u32 = 41;
@@ -55,7 +55,7 @@ fn fix_path_for_mac() -> Result<()> {
55 const ROOT_DIR: &str = ""; 55 const ROOT_DIR: &str = "";
56 let home_dir = match env::var("HOME") { 56 let home_dir = match env::var("HOME") {
57 Ok(home) => home, 57 Ok(home) => home,
58 Err(e) => anyhow::bail!("Failed getting HOME from environment with error: {}.", e), 58 Err(e) => bail!("Failed getting HOME from environment with error: {}.", e),
59 }; 59 };
60 60
61 [ROOT_DIR, &home_dir] 61 [ROOT_DIR, &home_dir]
@@ -69,7 +69,7 @@ fn fix_path_for_mac() -> Result<()> {
69 if !vscode_path.is_empty() { 69 if !vscode_path.is_empty() {
70 let vars = match env::var_os("PATH") { 70 let vars = match env::var_os("PATH") {
71 Some(path) => path, 71 Some(path) => path,
72 None => anyhow::bail!("Could not get PATH variable from env."), 72 None => bail!("Could not get PATH variable from env."),
73 }; 73 };
74 74
75 let mut paths = env::split_paths(&vars).collect::<Vec<_>>(); 75 let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
@@ -82,84 +82,61 @@ fn fix_path_for_mac() -> Result<()> {
82} 82}
83 83
84fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { 84fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
85 let npm_version = Cmd { 85 let _dir = pushd("./editors/code");
86 unix: r"npm --version",
87 windows: r"cmd.exe /c npm --version",
88 work_dir: "./editors/code",
89 }
90 .run();
91
92 if npm_version.is_err() {
93 eprintln!("\nERROR: `npm --version` failed, `npm` is required to build the VS Code plugin")
94 }
95 86
96 Cmd { unix: r"npm install", windows: r"cmd.exe /c npm install", work_dir: "./editors/code" } 87 let find_code = |f: fn(&str) -> bool| -> Result<&'static str> {
97 .run()?; 88 ["code", "code-insiders", "codium", "code-oss"]
98 Cmd { 89 .iter()
99 unix: r"npm run package --scripts-prepend-node-path", 90 .copied()
100 windows: r"cmd.exe /c npm run package", 91 .find(|bin| f(bin))
101 work_dir: "./editors/code", 92 .ok_or_else(|| {
102 } 93 format_err!("Can't execute `code --version`. Perhaps it is not in $PATH?")
103 .run()?; 94 })
95 };
104 96
105 let code_binary = ["code", "code-insiders", "codium", "code-oss"].iter().find(|bin| { 97 let installed_extensions;
106 Cmd { 98 if cfg!(unix) {
107 unix: &format!("{} --version", bin), 99 run!("npm --version").context("`npm` is required to build the VS Code plugin")?;
108 windows: &format!("cmd.exe /c {}.cmd --version", bin), 100 run!("npm install")?;
109 work_dir: "./editors/code",
110 }
111 .run()
112 .is_ok()
113 });
114 101
115 let code_binary = match code_binary { 102 let vsix_pkg = {
116 Some(it) => it, 103 rm("*.vsix")?;
117 None => anyhow::bail!("Can't execute `code --version`. Perhaps it is not in $PATH?"), 104 run!("npm run package --scripts-prepend-node-path")?;
118 }; 105 ls("*.vsix")?.pop().unwrap()
106 };
119 107
120 Cmd { 108 let code = find_code(|bin| run!("{} --version", bin).is_ok())?;
121 unix: &format!(r"{} --install-extension ./rust-analyzer-0.1.0.vsix --force", code_binary), 109 run!("{} --install-extension {} --force", code, vsix_pkg.display())?;
122 windows: &format!( 110 installed_extensions = run!("{} --list-extensions", code; echo = false)?;
123 r"cmd.exe /c {}.cmd --install-extension ./rust-analyzer-0.1.0.vsix --force", 111 } else {
124 code_binary 112 run!("cmd.exe /c npm --version")
125 ), 113 .context("`npm` is required to build the VS Code plugin")?;
126 work_dir: "./editors/code", 114 run!("cmd.exe /c npm install")?;
127 } 115
128 .run()?; 116 let vsix_pkg = {
117 rm("*.vsix")?;
118 run!("cmd.exe /c npm run package")?;
119 ls("*.vsix")?.pop().unwrap()
120 };
129 121
130 let installed_extensions = Cmd { 122 let code = find_code(|bin| run!("cmd.exe /c {}.cmd --version", bin).is_ok())?;
131 unix: &format!(r"{} --list-extensions", code_binary), 123 run!(r"cmd.exe /c {}.cmd --install-extension {} --force", code, vsix_pkg.display())?;
132 windows: &format!(r"cmd.exe /c {}.cmd --list-extensions", code_binary), 124 installed_extensions = run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)?;
133 work_dir: ".",
134 } 125 }
135 .run_with_output()?;
136 126
137 if !installed_extensions.contains("rust-analyzer") { 127 if !installed_extensions.contains("rust-analyzer") {
138 anyhow::bail!( 128 bail!(
139 "Could not install the Visual Studio Code extension. \ 129 "Could not install the Visual Studio Code extension. \
140 Please make sure you have at least NodeJS 10.x together with the latest version of VS Code installed and try again." 130 Please make sure you have at least NodeJS 10.x together with the latest version of VS Code installed and try again."
141 ); 131 );
142 } 132 }
143 133
144 if installed_extensions.contains("ra-lsp") {
145 Cmd {
146 unix: &format!(r"{} --uninstall-extension matklad.ra-lsp", code_binary),
147 windows: &format!(
148 r"cmd.exe /c {}.cmd --uninstall-extension matklad.ra-lsp",
149 code_binary
150 ),
151 work_dir: "./editors/code",
152 }
153 .run()?;
154 }
155
156 Ok(()) 134 Ok(())
157} 135}
158 136
159fn install_server(opts: ServerOpt) -> Result<()> { 137fn install_server(opts: ServerOpt) -> Result<()> {
160 let mut old_rust = false; 138 let mut old_rust = false;
161 if let Ok(stdout) = run_with_output("cargo --version", ".") { 139 if let Ok(stdout) = run!("cargo --version") {
162 println!("{}", stdout);
163 if !check_version(&stdout, REQUIRED_RUST_VERSION) { 140 if !check_version(&stdout, REQUIRED_RUST_VERSION) {
164 old_rust = true; 141 old_rust = true;
165 } 142 }
@@ -172,20 +149,17 @@ fn install_server(opts: ServerOpt) -> Result<()> {
172 ) 149 )
173 } 150 }
174 151
175 let res = if opts.jemalloc { 152 let jemalloc = if opts.jemalloc { "--features jemalloc" } else { "" };
176 run("cargo install --path crates/ra_lsp_server --locked --force --features jemalloc", ".") 153 let res = run!("cargo install --path crates/ra_lsp_server --locked --force {}", jemalloc);
177 } else {
178 run("cargo install --path crates/ra_lsp_server --locked --force", ".")
179 };
180 154
181 if res.is_err() && old_rust { 155 if res.is_err() && old_rust {
182 eprintln!( 156 eprintln!(
183 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n", 157 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
184 REQUIRED_RUST_VERSION, 158 REQUIRED_RUST_VERSION,
185 ) 159 );
186 } 160 }
187 161
188 res 162 res.map(drop)
189} 163}
190 164
191fn check_version(version_output: &str, min_minor_version: u32) -> bool { 165fn check_version(version_output: &str, min_minor_version: u32) -> bool {
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index 1bb1882b0..2bcd76d60 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3mod cmd; 3pub mod not_bash;
4pub mod install; 4pub mod install;
5pub mod pre_commit; 5pub mod pre_commit;
6 6
@@ -9,15 +9,15 @@ mod ast_src;
9 9
10use anyhow::Context; 10use anyhow::Context;
11use std::{ 11use std::{
12 env, fs, 12 env,
13 io::Write, 13 io::Write,
14 path::{Path, PathBuf}, 14 path::{Path, PathBuf},
15 process::{Command, Stdio}, 15 process::{Command, Stdio},
16}; 16};
17 17
18use crate::{ 18use crate::{
19 cmd::{run, run_with_output},
20 codegen::Mode, 19 codegen::Mode,
20 not_bash::{fs2, pushd, rm_rf, run},
21}; 21};
22 22
23pub use anyhow::Result; 23pub use anyhow::Result;
@@ -38,9 +38,9 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> {
38 ensure_rustfmt()?; 38 ensure_rustfmt()?;
39 39
40 if mode == Mode::Verify { 40 if mode == Mode::Verify {
41 run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; 41 run!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN)?;
42 } else { 42 } else {
43 run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; 43 run!("rustup run {} -- cargo fmt", TOOLCHAIN)?;
44 } 44 }
45 Ok(()) 45 Ok(())
46} 46}
@@ -70,8 +70,9 @@ fn ensure_rustfmt() -> Result<()> {
70 Ok(status) if status.success() => return Ok(()), 70 Ok(status) if status.success() => return Ok(()),
71 _ => (), 71 _ => (),
72 }; 72 };
73 run(&format!("rustup toolchain install {}", TOOLCHAIN), ".")?; 73 run!("rustup toolchain install {}", TOOLCHAIN)?;
74 run(&format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN), ".") 74 run!("rustup component add rustfmt --toolchain {}", TOOLCHAIN)?;
75 Ok(())
75} 76}
76 77
77pub fn run_clippy() -> Result<()> { 78pub fn run_clippy() -> Result<()> {
@@ -92,34 +93,28 @@ pub fn run_clippy() -> Result<()> {
92 "clippy::nonminimal_bool", 93 "clippy::nonminimal_bool",
93 "clippy::redundant_pattern_matching", 94 "clippy::redundant_pattern_matching",
94 ]; 95 ];
95 run( 96 run!(
96 &format!( 97 "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}",
97 "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}", 98 TOOLCHAIN,
98 TOOLCHAIN, 99 allowed_lints.join(" -A ")
99 allowed_lints.join(" -A ")
100 ),
101 ".",
102 )?; 100 )?;
103 Ok(()) 101 Ok(())
104} 102}
105 103
106fn install_clippy() -> Result<()> { 104fn install_clippy() -> Result<()> {
107 run(&format!("rustup toolchain install {}", TOOLCHAIN), ".")?; 105 run!("rustup toolchain install {}", TOOLCHAIN)?;
108 run(&format!("rustup component add clippy --toolchain {}", TOOLCHAIN), ".") 106 run!("rustup component add clippy --toolchain {}", TOOLCHAIN)?;
107 Ok(())
109} 108}
110 109
111pub fn run_fuzzer() -> Result<()> { 110pub fn run_fuzzer() -> Result<()> {
112 match Command::new("cargo") 111 let _d = pushd("./crates/ra_syntax");
113 .args(&["fuzz", "--help"]) 112 if run!("cargo fuzz --help").is_err() {
114 .stderr(Stdio::null()) 113 run!("cargo install cargo-fuzz")?;
115 .stdout(Stdio::null())
116 .status()
117 {
118 Ok(status) if status.success() => (),
119 _ => run("cargo install cargo-fuzz", ".")?,
120 }; 114 };
121 115
122 run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax") 116 run!("rustup run nightly -- cargo fuzz run parser")?;
117 Ok(())
123} 118}
124 119
125/// Cleans the `./target` dir after the build such that only 120/// Cleans the `./target` dir after the build such that only
@@ -141,7 +136,7 @@ pub fn run_pre_cache() -> Result<()> {
141 } 136 }
142 } 137 }
143 138
144 fs::remove_file("./target/.rustc_info.json")?; 139 fs2::remove_file("./target/.rustc_info.json")?;
145 let to_delete = ["ra_", "heavy_test"]; 140 let to_delete = ["ra_", "heavy_test"];
146 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { 141 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() {
147 for entry in Path::new(dir).read_dir()? { 142 for entry in Path::new(dir).read_dir()? {
@@ -155,22 +150,20 @@ pub fn run_pre_cache() -> Result<()> {
155 Ok(()) 150 Ok(())
156} 151}
157 152
158fn rm_rf(path: &Path) -> Result<()> { 153pub fn run_release(dry_run: bool) -> Result<()> {
159 if path.is_file() { fs::remove_file(path) } else { fs::remove_dir_all(path) } 154 if !dry_run {
160 .with_context(|| format!("failed to remove {:?}", path)) 155 run!("git switch release")?;
161} 156 run!("git fetch upstream")?;
162 157 run!("git reset --hard upstream/master")?;
163pub fn run_release() -> Result<()> { 158 run!("git push")?;
164 run("git switch release", ".")?; 159 }
165 run("git fetch upstream", ".")?;
166 run("git reset --hard upstream/master", ".")?;
167 run("git push", ".")?;
168 160
169 let changelog_dir = project_root().join("../rust-analyzer.github.io/thisweek/_posts"); 161 let website_root = project_root().join("../rust-analyzer.github.io");
162 let changelog_dir = website_root.join("./thisweek/_posts");
170 163
171 let today = run_with_output("date --iso", ".")?; 164 let today = run!("date --iso")?;
172 let commit = run_with_output("git rev-parse HEAD", ".")?; 165 let commit = run!("git rev-parse HEAD")?;
173 let changelog_n = fs::read_dir(changelog_dir.as_path())?.count(); 166 let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count();
174 167
175 let contents = format!( 168 let contents = format!(
176 "\ 169 "\
@@ -193,7 +186,9 @@ Release: release:{}[]
193 ); 186 );
194 187
195 let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n)); 188 let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
196 fs::write(&path, &contents)?; 189 fs2::write(&path, &contents)?;
190
191 fs2::copy(project_root().join("./docs/user/readme.adoc"), website_root.join("manual.adoc"))?;
197 192
198 Ok(()) 193 Ok(())
199} 194}
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index 7ca727bde..a7dffe2cc 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -93,8 +93,9 @@ FLAGS:
93 run_pre_cache() 93 run_pre_cache()
94 } 94 }
95 "release" => { 95 "release" => {
96 let dry_run = args.contains("--dry-run");
96 args.finish()?; 97 args.finish()?;
97 run_release() 98 run_release(dry_run)
98 } 99 }
99 _ => { 100 _ => {
100 eprintln!( 101 eprintln!(
diff --git a/xtask/src/not_bash.rs b/xtask/src/not_bash.rs
new file mode 100644
index 000000000..3e30e7279
--- /dev/null
+++ b/xtask/src/not_bash.rs
@@ -0,0 +1,165 @@
1//! A bad shell -- small cross platform module for writing glue code
2use std::{
3 cell::RefCell,
4 env,
5 ffi::OsStr,
6 fs,
7 path::{Path, PathBuf},
8 process::{Command, Stdio},
9};
10
11use anyhow::{bail, Context, Result};
12
13pub mod fs2 {
14 use std::{fs, path::Path};
15
16 use anyhow::{Context, Result};
17
18 pub fn read_dir<P: AsRef<Path>>(path: P) -> Result<fs::ReadDir> {
19 let path = path.as_ref();
20 fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display()))
21 }
22
23 pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
24 let path = path.as_ref();
25 fs::write(path, contents).with_context(|| format!("Failed to write {}", path.display()))
26 }
27
28 pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
29 let from = from.as_ref();
30 let to = to.as_ref();
31 fs::copy(from, to)
32 .with_context(|| format!("Failed to copy {} to {}", from.display(), to.display()))
33 }
34
35 pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {
36 let path = path.as_ref();
37 fs::remove_file(path).with_context(|| format!("Failed to remove file {}", path.display()))
38 }
39
40 pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
41 let path = path.as_ref();
42 fs::remove_dir_all(path).with_context(|| format!("Failed to remove dir {}", path.display()))
43 }
44}
45
46macro_rules! _run {
47 ($($expr:expr),*) => {
48 run!($($expr),*; echo = true)
49 };
50 ($($expr:expr),* ; echo = $echo:expr) => {
51 $crate::not_bash::run_process(format!($($expr),*), $echo)
52 };
53}
54pub(crate) use _run as run;
55
56pub struct Pushd {
57 _p: (),
58}
59
60pub fn pushd(path: impl Into<PathBuf>) -> Pushd {
61 Env::with(|env| env.pushd(path.into()));
62 Pushd { _p: () }
63}
64
65impl Drop for Pushd {
66 fn drop(&mut self) {
67 Env::with(|env| env.popd())
68 }
69}
70
71pub fn rm(glob: &str) -> Result<()> {
72 let cwd = Env::with(|env| env.cwd());
73 ls(glob)?.into_iter().try_for_each(|it| fs::remove_file(cwd.join(it)))?;
74 Ok(())
75}
76
77pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {
78 let path = path.as_ref();
79 if path.is_file() {
80 fs2::remove_file(path)
81 } else {
82 fs2::remove_dir_all(path)
83 }
84}
85
86pub fn ls(glob: &str) -> Result<Vec<PathBuf>> {
87 let cwd = Env::with(|env| env.cwd());
88 let mut res = Vec::new();
89 for entry in fs::read_dir(&cwd)? {
90 let entry = entry?;
91 if matches(&entry.file_name(), glob) {
92 let path = entry.path();
93 let path = path.strip_prefix(&cwd).unwrap();
94 res.push(path.to_path_buf())
95 }
96 }
97 return Ok(res);
98
99 fn matches(file_name: &OsStr, glob: &str) -> bool {
100 assert!(glob.starts_with('*'));
101 file_name.to_string_lossy().ends_with(&glob[1..])
102 }
103}
104
105#[doc(hidden)]
106pub fn run_process(cmd: String, echo: bool) -> Result<String> {
107 run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd))
108}
109
110fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
111 let cwd = Env::with(|env| env.cwd());
112 let mut args = shelx(cmd);
113 let binary = args.remove(0);
114
115 if echo {
116 println!("> {}", cmd)
117 }
118
119 let output = Command::new(binary)
120 .args(args)
121 .current_dir(cwd)
122 .stdin(Stdio::null())
123 .stderr(Stdio::inherit())
124 .output()?;
125 let stdout = String::from_utf8(output.stdout)?;
126
127 if echo {
128 print!("{}", stdout)
129 }
130
131 if !output.status.success() {
132 bail!("{}", output.status)
133 }
134
135 Ok(stdout.trim().to_string())
136}
137
138// FIXME: some real shell lexing here
139fn shelx(cmd: &str) -> Vec<String> {
140 cmd.split_whitespace().map(|it| it.to_string()).collect()
141}
142
143#[derive(Default)]
144struct Env {
145 pushd_stack: Vec<PathBuf>,
146}
147
148impl Env {
149 fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
150 thread_local! {
151 static ENV: RefCell<Env> = Default::default();
152 }
153 ENV.with(|it| f(&mut *it.borrow_mut()))
154 }
155
156 fn pushd(&mut self, dir: PathBuf) {
157 self.pushd_stack.push(dir)
158 }
159 fn popd(&mut self) {
160 self.pushd_stack.pop().unwrap();
161 }
162 fn cwd(&self) -> PathBuf {
163 self.pushd_stack.last().cloned().unwrap_or_else(|| env::current_dir().unwrap())
164 }
165}
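Taken together, the new module gives xtask a small shell-like vocabulary: run! for commands, pushd for scoped working directories, ls/rm/rm_rf for file juggling, and fs2 for std::fs wrappers whose errors name the offending path. A hedged usage sketch, assuming it lives inside the xtask crate; the paths and commands are illustrative, not taken from this diff:

    use crate::not_bash::{fs2, ls, pushd, rm_rf, run};

    // Hypothetical helper, shown only to exercise the new API surface.
    fn demo() -> anyhow::Result<()> {
        // pushd returns a guard; the directory is popped when `_d` is dropped.
        let _d = pushd("./crates/ra_syntax");
        // run! echoes the command and returns its trimmed stdout on success.
        let head = run!("git rev-parse HEAD")?;
        println!("building at {}", head);
        // ls only supports a leading `*` glob, per the assert in `matches`.
        for path in ls("*.rs")? {
            println!("{}", path.display());
        }
        // rm_rf and fs2 wrap std::fs, attaching the path to any error message.
        rm_rf("./target/tmp")?;
        fs2::write("./target/stamp", head)?;
        Ok(())
    }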
diff --git a/xtask/src/pre_commit.rs b/xtask/src/pre_commit.rs
index 1533f64dc..056f34acf 100644
--- a/xtask/src/pre_commit.rs
+++ b/xtask/src/pre_commit.rs
@@ -4,18 +4,18 @@ use std::{fs, path::PathBuf};
4 4
5use anyhow::{bail, Result}; 5use anyhow::{bail, Result};
6 6
7use crate::{cmd::run_with_output, project_root, run, run_rustfmt, Mode}; 7use crate::{not_bash::run, project_root, run_rustfmt, Mode};
8 8
9// FIXME: if there are changed `.ts` files, also reformat TypeScript (by 9// FIXME: if there are changed `.ts` files, also reformat TypeScript (by
10// shelling out to `npm fmt`). 10// shelling out to `npm fmt`).
11pub fn run_hook() -> Result<()> { 11pub fn run_hook() -> Result<()> {
12 run_rustfmt(Mode::Overwrite)?; 12 run_rustfmt(Mode::Overwrite)?;
13 13
14 let diff = run_with_output("git diff --diff-filter=MAR --name-only --cached", ".")?; 14 let diff = run!("git diff --diff-filter=MAR --name-only --cached")?;
15 15
16 let root = project_root(); 16 let root = project_root();
17 for line in diff.lines() { 17 for line in diff.lines() {
18 run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?; 18 run!("git update-index --add {}", root.join(line).display())?;
19 } 19 }
20 20
21 Ok(()) 21 Ok(())
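Because run! expands through format!, the hook can interpolate values into the command string exactly like println!, which is what the rewritten `git update-index` line relies on. A short sketch of that pattern; the path is illustrative and the helper name is made up:

    use std::path::Path;

    use crate::not_bash::run;

    // Hypothetical helper, shown only to illustrate run!'s format-style arguments.
    fn restage(file: &Path) -> anyhow::Result<()> {
        // Expands to run_process(format!("git update-index --add {}", file.display()), true).
        run!("git update-index --add {}", file.display())?;
        Ok(())
    }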