68 files changed, 1928 insertions, 592 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 2c1192f07..3f52f31f8 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml | |||
@@ -39,7 +39,6 @@ jobs: | |||
39 | with: | 39 | with: |
40 | toolchain: stable | 40 | toolchain: stable |
41 | profile: minimal | 41 | profile: minimal |
42 | target: x86_64-unknown-linux-musl | ||
43 | override: true | 42 | override: true |
44 | 43 | ||
45 | - name: Install Nodejs | 44 | - name: Install Nodejs |
diff --git a/.vscode/launch.json b/.vscode/launch.json index 3f74d7566..6a2fff906 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json | |||
@@ -41,7 +41,7 @@ | |||
41 | "outFiles": [ | 41 | "outFiles": [ |
42 | "${workspaceFolder}/editors/code/out/**/*.js" | 42 | "${workspaceFolder}/editors/code/out/**/*.js" |
43 | ], | 43 | ], |
44 | "preLaunchTask": "Build Extension", | 44 | "preLaunchTask": "Build Server and Extension", |
45 | "skipFiles": [ | 45 | "skipFiles": [ |
46 | "<node_internals>/**/*.js" | 46 | "<node_internals>/**/*.js" |
47 | ], | 47 | ], |
@@ -62,7 +62,7 @@ | |||
62 | "outFiles": [ | 62 | "outFiles": [ |
63 | "${workspaceFolder}/editors/code/out/**/*.js" | 63 | "${workspaceFolder}/editors/code/out/**/*.js" |
64 | ], | 64 | ], |
65 | "preLaunchTask": "Build Extension", | 65 | "preLaunchTask": "Build Server (Release) and Extension", |
66 | "skipFiles": [ | 66 | "skipFiles": [ |
67 | "<node_internals>/**/*.js" | 67 | "<node_internals>/**/*.js" |
68 | ], | 68 | ], |
diff --git a/.vscode/tasks.json b/.vscode/tasks.json index 4037e7cce..0969ce89a 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json | |||
@@ -4,7 +4,7 @@ | |||
4 | "version": "2.0.0", | 4 | "version": "2.0.0", |
5 | "tasks": [ | 5 | "tasks": [ |
6 | { | 6 | { |
7 | "label": "Build Extension", | 7 | "label": "Build Extension in Background", |
8 | "group": "build", | 8 | "group": "build", |
9 | "type": "npm", | 9 | "type": "npm", |
10 | "script": "watch", | 10 | "script": "watch", |
@@ -16,11 +16,40 @@ | |||
16 | "isBackground": true, | 16 | "isBackground": true, |
17 | }, | 17 | }, |
18 | { | 18 | { |
19 | "label": "Build Extension", | ||
20 | "group": "build", | ||
21 | "type": "npm", | ||
22 | "script": "build", | ||
23 | "path": "editors/code/", | ||
24 | "problemMatcher": { | ||
25 | "base": "$tsc", | ||
26 | "fileLocation": ["relative", "${workspaceFolder}/editors/code/"] | ||
27 | }, | ||
28 | }, | ||
29 | { | ||
19 | "label": "Build Server", | 30 | "label": "Build Server", |
20 | "group": "build", | 31 | "group": "build", |
21 | "type": "shell", | 32 | "type": "shell", |
22 | "command": "cargo build --package rust-analyzer", | 33 | "command": "cargo build --package rust-analyzer", |
23 | "problemMatcher": "$rustc" | 34 | "problemMatcher": "$rustc" |
24 | }, | 35 | }, |
36 | { | ||
37 | "label": "Build Server (Release)", | ||
38 | "group": "build", | ||
39 | "type": "shell", | ||
40 | "command": "cargo build --release --package rust-analyzer", | ||
41 | "problemMatcher": "$rustc" | ||
42 | }, | ||
43 | |||
44 | { | ||
45 | "label": "Build Server and Extension", | ||
46 | "dependsOn": ["Build Server", "Build Extension"], | ||
47 | "problemMatcher": "$rustc" | ||
48 | }, | ||
49 | { | ||
50 | "label": "Build Server (Release) and Extension", | ||
51 | "dependsOn": ["Build Server (Release)", "Build Extension"], | ||
52 | "problemMatcher": "$rustc" | ||
53 | } | ||
25 | ] | 54 | ] |
26 | } | 55 | } |
diff --git a/Cargo.lock b/Cargo.lock index 367ff3f82..522ecf2ee 100644 --- a/Cargo.lock +++ b/Cargo.lock | |||
@@ -68,9 +68,9 @@ dependencies = [ | |||
68 | 68 | ||
69 | [[package]] | 69 | [[package]] |
70 | name = "base64" | 70 | name = "base64" |
71 | version = "0.11.0" | 71 | version = "0.12.0" |
72 | source = "registry+https://github.com/rust-lang/crates.io-index" | 72 | source = "registry+https://github.com/rust-lang/crates.io-index" |
73 | checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7" | 73 | checksum = "7d5ca2cd0adc3f48f9e9ea5a6bbdf9ccc0bfade884847e484d452414c7ccffb3" |
74 | 74 | ||
75 | [[package]] | 75 | [[package]] |
76 | name = "bitflags" | 76 | name = "bitflags" |
@@ -645,9 +645,9 @@ dependencies = [ | |||
645 | 645 | ||
646 | [[package]] | 646 | [[package]] |
647 | name = "lsp-types" | 647 | name = "lsp-types" |
648 | version = "0.73.0" | 648 | version = "0.74.0" |
649 | source = "registry+https://github.com/rust-lang/crates.io-index" | 649 | source = "registry+https://github.com/rust-lang/crates.io-index" |
650 | checksum = "93d0cf64ea141b43d9e055f6b9df13f0bce32b103d84237509ce0a571ab9b159" | 650 | checksum = "820f746e5716ab9a2d664794636188bd003023b72e55404ee27105dc22869922" |
651 | dependencies = [ | 651 | dependencies = [ |
652 | "base64", | 652 | "base64", |
653 | "bitflags", | 653 | "bitflags", |
@@ -1193,9 +1193,9 @@ dependencies = [ | |||
1193 | 1193 | ||
1194 | [[package]] | 1194 | [[package]] |
1195 | name = "ra_vfs" | 1195 | name = "ra_vfs" |
1196 | version = "0.5.3" | 1196 | version = "0.6.0" |
1197 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1197 | source = "registry+https://github.com/rust-lang/crates.io-index" |
1198 | checksum = "58a265769d5e5655345a9fcbd870a1a7c3658558c0d8efaed79e0669358f46b8" | 1198 | checksum = "fcaa5615f420134aea7667253db101d03a5c5f300eac607872dc2a36407b2ac9" |
1199 | dependencies = [ | 1199 | dependencies = [ |
1200 | "crossbeam-channel", | 1200 | "crossbeam-channel", |
1201 | "jod-thread", | 1201 | "jod-thread", |
diff --git a/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs b/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs index 03806724a..49deb6701 100644 --- a/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs +++ b/crates/ra_assists/src/handlers/add_from_impl_for_enum.rs | |||
@@ -1,11 +1,12 @@ | |||
1 | use ra_ide_db::RootDatabase; | ||
1 | use ra_syntax::{ | 2 | use ra_syntax::{ |
2 | ast::{self, AstNode, NameOwner}, | 3 | ast::{self, AstNode, NameOwner}, |
3 | TextSize, | 4 | TextSize, |
4 | }; | 5 | }; |
5 | use stdx::format_to; | 6 | use stdx::format_to; |
6 | 7 | ||
7 | use crate::{Assist, AssistCtx, AssistId}; | 8 | use crate::{utils::FamousDefs, Assist, AssistCtx, AssistId}; |
8 | use ra_ide_db::RootDatabase; | 9 | use test_utils::tested_by; |
9 | 10 | ||
10 | // Assist add_from_impl_for_enum | 11 | // Assist add_from_impl_for_enum |
11 | // | 12 | // |
@@ -41,7 +42,8 @@ pub(crate) fn add_from_impl_for_enum(ctx: AssistCtx) -> Option<Assist> { | |||
41 | _ => return None, | 42 | _ => return None, |
42 | }; | 43 | }; |
43 | 44 | ||
44 | if already_has_from_impl(ctx.sema, &variant) { | 45 | if existing_from_impl(ctx.sema, &variant).is_some() { |
46 | tested_by!(test_add_from_impl_already_exists); | ||
45 | return None; | 47 | return None; |
46 | } | 48 | } |
47 | 49 | ||
@@ -70,41 +72,33 @@ impl From<{0}> for {1} {{ | |||
70 | ) | 72 | ) |
71 | } | 73 | } |
72 | 74 | ||
73 | fn already_has_from_impl( | 75 | fn existing_from_impl( |
74 | sema: &'_ hir::Semantics<'_, RootDatabase>, | 76 | sema: &'_ hir::Semantics<'_, RootDatabase>, |
75 | variant: &ast::EnumVariant, | 77 | variant: &ast::EnumVariant, |
76 | ) -> bool { | 78 | ) -> Option<()> { |
77 | let scope = sema.scope(&variant.syntax()); | 79 | let variant = sema.to_def(variant)?; |
80 | let enum_ = variant.parent_enum(sema.db); | ||
81 | let krate = enum_.module(sema.db).krate(); | ||
78 | 82 | ||
79 | let from_path = ast::make::path_from_text("From"); | 83 | let from_trait = FamousDefs(sema, krate).core_convert_From()?; |
80 | let from_hir_path = match hir::Path::from_ast(from_path) { | ||
81 | Some(p) => p, | ||
82 | None => return false, | ||
83 | }; | ||
84 | let from_trait = match scope.resolve_hir_path(&from_hir_path) { | ||
85 | Some(hir::PathResolution::Def(hir::ModuleDef::Trait(t))) => t, | ||
86 | _ => return false, | ||
87 | }; | ||
88 | 84 | ||
89 | let e: hir::Enum = match sema.to_def(&variant.parent_enum()) { | 85 | let enum_type = enum_.ty(sema.db); |
90 | Some(e) => e, | ||
91 | None => return false, | ||
92 | }; | ||
93 | let e_ty = e.ty(sema.db); | ||
94 | 86 | ||
95 | let hir_enum_var: hir::EnumVariant = match sema.to_def(variant) { | 87 | let wrapped_type = variant.fields(sema.db).get(0)?.signature_ty(sema.db); |
96 | Some(ev) => ev, | ||
97 | None => return false, | ||
98 | }; | ||
99 | let var_ty = hir_enum_var.fields(sema.db)[0].signature_ty(sema.db); | ||
100 | 88 | ||
101 | e_ty.impls_trait(sema.db, from_trait, &[var_ty]) | 89 | if enum_type.impls_trait(sema.db, from_trait, &[wrapped_type]) { |
90 | Some(()) | ||
91 | } else { | ||
92 | None | ||
93 | } | ||
102 | } | 94 | } |
103 | 95 | ||
104 | #[cfg(test)] | 96 | #[cfg(test)] |
105 | mod tests { | 97 | mod tests { |
106 | use super::*; | 98 | use super::*; |
99 | |||
107 | use crate::helpers::{check_assist, check_assist_not_applicable}; | 100 | use crate::helpers::{check_assist, check_assist_not_applicable}; |
101 | use test_utils::covers; | ||
108 | 102 | ||
109 | #[test] | 103 | #[test] |
110 | fn test_add_from_impl_for_enum() { | 104 | fn test_add_from_impl_for_enum() { |
@@ -136,36 +130,40 @@ mod tests { | |||
136 | ); | 130 | ); |
137 | } | 131 | } |
138 | 132 | ||
133 | fn check_not_applicable(ra_fixture: &str) { | ||
134 | let fixture = | ||
135 | format!("//- main.rs crate:main deps:core\n{}\n{}", ra_fixture, FamousDefs::FIXTURE); | ||
136 | check_assist_not_applicable(add_from_impl_for_enum, &fixture) | ||
137 | } | ||
138 | |||
139 | #[test] | 139 | #[test] |
140 | fn test_add_from_impl_no_element() { | 140 | fn test_add_from_impl_no_element() { |
141 | check_assist_not_applicable(add_from_impl_for_enum, "enum A { <|>One }"); | 141 | check_not_applicable("enum A { <|>One }"); |
142 | } | 142 | } |
143 | 143 | ||
144 | #[test] | 144 | #[test] |
145 | fn test_add_from_impl_more_than_one_element_in_tuple() { | 145 | fn test_add_from_impl_more_than_one_element_in_tuple() { |
146 | check_assist_not_applicable(add_from_impl_for_enum, "enum A { <|>One(u32, String) }"); | 146 | check_not_applicable("enum A { <|>One(u32, String) }"); |
147 | } | 147 | } |
148 | 148 | ||
149 | #[test] | 149 | #[test] |
150 | fn test_add_from_impl_struct_variant() { | 150 | fn test_add_from_impl_struct_variant() { |
151 | check_assist_not_applicable(add_from_impl_for_enum, "enum A { <|>One { x: u32 } }"); | 151 | check_not_applicable("enum A { <|>One { x: u32 } }"); |
152 | } | 152 | } |
153 | 153 | ||
154 | #[test] | 154 | #[test] |
155 | fn test_add_from_impl_already_exists() { | 155 | fn test_add_from_impl_already_exists() { |
156 | check_assist_not_applicable( | 156 | covers!(test_add_from_impl_already_exists); |
157 | add_from_impl_for_enum, | 157 | check_not_applicable( |
158 | r#"enum A { <|>One(u32), } | 158 | r#" |
159 | enum A { <|>One(u32), } | ||
159 | 160 | ||
160 | impl From<u32> for A { | 161 | impl From<u32> for A { |
161 | fn from(v: u32) -> Self { | 162 | fn from(v: u32) -> Self { |
162 | A::One(v) | 163 | A::One(v) |
163 | } | 164 | } |
164 | } | 165 | } |
165 | 166 | "#, | |
166 | pub trait From<T> { | ||
167 | fn from(T) -> Self; | ||
168 | }"#, | ||
169 | ); | 167 | ); |
170 | } | 168 | } |
171 | 169 | ||
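Note on the refactor above: `already_has_from_impl` returned `bool`, while the new `existing_from_impl` returns `Option<()>`, so every fallible lookup can bail out with `?` instead of a `match`/`return false`. A minimal, self-contained sketch of that idiom (plain Rust, not code from this patch):

    // Applicability check in the Option<()> style: each fallible step uses `?`.
    fn divisible_by(x: u32, by: u32) -> Option<()> {
        let rem = x.checked_rem(by)?; // None on division by zero
        if rem == 0 { Some(()) } else { None }
    }

    fn main() {
        assert!(divisible_by(9, 3).is_some());
        assert!(divisible_by(9, 0).is_none()); // assist would be "not applicable"
    }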
diff --git a/crates/ra_assists/src/handlers/replace_if_let_with_match.rs b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs index 0a0a88f3d..9841f6980 100644 --- a/crates/ra_assists/src/handlers/replace_if_let_with_match.rs +++ b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs | |||
@@ -1,11 +1,10 @@ | |||
1 | use ra_fmt::unwrap_trivial_block; | 1 | use ra_fmt::unwrap_trivial_block; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | ast::{self, make}, | 3 | ast::{self, edit::IndentLevel, make}, |
4 | AstNode, | 4 | AstNode, |
5 | }; | 5 | }; |
6 | 6 | ||
7 | use crate::{Assist, AssistCtx, AssistId}; | 7 | use crate::{utils::TryEnum, Assist, AssistCtx, AssistId}; |
8 | use ast::edit::IndentLevel; | ||
9 | 8 | ||
10 | // Assist: replace_if_let_with_match | 9 | // Assist: replace_if_let_with_match |
11 | // | 10 | // |
@@ -44,15 +43,21 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> { | |||
44 | ast::ElseBranch::IfExpr(_) => return None, | 43 | ast::ElseBranch::IfExpr(_) => return None, |
45 | }; | 44 | }; |
46 | 45 | ||
47 | ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", |edit| { | 46 | let sema = ctx.sema; |
47 | ctx.add_assist(AssistId("replace_if_let_with_match"), "Replace with match", move |edit| { | ||
48 | let match_expr = { | 48 | let match_expr = { |
49 | let then_arm = { | 49 | let then_arm = { |
50 | let then_expr = unwrap_trivial_block(then_block); | 50 | let then_expr = unwrap_trivial_block(then_block); |
51 | make::match_arm(vec![pat], then_expr) | 51 | make::match_arm(vec![pat.clone()], then_expr) |
52 | }; | 52 | }; |
53 | let else_arm = { | 53 | let else_arm = { |
54 | let pattern = sema | ||
55 | .type_of_pat(&pat) | ||
56 | .and_then(|ty| TryEnum::from_ty(sema, &ty)) | ||
57 | .map(|it| it.sad_pattern()) | ||
58 | .unwrap_or_else(|| make::placeholder_pat().into()); | ||
54 | let else_expr = unwrap_trivial_block(else_block); | 59 | let else_expr = unwrap_trivial_block(else_block); |
55 | make::match_arm(vec![make::placeholder_pat().into()], else_expr) | 60 | make::match_arm(vec![pattern], else_expr) |
56 | }; | 61 | }; |
57 | make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm])) | 62 | make::expr_match(expr, make::match_arm_list(vec![then_arm, else_arm])) |
58 | }; | 63 | }; |
@@ -68,6 +73,7 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> { | |||
68 | #[cfg(test)] | 73 | #[cfg(test)] |
69 | mod tests { | 74 | mod tests { |
70 | use super::*; | 75 | use super::*; |
76 | |||
71 | use crate::helpers::{check_assist, check_assist_target}; | 77 | use crate::helpers::{check_assist, check_assist_target}; |
72 | 78 | ||
73 | #[test] | 79 | #[test] |
@@ -145,4 +151,64 @@ impl VariantData { | |||
145 | }", | 151 | }", |
146 | ); | 152 | ); |
147 | } | 153 | } |
154 | |||
155 | #[test] | ||
156 | fn special_case_option() { | ||
157 | check_assist( | ||
158 | replace_if_let_with_match, | ||
159 | r#" | ||
160 | enum Option<T> { Some(T), None } | ||
161 | use Option::*; | ||
162 | |||
163 | fn foo(x: Option<i32>) { | ||
164 | <|>if let Some(x) = x { | ||
165 | println!("{}", x) | ||
166 | } else { | ||
167 | println!("none") | ||
168 | } | ||
169 | } | ||
170 | "#, | ||
171 | r#" | ||
172 | enum Option<T> { Some(T), None } | ||
173 | use Option::*; | ||
174 | |||
175 | fn foo(x: Option<i32>) { | ||
176 | <|>match x { | ||
177 | Some(x) => println!("{}", x), | ||
178 | None => println!("none"), | ||
179 | } | ||
180 | } | ||
181 | "#, | ||
182 | ); | ||
183 | } | ||
184 | |||
185 | #[test] | ||
186 | fn special_case_result() { | ||
187 | check_assist( | ||
188 | replace_if_let_with_match, | ||
189 | r#" | ||
190 | enum Result<T, E> { Ok(T), Err(E) } | ||
191 | use Result::*; | ||
192 | |||
193 | fn foo(x: Result<i32, ()>) { | ||
194 | <|>if let Ok(x) = x { | ||
195 | println!("{}", x) | ||
196 | } else { | ||
197 | println!("none") | ||
198 | } | ||
199 | } | ||
200 | "#, | ||
201 | r#" | ||
202 | enum Result<T, E> { Ok(T), Err(E) } | ||
203 | use Result::*; | ||
204 | |||
205 | fn foo(x: Result<i32, ()>) { | ||
206 | <|>match x { | ||
207 | Ok(x) => println!("{}", x), | ||
208 | Err(_) => println!("none"), | ||
209 | } | ||
210 | } | ||
211 | "#, | ||
212 | ); | ||
213 | } | ||
148 | } | 214 | } |
diff --git a/crates/ra_assists/src/handlers/replace_let_with_if_let.rs b/crates/ra_assists/src/handlers/replace_let_with_if_let.rs index bdbaae389..0cf23b754 100644 --- a/crates/ra_assists/src/handlers/replace_let_with_if_let.rs +++ b/crates/ra_assists/src/handlers/replace_let_with_if_let.rs | |||
@@ -1,6 +1,5 @@ | |||
1 | use std::iter::once; | 1 | use std::iter::once; |
2 | 2 | ||
3 | use hir::Adt; | ||
4 | use ra_syntax::{ | 3 | use ra_syntax::{ |
5 | ast::{ | 4 | ast::{ |
6 | self, | 5 | self, |
@@ -12,6 +11,7 @@ use ra_syntax::{ | |||
12 | 11 | ||
13 | use crate::{ | 12 | use crate::{ |
14 | assist_ctx::{Assist, AssistCtx}, | 13 | assist_ctx::{Assist, AssistCtx}, |
14 | utils::TryEnum, | ||
15 | AssistId, | 15 | AssistId, |
16 | }; | 16 | }; |
17 | 17 | ||
@@ -45,20 +45,10 @@ pub(crate) fn replace_let_with_if_let(ctx: AssistCtx) -> Option<Assist> { | |||
45 | let init = let_stmt.initializer()?; | 45 | let init = let_stmt.initializer()?; |
46 | let original_pat = let_stmt.pat()?; | 46 | let original_pat = let_stmt.pat()?; |
47 | let ty = ctx.sema.type_of_expr(&init)?; | 47 | let ty = ctx.sema.type_of_expr(&init)?; |
48 | let enum_ = match ty.as_adt() { | 48 | let happy_variant = TryEnum::from_ty(ctx.sema, &ty).map(|it| it.happy_case()); |
49 | Some(Adt::Enum(it)) => it, | ||
50 | _ => return None, | ||
51 | }; | ||
52 | let happy_case = | ||
53 | [("Result", "Ok"), ("Option", "Some")].iter().find_map(|(known_type, happy_case)| { | ||
54 | if &enum_.name(ctx.db).to_string() == known_type { | ||
55 | return Some(happy_case); | ||
56 | } | ||
57 | None | ||
58 | }); | ||
59 | 49 | ||
60 | ctx.add_assist(AssistId("replace_let_with_if_let"), "Replace with if-let", |edit| { | 50 | ctx.add_assist(AssistId("replace_let_with_if_let"), "Replace with if-let", |edit| { |
61 | let with_placeholder: ast::Pat = match happy_case { | 51 | let with_placeholder: ast::Pat = match happy_variant { |
62 | None => make::placeholder_pat().into(), | 52 | None => make::placeholder_pat().into(), |
63 | Some(var_name) => make::tuple_struct_pat( | 53 | Some(var_name) => make::tuple_struct_pat( |
64 | make::path_unqualified(make::path_segment(make::name_ref(var_name))), | 54 | make::path_unqualified(make::path_segment(make::name_ref(var_name))), |
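For orientation, the handler's effect is unchanged by this refactor; only the Result/Option detection moves into the shared `TryEnum` helper. Roughly, the pattern it inserts looks like this (a plain-Rust illustration, not code from this patch):

    fn produce() -> Result<i32, ()> { Ok(1) }

    fn before() {
        let x = produce();           // assist invoked on this `let`
        let _ = x;
    }

    fn after() {
        if let Ok(x) = produce() {   // pattern uses the happy variant, not `_`
            let _ = x;
        }
    }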
diff --git a/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs b/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs index 62cb7a763..62d4ea522 100644 --- a/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs +++ b/crates/ra_assists/src/handlers/replace_unwrap_with_match.rs | |||
@@ -1,12 +1,11 @@ | |||
1 | use std::iter; | 1 | use std::iter; |
2 | 2 | ||
3 | use ra_syntax::{ | 3 | use ra_syntax::{ |
4 | ast::{self, make}, | 4 | ast::{self, edit::IndentLevel, make}, |
5 | AstNode, | 5 | AstNode, |
6 | }; | 6 | }; |
7 | 7 | ||
8 | use crate::{Assist, AssistCtx, AssistId}; | 8 | use crate::{utils::TryEnum, Assist, AssistCtx, AssistId}; |
9 | use ast::edit::IndentLevel; | ||
10 | 9 | ||
11 | // Assist: replace_unwrap_with_match | 10 | // Assist: replace_unwrap_with_match |
12 | // | 11 | // |
@@ -38,42 +37,27 @@ pub(crate) fn replace_unwrap_with_match(ctx: AssistCtx) -> Option<Assist> { | |||
38 | } | 37 | } |
39 | let caller = method_call.expr()?; | 38 | let caller = method_call.expr()?; |
40 | let ty = ctx.sema.type_of_expr(&caller)?; | 39 | let ty = ctx.sema.type_of_expr(&caller)?; |
40 | let happy_variant = TryEnum::from_ty(ctx.sema, &ty)?.happy_case(); | ||
41 | 41 | ||
42 | let type_name = ty.as_adt()?.name(ctx.sema.db).to_string(); | 42 | ctx.add_assist(AssistId("replace_unwrap_with_match"), "Replace unwrap with match", |edit| { |
43 | let ok_path = make::path_unqualified(make::path_segment(make::name_ref(happy_variant))); | ||
44 | let it = make::bind_pat(make::name("a")).into(); | ||
45 | let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into(); | ||
43 | 46 | ||
44 | for (unwrap_type, variant_name) in [("Result", "Ok"), ("Option", "Some")].iter() { | 47 | let bind_path = make::path_unqualified(make::path_segment(make::name_ref("a"))); |
45 | if &type_name == unwrap_type { | 48 | let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path)); |
46 | return ctx.add_assist( | ||
47 | AssistId("replace_unwrap_with_match"), | ||
48 | "Replace unwrap with match", | ||
49 | |edit| { | ||
50 | let ok_path = | ||
51 | make::path_unqualified(make::path_segment(make::name_ref(variant_name))); | ||
52 | let it = make::bind_pat(make::name("a")).into(); | ||
53 | let ok_tuple = make::tuple_struct_pat(ok_path, iter::once(it)).into(); | ||
54 | 49 | ||
55 | let bind_path = make::path_unqualified(make::path_segment(make::name_ref("a"))); | 50 | let unreachable_call = make::unreachable_macro_call().into(); |
56 | let ok_arm = make::match_arm(iter::once(ok_tuple), make::expr_path(bind_path)); | 51 | let err_arm = make::match_arm(iter::once(make::placeholder_pat().into()), unreachable_call); |
57 | 52 | ||
58 | let unreachable_call = make::unreachable_macro_call().into(); | 53 | let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]); |
59 | let err_arm = make::match_arm( | 54 | let match_expr = make::expr_match(caller.clone(), match_arm_list); |
60 | iter::once(make::placeholder_pat().into()), | 55 | let match_expr = IndentLevel::from_node(method_call.syntax()).increase_indent(match_expr); |
61 | unreachable_call, | ||
62 | ); | ||
63 | 56 | ||
64 | let match_arm_list = make::match_arm_list(vec![ok_arm, err_arm]); | 57 | edit.target(method_call.syntax().text_range()); |
65 | let match_expr = make::expr_match(caller.clone(), match_arm_list); | 58 | edit.set_cursor(caller.syntax().text_range().start()); |
66 | let match_expr = | 59 | edit.replace_ast::<ast::Expr>(method_call.into(), match_expr); |
67 | IndentLevel::from_node(method_call.syntax()).increase_indent(match_expr); | 60 | }) |
68 | |||
69 | edit.target(method_call.syntax().text_range()); | ||
70 | edit.set_cursor(caller.syntax().text_range().start()); | ||
71 | edit.replace_ast::<ast::Expr>(method_call.into(), match_expr); | ||
72 | }, | ||
73 | ); | ||
74 | } | ||
75 | } | ||
76 | None | ||
77 | } | 61 | } |
78 | 62 | ||
79 | #[cfg(test)] | 63 | #[cfg(test)] |
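The flattened handler builds the same edit as before; only the Result/Option check now goes through `TryEnum`. The rewrite it produces, shown as ordinary Rust rather than patch code:

    fn before(x: Result<i32, ()>) -> i32 {
        x.unwrap()
    }

    // What the assist inserts: bind the happy variant to `a`, make the other arm unreachable.
    fn after(x: Result<i32, ()>) -> i32 {
        match x {
            Ok(a) => a,
            _ => unreachable!(),
        }
    }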
diff --git a/crates/ra_assists/src/marks.rs b/crates/ra_assists/src/marks.rs index 6c2a2b8b6..8d910205f 100644 --- a/crates/ra_assists/src/marks.rs +++ b/crates/ra_assists/src/marks.rs | |||
@@ -8,4 +8,5 @@ test_utils::marks![ | |||
8 | test_not_inline_mut_variable | 8 | test_not_inline_mut_variable |
9 | test_not_applicable_if_variable_unused | 9 | test_not_applicable_if_variable_unused |
10 | change_visibility_field_false_positive | 10 | change_visibility_field_false_positive |
11 | test_add_from_impl_already_exists | ||
11 | ]; | 12 | ]; |
diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs index 3d6c59bda..efd988697 100644 --- a/crates/ra_assists/src/utils.rs +++ b/crates/ra_assists/src/utils.rs | |||
@@ -1,7 +1,9 @@ | |||
1 | //! Assorted functions shared by several assists. | 1 | //! Assorted functions shared by several assists. |
2 | pub(crate) mod insert_use; | 2 | pub(crate) mod insert_use; |
3 | 3 | ||
4 | use hir::Semantics; | 4 | use std::iter; |
5 | |||
6 | use hir::{Adt, Crate, Semantics, Trait, Type}; | ||
5 | use ra_ide_db::RootDatabase; | 7 | use ra_ide_db::RootDatabase; |
6 | use ra_syntax::{ | 8 | use ra_syntax::{ |
7 | ast::{self, make, NameOwner}, | 9 | ast::{self, make, NameOwner}, |
@@ -99,3 +101,109 @@ fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> { | |||
99 | _ => None, | 101 | _ => None, |
100 | } | 102 | } |
101 | } | 103 | } |
104 | |||
105 | #[derive(Clone, Copy)] | ||
106 | pub(crate) enum TryEnum { | ||
107 | Result, | ||
108 | Option, | ||
109 | } | ||
110 | |||
111 | impl TryEnum { | ||
112 | const ALL: [TryEnum; 2] = [TryEnum::Option, TryEnum::Result]; | ||
113 | |||
114 | pub(crate) fn from_ty(sema: &Semantics<RootDatabase>, ty: &Type) -> Option<TryEnum> { | ||
115 | let enum_ = match ty.as_adt() { | ||
116 | Some(Adt::Enum(it)) => it, | ||
117 | _ => return None, | ||
118 | }; | ||
119 | TryEnum::ALL.iter().find_map(|&var| { | ||
120 | if &enum_.name(sema.db).to_string() == var.type_name() { | ||
121 | return Some(var); | ||
122 | } | ||
123 | None | ||
124 | }) | ||
125 | } | ||
126 | |||
127 | pub(crate) fn happy_case(self) -> &'static str { | ||
128 | match self { | ||
129 | TryEnum::Result => "Ok", | ||
130 | TryEnum::Option => "Some", | ||
131 | } | ||
132 | } | ||
133 | |||
134 | pub(crate) fn sad_pattern(self) -> ast::Pat { | ||
135 | match self { | ||
136 | TryEnum::Result => make::tuple_struct_pat( | ||
137 | make::path_unqualified(make::path_segment(make::name_ref("Err"))), | ||
138 | iter::once(make::placeholder_pat().into()), | ||
139 | ) | ||
140 | .into(), | ||
141 | TryEnum::Option => make::bind_pat(make::name("None")).into(), | ||
142 | } | ||
143 | } | ||
144 | |||
145 | fn type_name(self) -> &'static str { | ||
146 | match self { | ||
147 | TryEnum::Result => "Result", | ||
148 | TryEnum::Option => "Option", | ||
149 | } | ||
150 | } | ||
151 | } | ||
152 | |||
153 | /// Helps with finding well-known things inside the standard library. This is | ||
154 | /// somewhat similar to the known paths infra inside hir, but it is different; we | ||
155 | /// want to make sure that IDE specific paths don't become interesting inside | ||
156 | /// the compiler itself as well. | ||
157 | pub(crate) struct FamousDefs<'a, 'b>(pub(crate) &'a Semantics<'b, RootDatabase>, pub(crate) Crate); | ||
158 | |||
159 | #[allow(non_snake_case)] | ||
160 | impl FamousDefs<'_, '_> { | ||
161 | #[cfg(test)] | ||
162 | pub(crate) const FIXTURE: &'static str = r#" | ||
163 | //- /libcore.rs crate:core | ||
164 | pub mod convert{ | ||
165 | pub trait From<T> { | ||
166 | fn from(T) -> Self; | ||
167 | } | ||
168 | } | ||
169 | |||
170 | pub mod prelude { pub use crate::convert::From } | ||
171 | #[prelude_import] | ||
172 | pub use prelude::*; | ||
173 | "#; | ||
174 | |||
175 | pub(crate) fn core_convert_From(&self) -> Option<Trait> { | ||
176 | self.find_trait("core:convert:From") | ||
177 | } | ||
178 | |||
179 | fn find_trait(&self, path: &str) -> Option<Trait> { | ||
180 | let db = self.0.db; | ||
181 | let mut path = path.split(':'); | ||
182 | let trait_ = path.next_back()?; | ||
183 | let std_crate = path.next()?; | ||
184 | let std_crate = self | ||
185 | .1 | ||
186 | .dependencies(db) | ||
187 | .into_iter() | ||
188 | .find(|dep| &dep.name.to_string() == std_crate)? | ||
189 | .krate; | ||
190 | |||
191 | let mut module = std_crate.root_module(db)?; | ||
192 | for segment in path { | ||
193 | module = module.children(db).find_map(|child| { | ||
194 | let name = child.name(db)?; | ||
195 | if &name.to_string() == segment { | ||
196 | Some(child) | ||
197 | } else { | ||
198 | None | ||
199 | } | ||
200 | })?; | ||
201 | } | ||
202 | let def = | ||
203 | module.scope(db, None).into_iter().find(|(name, _def)| &name.to_string() == trait_)?.1; | ||
204 | match def { | ||
205 | hir::ScopeDef::ModuleDef(hir::ModuleDef::Trait(it)) => Some(it), | ||
206 | _ => None, | ||
207 | } | ||
208 | } | ||
209 | } | ||
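Taken together, the handlers in this patch consume these helpers roughly as follows (a sketch stitched from the crate-internal calls visible above; it will not compile outside rust-analyzer):

    // Inside an assist handler, with `ctx: AssistCtx` in scope.
    let ty = ctx.sema.type_of_expr(&caller)?;            // hir type of the scrutinee
    let try_enum = TryEnum::from_ty(ctx.sema, &ty)?;     // Option / Result, or bail out
    let happy = try_enum.happy_case();                   // "Some" or "Ok"
    let sad: ast::Pat = try_enum.sad_pattern();          // `None` or `Err(_)`

    // And for well-known std items, as in add_from_impl_for_enum:
    let from_trait = FamousDefs(ctx.sema, krate).core_convert_From()?;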
diff --git a/crates/ra_flycheck/Cargo.toml b/crates/ra_flycheck/Cargo.toml index 76e5cada4..324c33d9d 100644 --- a/crates/ra_flycheck/Cargo.toml +++ b/crates/ra_flycheck/Cargo.toml | |||
@@ -6,7 +6,7 @@ authors = ["rust-analyzer developers"] | |||
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | crossbeam-channel = "0.4.0" | 8 | crossbeam-channel = "0.4.0" |
9 | lsp-types = { version = "0.73.0", features = ["proposed"] } | 9 | lsp-types = { version = "0.74.0", features = ["proposed"] } |
10 | log = "0.4.8" | 10 | log = "0.4.8" |
11 | cargo_metadata = "0.9.1" | 11 | cargo_metadata = "0.9.1" |
12 | serde_json = "1.0.48" | 12 | serde_json = "1.0.48" |
diff --git a/crates/ra_fmt/src/lib.rs b/crates/ra_fmt/src/lib.rs index 0b4ba1bbe..1a30b2b3a 100644 --- a/crates/ra_fmt/src/lib.rs +++ b/crates/ra_fmt/src/lib.rs | |||
@@ -57,18 +57,17 @@ pub fn extract_trivial_expression(block: &ast::BlockExpr) -> Option<ast::Expr> { | |||
57 | return None; | 57 | return None; |
58 | } | 58 | } |
59 | return Some(expr); | 59 | return Some(expr); |
60 | } else { | 60 | } |
61 | // Unwrap `{ continue; }` | 61 | // Unwrap `{ continue; }` |
62 | let (stmt,) = block.statements().next_tuple()?; | 62 | let (stmt,) = block.statements().next_tuple()?; |
63 | if let ast::Stmt::ExprStmt(expr_stmt) = stmt { | 63 | if let ast::Stmt::ExprStmt(expr_stmt) = stmt { |
64 | if has_anything_else(expr_stmt.syntax()) { | 64 | if has_anything_else(expr_stmt.syntax()) { |
65 | return None; | 65 | return None; |
66 | } | 66 | } |
67 | let expr = expr_stmt.expr()?; | 67 | let expr = expr_stmt.expr()?; |
68 | match expr.syntax().kind() { | 68 | match expr.syntax().kind() { |
69 | CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr), | 69 | CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR => return Some(expr), |
70 | _ => (), | 70 | _ => (), |
71 | } | ||
72 | } | 71 | } |
73 | } | 72 | } |
74 | None | 73 | None |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs index fb788736d..af59aa1b6 100644 --- a/crates/ra_hir/src/code_model.rs +++ b/crates/ra_hir/src/code_model.rs | |||
@@ -953,6 +953,16 @@ impl TypeParam { | |||
953 | pub fn module(self, db: &dyn HirDatabase) -> Module { | 953 | pub fn module(self, db: &dyn HirDatabase) -> Module { |
954 | self.id.parent.module(db.upcast()).into() | 954 | self.id.parent.module(db.upcast()).into() |
955 | } | 955 | } |
956 | |||
957 | pub fn ty(self, db: &dyn HirDatabase) -> Type { | ||
958 | let resolver = self.id.parent.resolver(db.upcast()); | ||
959 | let environment = TraitEnvironment::lower(db, &resolver); | ||
960 | let ty = Ty::Placeholder(self.id); | ||
961 | Type { | ||
962 | krate: self.id.parent.module(db.upcast()).krate, | ||
963 | ty: InEnvironment { value: ty, environment }, | ||
964 | } | ||
965 | } | ||
956 | } | 966 | } |
957 | 967 | ||
958 | // FIXME: rename from `ImplDef` to `Impl` | 968 | // FIXME: rename from `ImplDef` to `Impl` |
@@ -1157,18 +1167,21 @@ impl Type { | |||
1157 | 1167 | ||
1158 | pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> { | 1168 | pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> { |
1159 | if let Ty::Apply(a_ty) = &self.ty.value { | 1169 | if let Ty::Apply(a_ty) = &self.ty.value { |
1160 | if let TypeCtor::Adt(AdtId::StructId(s)) = a_ty.ctor { | 1170 | let variant_id = match a_ty.ctor { |
1161 | let var_def = s.into(); | 1171 | TypeCtor::Adt(AdtId::StructId(s)) => s.into(), |
1162 | return db | 1172 | TypeCtor::Adt(AdtId::UnionId(u)) => u.into(), |
1163 | .field_types(var_def) | 1173 | _ => return Vec::new(), |
1164 | .iter() | 1174 | }; |
1165 | .map(|(local_id, ty)| { | 1175 | |
1166 | let def = Field { parent: var_def.into(), id: local_id }; | 1176 | return db |
1167 | let ty = ty.clone().subst(&a_ty.parameters); | 1177 | .field_types(variant_id) |
1168 | (def, self.derived(ty)) | 1178 | .iter() |
1169 | }) | 1179 | .map(|(local_id, ty)| { |
1170 | .collect(); | 1180 | let def = Field { parent: variant_id.into(), id: local_id }; |
1171 | } | 1181 | let ty = ty.clone().subst(&a_ty.parameters); |
1182 | (def, self.derived(ty)) | ||
1183 | }) | ||
1184 | .collect(); | ||
1172 | }; | 1185 | }; |
1173 | Vec::new() | 1186 | Vec::new() |
1174 | } | 1187 | } |
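A plain-Rust illustration (not part of the patch) of what the new union arm in `Type::fields` enables: field-based IDE features now see `field: u8` and `other: u16` on a union, which the completion test added later in this change exercises.

    union Un {
        field: u8,
        other: u16,
    }

    fn read(u: Un) -> u8 {
        unsafe { u.field }   // completion/hover on `u.` can now offer `field` and `other`
    }

    fn main() {
        let u = Un { field: 7 };
        assert_eq!(read(u), 7);
    }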
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs index 86bfb416c..a0a0f234b 100644 --- a/crates/ra_hir/src/semantics.rs +++ b/crates/ra_hir/src/semantics.rs | |||
@@ -9,6 +9,7 @@ use hir_def::{ | |||
9 | AsMacroCall, TraitId, | 9 | AsMacroCall, TraitId, |
10 | }; | 10 | }; |
11 | use hir_expand::ExpansionInfo; | 11 | use hir_expand::ExpansionInfo; |
12 | use hir_ty::associated_type_shorthand_candidates; | ||
12 | use itertools::Itertools; | 13 | use itertools::Itertools; |
13 | use ra_db::{FileId, FileRange}; | 14 | use ra_db::{FileId, FileRange}; |
14 | use ra_prof::profile; | 15 | use ra_prof::profile; |
@@ -24,8 +25,9 @@ use crate::{ | |||
24 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, | 25 | semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, |
25 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, | 26 | source_analyzer::{resolve_hir_path, SourceAnalyzer}, |
26 | AssocItem, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, | 27 | AssocItem, Field, Function, HirFileId, ImplDef, InFile, Local, MacroDef, Module, ModuleDef, |
27 | Name, Origin, Path, ScopeDef, Trait, Type, TypeParam, | 28 | Name, Origin, Path, ScopeDef, Trait, Type, TypeAlias, TypeParam, |
28 | }; | 29 | }; |
30 | use resolver::TypeNs; | ||
29 | 31 | ||
30 | #[derive(Debug, Clone, PartialEq, Eq)] | 32 | #[derive(Debug, Clone, PartialEq, Eq)] |
31 | pub enum PathResolution { | 33 | pub enum PathResolution { |
@@ -40,6 +42,44 @@ pub enum PathResolution { | |||
40 | AssocItem(AssocItem), | 42 | AssocItem(AssocItem), |
41 | } | 43 | } |
42 | 44 | ||
45 | impl PathResolution { | ||
46 | fn in_type_ns(&self) -> Option<TypeNs> { | ||
47 | match self { | ||
48 | PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())), | ||
49 | PathResolution::Def(ModuleDef::BuiltinType(builtin)) => { | ||
50 | Some(TypeNs::BuiltinType(*builtin)) | ||
51 | } | ||
52 | PathResolution::Def(ModuleDef::Const(_)) | ||
53 | | PathResolution::Def(ModuleDef::EnumVariant(_)) | ||
54 | | PathResolution::Def(ModuleDef::Function(_)) | ||
55 | | PathResolution::Def(ModuleDef::Module(_)) | ||
56 | | PathResolution::Def(ModuleDef::Static(_)) | ||
57 | | PathResolution::Def(ModuleDef::Trait(_)) => None, | ||
58 | PathResolution::Def(ModuleDef::TypeAlias(alias)) => { | ||
59 | Some(TypeNs::TypeAliasId((*alias).into())) | ||
60 | } | ||
61 | PathResolution::Local(_) | PathResolution::Macro(_) => None, | ||
62 | PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())), | ||
63 | PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())), | ||
64 | PathResolution::AssocItem(AssocItem::Const(_)) | ||
65 | | PathResolution::AssocItem(AssocItem::Function(_)) => None, | ||
66 | PathResolution::AssocItem(AssocItem::TypeAlias(alias)) => { | ||
67 | Some(TypeNs::TypeAliasId((*alias).into())) | ||
68 | } | ||
69 | } | ||
70 | } | ||
71 | |||
72 | /// Returns an iterator over associated types that may be specified after this path (using | ||
73 | /// `Ty::Assoc` syntax). | ||
74 | pub fn assoc_type_shorthand_candidates<R>( | ||
75 | &self, | ||
76 | db: &dyn HirDatabase, | ||
77 | mut cb: impl FnMut(TypeAlias) -> Option<R>, | ||
78 | ) -> Option<R> { | ||
79 | associated_type_shorthand_candidates(db, self.in_type_ns()?, |_, _, id| cb(id.into())) | ||
80 | } | ||
81 | } | ||
82 | |||
43 | /// Primary API to get semantic information, like types, from syntax trees. | 83 | /// Primary API to get semantic information, like types, from syntax trees. |
44 | pub struct Semantics<'db, DB> { | 84 | pub struct Semantics<'db, DB> { |
45 | pub db: &'db DB, | 85 | pub db: &'db DB, |
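The new `PathResolution::assoc_type_shorthand_candidates` is what the completion change further below builds on; its call shape, as used in `complete_qualified_path` (crate-internal code, shown for orientation rather than as a runnable example):

    // `res` is the PathResolution of the path prefix before `::`.
    res.assoc_type_shorthand_candidates(ctx.db, |alias| {
        acc.add_type_alias(ctx, alias);  // offer `Ty::Assoc`-style completions
        None::<()>                       // keep iterating over all candidates
    });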
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs index 571603854..f467ed3fe 100644 --- a/crates/ra_hir_def/src/body/lower.rs +++ b/crates/ra_hir_def/src/body/lower.rs | |||
@@ -182,10 +182,6 @@ impl ExprCollector<'_> { | |||
182 | 182 | ||
183 | self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) | 183 | self.alloc_expr(Expr::If { condition, then_branch, else_branch }, syntax_ptr) |
184 | } | 184 | } |
185 | ast::Expr::TryBlockExpr(e) => { | ||
186 | let body = self.collect_block_opt(e.body()); | ||
187 | self.alloc_expr(Expr::TryBlock { body }, syntax_ptr) | ||
188 | } | ||
189 | ast::Expr::BlockExpr(e) => self.collect_block(e), | 185 | ast::Expr::BlockExpr(e) => self.collect_block(e), |
190 | ast::Expr::LoopExpr(e) => { | 186 | ast::Expr::LoopExpr(e) => { |
191 | let body = self.collect_block_opt(e.loop_body()); | 187 | let body = self.collect_block_opt(e.loop_body()); |
diff --git a/crates/ra_hir_def/src/expr.rs b/crates/ra_hir_def/src/expr.rs index a0cdad529..aad12e123 100644 --- a/crates/ra_hir_def/src/expr.rs +++ b/crates/ra_hir_def/src/expr.rs | |||
@@ -101,9 +101,6 @@ pub enum Expr { | |||
101 | Try { | 101 | Try { |
102 | expr: ExprId, | 102 | expr: ExprId, |
103 | }, | 103 | }, |
104 | TryBlock { | ||
105 | body: ExprId, | ||
106 | }, | ||
107 | Cast { | 104 | Cast { |
108 | expr: ExprId, | 105 | expr: ExprId, |
109 | type_ref: TypeRef, | 106 | type_ref: TypeRef, |
@@ -239,7 +236,6 @@ impl Expr { | |||
239 | f(*expr); | 236 | f(*expr); |
240 | } | 237 | } |
241 | } | 238 | } |
242 | Expr::TryBlock { body } => f(*body), | ||
243 | Expr::Loop { body } => f(*body), | 239 | Expr::Loop { body } => f(*body), |
244 | Expr::While { condition, body } => { | 240 | Expr::While { condition, body } => { |
245 | f(*condition); | 241 | f(*condition); |
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs index 83f946eee..efc60986b 100644 --- a/crates/ra_hir_ty/src/infer/expr.rs +++ b/crates/ra_hir_ty/src/infer/expr.rs | |||
@@ -73,11 +73,6 @@ impl<'a> InferenceContext<'a> { | |||
73 | self.coerce_merge_branch(&then_ty, &else_ty) | 73 | self.coerce_merge_branch(&then_ty, &else_ty) |
74 | } | 74 | } |
75 | Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), | 75 | Expr::Block { statements, tail } => self.infer_block(statements, *tail, expected), |
76 | Expr::TryBlock { body } => { | ||
77 | let _inner = self.infer_expr(*body, expected); | ||
78 | // FIXME should be std::result::Result<{inner}, _> | ||
79 | Ty::Unknown | ||
80 | } | ||
81 | Expr::Loop { body } => { | 76 | Expr::Loop { body } => { |
82 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); | 77 | self.infer_expr(*body, &Expectation::has_type(Ty::unit())); |
83 | // FIXME handle break with value | 78 | // FIXME handle break with value |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs index a8ef32ec5..a6f56c661 100644 --- a/crates/ra_hir_ty/src/lib.rs +++ b/crates/ra_hir_ty/src/lib.rs | |||
@@ -66,7 +66,8 @@ pub use autoderef::autoderef; | |||
66 | pub use infer::{InferTy, InferenceResult}; | 66 | pub use infer::{InferTy, InferenceResult}; |
67 | pub use lower::CallableDef; | 67 | pub use lower::CallableDef; |
68 | pub use lower::{ | 68 | pub use lower::{ |
69 | callable_item_sig, ImplTraitLoweringMode, TyDefId, TyLoweringContext, ValueTyDefId, | 69 | associated_type_shorthand_candidates, callable_item_sig, ImplTraitLoweringMode, TyDefId, |
70 | TyLoweringContext, ValueTyDefId, | ||
70 | }; | 71 | }; |
71 | pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; | 72 | pub use traits::{InEnvironment, Obligation, ProjectionPredicate, TraitEnvironment}; |
72 | 73 | ||
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs index a6f893037..9ad6dbe07 100644 --- a/crates/ra_hir_ty/src/lower.rs +++ b/crates/ra_hir_ty/src/lower.rs | |||
@@ -17,9 +17,9 @@ use hir_def::{ | |||
17 | path::{GenericArg, Path, PathSegment, PathSegments}, | 17 | path::{GenericArg, Path, PathSegment, PathSegments}, |
18 | resolver::{HasResolver, Resolver, TypeNs}, | 18 | resolver::{HasResolver, Resolver, TypeNs}, |
19 | type_ref::{TypeBound, TypeRef}, | 19 | type_ref::{TypeBound, TypeRef}, |
20 | AdtId, AssocContainerId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, | 20 | AdtId, AssocContainerId, AssocItemId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, |
21 | ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, | 21 | HasModule, ImplId, LocalFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, |
22 | VariantId, | 22 | UnionId, VariantId, |
23 | }; | 23 | }; |
24 | use ra_arena::map::ArenaMap; | 24 | use ra_arena::map::ArenaMap; |
25 | use ra_db::CrateId; | 25 | use ra_db::CrateId; |
@@ -34,6 +34,7 @@ use crate::{ | |||
34 | Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, PolyFnSig, ProjectionPredicate, | 34 | Binders, BoundVar, DebruijnIndex, FnSig, GenericPredicate, PolyFnSig, ProjectionPredicate, |
35 | ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, | 35 | ProjectionTy, Substs, TraitEnvironment, TraitRef, Ty, TypeCtor, TypeWalk, |
36 | }; | 36 | }; |
37 | use hir_expand::name::Name; | ||
37 | 38 | ||
38 | #[derive(Debug)] | 39 | #[derive(Debug)] |
39 | pub struct TyLoweringContext<'a> { | 40 | pub struct TyLoweringContext<'a> { |
@@ -383,61 +384,38 @@ impl Ty { | |||
383 | res: Option<TypeNs>, | 384 | res: Option<TypeNs>, |
384 | segment: PathSegment<'_>, | 385 | segment: PathSegment<'_>, |
385 | ) -> Ty { | 386 | ) -> Ty { |
386 | let traits_from_env: Vec<_> = match res { | 387 | if let Some(res) = res { |
387 | Some(TypeNs::SelfType(impl_id)) => match ctx.db.impl_trait(impl_id) { | 388 | let ty = |
388 | None => return Ty::Unknown, | 389 | associated_type_shorthand_candidates(ctx.db, res, move |name, t, associated_ty| { |
389 | Some(trait_ref) => vec![trait_ref.value], | 390 | if name == segment.name { |
390 | }, | 391 | let substs = match ctx.type_param_mode { |
391 | Some(TypeNs::GenericParam(param_id)) => { | 392 | TypeParamLoweringMode::Placeholder => { |
392 | let predicates = ctx.db.generic_predicates_for_param(param_id); | 393 | // if we're lowering to placeholders, we have to put |
393 | let mut traits_: Vec<_> = predicates | 394 | // them in now |
394 | .iter() | 395 | let s = Substs::type_params( |
395 | .filter_map(|pred| match &pred.value { | 396 | ctx.db, |
396 | GenericPredicate::Implemented(tr) => Some(tr.clone()), | 397 | ctx.resolver.generic_def().expect( |
397 | _ => None, | 398 | "there should be generics if there's a generic param", |
398 | }) | 399 | ), |
399 | .collect(); | 400 | ); |
400 | // Handle `Self::Type` referring to own associated type in trait definitions | 401 | t.substs.clone().subst_bound_vars(&s) |
401 | if let GenericDefId::TraitId(trait_id) = param_id.parent { | 402 | } |
402 | let generics = generics(ctx.db.upcast(), trait_id.into()); | 403 | TypeParamLoweringMode::Variable => t.substs.clone(), |
403 | if generics.params.types[param_id.local_id].provenance | ||
404 | == TypeParamProvenance::TraitSelf | ||
405 | { | ||
406 | let trait_ref = TraitRef { | ||
407 | trait_: trait_id, | ||
408 | substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST), | ||
409 | }; | 404 | }; |
410 | traits_.push(trait_ref); | 405 | // FIXME handle type parameters on the segment |
406 | return Some(Ty::Projection(ProjectionTy { | ||
407 | associated_ty, | ||
408 | parameters: substs, | ||
409 | })); | ||
411 | } | 410 | } |
412 | } | 411 | |
413 | traits_ | 412 | None |
414 | } | 413 | }); |
415 | _ => return Ty::Unknown, | 414 | |
416 | }; | 415 | ty.unwrap_or(Ty::Unknown) |
417 | let traits = traits_from_env.into_iter().flat_map(|t| all_super_trait_refs(ctx.db, t)); | 416 | } else { |
418 | for t in traits { | 417 | Ty::Unknown |
419 | if let Some(associated_ty) = | ||
420 | ctx.db.trait_data(t.trait_).associated_type_by_name(&segment.name) | ||
421 | { | ||
422 | let substs = match ctx.type_param_mode { | ||
423 | TypeParamLoweringMode::Placeholder => { | ||
424 | // if we're lowering to placeholders, we have to put | ||
425 | // them in now | ||
426 | let s = Substs::type_params( | ||
427 | ctx.db, | ||
428 | ctx.resolver | ||
429 | .generic_def() | ||
430 | .expect("there should be generics if there's a generic param"), | ||
431 | ); | ||
432 | t.substs.subst_bound_vars(&s) | ||
433 | } | ||
434 | TypeParamLoweringMode::Variable => t.substs, | ||
435 | }; | ||
436 | // FIXME handle (forbid) type parameters on the segment | ||
437 | return Ty::Projection(ProjectionTy { associated_ty, parameters: substs }); | ||
438 | } | ||
439 | } | 418 | } |
440 | Ty::Unknown | ||
441 | } | 419 | } |
442 | 420 | ||
443 | fn from_hir_path_inner( | 421 | fn from_hir_path_inner( |
@@ -694,6 +672,61 @@ pub fn callable_item_sig(db: &dyn HirDatabase, def: CallableDef) -> PolyFnSig { | |||
694 | } | 672 | } |
695 | } | 673 | } |
696 | 674 | ||
675 | pub fn associated_type_shorthand_candidates<R>( | ||
676 | db: &dyn HirDatabase, | ||
677 | res: TypeNs, | ||
678 | mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>, | ||
679 | ) -> Option<R> { | ||
680 | let traits_from_env: Vec<_> = match res { | ||
681 | TypeNs::SelfType(impl_id) => match db.impl_trait(impl_id) { | ||
682 | None => vec![], | ||
683 | Some(trait_ref) => vec![trait_ref.value], | ||
684 | }, | ||
685 | TypeNs::GenericParam(param_id) => { | ||
686 | let predicates = db.generic_predicates_for_param(param_id); | ||
687 | let mut traits_: Vec<_> = predicates | ||
688 | .iter() | ||
689 | .filter_map(|pred| match &pred.value { | ||
690 | GenericPredicate::Implemented(tr) => Some(tr.clone()), | ||
691 | _ => None, | ||
692 | }) | ||
693 | .collect(); | ||
694 | // Handle `Self::Type` referring to own associated type in trait definitions | ||
695 | if let GenericDefId::TraitId(trait_id) = param_id.parent { | ||
696 | let generics = generics(db.upcast(), trait_id.into()); | ||
697 | if generics.params.types[param_id.local_id].provenance | ||
698 | == TypeParamProvenance::TraitSelf | ||
699 | { | ||
700 | let trait_ref = TraitRef { | ||
701 | trait_: trait_id, | ||
702 | substs: Substs::bound_vars(&generics, DebruijnIndex::INNERMOST), | ||
703 | }; | ||
704 | traits_.push(trait_ref); | ||
705 | } | ||
706 | } | ||
707 | traits_ | ||
708 | } | ||
709 | _ => vec![], | ||
710 | }; | ||
711 | |||
712 | for t in traits_from_env.into_iter().flat_map(move |t| all_super_trait_refs(db, t)) { | ||
713 | let data = db.trait_data(t.trait_); | ||
714 | |||
715 | for (name, assoc_id) in &data.items { | ||
716 | match assoc_id { | ||
717 | AssocItemId::TypeAliasId(alias) => { | ||
718 | if let Some(result) = cb(name, &t, *alias) { | ||
719 | return Some(result); | ||
720 | } | ||
721 | } | ||
722 | AssocItemId::FunctionId(_) | AssocItemId::ConstId(_) => {} | ||
723 | } | ||
724 | } | ||
725 | } | ||
726 | |||
727 | None | ||
728 | } | ||
729 | |||
697 | /// Build the type of all specific fields of a struct or enum variant. | 730 | /// Build the type of all specific fields of a struct or enum variant. |
698 | pub(crate) fn field_types_query( | 731 | pub(crate) fn field_types_query( |
699 | db: &dyn HirDatabase, | 732 | db: &dyn HirDatabase, |
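For readers less familiar with the term: "associated type shorthand" is the `T::Assoc` form that omits the trait, as in the ordinary Rust below. The extracted `associated_type_shorthand_candidates` enumerates the type aliases reachable from the bounds (here `Iterator::Item`), so type lowering and, now, completion can share one traversal.

    fn first<T: Iterator>(mut it: T) -> Option<T::Item> {  // `T::Item` = <T as Iterator>::Item
        it.next()
    }

    fn main() {
        assert_eq!(first(vec![1, 2, 3].into_iter()), Some(1));
    }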
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs index 814354ffa..05f825c6f 100644 --- a/crates/ra_ide/src/completion/complete_dot.rs +++ b/crates/ra_ide/src/completion/complete_dot.rs | |||
@@ -250,6 +250,44 @@ mod tests { | |||
250 | } | 250 | } |
251 | 251 | ||
252 | #[test] | 252 | #[test] |
253 | fn test_union_field_completion() { | ||
254 | assert_debug_snapshot!( | ||
255 | do_ref_completion( | ||
256 | r" | ||
257 | union Un { | ||
258 | field: u8, | ||
259 | other: u16, | ||
260 | } | ||
261 | |||
262 | fn foo(u: Un) { | ||
263 | u.<|> | ||
264 | } | ||
265 | ", | ||
266 | ), | ||
267 | @r###" | ||
268 | [ | ||
269 | CompletionItem { | ||
270 | label: "field", | ||
271 | source_range: 140..140, | ||
272 | delete: 140..140, | ||
273 | insert: "field", | ||
274 | kind: Field, | ||
275 | detail: "u8", | ||
276 | }, | ||
277 | CompletionItem { | ||
278 | label: "other", | ||
279 | source_range: 140..140, | ||
280 | delete: 140..140, | ||
281 | insert: "other", | ||
282 | kind: Field, | ||
283 | detail: "u16", | ||
284 | }, | ||
285 | ] | ||
286 | "### | ||
287 | ); | ||
288 | } | ||
289 | |||
290 | #[test] | ||
253 | fn test_method_completion() { | 291 | fn test_method_completion() { |
254 | assert_debug_snapshot!( | 292 | assert_debug_snapshot!( |
255 | do_ref_completion( | 293 | do_ref_completion( |
diff --git a/crates/ra_ide/src/completion/complete_qualified_path.rs b/crates/ra_ide/src/completion/complete_qualified_path.rs index dd10f74e6..aa56a5cd8 100644 --- a/crates/ra_ide/src/completion/complete_qualified_path.rs +++ b/crates/ra_ide/src/completion/complete_qualified_path.rs | |||
@@ -5,19 +5,29 @@ use ra_syntax::AstNode; | |||
5 | use test_utils::tested_by; | 5 | use test_utils::tested_by; |
6 | 6 | ||
7 | use crate::completion::{CompletionContext, Completions}; | 7 | use crate::completion::{CompletionContext, Completions}; |
8 | use rustc_hash::FxHashSet; | ||
8 | 9 | ||
9 | pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { | 10 | pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionContext) { |
10 | let path = match &ctx.path_prefix { | 11 | let path = match &ctx.path_prefix { |
11 | Some(path) => path.clone(), | 12 | Some(path) => path.clone(), |
12 | _ => return, | 13 | _ => return, |
13 | }; | 14 | }; |
14 | let def = match ctx.scope().resolve_hir_path(&path) { | 15 | let scope = ctx.scope(); |
15 | Some(PathResolution::Def(def)) => def, | 16 | let context_module = scope.module(); |
16 | _ => return, | 17 | |
18 | let res = match scope.resolve_hir_path(&path) { | ||
19 | Some(res) => res, | ||
20 | None => return, | ||
17 | }; | 21 | }; |
18 | let context_module = ctx.scope().module(); | 22 | |
19 | match def { | 23 | // Add associated types on type parameters and `Self`. |
20 | hir::ModuleDef::Module(module) => { | 24 | res.assoc_type_shorthand_candidates(ctx.db, |alias| { |
25 | acc.add_type_alias(ctx, alias); | ||
26 | None::<()> | ||
27 | }); | ||
28 | |||
29 | match res { | ||
30 | PathResolution::Def(hir::ModuleDef::Module(module)) => { | ||
21 | let module_scope = module.scope(ctx.db, context_module); | 31 | let module_scope = module.scope(ctx.db, context_module); |
22 | for (name, def) in module_scope { | 32 | for (name, def) in module_scope { |
23 | if ctx.use_item_syntax.is_some() { | 33 | if ctx.use_item_syntax.is_some() { |
@@ -35,7 +45,8 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
35 | acc.add_resolution(ctx, name.to_string(), &def); | 45 | acc.add_resolution(ctx, name.to_string(), &def); |
36 | } | 46 | } |
37 | } | 47 | } |
38 | hir::ModuleDef::Adt(_) | hir::ModuleDef::TypeAlias(_) => { | 48 | PathResolution::Def(def @ hir::ModuleDef::Adt(_)) |
49 | | PathResolution::Def(def @ hir::ModuleDef::TypeAlias(_)) => { | ||
39 | if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { | 50 | if let hir::ModuleDef::Adt(Adt::Enum(e)) = def { |
40 | for variant in e.variants(ctx.db) { | 51 | for variant in e.variants(ctx.db) { |
41 | acc.add_enum_variant(ctx, variant, None); | 52 | acc.add_enum_variant(ctx, variant, None); |
@@ -46,8 +57,10 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
46 | hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), | 57 | hir::ModuleDef::TypeAlias(a) => a.ty(ctx.db), |
47 | _ => unreachable!(), | 58 | _ => unreachable!(), |
48 | }; | 59 | }; |
49 | // Iterate assoc types separately | 60 | |
50 | // FIXME: complete T::AssocType | 61 | // XXX: For parity with Rust bug #22519, this does not complete Ty::AssocType. |
62 | // (where AssocType is defined on a trait, not an inherent impl) | ||
63 | |||
51 | let krate = ctx.krate; | 64 | let krate = ctx.krate; |
52 | if let Some(krate) = krate { | 65 | if let Some(krate) = krate { |
53 | let traits_in_scope = ctx.scope().traits_in_scope(); | 66 | let traits_in_scope = ctx.scope().traits_in_scope(); |
@@ -65,6 +78,7 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
65 | None::<()> | 78 | None::<()> |
66 | }); | 79 | }); |
67 | 80 | ||
81 | // Iterate assoc types separately | ||
68 | ty.iterate_impl_items(ctx.db, krate, |item| { | 82 | ty.iterate_impl_items(ctx.db, krate, |item| { |
69 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { | 83 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { |
70 | return None; | 84 | return None; |
@@ -77,7 +91,8 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
77 | }); | 91 | }); |
78 | } | 92 | } |
79 | } | 93 | } |
80 | hir::ModuleDef::Trait(t) => { | 94 | PathResolution::Def(hir::ModuleDef::Trait(t)) => { |
95 | // Handles `Trait::assoc` as well as `<Ty as Trait>::assoc`. | ||
81 | for item in t.items(ctx.db) { | 96 | for item in t.items(ctx.db) { |
82 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { | 97 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { |
83 | continue; | 98 | continue; |
@@ -91,8 +106,38 @@ pub(super) fn complete_qualified_path(acc: &mut Completions, ctx: &CompletionCon | |||
91 | } | 106 | } |
92 | } | 107 | } |
93 | } | 108 | } |
109 | PathResolution::TypeParam(_) | PathResolution::SelfType(_) => { | ||
110 | if let Some(krate) = ctx.krate { | ||
111 | let ty = match res { | ||
112 | PathResolution::TypeParam(param) => param.ty(ctx.db), | ||
113 | PathResolution::SelfType(impl_def) => impl_def.target_ty(ctx.db), | ||
114 | _ => return, | ||
115 | }; | ||
116 | |||
117 | let traits_in_scope = ctx.scope().traits_in_scope(); | ||
118 | let mut seen = FxHashSet::default(); | ||
119 | ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { | ||
120 | if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) { | ||
121 | return None; | ||
122 | } | ||
123 | |||
124 | // We might iterate candidates of a trait multiple times here, so deduplicate | ||
125 | // them. | ||
126 | if seen.insert(item) { | ||
127 | match item { | ||
128 | hir::AssocItem::Function(func) => { | ||
129 | acc.add_function(ctx, func, None); | ||
130 | } | ||
131 | hir::AssocItem::Const(ct) => acc.add_const(ctx, ct), | ||
132 | hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), | ||
133 | } | ||
134 | } | ||
135 | None::<()> | ||
136 | }); | ||
137 | } | ||
138 | } | ||
94 | _ => {} | 139 | _ => {} |
95 | }; | 140 | } |
96 | } | 141 | } |
97 | 142 | ||
98 | #[cfg(test)] | 143 | #[cfg(test)] |
@@ -844,6 +889,211 @@ mod tests { | |||
844 | } | 889 | } |
845 | 890 | ||
846 | #[test] | 891 | #[test] |
892 | fn completes_ty_param_assoc_ty() { | ||
893 | assert_debug_snapshot!( | ||
894 | do_reference_completion( | ||
895 | " | ||
896 | //- /lib.rs | ||
897 | trait Super { | ||
898 | type Ty; | ||
899 | const CONST: u8; | ||
900 | fn func() {} | ||
901 | fn method(&self) {} | ||
902 | } | ||
903 | |||
904 | trait Sub: Super { | ||
905 | type SubTy; | ||
906 | const C2: (); | ||
907 | fn subfunc() {} | ||
908 | fn submethod(&self) {} | ||
909 | } | ||
910 | |||
911 | fn foo<T: Sub>() { | ||
912 | T::<|> | ||
913 | } | ||
914 | " | ||
915 | ), | ||
916 | @r###" | ||
917 | [ | ||
918 | CompletionItem { | ||
919 | label: "C2", | ||
920 | source_range: 219..219, | ||
921 | delete: 219..219, | ||
922 | insert: "C2", | ||
923 | kind: Const, | ||
924 | detail: "const C2: ();", | ||
925 | }, | ||
926 | CompletionItem { | ||
927 | label: "CONST", | ||
928 | source_range: 219..219, | ||
929 | delete: 219..219, | ||
930 | insert: "CONST", | ||
931 | kind: Const, | ||
932 | detail: "const CONST: u8;", | ||
933 | }, | ||
934 | CompletionItem { | ||
935 | label: "SubTy", | ||
936 | source_range: 219..219, | ||
937 | delete: 219..219, | ||
938 | insert: "SubTy", | ||
939 | kind: TypeAlias, | ||
940 | detail: "type SubTy;", | ||
941 | }, | ||
942 | CompletionItem { | ||
943 | label: "Ty", | ||
944 | source_range: 219..219, | ||
945 | delete: 219..219, | ||
946 | insert: "Ty", | ||
947 | kind: TypeAlias, | ||
948 | detail: "type Ty;", | ||
949 | }, | ||
950 | CompletionItem { | ||
951 | label: "func()", | ||
952 | source_range: 219..219, | ||
953 | delete: 219..219, | ||
954 | insert: "func()$0", | ||
955 | kind: Function, | ||
956 | lookup: "func", | ||
957 | detail: "fn func()", | ||
958 | }, | ||
959 | CompletionItem { | ||
960 | label: "method()", | ||
961 | source_range: 219..219, | ||
962 | delete: 219..219, | ||
963 | insert: "method()$0", | ||
964 | kind: Method, | ||
965 | lookup: "method", | ||
966 | detail: "fn method(&self)", | ||
967 | }, | ||
968 | CompletionItem { | ||
969 | label: "subfunc()", | ||
970 | source_range: 219..219, | ||
971 | delete: 219..219, | ||
972 | insert: "subfunc()$0", | ||
973 | kind: Function, | ||
974 | lookup: "subfunc", | ||
975 | detail: "fn subfunc()", | ||
976 | }, | ||
977 | CompletionItem { | ||
978 | label: "submethod()", | ||
979 | source_range: 219..219, | ||
980 | delete: 219..219, | ||
981 | insert: "submethod()$0", | ||
982 | kind: Method, | ||
983 | lookup: "submethod", | ||
984 | detail: "fn submethod(&self)", | ||
985 | }, | ||
986 | ] | ||
987 | "### | ||
988 | ); | ||
989 | } | ||
990 | |||
991 | #[test] | ||
992 | fn completes_self_param_assoc_ty() { | ||
993 | assert_debug_snapshot!( | ||
994 | do_reference_completion( | ||
995 | " | ||
996 | //- /lib.rs | ||
997 | trait Super { | ||
998 | type Ty; | ||
999 | const CONST: u8 = 0; | ||
1000 | fn func() {} | ||
1001 | fn method(&self) {} | ||
1002 | } | ||
1003 | |||
1004 | trait Sub: Super { | ||
1005 | type SubTy; | ||
1006 | const C2: () = (); | ||
1007 | fn subfunc() {} | ||
1008 | fn submethod(&self) {} | ||
1009 | } | ||
1010 | |||
1011 | struct Wrap<T>(T); | ||
1012 | impl<T> Super for Wrap<T> {} | ||
1013 | impl<T> Sub for Wrap<T> { | ||
1014 | fn subfunc() { | ||
1015 | // Should be able to assume `Self: Sub + Super` | ||
1016 | Self::<|> | ||
1017 | } | ||
1018 | } | ||
1019 | " | ||
1020 | ), | ||
1021 | @r###" | ||
1022 | [ | ||
1023 | CompletionItem { | ||
1024 | label: "C2", | ||
1025 | source_range: 365..365, | ||
1026 | delete: 365..365, | ||
1027 | insert: "C2", | ||
1028 | kind: Const, | ||
1029 | detail: "const C2: () = ();", | ||
1030 | }, | ||
1031 | CompletionItem { | ||
1032 | label: "CONST", | ||
1033 | source_range: 365..365, | ||
1034 | delete: 365..365, | ||
1035 | insert: "CONST", | ||
1036 | kind: Const, | ||
1037 | detail: "const CONST: u8 = 0;", | ||
1038 | }, | ||
1039 | CompletionItem { | ||
1040 | label: "SubTy", | ||
1041 | source_range: 365..365, | ||
1042 | delete: 365..365, | ||
1043 | insert: "SubTy", | ||
1044 | kind: TypeAlias, | ||
1045 | detail: "type SubTy;", | ||
1046 | }, | ||
1047 | CompletionItem { | ||
1048 | label: "Ty", | ||
1049 | source_range: 365..365, | ||
1050 | delete: 365..365, | ||
1051 | insert: "Ty", | ||
1052 | kind: TypeAlias, | ||
1053 | detail: "type Ty;", | ||
1054 | }, | ||
1055 | CompletionItem { | ||
1056 | label: "func()", | ||
1057 | source_range: 365..365, | ||
1058 | delete: 365..365, | ||
1059 | insert: "func()$0", | ||
1060 | kind: Function, | ||
1061 | lookup: "func", | ||
1062 | detail: "fn func()", | ||
1063 | }, | ||
1064 | CompletionItem { | ||
1065 | label: "method()", | ||
1066 | source_range: 365..365, | ||
1067 | delete: 365..365, | ||
1068 | insert: "method()$0", | ||
1069 | kind: Method, | ||
1070 | lookup: "method", | ||
1071 | detail: "fn method(&self)", | ||
1072 | }, | ||
1073 | CompletionItem { | ||
1074 | label: "subfunc()", | ||
1075 | source_range: 365..365, | ||
1076 | delete: 365..365, | ||
1077 | insert: "subfunc()$0", | ||
1078 | kind: Function, | ||
1079 | lookup: "subfunc", | ||
1080 | detail: "fn subfunc()", | ||
1081 | }, | ||
1082 | CompletionItem { | ||
1083 | label: "submethod()", | ||
1084 | source_range: 365..365, | ||
1085 | delete: 365..365, | ||
1086 | insert: "submethod()$0", | ||
1087 | kind: Method, | ||
1088 | lookup: "submethod", | ||
1089 | detail: "fn submethod(&self)", | ||
1090 | }, | ||
1091 | ] | ||
1092 | "### | ||
1093 | ); | ||
1094 | } | ||
1095 | |||
1096 | #[test] | ||
847 | fn completes_type_alias() { | 1097 | fn completes_type_alias() { |
848 | assert_debug_snapshot!( | 1098 | assert_debug_snapshot!( |
849 | do_reference_completion( | 1099 | do_reference_completion( |
diff --git a/crates/ra_ide/src/completion/complete_unqualified_path.rs b/crates/ra_ide/src/completion/complete_unqualified_path.rs index f559f2b97..a6a5568de 100644 --- a/crates/ra_ide/src/completion/complete_unqualified_path.rs +++ b/crates/ra_ide/src/completion/complete_unqualified_path.rs | |||
@@ -53,7 +53,7 @@ fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &T | |||
53 | // Variants with trivial paths are already added by the existing completion logic, | 53 | // Variants with trivial paths are already added by the existing completion logic, |
54 | // so we should avoid adding these twice | 54 | // so we should avoid adding these twice |
55 | if path.segments.len() > 1 { | 55 | if path.segments.len() > 1 { |
56 | acc.add_enum_variant(ctx, variant, Some(path.to_string())); | 56 | acc.add_qualified_enum_variant(ctx, variant, path); |
57 | } | 57 | } |
58 | } | 58 | } |
59 | } | 59 | } |
@@ -1173,6 +1173,7 @@ mod tests { | |||
1173 | delete: 248..250, | 1173 | delete: 248..250, |
1174 | insert: "Foo::Bar", | 1174 | insert: "Foo::Bar", |
1175 | kind: EnumVariant, | 1175 | kind: EnumVariant, |
1176 | lookup: "Bar", | ||
1176 | detail: "()", | 1177 | detail: "()", |
1177 | }, | 1178 | }, |
1178 | CompletionItem { | 1179 | CompletionItem { |
@@ -1181,6 +1182,7 @@ mod tests { | |||
1181 | delete: 248..250, | 1182 | delete: 248..250, |
1182 | insert: "Foo::Baz", | 1183 | insert: "Foo::Baz", |
1183 | kind: EnumVariant, | 1184 | kind: EnumVariant, |
1185 | lookup: "Baz", | ||
1184 | detail: "()", | 1186 | detail: "()", |
1185 | }, | 1187 | }, |
1186 | CompletionItem { | 1188 | CompletionItem { |
@@ -1189,6 +1191,7 @@ mod tests { | |||
1189 | delete: 248..250, | 1191 | delete: 248..250, |
1190 | insert: "Foo::Quux", | 1192 | insert: "Foo::Quux", |
1191 | kind: EnumVariant, | 1193 | kind: EnumVariant, |
1194 | lookup: "Quux", | ||
1192 | detail: "()", | 1195 | detail: "()", |
1193 | }, | 1196 | }, |
1194 | ] | 1197 | ] |
@@ -1231,6 +1234,7 @@ mod tests { | |||
1231 | delete: 219..221, | 1234 | delete: 219..221, |
1232 | insert: "Foo::Bar", | 1235 | insert: "Foo::Bar", |
1233 | kind: EnumVariant, | 1236 | kind: EnumVariant, |
1237 | lookup: "Bar", | ||
1234 | detail: "()", | 1238 | detail: "()", |
1235 | }, | 1239 | }, |
1236 | CompletionItem { | 1240 | CompletionItem { |
@@ -1239,6 +1243,7 @@ mod tests { | |||
1239 | delete: 219..221, | 1243 | delete: 219..221, |
1240 | insert: "Foo::Baz", | 1244 | insert: "Foo::Baz", |
1241 | kind: EnumVariant, | 1245 | kind: EnumVariant, |
1246 | lookup: "Baz", | ||
1242 | detail: "()", | 1247 | detail: "()", |
1243 | }, | 1248 | }, |
1244 | CompletionItem { | 1249 | CompletionItem { |
@@ -1247,6 +1252,7 @@ mod tests { | |||
1247 | delete: 219..221, | 1252 | delete: 219..221, |
1248 | insert: "Foo::Quux", | 1253 | insert: "Foo::Quux", |
1249 | kind: EnumVariant, | 1254 | kind: EnumVariant, |
1255 | lookup: "Quux", | ||
1250 | detail: "()", | 1256 | detail: "()", |
1251 | }, | 1257 | }, |
1252 | ] | 1258 | ] |
@@ -1285,6 +1291,7 @@ mod tests { | |||
1285 | delete: 185..186, | 1291 | delete: 185..186, |
1286 | insert: "Foo::Bar", | 1292 | insert: "Foo::Bar", |
1287 | kind: EnumVariant, | 1293 | kind: EnumVariant, |
1294 | lookup: "Bar", | ||
1288 | detail: "()", | 1295 | detail: "()", |
1289 | }, | 1296 | }, |
1290 | CompletionItem { | 1297 | CompletionItem { |
@@ -1293,6 +1300,7 @@ mod tests { | |||
1293 | delete: 185..186, | 1300 | delete: 185..186, |
1294 | insert: "Foo::Baz", | 1301 | insert: "Foo::Baz", |
1295 | kind: EnumVariant, | 1302 | kind: EnumVariant, |
1303 | lookup: "Baz", | ||
1296 | detail: "()", | 1304 | detail: "()", |
1297 | }, | 1305 | }, |
1298 | CompletionItem { | 1306 | CompletionItem { |
@@ -1301,6 +1309,7 @@ mod tests { | |||
1301 | delete: 185..186, | 1309 | delete: 185..186, |
1302 | insert: "Foo::Quux", | 1310 | insert: "Foo::Quux", |
1303 | kind: EnumVariant, | 1311 | kind: EnumVariant, |
1312 | lookup: "Quux", | ||
1304 | detail: "()", | 1313 | detail: "()", |
1305 | }, | 1314 | }, |
1306 | CompletionItem { | 1315 | CompletionItem { |
@@ -1353,6 +1362,7 @@ mod tests { | |||
1353 | delete: 98..99, | 1362 | delete: 98..99, |
1354 | insert: "m::E::V", | 1363 | insert: "m::E::V", |
1355 | kind: EnumVariant, | 1364 | kind: EnumVariant, |
1365 | lookup: "V", | ||
1356 | detail: "()", | 1366 | detail: "()", |
1357 | }, | 1367 | }, |
1358 | ] | 1368 | ] |
diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs index 77d354376..2edb130cf 100644 --- a/crates/ra_ide/src/completion/presentation.rs +++ b/crates/ra_ide/src/completion/presentation.rs | |||
@@ -1,6 +1,6 @@ | |||
1 | //! This module takes care of rendering various definitions as completion items. | 1 | //! This module takes care of rendering various definitions as completion items. |
2 | 2 | ||
3 | use hir::{Docs, HasAttrs, HasSource, HirDisplay, ScopeDef, StructKind, Type}; | 3 | use hir::{Docs, HasAttrs, HasSource, HirDisplay, ModPath, ScopeDef, StructKind, Type}; |
4 | use ra_syntax::ast::NameOwner; | 4 | use ra_syntax::ast::NameOwner; |
5 | use stdx::SepBy; | 5 | use stdx::SepBy; |
6 | use test_utils::tested_by; | 6 | use test_utils::tested_by; |
@@ -246,14 +246,37 @@ impl Completions { | |||
246 | .add_to(self); | 246 | .add_to(self); |
247 | } | 247 | } |
248 | 248 | ||
249 | pub(crate) fn add_qualified_enum_variant( | ||
250 | &mut self, | ||
251 | ctx: &CompletionContext, | ||
252 | variant: hir::EnumVariant, | ||
253 | path: ModPath, | ||
254 | ) { | ||
255 | self.add_enum_variant_impl(ctx, variant, None, Some(path)) | ||
256 | } | ||
257 | |||
249 | pub(crate) fn add_enum_variant( | 258 | pub(crate) fn add_enum_variant( |
250 | &mut self, | 259 | &mut self, |
251 | ctx: &CompletionContext, | 260 | ctx: &CompletionContext, |
252 | variant: hir::EnumVariant, | 261 | variant: hir::EnumVariant, |
253 | local_name: Option<String>, | 262 | local_name: Option<String>, |
254 | ) { | 263 | ) { |
264 | self.add_enum_variant_impl(ctx, variant, local_name, None) | ||
265 | } | ||
266 | |||
267 | fn add_enum_variant_impl( | ||
268 | &mut self, | ||
269 | ctx: &CompletionContext, | ||
270 | variant: hir::EnumVariant, | ||
271 | local_name: Option<String>, | ||
272 | path: Option<ModPath>, | ||
273 | ) { | ||
255 | let is_deprecated = is_deprecated(variant, ctx.db); | 274 | let is_deprecated = is_deprecated(variant, ctx.db); |
256 | let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string()); | 275 | let name = local_name.unwrap_or_else(|| variant.name(ctx.db).to_string()); |
276 | let qualified_name = match &path { | ||
277 | Some(it) => it.to_string(), | ||
278 | None => name.to_string(), | ||
279 | }; | ||
257 | let detail_types = variant | 280 | let detail_types = variant |
258 | .fields(ctx.db) | 281 | .fields(ctx.db) |
259 | .into_iter() | 282 | .into_iter() |
@@ -271,16 +294,23 @@ impl Completions { | |||
271 | .surround_with("{ ", " }") | 294 | .surround_with("{ ", " }") |
272 | .to_string(), | 295 | .to_string(), |
273 | }; | 296 | }; |
274 | let mut res = | 297 | let mut res = CompletionItem::new( |
275 | CompletionItem::new(CompletionKind::Reference, ctx.source_range(), name.clone()) | 298 | CompletionKind::Reference, |
276 | .kind(CompletionItemKind::EnumVariant) | 299 | ctx.source_range(), |
277 | .set_documentation(variant.docs(ctx.db)) | 300 | qualified_name.clone(), |
278 | .set_deprecated(is_deprecated) | 301 | ) |
279 | .detail(detail); | 302 | .kind(CompletionItemKind::EnumVariant) |
303 | .set_documentation(variant.docs(ctx.db)) | ||
304 | .set_deprecated(is_deprecated) | ||
305 | .detail(detail); | ||
306 | |||
307 | if path.is_some() { | ||
308 | res = res.lookup_by(name); | ||
309 | } | ||
280 | 310 | ||
281 | if variant_kind == StructKind::Tuple { | 311 | if variant_kind == StructKind::Tuple { |
282 | let params = Params::Anonymous(variant.fields(ctx.db).len()); | 312 | let params = Params::Anonymous(variant.fields(ctx.db).len()); |
283 | res = res.add_call_parens(ctx, name, params) | 313 | res = res.add_call_parens(ctx, qualified_name, params) |
284 | } | 314 | } |
285 | 315 | ||
286 | res.add_to(self); | 316 | res.add_to(self); |
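Note on the change above: a variant reached through a non-trivial path now gets its fully qualified path as the completion label and insert text, while `lookup_by` keeps filtering on the bare variant name. A hypothetical fixture in the style of the snapshot tests earlier in this diff (the enum and the completion position are illustrative, not taken from the test data):

    enum Foo { Bar, Baz, Quux }

    fn main() {
        // Typing `Qu` here is expected to offer `Foo::Quux` as the label and
        // insert text, while the lookup string used for filtering stays `Quux`.
        let _: Foo = Qu<|>;
    }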
diff --git a/crates/ra_ide/src/display/function_signature.rs b/crates/ra_ide/src/display/function_signature.rs index b5e2785fe..db3907fe6 100644 --- a/crates/ra_ide/src/display/function_signature.rs +++ b/crates/ra_ide/src/display/function_signature.rs | |||
@@ -26,6 +26,8 @@ pub struct FunctionSignature { | |||
26 | pub kind: CallableKind, | 26 | pub kind: CallableKind, |
27 | /// Optional visibility | 27 | /// Optional visibility |
28 | pub visibility: Option<String>, | 28 | pub visibility: Option<String>, |
29 | /// Qualifiers like `async`, `unsafe`, ... | ||
30 | pub qualifier: FunctionQualifier, | ||
29 | /// Name of the function | 31 | /// Name of the function |
30 | pub name: Option<String>, | 32 | pub name: Option<String>, |
31 | /// Documentation for the function | 33 | /// Documentation for the function |
@@ -46,6 +48,16 @@ pub struct FunctionSignature { | |||
46 | pub has_self_param: bool, | 48 | pub has_self_param: bool, |
47 | } | 49 | } |
48 | 50 | ||
51 | #[derive(Debug, Default)] | ||
52 | pub struct FunctionQualifier { | ||
53 | // `async` and `const` are mutually exclusive. Do we need to enforce it here? | ||
54 | pub is_async: bool, | ||
55 | pub is_const: bool, | ||
56 | pub is_unsafe: bool, | ||
57 | /// The string `extern ".."` | ||
58 | pub extern_abi: Option<String>, | ||
59 | } | ||
60 | |||
49 | impl FunctionSignature { | 61 | impl FunctionSignature { |
50 | pub(crate) fn with_doc_opt(mut self, doc: Option<Documentation>) -> Self { | 62 | pub(crate) fn with_doc_opt(mut self, doc: Option<Documentation>) -> Self { |
51 | self.doc = doc; | 63 | self.doc = doc; |
@@ -83,6 +95,8 @@ impl FunctionSignature { | |||
83 | FunctionSignature { | 95 | FunctionSignature { |
84 | kind: CallableKind::StructConstructor, | 96 | kind: CallableKind::StructConstructor, |
85 | visibility: node.visibility().map(|n| n.syntax().text().to_string()), | 97 | visibility: node.visibility().map(|n| n.syntax().text().to_string()), |
98 | // Do we need `const`? | ||
99 | qualifier: Default::default(), | ||
86 | name: node.name().map(|n| n.text().to_string()), | 100 | name: node.name().map(|n| n.text().to_string()), |
87 | ret_type: node.name().map(|n| n.text().to_string()), | 101 | ret_type: node.name().map(|n| n.text().to_string()), |
88 | parameters: params, | 102 | parameters: params, |
@@ -128,6 +142,8 @@ impl FunctionSignature { | |||
128 | FunctionSignature { | 142 | FunctionSignature { |
129 | kind: CallableKind::VariantConstructor, | 143 | kind: CallableKind::VariantConstructor, |
130 | visibility: None, | 144 | visibility: None, |
145 | // Do we need `const`? | ||
146 | qualifier: Default::default(), | ||
131 | name: Some(name), | 147 | name: Some(name), |
132 | ret_type: None, | 148 | ret_type: None, |
133 | parameters: params, | 149 | parameters: params, |
@@ -151,6 +167,7 @@ impl FunctionSignature { | |||
151 | FunctionSignature { | 167 | FunctionSignature { |
152 | kind: CallableKind::Macro, | 168 | kind: CallableKind::Macro, |
153 | visibility: None, | 169 | visibility: None, |
170 | qualifier: Default::default(), | ||
154 | name: node.name().map(|n| n.text().to_string()), | 171 | name: node.name().map(|n| n.text().to_string()), |
155 | ret_type: None, | 172 | ret_type: None, |
156 | parameters: params, | 173 | parameters: params, |
@@ -223,6 +240,12 @@ impl From<&'_ ast::FnDef> for FunctionSignature { | |||
223 | FunctionSignature { | 240 | FunctionSignature { |
224 | kind: CallableKind::Function, | 241 | kind: CallableKind::Function, |
225 | visibility: node.visibility().map(|n| n.syntax().text().to_string()), | 242 | visibility: node.visibility().map(|n| n.syntax().text().to_string()), |
243 | qualifier: FunctionQualifier { | ||
244 | is_async: node.async_token().is_some(), | ||
245 | is_const: node.const_token().is_some(), | ||
246 | is_unsafe: node.unsafe_token().is_some(), | ||
247 | extern_abi: node.abi().map(|n| n.to_string()), | ||
248 | }, | ||
226 | name: node.name().map(|n| n.text().to_string()), | 249 | name: node.name().map(|n| n.text().to_string()), |
227 | ret_type: node | 250 | ret_type: node |
228 | .ret_type() | 251 | .ret_type() |
@@ -246,6 +269,23 @@ impl Display for FunctionSignature { | |||
246 | write!(f, "{} ", t)?; | 269 | write!(f, "{} ", t)?; |
247 | } | 270 | } |
248 | 271 | ||
272 | if self.qualifier.is_async { | ||
273 | write!(f, "async ")?; | ||
274 | } | ||
275 | |||
276 | if self.qualifier.is_const { | ||
277 | write!(f, "const ")?; | ||
278 | } | ||
279 | |||
280 | if self.qualifier.is_unsafe { | ||
281 | write!(f, "unsafe ")?; | ||
282 | } | ||
283 | |||
284 | if let Some(extern_abi) = &self.qualifier.extern_abi { | ||
285 | // Keyword `extern` is included in the string. | ||
286 | write!(f, "{} ", extern_abi)?; | ||
287 | } | ||
288 | |||
249 | if let Some(name) = &self.name { | 289 | if let Some(name) = &self.name { |
250 | match self.kind { | 290 | match self.kind { |
251 | CallableKind::Function => write!(f, "fn {}", name)?, | 291 | CallableKind::Function => write!(f, "fn {}", name)?, |
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs index 58c799eca..a62f598f0 100644 --- a/crates/ra_ide/src/hover.rs +++ b/crates/ra_ide/src/hover.rs | |||
@@ -844,4 +844,29 @@ fn func(foo: i32) { if true { <|>foo; }; } | |||
844 | &["fn foo()\n```\n\n<- `\u{3000}` here"], | 844 | &["fn foo()\n```\n\n<- `\u{3000}` here"], |
845 | ); | 845 | ); |
846 | } | 846 | } |
847 | |||
848 | #[test] | ||
849 | fn test_hover_function_show_qualifiers() { | ||
850 | check_hover_result( | ||
851 | " | ||
852 | //- /lib.rs | ||
853 | async fn foo<|>() {} | ||
854 | ", | ||
855 | &["async fn foo()"], | ||
856 | ); | ||
857 | check_hover_result( | ||
858 | " | ||
859 | //- /lib.rs | ||
860 | pub const unsafe fn foo<|>() {} | ||
861 | ", | ||
862 | &["pub const unsafe fn foo()"], | ||
863 | ); | ||
864 | check_hover_result( | ||
865 | r#" | ||
866 | //- /lib.rs | ||
867 | pub(crate) async unsafe extern "C" fn foo<|>() {} | ||
868 | "#, | ||
869 | &[r#"pub(crate) async unsafe extern "C" fn foo()"#], | ||
870 | ); | ||
871 | } | ||
847 | } | 872 | } |
diff --git a/crates/ra_ide/src/join_lines.rs b/crates/ra_ide/src/join_lines.rs index fde0bfa98..d0def7eaa 100644 --- a/crates/ra_ide/src/join_lines.rs +++ b/crates/ra_ide/src/join_lines.rs | |||
@@ -131,6 +131,9 @@ fn has_comma_after(node: &SyntaxNode) -> bool { | |||
131 | fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { | 131 | fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> { |
132 | let block = ast::Block::cast(token.parent())?; | 132 | let block = ast::Block::cast(token.parent())?; |
133 | let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; | 133 | let block_expr = ast::BlockExpr::cast(block.syntax().parent()?)?; |
134 | if !block_expr.is_standalone() { | ||
135 | return None; | ||
136 | } | ||
134 | let expr = extract_trivial_expression(&block_expr)?; | 137 | let expr = extract_trivial_expression(&block_expr)?; |
135 | 138 | ||
136 | let block_range = block_expr.syntax().text_range(); | 139 | let block_range = block_expr.syntax().text_range(); |
@@ -662,4 +665,67 @@ fn main() { | |||
662 | ", | 665 | ", |
663 | ) | 666 | ) |
664 | } | 667 | } |
668 | |||
669 | #[test] | ||
670 | fn join_lines_mandatory_blocks_block() { | ||
671 | check_join_lines( | ||
672 | r" | ||
673 | <|>fn foo() { | ||
674 | 92 | ||
675 | } | ||
676 | ", | ||
677 | r" | ||
678 | <|>fn foo() { 92 | ||
679 | } | ||
680 | ", | ||
681 | ); | ||
682 | |||
683 | check_join_lines( | ||
684 | r" | ||
685 | fn foo() { | ||
686 | <|>if true { | ||
687 | 92 | ||
688 | } | ||
689 | } | ||
690 | ", | ||
691 | r" | ||
692 | fn foo() { | ||
693 | <|>if true { 92 | ||
694 | } | ||
695 | } | ||
696 | ", | ||
697 | ); | ||
698 | |||
699 | check_join_lines( | ||
700 | r" | ||
701 | fn foo() { | ||
702 | <|>loop { | ||
703 | 92 | ||
704 | } | ||
705 | } | ||
706 | ", | ||
707 | r" | ||
708 | fn foo() { | ||
709 | <|>loop { 92 | ||
710 | } | ||
711 | } | ||
712 | ", | ||
713 | ); | ||
714 | |||
715 | check_join_lines( | ||
716 | r" | ||
717 | fn foo() { | ||
718 | <|>unsafe { | ||
719 | 92 | ||
720 | } | ||
721 | } | ||
722 | ", | ||
723 | r" | ||
724 | fn foo() { | ||
725 | <|>unsafe { 92 | ||
726 | } | ||
727 | } | ||
728 | ", | ||
729 | ); | ||
730 | } | ||
665 | } | 731 | } |
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs index 0d277a586..76aa601cb 100644 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ b/crates/ra_parser/src/grammar/expressions/atom.rs | |||
@@ -84,7 +84,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
84 | T![box] => box_expr(p, None), | 84 | T![box] => box_expr(p, None), |
85 | T![for] => for_expr(p, None), | 85 | T![for] => for_expr(p, None), |
86 | T![while] => while_expr(p, None), | 86 | T![while] => while_expr(p, None), |
87 | T![try] => try_block_expr(p, None), | 87 | T![try] => try_expr(p, None), |
88 | LIFETIME if la == T![:] => { | 88 | LIFETIME if la == T![:] => { |
89 | let m = p.start(); | 89 | let m = p.start(); |
90 | label(p); | 90 | label(p); |
@@ -134,7 +134,7 @@ pub(super) fn atom_expr(p: &mut Parser, r: Restrictions) -> Option<(CompletedMar | |||
134 | } | 134 | } |
135 | }; | 135 | }; |
136 | let blocklike = match done.kind() { | 136 | let blocklike = match done.kind() { |
137 | IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | TRY_BLOCK_EXPR => { | 137 | IF_EXPR | WHILE_EXPR | FOR_EXPR | LOOP_EXPR | MATCH_EXPR | BLOCK_EXPR | TRY_EXPR => { |
138 | BlockLike::Block | 138 | BlockLike::Block |
139 | } | 139 | } |
140 | _ => BlockLike::NotBlock, | 140 | _ => BlockLike::NotBlock, |
@@ -532,9 +532,25 @@ fn break_expr(p: &mut Parser, r: Restrictions) -> CompletedMarker { | |||
532 | // fn foo() { | 532 | // fn foo() { |
533 | // let _ = try {}; | 533 | // let _ = try {}; |
534 | // } | 534 | // } |
535 | fn try_block_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { | 535 | fn try_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker { |
536 | assert!(p.at(T![try])); | 536 | assert!(p.at(T![try])); |
537 | let m = m.unwrap_or_else(|| p.start()); | 537 | let m = m.unwrap_or_else(|| p.start()); |
538 | // Special-case `try!` as a macro. | ||
539 | // This is a hack until we do proper edition support | ||
540 | if p.nth_at(1, T![!]) { | ||
541 | // test try_macro_fallback | ||
542 | // fn foo() { try!(Ok(())); } | ||
543 | let path = p.start(); | ||
544 | let path_segment = p.start(); | ||
545 | let name_ref = p.start(); | ||
546 | p.bump_remap(IDENT); | ||
547 | name_ref.complete(p, NAME_REF); | ||
548 | path_segment.complete(p, PATH_SEGMENT); | ||
549 | path.complete(p, PATH); | ||
550 | let _block_like = items::macro_call_after_excl(p); | ||
551 | return m.complete(p, MACRO_CALL); | ||
552 | } | ||
553 | |||
538 | p.bump(T![try]); | 554 | p.bump(T![try]); |
539 | block(p); | 555 | block(p); |
540 | m.complete(p, TRY_EXPR) | 556 | m.complete(p, TRY_EXPR) |
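The fallback above keeps 2015-edition sources parsing: when `try` is immediately followed by `!`, the token is remapped to an identifier and the whole expression is parsed as a macro call rather than a try-block. A minimal sketch mirroring the inline `// test` fixtures above (the second function is illustrative and only contrasts the two parse paths):

    // Parses as MACRO_CALL through the fallback above (Rust 2015 style).
    fn foo() { try!(Ok(())); }

    // Parses as TRY_EXPR: `try` directly followed by a block.
    fn bar() { let _ = try {}; }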
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index 433ed6812..1503a8730 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs | |||
@@ -415,6 +415,17 @@ pub(super) fn macro_call_after_excl(p: &mut Parser) -> BlockLike { | |||
415 | if p.at(IDENT) { | 415 | if p.at(IDENT) { |
416 | name(p); | 416 | name(p); |
417 | } | 417 | } |
418 | // Special-case `macro_rules! try`. | ||
419 | // This is a hack until we do proper edition support | ||
420 | |||
421 | // test try_macro_rules | ||
422 | // macro_rules! try { () => {} } | ||
423 | if p.at(T![try]) { | ||
424 | let m = p.start(); | ||
425 | p.bump_remap(IDENT); | ||
426 | m.complete(p, NAME); | ||
427 | } | ||
428 | |||
418 | match p.current() { | 429 | match p.current() { |
419 | T!['{'] => { | 430 | T!['{'] => { |
420 | token_tree(p); | 431 | token_tree(p); |
diff --git a/crates/ra_parser/src/grammar/items/use_item.rs b/crates/ra_parser/src/grammar/items/use_item.rs index e3b991c8c..3a0c7a31a 100644 --- a/crates/ra_parser/src/grammar/items/use_item.rs +++ b/crates/ra_parser/src/grammar/items/use_item.rs | |||
@@ -47,7 +47,7 @@ fn use_tree(p: &mut Parser, top_level: bool) { | |||
47 | // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) | 47 | // use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) |
48 | // use {path::from::root}; // Rust 2015 | 48 | // use {path::from::root}; // Rust 2015 |
49 | // use ::{some::arbritrary::path}; // Rust 2015 | 49 | // use ::{some::arbritrary::path}; // Rust 2015 |
50 | // use ::{{{crate::export}}}; // Nonsensical but perfectly legal nestnig | 50 | // use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting |
51 | T!['{'] => { | 51 | T!['{'] => { |
52 | use_tree_list(p); | 52 | use_tree_list(p); |
53 | } | 53 | } |
diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs index 524e7d784..ab727ed7e 100644 --- a/crates/ra_parser/src/syntax_kind/generated.rs +++ b/crates/ra_parser/src/syntax_kind/generated.rs | |||
@@ -191,7 +191,6 @@ pub enum SyntaxKind { | |||
191 | RECORD_LIT, | 191 | RECORD_LIT, |
192 | RECORD_FIELD_LIST, | 192 | RECORD_FIELD_LIST, |
193 | RECORD_FIELD, | 193 | RECORD_FIELD, |
194 | TRY_BLOCK_EXPR, | ||
195 | BOX_EXPR, | 194 | BOX_EXPR, |
196 | CALL_EXPR, | 195 | CALL_EXPR, |
197 | INDEX_EXPR, | 196 | INDEX_EXPR, |
diff --git a/crates/ra_proc_macro_srv/src/cli.rs b/crates/ra_proc_macro_srv/src/cli.rs index 7282e5b9b..1437794c9 100644 --- a/crates/ra_proc_macro_srv/src/cli.rs +++ b/crates/ra_proc_macro_srv/src/cli.rs | |||
@@ -1,15 +1,17 @@ | |||
1 | //! Driver for proc macro server | 1 | //! Driver for proc macro server |
2 | 2 | ||
3 | use crate::{expand_task, list_macros}; | 3 | use crate::ProcMacroSrv; |
4 | use ra_proc_macro::msg::{self, Message}; | 4 | use ra_proc_macro::msg::{self, Message}; |
5 | use std::io; | 5 | use std::io; |
6 | 6 | ||
7 | pub fn run() -> io::Result<()> { | 7 | pub fn run() -> io::Result<()> { |
8 | let mut srv = ProcMacroSrv::default(); | ||
9 | |||
8 | while let Some(req) = read_request()? { | 10 | while let Some(req) = read_request()? { |
9 | let res = match req { | 11 | let res = match req { |
10 | msg::Request::ListMacro(task) => Ok(msg::Response::ListMacro(list_macros(&task))), | 12 | msg::Request::ListMacro(task) => srv.list_macros(&task).map(msg::Response::ListMacro), |
11 | msg::Request::ExpansionMacro(task) => { | 13 | msg::Request::ExpansionMacro(task) => { |
12 | expand_task(&task).map(msg::Response::ExpansionMacro) | 14 | srv.expand(&task).map(msg::Response::ExpansionMacro) |
13 | } | 15 | } |
14 | }; | 16 | }; |
15 | 17 | ||
diff --git a/crates/ra_proc_macro_srv/src/dylib.rs b/crates/ra_proc_macro_srv/src/dylib.rs index d202eb0fd..aa84e951c 100644 --- a/crates/ra_proc_macro_srv/src/dylib.rs +++ b/crates/ra_proc_macro_srv/src/dylib.rs | |||
@@ -2,13 +2,12 @@ | |||
2 | 2 | ||
3 | use crate::{proc_macro::bridge, rustc_server::TokenStream}; | 3 | use crate::{proc_macro::bridge, rustc_server::TokenStream}; |
4 | use std::fs::File; | 4 | use std::fs::File; |
5 | use std::path::Path; | 5 | use std::path::{Path, PathBuf}; |
6 | 6 | ||
7 | use goblin::{mach::Mach, Object}; | 7 | use goblin::{mach::Mach, Object}; |
8 | use libloading::Library; | 8 | use libloading::Library; |
9 | use memmap::Mmap; | 9 | use memmap::Mmap; |
10 | use ra_proc_macro::ProcMacroKind; | 10 | use ra_proc_macro::ProcMacroKind; |
11 | |||
12 | use std::io; | 11 | use std::io; |
13 | 12 | ||
14 | const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; | 13 | const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; |
@@ -109,23 +108,21 @@ impl ProcMacroLibraryLibloading { | |||
109 | } | 108 | } |
110 | } | 109 | } |
111 | 110 | ||
112 | type ProcMacroLibraryImpl = ProcMacroLibraryLibloading; | ||
113 | |||
114 | pub struct Expander { | 111 | pub struct Expander { |
115 | libs: Vec<ProcMacroLibraryImpl>, | 112 | inner: ProcMacroLibraryLibloading, |
116 | } | 113 | } |
117 | 114 | ||
118 | impl Expander { | 115 | impl Expander { |
119 | pub fn new(lib: &Path) -> Result<Expander, String> { | 116 | pub fn new(lib: &Path) -> io::Result<Expander> { |
120 | // Some libraries for dynamic loading require canonicalized path even when it is | 117 | // Some libraries for dynamic loading require canonicalized path even when it is |
121 | // already absolute | 118 | // already absolute |
122 | let lib = lib | 119 | let lib = lib.canonicalize()?; |
123 | .canonicalize() | 120 | |
124 | .unwrap_or_else(|err| panic!("Cannot canonicalize {}: {:?}", lib.display(), err)); | 121 | let lib = ensure_file_with_lock_free_access(&lib)?; |
125 | 122 | ||
126 | let library = ProcMacroLibraryImpl::open(&lib).map_err(|e| e.to_string())?; | 123 | let library = ProcMacroLibraryLibloading::open(&lib)?; |
127 | 124 | ||
128 | Ok(Expander { libs: vec![library] }) | 125 | Ok(Expander { inner: library }) |
129 | } | 126 | } |
130 | 127 | ||
131 | pub fn expand( | 128 | pub fn expand( |
@@ -141,38 +138,36 @@ impl Expander { | |||
141 | TokenStream::with_subtree(attr.clone()) | 138 | TokenStream::with_subtree(attr.clone()) |
142 | }); | 139 | }); |
143 | 140 | ||
144 | for lib in &self.libs { | 141 | for proc_macro in &self.inner.exported_macros { |
145 | for proc_macro in &lib.exported_macros { | 142 | match proc_macro { |
146 | match proc_macro { | 143 | bridge::client::ProcMacro::CustomDerive { trait_name, client, .. } |
147 | bridge::client::ProcMacro::CustomDerive { trait_name, client, .. } | 144 | if *trait_name == macro_name => |
148 | if *trait_name == macro_name => | 145 | { |
149 | { | 146 | let res = client.run( |
150 | let res = client.run( | 147 | &crate::proc_macro::bridge::server::SameThread, |
151 | &crate::proc_macro::bridge::server::SameThread, | 148 | crate::rustc_server::Rustc::default(), |
152 | crate::rustc_server::Rustc::default(), | 149 | parsed_body, |
153 | parsed_body, | 150 | ); |
154 | ); | 151 | return res.map(|it| it.subtree); |
155 | return res.map(|it| it.subtree); | 152 | } |
156 | } | 153 | bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { |
157 | bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { | 154 | let res = client.run( |
158 | let res = client.run( | 155 | &crate::proc_macro::bridge::server::SameThread, |
159 | &crate::proc_macro::bridge::server::SameThread, | 156 | crate::rustc_server::Rustc::default(), |
160 | crate::rustc_server::Rustc::default(), | 157 | parsed_body, |
161 | parsed_body, | 158 | ); |
162 | ); | 159 | return res.map(|it| it.subtree); |
163 | return res.map(|it| it.subtree); | 160 | } |
164 | } | 161 | bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { |
165 | bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { | 162 | let res = client.run( |
166 | let res = client.run( | 163 | &crate::proc_macro::bridge::server::SameThread, |
167 | &crate::proc_macro::bridge::server::SameThread, | 164 | crate::rustc_server::Rustc::default(), |
168 | crate::rustc_server::Rustc::default(), | 165 | parsed_attributes, |
169 | parsed_attributes, | 166 | parsed_body, |
170 | parsed_body, | 167 | ); |
171 | ); | 168 | return res.map(|it| it.subtree); |
172 | return res.map(|it| it.subtree); | ||
173 | } | ||
174 | _ => continue, | ||
175 | } | 169 | } |
170 | _ => continue, | ||
176 | } | 171 | } |
177 | } | 172 | } |
178 | 173 | ||
@@ -180,9 +175,9 @@ impl Expander { | |||
180 | } | 175 | } |
181 | 176 | ||
182 | pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { | 177 | pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> { |
183 | self.libs | 178 | self.inner |
179 | .exported_macros | ||
184 | .iter() | 180 | .iter() |
185 | .flat_map(|it| &it.exported_macros) | ||
186 | .map(|proc_macro| match proc_macro { | 181 | .map(|proc_macro| match proc_macro { |
187 | bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { | 182 | bridge::client::ProcMacro::CustomDerive { trait_name, .. } => { |
188 | (trait_name.to_string(), ProcMacroKind::CustomDerive) | 183 | (trait_name.to_string(), ProcMacroKind::CustomDerive) |
@@ -197,3 +192,33 @@ impl Expander { | |||
197 | .collect() | 192 | .collect() |
198 | } | 193 | } |
199 | } | 194 | } |
195 | |||
195 | /// Copy the dylib to a temp directory to prevent file locking on Windows | ||
197 | #[cfg(windows)] | ||
198 | fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> { | ||
199 | use std::{ffi::OsString, time::SystemTime}; | ||
200 | |||
201 | let mut to = std::env::temp_dir(); | ||
202 | |||
203 | let file_name = path.file_name().ok_or_else(|| { | ||
204 | io::Error::new( | ||
205 | io::ErrorKind::InvalidInput, | ||
206 | format!("File path is invalid: {}", path.display()), | ||
207 | ) | ||
208 | })?; | ||
209 | |||
210 | // generate a time-dependent unique number | ||
211 | let t = SystemTime::now().duration_since(std::time::UNIX_EPOCH).expect("Time went backwards"); | ||
212 | |||
213 | let mut unique_name = OsString::from(t.as_millis().to_string()); | ||
214 | unique_name.push(file_name); | ||
215 | |||
216 | to.push(unique_name); | ||
217 | std::fs::copy(path, &to).unwrap(); | ||
218 | Ok(to) | ||
219 | } | ||
220 | |||
221 | #[cfg(unix)] | ||
222 | fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> { | ||
223 | Ok(path.to_path_buf()) | ||
224 | } | ||
diff --git a/crates/ra_proc_macro_srv/src/lib.rs b/crates/ra_proc_macro_srv/src/lib.rs index 3aca859db..922bb84bb 100644 --- a/crates/ra_proc_macro_srv/src/lib.rs +++ b/crates/ra_proc_macro_srv/src/lib.rs | |||
@@ -21,28 +21,46 @@ mod dylib; | |||
21 | 21 | ||
22 | use proc_macro::bridge::client::TokenStream; | 22 | use proc_macro::bridge::client::TokenStream; |
23 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; | 23 | use ra_proc_macro::{ExpansionResult, ExpansionTask, ListMacrosResult, ListMacrosTask}; |
24 | use std::path::Path; | 24 | use std::{ |
25 | collections::{hash_map::Entry, HashMap}, | ||
26 | fs, | ||
27 | path::{Path, PathBuf}, | ||
28 | time::SystemTime, | ||
29 | }; | ||
25 | 30 | ||
26 | pub(crate) fn expand_task(task: &ExpansionTask) -> Result<ExpansionResult, String> { | 31 | #[derive(Default)] |
27 | let expander = create_expander(&task.lib); | 32 | pub(crate) struct ProcMacroSrv { |
33 | expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, | ||
34 | } | ||
28 | 35 | ||
29 | match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { | 36 | impl ProcMacroSrv { |
30 | Ok(expansion) => Ok(ExpansionResult { expansion }), | 37 | pub fn expand(&mut self, task: &ExpansionTask) -> Result<ExpansionResult, String> { |
31 | Err(msg) => { | 38 | let expander = self.expander(&task.lib)?; |
32 | Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) | 39 | match expander.expand(&task.macro_name, &task.macro_body, task.attributes.as_ref()) { |
40 | Ok(expansion) => Ok(ExpansionResult { expansion }), | ||
41 | Err(msg) => { | ||
42 | Err(format!("Cannot perform expansion for {}: error {:?}", &task.macro_name, msg)) | ||
43 | } | ||
33 | } | 44 | } |
34 | } | 45 | } |
35 | } | ||
36 | 46 | ||
37 | pub(crate) fn list_macros(task: &ListMacrosTask) -> ListMacrosResult { | 47 | pub fn list_macros(&mut self, task: &ListMacrosTask) -> Result<ListMacrosResult, String> { |
38 | let expander = create_expander(&task.lib); | 48 | let expander = self.expander(&task.lib)?; |
49 | Ok(ListMacrosResult { macros: expander.list_macros() }) | ||
50 | } | ||
39 | 51 | ||
40 | ListMacrosResult { macros: expander.list_macros() } | 52 | fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { |
41 | } | 53 | let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| { |
54 | format!("Failed to get file metadata for {}: {:?}", path.display(), err) | ||
55 | })?; | ||
42 | 56 | ||
43 | fn create_expander(lib: &Path) -> dylib::Expander { | 57 | Ok(match self.expanders.entry((path.to_path_buf(), time)) { |
44 | dylib::Expander::new(lib) | 58 | Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| { |
45 | .unwrap_or_else(|err| panic!("Cannot create expander for {}: {:?}", lib.display(), err)) | 59 | format!("Cannot create expander for {}: {:?}", path.display(), err) |
60 | })?), | ||
61 | Entry::Occupied(e) => e.into_mut(), | ||
62 | }) | ||
63 | } | ||
46 | } | 64 | } |
47 | 65 | ||
48 | pub mod cli; | 66 | pub mod cli; |
diff --git a/crates/ra_proc_macro_srv/src/tests/utils.rs b/crates/ra_proc_macro_srv/src/tests/utils.rs index 2139ec7a4..646a427c5 100644 --- a/crates/ra_proc_macro_srv/src/tests/utils.rs +++ b/crates/ra_proc_macro_srv/src/tests/utils.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! utils used in proc-macro tests | 1 | //! utils used in proc-macro tests |
2 | 2 | ||
3 | use crate::dylib; | 3 | use crate::dylib; |
4 | use crate::list_macros; | 4 | use crate::ProcMacroSrv; |
5 | pub use difference::Changeset as __Changeset; | 5 | pub use difference::Changeset as __Changeset; |
6 | use ra_proc_macro::ListMacrosTask; | 6 | use ra_proc_macro::ListMacrosTask; |
7 | use std::str::FromStr; | 7 | use std::str::FromStr; |
@@ -59,7 +59,7 @@ pub fn assert_expand( | |||
59 | pub fn list(crate_name: &str, version: &str) -> Vec<String> { | 59 | pub fn list(crate_name: &str, version: &str) -> Vec<String> { |
60 | let path = fixtures::dylib_path(crate_name, version); | 60 | let path = fixtures::dylib_path(crate_name, version); |
61 | let task = ListMacrosTask { lib: path }; | 61 | let task = ListMacrosTask { lib: path }; |
62 | 62 | let mut srv = ProcMacroSrv::default(); | |
63 | let res = list_macros(&task); | 63 | let res = srv.list_macros(&task).unwrap(); |
64 | res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() | 64 | res.macros.into_iter().map(|(name, kind)| format!("{} [{:?}]", name, kind)).collect() |
65 | } | 65 | } |
diff --git a/crates/ra_prof/src/hprof.rs b/crates/ra_prof/src/hprof.rs index 2b8a90363..a3f5321fb 100644 --- a/crates/ra_prof/src/hprof.rs +++ b/crates/ra_prof/src/hprof.rs | |||
@@ -30,8 +30,9 @@ pub fn init_from(spec: &str) { | |||
30 | pub type Label = &'static str; | 30 | pub type Label = &'static str; |
31 | 31 | ||
32 | /// This function starts a profiling scope in the current execution stack with a given description. | 32 | /// This function starts a profiling scope in the current execution stack with a given description. |
33 | /// It returns a Profile structure and measure elapsed time between this method invocation and Profile structure drop. | 33 | /// It returns a `Profile` struct that measures elapsed time between this method invocation and `Profile` struct drop. |
34 | /// It supports nested profiling scopes in case when this function invoked multiple times at the execution stack. In this case the profiling information will be nested at the output. | 34 | /// It supports nested profiling scopes in case this function is invoked multiple times on the execution stack. |
35 | /// In this case the profiling information will be nested in the output. | ||
35 | /// Profiling information is printed to stderr. | 36 | /// Profiling information is printed to stderr. |
36 | /// | 37 | /// |
37 | /// # Example | 38 | /// # Example |
@@ -58,36 +59,35 @@ pub type Label = &'static str; | |||
58 | /// ``` | 59 | /// ``` |
59 | pub fn profile(label: Label) -> Profiler { | 60 | pub fn profile(label: Label) -> Profiler { |
60 | assert!(!label.is_empty()); | 61 | assert!(!label.is_empty()); |
61 | let enabled = PROFILING_ENABLED.load(Ordering::Relaxed) | 62 | |
62 | && PROFILE_STACK.with(|stack| stack.borrow_mut().push(label)); | 63 | if PROFILING_ENABLED.load(Ordering::Relaxed) |
63 | let label = if enabled { Some(label) } else { None }; | 64 | && PROFILE_STACK.with(|stack| stack.borrow_mut().push(label)) |
64 | Profiler { label, detail: None } | 65 | { |
66 | Profiler(Some(ProfilerImpl { label, detail: None })) | ||
67 | } else { | ||
68 | Profiler(None) | ||
69 | } | ||
65 | } | 70 | } |
66 | 71 | ||
67 | pub struct Profiler { | 72 | pub struct Profiler(Option<ProfilerImpl>); |
68 | label: Option<Label>, | 73 | |
74 | struct ProfilerImpl { | ||
75 | label: Label, | ||
69 | detail: Option<String>, | 76 | detail: Option<String>, |
70 | } | 77 | } |
71 | 78 | ||
72 | impl Profiler { | 79 | impl Profiler { |
73 | pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler { | 80 | pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler { |
74 | if self.label.is_some() { | 81 | if let Some(profiler) = &mut self.0 { |
75 | self.detail = Some(detail()) | 82 | profiler.detail = Some(detail()) |
76 | } | 83 | } |
77 | self | 84 | self |
78 | } | 85 | } |
79 | } | 86 | } |
80 | 87 | ||
81 | impl Drop for Profiler { | 88 | impl Drop for ProfilerImpl { |
82 | fn drop(&mut self) { | 89 | fn drop(&mut self) { |
83 | match self { | 90 | PROFILE_STACK.with(|it| it.borrow_mut().pop(self.label, self.detail.take())); |
84 | Profiler { label: Some(label), detail } => { | ||
85 | PROFILE_STACK.with(|stack| { | ||
86 | stack.borrow_mut().pop(label, detail.take()); | ||
87 | }); | ||
88 | } | ||
89 | Profiler { label: None, .. } => (), | ||
90 | } | ||
91 | } | 91 | } |
92 | } | 92 | } |
93 | 93 | ||
@@ -179,21 +179,18 @@ impl ProfileStack { | |||
179 | pub fn pop(&mut self, label: Label, detail: Option<String>) { | 179 | pub fn pop(&mut self, label: Label, detail: Option<String>) { |
180 | let start = self.starts.pop().unwrap(); | 180 | let start = self.starts.pop().unwrap(); |
181 | let duration = start.elapsed(); | 181 | let duration = start.elapsed(); |
182 | let level = self.starts.len(); | ||
183 | self.messages.finish(Message { duration, label, detail }); | 182 | self.messages.finish(Message { duration, label, detail }); |
184 | if level == 0 { | 183 | if self.starts.is_empty() { |
185 | let longer_than = self.filter.longer_than; | 184 | let longer_than = self.filter.longer_than; |
186 | // Convert to millis for comparison to avoid problems with rounding | 185 | // Convert to millis for comparison to avoid problems with rounding |
187 | // (otherwise we could print `0ms` despite user's `>0` filter when | 186 | // (otherwise we could print `0ms` despite user's `>0` filter when |
188 | // `duration` is just a few nanos). | 187 | // `duration` is just a few nanos). |
189 | if duration.as_millis() > longer_than.as_millis() { | 188 | if duration.as_millis() > longer_than.as_millis() { |
190 | let stderr = stderr(); | ||
191 | if let Some(root) = self.messages.root() { | 189 | if let Some(root) = self.messages.root() { |
192 | print(&self.messages, root, 0, longer_than, &mut stderr.lock()); | 190 | print(&self.messages, root, 0, longer_than, &mut stderr().lock()); |
193 | } | 191 | } |
194 | } | 192 | } |
195 | self.messages.clear(); | 193 | self.messages.clear(); |
196 | assert!(self.starts.is_empty()) | ||
197 | } | 194 | } |
198 | } | 195 | } |
199 | } | 196 | } |
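The public surface of the profiler is unchanged by this refactoring: `profile` still returns a guard that records the elapsed time of the enclosing scope when dropped, and `detail` attaches extra text only when profiling is enabled. A minimal usage sketch (the label and detail string are made up for illustration):

    use ra_prof::profile;

    fn compute() {
        // Dropped at the end of the scope; the closure passed to `detail`
        // is only evaluated when profiling is actually enabled.
        let _p = profile("compute").detail(|| "extra context".to_string());
        // ... expensive work ...
    }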
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs index 7fca5661e..521ca8ab8 100644 --- a/crates/ra_syntax/src/ast.rs +++ b/crates/ra_syntax/src/ast.rs | |||
@@ -16,7 +16,9 @@ use crate::{ | |||
16 | }; | 16 | }; |
17 | 17 | ||
18 | pub use self::{ | 18 | pub use self::{ |
19 | expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp, RangeOp}, | 19 | expr_extensions::{ |
20 | ArrayExprKind, BinOp, BlockModifier, ElseBranch, LiteralKind, PrefixOp, RangeOp, | ||
21 | }, | ||
20 | extensions::{ | 22 | extensions::{ |
21 | AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, | 23 | AttrKind, FieldKind, NameOrNameRef, PathSegmentKind, SelfParamKind, SlicePatComponents, |
22 | StructKind, TypeBoundKind, VisibilityKind, | 24 | StructKind, TypeBoundKind, VisibilityKind, |
@@ -243,6 +245,21 @@ fn test_comments_preserve_trailing_whitespace() { | |||
243 | } | 245 | } |
244 | 246 | ||
245 | #[test] | 247 | #[test] |
248 | fn test_four_slash_line_comment() { | ||
249 | let file = SourceFile::parse( | ||
250 | r#" | ||
251 | //// too many slashes to be a doc comment | ||
252 | /// doc comment | ||
253 | mod foo {} | ||
254 | "#, | ||
255 | ) | ||
256 | .ok() | ||
257 | .unwrap(); | ||
258 | let module = file.syntax().descendants().find_map(Module::cast).unwrap(); | ||
259 | assert_eq!("doc comment", module.doc_comment_text().unwrap()); | ||
260 | } | ||
261 | |||
262 | #[test] | ||
246 | fn test_where_predicates() { | 263 | fn test_where_predicates() { |
247 | fn assert_bound(text: &str, bound: Option<TypeBound>) { | 264 | fn assert_bound(text: &str, bound: Option<TypeBound>) { |
248 | assert_eq!(text, bound.unwrap().syntax().text().to_string()); | 265 | assert_eq!(text, bound.unwrap().syntax().text().to_string()); |
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 1c1134bc5..329c80749 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs | |||
@@ -16,7 +16,7 @@ impl ast::Expr { | |||
16 | | ast::Expr::WhileExpr(_) | 16 | | ast::Expr::WhileExpr(_) |
17 | | ast::Expr::BlockExpr(_) | 17 | | ast::Expr::BlockExpr(_) |
18 | | ast::Expr::MatchExpr(_) | 18 | | ast::Expr::MatchExpr(_) |
19 | | ast::Expr::TryBlockExpr(_) => true, | 19 | | ast::Expr::TryExpr(_) => true, |
20 | _ => false, | 20 | _ => false, |
21 | } | 21 | } |
22 | } | 22 | } |
@@ -359,7 +359,22 @@ impl ast::Literal { | |||
359 | } | 359 | } |
360 | } | 360 | } |
361 | 361 | ||
362 | pub enum BlockModifier { | ||
363 | Async(SyntaxToken), | ||
364 | Unsafe(SyntaxToken), | ||
365 | } | ||
366 | |||
362 | impl ast::BlockExpr { | 367 | impl ast::BlockExpr { |
368 | pub fn modifier(&self) -> Option<BlockModifier> { | ||
369 | if let Some(token) = self.async_token() { | ||
370 | return Some(BlockModifier::Async(token)); | ||
371 | } | ||
372 | if let Some(token) = self.unsafe_token() { | ||
373 | return Some(BlockModifier::Unsafe(token)); | ||
374 | } | ||
375 | None | ||
376 | } | ||
377 | |||
363 | /// false if the block is an intrinsic part of the syntax and can't be | 378 | /// false if the block is an intrinsic part of the syntax and can't be |
364 | /// replaced with arbitrary expression. | 379 | /// replaced with arbitrary expression. |
365 | /// | 380 | /// |
@@ -368,12 +383,15 @@ impl ast::BlockExpr { | |||
368 | /// const FOO: () = { stand_alone }; | 383 | /// const FOO: () = { stand_alone }; |
369 | /// ``` | 384 | /// ``` |
370 | pub fn is_standalone(&self) -> bool { | 385 | pub fn is_standalone(&self) -> bool { |
371 | let kind = match self.syntax().parent() { | 386 | if self.modifier().is_some() { |
387 | return false; | ||
388 | } | ||
389 | let parent = match self.syntax().parent() { | ||
390 | Some(it) => it, | ||
372 | None => return true, | 391 | None => return true, |
373 | Some(it) => it.kind(), | ||
374 | }; | 392 | }; |
375 | match kind { | 393 | match parent.kind() { |
376 | FN_DEF | MATCH_ARM | IF_EXPR | WHILE_EXPR | LOOP_EXPR | TRY_BLOCK_EXPR => false, | 394 | FN_DEF | IF_EXPR | WHILE_EXPR | LOOP_EXPR => false, |
377 | _ => true, | 395 | _ => true, |
378 | } | 396 | } |
379 | } | 397 | } |
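With the `modifier` accessor in place, `is_standalone` now also reports `false` for `async { .. }` and `unsafe { .. }` blocks, which is exactly what the join-lines guard earlier in this diff relies on. A small sketch of how a caller might use it (the helper name is hypothetical):

    use ra_syntax::ast;

    fn block_can_be_collapsed(block: &ast::BlockExpr) -> bool {
        // `async`/`unsafe` blocks and the bodies of fn/if/while/loop are
        // intrinsic syntax; they report `false` here and are left alone by
        // transformations such as join-lines.
        block.is_standalone()
    }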
diff --git a/crates/ra_syntax/src/ast/generated/nodes.rs b/crates/ra_syntax/src/ast/generated/nodes.rs index 2cb3ad011..81260680f 100644 --- a/crates/ra_syntax/src/ast/generated/nodes.rs +++ b/crates/ra_syntax/src/ast/generated/nodes.rs | |||
@@ -476,16 +476,6 @@ impl LoopExpr { | |||
476 | } | 476 | } |
477 | 477 | ||
478 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 478 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
479 | pub struct TryBlockExpr { | ||
480 | pub(crate) syntax: SyntaxNode, | ||
481 | } | ||
482 | impl ast::AttrsOwner for TryBlockExpr {} | ||
483 | impl TryBlockExpr { | ||
484 | pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) } | ||
485 | pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) } | ||
486 | } | ||
487 | |||
488 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
489 | pub struct ForExpr { | 479 | pub struct ForExpr { |
490 | pub(crate) syntax: SyntaxNode, | 480 | pub(crate) syntax: SyntaxNode, |
491 | } | 481 | } |
@@ -554,6 +544,7 @@ impl ast::AttrsOwner for BlockExpr {} | |||
554 | impl BlockExpr { | 544 | impl BlockExpr { |
555 | pub fn label(&self) -> Option<Label> { support::child(&self.syntax) } | 545 | pub fn label(&self) -> Option<Label> { support::child(&self.syntax) } |
556 | pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } | 546 | pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) } |
547 | pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) } | ||
557 | pub fn block(&self) -> Option<Block> { support::child(&self.syntax) } | 548 | pub fn block(&self) -> Option<Block> { support::child(&self.syntax) } |
558 | } | 549 | } |
559 | 550 | ||
@@ -1249,6 +1240,7 @@ pub struct PathSegment { | |||
1249 | } | 1240 | } |
1250 | impl PathSegment { | 1241 | impl PathSegment { |
1251 | pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } | 1242 | pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) } |
1243 | pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) } | ||
1252 | pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } | 1244 | pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) } |
1253 | pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } | 1245 | pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) } |
1254 | pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) } | 1246 | pub fn type_arg_list(&self) -> Option<TypeArgList> { support::child(&self.syntax) } |
@@ -1473,7 +1465,6 @@ pub enum Expr { | |||
1473 | FieldExpr(FieldExpr), | 1465 | FieldExpr(FieldExpr), |
1474 | AwaitExpr(AwaitExpr), | 1466 | AwaitExpr(AwaitExpr), |
1475 | TryExpr(TryExpr), | 1467 | TryExpr(TryExpr), |
1476 | TryBlockExpr(TryBlockExpr), | ||
1477 | CastExpr(CastExpr), | 1468 | CastExpr(CastExpr), |
1478 | RefExpr(RefExpr), | 1469 | RefExpr(RefExpr), |
1479 | PrefixExpr(PrefixExpr), | 1470 | PrefixExpr(PrefixExpr), |
@@ -1956,17 +1947,6 @@ impl AstNode for LoopExpr { | |||
1956 | } | 1947 | } |
1957 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | 1948 | fn syntax(&self) -> &SyntaxNode { &self.syntax } |
1958 | } | 1949 | } |
1959 | impl AstNode for TryBlockExpr { | ||
1960 | fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_BLOCK_EXPR } | ||
1961 | fn cast(syntax: SyntaxNode) -> Option<Self> { | ||
1962 | if Self::can_cast(syntax.kind()) { | ||
1963 | Some(Self { syntax }) | ||
1964 | } else { | ||
1965 | None | ||
1966 | } | ||
1967 | } | ||
1968 | fn syntax(&self) -> &SyntaxNode { &self.syntax } | ||
1969 | } | ||
1970 | impl AstNode for ForExpr { | 1950 | impl AstNode for ForExpr { |
1971 | fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR } | 1951 | fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR } |
1972 | fn cast(syntax: SyntaxNode) -> Option<Self> { | 1952 | fn cast(syntax: SyntaxNode) -> Option<Self> { |
@@ -3308,9 +3288,6 @@ impl From<AwaitExpr> for Expr { | |||
3308 | impl From<TryExpr> for Expr { | 3288 | impl From<TryExpr> for Expr { |
3309 | fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } | 3289 | fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } |
3310 | } | 3290 | } |
3311 | impl From<TryBlockExpr> for Expr { | ||
3312 | fn from(node: TryBlockExpr) -> Expr { Expr::TryBlockExpr(node) } | ||
3313 | } | ||
3314 | impl From<CastExpr> for Expr { | 3291 | impl From<CastExpr> for Expr { |
3315 | fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) } | 3292 | fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) } |
3316 | } | 3293 | } |
@@ -3341,9 +3318,8 @@ impl AstNode for Expr { | |||
3341 | TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR | 3318 | TUPLE_EXPR | ARRAY_EXPR | PAREN_EXPR | PATH_EXPR | LAMBDA_EXPR | IF_EXPR |
3342 | | LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL | 3319 | | LOOP_EXPR | FOR_EXPR | WHILE_EXPR | CONTINUE_EXPR | BREAK_EXPR | LABEL |
3343 | | BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR | 3320 | | BLOCK_EXPR | RETURN_EXPR | MATCH_EXPR | RECORD_LIT | CALL_EXPR | INDEX_EXPR |
3344 | | METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | TRY_BLOCK_EXPR | 3321 | | METHOD_CALL_EXPR | FIELD_EXPR | AWAIT_EXPR | TRY_EXPR | CAST_EXPR | REF_EXPR |
3345 | | CAST_EXPR | REF_EXPR | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | 3322 | | PREFIX_EXPR | RANGE_EXPR | BIN_EXPR | LITERAL | MACRO_CALL | BOX_EXPR => true, |
3346 | | BOX_EXPR => true, | ||
3347 | _ => false, | 3323 | _ => false, |
3348 | } | 3324 | } |
3349 | } | 3325 | } |
@@ -3371,7 +3347,6 @@ impl AstNode for Expr { | |||
3371 | FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }), | 3347 | FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }), |
3372 | AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }), | 3348 | AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }), |
3373 | TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), | 3349 | TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), |
3374 | TRY_BLOCK_EXPR => Expr::TryBlockExpr(TryBlockExpr { syntax }), | ||
3375 | CAST_EXPR => Expr::CastExpr(CastExpr { syntax }), | 3350 | CAST_EXPR => Expr::CastExpr(CastExpr { syntax }), |
3376 | REF_EXPR => Expr::RefExpr(RefExpr { syntax }), | 3351 | REF_EXPR => Expr::RefExpr(RefExpr { syntax }), |
3377 | PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }), | 3352 | PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }), |
@@ -3408,7 +3383,6 @@ impl AstNode for Expr { | |||
3408 | Expr::FieldExpr(it) => &it.syntax, | 3383 | Expr::FieldExpr(it) => &it.syntax, |
3409 | Expr::AwaitExpr(it) => &it.syntax, | 3384 | Expr::AwaitExpr(it) => &it.syntax, |
3410 | Expr::TryExpr(it) => &it.syntax, | 3385 | Expr::TryExpr(it) => &it.syntax, |
3411 | Expr::TryBlockExpr(it) => &it.syntax, | ||
3412 | Expr::CastExpr(it) => &it.syntax, | 3386 | Expr::CastExpr(it) => &it.syntax, |
3413 | Expr::RefExpr(it) => &it.syntax, | 3387 | Expr::RefExpr(it) => &it.syntax, |
3414 | Expr::PrefixExpr(it) => &it.syntax, | 3388 | Expr::PrefixExpr(it) => &it.syntax, |
@@ -3889,11 +3863,6 @@ impl std::fmt::Display for LoopExpr { | |||
3889 | std::fmt::Display::fmt(self.syntax(), f) | 3863 | std::fmt::Display::fmt(self.syntax(), f) |
3890 | } | 3864 | } |
3891 | } | 3865 | } |
3892 | impl std::fmt::Display for TryBlockExpr { | ||
3893 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { | ||
3894 | std::fmt::Display::fmt(self.syntax(), f) | ||
3895 | } | ||
3896 | } | ||
3897 | impl std::fmt::Display for ForExpr { | 3866 | impl std::fmt::Display for ForExpr { |
3898 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { | 3867 | fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { |
3899 | std::fmt::Display::fmt(self.syntax(), f) | 3868 | std::fmt::Display::fmt(self.syntax(), f) |
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index ee0f5cc40..492088353 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -22,8 +22,7 @@ pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path { | |||
22 | pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { | 22 | pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path { |
23 | path_from_text(&format!("{}::{}", qual, segment)) | 23 | path_from_text(&format!("{}::{}", qual, segment)) |
24 | } | 24 | } |
25 | 25 | fn path_from_text(text: &str) -> ast::Path { | |
26 | pub fn path_from_text(text: &str) -> ast::Path { | ||
27 | ast_from_text(text) | 26 | ast_from_text(text) |
28 | } | 27 | } |
29 | 28 | ||
diff --git a/crates/ra_syntax/src/ast/tokens.rs b/crates/ra_syntax/src/ast/tokens.rs index 3865729b8..74906d8a6 100644 --- a/crates/ra_syntax/src/ast/tokens.rs +++ b/crates/ra_syntax/src/ast/tokens.rs | |||
@@ -13,7 +13,12 @@ impl Comment { | |||
13 | } | 13 | } |
14 | 14 | ||
15 | pub fn prefix(&self) -> &'static str { | 15 | pub fn prefix(&self) -> &'static str { |
16 | prefix_by_kind(self.kind()) | 16 | for (prefix, k) in COMMENT_PREFIX_TO_KIND.iter() { |
17 | if *k == self.kind() && self.text().starts_with(prefix) { | ||
18 | return prefix; | ||
19 | } | ||
20 | } | ||
21 | unreachable!() | ||
17 | } | 22 | } |
18 | } | 23 | } |
19 | 24 | ||
@@ -48,6 +53,7 @@ pub enum CommentPlacement { | |||
48 | const COMMENT_PREFIX_TO_KIND: &[(&str, CommentKind)] = { | 53 | const COMMENT_PREFIX_TO_KIND: &[(&str, CommentKind)] = { |
49 | use {CommentPlacement::*, CommentShape::*}; | 54 | use {CommentPlacement::*, CommentShape::*}; |
50 | &[ | 55 | &[ |
56 | ("////", CommentKind { shape: Line, doc: None }), | ||
51 | ("///", CommentKind { shape: Line, doc: Some(Outer) }), | 57 | ("///", CommentKind { shape: Line, doc: Some(Outer) }), |
52 | ("//!", CommentKind { shape: Line, doc: Some(Inner) }), | 58 | ("//!", CommentKind { shape: Line, doc: Some(Inner) }), |
53 | ("/**", CommentKind { shape: Block, doc: Some(Outer) }), | 59 | ("/**", CommentKind { shape: Block, doc: Some(Outer) }), |
@@ -69,15 +75,6 @@ fn kind_by_prefix(text: &str) -> CommentKind { | |||
69 | panic!("bad comment text: {:?}", text) | 75 | panic!("bad comment text: {:?}", text) |
70 | } | 76 | } |
71 | 77 | ||
72 | fn prefix_by_kind(kind: CommentKind) -> &'static str { | ||
73 | for (prefix, k) in COMMENT_PREFIX_TO_KIND.iter() { | ||
74 | if *k == kind { | ||
75 | return prefix; | ||
76 | } | ||
77 | } | ||
78 | unreachable!() | ||
79 | } | ||
80 | |||
81 | impl Whitespace { | 78 | impl Whitespace { |
82 | pub fn spans_multiple_lines(&self) -> bool { | 79 | pub fn spans_multiple_lines(&self) -> bool { |
83 | let text = self.text(); | 80 | let text = self.text(); |
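Note on the `Comment::prefix` change above: `////` is now registered ahead of `///` so that four or more slashes are treated as a plain line comment rather than an outer doc comment, and `prefix()` matches against the comment's own text instead of only its kind. A minimal, self-contained sketch of that lookup idea (illustrative names, not the `ra_syntax` API; the real `COMMENT_PREFIX_TO_KIND` table also covers block comments):

```rust
// Pick the first registered prefix that the comment text actually starts with.
fn prefix_of(text: &str) -> &'static str {
    // Order matters: "////" must be tried before "///" because "////" also
    // starts with "///".
    const PREFIXES: &[&str] = &["////", "///", "//!", "//"];
    for &prefix in PREFIXES {
        if text.starts_with(prefix) {
            return prefix;
        }
    }
    panic!("bad comment text: {:?}", text)
}

fn main() {
    assert_eq!(prefix_of("//// not a doc comment"), "////");
    assert_eq!(prefix_of("/// an outer doc comment"), "///");
}
```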
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 5e93895ec..f0b3dec63 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -96,6 +96,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> { | |||
96 | ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors), | 96 | ast::RecordField(it) => validate_numeric_name(it.name_ref(), &mut errors), |
97 | ast::Visibility(it) => validate_visibility(it, &mut errors), | 97 | ast::Visibility(it) => validate_visibility(it, &mut errors), |
98 | ast::RangeExpr(it) => validate_range_expr(it, &mut errors), | 98 | ast::RangeExpr(it) => validate_range_expr(it, &mut errors), |
99 | ast::PathSegment(it) => validate_crate_keyword_in_path_segment(it, &mut errors), | ||
99 | _ => (), | 100 | _ => (), |
100 | } | 101 | } |
101 | } | 102 | } |
@@ -222,3 +223,60 @@ fn validate_range_expr(expr: ast::RangeExpr, errors: &mut Vec<SyntaxError>) { | |||
222 | )); | 223 | )); |
223 | } | 224 | } |
224 | } | 225 | } |
226 | |||
227 | fn validate_crate_keyword_in_path_segment( | ||
228 | segment: ast::PathSegment, | ||
229 | errors: &mut Vec<SyntaxError>, | ||
230 | ) { | ||
231 | const ERR_MSG: &str = "The `crate` keyword is only allowed as the first segment of a path"; | ||
232 | |||
233 | let crate_token = match segment.crate_token() { | ||
234 | None => return, | ||
235 | Some(it) => it, | ||
236 | }; | ||
237 | |||
238 | // Disallow both ::crate and foo::crate | ||
239 | let mut path = segment.parent_path(); | ||
240 | if segment.coloncolon_token().is_some() || path.qualifier().is_some() { | ||
241 | errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range())); | ||
242 | return; | ||
243 | } | ||
244 | |||
245 | // For expressions and types, validation is complete, but we still have | ||
246 | // to handle invalid UseItems like this: | ||
247 | // | ||
248 | // use foo::{crate::bar::baz}; | ||
249 | // | ||
250 | // To handle this we must inspect the parent `UseItem`s and `UseTree`s | ||
251 | // but right now we're looking deep inside the nested `Path` nodes because | ||
252 | // `Path`s are left-associative: | ||
253 | // | ||
254 | // ((crate)::bar)::baz | ||
255 | // ^ current value of path | ||
256 | // | ||
257 | // So we need to climb to the top | ||
258 | while let Some(parent) = path.parent_path() { | ||
259 | path = parent; | ||
260 | } | ||
261 | |||
262 | // Now that we've found the whole path we need to see if there's a prefix | ||
263 | // somewhere in the UseTree hierarchy. This check is arbitrarily deep | ||
264 | // because rust allows arbitrary nesting like so: | ||
265 | // | ||
266 | // use {foo::{{{{crate::bar::baz}}}}}; | ||
267 | for node in path.syntax().ancestors().skip(1) { | ||
268 | match_ast! { | ||
269 | match node { | ||
270 | ast::UseTree(it) => if let Some(tree_path) = it.path() { | ||
271 | // Even a top-level path exists within a `UseTree` so we must explicitly | ||
272 | // allow our path but disallow anything else | ||
273 | if tree_path != path { | ||
274 | errors.push(SyntaxError::new(ERR_MSG, crate_token.text_range())); | ||
275 | } | ||
276 | }, | ||
277 | ast::UseTreeList(_it) => continue, | ||
278 | _ => return, | ||
279 | } | ||
280 | }; | ||
281 | } | ||
282 | } | ||
diff --git a/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast b/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast new file mode 100644 index 000000000..d2a549273 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rast | |||
@@ -0,0 +1,91 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] "use" | ||
4 | [email protected] " " | ||
5 | [email protected] | ||
6 | [email protected] | ||
7 | [email protected] | ||
8 | [email protected] "::" | ||
9 | [email protected] "crate" | ||
10 | [email protected] ";" | ||
11 | [email protected] "\n" | ||
12 | [email protected] | ||
13 | [email protected] "use" | ||
14 | [email protected] " " | ||
15 | [email protected] | ||
16 | [email protected] | ||
17 | [email protected] "{" | ||
18 | [email protected] | ||
19 | [email protected] | ||
20 | [email protected] | ||
21 | [email protected] "crate" | ||
22 | [email protected] "," | ||
23 | [email protected] " " | ||
24 | [email protected] | ||
25 | [email protected] | ||
26 | [email protected] | ||
27 | [email protected] | ||
28 | [email protected] "foo" | ||
29 | [email protected] "::" | ||
30 | [email protected] | ||
31 | [email protected] "{" | ||
32 | [email protected] | ||
33 | [email protected] | ||
34 | [email protected] | ||
35 | [email protected] | ||
36 | [email protected] | ||
37 | [email protected] | ||
38 | [email protected] "crate" | ||
39 | [email protected] "::" | ||
40 | [email protected] | ||
41 | [email protected] | ||
42 | [email protected] "foo" | ||
43 | [email protected] "::" | ||
44 | [email protected] | ||
45 | [email protected] | ||
46 | [email protected] "bar" | ||
47 | [email protected] "::" | ||
48 | [email protected] | ||
49 | [email protected] | ||
50 | [email protected] "baz" | ||
51 | [email protected] "}" | ||
52 | [email protected] "}" | ||
53 | [email protected] ";" | ||
54 | [email protected] "\n" | ||
55 | [email protected] | ||
56 | [email protected] "use" | ||
57 | [email protected] " " | ||
58 | [email protected] | ||
59 | [email protected] | ||
60 | [email protected] | ||
61 | [email protected] | ||
62 | [email protected] | ||
63 | [email protected] "hello" | ||
64 | [email protected] "::" | ||
65 | [email protected] | ||
66 | [email protected] "crate" | ||
67 | [email protected] ";" | ||
68 | [email protected] "\n" | ||
69 | [email protected] | ||
70 | [email protected] "use" | ||
71 | [email protected] " " | ||
72 | [email protected] | ||
73 | [email protected] | ||
74 | [email protected] | ||
75 | [email protected] | ||
76 | [email protected] | ||
77 | [email protected] | ||
78 | [email protected] "hello" | ||
79 | [email protected] "::" | ||
80 | [email protected] | ||
81 | [email protected] "crate" | ||
82 | [email protected] "::" | ||
83 | [email protected] | ||
84 | [email protected] | ||
85 | [email protected] "there" | ||
86 | [email protected] ";" | ||
87 | [email protected] "\n" | ||
88 | error 6..11: The `crate` keyword is only allowed as the first segment of a path | ||
89 | error 31..36: The `crate` keyword is only allowed as the first segment of a path | ||
90 | error 66..71: The `crate` keyword is only allowed as the first segment of a path | ||
91 | error 84..89: The `crate` keyword is only allowed as the first segment of a path | ||
diff --git a/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs b/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs new file mode 100644 index 000000000..508def2c7 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/err/0040_illegal_crate_kw_location.rs | |||
@@ -0,0 +1,4 @@ | |||
1 | use ::crate; | ||
2 | use {crate, foo::{crate::foo::bar::baz}}; | ||
3 | use hello::crate; | ||
4 | use hello::crate::there; | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rast b/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rast index bd74b44a6..cf3a90400 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rast +++ b/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rast | |||
@@ -1,4 +1,4 @@ | |||
1 | [email protected]50 | 1 | [email protected]49 |
2 | [email protected] | 2 | [email protected] |
3 | [email protected] "use" | 3 | [email protected] "use" |
4 | [email protected] " " | 4 | [email protected] " " |
@@ -104,32 +104,33 @@ [email protected] | |||
104 | [email protected] " " | 104 | [email protected] " " |
105 | [email protected] "// Rust 2015" | 105 | [email protected] "// Rust 2015" |
106 | [email protected] "\n" | 106 | [email protected] "\n" |
107 | [email protected]6 | 107 | [email protected]5 |
108 | [email protected] "use" | 108 | [email protected] "use" |
109 | [email protected] " " | 109 | [email protected] " " |
110 | [email protected]5 | 110 | [email protected]4 |
111 | [email protected] "::" | 111 | [email protected] "::" |
112 | [email protected]5 | 112 | [email protected]4 |
113 | [email protected] "{" | 113 | [email protected] "{" |
114 | [email protected]4 | 114 | [email protected]3 |
115 | [email protected]4 | 115 | [email protected]3 |
116 | [email protected] "{" | 116 | [email protected] "{" |
117 | [email protected]3 | 117 | [email protected]2 |
118 | [email protected]3 | 118 | [email protected]2 |
119 | [email protected] "{" | 119 | [email protected] "{" |
120 | [email protected] | 120 | [email protected] |
121 | [email protected] | 121 | [email protected] |
122 | [email protected] | 122 | [email protected] |
123 | [email protected] | 123 | [email protected] |
124 | [email protected] "crate" | 124 | [email protected] |
125 | [email protected] "::" | 125 | [email protected] "root" |
126 | [email protected] | 126 | [email protected] "::" |
127 | [email protected] | 127 | [email protected] |
128 | [email protected] "export" | 128 | [email protected] |
129 | [email protected] "}" | 129 | [email protected] "export" |
130 | [email protected] "}" | 130 | [email protected] "}" |
131 | [email protected] "}" | 131 | [email protected] "}" |
132 | [email protected] ";" | 132 | [email protected] "}" |
133 | [email protected] " " | 133 | [email protected] ";" |
134 | [email protected] "// Nonsensical but pe ..." | 134 | [email protected] " " |
135 | [email protected] "\n" | 135 | [email protected] "// Nonsensical but pe ..." |
136 | [email protected] "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rs b/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rs index 06c387cee..381cba1e2 100644 --- a/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rs +++ b/crates/ra_syntax/test_data/parser/inline/ok/0002_use_tree_list.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) | 1 | use {crate::path::from::root, or::path::from::crate_name}; // Rust 2018 (with a crate named `or`) |
2 | use {path::from::root}; // Rust 2015 | 2 | use {path::from::root}; // Rust 2015 |
3 | use ::{some::arbritrary::path}; // Rust 2015 | 3 | use ::{some::arbritrary::path}; // Rust 2015 |
4 | use ::{{{crate::export}}}; // Nonsensical but perfectly legal nestnig | 4 | use ::{{{root::export}}}; // Nonsensical but perfectly legal nesting |
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast b/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast new file mode 100644 index 000000000..beb6d8010 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rast | |||
@@ -0,0 +1,35 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] "fn" | ||
4 | [email protected] " " | ||
5 | [email protected] | ||
6 | [email protected] "foo" | ||
7 | [email protected] | ||
8 | [email protected] "(" | ||
9 | [email protected] ")" | ||
10 | [email protected] " " | ||
11 | [email protected] | ||
12 | [email protected] | ||
13 | [email protected] "{" | ||
14 | [email protected] " " | ||
15 | [email protected] | ||
16 | [email protected] | ||
17 | [email protected] | ||
18 | [email protected] | ||
19 | [email protected] | ||
20 | [email protected] "try" | ||
21 | [email protected] "!" | ||
22 | [email protected] | ||
23 | [email protected] "(" | ||
24 | [email protected] "Ok" | ||
25 | [email protected] | ||
26 | [email protected] "(" | ||
27 | [email protected] | ||
28 | [email protected] "(" | ||
29 | [email protected] ")" | ||
30 | [email protected] ")" | ||
31 | [email protected] ")" | ||
32 | [email protected] ";" | ||
33 | [email protected] " " | ||
34 | [email protected] "}" | ||
35 | [email protected] "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs b/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs new file mode 100644 index 000000000..61a6b46a0 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0159_try_macro_fallback.rs | |||
@@ -0,0 +1 @@ | |||
fn foo() { try!(Ok(())); } | |||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast b/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast new file mode 100644 index 000000000..05b89d1c3 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rast | |||
@@ -0,0 +1,27 @@ | |||
1 | [email protected] | ||
2 | [email protected] | ||
3 | [email protected] | ||
4 | [email protected] | ||
5 | [email protected] | ||
6 | [email protected] "macro_rules" | ||
7 | [email protected] "!" | ||
8 | [email protected] " " | ||
9 | [email protected] | ||
10 | [email protected] "try" | ||
11 | [email protected] " " | ||
12 | [email protected] | ||
13 | [email protected] "{" | ||
14 | [email protected] " " | ||
15 | [email protected] | ||
16 | [email protected] "(" | ||
17 | [email protected] ")" | ||
18 | [email protected] " " | ||
19 | [email protected] "=" | ||
20 | [email protected] ">" | ||
21 | [email protected] " " | ||
22 | [email protected] | ||
23 | [email protected] "{" | ||
24 | [email protected] "}" | ||
25 | [email protected] " " | ||
26 | [email protected] "}" | ||
27 | [email protected] "\n" | ||
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs b/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs new file mode 100644 index 000000000..2e2ab6e60 --- /dev/null +++ b/crates/ra_syntax/test_data/parser/inline/ok/0160_try_macro_rules.rs | |||
@@ -0,0 +1 @@ | |||
macro_rules! try { () => {} } | |||
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index cee0248b6..0459807fc 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -20,7 +20,7 @@ globset = "0.4.4" | |||
20 | itertools = "0.9.0" | 20 | itertools = "0.9.0" |
21 | jod-thread = "0.1.0" | 21 | jod-thread = "0.1.0" |
22 | log = "0.4.8" | 22 | log = "0.4.8" |
23 | lsp-types = { version = "0.73.0", features = ["proposed"] } | 23 | lsp-types = { version = "0.74.0", features = ["proposed"] } |
24 | parking_lot = "0.10.0" | 24 | parking_lot = "0.10.0" |
25 | pico-args = "0.3.1" | 25 | pico-args = "0.3.1" |
26 | rand = { version = "0.7.3", features = ["small_rng"] } | 26 | rand = { version = "0.7.3", features = ["small_rng"] } |
@@ -39,7 +39,7 @@ ra_prof = { path = "../ra_prof" } | |||
39 | ra_project_model = { path = "../ra_project_model" } | 39 | ra_project_model = { path = "../ra_project_model" } |
40 | ra_syntax = { path = "../ra_syntax" } | 40 | ra_syntax = { path = "../ra_syntax" } |
41 | ra_text_edit = { path = "../ra_text_edit" } | 41 | ra_text_edit = { path = "../ra_text_edit" } |
42 | ra_vfs = "0.5.2" | 42 | ra_vfs = "0.6.0" |
43 | 43 | ||
44 | # This should only be used in CLI | 44 | # This should only be used in CLI |
45 | ra_db = { path = "../ra_db" } | 45 | ra_db = { path = "../ra_db" } |
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs index 45b60768a..e22ab8402 100644 --- a/crates/rust-analyzer/src/caps.rs +++ b/crates/rust-analyzer/src/caps.rs | |||
@@ -16,7 +16,7 @@ pub fn server_capabilities() -> ServerCapabilities { | |||
16 | ServerCapabilities { | 16 | ServerCapabilities { |
17 | text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { | 17 | text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { |
18 | open_close: Some(true), | 18 | open_close: Some(true), |
19 | change: Some(TextDocumentSyncKind::Full), | 19 | change: Some(TextDocumentSyncKind::Incremental), |
20 | will_save: None, | 20 | will_save: None, |
21 | will_save_wait_until: None, | 21 | will_save_wait_until: None, |
22 | save: Some(SaveOptions::default()), | 22 | save: Some(SaveOptions::default()), |
diff --git a/crates/rust-analyzer/src/conv.rs b/crates/rust-analyzer/src/conv.rs index ffe3ea84d..7be5ebcdb 100644 --- a/crates/rust-analyzer/src/conv.rs +++ b/crates/rust-analyzer/src/conv.rs | |||
@@ -150,7 +150,7 @@ impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem { | |||
150 | detail: self.detail().map(|it| it.to_string()), | 150 | detail: self.detail().map(|it| it.to_string()), |
151 | filter_text: Some(self.lookup().to_string()), | 151 | filter_text: Some(self.lookup().to_string()), |
152 | kind: self.kind().map(|it| it.conv()), | 152 | kind: self.kind().map(|it| it.conv()), |
153 | text_edit: Some(text_edit), | 153 | text_edit: Some(text_edit.into()), |
154 | additional_text_edits: Some(additional_text_edits), | 154 | additional_text_edits: Some(additional_text_edits), |
155 | documentation: self.documentation().map(|it| it.conv()), | 155 | documentation: self.documentation().map(|it| it.conv()), |
156 | deprecated: Some(self.deprecated()), | 156 | deprecated: Some(self.deprecated()), |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index f3aef3f0f..0a0e616c9 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -6,9 +6,12 @@ mod subscriptions; | |||
6 | pub(crate) mod pending_requests; | 6 | pub(crate) mod pending_requests; |
7 | 7 | ||
8 | use std::{ | 8 | use std::{ |
9 | borrow::Cow, | ||
9 | env, | 10 | env, |
10 | error::Error, | 11 | error::Error, |
11 | fmt, panic, | 12 | fmt, |
13 | ops::Range, | ||
14 | panic, | ||
12 | path::PathBuf, | 15 | path::PathBuf, |
13 | sync::Arc, | 16 | sync::Arc, |
14 | time::{Duration, Instant}, | 17 | time::{Duration, Instant}, |
@@ -18,11 +21,12 @@ use crossbeam_channel::{never, select, unbounded, RecvError, Sender}; | |||
18 | use itertools::Itertools; | 21 | use itertools::Itertools; |
19 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | 22 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; |
20 | use lsp_types::{ | 23 | use lsp_types::{ |
21 | NumberOrString, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressCreateParams, | 24 | DidChangeTextDocumentParams, NumberOrString, TextDocumentContentChangeEvent, WorkDoneProgress, |
22 | WorkDoneProgressEnd, WorkDoneProgressReport, | 25 | WorkDoneProgressBegin, WorkDoneProgressCreateParams, WorkDoneProgressEnd, |
26 | WorkDoneProgressReport, | ||
23 | }; | 27 | }; |
24 | use ra_flycheck::{url_from_path_with_drive_lowercasing, CheckTask}; | 28 | use ra_flycheck::{url_from_path_with_drive_lowercasing, CheckTask}; |
25 | use ra_ide::{Canceled, FileId, LibraryData, SourceRootId}; | 29 | use ra_ide::{Canceled, FileId, LibraryData, LineIndex, SourceRootId}; |
26 | use ra_prof::profile; | 30 | use ra_prof::profile; |
27 | use ra_project_model::{PackageRoot, ProjectWorkspace}; | 31 | use ra_project_model::{PackageRoot, ProjectWorkspace}; |
28 | use ra_vfs::{VfsFile, VfsTask, Watch}; | 32 | use ra_vfs::{VfsFile, VfsTask, Watch}; |
@@ -33,6 +37,7 @@ use threadpool::ThreadPool; | |||
33 | 37 | ||
34 | use crate::{ | 38 | use crate::{ |
35 | config::{Config, FilesWatcher}, | 39 | config::{Config, FilesWatcher}, |
40 | conv::{ConvWith, TryConvWith}, | ||
36 | diagnostics::DiagnosticTask, | 41 | diagnostics::DiagnosticTask, |
37 | main_loop::{ | 42 | main_loop::{ |
38 | pending_requests::{PendingRequest, PendingRequests}, | 43 | pending_requests::{PendingRequest, PendingRequests}, |
@@ -579,12 +584,16 @@ fn on_notification( | |||
579 | Err(not) => not, | 584 | Err(not) => not, |
580 | }; | 585 | }; |
581 | let not = match notification_cast::<req::DidChangeTextDocument>(not) { | 586 | let not = match notification_cast::<req::DidChangeTextDocument>(not) { |
582 | Ok(mut params) => { | 587 | Ok(params) => { |
583 | let uri = params.text_document.uri; | 588 | let DidChangeTextDocumentParams { text_document, content_changes } = params; |
589 | let world = state.snapshot(); | ||
590 | let file_id = text_document.try_conv_with(&world)?; | ||
591 | let line_index = world.analysis().file_line_index(file_id)?; | ||
592 | let uri = text_document.uri; | ||
584 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | 593 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; |
585 | let text = | 594 | state.vfs.write().change_file_overlay(&path, |old_text| { |
586 | params.content_changes.pop().ok_or_else(|| "empty changes".to_string())?.text; | 595 | apply_document_changes(old_text, Cow::Borrowed(&line_index), content_changes); |
587 | state.vfs.write().change_file_overlay(path.as_path(), text); | 596 | }); |
588 | return Ok(()); | 597 | return Ok(()); |
589 | } | 598 | } |
590 | Err(not) => not, | 599 | Err(not) => not, |
@@ -653,6 +662,48 @@ fn on_notification( | |||
653 | Ok(()) | 662 | Ok(()) |
654 | } | 663 | } |
655 | 664 | ||
665 | fn apply_document_changes( | ||
666 | old_text: &mut String, | ||
667 | mut line_index: Cow<'_, LineIndex>, | ||
668 | content_changes: Vec<TextDocumentContentChangeEvent>, | ||
669 | ) { | ||
670 | // The changes we got must be applied sequentially, but can cross lines so we | ||
671 | // have to keep our line index updated. | ||
672 | // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we | ||
673 | // remember the last valid line in the index and only rebuild it if needed. | ||
674 | enum IndexValid { | ||
675 | All, | ||
676 | UpToLine(u64), | ||
677 | } | ||
678 | |||
679 | impl IndexValid { | ||
680 | fn covers(&self, line: u64) -> bool { | ||
681 | match *self { | ||
682 | IndexValid::UpToLine(to) => to >= line, | ||
683 | _ => true, | ||
684 | } | ||
685 | } | ||
686 | } | ||
687 | |||
688 | let mut index_valid = IndexValid::All; | ||
689 | for change in content_changes { | ||
690 | match change.range { | ||
691 | Some(range) => { | ||
692 | if !index_valid.covers(range.start.line) { | ||
693 | line_index = Cow::Owned(LineIndex::new(&old_text)); | ||
694 | } | ||
695 | index_valid = IndexValid::UpToLine(range.start.line); | ||
696 | let range = range.conv_with(&line_index); | ||
697 | old_text.replace_range(Range::<usize>::from(range), &change.text); | ||
698 | } | ||
699 | None => { | ||
700 | *old_text = change.text; | ||
701 | index_valid = IndexValid::UpToLine(0); | ||
702 | } | ||
703 | } | ||
704 | } | ||
705 | } | ||
706 | |||
656 | fn on_check_task( | 707 | fn on_check_task( |
657 | task: CheckTask, | 708 | task: CheckTask, |
658 | world_state: &mut WorldState, | 709 | world_state: &mut WorldState, |
@@ -958,3 +1009,64 @@ where | |||
958 | { | 1009 | { |
959 | Request::new(id, R::METHOD.to_string(), params) | 1010 | Request::new(id, R::METHOD.to_string(), params) |
960 | } | 1011 | } |
1012 | |||
1013 | #[cfg(test)] | ||
1014 | mod tests { | ||
1015 | use std::borrow::Cow; | ||
1016 | |||
1017 | use lsp_types::{Position, Range, TextDocumentContentChangeEvent}; | ||
1018 | use ra_ide::LineIndex; | ||
1019 | |||
1020 | #[test] | ||
1021 | fn apply_document_changes() { | ||
1022 | fn run(text: &mut String, changes: Vec<TextDocumentContentChangeEvent>) { | ||
1023 | let line_index = Cow::Owned(LineIndex::new(&text)); | ||
1024 | super::apply_document_changes(text, line_index, changes); | ||
1025 | } | ||
1026 | |||
1027 | macro_rules! c { | ||
1028 | [$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => { | ||
1029 | vec![$(TextDocumentContentChangeEvent { | ||
1030 | range: Some(Range { | ||
1031 | start: Position { line: $sl, character: $sc }, | ||
1032 | end: Position { line: $el, character: $ec }, | ||
1033 | }), | ||
1034 | range_length: None, | ||
1035 | text: String::from($text), | ||
1036 | }),+] | ||
1037 | }; | ||
1038 | } | ||
1039 | |||
1040 | let mut text = String::new(); | ||
1041 | run(&mut text, vec![]); | ||
1042 | assert_eq!(text, ""); | ||
1043 | run( | ||
1044 | &mut text, | ||
1045 | vec![TextDocumentContentChangeEvent { | ||
1046 | range: None, | ||
1047 | range_length: None, | ||
1048 | text: String::from("the"), | ||
1049 | }], | ||
1050 | ); | ||
1051 | assert_eq!(text, "the"); | ||
1052 | run(&mut text, c![0, 3; 0, 3 => " quick"]); | ||
1053 | assert_eq!(text, "the quick"); | ||
1054 | run(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]); | ||
1055 | assert_eq!(text, "quick foxes"); | ||
1056 | run(&mut text, c![0, 11; 0, 11 => "\ndream"]); | ||
1057 | assert_eq!(text, "quick foxes\ndream"); | ||
1058 | run(&mut text, c![1, 0; 1, 0 => "have "]); | ||
1059 | assert_eq!(text, "quick foxes\nhave dream"); | ||
1060 | run(&mut text, c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"]); | ||
1061 | assert_eq!(text, "the quick foxes\nhave quiet dreams\n"); | ||
1062 | run(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]); | ||
1063 | assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n"); | ||
1064 | run( | ||
1065 | &mut text, | ||
1066 | c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"], | ||
1067 | ); | ||
1068 | assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n"); | ||
1069 | run(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]); | ||
1070 | assert_eq!(text, "the quick \nthey have quiet dreams\n"); | ||
1071 | } | ||
1072 | } | ||
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs index 6caaf5f88..8db2dfa0c 100644 --- a/crates/rust-analyzer/src/main_loop/handlers.rs +++ b/crates/rust-analyzer/src/main_loop/handlers.rs | |||
@@ -326,10 +326,10 @@ pub fn handle_workspace_symbol( | |||
326 | 326 | ||
327 | pub fn handle_goto_definition( | 327 | pub fn handle_goto_definition( |
328 | world: WorldSnapshot, | 328 | world: WorldSnapshot, |
329 | params: req::TextDocumentPositionParams, | 329 | params: req::GotoDefinitionParams, |
330 | ) -> Result<Option<req::GotoDefinitionResponse>> { | 330 | ) -> Result<Option<req::GotoDefinitionResponse>> { |
331 | let _p = profile("handle_goto_definition"); | 331 | let _p = profile("handle_goto_definition"); |
332 | let position = params.try_conv_with(&world)?; | 332 | let position = params.text_document_position_params.try_conv_with(&world)?; |
333 | let nav_info = match world.analysis().goto_definition(position)? { | 333 | let nav_info = match world.analysis().goto_definition(position)? { |
334 | None => return Ok(None), | 334 | None => return Ok(None), |
335 | Some(it) => it, | 335 | Some(it) => it, |
@@ -340,10 +340,10 @@ pub fn handle_goto_definition( | |||
340 | 340 | ||
341 | pub fn handle_goto_implementation( | 341 | pub fn handle_goto_implementation( |
342 | world: WorldSnapshot, | 342 | world: WorldSnapshot, |
343 | params: req::TextDocumentPositionParams, | 343 | params: req::GotoImplementationParams, |
344 | ) -> Result<Option<req::GotoImplementationResponse>> { | 344 | ) -> Result<Option<req::GotoImplementationResponse>> { |
345 | let _p = profile("handle_goto_implementation"); | 345 | let _p = profile("handle_goto_implementation"); |
346 | let position = params.try_conv_with(&world)?; | 346 | let position = params.text_document_position_params.try_conv_with(&world)?; |
347 | let nav_info = match world.analysis().goto_implementation(position)? { | 347 | let nav_info = match world.analysis().goto_implementation(position)? { |
348 | None => return Ok(None), | 348 | None => return Ok(None), |
349 | Some(it) => it, | 349 | Some(it) => it, |
@@ -354,10 +354,10 @@ pub fn handle_goto_implementation( | |||
354 | 354 | ||
355 | pub fn handle_goto_type_definition( | 355 | pub fn handle_goto_type_definition( |
356 | world: WorldSnapshot, | 356 | world: WorldSnapshot, |
357 | params: req::TextDocumentPositionParams, | 357 | params: req::GotoTypeDefinitionParams, |
358 | ) -> Result<Option<req::GotoTypeDefinitionResponse>> { | 358 | ) -> Result<Option<req::GotoTypeDefinitionResponse>> { |
359 | let _p = profile("handle_goto_type_definition"); | 359 | let _p = profile("handle_goto_type_definition"); |
360 | let position = params.try_conv_with(&world)?; | 360 | let position = params.text_document_position_params.try_conv_with(&world)?; |
361 | let nav_info = match world.analysis().goto_type_definition(position)? { | 361 | let nav_info = match world.analysis().goto_type_definition(position)? { |
362 | None => return Ok(None), | 362 | None => return Ok(None), |
363 | Some(it) => it, | 363 | Some(it) => it, |
@@ -487,10 +487,10 @@ pub fn handle_folding_range( | |||
487 | 487 | ||
488 | pub fn handle_signature_help( | 488 | pub fn handle_signature_help( |
489 | world: WorldSnapshot, | 489 | world: WorldSnapshot, |
490 | params: req::TextDocumentPositionParams, | 490 | params: req::SignatureHelpParams, |
491 | ) -> Result<Option<req::SignatureHelp>> { | 491 | ) -> Result<Option<req::SignatureHelp>> { |
492 | let _p = profile("handle_signature_help"); | 492 | let _p = profile("handle_signature_help"); |
493 | let position = params.try_conv_with(&world)?; | 493 | let position = params.text_document_position_params.try_conv_with(&world)?; |
494 | if let Some(call_info) = world.analysis().call_info(position)? { | 494 | if let Some(call_info) = world.analysis().call_info(position)? { |
495 | let concise = !world.config.call_info_full; | 495 | let concise = !world.config.call_info_full; |
496 | let mut active_parameter = call_info.active_parameter.map(|it| it as i64); | 496 | let mut active_parameter = call_info.active_parameter.map(|it| it as i64); |
@@ -509,12 +509,9 @@ pub fn handle_signature_help( | |||
509 | } | 509 | } |
510 | } | 510 | } |
511 | 511 | ||
512 | pub fn handle_hover( | 512 | pub fn handle_hover(world: WorldSnapshot, params: req::HoverParams) -> Result<Option<Hover>> { |
513 | world: WorldSnapshot, | ||
514 | params: req::TextDocumentPositionParams, | ||
515 | ) -> Result<Option<Hover>> { | ||
516 | let _p = profile("handle_hover"); | 513 | let _p = profile("handle_hover"); |
517 | let position = params.try_conv_with(&world)?; | 514 | let position = params.text_document_position_params.try_conv_with(&world)?; |
518 | let info = match world.analysis().hover(position)? { | 515 | let info = match world.analysis().hover(position)? { |
519 | None => return Ok(None), | 516 | None => return Ok(None), |
520 | Some(info) => info, | 517 | Some(info) => info, |
@@ -878,8 +875,14 @@ pub fn handle_code_lens( | |||
878 | .map(|it| { | 875 | .map(|it| { |
879 | let range = it.node_range.conv_with(&line_index); | 876 | let range = it.node_range.conv_with(&line_index); |
880 | let pos = range.start; | 877 | let pos = range.start; |
881 | let lens_params = | 878 | let lens_params = req::GotoImplementationParams { |
882 | req::TextDocumentPositionParams::new(params.text_document.clone(), pos); | 879 | text_document_position_params: req::TextDocumentPositionParams::new( |
880 | params.text_document.clone(), | ||
881 | pos, | ||
882 | ), | ||
883 | work_done_progress_params: Default::default(), | ||
884 | partial_result_params: Default::default(), | ||
885 | }; | ||
883 | CodeLens { | 886 | CodeLens { |
884 | range, | 887 | range, |
885 | command: None, | 888 | command: None, |
@@ -894,7 +897,7 @@ pub fn handle_code_lens( | |||
894 | #[derive(Debug, Serialize, Deserialize)] | 897 | #[derive(Debug, Serialize, Deserialize)] |
895 | #[serde(rename_all = "camelCase")] | 898 | #[serde(rename_all = "camelCase")] |
896 | enum CodeLensResolveData { | 899 | enum CodeLensResolveData { |
897 | Impls(req::TextDocumentPositionParams), | 900 | Impls(req::GotoImplementationParams), |
898 | } | 901 | } |
899 | 902 | ||
900 | pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> { | 903 | pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> { |
@@ -927,7 +930,7 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re | |||
927 | title, | 930 | title, |
928 | command: "rust-analyzer.showReferences".into(), | 931 | command: "rust-analyzer.showReferences".into(), |
929 | arguments: Some(vec![ | 932 | arguments: Some(vec![ |
930 | to_value(&lens_params.text_document.uri).unwrap(), | 933 | to_value(&lens_params.text_document_position_params.text_document.uri).unwrap(), |
931 | to_value(code_lens.range.start).unwrap(), | 934 | to_value(code_lens.range.start).unwrap(), |
932 | to_value(locations).unwrap(), | 935 | to_value(locations).unwrap(), |
933 | ]), | 936 | ]), |
@@ -944,16 +947,16 @@ pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Re | |||
944 | 947 | ||
945 | pub fn handle_document_highlight( | 948 | pub fn handle_document_highlight( |
946 | world: WorldSnapshot, | 949 | world: WorldSnapshot, |
947 | params: req::TextDocumentPositionParams, | 950 | params: req::DocumentHighlightParams, |
948 | ) -> Result<Option<Vec<DocumentHighlight>>> { | 951 | ) -> Result<Option<Vec<DocumentHighlight>>> { |
949 | let _p = profile("handle_document_highlight"); | 952 | let _p = profile("handle_document_highlight"); |
950 | let file_id = params.text_document.try_conv_with(&world)?; | 953 | let file_id = params.text_document_position_params.text_document.try_conv_with(&world)?; |
951 | let line_index = world.analysis().file_line_index(file_id)?; | 954 | let line_index = world.analysis().file_line_index(file_id)?; |
952 | 955 | ||
953 | let refs = match world | 956 | let refs = match world.analysis().find_all_refs( |
954 | .analysis() | 957 | params.text_document_position_params.try_conv_with(&world)?, |
955 | .find_all_refs(params.try_conv_with(&world)?, Some(SearchScope::single_file(file_id)))? | 958 | Some(SearchScope::single_file(file_id)), |
956 | { | 959 | )? { |
957 | None => return Ok(None), | 960 | None => return Ok(None), |
958 | Some(refs) => refs, | 961 | Some(refs) => refs, |
959 | }; | 962 | }; |
diff --git a/crates/rust-analyzer/src/req.rs b/crates/rust-analyzer/src/req.rs index ae3448892..0dae6bad4 100644 --- a/crates/rust-analyzer/src/req.rs +++ b/crates/rust-analyzer/src/req.rs | |||
@@ -8,14 +8,15 @@ pub use lsp_types::{ | |||
8 | notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens, | 8 | notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens, |
9 | CodeLensParams, CompletionParams, CompletionResponse, ConfigurationItem, ConfigurationParams, | 9 | CodeLensParams, CompletionParams, CompletionResponse, ConfigurationItem, ConfigurationParams, |
10 | DiagnosticTag, DidChangeConfigurationParams, DidChangeWatchedFilesParams, | 10 | DiagnosticTag, DidChangeConfigurationParams, DidChangeWatchedFilesParams, |
11 | DidChangeWatchedFilesRegistrationOptions, DocumentOnTypeFormattingParams, DocumentSymbolParams, | 11 | DidChangeWatchedFilesRegistrationOptions, DocumentHighlightParams, |
12 | DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType, | 12 | DocumentOnTypeFormattingParams, DocumentSymbolParams, DocumentSymbolResponse, |
13 | PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken, | 13 | FileSystemWatcher, GotoDefinitionParams, GotoDefinitionResponse, Hover, HoverParams, |
14 | PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange, | 14 | InitializeResult, MessageType, PartialResultParams, ProgressParams, ProgressParamsValue, |
15 | SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams, | 15 | ProgressToken, PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, |
16 | SelectionRange, SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams, | ||
16 | SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams, | 17 | SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams, |
17 | SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit, | 18 | SignatureHelp, SignatureHelpParams, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, |
18 | WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams, | 19 | TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams, |
19 | }; | 20 | }; |
20 | use std::path::PathBuf; | 21 | use std::path::PathBuf; |
21 | 22 | ||
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs index 71f4f58a3..2dc5cb119 100644 --- a/crates/rust-analyzer/src/semantic_tokens.rs +++ b/crates/rust-analyzer/src/semantic_tokens.rs | |||
@@ -4,64 +4,69 @@ use std::ops; | |||
4 | 4 | ||
5 | use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens}; | 5 | use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens}; |
6 | 6 | ||
7 | pub(crate) const ATTRIBUTE: SemanticTokenType = SemanticTokenType::new("attribute"); | 7 | macro_rules! define_semantic_token_types { |
8 | pub(crate) const BUILTIN_TYPE: SemanticTokenType = SemanticTokenType::new("builtinType"); | 8 | ($(($ident:ident, $string:literal)),*$(,)?) => { |
9 | pub(crate) const ENUM_MEMBER: SemanticTokenType = SemanticTokenType::new("enumMember"); | 9 | $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)* |
10 | pub(crate) const LIFETIME: SemanticTokenType = SemanticTokenType::new("lifetime"); | 10 | |
11 | pub(crate) const TYPE_ALIAS: SemanticTokenType = SemanticTokenType::new("typeAlias"); | 11 | pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[ |
12 | pub(crate) const UNION: SemanticTokenType = SemanticTokenType::new("union"); | 12 | SemanticTokenType::COMMENT, |
13 | pub(crate) const UNRESOLVED_REFERENCE: SemanticTokenType = | 13 | SemanticTokenType::KEYWORD, |
14 | SemanticTokenType::new("unresolvedReference"); | 14 | SemanticTokenType::STRING, |
15 | pub(crate) const FORMAT_SPECIFIER: SemanticTokenType = SemanticTokenType::new("formatSpecifier"); | 15 | SemanticTokenType::NUMBER, |
16 | 16 | SemanticTokenType::REGEXP, | |
17 | pub(crate) const CONSTANT: SemanticTokenModifier = SemanticTokenModifier::new("constant"); | 17 | SemanticTokenType::OPERATOR, |
18 | pub(crate) const CONTROL_FLOW: SemanticTokenModifier = SemanticTokenModifier::new("controlFlow"); | 18 | SemanticTokenType::NAMESPACE, |
19 | pub(crate) const MUTABLE: SemanticTokenModifier = SemanticTokenModifier::new("mutable"); | 19 | SemanticTokenType::TYPE, |
20 | pub(crate) const UNSAFE: SemanticTokenModifier = SemanticTokenModifier::new("unsafe"); | 20 | SemanticTokenType::STRUCT, |
21 | 21 | SemanticTokenType::CLASS, | |
22 | pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[ | 22 | SemanticTokenType::INTERFACE, |
23 | SemanticTokenType::COMMENT, | 23 | SemanticTokenType::ENUM, |
24 | SemanticTokenType::KEYWORD, | 24 | SemanticTokenType::TYPE_PARAMETER, |
25 | SemanticTokenType::STRING, | 25 | SemanticTokenType::FUNCTION, |
26 | SemanticTokenType::NUMBER, | 26 | SemanticTokenType::MEMBER, |
27 | SemanticTokenType::REGEXP, | 27 | SemanticTokenType::PROPERTY, |
28 | SemanticTokenType::OPERATOR, | 28 | SemanticTokenType::MACRO, |
29 | SemanticTokenType::NAMESPACE, | 29 | SemanticTokenType::VARIABLE, |
30 | SemanticTokenType::TYPE, | 30 | SemanticTokenType::PARAMETER, |
31 | SemanticTokenType::STRUCT, | 31 | SemanticTokenType::LABEL, |
32 | SemanticTokenType::CLASS, | 32 | $($ident),* |
33 | SemanticTokenType::INTERFACE, | 33 | ]; |
34 | SemanticTokenType::ENUM, | 34 | }; |
35 | SemanticTokenType::TYPE_PARAMETER, | 35 | } |
36 | SemanticTokenType::FUNCTION, | 36 | |
37 | SemanticTokenType::MEMBER, | 37 | define_semantic_token_types![ |
38 | SemanticTokenType::PROPERTY, | 38 | (ATTRIBUTE, "attribute"), |
39 | SemanticTokenType::MACRO, | 39 | (BUILTIN_TYPE, "builtinType"), |
40 | SemanticTokenType::VARIABLE, | 40 | (ENUM_MEMBER, "enumMember"), |
41 | SemanticTokenType::PARAMETER, | 41 | (LIFETIME, "lifetime"), |
42 | SemanticTokenType::LABEL, | 42 | (TYPE_ALIAS, "typeAlias"), |
43 | ATTRIBUTE, | 43 | (UNION, "union"), |
44 | BUILTIN_TYPE, | 44 | (UNRESOLVED_REFERENCE, "unresolvedReference"), |
45 | ENUM_MEMBER, | 45 | (FORMAT_SPECIFIER, "formatSpecifier"), |
46 | LIFETIME, | ||
47 | TYPE_ALIAS, | ||
48 | UNION, | ||
49 | UNRESOLVED_REFERENCE, | ||
50 | FORMAT_SPECIFIER, | ||
51 | ]; | 46 | ]; |
52 | 47 | ||
53 | pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ | 48 | macro_rules! define_semantic_token_modifiers { |
54 | SemanticTokenModifier::DOCUMENTATION, | 49 | ($(($ident:ident, $string:literal)),*$(,)?) => { |
55 | SemanticTokenModifier::DECLARATION, | 50 | $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)* |
56 | SemanticTokenModifier::DEFINITION, | 51 | |
57 | SemanticTokenModifier::STATIC, | 52 | pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[ |
58 | SemanticTokenModifier::ABSTRACT, | 53 | SemanticTokenModifier::DOCUMENTATION, |
59 | SemanticTokenModifier::DEPRECATED, | 54 | SemanticTokenModifier::DECLARATION, |
60 | SemanticTokenModifier::READONLY, | 55 | SemanticTokenModifier::DEFINITION, |
61 | CONSTANT, | 56 | SemanticTokenModifier::STATIC, |
62 | MUTABLE, | 57 | SemanticTokenModifier::ABSTRACT, |
63 | UNSAFE, | 58 | SemanticTokenModifier::DEPRECATED, |
64 | CONTROL_FLOW, | 59 | SemanticTokenModifier::READONLY, |
60 | $($ident),* | ||
61 | ]; | ||
62 | }; | ||
63 | } | ||
64 | |||
65 | define_semantic_token_modifiers![ | ||
66 | (CONSTANT, "constant"), | ||
67 | (CONTROL_FLOW, "controlFlow"), | ||
68 | (MUTABLE, "mutable"), | ||
69 | (UNSAFE, "unsafe"), | ||
65 | ]; | 70 | ]; |
66 | 71 | ||
67 | #[derive(Default)] | 72 | #[derive(Default)] |
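For readers skimming the `semantic_tokens.rs` rewrite above: the two macros simply fold the per-constant declarations and the `SUPPORTED_TYPES` / `SUPPORTED_MODIFIERS` arrays into a single list each. A rough sketch of what a one-entry invocation expands to, based only on the macro body shown in this hunk (the real call declares all eight custom token types):

```rust
use lsp_types::SemanticTokenType;

// define_semantic_token_types![(ATTRIBUTE, "attribute")]; expands to roughly:
pub(crate) const ATTRIBUTE: SemanticTokenType = SemanticTokenType::new("attribute");

pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
    // ...the standard lsp_types variants listed in the hunk (COMMENT, KEYWORD, ...),
    SemanticTokenType::COMMENT,
    SemanticTokenType::KEYWORD,
    // ...followed by every custom constant declared through the macro:
    ATTRIBUTE,
];
```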
diff --git a/crates/rust-analyzer/tests/heavy_tests/main.rs b/crates/rust-analyzer/tests/heavy_tests/main.rs index f6245ddd4..07b8114c6 100644 --- a/crates/rust-analyzer/tests/heavy_tests/main.rs +++ b/crates/rust-analyzer/tests/heavy_tests/main.rs | |||
@@ -4,8 +4,8 @@ use std::{collections::HashMap, path::PathBuf, time::Instant}; | |||
4 | 4 | ||
5 | use lsp_types::{ | 5 | use lsp_types::{ |
6 | CodeActionContext, DidOpenTextDocumentParams, DocumentFormattingParams, FormattingOptions, | 6 | CodeActionContext, DidOpenTextDocumentParams, DocumentFormattingParams, FormattingOptions, |
7 | PartialResultParams, Position, Range, TextDocumentItem, TextDocumentPositionParams, | 7 | GotoDefinitionParams, HoverParams, PartialResultParams, Position, Range, TextDocumentItem, |
8 | WorkDoneProgressParams, | 8 | TextDocumentPositionParams, WorkDoneProgressParams, |
9 | }; | 9 | }; |
10 | use rust_analyzer::req::{ | 10 | use rust_analyzer::req::{ |
11 | CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument, | 11 | CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument, |
@@ -610,10 +610,14 @@ fn main() { message(); } | |||
610 | }) | 610 | }) |
611 | .server(); | 611 | .server(); |
612 | server.wait_until_workspace_is_loaded(); | 612 | server.wait_until_workspace_is_loaded(); |
613 | let res = server.send_request::<GotoDefinition>(TextDocumentPositionParams::new( | 613 | let res = server.send_request::<GotoDefinition>(GotoDefinitionParams { |
614 | server.doc_id("src/main.rs"), | 614 | text_document_position_params: TextDocumentPositionParams::new( |
615 | Position::new(2, 15), | 615 | server.doc_id("src/main.rs"), |
616 | )); | 616 | Position::new(2, 15), |
617 | ), | ||
618 | work_done_progress_params: Default::default(), | ||
619 | partial_result_params: Default::default(), | ||
620 | }); | ||
617 | assert!(format!("{}", res).contains("hello.rs")); | 621 | assert!(format!("{}", res).contains("hello.rs")); |
618 | } | 622 | } |
619 | 623 | ||
@@ -692,10 +696,13 @@ pub fn foo(_input: TokenStream) -> TokenStream { | |||
692 | .root("bar") | 696 | .root("bar") |
693 | .server(); | 697 | .server(); |
694 | server.wait_until_workspace_is_loaded(); | 698 | server.wait_until_workspace_is_loaded(); |
695 | let res = server.send_request::<HoverRequest>(TextDocumentPositionParams::new( | 699 | let res = server.send_request::<HoverRequest>(HoverParams { |
696 | server.doc_id("foo/src/main.rs"), | 700 | text_document_position_params: TextDocumentPositionParams::new( |
697 | Position::new(7, 9), | 701 | server.doc_id("foo/src/main.rs"), |
698 | )); | 702 | Position::new(7, 9), |
703 | ), | ||
704 | work_done_progress_params: Default::default(), | ||
705 | }); | ||
699 | 706 | ||
700 | let value = res.get("contents").unwrap().get("value").unwrap().to_string(); | 707 | let value = res.get("contents").unwrap().get("value").unwrap().to_string(); |
701 | assert_eq!(value, r#""```rust\nfoo::Bar\nfn bar()\n```""#) | 708 | assert_eq!(value, r#""```rust\nfoo::Bar\nfn bar()\n```""#) |
diff --git a/docs/dev/syntax.md b/docs/dev/syntax.md index 4dd1de659..33973ffec 100644 --- a/docs/dev/syntax.md +++ b/docs/dev/syntax.md | |||
@@ -35,7 +35,7 @@ The syntax tree consists of three layers: | |||
35 | * AST | 35 | * AST |
36 | 36 | ||
37 | Of these, only GreenNodes store the actual data, the other two layers are (non-trivial) views into green tree. | 37 | Of these, only GreenNodes store the actual data, the other two layers are (non-trivial) views into green tree. |
38 | Red-green terminology comes from Roslyn ([link](https://docs.microsoft.com/en-ie/archive/blogs/ericlippert/persistence-facades-and-roslyns-red-green-trees)) and gives the name to the `rowan` library. Green and syntax nodes are defined in rowan, ast is defined in rust-analyzer. | 38 | Red-green terminology comes from Roslyn ([link](https://ericlippert.com/2012/06/08/red-green-trees/)) and gives the name to the `rowan` library. Green and syntax nodes are defined in rowan, ast is defined in rust-analyzer. |
39 | 39 | ||
40 | Syntax trees are a semi-transient data structure. | 40 | Syntax trees are a semi-transient data structure. |
41 | In general, frontend does not keep syntax trees for all files in memory. | 41 | In general, frontend does not keep syntax trees for all files in memory. |
diff --git a/docs/user/features.md b/docs/user/features.md index 56d2969fd..b9a365fc1 100644 --- a/docs/user/features.md +++ b/docs/user/features.md | |||
@@ -140,8 +140,8 @@ space or `;` depending on the return type of the function. | |||
140 | When completing a function call, `()` are automatically inserted. If a function | 140 | When completing a function call, `()` are automatically inserted. If a function |
141 | takes arguments, the cursor is positioned inside the parenthesis. | 141 | takes arguments, the cursor is positioned inside the parenthesis. |
142 | 142 | ||
143 | There are postifx completions, which can be triggerd by typing something like | 143 | There are postfix completions, which can be triggered by typing something like |
144 | `foo().if`. The word after `.` determines postifx completion. Possible variants are: | 144 | `foo().if`. The word after `.` determines postfix completion. Possible variants are: |
145 | 145 | ||
146 | - `expr.if` -> `if expr {}` | 146 | - `expr.if` -> `if expr {}` |
147 | - `expr.match` -> `match expr {}` | 147 | - `expr.match` -> `match expr {}` |
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc index 76d065d35..4cb1e23e8 100644 --- a/docs/user/readme.adoc +++ b/docs/user/readme.adoc | |||
@@ -111,7 +111,7 @@ Here are some useful self-diagnostic commands: | |||
111 | === rust-analyzer Language Server Binary | 111 | === rust-analyzer Language Server Binary |
112 | 112 | ||
113 | Other editors generally require the `rust-analyzer` binary to be in `$PATH`. | 113 | Other editors generally require the `rust-analyzer` binary to be in `$PATH`. |
114 | You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analzyer` and make it executable in addition to moving it into a directory in your `$PATH`. | 114 | You can download the pre-built binary from the https://github.com/rust-analyzer/rust-analyzer/releases[releases] page. Typically, you then need to rename the binary for your platform, e.g. `rust-analyzer-mac` if you're on Mac OS, to `rust-analyzer` and make it executable in addition to moving it into a directory in your `$PATH`. |
115 | 115 | ||
116 | On Linux, to install the `rust-analyzer` binary into `~/.local/bin`, these commands can be used | 116 |
117 | 117 | ||
@@ -169,13 +169,15 @@ There are several LSP client implementations for vim: | |||
169 | 169 | ||
170 | 1. Install coc.nvim by following the instructions at | 170 | 1. Install coc.nvim by following the instructions at |
171 | https://github.com/neoclide/coc.nvim[coc.nvim] | 171 | https://github.com/neoclide/coc.nvim[coc.nvim] |
172 | (nodejs required) | 172 | (Node.js required) |
173 | 2. Run `:CocInstall coc-rust-analyzer` to install | 173 | 2. Run `:CocInstall coc-rust-analyzer` to install |
174 | https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer], | 174 | https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer], |
175 | this extension implements _most_ of the features supported in the VSCode extension: | 175 | this extension implements _most_ of the features supported in the VSCode extension: |
176 | * automatically install and upgrade stable/nightly releases | ||
176 | * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc. | 177 | * same configurations as VSCode extension, `rust-analyzer.serverPath`, `rust-analyzer.cargo.features` etc. |
177 | * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc. | 178 | * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc. |
178 | * highlighting and inlay_hints are not implemented yet | 179 | * inlay hints for method chaining support, _Neovim Only_ |
180 | * semantic highlighting is not implemented yet | ||
179 | 181 | ||
180 | ==== LanguageClient-neovim | 182 | ==== LanguageClient-neovim |
181 | 183 | ||
@@ -195,7 +197,7 @@ let g:LanguageClient_serverCommands = { | |||
195 | ==== YouCompleteMe | 197 | ==== YouCompleteMe |
196 | 198 | ||
197 | 1. Install YouCompleteMe by following the instructions | 199 | 1. Install YouCompleteMe by following the instructions |
198 | https://ycm-core.github.io/YouCompleteMe/#rust-semantic-completion[here] | 200 | https://github.com/ycm-core/lsp-examples#rust-rust-analyzer[here] |
199 | 201 | ||
200 | 2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists): | 202 | 2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists): |
201 | + | 203 | + |
@@ -212,6 +214,21 @@ let g:ycm_language_server = | |||
212 | \ ] | 214 | \ ] |
213 | ---- | 215 | ---- |
214 | 216 | ||
217 | ==== ALE | ||
218 | |||
219 | To add the LSP server to https://github.com/dense-analysis/ale[ale]: | ||
220 | |||
221 | [source,vim] | ||
222 | ---- | ||
223 | call ale#linter#Define('rust', { | ||
224 | \ 'name': 'rust-analyzer', | ||
225 | \ 'lsp': 'stdio', | ||
226 | \ 'executable': 'rust-analyzer', | ||
227 | \ 'command': '%e', | ||
228 | \ 'project_root': '.', | ||
229 | \}) | ||
230 | ---- | ||
231 | |||
215 | ==== nvim-lsp | 232 | ==== nvim-lsp |
216 | 233 | ||
217 | NeoVim 0.5 (not yet released) has built-in language server support. | 234 | NeoVim 0.5 (not yet released) has built-in language server support. |
@@ -229,9 +246,9 @@ You also need the `LSP` package. To install it: | |||
229 | * Type `Install Package Control`, press enter | 246 | * Type `Install Package Control`, press enter |
230 | 2. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter. | 247 | 2. In the command palette, run `Package control: Install package`, and in the list that pops up, type `LSP` and press enter. |
231 | 248 | ||
232 | Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. The latter means that rust-analzyer is enabled by default in Rust projects. | 249 | Finally, with your Rust project open, in the command palette, run `LSP: Enable Language Server In Project` or `LSP: Enable Language Server Globally`, then select `rust-analyzer` in the list that pops up to enable the rust-analyzer LSP. The latter means that rust-analyzer is enabled by default in Rust projects. |
233 | 250 | ||
234 | If it worked, you should see "rust-analzyer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available. | 251 | If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the bottom bar, and after waiting a bit, functionality like tooltips on hovering over variables should become available. |
235 | 252 | ||
236 | If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary. | 253 | If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary. |
237 | 254 | ||
diff --git a/editors/code/package.json b/editors/code/package.json index b8aaa07d8..d30673791 100644 --- a/editors/code/package.json +++ b/editors/code/package.json | |||
@@ -27,6 +27,7 @@ | |||
27 | "scripts": { | 27 | "scripts": { |
28 | "vscode:prepublish": "tsc && rollup -c", | 28 | "vscode:prepublish": "tsc && rollup -c", |
29 | "package": "vsce package -o rust-analyzer.vsix", | 29 | "package": "vsce package -o rust-analyzer.vsix", |
30 | "build": "tsc", | ||
30 | "watch": "tsc --watch", | 31 | "watch": "tsc --watch", |
31 | "lint": "tsfmt --verify && eslint -c .eslintrc.js --ext ts ./src", | 32 | "lint": "tsfmt --verify && eslint -c .eslintrc.js --ext ts ./src", |
32 | "fix": " tsfmt -r && eslint -c .eslintrc.js --ext ts ./src --fix" | 33 | "fix": " tsfmt -r && eslint -c .eslintrc.js --ext ts ./src --fix" |
@@ -388,6 +389,28 @@ | |||
388 | "description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", | 389 | "description": "Enable Proc macro support, cargo.loadOutDirsFromCheck must be enabled.", |
389 | "type": "boolean", | 390 | "type": "boolean", |
390 | "default": false | 391 | "default": false |
392 | }, | ||
393 | "rust-analyzer.debug.engine": { | ||
394 | "type": "string", | ||
395 | "enum": [ | ||
396 | "auto", | ||
397 | "vadimcn.vscode-lldb", | ||
398 | "ms-vscode.cpptools" | ||
399 | ], | ||
400 | "default": "auto", | ||
401 | "description": "Preffered debug engine.", | ||
402 | "markdownEnumDescriptions": [ | ||
403 | "First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).", | ||
404 | "Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)", | ||
405 | "Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)" | ||
406 | ] | ||
407 | }, | ||
408 | "rust-analyzer.debug.sourceFileMap": { | ||
409 | "type": "object", | ||
410 | "description": "Optional source file mappings passed to the debug engine.", | ||
411 | "default": { | ||
412 | "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust" | ||
413 | } | ||
391 | } | 414 | } |
392 | } | 415 | } |
393 | }, | 416 | }, |
diff --git a/editors/code/src/cargo.ts b/editors/code/src/cargo.ts new file mode 100644 index 000000000..a328ba9bd --- /dev/null +++ b/editors/code/src/cargo.ts | |||
@@ -0,0 +1,106 @@ | |||
1 | import * as cp from 'child_process'; | ||
2 | import * as readline from 'readline'; | ||
3 | import { OutputChannel } from 'vscode'; | ||
4 | |||
5 | interface CompilationArtifact { | ||
6 | fileName: string; | ||
7 | name: string; | ||
8 | kind: string; | ||
9 | isTest: boolean; | ||
10 | } | ||
11 | |||
12 | export class Cargo { | ||
13 | rootFolder: string; | ||
14 | env?: Record<string, string>; | ||
15 | output: OutputChannel; | ||
16 | |||
17 | public constructor(cargoTomlFolder: string, output: OutputChannel, env: Record<string, string> | undefined = undefined) { | ||
18 | this.rootFolder = cargoTomlFolder; | ||
19 | this.output = output; | ||
20 | this.env = env; | ||
21 | } | ||
22 | |||
23 | public async artifactsFromArgs(cargoArgs: string[]): Promise<CompilationArtifact[]> { | ||
24 | const artifacts: CompilationArtifact[] = []; | ||
25 | |||
26 | try { | ||
27 | await this.runCargo(cargoArgs, | ||
28 | message => { | ||
29 | if (message.reason === 'compiler-artifact' && message.executable) { | ||
30 | const isBinary = message.target.crate_types.includes('bin'); | ||
31 | const isBuildScript = message.target.kind.includes('custom-build'); | ||
32 | if ((isBinary && !isBuildScript) || message.profile.test) { | ||
33 | artifacts.push({ | ||
34 | fileName: message.executable, | ||
35 | name: message.target.name, | ||
36 | kind: message.target.kind[0], | ||
37 | isTest: message.profile.test | ||
38 | }); | ||
39 | } | ||
40 | } | ||
41 | else if (message.reason === 'compiler-message') { | ||
42 | this.output.append(message.message.rendered); | ||
43 | } | ||
44 | }, | ||
45 | stderr => { | ||
46 | this.output.append(stderr); | ||
47 | } | ||
48 | ); | ||
49 | } | ||
50 | catch (err) { | ||
51 | this.output.show(true); | ||
52 | throw new Error(`Cargo invocation has failed: ${err}`); | ||
53 | } | ||
54 | |||
55 | return artifacts; | ||
56 | } | ||
57 | |||
58 | public async executableFromArgs(args: string[]): Promise<string> { | ||
59 | const cargoArgs = [...args]; // copy so the caller's args stay unchanged | ||
60 | cargoArgs.push("--message-format=json"); | ||
61 | |||
62 | const artifacts = await this.artifactsFromArgs(cargoArgs); | ||
63 | |||
64 | if (artifacts.length === 0) { | ||
65 | throw new Error('No compilation artifacts'); | ||
66 | } else if (artifacts.length > 1) { | ||
67 | throw new Error('Multiple compilation artifacts are not supported.'); | ||
68 | } | ||
69 | |||
70 | return artifacts[0].fileName; | ||
71 | } | ||
72 | |||
73 | runCargo( | ||
74 | cargoArgs: string[], | ||
75 | onStdoutJson: (obj: any) => void, | ||
76 | onStderrString: (data: string) => void | ||
77 | ): Promise<number> { | ||
78 | return new Promise<number>((resolve, reject) => { | ||
79 | const cargo = cp.spawn('cargo', cargoArgs, { | ||
80 | stdio: ['ignore', 'pipe', 'pipe'], | ||
81 | cwd: this.rootFolder, | ||
82 | env: this.env, | ||
83 | }); | ||
84 | |||
85 | cargo.on('error', err => { | ||
86 | reject(new Error(`could not launch cargo: ${err}`)); | ||
87 | }); | ||
88 | cargo.stderr.on('data', chunk => { | ||
89 | onStderrString(chunk.toString()); | ||
90 | }); | ||
91 | |||
92 | const rl = readline.createInterface({ input: cargo.stdout }); | ||
93 | rl.on('line', line => { | ||
94 | const message = JSON.parse(line); | ||
95 | onStdoutJson(message); | ||
96 | }); | ||
97 | |||
98 | cargo.on('exit', (exitCode, _) => { | ||
99 | if (exitCode === 0) | ||
100 | resolve(exitCode); | ||
101 | else | ||
102 | reject(new Error(`exit code: ${exitCode}.`)); | ||
103 | }); | ||
104 | }); | ||
105 | } | ||
106 | } \ No newline at end of file | ||
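A minimal usage sketch for the new `Cargo` helper: it resolves the binary that a runnable's cargo invocation would produce by parsing `--message-format=json` output. The package name and args below are hypothetical; in the extension they come from the `ra.Runnable`.

    import * as vscode from 'vscode';
    import { Cargo } from './cargo';

    // Sketch, assuming a workspace folder and an output channel for compiler messages.
    async function resolveExecutable(): Promise<string> {
        const output = vscode.window.createOutputChannel("Debug");
        const cargo = new Cargo(vscode.workspace.rootPath ?? '.', output);
        // executableFromArgs appends --message-format=json and expects exactly
        // one compilation artifact, throwing otherwise.
        return cargo.executableFromArgs(["test", "--no-run", "--package", "my-crate"]);
    }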
diff --git a/editors/code/src/commands/runnables.ts b/editors/code/src/commands/runnables.ts index 2635a1440..d77e8188c 100644 --- a/editors/code/src/commands/runnables.ts +++ b/editors/code/src/commands/runnables.ts | |||
@@ -1,8 +1,10 @@ | |||
1 | import * as vscode from 'vscode'; | 1 | import * as vscode from 'vscode'; |
2 | import * as lc from 'vscode-languageclient'; | 2 | import * as lc from 'vscode-languageclient'; |
3 | import * as ra from '../rust-analyzer-api'; | 3 | import * as ra from '../rust-analyzer-api'; |
4 | import * as os from "os"; | ||
4 | 5 | ||
5 | import { Ctx, Cmd } from '../ctx'; | 6 | import { Ctx, Cmd } from '../ctx'; |
7 | import { Cargo } from '../cargo'; | ||
6 | 8 | ||
7 | export function run(ctx: Ctx): Cmd { | 9 | export function run(ctx: Ctx): Cmd { |
8 | let prevRunnable: RunnableQuickPick | undefined; | 10 | let prevRunnable: RunnableQuickPick | undefined; |
@@ -62,25 +64,69 @@ export function runSingle(ctx: Ctx): Cmd { | |||
62 | }; | 64 | }; |
63 | } | 65 | } |
64 | 66 | ||
67 | function getLldbDebugConfig(config: ra.Runnable, sourceFileMap: Record<string, string>): vscode.DebugConfiguration { | ||
68 | return { | ||
69 | type: "lldb", | ||
70 | request: "launch", | ||
71 | name: config.label, | ||
72 | cargo: { | ||
73 | args: config.args, | ||
74 | }, | ||
75 | args: config.extraArgs, | ||
76 | cwd: config.cwd, | ||
77 | sourceMap: sourceFileMap | ||
78 | }; | ||
79 | } | ||
80 | |||
81 | const debugOutput = vscode.window.createOutputChannel("Debug"); | ||
82 | |||
83 | async function getCppvsDebugConfig(config: ra.Runnable, sourceFileMap: Record<string, string>): Promise<vscode.DebugConfiguration> { | ||
84 | debugOutput.clear(); | ||
85 | |||
86 | const cargo = new Cargo(config.cwd || '.', debugOutput); | ||
87 | const executable = await cargo.executableFromArgs(config.args); | ||
88 | |||
89 | // if we are here, there were no compilation errors. | ||
90 | return { | ||
91 | type: (os.platform() === "win32") ? "cppvsdbg" : 'cppdbg', | ||
92 | request: "launch", | ||
93 | name: config.label, | ||
94 | program: executable, | ||
95 | args: config.extraArgs, | ||
96 | cwd: config.cwd, | ||
97 | sourceFileMap: sourceFileMap, | ||
98 | }; | ||
99 | } | ||
100 | |||
65 | export function debugSingle(ctx: Ctx): Cmd { | 101 | export function debugSingle(ctx: Ctx): Cmd { |
66 | return async (config: ra.Runnable) => { | 102 | return async (config: ra.Runnable) => { |
67 | const editor = ctx.activeRustEditor; | 103 | const editor = ctx.activeRustEditor; |
68 | if (!editor) return; | 104 | if (!editor) return; |
69 | if (!vscode.extensions.getExtension("vadimcn.vscode-lldb")) { | 105 | |
70 | vscode.window.showErrorMessage("Install `vadimcn.vscode-lldb` extension for debugging"); | 106 | const lldbId = "vadimcn.vscode-lldb"; |
107 | const cpptoolsId = "ms-vscode.cpptools"; | ||
108 | |||
109 | const debugEngineId = ctx.config.debug.engine; | ||
110 | let debugEngine = null; | ||
111 | if (debugEngineId === "auto") { | ||
112 | debugEngine = vscode.extensions.getExtension(lldbId); | ||
113 | if (!debugEngine) { | ||
114 | debugEngine = vscode.extensions.getExtension(cpptoolsId); | ||
115 | } | ||
116 | } | ||
117 | else { | ||
118 | debugEngine = vscode.extensions.getExtension(debugEngineId); | ||
119 | } | ||
120 | |||
121 | if (!debugEngine) { | ||
122 | vscode.window.showErrorMessage(`Install [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=${lldbId})` | ||
123 | + ` or [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=${cpptoolsId}) extension for debugging.`); | ||
71 | return; | 124 | return; |
72 | } | 125 | } |
73 | 126 | ||
74 | const debugConfig = { | 127 | const debugConfig = lldbId === debugEngine.id |
75 | type: "lldb", | 128 | ? getLldbDebugConfig(config, ctx.config.debug.sourceFileMap) |
76 | request: "launch", | 129 | : await getCppvsDebugConfig(config, ctx.config.debug.sourceFileMap); |
77 | name: config.label, | ||
78 | cargo: { | ||
79 | args: config.args, | ||
80 | }, | ||
81 | args: config.extraArgs, | ||
82 | cwd: config.cwd | ||
83 | }; | ||
84 | 130 | ||
85 | return vscode.debug.startDebugging(undefined, debugConfig); | 131 | return vscode.debug.startDebugging(undefined, debugConfig); |
86 | }; | 132 | }; |
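For reference, these are the two configuration shapes `debugSingle` can end up handing to `vscode.debug.startDebugging`, depending on which debug extension is found. This is an illustrative sketch only; the field values are hypothetical examples, not output of the code above.

    // CodeLLDB: lets the extension delegate the build to its own `cargo` launch support.
    const lldbConfig = {
        type: "lldb",
        request: "launch",
        name: "run my-crate",
        cargo: { args: ["run", "--package", "my-crate"] },
        args: [],
        cwd: "${workspaceFolder}",
        sourceMap: {},
    };

    // MS C++ tools: the binary is built first via Cargo.executableFromArgs.
    const cpptoolsConfig = {
        type: "cppvsdbg",                     // "cppdbg" on non-Windows platforms
        request: "launch",
        name: "run my-crate",
        program: "target/debug/my-crate",     // resolved executable path
        args: [],
        cwd: "${workspaceFolder}",
        sourceFileMap: {},
    };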
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts index 3b2eec8ba..110e54180 100644 --- a/editors/code/src/config.ts +++ b/editors/code/src/config.ts | |||
@@ -92,7 +92,6 @@ export class Config { | |||
92 | get askBeforeDownload() { return this.get<boolean>("updates.askBeforeDownload"); } | 92 | get askBeforeDownload() { return this.get<boolean>("updates.askBeforeDownload"); } |
93 | get traceExtension() { return this.get<boolean>("trace.extension"); } | 93 | get traceExtension() { return this.get<boolean>("trace.extension"); } |
94 | 94 | ||
95 | |||
96 | get inlayHints() { | 95 | get inlayHints() { |
97 | return { | 96 | return { |
98 | typeHints: this.get<boolean>("inlayHints.typeHints"), | 97 | typeHints: this.get<boolean>("inlayHints.typeHints"), |
@@ -107,4 +106,12 @@ export class Config { | |||
107 | command: this.get<string>("checkOnSave.command"), | 106 | command: this.get<string>("checkOnSave.command"), |
108 | }; | 107 | }; |
109 | } | 108 | } |
109 | |||
110 | get debug() { | ||
111 | return { | ||
112 | engine: this.get<string>("debug.engine"), | ||
113 | sourceFileMap: this.get<Record<string, string>>("debug.sourceFileMap"), | ||
114 | }; | ||
115 | } | ||
116 | |||
110 | } | 117 | } |
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs index 9c02f7c6f..bdd42cb76 100644 --- a/xtask/src/ast_src.rs +++ b/xtask/src/ast_src.rs | |||
@@ -162,7 +162,6 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc { | |||
162 | "RECORD_LIT", | 162 | "RECORD_LIT", |
163 | "RECORD_FIELD_LIST", | 163 | "RECORD_FIELD_LIST", |
164 | "RECORD_FIELD", | 164 | "RECORD_FIELD", |
165 | "TRY_BLOCK_EXPR", | ||
166 | "BOX_EXPR", | 165 | "BOX_EXPR", |
167 | // postfix | 166 | // postfix |
168 | "CALL_EXPR", | 167 | "CALL_EXPR", |
@@ -440,7 +439,6 @@ pub(crate) const AST_SRC: AstSrc = AstSrc { | |||
440 | } | 439 | } |
441 | struct IfExpr: AttrsOwner { T![if], Condition } | 440 | struct IfExpr: AttrsOwner { T![if], Condition } |
442 | struct LoopExpr: AttrsOwner, LoopBodyOwner { T![loop] } | 441 | struct LoopExpr: AttrsOwner, LoopBodyOwner { T![loop] } |
443 | struct TryBlockExpr: AttrsOwner { T![try], body: BlockExpr } | ||
444 | struct ForExpr: AttrsOwner, LoopBodyOwner { | 442 | struct ForExpr: AttrsOwner, LoopBodyOwner { |
445 | T![for], | 443 | T![for], |
446 | Pat, | 444 | Pat, |
@@ -451,7 +449,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc { | |||
451 | struct ContinueExpr: AttrsOwner { T![continue], T![lifetime] } | 449 | struct ContinueExpr: AttrsOwner { T![continue], T![lifetime] } |
452 | struct BreakExpr: AttrsOwner { T![break], T![lifetime], Expr } | 450 | struct BreakExpr: AttrsOwner { T![break], T![lifetime], Expr } |
453 | struct Label { T![lifetime] } | 451 | struct Label { T![lifetime] } |
454 | struct BlockExpr: AttrsOwner { Label, T![unsafe], Block } | 452 | struct BlockExpr: AttrsOwner { Label, T![unsafe], T![async], Block } |
455 | struct ReturnExpr: AttrsOwner { Expr } | 453 | struct ReturnExpr: AttrsOwner { Expr } |
456 | struct CallExpr: ArgListOwner { Expr } | 454 | struct CallExpr: ArgListOwner { Expr } |
457 | struct MethodCallExpr: AttrsOwner, ArgListOwner { | 455 | struct MethodCallExpr: AttrsOwner, ArgListOwner { |
@@ -595,7 +593,7 @@ pub(crate) const AST_SRC: AstSrc = AstSrc { | |||
595 | qualifier: Path, | 593 | qualifier: Path, |
596 | } | 594 | } |
597 | struct PathSegment { | 595 | struct PathSegment { |
598 | T![::], T![<], NameRef, TypeArgList, ParamList, RetType, PathType, T![>] | 596 | T![::], T![crate], T![<], NameRef, TypeArgList, ParamList, RetType, PathType, T![>] |
599 | } | 597 | } |
600 | struct TypeArgList { | 598 | struct TypeArgList { |
601 | T![::], | 599 | T![::], |
@@ -722,7 +720,6 @@ pub(crate) const AST_SRC: AstSrc = AstSrc { | |||
722 | FieldExpr, | 720 | FieldExpr, |
723 | AwaitExpr, | 721 | AwaitExpr, |
724 | TryExpr, | 722 | TryExpr, |
725 | TryBlockExpr, | ||
726 | CastExpr, | 723 | CastExpr, |
727 | RefExpr, | 724 | RefExpr, |
728 | PrefixExpr, | 725 | PrefixExpr, |
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs index a56eeef8d..aef68089e 100644 --- a/xtask/src/dist.rs +++ b/xtask/src/dist.rs | |||
@@ -50,21 +50,19 @@ fn dist_server(nightly: bool) -> Result<()> { | |||
50 | if cfg!(target_os = "linux") { | 50 | if cfg!(target_os = "linux") { |
51 | std::env::set_var("CC", "clang"); | 51 | std::env::set_var("CC", "clang"); |
52 | run!( | 52 | run!( |
53 | "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release | 53 | "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release" |
54 | --target x86_64-unknown-linux-musl | ||
55 | " | ||
56 | // We'd want to add, but that requires setting the right linker somehow | 54 | // We'd want to add, but that requires setting the right linker somehow |
57 | // --features=jemalloc | 55 | // --features=jemalloc |
58 | )?; | 56 | )?; |
59 | if !nightly { | 57 | if !nightly { |
60 | run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?; | 58 | run!("strip ./target/release/rust-analyzer")?; |
61 | } | 59 | } |
62 | } else { | 60 | } else { |
63 | run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?; | 61 | run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?; |
64 | } | 62 | } |
65 | 63 | ||
66 | let (src, dst) = if cfg!(target_os = "linux") { | 64 | let (src, dst) = if cfg!(target_os = "linux") { |
67 | ("./target/x86_64-unknown-linux-musl/release/rust-analyzer", "./dist/rust-analyzer-linux") | 65 | ("./target/release/rust-analyzer", "./dist/rust-analyzer-linux") |
68 | } else if cfg!(target_os = "windows") { | 66 | } else if cfg!(target_os = "windows") { |
69 | ("./target/release/rust-analyzer.exe", "./dist/rust-analyzer-windows.exe") | 67 | ("./target/release/rust-analyzer.exe", "./dist/rust-analyzer-windows.exe") |
70 | } else if cfg!(target_os = "macos") { | 68 | } else if cfg!(target_os = "macos") { |
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs index ec824a518..2b7a461e5 100644 --- a/xtask/src/lib.rs +++ b/xtask/src/lib.rs | |||
@@ -10,23 +10,19 @@ pub mod pre_commit; | |||
10 | pub mod codegen; | 10 | pub mod codegen; |
11 | mod ast_src; | 11 | mod ast_src; |
12 | 12 | ||
13 | use anyhow::Context; | ||
14 | use std::{ | 13 | use std::{ |
15 | env, | 14 | env, |
16 | io::Write, | ||
17 | path::{Path, PathBuf}, | 15 | path::{Path, PathBuf}, |
18 | process::{Command, Stdio}, | ||
19 | }; | 16 | }; |
17 | |||
20 | use walkdir::{DirEntry, WalkDir}; | 18 | use walkdir::{DirEntry, WalkDir}; |
21 | 19 | ||
22 | use crate::{ | 20 | use crate::{ |
23 | codegen::Mode, | 21 | codegen::Mode, |
24 | not_bash::{date_iso, fs2, pushd, rm_rf, run}, | 22 | not_bash::{date_iso, fs2, pushd, pushenv, rm_rf, run}, |
25 | }; | 23 | }; |
26 | 24 | ||
27 | pub use anyhow::Result; | 25 | pub use anyhow::{bail, Context as _, Result}; |
28 | |||
29 | const TOOLCHAIN: &str = "stable"; | ||
30 | 26 | ||
31 | pub fn project_root() -> PathBuf { | 27 | pub fn project_root() -> PathBuf { |
32 | Path::new( | 28 | Path::new( |
@@ -55,54 +51,44 @@ pub fn rust_files(path: &Path) -> impl Iterator<Item = PathBuf> { | |||
55 | 51 | ||
56 | pub fn run_rustfmt(mode: Mode) -> Result<()> { | 52 | pub fn run_rustfmt(mode: Mode) -> Result<()> { |
57 | let _dir = pushd(project_root()); | 53 | let _dir = pushd(project_root()); |
54 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
58 | ensure_rustfmt()?; | 55 | ensure_rustfmt()?; |
59 | 56 | match mode { | |
60 | let check = if mode == Mode::Verify { "--check" } else { "" }; | 57 | Mode::Overwrite => run!("cargo fmt"), |
61 | run!("rustup run {} -- cargo fmt -- {}", TOOLCHAIN, check)?; | 58 | Mode::Verify => run!("cargo fmt -- --check"), |
59 | }?; | ||
62 | Ok(()) | 60 | Ok(()) |
63 | } | 61 | } |
64 | 62 | ||
65 | fn reformat(text: impl std::fmt::Display) -> Result<String> { | 63 | fn reformat(text: impl std::fmt::Display) -> Result<String> { |
64 | let _e = pushenv("RUSTUP_TOOLCHAIN", "stable"); | ||
66 | ensure_rustfmt()?; | 65 | ensure_rustfmt()?; |
67 | let mut rustfmt = Command::new("rustup") | 66 | let stdout = run!( |
68 | .args(&["run", TOOLCHAIN, "--", "rustfmt", "--config-path"]) | 67 | "rustfmt --config-path {} --config fn_single_line=true", project_root().join("rustfmt.toml").display(); |
69 | .arg(project_root().join("rustfmt.toml")) | 68 | <text.to_string().as_bytes() |
70 | .args(&["--config", "fn_single_line=true"]) | 69 | )?; |
71 | .stdin(Stdio::piped()) | ||
72 | .stdout(Stdio::piped()) | ||
73 | .spawn()?; | ||
74 | write!(rustfmt.stdin.take().unwrap(), "{}", text)?; | ||
75 | let output = rustfmt.wait_with_output()?; | ||
76 | let stdout = String::from_utf8(output.stdout)?; | ||
77 | let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`"; | 70 | let preamble = "Generated file, do not edit by hand, see `xtask/src/codegen`"; |
78 | Ok(format!("//! {}\n\n{}", preamble, stdout)) | 71 | Ok(format!("//! {}\n\n{}\n", preamble, stdout)) |
79 | } | 72 | } |
80 | 73 | ||
81 | fn ensure_rustfmt() -> Result<()> { | 74 | fn ensure_rustfmt() -> Result<()> { |
82 | match Command::new("rustup") | 75 | let out = run!("rustfmt --version")?; |
83 | .args(&["run", TOOLCHAIN, "--", "cargo", "fmt", "--version"]) | 76 | if !out.contains("stable") { |
84 | .stderr(Stdio::null()) | 77 | bail!( |
85 | .stdout(Stdio::null()) | 78 | "Failed to run rustfmt from toolchain 'stable'. \ |
86 | .status() | 79 | Please run `rustup component add rustfmt --toolchain stable` to install it.", |
87 | { | 80 | ) |
88 | Ok(status) if status.success() => return Ok(()), | 81 | } |
89 | _ => (), | ||
90 | }; | ||
91 | run!("rustup toolchain install {}", TOOLCHAIN)?; | ||
92 | run!("rustup component add rustfmt --toolchain {}", TOOLCHAIN)?; | ||
93 | Ok(()) | 82 | Ok(()) |
94 | } | 83 | } |
95 | 84 | ||
96 | pub fn run_clippy() -> Result<()> { | 85 | pub fn run_clippy() -> Result<()> { |
97 | match Command::new("rustup") | 86 | if run!("cargo clippy --version").is_err() { |
98 | .args(&["run", TOOLCHAIN, "--", "cargo", "clippy", "--version"]) | 87 | bail!( |
99 | .stderr(Stdio::null()) | 88 | "Failed run cargo clippy. \ |
100 | .stdout(Stdio::null()) | 88 | "Failed to run cargo clippy. \ |
101 | .status() | 90 | ) |
102 | { | 91 | } |
103 | Ok(status) if status.success() => (), | ||
104 | _ => install_clippy().context("install clippy")?, | ||
105 | }; | ||
106 | 92 | ||
107 | let allowed_lints = [ | 93 | let allowed_lints = [ |
108 | "clippy::collapsible_if", | 94 | "clippy::collapsible_if", |
@@ -110,27 +96,24 @@ pub fn run_clippy() -> Result<()> { | |||
110 | "clippy::nonminimal_bool", | 96 | "clippy::nonminimal_bool", |
111 | "clippy::redundant_pattern_matching", | 97 | "clippy::redundant_pattern_matching", |
112 | ]; | 98 | ]; |
113 | run!( | 99 | run!("cargo clippy --all-features --all-targets -- -A {}", allowed_lints.join(" -A "))?; |
114 | "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}", | ||
115 | TOOLCHAIN, | ||
116 | allowed_lints.join(" -A ") | ||
117 | )?; | ||
118 | Ok(()) | ||
119 | } | ||
120 | |||
121 | fn install_clippy() -> Result<()> { | ||
122 | run!("rustup toolchain install {}", TOOLCHAIN)?; | ||
123 | run!("rustup component add clippy --toolchain {}", TOOLCHAIN)?; | ||
124 | Ok(()) | 100 | Ok(()) |
125 | } | 101 | } |
126 | 102 | ||
127 | pub fn run_fuzzer() -> Result<()> { | 103 | pub fn run_fuzzer() -> Result<()> { |
128 | let _d = pushd("./crates/ra_syntax"); | 104 | let _d = pushd("./crates/ra_syntax"); |
105 | let _e = pushenv("RUSTUP_TOOLCHAIN", "nightly"); | ||
129 | if run!("cargo fuzz --help").is_err() { | 106 | if run!("cargo fuzz --help").is_err() { |
130 | run!("cargo install cargo-fuzz")?; | 107 | run!("cargo install cargo-fuzz")?; |
131 | }; | 108 | }; |
132 | 109 | ||
133 | run!("rustup run nightly -- cargo fuzz run parser")?; | 110 | // Expecting nightly rustc |
111 | let out = run!("rustc --version")?; | ||
112 | if !out.contains("nightly") { | ||
113 | bail!("fuzz tests require nightly rustc") | ||
114 | } | ||
115 | |||
116 | run!("cargo fuzz run parser")?; | ||
134 | Ok(()) | 117 | Ok(()) |
135 | } | 118 | } |
136 | 119 | ||
diff --git a/xtask/src/not_bash.rs b/xtask/src/not_bash.rs index ef1699934..a6431e586 100644 --- a/xtask/src/not_bash.rs +++ b/xtask/src/not_bash.rs | |||
@@ -3,6 +3,8 @@ | |||
3 | use std::{ | 3 | use std::{ |
4 | cell::RefCell, | 4 | cell::RefCell, |
5 | env, | 5 | env, |
6 | ffi::OsString, | ||
7 | io::Write, | ||
6 | path::{Path, PathBuf}, | 8 | path::{Path, PathBuf}, |
7 | process::{Command, Stdio}, | 9 | process::{Command, Stdio}, |
8 | }; | 10 | }; |
@@ -57,7 +59,10 @@ macro_rules! _run { | |||
57 | run!($($expr),*; echo = true) | 59 | run!($($expr),*; echo = true) |
58 | }; | 60 | }; |
59 | ($($expr:expr),* ; echo = $echo:expr) => { | 61 | ($($expr:expr),* ; echo = $echo:expr) => { |
60 | $crate::not_bash::run_process(format!($($expr),*), $echo) | 62 | $crate::not_bash::run_process(format!($($expr),*), $echo, None) |
63 | }; | ||
64 | ($($expr:expr),* ; <$stdin:expr) => { | ||
65 | $crate::not_bash::run_process(format!($($expr),*), false, Some($stdin)) | ||
61 | }; | 66 | }; |
62 | } | 67 | } |
63 | pub(crate) use _run as run; | 68 | pub(crate) use _run as run; |
@@ -77,6 +82,21 @@ impl Drop for Pushd { | |||
77 | } | 82 | } |
78 | } | 83 | } |
79 | 84 | ||
85 | pub struct Pushenv { | ||
86 | _p: (), | ||
87 | } | ||
88 | |||
89 | pub fn pushenv(var: &str, value: &str) -> Pushenv { | ||
90 | Env::with(|env| env.pushenv(var.into(), value.into())); | ||
91 | Pushenv { _p: () } | ||
92 | } | ||
93 | |||
94 | impl Drop for Pushenv { | ||
95 | fn drop(&mut self) { | ||
96 | Env::with(|env| env.popenv()) | ||
97 | } | ||
98 | } | ||
99 | |||
80 | pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> { | 100 | pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> { |
81 | let path = path.as_ref(); | 101 | let path = path.as_ref(); |
82 | if !path.exists() { | 102 | if !path.exists() { |
@@ -90,15 +110,15 @@ pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> { | |||
90 | } | 110 | } |
91 | 111 | ||
92 | #[doc(hidden)] | 112 | #[doc(hidden)] |
93 | pub fn run_process(cmd: String, echo: bool) -> Result<String> { | 113 | pub fn run_process(cmd: String, echo: bool, stdin: Option<&[u8]>) -> Result<String> { |
94 | run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd)) | 114 | run_process_inner(&cmd, echo, stdin).with_context(|| format!("process `{}` failed", cmd)) |
95 | } | 115 | } |
96 | 116 | ||
97 | pub fn date_iso() -> Result<String> { | 117 | pub fn date_iso() -> Result<String> { |
98 | run!("date --iso --utc") | 118 | run!("date --iso --utc") |
99 | } | 119 | } |
100 | 120 | ||
101 | fn run_process_inner(cmd: &str, echo: bool) -> Result<String> { | 121 | fn run_process_inner(cmd: &str, echo: bool, stdin: Option<&[u8]>) -> Result<String> { |
102 | let mut args = shelx(cmd); | 122 | let mut args = shelx(cmd); |
103 | let binary = args.remove(0); | 123 | let binary = args.remove(0); |
104 | let current_dir = Env::with(|it| it.cwd().to_path_buf()); | 124 | let current_dir = Env::with(|it| it.cwd().to_path_buf()); |
@@ -107,12 +127,17 @@ fn run_process_inner(cmd: &str, echo: bool) -> Result<String> { | |||
107 | println!("> {}", cmd) | 127 | println!("> {}", cmd) |
108 | } | 128 | } |
109 | 129 | ||
110 | let output = Command::new(binary) | 130 | let mut command = Command::new(binary); |
111 | .args(args) | 131 | command.args(args).current_dir(current_dir).stderr(Stdio::inherit()); |
112 | .current_dir(current_dir) | 132 | let output = match stdin { |
113 | .stdin(Stdio::null()) | 133 | None => command.stdin(Stdio::null()).output(), |
114 | .stderr(Stdio::inherit()) | 134 | Some(stdin) => { |
115 | .output()?; | 135 | command.stdin(Stdio::piped()).stdout(Stdio::piped()); |
136 | let mut process = command.spawn()?; | ||
137 | process.stdin.take().unwrap().write_all(stdin)?; | ||
138 | process.wait_with_output() | ||
139 | } | ||
140 | }?; | ||
116 | let stdout = String::from_utf8(output.stdout)?; | 141 | let stdout = String::from_utf8(output.stdout)?; |
117 | 142 | ||
118 | if echo { | 143 | if echo { |
@@ -133,13 +158,15 @@ fn shelx(cmd: &str) -> Vec<String> { | |||
133 | 158 | ||
134 | struct Env { | 159 | struct Env { |
135 | pushd_stack: Vec<PathBuf>, | 160 | pushd_stack: Vec<PathBuf>, |
161 | pushenv_stack: Vec<(OsString, Option<OsString>)>, | ||
136 | } | 162 | } |
137 | 163 | ||
138 | impl Env { | 164 | impl Env { |
139 | fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T { | 165 | fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T { |
140 | thread_local! { | 166 | thread_local! { |
141 | static ENV: RefCell<Env> = RefCell::new(Env { | 167 | static ENV: RefCell<Env> = RefCell::new(Env { |
142 | pushd_stack: vec![env::current_dir().unwrap()] | 168 | pushd_stack: vec![env::current_dir().unwrap()], |
169 | pushenv_stack: vec![], | ||
143 | }); | 170 | }); |
144 | } | 171 | } |
145 | ENV.with(|it| f(&mut *it.borrow_mut())) | 172 | ENV.with(|it| f(&mut *it.borrow_mut())) |
@@ -154,6 +181,17 @@ impl Env { | |||
154 | self.pushd_stack.pop().unwrap(); | 181 | self.pushd_stack.pop().unwrap(); |
155 | env::set_current_dir(self.cwd()).unwrap(); | 182 | env::set_current_dir(self.cwd()).unwrap(); |
156 | } | 183 | } |
184 | fn pushenv(&mut self, var: OsString, value: OsString) { | ||
185 | self.pushenv_stack.push((var.clone(), env::var_os(&var))); | ||
186 | env::set_var(var, value) | ||
187 | } | ||
188 | fn popenv(&mut self) { | ||
189 | let (var, value) = self.pushenv_stack.pop().unwrap(); | ||
190 | match value { | ||
191 | None => env::remove_var(var), | ||
192 | Some(value) => env::set_var(var, value), | ||
193 | } | ||
194 | } | ||
157 | fn cwd(&self) -> &Path { | 195 | fn cwd(&self) -> &Path { |
158 | self.pushd_stack.last().unwrap() | 196 | self.pushd_stack.last().unwrap() |
159 | } | 197 | } |
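The `pushenv`/`popenv` pair above temporarily overrides an environment variable and restores (or removes) it afterwards. For readers more at home on the extension's TypeScript side, the same pattern might look roughly like the sketch below; this is an illustrative analogue assuming a Node environment, not part of the change itself.

    // Sketch: save the previous value of an environment variable, set a new one,
    // and restore (or delete) it on dispose.
    class Pushenv {
        private readonly prev: string | undefined;
        constructor(private readonly name: string, value: string) {
            this.prev = process.env[name];
            process.env[name] = value;
        }
        dispose(): void {
            if (this.prev === undefined) {
                delete process.env[this.name];
            } else {
                process.env[this.name] = this.prev;
            }
        }
    }

    // Usage sketch: pin the toolchain while a command runs.
    const guard = new Pushenv("RUSTUP_TOOLCHAIN", "stable");
    try {
        // ... run rustfmt / clippy here ...
    } finally {
        guard.dispose();
    }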