88 files changed, 381 insertions, 391 deletions
diff --git a/Cargo.lock b/Cargo.lock
index e7b873076..c10d72aa4 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1043,6 +1043,7 @@ version = "0.1.0"
1043 | dependencies = [ | 1043 | dependencies = [ |
1044 | "either", | 1044 | "either", |
1045 | "format-buf", | 1045 | "format-buf", |
1046 | "fst", | ||
1046 | "indexmap", | 1047 | "indexmap", |
1047 | "insta", | 1048 | "insta", |
1048 | "itertools", | 1049 | "itertools", |
@@ -1059,8 +1060,11 @@ dependencies = [
1059 | "ra_syntax", | 1060 | "ra_syntax", |
1060 | "ra_text_edit", | 1061 | "ra_text_edit", |
1061 | "rand", | 1062 | "rand", |
1063 | "rayon", | ||
1062 | "rustc-hash", | 1064 | "rustc-hash", |
1065 | "superslice", | ||
1063 | "test_utils", | 1066 | "test_utils", |
1067 | "unicase", | ||
1064 | ] | 1068 | ] |
1065 | 1069 | ||
1066 | [[package]] | 1070 | [[package]] |
@@ -1087,6 +1091,7 @@ dependencies = [
1087 | "rustc-hash", | 1091 | "rustc-hash", |
1088 | "superslice", | 1092 | "superslice", |
1089 | "test_utils", | 1093 | "test_utils", |
1094 | "unicase", | ||
1090 | ] | 1095 | ] |
1091 | 1096 | ||
1092 | [[package]] | 1097 | [[package]] |
@@ -1598,6 +1603,15 @@ dependencies = [
1598 | ] | 1603 | ] |
1599 | 1604 | ||
1600 | [[package]] | 1605 | [[package]] |
1606 | name = "unicase" | ||
1607 | version = "2.6.0" | ||
1608 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
1609 | checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6" | ||
1610 | dependencies = [ | ||
1611 | "version_check", | ||
1612 | ] | ||
1613 | |||
1614 | [[package]] | ||
1601 | name = "unicode-bidi" | 1615 | name = "unicode-bidi" |
1602 | version = "0.3.4" | 1616 | version = "0.3.4" |
1603 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1617 | source = "registry+https://github.com/rust-lang/crates.io-index" |
@@ -1640,6 +1654,12 @@ dependencies = [
1640 | ] | 1654 | ] |
1641 | 1655 | ||
1642 | [[package]] | 1656 | [[package]] |
1657 | name = "version_check" | ||
1658 | version = "0.9.1" | ||
1659 | source = "registry+https://github.com/rust-lang/crates.io-index" | ||
1660 | checksum = "078775d0255232fb988e6fccf26ddc9d1ac274299aaedcedce21c6f72cc533ce" | ||
1661 | |||
1662 | [[package]] | ||
1643 | name = "walkdir" | 1663 | name = "walkdir" |
1644 | version = "2.3.1" | 1664 | version = "2.3.1" |
1645 | source = "registry+https://github.com/rust-lang/crates.io-index" | 1665 | source = "registry+https://github.com/rust-lang/crates.io-index" |
diff --git a/crates/ra_assists/Cargo.toml b/crates/ra_assists/Cargo.toml
index 6973038d4..12a933645 100644
--- a/crates/ra_assists/Cargo.toml
+++ b/crates/ra_assists/Cargo.toml
@@ -10,8 +10,8 @@ doctest = false
10 | [dependencies] | 10 | [dependencies] |
11 | format-buf = "1.0.0" | 11 | format-buf = "1.0.0" |
12 | join_to_string = "0.1.3" | 12 | join_to_string = "0.1.3" |
13 | rustc-hash = "1.0" | 13 | rustc-hash = "1.1.0" |
14 | either = "1.5" | 14 | either = "1.5.3" |
15 | 15 | ||
16 | ra_syntax = { path = "../ra_syntax" } | 16 | ra_syntax = { path = "../ra_syntax" } |
17 | ra_text_edit = { path = "../ra_text_edit" } | 17 | ra_text_edit = { path = "../ra_text_edit" } |
diff --git a/crates/ra_assists/src/handlers/add_custom_impl.rs b/crates/ra_assists/src/handlers/add_custom_impl.rs
index 7fdd816bf..74aa4b001 100644
--- a/crates/ra_assists/src/handlers/add_custom_impl.rs
+++ b/crates/ra_assists/src/handlers/add_custom_impl.rs
@@ -43,9 +43,9 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
43 | .clone(); | 43 | .clone(); |
44 | 44 | ||
45 | let trait_token = | 45 | let trait_token = |
46 | ctx.token_at_offset().filter(|t| t.kind() == IDENT && *t.text() != attr_name).next()?; | 46 | ctx.token_at_offset().find(|t| t.kind() == IDENT && *t.text() != attr_name)?; |
47 | 47 | ||
48 | let annotated = attr.syntax().siblings(Direction::Next).find_map(|s| ast::Name::cast(s))?; | 48 | let annotated = attr.syntax().siblings(Direction::Next).find_map(ast::Name::cast)?; |
49 | let annotated_name = annotated.syntax().text().to_string(); | 49 | let annotated_name = annotated.syntax().text().to_string(); |
50 | let start_offset = annotated.syntax().parent()?.text_range().end(); | 50 | let start_offset = annotated.syntax().parent()?.text_range().end(); |
51 | 51 | ||
@@ -62,7 +62,7 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
62 | .filter_map(|t| t.into_token().map(|t| t.text().clone())) | 62 | .filter_map(|t| t.into_token().map(|t| t.text().clone())) |
63 | .filter(|t| t != trait_token.text()) | 63 | .filter(|t| t != trait_token.text()) |
64 | .collect::<Vec<SmolStr>>(); | 64 | .collect::<Vec<SmolStr>>(); |
65 | let has_more_derives = new_attr_input.len() > 0; | 65 | let has_more_derives = !new_attr_input.is_empty(); |
66 | let new_attr_input = | 66 | let new_attr_input = |
67 | join(new_attr_input.iter()).separator(", ").surround_with("(", ")").to_string(); | 67 | join(new_attr_input.iter()).separator(", ").surround_with("(", ")").to_string(); |
68 | let new_attr_input_len = new_attr_input.len(); | 68 | let new_attr_input_len = new_attr_input.len(); |
@@ -86,7 +86,7 @@ pub(crate) fn add_custom_impl(ctx: AssistCtx) -> Option<Assist> {
86 | .next_sibling_or_token() | 86 | .next_sibling_or_token() |
87 | .filter(|t| t.kind() == WHITESPACE) | 87 | .filter(|t| t.kind() == WHITESPACE) |
88 | .map(|t| t.text_range()) | 88 | .map(|t| t.text_range()) |
89 | .unwrap_or(TextRange::from_to(TextUnit::from(0), TextUnit::from(0))); | 89 | .unwrap_or_else(|| TextRange::from_to(TextUnit::from(0), TextUnit::from(0))); |
90 | edit.delete(line_break_range); | 90 | edit.delete(line_break_range); |
91 | 91 | ||
92 | attr_range.len() + line_break_range.len() | 92 | attr_range.len() + line_break_range.len() |
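For reference, the first hunk above replaces `filter(..).next()` with `find(..)`, and the last one defers the fallback range with `unwrap_or_else`; a small standalone sketch of both idioms (the data is made up for illustration):

    fn main() {
        let words = ["alpha", "beta", "gamma"];

        // `find` stops at the first match; `filter(..).next()` builds the same
        // result through an extra adapter, which is what Clippy's `filter_next` flags.
        let first_with_a = words.iter().find(|w| w.contains('a'));
        assert_eq!(first_with_a, Some(&"alpha"));

        // `unwrap_or_else` only evaluates the fallback when it is actually needed,
        // unlike `unwrap_or`, whose argument is constructed eagerly.
        let len = first_with_a.map(|w| w.len()).unwrap_or_else(|| "default".len());
        assert_eq!(len, 5);
    }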
diff --git a/crates/ra_assists/src/handlers/add_new.rs b/crates/ra_assists/src/handlers/add_new.rs
index 2701eddb8..dd070e8ec 100644
--- a/crates/ra_assists/src/handlers/add_new.rs
+++ b/crates/ra_assists/src/handlers/add_new.rs
@@ -53,7 +53,7 @@ pub(crate) fn add_new(ctx: AssistCtx) -> Option<Assist> {
53 | } | 53 | } |
54 | 54 | ||
55 | let vis = strukt.visibility().map(|v| format!("{} ", v.syntax())); | 55 | let vis = strukt.visibility().map(|v| format!("{} ", v.syntax())); |
56 | let vis = vis.as_ref().map(String::as_str).unwrap_or(""); | 56 | let vis = vis.as_deref().unwrap_or(""); |
57 | write!(&mut buf, " {}fn new(", vis).unwrap(); | 57 | write!(&mut buf, " {}fn new(", vis).unwrap(); |
58 | 58 | ||
59 | join(field_list.fields().filter_map(|f| { | 59 | join(field_list.fields().filter_map(|f| { |
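The rewritten line uses `Option::as_deref`, which goes from `Option<String>` to `Option<&str>` in one step; a minimal sketch of the equivalence:

    fn main() {
        let vis: Option<String> = Some("pub ".to_string());

        // `as_deref` borrows the String as &str; the two forms are equivalent.
        let old_style: &str = vis.as_ref().map(String::as_str).unwrap_or("");
        let new_style: &str = vis.as_deref().unwrap_or("");
        assert_eq!(old_style, new_style);
    }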
diff --git a/crates/ra_assists/src/handlers/move_guard.rs b/crates/ra_assists/src/handlers/move_guard.rs
index a61a2ba3e..1cc498638 100644
--- a/crates/ra_assists/src/handlers/move_guard.rs
+++ b/crates/ra_assists/src/handlers/move_guard.rs
@@ -44,7 +44,7 @@ pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx) -> Option<Assist> {
44 | edit.target(guard.syntax().text_range()); | 44 | edit.target(guard.syntax().text_range()); |
45 | let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) { | 45 | let offseting_amount = match space_before_guard.and_then(|it| it.into_token()) { |
46 | Some(tok) => { | 46 | Some(tok) => { |
47 | if let Some(_) = ast::Whitespace::cast(tok.clone()) { | 47 | if ast::Whitespace::cast(tok.clone()).is_some() { |
48 | let ele = tok.text_range(); | 48 | let ele = tok.text_range(); |
49 | edit.delete(ele); | 49 | edit.delete(ele); |
50 | ele.len() | 50 | ele.len() |
@@ -98,11 +98,11 @@ pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
98 | let then_block = if_expr.then_branch()?; | 98 | let then_block = if_expr.then_branch()?; |
99 | 99 | ||
100 | // Not support if with else branch | 100 | // Not support if with else branch |
101 | if let Some(_) = if_expr.else_branch() { | 101 | if if_expr.else_branch().is_some() { |
102 | return None; | 102 | return None; |
103 | } | 103 | } |
104 | // Not support moving if let to arm guard | 104 | // Not support moving if let to arm guard |
105 | if let Some(_) = cond.pat() { | 105 | if cond.pat().is_some() { |
106 | return None; | 106 | return None; |
107 | } | 107 | } |
108 | 108 | ||
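The rewrites above replace `if let Some(_) = ..` used only as a presence test with `is_some()`, the form Clippy's `redundant_pattern_matching` suggests; a tiny sketch of the pattern (the function is a stand-in, not rust-analyzer code):

    fn returns_early(cond: Option<i32>) -> Option<i32> {
        // `if let Some(_) = cond { return None; }` says the same thing less directly.
        if cond.is_some() {
            return None;
        }
        Some(0)
    }

    fn main() {
        assert_eq!(returns_early(Some(1)), None);
        assert_eq!(returns_early(None), Some(0));
    }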
diff --git a/crates/ra_assists/src/handlers/replace_if_let_with_match.rs b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs
index e6cd50bc1..0a0a88f3d 100644
--- a/crates/ra_assists/src/handlers/replace_if_let_with_match.rs
+++ b/crates/ra_assists/src/handlers/replace_if_let_with_match.rs
@@ -61,7 +61,7 @@ pub(crate) fn replace_if_let_with_match(ctx: AssistCtx) -> Option<Assist> {
61 | 61 | ||
62 | edit.target(if_expr.syntax().text_range()); | 62 | edit.target(if_expr.syntax().text_range()); |
63 | edit.set_cursor(if_expr.syntax().text_range().start()); | 63 | edit.set_cursor(if_expr.syntax().text_range().start()); |
64 | edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr.into()); | 64 | edit.replace_ast::<ast::Expr>(if_expr.into(), match_expr); |
65 | }) | 65 | }) |
66 | } | 66 | } |
67 | 67 | ||
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index cb124eaf0..a0e7fe17e 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -38,8 +38,8 @@ pub struct GroupLabel(pub String);
38 | impl AssistLabel { | 38 | impl AssistLabel { |
39 | pub(crate) fn new(label: String, id: AssistId) -> AssistLabel { | 39 | pub(crate) fn new(label: String, id: AssistId) -> AssistLabel { |
40 | // FIXME: make fields private, so that this invariant can't be broken | 40 | // FIXME: make fields private, so that this invariant can't be broken |
41 | assert!(label.chars().nth(0).unwrap().is_uppercase()); | 41 | assert!(label.chars().next().unwrap().is_uppercase()); |
42 | AssistLabel { label: label.into(), id } | 42 | AssistLabel { label, id } |
43 | } | 43 | } |
44 | } | 44 | } |
45 | 45 | ||
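The assertion now uses `chars().next()` rather than `chars().nth(0)`, and the constructor drops a no-op `.into()` on a value that is already a `String`; a small sketch of both points (illustrative only):

    fn make_label(label: String) -> String {
        // `chars().next()` is the direct way to look at the first character;
        // `chars().nth(0)` does the same job less clearly.
        assert!(label.chars().next().unwrap().is_uppercase());
        // `label` is already a String, so an extra `.into()` is a no-op conversion.
        label
    }

    fn main() {
        let l = make_label(String::from("Replace if-let with match"));
        assert!(l.starts_with("Replace"));
    }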
diff --git a/crates/ra_cargo_watch/Cargo.toml b/crates/ra_cargo_watch/Cargo.toml
index dd814fc9d..b09650d98 100644
--- a/crates/ra_cargo_watch/Cargo.toml
+++ b/crates/ra_cargo_watch/Cargo.toml
@@ -5,14 +5,14 @@ version = "0.1.0"
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | crossbeam-channel = "0.4" | 8 | crossbeam-channel = "0.4.0" |
9 | lsp-types = { version = "0.70.0", features = ["proposed"] } | 9 | lsp-types = { version = "0.70.1", features = ["proposed"] } |
10 | log = "0.4.3" | 10 | log = "0.4.8" |
11 | cargo_metadata = "0.9.1" | 11 | cargo_metadata = "0.9.1" |
12 | jod-thread = "0.1.0" | 12 | jod-thread = "0.1.0" |
13 | parking_lot = "0.10.0" | 13 | parking_lot = "0.10.0" |
14 | serde_json = "1.0.45" | 14 | serde_json = "1.0.48" |
15 | 15 | ||
16 | [dev-dependencies] | 16 | [dev-dependencies] |
17 | insta = "0.13.0" | 17 | insta = "0.13.1" |
18 | serde_json = "1.0" \ No newline at end of file | 18 | serde_json = "1.0.48" |
diff --git a/crates/ra_cargo_watch/src/conv.rs b/crates/ra_cargo_watch/src/conv.rs
index 506370535..0246adfb5 100644
--- a/crates/ra_cargo_watch/src/conv.rs
+++ b/crates/ra_cargo_watch/src/conv.rs
@@ -234,7 +234,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp(
234 | let child = map_rust_child_diagnostic(&child, workspace_root); | 234 | let child = map_rust_child_diagnostic(&child, workspace_root); |
235 | match child { | 235 | match child { |
236 | MappedRustChildDiagnostic::Related(related) => related_information.push(related), | 236 | MappedRustChildDiagnostic::Related(related) => related_information.push(related), |
237 | MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action.into()), | 237 | MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action), |
238 | MappedRustChildDiagnostic::MessageLine(message_line) => { | 238 | MappedRustChildDiagnostic::MessageLine(message_line) => { |
239 | write!(&mut message, "\n{}", message_line).unwrap(); | 239 | write!(&mut message, "\n{}", message_line).unwrap(); |
240 | 240 | ||
diff --git a/crates/ra_cfg/Cargo.toml b/crates/ra_cfg/Cargo.toml
index dd5ff88b0..9165076a5 100644
--- a/crates/ra_cfg/Cargo.toml
+++ b/crates/ra_cfg/Cargo.toml
@@ -8,7 +8,7 @@ authors = ["rust-analyzer developers"]
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | rustc-hash = "1.0.1" | 11 | rustc-hash = "1.1.0" |
12 | 12 | ||
13 | ra_syntax = { path = "../ra_syntax" } | 13 | ra_syntax = { path = "../ra_syntax" } |
14 | tt = { path = "../ra_tt", package = "ra_tt" } | 14 | tt = { path = "../ra_tt", package = "ra_tt" } |
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml
index 7afa5d8fc..878c22ba9 100644
--- a/crates/ra_db/Cargo.toml
+++ b/crates/ra_db/Cargo.toml
@@ -10,7 +10,7 @@ doctest = false
10 | [dependencies] | 10 | [dependencies] |
11 | salsa = "0.14.1" | 11 | salsa = "0.14.1" |
12 | relative-path = "1.0.0" | 12 | relative-path = "1.0.0" |
13 | rustc-hash = "1.0" | 13 | rustc-hash = "1.1.0" |
14 | 14 | ||
15 | ra_syntax = { path = "../ra_syntax" } | 15 | ra_syntax = { path = "../ra_syntax" } |
16 | ra_cfg = { path = "../ra_cfg" } | 16 | ra_cfg = { path = "../ra_cfg" } |
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index 1b4b47215..78d121683 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -249,7 +249,7 @@ impl FromStr for Edition {
249 | let res = match s { | 249 | let res = match s { |
250 | "2015" => Edition::Edition2015, | 250 | "2015" => Edition::Edition2015, |
251 | "2018" => Edition::Edition2018, | 251 | "2018" => Edition::Edition2018, |
252 | _ => Err(ParseEditionError { invalid_input: s.to_string() })?, | 252 | _ => return Err(ParseEditionError { invalid_input: s.to_string() }), |
253 | }; | 253 | }; |
254 | Ok(res) | 254 | Ok(res) |
255 | } | 255 | } |
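The match arm now returns the error explicitly instead of bouncing it through `Err(..)?`; a sketch of the same shape with a stand-in error type:

    #[derive(Debug, PartialEq)]
    struct ParseEditionError(String);

    fn parse_edition(s: &str) -> Result<u32, ParseEditionError> {
        let res = match s {
            "2015" => 2015,
            "2018" => 2018,
            // `Err(..)?` also compiles here, but the early return is more direct.
            _ => return Err(ParseEditionError(s.to_string())),
        };
        Ok(res)
    }

    fn main() {
        assert_eq!(parse_edition("2018"), Ok(2018));
        assert!(parse_edition("2021").is_err());
    }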
diff --git a/crates/ra_fmt/Cargo.toml b/crates/ra_fmt/Cargo.toml
index 9969d4746..ea9befeaf 100644
--- a/crates/ra_fmt/Cargo.toml
+++ b/crates/ra_fmt/Cargo.toml
@@ -9,6 +9,6 @@ publish = false
9 | doctest = false | 9 | doctest = false |
10 | 10 | ||
11 | [dependencies] | 11 | [dependencies] |
12 | itertools = "0.8.0" | 12 | itertools = "0.8.2" |
13 | 13 | ||
14 | ra_syntax = { path = "../ra_syntax" } | 14 | ra_syntax = { path = "../ra_syntax" } |
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 7dc31ad3c..0555a0de7 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -8,9 +8,9 @@ authors = ["rust-analyzer developers"]
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | log = "0.4.5" | 11 | log = "0.4.8" |
12 | rustc-hash = "1.0" | 12 | rustc-hash = "1.1.0" |
13 | either = "1.5" | 13 | either = "1.5.3" |
14 | 14 | ||
15 | ra_syntax = { path = "../ra_syntax" } | 15 | ra_syntax = { path = "../ra_syntax" } |
16 | ra_db = { path = "../ra_db" } | 16 | ra_db = { path = "../ra_db" } |
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index b6adb7589..1bdcda069 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -283,7 +283,7 @@ impl StructField {
283 | }; | 283 | }; |
284 | let substs = Substs::type_params(db, generic_def_id); | 284 | let substs = Substs::type_params(db, generic_def_id); |
285 | let ty = db.field_types(var_id)[self.id].clone().subst(&substs); | 285 | let ty = db.field_types(var_id)[self.id].clone().subst(&substs); |
286 | Type::new(db, self.parent.module(db).id.krate.into(), var_id, ty) | 286 | Type::new(db, self.parent.module(db).id.krate, var_id, ty) |
287 | } | 287 | } |
288 | 288 | ||
289 | pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef { | 289 | pub fn parent_def(&self, _db: &impl HirDatabase) -> VariantDef { |
@@ -315,11 +315,11 @@ impl Struct {
315 | } | 315 | } |
316 | 316 | ||
317 | pub fn name(self, db: &impl DefDatabase) -> Name { | 317 | pub fn name(self, db: &impl DefDatabase) -> Name { |
318 | db.struct_data(self.id.into()).name.clone() | 318 | db.struct_data(self.id).name.clone() |
319 | } | 319 | } |
320 | 320 | ||
321 | pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { | 321 | pub fn fields(self, db: &impl HirDatabase) -> Vec<StructField> { |
322 | db.struct_data(self.id.into()) | 322 | db.struct_data(self.id) |
323 | .variant_data | 323 | .variant_data |
324 | .fields() | 324 | .fields() |
325 | .iter() | 325 | .iter() |
@@ -332,7 +332,7 @@ impl Struct {
332 | } | 332 | } |
333 | 333 | ||
334 | fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { | 334 | fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { |
335 | db.struct_data(self.id.into()).variant_data.clone() | 335 | db.struct_data(self.id).variant_data.clone() |
336 | } | 336 | } |
337 | } | 337 | } |
338 | 338 | ||
@@ -988,20 +988,17 @@ impl Type {
988 | 988 | ||
989 | pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> { | 989 | pub fn fields(&self, db: &impl HirDatabase) -> Vec<(StructField, Type)> { |
990 | if let Ty::Apply(a_ty) = &self.ty.value { | 990 | if let Ty::Apply(a_ty) = &self.ty.value { |
991 | match a_ty.ctor { | 991 | if let TypeCtor::Adt(AdtId::StructId(s)) = a_ty.ctor { |
992 | TypeCtor::Adt(AdtId::StructId(s)) => { | 992 | let var_def = s.into(); |
993 | let var_def = s.into(); | 993 | return db |
994 | return db | 994 | .field_types(var_def) |
995 | .field_types(var_def) | 995 | .iter() |
996 | .iter() | 996 | .map(|(local_id, ty)| { |
997 | .map(|(local_id, ty)| { | 997 | let def = StructField { parent: var_def.into(), id: local_id }; |
998 | let def = StructField { parent: var_def.into(), id: local_id }; | 998 | let ty = ty.clone().subst(&a_ty.parameters); |
999 | let ty = ty.clone().subst(&a_ty.parameters); | 999 | (def, self.derived(ty)) |
1000 | (def, self.derived(ty)) | 1000 | }) |
1001 | }) | 1001 | .collect(); |
1002 | .collect(); | ||
1003 | } | ||
1004 | _ => {} | ||
1005 | } | 1002 | } |
1006 | }; | 1003 | }; |
1007 | Vec::new() | 1004 | Vec::new() |
@@ -1010,14 +1007,11 @@ impl Type {
1010 | pub fn tuple_fields(&self, _db: &impl HirDatabase) -> Vec<Type> { | 1007 | pub fn tuple_fields(&self, _db: &impl HirDatabase) -> Vec<Type> { |
1011 | let mut res = Vec::new(); | 1008 | let mut res = Vec::new(); |
1012 | if let Ty::Apply(a_ty) = &self.ty.value { | 1009 | if let Ty::Apply(a_ty) = &self.ty.value { |
1013 | match a_ty.ctor { | 1010 | if let TypeCtor::Tuple { .. } = a_ty.ctor { |
1014 | TypeCtor::Tuple { .. } => { | 1011 | for ty in a_ty.parameters.iter() { |
1015 | for ty in a_ty.parameters.iter() { | 1012 | let ty = ty.clone(); |
1016 | let ty = ty.clone(); | 1013 | res.push(self.derived(ty)); |
1017 | res.push(self.derived(ty)); | ||
1018 | } | ||
1019 | } | 1014 | } |
1020 | _ => {} | ||
1021 | } | 1015 | } |
1022 | }; | 1016 | }; |
1023 | res | 1017 | res |
@@ -1049,7 +1043,7 @@ impl Type {
1049 | // FIXME check that? | 1043 | // FIXME check that? |
1050 | let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; | 1044 | let canonical = Canonical { value: self.ty.value.clone(), num_vars: 0 }; |
1051 | let environment = self.ty.environment.clone(); | 1045 | let environment = self.ty.environment.clone(); |
1052 | let ty = InEnvironment { value: canonical, environment: environment.clone() }; | 1046 | let ty = InEnvironment { value: canonical, environment }; |
1053 | autoderef(db, Some(self.krate), ty) | 1047 | autoderef(db, Some(self.krate), ty) |
1054 | .map(|canonical| canonical.value) | 1048 | .map(|canonical| canonical.value) |
1055 | .map(move |ty| self.derived(ty)) | 1049 | .map(move |ty| self.derived(ty)) |
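Both `fields` and `tuple_fields` collapse a `match` whose only interesting arm sits next to a `_ => {}` catch-all into an `if let`, the shape Clippy's `single_match` lint suggests; a compact sketch (the enum is a stand-in, not the real `TypeCtor`):

    enum TypeCtor {
        Tuple { cardinality: u16 },
        Other,
    }

    fn tuple_cardinality(ctor: &TypeCtor) -> Option<u16> {
        // Instead of `match ctor { TypeCtor::Tuple { .. } => .., _ => {} }`,
        // a single interesting arm reads better as `if let`.
        if let TypeCtor::Tuple { cardinality } = ctor {
            return Some(*cardinality);
        }
        None
    }

    fn main() {
        assert_eq!(tuple_cardinality(&TypeCtor::Tuple { cardinality: 2 }), Some(2));
        assert_eq!(tuple_cardinality(&TypeCtor::Other), None);
    }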
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index 94d5b4cfd..efa3f8a79 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -361,9 +361,8 @@ impl SourceAnalyzer {
361 | db: &impl HirDatabase, | 361 | db: &impl HirDatabase, |
362 | macro_call: InFile<&ast::MacroCall>, | 362 | macro_call: InFile<&ast::MacroCall>, |
363 | ) -> Option<Expansion> { | 363 | ) -> Option<Expansion> { |
364 | let macro_call_id = macro_call.as_call_id(db, |path| { | 364 | let macro_call_id = |
365 | self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into()) | 365 | macro_call.as_call_id(db, |path| self.resolver.resolve_path_as_macro(db, &path))?; |
366 | })?; | ||
367 | Some(Expansion { macro_call_id }) | 366 | Some(Expansion { macro_call_id }) |
368 | } | 367 | } |
369 | } | 368 | } |
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml
index 6b9be9948..5053d0688 100644
--- a/crates/ra_hir_def/Cargo.toml
+++ b/crates/ra_hir_def/Cargo.toml
@@ -8,11 +8,11 @@ authors = ["rust-analyzer developers"]
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | log = "0.4.5" | 11 | log = "0.4.8" |
12 | once_cell = "1.0.1" | 12 | once_cell = "1.3.1" |
13 | rustc-hash = "1.0" | 13 | rustc-hash = "1.1.0" |
14 | either = "1.5" | 14 | either = "1.5.3" |
15 | anymap = "0.12" | 15 | anymap = "0.12.1" |
16 | drop_bomb = "0.1.4" | 16 | drop_bomb = "0.1.4" |
17 | itertools = "0.8.2" | 17 | itertools = "0.8.2" |
18 | 18 | ||
@@ -27,4 +27,4 @@ ra_cfg = { path = "../ra_cfg" }
27 | tt = { path = "../ra_tt", package = "ra_tt" } | 27 | tt = { path = "../ra_tt", package = "ra_tt" } |
28 | 28 | ||
29 | [dev-dependencies] | 29 | [dev-dependencies] |
30 | insta = "0.13.0" | 30 | insta = "0.13.1" |
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index 1fc892362..b1626fa11 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -448,7 +448,7 @@ where
448 | // FIXME expand to statements in statement position | 448 | // FIXME expand to statements in statement position |
449 | ast::Expr::MacroCall(e) => { | 449 | ast::Expr::MacroCall(e) => { |
450 | let macro_call = self.expander.to_source(AstPtr::new(&e)); | 450 | let macro_call = self.expander.to_source(AstPtr::new(&e)); |
451 | match self.expander.enter_expand(self.db, e.clone()) { | 451 | match self.expander.enter_expand(self.db, e) { |
452 | Some((mark, expansion)) => { | 452 | Some((mark, expansion)) => { |
453 | self.source_map | 453 | self.source_map |
454 | .expansions | 454 | .expansions |
diff --git a/crates/ra_hir_def/src/generics.rs b/crates/ra_hir_def/src/generics.rs
index f765e6edc..e07a4f947 100644
--- a/crates/ra_hir_def/src/generics.rs
+++ b/crates/ra_hir_def/src/generics.rs
@@ -71,7 +71,7 @@ impl GenericParams {
71 | db: &impl DefDatabase, | 71 | db: &impl DefDatabase, |
72 | def: GenericDefId, | 72 | def: GenericDefId, |
73 | ) -> Arc<GenericParams> { | 73 | ) -> Arc<GenericParams> { |
74 | let (params, _source_map) = GenericParams::new(db, def.into()); | 74 | let (params, _source_map) = GenericParams::new(db, def); |
75 | Arc::new(params) | 75 | Arc::new(params) |
76 | } | 76 | } |
77 | 77 | ||
diff --git a/crates/ra_hir_def/src/item_scope.rs b/crates/ra_hir_def/src/item_scope.rs
index d74a1cef2..6e958ca75 100644
--- a/crates/ra_hir_def/src/item_scope.rs
+++ b/crates/ra_hir_def/src/item_scope.rs
@@ -138,7 +138,7 @@ impl ItemScope {
138 | 138 | ||
139 | pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool { | 139 | pub(crate) fn push_res(&mut self, name: Name, def: PerNs) -> bool { |
140 | let mut changed = false; | 140 | let mut changed = false; |
141 | let existing = self.visible.entry(name.clone()).or_default(); | 141 | let existing = self.visible.entry(name).or_default(); |
142 | 142 | ||
143 | if existing.types.is_none() && def.types.is_some() { | 143 | if existing.types.is_none() && def.types.is_some() { |
144 | existing.types = def.types; | 144 | existing.types = def.types; |
@@ -157,7 +157,7 @@ impl ItemScope {
157 | } | 157 | } |
158 | 158 | ||
159 | pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Name, PerNs)> + 'a { | 159 | pub(crate) fn resolutions<'a>(&'a self) -> impl Iterator<Item = (Name, PerNs)> + 'a { |
160 | self.visible.iter().map(|(name, res)| (name.clone(), res.clone())) | 160 | self.visible.iter().map(|(name, res)| (name.clone(), *res)) |
161 | } | 161 | } |
162 | 162 | ||
163 | pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> { | 163 | pub(crate) fn collect_legacy_macros(&self) -> FxHashMap<Name, MacroDefId> { |
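`push_res` now hands `name` to `entry()` by value rather than cloning it first, and `resolutions` copies the value with `*res` instead of `clone()` (the diff implies the value type is `Copy` here); a small sketch of both points using `std::collections::HashMap` with stand-in types:

    use std::collections::HashMap;

    fn main() {
        let mut visible: HashMap<String, u32> = HashMap::new();
        let name = String::from("foo");

        // `entry` takes the key by value, so cloning `name` before the call
        // only produces an extra allocation that is immediately moved in.
        *visible.entry(name).or_default() += 1;

        // For `Copy` values, dereferencing (`*res`) is the idiomatic way to
        // get an owned copy; `clone()` works but hides that the type is Copy.
        let res: &u32 = visible.get("foo").unwrap();
        let owned: u32 = *res;
        assert_eq!(owned, 1);
    }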
diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs
index aa0b558b8..564b5fec5 100644
--- a/crates/ra_hir_def/src/lib.rs
+++ b/crates/ra_hir_def/src/lib.rs
@@ -460,7 +460,7 @@ impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
460 | resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, | 460 | resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, |
461 | ) -> Option<MacroCallId> { | 461 | ) -> Option<MacroCallId> { |
462 | let def = resolver(self.path.clone())?; | 462 | let def = resolver(self.path.clone())?; |
463 | Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id.clone()))) | 463 | Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id))) |
464 | } | 464 | } |
465 | } | 465 | } |
466 | 466 | ||
@@ -471,6 +471,6 @@ impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
471 | resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, | 471 | resolver: impl Fn(path::ModPath) -> Option<MacroDefId>, |
472 | ) -> Option<MacroCallId> { | 472 | ) -> Option<MacroCallId> { |
473 | let def = resolver(self.path.clone())?; | 473 | let def = resolver(self.path.clone())?; |
474 | Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id.clone()))) | 474 | Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id))) |
475 | } | 475 | } |
476 | } | 476 | } |
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs
index 852304dd0..c5137a0ab 100644
--- a/crates/ra_hir_def/src/nameres.rs
+++ b/crates/ra_hir_def/src/nameres.rs
@@ -156,7 +156,7 @@ impl ModuleOrigin {
156 | ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { | 156 | ModuleOrigin::File { definition, .. } | ModuleOrigin::CrateRoot { definition } => { |
157 | let file_id = *definition; | 157 | let file_id = *definition; |
158 | let sf = db.parse(file_id).tree(); | 158 | let sf = db.parse(file_id).tree(); |
159 | return InFile::new(file_id.into(), ModuleSource::SourceFile(sf)); | 159 | InFile::new(file_id.into(), ModuleSource::SourceFile(sf)) |
160 | } | 160 | } |
161 | ModuleOrigin::Inline { definition } => { | 161 | ModuleOrigin::Inline { definition } => { |
162 | InFile::new(definition.file_id, ModuleSource::Module(definition.to_node(db))) | 162 | InFile::new(definition.file_id, ModuleSource::Module(definition.to_node(db))) |
diff --git a/crates/ra_hir_def/src/nameres/raw.rs b/crates/ra_hir_def/src/nameres/raw.rs
index 650cf1f98..0e8c9da76 100644
--- a/crates/ra_hir_def/src/nameres/raw.rs
+++ b/crates/ra_hir_def/src/nameres/raw.rs
@@ -357,9 +357,7 @@ impl RawItemsCollector {
357 | let visibility = | 357 | let visibility = |
358 | RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); | 358 | RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); |
359 | let alias = extern_crate.alias().map(|a| { | 359 | let alias = extern_crate.alias().map(|a| { |
360 | a.name() | 360 | a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) |
361 | .map(|it| it.as_name()) | ||
362 | .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a)) | ||
363 | }); | 361 | }); |
364 | let attrs = self.parse_attrs(&extern_crate); | 362 | let attrs = self.parse_attrs(&extern_crate); |
365 | // FIXME: cfg_attr | 363 | // FIXME: cfg_attr |
diff --git a/crates/ra_hir_def/src/nameres/tests/incremental.rs b/crates/ra_hir_def/src/nameres/tests/incremental.rs
index faeb7aa4d..83f429c29 100644
--- a/crates/ra_hir_def/src/nameres/tests/incremental.rs
+++ b/crates/ra_hir_def/src/nameres/tests/incremental.rs
@@ -116,7 +116,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
116 | let events = db.log_executed(|| { | 116 | let events = db.log_executed(|| { |
117 | let crate_def_map = db.crate_def_map(krate); | 117 | let crate_def_map = db.crate_def_map(krate); |
118 | let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); | 118 | let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); |
119 | assert_eq!(module_data.scope.resolutions().collect::<Vec<_>>().len(), 1); | 119 | assert_eq!(module_data.scope.resolutions().count(), 1); |
120 | }); | 120 | }); |
121 | assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) | 121 | assert!(format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) |
122 | } | 122 | } |
@@ -126,7 +126,7 @@ fn typing_inside_a_macro_should_not_invalidate_def_map() {
126 | let events = db.log_executed(|| { | 126 | let events = db.log_executed(|| { |
127 | let crate_def_map = db.crate_def_map(krate); | 127 | let crate_def_map = db.crate_def_map(krate); |
128 | let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); | 128 | let (_, module_data) = crate_def_map.modules.iter().last().unwrap(); |
129 | assert_eq!(module_data.scope.resolutions().collect::<Vec<_>>().len(), 1); | 129 | assert_eq!(module_data.scope.resolutions().count(), 1); |
130 | }); | 130 | }); |
131 | assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) | 131 | assert!(!format!("{:?}", events).contains("crate_def_map"), "{:#?}", events) |
132 | } | 132 | } |
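Both tests now count matches with `count()` instead of collecting into a `Vec` just to call `len()` on it (Clippy: `needless_collect`); a minimal sketch:

    fn main() {
        let resolutions = vec![("foo", 1), ("bar", 2), ("baz", 3)];

        // Collecting only to measure the length allocates for nothing;
        // `count()` consumes the iterator directly.
        let n = resolutions.iter().filter(|(_, v)| *v > 1).count();
        assert_eq!(n, 2);
    }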
diff --git a/crates/ra_hir_def/src/path/lower/lower_use.rs b/crates/ra_hir_def/src/path/lower/lower_use.rs
index d2bc9d193..b6d1125e2 100644
--- a/crates/ra_hir_def/src/path/lower/lower_use.rs
+++ b/crates/ra_hir_def/src/path/lower/lower_use.rs
@@ -32,9 +32,7 @@ pub(crate) fn lower_use_tree(
32 | } | 32 | } |
33 | } else { | 33 | } else { |
34 | let alias = tree.alias().map(|a| { | 34 | let alias = tree.alias().map(|a| { |
35 | a.name() | 35 | a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias) |
36 | .map(|it| it.as_name()) | ||
37 | .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a)) | ||
38 | }); | 36 | }); |
39 | let is_glob = tree.has_star(); | 37 | let is_glob = tree.has_star(); |
40 | if let Some(ast_path) = tree.path() { | 38 | if let Some(ast_path) = tree.path() { |
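Both alias-lowering sites (here and in nameres/raw.rs above) pass the `ImportAlias::Alias` constructor straight to `map_or` instead of wrapping it in a closure; tuple-variant constructors are already functions, so the closure is redundant. A sketch with a stand-in version of the enum:

    #[derive(Debug, PartialEq)]
    enum ImportAlias {
        Underscore,
        Alias(String),
    }

    fn lower_alias(name: Option<String>) -> ImportAlias {
        // `|a| ImportAlias::Alias(a)` is a redundant closure around the
        // constructor, which is itself a fn(String) -> ImportAlias.
        name.map_or(ImportAlias::Underscore, ImportAlias::Alias)
    }

    fn main() {
        assert_eq!(lower_alias(None), ImportAlias::Underscore);
        assert_eq!(lower_alias(Some("foo".into())), ImportAlias::Alias("foo".into()));
    }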
diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs
index e2b228e80..5365b80e2 100644
--- a/crates/ra_hir_def/src/resolver.rs
+++ b/crates/ra_hir_def/src/resolver.rs
@@ -474,7 +474,7 @@ impl Scope {
474 | f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_, Visibility::Public))); | 474 | f(name.clone(), ScopeDef::PerNs(PerNs::macros(macro_, Visibility::Public))); |
475 | }); | 475 | }); |
476 | m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| { | 476 | m.crate_def_map.extern_prelude.iter().for_each(|(name, &def)| { |
477 | f(name.clone(), ScopeDef::PerNs(PerNs::types(def.into(), Visibility::Public))); | 477 | f(name.clone(), ScopeDef::PerNs(PerNs::types(def, Visibility::Public))); |
478 | }); | 478 | }); |
479 | if let Some(prelude) = m.crate_def_map.prelude { | 479 | if let Some(prelude) = m.crate_def_map.prelude { |
480 | let prelude_def_map = db.crate_def_map(prelude.krate); | 480 | let prelude_def_map = db.crate_def_map(prelude.krate); |
@@ -499,10 +499,10 @@ impl Scope {
499 | } | 499 | } |
500 | } | 500 | } |
501 | Scope::ImplBlockScope(i) => { | 501 | Scope::ImplBlockScope(i) => { |
502 | f(name![Self], ScopeDef::ImplSelfType((*i).into())); | 502 | f(name![Self], ScopeDef::ImplSelfType(*i)); |
503 | } | 503 | } |
504 | Scope::AdtScope(i) => { | 504 | Scope::AdtScope(i) => { |
505 | f(name![Self], ScopeDef::AdtSelfType((*i).into())); | 505 | f(name![Self], ScopeDef::AdtSelfType(*i)); |
506 | } | 506 | } |
507 | Scope::ExprScope(scope) => { | 507 | Scope::ExprScope(scope) => { |
508 | scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { | 508 | scope.expr_scopes.entries(scope.scope_id).iter().for_each(|e| { |
diff --git a/crates/ra_hir_expand/Cargo.toml b/crates/ra_hir_expand/Cargo.toml
index 3ae4376dc..d6e3c1f76 100644
--- a/crates/ra_hir_expand/Cargo.toml
+++ b/crates/ra_hir_expand/Cargo.toml
@@ -8,8 +8,8 @@ authors = ["rust-analyzer developers"]
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | log = "0.4.5" | 11 | log = "0.4.8" |
12 | either = "1.5" | 12 | either = "1.5.3" |
13 | 13 | ||
14 | ra_arena = { path = "../ra_arena" } | 14 | ra_arena = { path = "../ra_arena" } |
15 | ra_db = { path = "../ra_db" } | 15 | ra_db = { path = "../ra_db" } |
diff --git a/crates/ra_hir_expand/src/builtin_derive.rs b/crates/ra_hir_expand/src/builtin_derive.rs
index 62c60e336..d0e3eaf7e 100644
--- a/crates/ra_hir_expand/src/builtin_derive.rs
+++ b/crates/ra_hir_expand/src/builtin_derive.rs
@@ -235,7 +235,7 @@ mod tests {
235 | let (db, file_id) = TestDB::with_single_file(&s); | 235 | let (db, file_id) = TestDB::with_single_file(&s); |
236 | let parsed = db.parse(file_id); | 236 | let parsed = db.parse(file_id); |
237 | let items: Vec<_> = | 237 | let items: Vec<_> = |
238 | parsed.syntax_node().descendants().filter_map(|it| ast::ModuleItem::cast(it)).collect(); | 238 | parsed.syntax_node().descendants().filter_map(ast::ModuleItem::cast).collect(); |
239 | 239 | ||
240 | let ast_id_map = db.ast_id_map(file_id.into()); | 240 | let ast_id_map = db.ast_id_map(file_id.into()); |
241 | 241 | ||
diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs
index f3f959ac6..f2bb0bddb 100644
--- a/crates/ra_hir_expand/src/builtin_macro.rs
+++ b/crates/ra_hir_expand/src/builtin_macro.rs
@@ -155,14 +155,11 @@ fn compile_error_expand(
155 | tt: &tt::Subtree, | 155 | tt: &tt::Subtree, |
156 | ) -> Result<tt::Subtree, mbe::ExpandError> { | 156 | ) -> Result<tt::Subtree, mbe::ExpandError> { |
157 | if tt.count() == 1 { | 157 | if tt.count() == 1 { |
158 | match &tt.token_trees[0] { | 158 | if let tt::TokenTree::Leaf(tt::Leaf::Literal(it)) = &tt.token_trees[0] { |
159 | tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => { | 159 | let s = it.text.as_str(); |
160 | let s = it.text.as_str(); | 160 | if s.contains('"') { |
161 | if s.contains(r#"""#) { | 161 | return Ok(quote! { loop { #it }}); |
162 | return Ok(quote! { loop { #it }}); | ||
163 | } | ||
164 | } | 162 | } |
165 | _ => {} | ||
166 | }; | 163 | }; |
167 | } | 164 | } |
168 | 165 | ||
@@ -222,7 +219,7 @@ mod tests {
222 | let (db, file_id) = TestDB::with_single_file(&s); | 219 | let (db, file_id) = TestDB::with_single_file(&s); |
223 | let parsed = db.parse(file_id); | 220 | let parsed = db.parse(file_id); |
224 | let macro_calls: Vec<_> = | 221 | let macro_calls: Vec<_> = |
225 | parsed.syntax_node().descendants().filter_map(|it| ast::MacroCall::cast(it)).collect(); | 222 | parsed.syntax_node().descendants().filter_map(ast::MacroCall::cast).collect(); |
226 | 223 | ||
227 | let ast_id_map = db.ast_id_map(file_id.into()); | 224 | let ast_id_map = db.ast_id_map(file_id.into()); |
228 | 225 | ||
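Besides another single-arm `match` turned into `if let`, the hunk above searches for a single character with a `char` pattern instead of the one-character string `r#"""#`; a minimal sketch:

    fn main() {
        let s = r#"error: "something went wrong""#;

        // A char pattern is clearer (and cheaper) than a one-character &str
        // when looking for a single character.
        assert!(s.contains('"'));
        assert_eq!(s.find('"'), Some(7));
    }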
diff --git a/crates/ra_hir_expand/src/quote.rs b/crates/ra_hir_expand/src/quote.rs
index 4de219ce4..57e7eebf9 100644
--- a/crates/ra_hir_expand/src/quote.rs
+++ b/crates/ra_hir_expand/src/quote.rs
@@ -15,14 +15,13 @@ macro_rules! __quote {
15 | ( @SUBTREE $delim:ident $($tt:tt)* ) => { | 15 | ( @SUBTREE $delim:ident $($tt:tt)* ) => { |
16 | { | 16 | { |
17 | let children = $crate::__quote!($($tt)*); | 17 | let children = $crate::__quote!($($tt)*); |
18 | let subtree = tt::Subtree { | 18 | tt::Subtree { |
19 | delimiter: Some(tt::Delimiter { | 19 | delimiter: Some(tt::Delimiter { |
20 | kind: tt::DelimiterKind::$delim, | 20 | kind: tt::DelimiterKind::$delim, |
21 | id: tt::TokenId::unspecified(), | 21 | id: tt::TokenId::unspecified(), |
22 | }), | 22 | }), |
23 | token_trees: $crate::quote::IntoTt::to_tokens(children), | 23 | token_trees: $crate::quote::IntoTt::to_tokens(children), |
24 | }; | 24 | } |
25 | subtree | ||
26 | } | 25 | } |
27 | }; | 26 | }; |
28 | 27 | ||
@@ -259,8 +258,7 @@ mod tests {
259 | // } | 258 | // } |
260 | let struct_name = mk_ident("Foo"); | 259 | let struct_name = mk_ident("Foo"); |
261 | let fields = [mk_ident("name"), mk_ident("id")]; | 260 | let fields = [mk_ident("name"), mk_ident("id")]; |
262 | let fields = | 261 | let fields = fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees).flatten(); |
263 | fields.iter().map(|it| quote!(#it: self.#it.clone(), ).token_trees.clone()).flatten(); | ||
264 | 262 | ||
265 | let list = tt::Subtree { | 263 | let list = tt::Subtree { |
266 | delimiter: Some(tt::Delimiter { | 264 | delimiter: Some(tt::Delimiter { |
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml
index f5484bf70..49cafc539 100644
--- a/crates/ra_hir_ty/Cargo.toml
+++ b/crates/ra_hir_ty/Cargo.toml
@@ -9,9 +9,9 @@ doctest = false
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | arrayvec = "0.5.1" | 11 | arrayvec = "0.5.1" |
12 | ena = "0.13" | 12 | ena = "0.13.1" |
13 | log = "0.4.5" | 13 | log = "0.4.8" |
14 | rustc-hash = "1.0" | 14 | rustc-hash = "1.1.0" |
15 | 15 | ||
16 | hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } | 16 | hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } |
17 | hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } | 17 | hir_expand = { path = "../ra_hir_expand", package = "ra_hir_expand" } |
@@ -28,4 +28,4 @@ chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "af48f30
28 | lalrpop-intern = "0.15.1" | 28 | lalrpop-intern = "0.15.1" |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | insta = "0.13.0" | 31 | insta = "0.13.1" |
diff --git a/crates/ra_hir_ty/src/diagnostics.rs b/crates/ra_hir_ty/src/diagnostics.rs
index 5054189cc..6eafdc8f6 100644
--- a/crates/ra_hir_ty/src/diagnostics.rs
+++ b/crates/ra_hir_ty/src/diagnostics.rs
@@ -40,7 +40,7 @@ impl Diagnostic for MissingFields {
40 | use std::fmt::Write; | 40 | use std::fmt::Write; |
41 | let mut message = String::from("Missing structure fields:\n"); | 41 | let mut message = String::from("Missing structure fields:\n"); |
42 | for field in &self.missed_fields { | 42 | for field in &self.missed_fields { |
43 | write!(message, "- {}\n", field).unwrap(); | 43 | writeln!(message, "- {}", field).unwrap(); |
44 | } | 44 | } |
45 | message | 45 | message |
46 | } | 46 | } |
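The loop now uses `writeln!`, which appends the newline itself, instead of `write!` with a trailing `\n`; a runnable sketch of the same loop shape (field names made up):

    use std::fmt::Write;

    fn main() {
        let missed_fields = ["x", "y"];
        let mut message = String::from("Missing structure fields:\n");
        for field in &missed_fields {
            // `writeln!` adds the trailing newline, so the format string
            // does not need an explicit `\n`.
            writeln!(message, "- {}", field).unwrap();
        }
        assert_eq!(message, "Missing structure fields:\n- x\n- y\n");
    }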
diff --git a/crates/ra_hir_ty/src/expr.rs b/crates/ra_hir_ty/src/expr.rs
index f752a9f09..0d11b537c 100644
--- a/crates/ra_hir_ty/src/expr.rs
+++ b/crates/ra_hir_ty/src/expr.rs
@@ -138,7 +138,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
138 | _ => return, | 138 | _ => return, |
139 | }; | 139 | }; |
140 | 140 | ||
141 | if params.len() == 2 && ¶ms[0] == &mismatch.actual { | 141 | if params.len() == 2 && params[0] == mismatch.actual { |
142 | let (_, source_map) = db.body_with_source_map(self.func.into()); | 142 | let (_, source_map) = db.body_with_source_map(self.func.into()); |
143 | 143 | ||
144 | if let Some(source_ptr) = source_map.expr_syntax(id) { | 144 | if let Some(source_ptr) = source_map.expr_syntax(id) { |
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
index a9d958c8b..76069eb9c 100644
--- a/crates/ra_hir_ty/src/infer.rs
+++ b/crates/ra_hir_ty/src/infer.rs
@@ -225,14 +225,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
225 | coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver), | 225 | coerce_unsized_map: Self::init_coerce_unsized_map(db, &resolver), |
226 | db, | 226 | db, |
227 | owner, | 227 | owner, |
228 | body: db.body(owner.into()), | 228 | body: db.body(owner), |
229 | resolver, | 229 | resolver, |
230 | } | 230 | } |
231 | } | 231 | } |
232 | 232 | ||
233 | fn resolve_all(mut self) -> InferenceResult { | 233 | fn resolve_all(mut self) -> InferenceResult { |
234 | // FIXME resolve obligations as well (use Guidance if necessary) | 234 | // FIXME resolve obligations as well (use Guidance if necessary) |
235 | let mut result = mem::replace(&mut self.result, InferenceResult::default()); | 235 | let mut result = std::mem::take(&mut self.result); |
236 | for ty in result.type_of_expr.values_mut() { | 236 | for ty in result.type_of_expr.values_mut() { |
237 | let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); | 237 | let resolved = self.table.resolve_ty_completely(mem::replace(ty, Ty::Unknown)); |
238 | *ty = resolved; | 238 | *ty = resolved; |
@@ -261,7 +261,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
261 | } | 261 | } |
262 | 262 | ||
263 | fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { | 263 | fn write_assoc_resolution(&mut self, id: ExprOrPatId, item: AssocItemId) { |
264 | self.result.assoc_resolutions.insert(id, item.into()); | 264 | self.result.assoc_resolutions.insert(id, item); |
265 | } | 265 | } |
266 | 266 | ||
267 | fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { | 267 | fn write_pat_ty(&mut self, pat: PatId, ty: Ty) { |
@@ -312,9 +312,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
312 | for obligation in obligations { | 312 | for obligation in obligations { |
313 | let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone()); | 313 | let in_env = InEnvironment::new(self.trait_env.clone(), obligation.clone()); |
314 | let canonicalized = self.canonicalizer().canonicalize_obligation(in_env); | 314 | let canonicalized = self.canonicalizer().canonicalize_obligation(in_env); |
315 | let solution = self | 315 | let solution = |
316 | .db | 316 | self.db.trait_solve(self.resolver.krate().unwrap(), canonicalized.value.clone()); |
317 | .trait_solve(self.resolver.krate().unwrap().into(), canonicalized.value.clone()); | ||
318 | 317 | ||
319 | match solution { | 318 | match solution { |
320 | Some(Solution::Unique(substs)) => { | 319 | Some(Solution::Unique(substs)) => { |
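`resolve_all` above swaps `mem::replace(&mut .., Default::default())` for `std::mem::take`, which performs exactly that replacement for any type implementing `Default`; a minimal sketch:

    fn main() {
        let mut result: Vec<u32> = vec![1, 2, 3];

        // `take` moves the current value out and leaves `Default::default()`
        // behind -- the same as `mem::replace(&mut result, Vec::new())`.
        let owned = std::mem::take(&mut result);

        assert_eq!(owned, vec![1, 2, 3]);
        assert!(result.is_empty());
    }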
diff --git a/crates/ra_hir_ty/src/infer/coerce.rs b/crates/ra_hir_ty/src/infer/coerce.rs
index f68a1439f..fb6a51b12 100644
--- a/crates/ra_hir_ty/src/infer/coerce.rs
+++ b/crates/ra_hir_ty/src/infer/coerce.rs
@@ -26,7 +26,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
26 | /// Note that it is only possible that one type are coerced to another. | 26 | /// Note that it is only possible that one type are coerced to another. |
27 | /// Coercing both types to another least upper bound type is not possible in rustc, | 27 | /// Coercing both types to another least upper bound type is not possible in rustc, |
28 | /// which will simply result in "incompatible types" error. | 28 | /// which will simply result in "incompatible types" error. |
29 | pub(super) fn coerce_merge_branch<'t>(&mut self, ty1: &Ty, ty2: &Ty) -> Ty { | 29 | pub(super) fn coerce_merge_branch(&mut self, ty1: &Ty, ty2: &Ty) -> Ty { |
30 | if self.coerce(ty1, ty2) { | 30 | if self.coerce(ty1, ty2) { |
31 | ty2.clone() | 31 | ty2.clone() |
32 | } else if self.coerce(ty2, ty1) { | 32 | } else if self.coerce(ty2, ty1) { |
@@ -44,10 +44,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
44 | resolver: &Resolver, | 44 | resolver: &Resolver, |
45 | ) -> FxHashMap<(TypeCtor, TypeCtor), usize> { | 45 | ) -> FxHashMap<(TypeCtor, TypeCtor), usize> { |
46 | let krate = resolver.krate().unwrap(); | 46 | let krate = resolver.krate().unwrap(); |
47 | let impls = match db.lang_item(krate.into(), "coerce_unsized".into()) { | 47 | let impls = match db.lang_item(krate, "coerce_unsized".into()) { |
48 | Some(LangItemTarget::TraitId(trait_)) => { | 48 | Some(LangItemTarget::TraitId(trait_)) => db.impls_for_trait(krate, trait_), |
49 | db.impls_for_trait(krate.into(), trait_.into()) | ||
50 | } | ||
51 | _ => return FxHashMap::default(), | 49 | _ => return FxHashMap::default(), |
52 | }; | 50 | }; |
53 | 51 | ||
@@ -254,15 +252,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
254 | let unsize_generic_index = { | 252 | let unsize_generic_index = { |
255 | let mut index = None; | 253 | let mut index = None; |
256 | let mut multiple_param = false; | 254 | let mut multiple_param = false; |
257 | field_tys[last_field_id].value.walk(&mut |ty| match ty { | 255 | field_tys[last_field_id].value.walk(&mut |ty| { |
258 | &Ty::Bound(idx) => { | 256 | if let &Ty::Bound(idx) = ty { |
259 | if index.is_none() { | 257 | if index.is_none() { |
260 | index = Some(idx); | 258 | index = Some(idx); |
261 | } else if Some(idx) != index { | 259 | } else if Some(idx) != index { |
262 | multiple_param = true; | 260 | multiple_param = true; |
263 | } | 261 | } |
264 | } | 262 | } |
265 | _ => {} | ||
266 | }); | 263 | }); |
267 | 264 | ||
268 | if multiple_param { | 265 | if multiple_param { |
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs
index 39d8bc0ca..9d5f75625 100644
--- a/crates/ra_hir_ty/src/infer/expr.rs
+++ b/crates/ra_hir_ty/src/infer/expr.rs
@@ -35,8 +35,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
35 | TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, | 35 | TypeMismatch { expected: expected.ty.clone(), actual: ty.clone() }, |
36 | ); | 36 | ); |
37 | } | 37 | } |
38 | let ty = self.resolve_ty_as_possible(ty); | 38 | self.resolve_ty_as_possible(ty) |
39 | ty | ||
40 | } | 39 | } |
41 | 40 | ||
42 | /// Infer type of expression with possibly implicit coerce to the expected type. | 41 | /// Infer type of expression with possibly implicit coerce to the expected type. |
@@ -127,10 +126,8 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
127 | TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 }, | 126 | TypeCtor::FnPtr { num_args: sig_tys.len() as u16 - 1 }, |
128 | Substs(sig_tys.into()), | 127 | Substs(sig_tys.into()), |
129 | ); | 128 | ); |
130 | let closure_ty = Ty::apply_one( | 129 | let closure_ty = |
131 | TypeCtor::Closure { def: self.owner.into(), expr: tgt_expr }, | 130 | Ty::apply_one(TypeCtor::Closure { def: self.owner, expr: tgt_expr }, sig_ty); |
132 | sig_ty, | ||
133 | ); | ||
134 | 131 | ||
135 | // Eagerly try to relate the closure type with the expected | 132 | // Eagerly try to relate the closure type with the expected |
136 | // type, otherwise we often won't have enough information to | 133 | // type, otherwise we often won't have enough information to |
@@ -157,15 +154,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
157 | }; | 154 | }; |
158 | self.register_obligations_for_call(&callee_ty); | 155 | self.register_obligations_for_call(&callee_ty); |
159 | self.check_call_arguments(args, ¶m_tys); | 156 | self.check_call_arguments(args, ¶m_tys); |
160 | let ret_ty = self.normalize_associated_types_in(ret_ty); | 157 | self.normalize_associated_types_in(ret_ty) |
161 | ret_ty | ||
162 | } | 158 | } |
163 | Expr::MethodCall { receiver, args, method_name, generic_args } => self | 159 | Expr::MethodCall { receiver, args, method_name, generic_args } => self |
164 | .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()), | 160 | .infer_method_call(tgt_expr, *receiver, &args, &method_name, generic_args.as_ref()), |
165 | Expr::Match { expr, arms } => { | 161 | Expr::Match { expr, arms } => { |
166 | let input_ty = self.infer_expr(*expr, &Expectation::none()); | 162 | let input_ty = self.infer_expr(*expr, &Expectation::none()); |
167 | 163 | ||
168 | let mut result_ty = if arms.len() == 0 { | 164 | let mut result_ty = if arms.is_empty() { |
169 | Ty::simple(TypeCtor::Never) | 165 | Ty::simple(TypeCtor::Never) |
170 | } else { | 166 | } else { |
171 | self.table.new_type_var() | 167 | self.table.new_type_var() |
@@ -188,7 +184,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
188 | } | 184 | } |
189 | Expr::Path(p) => { | 185 | Expr::Path(p) => { |
190 | // FIXME this could be more efficient... | 186 | // FIXME this could be more efficient... |
191 | let resolver = resolver_for_expr(self.db, self.owner.into(), tgt_expr); | 187 | let resolver = resolver_for_expr(self.db, self.owner, tgt_expr); |
192 | self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) | 188 | self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or(Ty::Unknown) |
193 | } | 189 | } |
194 | Expr::Continue => Ty::simple(TypeCtor::Never), | 190 | Expr::Continue => Ty::simple(TypeCtor::Never), |
@@ -217,8 +213,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
217 | self.unify(&ty, &expected.ty); | 213 | self.unify(&ty, &expected.ty); |
218 | 214 | ||
219 | let substs = ty.substs().unwrap_or_else(Substs::empty); | 215 | let substs = ty.substs().unwrap_or_else(Substs::empty); |
220 | let field_types = | 216 | let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default(); |
221 | def_id.map(|it| self.db.field_types(it.into())).unwrap_or_default(); | ||
222 | let variant_data = def_id.map(|it| variant_data(self.db, it)); | 217 | let variant_data = def_id.map(|it| variant_data(self.db, it)); |
223 | for (field_idx, field) in fields.iter().enumerate() { | 218 | for (field_idx, field) in fields.iter().enumerate() { |
224 | let field_def = | 219 | let field_def = |
@@ -264,7 +259,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
264 | .and_then(|idx| a_ty.parameters.0.get(idx).cloned()), | 259 | .and_then(|idx| a_ty.parameters.0.get(idx).cloned()), |
265 | TypeCtor::Adt(AdtId::StructId(s)) => { | 260 | TypeCtor::Adt(AdtId::StructId(s)) => { |
266 | self.db.struct_data(s).variant_data.field(name).map(|local_id| { | 261 | self.db.struct_data(s).variant_data.field(name).map(|local_id| { |
267 | let field = StructFieldId { parent: s.into(), local_id }.into(); | 262 | let field = StructFieldId { parent: s.into(), local_id }; |
268 | self.write_field_resolution(tgt_expr, field); | 263 | self.write_field_resolution(tgt_expr, field); |
269 | self.db.field_types(s.into())[field.local_id] | 264 | self.db.field_types(s.into())[field.local_id] |
270 | .clone() | 265 | .clone() |
@@ -283,14 +278,11 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
283 | } | 278 | } |
284 | Expr::Await { expr } => { | 279 | Expr::Await { expr } => { |
285 | let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); | 280 | let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); |
286 | let ty = | 281 | self.resolve_associated_type(inner_ty, self.resolve_future_future_output()) |
287 | self.resolve_associated_type(inner_ty, self.resolve_future_future_output()); | ||
288 | ty | ||
289 | } | 282 | } |
290 | Expr::Try { expr } => { | 283 | Expr::Try { expr } => { |
291 | let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); | 284 | let inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); |
292 | let ty = self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()); | 285 | self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok()) |
293 | ty | ||
294 | } | 286 | } |
295 | Expr::Cast { expr, type_ref } => { | 287 | Expr::Cast { expr, type_ref } => { |
296 | let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); | 288 | let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none()); |
@@ -614,8 +606,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
614 | self.unify(&expected_receiver_ty, &actual_receiver_ty); | 606 | self.unify(&expected_receiver_ty, &actual_receiver_ty); |
615 | 607 | ||
616 | self.check_call_arguments(args, ¶m_tys); | 608 | self.check_call_arguments(args, ¶m_tys); |
617 | let ret_ty = self.normalize_associated_types_in(ret_ty); | 609 | self.normalize_associated_types_in(ret_ty) |
618 | ret_ty | ||
619 | } | 610 | } |
620 | 611 | ||
621 | fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) { | 612 | fn check_call_arguments(&mut self, args: &[ExprId], param_tys: &[Ty]) { |
@@ -700,10 +691,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
700 | // construct a TraitDef | 691 | // construct a TraitDef |
701 | let substs = | 692 | let substs = |
702 | a_ty.parameters.prefix(generics(self.db, trait_.into()).len()); | 693 | a_ty.parameters.prefix(generics(self.db, trait_.into()).len()); |
703 | self.obligations.push(Obligation::Trait(TraitRef { | 694 | self.obligations.push(Obligation::Trait(TraitRef { trait_, substs })); |
704 | trait_: trait_.into(), | ||
705 | substs, | ||
706 | })); | ||
707 | } | 695 | } |
708 | } | 696 | } |
709 | CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {} | 697 | CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {} |
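Several hunks in this file drop a `let x = ..; x` binding and return the expression directly, and the match-arm count check becomes `is_empty()` instead of `len() == 0`; a compact sketch of both (names are illustrative):

    fn normalized_len(arms: &[u32]) -> usize {
        // Binding the result only to return it on the next line
        // (`let n = ..; n`) adds nothing; return the expression directly.
        arms.iter().filter(|a| **a != 0).count()
    }

    fn main() {
        let arms: Vec<u32> = vec![0, 1, 2];
        // `is_empty()` states the intent better than `len() == 0`.
        assert!(!arms.is_empty());
        assert_eq!(normalized_len(&arms), 2);
    }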
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs
index a5dfdf6c4..a495ecbfe 100644
--- a/crates/ra_hir_ty/src/infer/pat.rs
+++ b/crates/ra_hir_ty/src/infer/pat.rs
@@ -28,7 +28,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
28 | 28 | ||
29 | let substs = ty.substs().unwrap_or_else(Substs::empty); | 29 | let substs = ty.substs().unwrap_or_else(Substs::empty); |
30 | 30 | ||
31 | let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default(); | 31 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); |
32 | 32 | ||
33 | for (i, &subpat) in subpats.iter().enumerate() { | 33 | for (i, &subpat) in subpats.iter().enumerate() { |
34 | let expected_ty = var_data | 34 | let expected_ty = var_data |
@@ -60,7 +60,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
60 | 60 | ||
61 | let substs = ty.substs().unwrap_or_else(Substs::empty); | 61 | let substs = ty.substs().unwrap_or_else(Substs::empty); |
62 | 62 | ||
63 | let field_tys = def.map(|it| self.db.field_types(it.into())).unwrap_or_default(); | 63 | let field_tys = def.map(|it| self.db.field_types(it)).unwrap_or_default(); |
64 | for subpat in subpats { | 64 | for subpat in subpats { |
65 | let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); | 65 | let matching_field = var_data.as_ref().and_then(|it| it.field(&subpat.name)); |
66 | let expected_ty = | 66 | let expected_ty = |
diff --git a/crates/ra_hir_ty/src/infer/path.rs b/crates/ra_hir_ty/src/infer/path.rs
index 686ce7a21..471d60342 100644
--- a/crates/ra_hir_ty/src/infer/path.rs
+++ b/crates/ra_hir_ty/src/infer/path.rs
@@ -104,8 +104,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
104 | let segment = | 104 | let segment = |
105 | remaining_segments.last().expect("there should be at least one segment here"); | 105 | remaining_segments.last().expect("there should be at least one segment here"); |
106 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); | 106 | let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); |
107 | let trait_ref = | 107 | let trait_ref = TraitRef::from_resolved_path(&ctx, trait_, resolved_segment, None); |
108 | TraitRef::from_resolved_path(&ctx, trait_.into(), resolved_segment, None); | ||
109 | self.resolve_trait_assoc_item(trait_ref, segment, id) | 108 | self.resolve_trait_assoc_item(trait_ref, segment, id) |
110 | } | 109 | } |
111 | (def, _) => { | 110 | (def, _) => { |
@@ -144,30 +143,32 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
144 | id: ExprOrPatId, | 143 | id: ExprOrPatId, |
145 | ) -> Option<(ValueNs, Option<Substs>)> { | 144 | ) -> Option<(ValueNs, Option<Substs>)> { |
146 | let trait_ = trait_ref.trait_; | 145 | let trait_ = trait_ref.trait_; |
147 | let item = self | 146 | let item = |
148 | .db | 147 | self.db.trait_data(trait_).items.iter().map(|(_name, id)| (*id)).find_map(|item| { |
149 | .trait_data(trait_) | 148 | match item { |
150 | .items | 149 | AssocItemId::FunctionId(func) => { |
151 | .iter() | 150 | if segment.name == &self.db.function_data(func).name { |
152 | .map(|(_name, id)| (*id).into()) | 151 | Some(AssocItemId::FunctionId(func)) |
153 | .find_map(|item| match item { | 152 | } else { |
154 | AssocItemId::FunctionId(func) => { | 153 | None |
155 | if segment.name == &self.db.function_data(func).name { | 154 | } |
156 | Some(AssocItemId::FunctionId(func)) | ||
157 | } else { | ||
158 | None | ||
159 | } | 155 | } |
160 | } | ||
161 | 156 | ||
162 | AssocItemId::ConstId(konst) => { | 157 | AssocItemId::ConstId(konst) => { |
163 | if self.db.const_data(konst).name.as_ref().map_or(false, |n| n == segment.name) | 158 | if self |
164 | { | 159 | .db |
165 | Some(AssocItemId::ConstId(konst)) | 160 | .const_data(konst) |
166 | } else { | 161 | .name |
167 | None | 162 | .as_ref() |
163 | .map_or(false, |n| n == segment.name) | ||
164 | { | ||
165 | Some(AssocItemId::ConstId(konst)) | ||
166 | } else { | ||
167 | None | ||
168 | } | ||
168 | } | 169 | } |
170 | AssocItemId::TypeAliasId(_) => None, | ||
169 | } | 171 | } |
170 | AssocItemId::TypeAliasId(_) => None, | ||
171 | })?; | 172 | })?; |
172 | let def = match item { | 173 | let def = match item { |
173 | AssocItemId::FunctionId(f) => ValueNs::FunctionId(f), | 174 | AssocItemId::FunctionId(f) => ValueNs::FunctionId(f), |
@@ -233,7 +234,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> { | |||
233 | AssocContainerId::ContainerId(_) => None, | 234 | AssocContainerId::ContainerId(_) => None, |
234 | }; | 235 | }; |
235 | 236 | ||
236 | self.write_assoc_resolution(id, item.into()); | 237 | self.write_assoc_resolution(id, item); |
237 | Some((def, substs)) | 238 | Some((def, substs)) |
238 | }, | 239 | }, |
239 | ) | 240 | ) |
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs index 1dc842f40..9c7996572 100644 --- a/crates/ra_hir_ty/src/infer/unify.rs +++ b/crates/ra_hir_ty/src/infer/unify.rs | |||
@@ -140,13 +140,12 @@ where | |||
140 | impl<T> Canonicalized<T> { | 140 | impl<T> Canonicalized<T> { |
141 | pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty { | 141 | pub fn decanonicalize_ty(&self, mut ty: Ty) -> Ty { |
142 | ty.walk_mut_binders( | 142 | ty.walk_mut_binders( |
143 | &mut |ty, binders| match ty { | 143 | &mut |ty, binders| { |
144 | &mut Ty::Bound(idx) => { | 144 | if let &mut Ty::Bound(idx) = ty { |
145 | if idx as usize >= binders && (idx as usize - binders) < self.free_vars.len() { | 145 | if idx as usize >= binders && (idx as usize - binders) < self.free_vars.len() { |
146 | *ty = Ty::Infer(self.free_vars[idx as usize - binders]); | 146 | *ty = Ty::Infer(self.free_vars[idx as usize - binders]); |
147 | } | 147 | } |
148 | } | 148 | } |
149 | _ => {} | ||
150 | }, | 149 | }, |
151 | 0, | 150 | 0, |
152 | ); | 151 | ); |
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs index 571579cc4..13c5e6c6b 100644 --- a/crates/ra_hir_ty/src/lib.rs +++ b/crates/ra_hir_ty/src/lib.rs | |||
@@ -167,7 +167,7 @@ impl TypeCtor { | |||
167 | | TypeCtor::Closure { .. } // 1 param representing the signature of the closure | 167 | | TypeCtor::Closure { .. } // 1 param representing the signature of the closure |
168 | => 1, | 168 | => 1, |
169 | TypeCtor::Adt(adt) => { | 169 | TypeCtor::Adt(adt) => { |
170 | let generic_params = generics(db, AdtId::from(adt).into()); | 170 | let generic_params = generics(db, adt.into()); |
171 | generic_params.len() | 171 | generic_params.len() |
172 | } | 172 | } |
173 | TypeCtor::FnDef(callable) => { | 173 | TypeCtor::FnDef(callable) => { |
@@ -247,7 +247,7 @@ pub struct ProjectionTy { | |||
247 | 247 | ||
248 | impl ProjectionTy { | 248 | impl ProjectionTy { |
249 | pub fn trait_ref(&self, db: &impl HirDatabase) -> TraitRef { | 249 | pub fn trait_ref(&self, db: &impl HirDatabase) -> TraitRef { |
250 | TraitRef { trait_: self.trait_(db).into(), substs: self.parameters.clone() } | 250 | TraitRef { trait_: self.trait_(db), substs: self.parameters.clone() } |
251 | } | 251 | } |
252 | 252 | ||
253 | fn trait_(&self, db: &impl HirDatabase) -> TraitId { | 253 | fn trait_(&self, db: &impl HirDatabase) -> TraitId { |
@@ -763,8 +763,8 @@ pub trait TypeWalk { | |||
763 | Self: Sized, | 763 | Self: Sized, |
764 | { | 764 | { |
765 | self.walk_mut_binders( | 765 | self.walk_mut_binders( |
766 | &mut |ty, binders| match ty { | 766 | &mut |ty, binders| { |
767 | &mut Ty::Bound(idx) => { | 767 | if let &mut Ty::Bound(idx) = ty { |
768 | if idx as usize >= binders && (idx as usize - binders) < substs.len() { | 768 | if idx as usize >= binders && (idx as usize - binders) < substs.len() { |
769 | *ty = substs.0[idx as usize - binders].clone(); | 769 | *ty = substs.0[idx as usize - binders].clone(); |
770 | } else if idx as usize >= binders + substs.len() { | 770 | } else if idx as usize >= binders + substs.len() { |
@@ -772,7 +772,6 @@ pub trait TypeWalk { | |||
772 | *ty = Ty::Bound(idx - substs.len() as u32); | 772 | *ty = Ty::Bound(idx - substs.len() as u32); |
773 | } | 773 | } |
774 | } | 774 | } |
775 | _ => {} | ||
776 | }, | 775 | }, |
777 | 0, | 776 | 0, |
778 | ); | 777 | ); |
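The unify.rs and lib.rs hunks above both replace a one-armed `match` that ends in a catch-all `_ => {}` with an `if let`, the rewrite clippy's `single_match` lint suggests. A minimal sketch of the same pattern, using a stand-in `Ty` enum rather than the real rust-analyzer type:

```rust
// Stand-in enum; the real Ty lives in ra_hir_ty and has many more variants.
enum Ty {
    Bound(u32),
    Other,
}

fn shift_bound(ty: &mut Ty) {
    // Before:
    //     match ty {
    //         &mut Ty::Bound(idx) => *ty = Ty::Bound(idx + 1),
    //         _ => {}
    //     }
    // After: the single interesting arm becomes an `if let`.
    if let &mut Ty::Bound(idx) = ty {
        *ty = Ty::Bound(idx + 1);
    }
}

fn main() {
    let mut ty = Ty::Bound(0);
    shift_bound(&mut ty);
    assert!(matches!(ty, Ty::Bound(1)));

    let mut other = Ty::Other;
    shift_bound(&mut other); // left untouched, as the old `_ => {}` arm did
}
```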
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs index c2a3703fa..52da34574 100644 --- a/crates/ra_hir_ty/src/lower.rs +++ b/crates/ra_hir_ty/src/lower.rs | |||
@@ -361,10 +361,8 @@ impl Ty { | |||
361 | for t in traits { | 361 | for t in traits { |
362 | if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name) | 362 | if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name) |
363 | { | 363 | { |
364 | let substs = Substs::build_for_def(ctx.db, t) | 364 | let substs = |
365 | .push(self_ty.clone()) | 365 | Substs::build_for_def(ctx.db, t).push(self_ty).fill_with_unknown().build(); |
366 | .fill_with_unknown() | ||
367 | .build(); | ||
368 | // FIXME handle type parameters on the segment | 366 | // FIXME handle type parameters on the segment |
369 | return Ty::Projection(ProjectionTy { associated_ty, parameters: substs }); | 367 | return Ty::Projection(ProjectionTy { associated_ty, parameters: substs }); |
370 | } | 368 | } |
@@ -428,7 +426,7 @@ pub(super) fn substs_from_path_segment( | |||
428 | _add_self_param: bool, | 426 | _add_self_param: bool, |
429 | ) -> Substs { | 427 | ) -> Substs { |
430 | let mut substs = Vec::new(); | 428 | let mut substs = Vec::new(); |
431 | let def_generics = def_generic.map(|def| generics(ctx.db, def.into())); | 429 | let def_generics = def_generic.map(|def| generics(ctx.db, def)); |
432 | 430 | ||
433 | let (parent_params, self_params, type_params, impl_trait_params) = | 431 | let (parent_params, self_params, type_params, impl_trait_params) = |
434 | def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split()); | 432 | def_generics.map_or((0, 0, 0, 0), |g| g.provenance_split()); |
@@ -459,7 +457,7 @@ pub(super) fn substs_from_path_segment( | |||
459 | 457 | ||
460 | // handle defaults | 458 | // handle defaults |
461 | if let Some(def_generic) = def_generic { | 459 | if let Some(def_generic) = def_generic { |
462 | let default_substs = ctx.db.generic_defaults(def_generic.into()); | 460 | let default_substs = ctx.db.generic_defaults(def_generic); |
463 | assert_eq!(substs.len(), default_substs.len()); | 461 | assert_eq!(substs.len(), default_substs.len()); |
464 | 462 | ||
465 | for (i, default_ty) in default_substs.iter().enumerate() { | 463 | for (i, default_ty) in default_substs.iter().enumerate() { |
@@ -483,7 +481,7 @@ impl TraitRef { | |||
483 | _ => return None, | 481 | _ => return None, |
484 | }; | 482 | }; |
485 | let segment = path.segments().last().expect("path should have at least one segment"); | 483 | let segment = path.segments().last().expect("path should have at least one segment"); |
486 | Some(TraitRef::from_resolved_path(ctx, resolved.into(), segment, explicit_self_ty)) | 484 | Some(TraitRef::from_resolved_path(ctx, resolved, segment, explicit_self_ty)) |
487 | } | 485 | } |
488 | 486 | ||
489 | pub(crate) fn from_resolved_path( | 487 | pub(crate) fn from_resolved_path( |
@@ -728,7 +726,7 @@ pub(crate) fn generic_predicates_query( | |||
728 | pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { | 726 | pub(crate) fn generic_defaults_query(db: &impl HirDatabase, def: GenericDefId) -> Substs { |
729 | let resolver = def.resolver(db); | 727 | let resolver = def.resolver(db); |
730 | let ctx = TyLoweringContext::new(db, &resolver); | 728 | let ctx = TyLoweringContext::new(db, &resolver); |
731 | let generic_params = generics(db, def.into()); | 729 | let generic_params = generics(db, def); |
732 | 730 | ||
733 | let defaults = generic_params | 731 | let defaults = generic_params |
734 | .iter() | 732 | .iter() |
@@ -792,7 +790,7 @@ fn type_for_builtin(def: BuiltinType) -> Ty { | |||
792 | } | 790 | } |
793 | 791 | ||
794 | fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig { | 792 | fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFnSig { |
795 | let struct_data = db.struct_data(def.into()); | 793 | let struct_data = db.struct_data(def); |
796 | let fields = struct_data.variant_data.fields(); | 794 | let fields = struct_data.variant_data.fields(); |
797 | let resolver = def.resolver(db); | 795 | let resolver = def.resolver(db); |
798 | let ctx = | 796 | let ctx = |
@@ -805,7 +803,7 @@ fn fn_sig_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> PolyFn | |||
805 | 803 | ||
806 | /// Build the type of a tuple struct constructor. | 804 | /// Build the type of a tuple struct constructor. |
807 | fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> { | 805 | fn type_for_struct_constructor(db: &impl HirDatabase, def: StructId) -> Binders<Ty> { |
808 | let struct_data = db.struct_data(def.into()); | 806 | let struct_data = db.struct_data(def); |
809 | if let StructKind::Unit = struct_data.variant_data.kind() { | 807 | if let StructKind::Unit = struct_data.variant_data.kind() { |
810 | return type_for_adt(db, def.into()); | 808 | return type_for_adt(db, def.into()); |
811 | } | 809 | } |
@@ -836,7 +834,7 @@ fn type_for_enum_variant_constructor(db: &impl HirDatabase, def: EnumVariantId) | |||
836 | } | 834 | } |
837 | let generics = generics(db, def.parent.into()); | 835 | let generics = generics(db, def.parent.into()); |
838 | let substs = Substs::bound_vars(&generics); | 836 | let substs = Substs::bound_vars(&generics); |
839 | Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(EnumVariantId::from(def).into()), substs)) | 837 | Binders::new(substs.len(), Ty::apply(TypeCtor::FnDef(def.into()), substs)) |
840 | } | 838 | } |
841 | 839 | ||
842 | fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> { | 840 | fn type_for_adt(db: &impl HirDatabase, adt: AdtId) -> Binders<Ty> { |
@@ -964,6 +962,6 @@ pub(crate) fn impl_trait_query( | |||
964 | let target_trait = impl_data.target_trait.as_ref()?; | 962 | let target_trait = impl_data.target_trait.as_ref()?; |
965 | Some(Binders::new( | 963 | Some(Binders::new( |
966 | self_ty.num_binders, | 964 | self_ty.num_binders, |
967 | TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value.clone()))?, | 965 | TraitRef::from_hir(&ctx, target_trait, Some(self_ty.value))?, |
968 | )) | 966 | )) |
969 | } | 967 | } |
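Most of the lower.rs changes drop `.into()` calls whose source and target types are already identical, which clippy reports as an identity conversion. A minimal sketch with a hypothetical newtype standing in for the real id types:

```rust
// Hypothetical stand-in for an id type such as GenericDefId.
#[derive(Debug, PartialEq, Clone, Copy)]
struct DefId(u32);

fn generics(def: DefId) -> DefId {
    def
}

fn main() {
    let def = DefId(7);
    // Before: compiles because of the blanket `impl From<T> for T`,
    // but the conversion does nothing.
    let a = generics(def.into());
    // After: pass the value directly.
    let b = generics(def);
    assert_eq!(a, b);
}
```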
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs index 4f8c52433..964acdb09 100644 --- a/crates/ra_hir_ty/src/method_resolution.rs +++ b/crates/ra_hir_ty/src/method_resolution.rs | |||
@@ -214,7 +214,7 @@ pub fn iterate_method_candidates<T>( | |||
214 | // the methods by autoderef order of *receiver types*, not *self | 214 | // the methods by autoderef order of *receiver types*, not *self |
215 | // types*. | 215 | // types*. |
216 | 216 | ||
217 | let deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty.clone()).collect(); | 217 | let deref_chain: Vec<_> = autoderef::autoderef(db, Some(krate), ty).collect(); |
218 | for i in 0..deref_chain.len() { | 218 | for i in 0..deref_chain.len() { |
219 | if let Some(result) = iterate_method_candidates_with_autoref( | 219 | if let Some(result) = iterate_method_candidates_with_autoref( |
220 | &deref_chain[i..], | 220 | &deref_chain[i..], |
@@ -290,7 +290,7 @@ fn iterate_method_candidates_with_autoref<T>( | |||
290 | &ref_muted, | 290 | &ref_muted, |
291 | deref_chain, | 291 | deref_chain, |
292 | db, | 292 | db, |
293 | env.clone(), | 293 | env, |
294 | krate, | 294 | krate, |
295 | &traits_in_scope, | 295 | &traits_in_scope, |
296 | name, | 296 | name, |
@@ -391,17 +391,17 @@ fn iterate_trait_method_candidates<T>( | |||
391 | // iteration | 391 | // iteration |
392 | let mut known_implemented = false; | 392 | let mut known_implemented = false; |
393 | for (_name, item) in data.items.iter() { | 393 | for (_name, item) in data.items.iter() { |
394 | if !is_valid_candidate(db, name, receiver_ty, (*item).into(), self_ty) { | 394 | if !is_valid_candidate(db, name, receiver_ty, *item, self_ty) { |
395 | continue; | 395 | continue; |
396 | } | 396 | } |
397 | if !known_implemented { | 397 | if !known_implemented { |
398 | let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone()); | 398 | let goal = generic_implements_goal(db, env.clone(), t, self_ty.clone()); |
399 | if db.trait_solve(krate.into(), goal).is_none() { | 399 | if db.trait_solve(krate, goal).is_none() { |
400 | continue 'traits; | 400 | continue 'traits; |
401 | } | 401 | } |
402 | } | 402 | } |
403 | known_implemented = true; | 403 | known_implemented = true; |
404 | if let Some(result) = callback(&self_ty.value, (*item).into()) { | 404 | if let Some(result) = callback(&self_ty.value, *item) { |
405 | return Some(result); | 405 | return Some(result); |
406 | } | 406 | } |
407 | } | 407 | } |
@@ -521,7 +521,7 @@ pub fn implements_trait( | |||
521 | return true; | 521 | return true; |
522 | } | 522 | } |
523 | let goal = generic_implements_goal(db, env, trait_, ty.clone()); | 523 | let goal = generic_implements_goal(db, env, trait_, ty.clone()); |
524 | let solution = db.trait_solve(krate.into(), goal); | 524 | let solution = db.trait_solve(krate, goal); |
525 | 525 | ||
526 | solution.is_some() | 526 | solution.is_some() |
527 | } | 527 | } |
diff --git a/crates/ra_hir_ty/src/op.rs b/crates/ra_hir_ty/src/op.rs index ae253ca04..54e2bd05a 100644 --- a/crates/ra_hir_ty/src/op.rs +++ b/crates/ra_hir_ty/src/op.rs | |||
@@ -30,20 +30,18 @@ pub(super) fn binary_op_return_ty(op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Ty { | |||
30 | pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty { | 30 | pub(super) fn binary_op_rhs_expectation(op: BinaryOp, lhs_ty: Ty) -> Ty { |
31 | match op { | 31 | match op { |
32 | BinaryOp::LogicOp(..) => Ty::simple(TypeCtor::Bool), | 32 | BinaryOp::LogicOp(..) => Ty::simple(TypeCtor::Bool), |
33 | BinaryOp::Assignment { op: None } | BinaryOp::CmpOp(CmpOp::Eq { negated: _ }) => { | 33 | BinaryOp::Assignment { op: None } | BinaryOp::CmpOp(CmpOp::Eq { .. }) => match lhs_ty { |
34 | match lhs_ty { | 34 | Ty::Apply(ApplicationTy { ctor, .. }) => match ctor { |
35 | Ty::Apply(ApplicationTy { ctor, .. }) => match ctor { | 35 | TypeCtor::Int(..) |
36 | TypeCtor::Int(..) | 36 | | TypeCtor::Float(..) |
37 | | TypeCtor::Float(..) | 37 | | TypeCtor::Str |
38 | | TypeCtor::Str | 38 | | TypeCtor::Char |
39 | | TypeCtor::Char | 39 | | TypeCtor::Bool => lhs_ty, |
40 | | TypeCtor::Bool => lhs_ty, | ||
41 | _ => Ty::Unknown, | ||
42 | }, | ||
43 | Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty, | ||
44 | _ => Ty::Unknown, | 40 | _ => Ty::Unknown, |
45 | } | 41 | }, |
46 | } | 42 | Ty::Infer(InferTy::IntVar(..)) | Ty::Infer(InferTy::FloatVar(..)) => lhs_ty, |
43 | _ => Ty::Unknown, | ||
44 | }, | ||
47 | BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => Ty::Unknown, | 45 | BinaryOp::ArithOp(ArithOp::Shl) | BinaryOp::ArithOp(ArithOp::Shr) => Ty::Unknown, |
48 | BinaryOp::CmpOp(CmpOp::Ord { .. }) | 46 | BinaryOp::CmpOp(CmpOp::Ord { .. }) |
49 | | BinaryOp::Assignment { op: Some(_) } | 47 | | BinaryOp::Assignment { op: Some(_) } |
diff --git a/crates/ra_hir_ty/src/test_db.rs b/crates/ra_hir_ty/src/test_db.rs index 1a31b587b..c794f7b84 100644 --- a/crates/ra_hir_ty/src/test_db.rs +++ b/crates/ra_hir_ty/src/test_db.rs | |||
@@ -86,15 +86,14 @@ impl TestDB { | |||
86 | pub fn diagnostics(&self) -> String { | 86 | pub fn diagnostics(&self) -> String { |
87 | let mut buf = String::new(); | 87 | let mut buf = String::new(); |
88 | let crate_graph = self.crate_graph(); | 88 | let crate_graph = self.crate_graph(); |
89 | for krate in crate_graph.iter().next() { | 89 | for krate in crate_graph.iter() { |
90 | let crate_def_map = self.crate_def_map(krate); | 90 | let crate_def_map = self.crate_def_map(krate); |
91 | 91 | ||
92 | let mut fns = Vec::new(); | 92 | let mut fns = Vec::new(); |
93 | for (module_id, _) in crate_def_map.modules.iter() { | 93 | for (module_id, _) in crate_def_map.modules.iter() { |
94 | for decl in crate_def_map[module_id].scope.declarations() { | 94 | for decl in crate_def_map[module_id].scope.declarations() { |
95 | match decl { | 95 | if let ModuleDefId::FunctionId(f) = decl { |
96 | ModuleDefId::FunctionId(f) => fns.push(f), | 96 | fns.push(f) |
97 | _ => (), | ||
98 | } | 97 | } |
99 | } | 98 | } |
100 | 99 | ||
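The test_db.rs hunk fixes a genuine bug rather than style: `Iterator::next()` returns an `Option`, and `Option` implements `IntoIterator`, so `for krate in crate_graph.iter().next()` looped over at most one crate; dropping `.next()` visits them all. A small self-contained demonstration:

```rust
fn main() {
    let crates = vec!["core", "alloc", "std"];

    // Old shape: the loop runs over Option<&&str>, i.e. once at most.
    let mut visited = 0;
    for _krate in crates.iter().next() {
        visited += 1;
    }
    assert_eq!(visited, 1);

    // New shape: the loop runs over the whole iterator.
    let mut visited = 0;
    for _krate in crates.iter() {
        visited += 1;
    }
    assert_eq!(visited, crates.len());
}
```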
diff --git a/crates/ra_hir_ty/src/tests.rs b/crates/ra_hir_ty/src/tests.rs index d1f10e675..240cc03a2 100644 --- a/crates/ra_hir_ty/src/tests.rs +++ b/crates/ra_hir_ty/src/tests.rs | |||
@@ -101,9 +101,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { | |||
101 | (src_ptr.value.range(), node.text().to_string().replace("\n", " ")) | 101 | (src_ptr.value.range(), node.text().to_string().replace("\n", " ")) |
102 | }; | 102 | }; |
103 | let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; | 103 | let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; |
104 | write!( | 104 | writeln!( |
105 | acc, | 105 | acc, |
106 | "{}{} '{}': {}\n", | 106 | "{}{} '{}': {}", |
107 | macro_prefix, | 107 | macro_prefix, |
108 | range, | 108 | range, |
109 | ellipsize(text, 15), | 109 | ellipsize(text, 15), |
@@ -118,9 +118,9 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String { | |||
118 | for (src_ptr, mismatch) in &mismatches { | 118 | for (src_ptr, mismatch) in &mismatches { |
119 | let range = src_ptr.value.range(); | 119 | let range = src_ptr.value.range(); |
120 | let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; | 120 | let macro_prefix = if src_ptr.file_id != file_id.into() { "!" } else { "" }; |
121 | write!( | 121 | writeln!( |
122 | acc, | 122 | acc, |
123 | "{}{}: expected {}, got {}\n", | 123 | "{}{}: expected {}, got {}", |
124 | macro_prefix, | 124 | macro_prefix, |
125 | range, | 125 | range, |
126 | mismatch.expected.display(&db), | 126 | mismatch.expected.display(&db), |
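The tests.rs hunks swap `write!` with a trailing `\n` for `writeln!`, as clippy's `write_with_newline` lint suggests; the output is identical. A minimal sketch:

```rust
use std::fmt::Write;

fn main() {
    let mut with_write = String::new();
    let mut with_writeln = String::new();

    write!(with_write, "{}: expected {}, got {}\n", "10..20", "i32", "u32").unwrap();
    writeln!(with_writeln, "{}: expected {}, got {}", "10..20", "i32", "u32").unwrap();

    assert_eq!(with_write, with_writeln);
}
```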
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs index ff8e75b48..e83449957 100644 --- a/crates/ra_hir_ty/src/traits.rs +++ b/crates/ra_hir_ty/src/traits.rs | |||
@@ -248,12 +248,9 @@ fn solution_from_chalk( | |||
248 | let value = subst | 248 | let value = subst |
249 | .value | 249 | .value |
250 | .into_iter() | 250 | .into_iter() |
251 | .map(|p| { | 251 | .map(|p| match p.ty() { |
252 | let ty = match p.ty() { | 252 | Some(ty) => from_chalk(db, ty.clone()), |
253 | Some(ty) => from_chalk(db, ty.clone()), | 253 | None => unimplemented!(), |
254 | None => unimplemented!(), | ||
255 | }; | ||
256 | ty | ||
257 | }) | 254 | }) |
258 | .collect(); | 255 | .collect(); |
259 | let result = Canonical { value, num_vars: subst.binders.len() }; | 256 | let result = Canonical { value, num_vars: subst.binders.len() }; |
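The traits.rs change removes a binding that only renamed the value before returning it from the closure, the shape clippy's `let_and_return` lint flags. A minimal sketch of the same simplification:

```rust
fn double_before(x: i32) -> i32 {
    let y = x * 2; // bound only to be returned on the next line
    y
}

fn double_after(x: i32) -> i32 {
    x * 2
}

fn main() {
    assert_eq!(double_before(21), double_after(21));
}
```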
diff --git a/crates/ra_hir_ty/src/traits/builtin.rs b/crates/ra_hir_ty/src/traits/builtin.rs index dd41176f0..a537420a5 100644 --- a/crates/ra_hir_ty/src/traits/builtin.rs +++ b/crates/ra_hir_ty/src/traits/builtin.rs | |||
@@ -98,7 +98,7 @@ fn closure_fn_trait_impl_datum( | |||
98 | // the existence of the Fn trait has been checked before | 98 | // the existence of the Fn trait has been checked before |
99 | .expect("fn trait for closure impl missing"); | 99 | .expect("fn trait for closure impl missing"); |
100 | 100 | ||
101 | let num_args: u16 = match &db.body(data.def.into())[data.expr] { | 101 | let num_args: u16 = match &db.body(data.def)[data.expr] { |
102 | Expr::Lambda { args, .. } => args.len() as u16, | 102 | Expr::Lambda { args, .. } => args.len() as u16, |
103 | _ => { | 103 | _ => { |
104 | log::warn!("closure for closure type {:?} not found", data); | 104 | log::warn!("closure for closure type {:?} not found", data); |
@@ -118,11 +118,11 @@ fn closure_fn_trait_impl_datum( | |||
118 | let self_ty = Ty::apply_one(TypeCtor::Closure { def: data.def, expr: data.expr }, sig_ty); | 118 | let self_ty = Ty::apply_one(TypeCtor::Closure { def: data.def, expr: data.expr }, sig_ty); |
119 | 119 | ||
120 | let trait_ref = TraitRef { | 120 | let trait_ref = TraitRef { |
121 | trait_: trait_.into(), | 121 | trait_, |
122 | substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(), | 122 | substs: Substs::build_for_def(db, trait_).push(self_ty).push(arg_ty).build(), |
123 | }; | 123 | }; |
124 | 124 | ||
125 | let output_ty_id = AssocTyValue::ClosureFnTraitImplOutput(data.clone()); | 125 | let output_ty_id = AssocTyValue::ClosureFnTraitImplOutput(data); |
126 | 126 | ||
127 | BuiltinImplData { | 127 | BuiltinImplData { |
128 | num_vars: num_args as usize + 1, | 128 | num_vars: num_args as usize + 1, |
@@ -137,9 +137,9 @@ fn closure_fn_trait_output_assoc_ty_value( | |||
137 | krate: CrateId, | 137 | krate: CrateId, |
138 | data: super::ClosureFnTraitImplData, | 138 | data: super::ClosureFnTraitImplData, |
139 | ) -> BuiltinImplAssocTyValueData { | 139 | ) -> BuiltinImplAssocTyValueData { |
140 | let impl_ = Impl::ClosureFnTraitImpl(data.clone()); | 140 | let impl_ = Impl::ClosureFnTraitImpl(data); |
141 | 141 | ||
142 | let num_args: u16 = match &db.body(data.def.into())[data.expr] { | 142 | let num_args: u16 = match &db.body(data.def)[data.expr] { |
143 | Expr::Lambda { args, .. } => args.len() as u16, | 143 | Expr::Lambda { args, .. } => args.len() as u16, |
144 | _ => { | 144 | _ => { |
145 | log::warn!("closure for closure type {:?} not found", data); | 145 | log::warn!("closure for closure type {:?} not found", data); |
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs index 882160fa8..1bdf13e48 100644 --- a/crates/ra_hir_ty/src/traits/chalk.rs +++ b/crates/ra_hir_ty/src/traits/chalk.rs | |||
@@ -409,8 +409,7 @@ where | |||
409 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Canonical<T::Chalk> { | 409 | fn to_chalk(self, db: &impl HirDatabase) -> chalk_ir::Canonical<T::Chalk> { |
410 | let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); | 410 | let parameter = chalk_ir::ParameterKind::Ty(chalk_ir::UniverseIndex::ROOT); |
411 | let value = self.value.to_chalk(db); | 411 | let value = self.value.to_chalk(db); |
412 | let canonical = chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] }; | 412 | chalk_ir::Canonical { value, binders: vec![parameter; self.num_vars] } |
413 | canonical | ||
414 | } | 413 | } |
415 | 414 | ||
416 | fn from_chalk(db: &impl HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { | 415 | fn from_chalk(db: &impl HirDatabase, canonical: chalk_ir::Canonical<T::Chalk>) -> Canonical<T> { |
@@ -565,10 +564,10 @@ where | |||
565 | // and will panic if the trait can't be resolved. | 564 | // and will panic if the trait can't be resolved. |
566 | let mut result: Vec<_> = self | 565 | let mut result: Vec<_> = self |
567 | .db | 566 | .db |
568 | .impls_for_trait(self.krate, trait_.into()) | 567 | .impls_for_trait(self.krate, trait_) |
569 | .iter() | 568 | .iter() |
570 | .copied() | 569 | .copied() |
571 | .map(|it| Impl::ImplBlock(it.into())) | 570 | .map(Impl::ImplBlock) |
572 | .map(|impl_| impl_.to_chalk(self.db)) | 571 | .map(|impl_| impl_.to_chalk(self.db)) |
573 | .collect(); | 572 | .collect(); |
574 | 573 | ||
@@ -586,7 +585,7 @@ where | |||
586 | false // FIXME | 585 | false // FIXME |
587 | } | 586 | } |
588 | fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> { | 587 | fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc<AssociatedTyValue> { |
589 | self.db.associated_ty_value(self.krate.into(), id) | 588 | self.db.associated_ty_value(self.krate, id) |
590 | } | 589 | } |
591 | fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<TypeFamily>> { | 590 | fn custom_clauses(&self) -> Vec<chalk_ir::ProgramClause<TypeFamily>> { |
592 | vec![] | 591 | vec![] |
@@ -674,7 +673,7 @@ pub(crate) fn struct_datum_query( | |||
674 | let where_clauses = type_ctor | 673 | let where_clauses = type_ctor |
675 | .as_generic_def() | 674 | .as_generic_def() |
676 | .map(|generic_def| { | 675 | .map(|generic_def| { |
677 | let generic_params = generics(db, generic_def.into()); | 676 | let generic_params = generics(db, generic_def); |
678 | let bound_vars = Substs::bound_vars(&generic_params); | 677 | let bound_vars = Substs::bound_vars(&generic_params); |
679 | convert_where_clauses(db, generic_def, &bound_vars) | 678 | convert_where_clauses(db, generic_def, &bound_vars) |
680 | }) | 679 | }) |
@@ -805,7 +804,7 @@ fn type_alias_associated_ty_value( | |||
805 | let ty = db.ty(type_alias.into()); | 804 | let ty = db.ty(type_alias.into()); |
806 | let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) }; | 805 | let value_bound = chalk_rust_ir::AssociatedTyValueBound { ty: ty.value.to_chalk(db) }; |
807 | let value = chalk_rust_ir::AssociatedTyValue { | 806 | let value = chalk_rust_ir::AssociatedTyValue { |
808 | impl_id: Impl::ImplBlock(impl_id.into()).to_chalk(db), | 807 | impl_id: Impl::ImplBlock(impl_id).to_chalk(db), |
809 | associated_ty_id: assoc_ty.to_chalk(db), | 808 | associated_ty_id: assoc_ty.to_chalk(db), |
810 | value: make_binders(value_bound, ty.num_binders), | 809 | value: make_binders(value_bound, ty.num_binders), |
811 | }; | 810 | }; |
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml index 97dea5ffd..3407d2598 100644 --- a/crates/ra_ide/Cargo.toml +++ b/crates/ra_ide/Cargo.toml | |||
@@ -11,15 +11,19 @@ doctest = false | |||
11 | wasm = [] | 11 | wasm = [] |
12 | 12 | ||
13 | [dependencies] | 13 | [dependencies] |
14 | either = "1.5" | 14 | either = "1.5.3" |
15 | format-buf = "1.0.0" | 15 | format-buf = "1.0.0" |
16 | indexmap = "1.3.0" | 16 | indexmap = "1.3.2" |
17 | itertools = "0.8.0" | 17 | itertools = "0.8.2" |
18 | join_to_string = "0.1.3" | 18 | join_to_string = "0.1.3" |
19 | log = "0.4.5" | 19 | log = "0.4.8" |
20 | rustc-hash = "1.0" | 20 | rayon = "1.3.0" |
21 | rand = { version = "0.7.0", features = ["small_rng"] } | 21 | fst = { version = "0.3.5", default-features = false } |
22 | once_cell = "1.2.0" | 22 | rustc-hash = "1.1.0" |
23 | unicase = "2.6.0" | ||
24 | superslice = "1.0.0" | ||
25 | rand = { version = "0.7.3", features = ["small_rng"] } | ||
26 | once_cell = "1.3.1" | ||
23 | 27 | ||
24 | ra_syntax = { path = "../ra_syntax" } | 28 | ra_syntax = { path = "../ra_syntax" } |
25 | ra_text_edit = { path = "../ra_text_edit" } | 29 | ra_text_edit = { path = "../ra_text_edit" } |
@@ -36,4 +40,4 @@ ra_assists = { path = "../ra_assists" } | |||
36 | hir = { path = "../ra_hir", package = "ra_hir" } | 40 | hir = { path = "../ra_hir", package = "ra_hir" } |
37 | 41 | ||
38 | [dev-dependencies] | 42 | [dev-dependencies] |
39 | insta = "0.13.0" | 43 | insta = "0.13.1" |
diff --git a/crates/ra_ide/src/call_info.rs b/crates/ra_ide/src/call_info.rs index f2b29306e..7c6322cb4 100644 --- a/crates/ra_ide/src/call_info.rs +++ b/crates/ra_ide/src/call_info.rs | |||
@@ -128,7 +128,7 @@ impl FnCallNode { | |||
128 | }), | 128 | }), |
129 | 129 | ||
130 | FnCallNode::MethodCallExpr(call_expr) => { | 130 | FnCallNode::MethodCallExpr(call_expr) => { |
131 | call_expr.syntax().children().filter_map(ast::NameRef::cast).nth(0) | 131 | call_expr.syntax().children().filter_map(ast::NameRef::cast).next() |
132 | } | 132 | } |
133 | 133 | ||
134 | FnCallNode::MacroCallExpr(call_expr) => call_expr.path()?.segment()?.name_ref(), | 134 | FnCallNode::MacroCallExpr(call_expr) => call_expr.path()?.segment()?.name_ref(), |
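In call_info.rs, `.nth(0)` and `.next()` both take the first remaining element of an iterator; clippy's `iter_nth_zero` lint prefers the latter. A one-line check:

```rust
fn main() {
    let names = ["receiver", "method", "arg"];
    // Both return Some(&"receiver").
    assert_eq!(names.iter().nth(0), names.iter().next());
}
```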
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs index 6ff10c017..83628e35c 100644 --- a/crates/ra_ide/src/completion/complete_trait_impl.rs +++ b/crates/ra_ide/src/completion/complete_trait_impl.rs | |||
@@ -59,7 +59,7 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext | |||
59 | .as_ref() | 59 | .as_ref() |
60 | .and_then(|node| node.parent()) | 60 | .and_then(|node| node.parent()) |
61 | .and_then(|node| node.parent()) | 61 | .and_then(|node| node.parent()) |
62 | .and_then(|node| ast::ImplBlock::cast(node)); | 62 | .and_then(ast::ImplBlock::cast); |
63 | 63 | ||
64 | if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) { | 64 | if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) { |
65 | match trigger.kind() { | 65 | match trigger.kind() { |
@@ -110,17 +110,17 @@ fn add_function_impl( | |||
110 | ctx: &CompletionContext, | 110 | ctx: &CompletionContext, |
111 | func: &hir::Function, | 111 | func: &hir::Function, |
112 | ) { | 112 | ) { |
113 | let display = FunctionSignature::from_hir(ctx.db, func.clone()); | 113 | let display = FunctionSignature::from_hir(ctx.db, *func); |
114 | 114 | ||
115 | let fn_name = func.name(ctx.db).to_string(); | 115 | let fn_name = func.name(ctx.db).to_string(); |
116 | 116 | ||
117 | let label = if func.params(ctx.db).len() > 0 { | 117 | let label = if !func.params(ctx.db).is_empty() { |
118 | format!("fn {}(..)", fn_name) | 118 | format!("fn {}(..)", fn_name) |
119 | } else { | 119 | } else { |
120 | format!("fn {}()", fn_name) | 120 | format!("fn {}()", fn_name) |
121 | }; | 121 | }; |
122 | 122 | ||
123 | let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label.clone()) | 123 | let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label) |
124 | .lookup_by(fn_name) | 124 | .lookup_by(fn_name) |
125 | .set_documentation(func.docs(ctx.db)); | 125 | .set_documentation(func.docs(ctx.db)); |
126 | 126 | ||
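Two idioms appear in the complete_trait_impl.rs hunks: a closure that only forwards its argument can be replaced by the function itself (clippy's `redundant_closure`), and `len() > 0` reads better as a negated `is_empty()`. A sketch with a hypothetical `parse_number` helper standing in for `ast::ImplBlock::cast`:

```rust
// Hypothetical helper; any fn(&str) -> Option<T> shows the point.
fn parse_number(s: &str) -> Option<i64> {
    s.trim().parse().ok()
}

fn main() {
    let input: Option<&str> = Some(" 42 ");

    let via_closure = input.and_then(|s| parse_number(s)); // before
    let via_fn = input.and_then(parse_number);             // after
    assert_eq!(via_closure, via_fn);

    let params: Vec<&str> = vec![];
    // Emptiness check instead of comparing len() with 0.
    assert!(params.is_empty());
}
```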
diff --git a/crates/ra_ide/src/completion/completion_item.rs b/crates/ra_ide/src/completion/completion_item.rs index 93f336370..61867c0ff 100644 --- a/crates/ra_ide/src/completion/completion_item.rs +++ b/crates/ra_ide/src/completion/completion_item.rs | |||
@@ -159,7 +159,7 @@ impl CompletionItem { | |||
159 | 159 | ||
160 | /// Short one-line additional information, like a type | 160 | /// Short one-line additional information, like a type |
161 | pub fn detail(&self) -> Option<&str> { | 161 | pub fn detail(&self) -> Option<&str> { |
162 | self.detail.as_ref().map(|it| it.as_str()) | 162 | self.detail.as_deref() |
163 | } | 163 | } |
164 | /// A doc-comment | 164 | /// A doc-comment |
165 | pub fn documentation(&self) -> Option<Documentation> { | 165 | pub fn documentation(&self) -> Option<Documentation> { |
@@ -167,7 +167,7 @@ impl CompletionItem { | |||
167 | } | 167 | } |
168 | /// What string is used for filtering. | 168 | /// What string is used for filtering. |
169 | pub fn lookup(&self) -> &str { | 169 | pub fn lookup(&self) -> &str { |
170 | self.lookup.as_ref().map(|it| it.as_str()).unwrap_or_else(|| self.label()) | 170 | self.lookup.as_deref().unwrap_or_else(|| self.label()) |
171 | } | 171 | } |
172 | 172 | ||
173 | pub fn kind(&self) -> Option<CompletionItemKind> { | 173 | pub fn kind(&self) -> Option<CompletionItemKind> { |
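`Option::as_deref`, stable since Rust 1.40, converts `Option<String>` into `Option<&str>` and replaces the longer `as_ref().map(|it| it.as_str())` chain used above; the navigation_target.rs hunk below applies the same rewrite. A minimal sketch:

```rust
fn main() {
    let detail: Option<String> = Some("fn foo() -> i32".to_string());

    let via_map: Option<&str> = detail.as_ref().map(|it| it.as_str());
    let via_as_deref: Option<&str> = detail.as_deref();

    assert_eq!(via_map, via_as_deref);
}
```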
diff --git a/crates/ra_ide/src/display/function_signature.rs b/crates/ra_ide/src/display/function_signature.rs index c23e08e9a..b85fd8075 100644 --- a/crates/ra_ide/src/display/function_signature.rs +++ b/crates/ra_ide/src/display/function_signature.rs | |||
@@ -54,9 +54,8 @@ impl FunctionSignature { | |||
54 | 54 | ||
55 | pub(crate) fn from_struct(db: &RootDatabase, st: hir::Struct) -> Option<Self> { | 55 | pub(crate) fn from_struct(db: &RootDatabase, st: hir::Struct) -> Option<Self> { |
56 | let node: ast::StructDef = st.source(db).value; | 56 | let node: ast::StructDef = st.source(db).value; |
57 | match node.kind() { | 57 | if let ast::StructKind::Record(_) = node.kind() { |
58 | ast::StructKind::Record(_) => return None, | 58 | return None; |
59 | _ => (), | ||
60 | }; | 59 | }; |
61 | 60 | ||
62 | let params = st | 61 | let params = st |
diff --git a/crates/ra_ide/src/display/navigation_target.rs b/crates/ra_ide/src/display/navigation_target.rs index 906aab1eb..096c41c81 100644 --- a/crates/ra_ide/src/display/navigation_target.rs +++ b/crates/ra_ide/src/display/navigation_target.rs | |||
@@ -64,11 +64,11 @@ impl NavigationTarget { | |||
64 | } | 64 | } |
65 | 65 | ||
66 | pub fn docs(&self) -> Option<&str> { | 66 | pub fn docs(&self) -> Option<&str> { |
67 | self.docs.as_ref().map(String::as_str) | 67 | self.docs.as_deref() |
68 | } | 68 | } |
69 | 69 | ||
70 | pub fn description(&self) -> Option<&str> { | 70 | pub fn description(&self) -> Option<&str> { |
71 | self.description.as_ref().map(String::as_str) | 71 | self.description.as_deref() |
72 | } | 72 | } |
73 | 73 | ||
74 | /// A "most interesting" range withing the `full_range`. | 74 | /// A "most interesting" range withing the `full_range`. |
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs index de924fad2..97c08ade5 100644 --- a/crates/ra_ide/src/references.rs +++ b/crates/ra_ide/src/references.rs | |||
@@ -268,7 +268,7 @@ fn decl_access( | |||
268 | }; | 268 | }; |
269 | 269 | ||
270 | let stmt = find_node_at_offset::<ast::LetStmt>(syntax, range.start())?; | 270 | let stmt = find_node_at_offset::<ast::LetStmt>(syntax, range.start())?; |
271 | if let Some(_) = stmt.initializer() { | 271 | if stmt.initializer().is_some() { |
272 | let pat = stmt.pat()?; | 272 | let pat = stmt.pat()?; |
273 | if let ast::Pat::BindPat(it) = pat { | 273 | if let ast::Pat::BindPat(it) = pat { |
274 | if it.name()?.text().as_str() == name { | 274 | if it.name()?.text().as_str() == name { |
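`if let Some(_) = x` only asks whether a value is present, so clippy's `redundant_pattern_matching` lint suggests `x.is_some()`; the `!self.crate_graph.is_none()` to `is_some()` change in change.rs below is the same idea. A minimal sketch:

```rust
fn has_initializer(initializer: Option<&str>) -> bool {
    // Before:
    //     if let Some(_) = initializer { true } else { false }
    // After:
    initializer.is_some()
}

fn main() {
    assert!(has_initializer(Some("42")));
    assert!(!has_initializer(None));
}
```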
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs index 14eb0b8b2..902c29fc6 100644 --- a/crates/ra_ide/src/ssr.rs +++ b/crates/ra_ide/src/ssr.rs | |||
@@ -85,8 +85,11 @@ impl FromStr for SsrQuery { | |||
85 | fn from_str(query: &str) -> Result<SsrQuery, SsrError> { | 85 | fn from_str(query: &str) -> Result<SsrQuery, SsrError> { |
86 | let mut it = query.split("==>>"); | 86 | let mut it = query.split("==>>"); |
87 | let pattern = it.next().expect("at least empty string").trim(); | 87 | let pattern = it.next().expect("at least empty string").trim(); |
88 | let mut template = | 88 | let mut template = it |
89 | it.next().ok_or(SsrError("Cannot find delemiter `==>>`".into()))?.trim().to_string(); | 89 | .next() |
90 | .ok_or_else(|| SsrError("Cannot find delemiter `==>>`".into()))? | ||
91 | .trim() | ||
92 | .to_string(); | ||
90 | if it.next().is_some() { | 93 | if it.next().is_some() { |
91 | return Err(SsrError("More than one delimiter found".into())); | 94 | return Err(SsrError("More than one delimiter found".into())); |
92 | } | 95 | } |
@@ -131,11 +134,12 @@ fn traverse(node: &SyntaxNode, go: &mut impl FnMut(&SyntaxNode) -> bool) { | |||
131 | } | 134 | } |
132 | 135 | ||
133 | fn split_by_var(s: &str) -> Result<(&str, &str, &str), SsrError> { | 136 | fn split_by_var(s: &str) -> Result<(&str, &str, &str), SsrError> { |
134 | let end_of_name = s.find(":").ok_or(SsrError("Use $<name>:expr".into()))?; | 137 | let end_of_name = s.find(':').ok_or_else(|| SsrError("Use $<name>:expr".into()))?; |
135 | let name = &s[0..end_of_name]; | 138 | let name = &s[0..end_of_name]; |
136 | is_name(name)?; | 139 | is_name(name)?; |
137 | let type_begin = end_of_name + 1; | 140 | let type_begin = end_of_name + 1; |
138 | let type_length = s[type_begin..].find(|c| !char::is_ascii_alphanumeric(&c)).unwrap_or(s.len()); | 141 | let type_length = |
142 | s[type_begin..].find(|c| !char::is_ascii_alphanumeric(&c)).unwrap_or_else(|| s.len()); | ||
139 | let type_name = &s[type_begin..type_begin + type_length]; | 143 | let type_name = &s[type_begin..type_begin + type_length]; |
140 | Ok((name, type_name, &s[type_begin + type_length..])) | 144 | Ok((name, type_name, &s[type_begin + type_length..])) |
141 | } | 145 | } |
@@ -182,7 +186,7 @@ fn find(pattern: &SsrPattern, code: &SyntaxNode) -> SsrMatches { | |||
182 | pattern.text() == code.text() | 186 | pattern.text() == code.text() |
183 | } | 187 | } |
184 | (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => { | 188 | (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => { |
185 | if placeholders.iter().find(|&n| n.0.as_str() == pattern.text()).is_some() { | 189 | if placeholders.iter().any(|n| n.0.as_str() == pattern.text()) { |
186 | match_.binding.insert(Var(pattern.text().to_string()), code.clone()); | 190 | match_.binding.insert(Var(pattern.text().to_string()), code.clone()); |
187 | true | 191 | true |
188 | } else { | 192 | } else { |
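Two rewrites recur in the ssr.rs hunks: `ok_or_else` takes a closure, so the error value is only constructed on the `None` path (the case clippy's `or_fun_call` lint covers), and `.find(..).is_some()` collapses into `Iterator::any`. A minimal sketch:

```rust
fn main() {
    let pattern = "foo($a:expr)";

    // The error string is built lazily, only if `find` returns None.
    let end_of_name: Result<usize, String> = pattern
        .find(':')
        .ok_or_else(|| String::from("Use $<name>:expr"));
    assert_eq!(end_of_name, Ok(6));

    // `any` instead of `find(..).is_some()`.
    let placeholders = ["$a", "$b"];
    assert!(placeholders.iter().any(|n| *n == "$a"));
}
```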
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml index dbe98f3a0..ee409e34e 100644 --- a/crates/ra_ide_db/Cargo.toml +++ b/crates/ra_ide_db/Cargo.toml | |||
@@ -11,17 +11,18 @@ doctest = false | |||
11 | wasm = [] | 11 | wasm = [] |
12 | 12 | ||
13 | [dependencies] | 13 | [dependencies] |
14 | either = "1.5" | 14 | either = "1.5.3" |
15 | format-buf = "1.0.0" | 15 | format-buf = "1.0.0" |
16 | indexmap = "1.3.0" | 16 | indexmap = "1.3.2" |
17 | itertools = "0.8.0" | 17 | itertools = "0.8.2" |
18 | join_to_string = "0.1.3" | 18 | join_to_string = "0.1.3" |
19 | log = "0.4.5" | 19 | log = "0.4.8" |
20 | rayon = "1.0.2" | 20 | rayon = "1.3.0" |
21 | fst = { version = "0.3.1", default-features = false } | 21 | fst = { version = "0.3.5", default-features = false } |
22 | rustc-hash = "1.0" | 22 | rustc-hash = "1.1.0" |
23 | unicase = "2.6.0" | ||
23 | superslice = "1.0.0" | 24 | superslice = "1.0.0" |
24 | once_cell = "1.2.0" | 25 | once_cell = "1.3.1" |
25 | 26 | ||
26 | ra_syntax = { path = "../ra_syntax" } | 27 | ra_syntax = { path = "../ra_syntax" } |
27 | ra_text_edit = { path = "../ra_text_edit" } | 28 | ra_text_edit = { path = "../ra_text_edit" } |
@@ -36,4 +37,4 @@ test_utils = { path = "../test_utils" } | |||
36 | hir = { path = "../ra_hir", package = "ra_hir" } | 37 | hir = { path = "../ra_hir", package = "ra_hir" } |
37 | 38 | ||
38 | [dev-dependencies] | 39 | [dev-dependencies] |
39 | insta = "0.13.0" | 40 | insta = "0.13.1" |
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs index 4668784d3..7e9310005 100644 --- a/crates/ra_ide_db/src/change.rs +++ b/crates/ra_ide_db/src/change.rs | |||
@@ -44,7 +44,7 @@ impl fmt::Debug for AnalysisChange { | |||
44 | if !self.libraries_added.is_empty() { | 44 | if !self.libraries_added.is_empty() { |
45 | d.field("libraries_added", &self.libraries_added.len()); | 45 | d.field("libraries_added", &self.libraries_added.len()); |
46 | } | 46 | } |
47 | if !self.crate_graph.is_none() { | 47 | if self.crate_graph.is_some() { |
48 | d.field("crate_graph", &self.crate_graph); | 48 | d.field("crate_graph", &self.crate_graph); |
49 | } | 49 | } |
50 | d.finish() | 50 | d.finish() |
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml index a3fc01f63..4dec24914 100644 --- a/crates/ra_mbe/Cargo.toml +++ b/crates/ra_mbe/Cargo.toml | |||
@@ -11,9 +11,9 @@ doctest = false | |||
11 | ra_syntax = { path = "../ra_syntax" } | 11 | ra_syntax = { path = "../ra_syntax" } |
12 | ra_parser = { path = "../ra_parser" } | 12 | ra_parser = { path = "../ra_parser" } |
13 | tt = { path = "../ra_tt", package = "ra_tt" } | 13 | tt = { path = "../ra_tt", package = "ra_tt" } |
14 | rustc-hash = "1.0.0" | 14 | rustc-hash = "1.1.0" |
15 | smallvec = "1.0.0" | 15 | smallvec = "1.2.0" |
16 | log = "0.4.5" | 16 | log = "0.4.8" |
17 | 17 | ||
18 | [dev-dependencies] | 18 | [dev-dependencies] |
19 | test_utils = { path = "../test_utils" } | 19 | test_utils = { path = "../test_utils" } |
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs index e36b5a412..2bdea11e1 100644 --- a/crates/ra_mbe/src/mbe_expander/matcher.rs +++ b/crates/ra_mbe/src/mbe_expander/matcher.rs | |||
@@ -101,7 +101,7 @@ fn match_subtree( | |||
101 | tt::Leaf::Literal(tt::Literal { text: lhs, .. }), | 101 | tt::Leaf::Literal(tt::Literal { text: lhs, .. }), |
102 | tt::Leaf::Literal(tt::Literal { text: rhs, .. }), | 102 | tt::Leaf::Literal(tt::Literal { text: rhs, .. }), |
103 | ) if lhs == rhs => (), | 103 | ) if lhs == rhs => (), |
104 | _ => Err(ExpandError::UnexpectedToken)?, | 104 | _ => return Err(ExpandError::UnexpectedToken), |
105 | } | 105 | } |
106 | } | 106 | } |
107 | Op::TokenTree(tt::TokenTree::Subtree(lhs)) => { | 107 | Op::TokenTree(tt::TokenTree::Subtree(lhs)) => { |
diff --git a/crates/ra_mbe/src/parser.rs b/crates/ra_mbe/src/parser.rs index 50b8011a9..10a6f300a 100644 --- a/crates/ra_mbe/src/parser.rs +++ b/crates/ra_mbe/src/parser.rs | |||
@@ -45,15 +45,15 @@ impl PartialEq for Separator { | |||
45 | } | 45 | } |
46 | } | 46 | } |
47 | 47 | ||
48 | pub(crate) fn parse_template<'a>( | 48 | pub(crate) fn parse_template( |
49 | template: &'a tt::Subtree, | 49 | template: &tt::Subtree, |
50 | ) -> impl Iterator<Item = Result<Op<'a>, ExpandError>> { | 50 | ) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> { |
51 | parse_inner(template, Mode::Template) | 51 | parse_inner(template, Mode::Template) |
52 | } | 52 | } |
53 | 53 | ||
54 | pub(crate) fn parse_pattern<'a>( | 54 | pub(crate) fn parse_pattern( |
55 | pattern: &'a tt::Subtree, | 55 | pattern: &tt::Subtree, |
56 | ) -> impl Iterator<Item = Result<Op<'a>, ExpandError>> { | 56 | ) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> { |
57 | parse_inner(pattern, Mode::Pattern) | 57 | parse_inner(pattern, Mode::Pattern) |
58 | } | 58 | } |
59 | 59 | ||
@@ -63,10 +63,7 @@ enum Mode { | |||
63 | Template, | 63 | Template, |
64 | } | 64 | } |
65 | 65 | ||
66 | fn parse_inner<'a>( | 66 | fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> { |
67 | src: &'a tt::Subtree, | ||
68 | mode: Mode, | ||
69 | ) -> impl Iterator<Item = Result<Op<'a>, ExpandError>> { | ||
70 | let mut src = TtIter::new(src); | 67 | let mut src = TtIter::new(src); |
71 | std::iter::from_fn(move || { | 68 | std::iter::from_fn(move || { |
72 | let first = src.next()?; | 69 | let first = src.next()?; |
@@ -100,7 +97,7 @@ fn next_op<'a>( | |||
100 | Op::Repeat { subtree, separator, kind } | 97 | Op::Repeat { subtree, separator, kind } |
101 | } | 98 | } |
102 | tt::TokenTree::Leaf(leaf) => match leaf { | 99 | tt::TokenTree::Leaf(leaf) => match leaf { |
103 | tt::Leaf::Punct(..) => Err(ExpandError::UnexpectedToken)?, | 100 | tt::Leaf::Punct(..) => return Err(ExpandError::UnexpectedToken), |
104 | tt::Leaf::Ident(ident) => { | 101 | tt::Leaf::Ident(ident) => { |
105 | let name = &ident.text; | 102 | let name = &ident.text; |
106 | let kind = eat_fragment_kind(src, mode)?; | 103 | let kind = eat_fragment_kind(src, mode)?; |
@@ -147,15 +144,15 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp | |||
147 | for tt in src { | 144 | for tt in src { |
148 | let tt = match tt { | 145 | let tt = match tt { |
149 | tt::TokenTree::Leaf(leaf) => leaf, | 146 | tt::TokenTree::Leaf(leaf) => leaf, |
150 | tt::TokenTree::Subtree(_) => Err(ExpandError::InvalidRepeat)?, | 147 | tt::TokenTree::Subtree(_) => return Err(ExpandError::InvalidRepeat), |
151 | }; | 148 | }; |
152 | let has_sep = match &separator { | 149 | let has_sep = match &separator { |
153 | Separator::Puncts(puncts) => puncts.len() != 0, | 150 | Separator::Puncts(puncts) => !puncts.is_empty(), |
154 | _ => true, | 151 | _ => true, |
155 | }; | 152 | }; |
156 | match tt { | 153 | match tt { |
157 | tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => { | 154 | tt::Leaf::Ident(_) | tt::Leaf::Literal(_) if has_sep => { |
158 | Err(ExpandError::InvalidRepeat)? | 155 | return Err(ExpandError::InvalidRepeat) |
159 | } | 156 | } |
160 | tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()), | 157 | tt::Leaf::Ident(ident) => separator = Separator::Ident(ident.clone()), |
161 | tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()), | 158 | tt::Leaf::Literal(lit) => separator = Separator::Literal(lit.clone()), |
@@ -168,11 +165,11 @@ fn parse_repeat(src: &mut TtIter) -> Result<(Option<Separator>, RepeatKind), Exp | |||
168 | match &mut separator { | 165 | match &mut separator { |
169 | Separator::Puncts(puncts) => { | 166 | Separator::Puncts(puncts) => { |
170 | if puncts.len() == 3 { | 167 | if puncts.len() == 3 { |
171 | Err(ExpandError::InvalidRepeat)? | 168 | return Err(ExpandError::InvalidRepeat); |
172 | } | 169 | } |
173 | puncts.push(punct.clone()) | 170 | puncts.push(punct.clone()) |
174 | } | 171 | } |
175 | _ => Err(ExpandError::InvalidRepeat)?, | 172 | _ => return Err(ExpandError::InvalidRepeat), |
176 | } | 173 | } |
177 | continue; | 174 | continue; |
178 | } | 175 | } |
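`Err(e)?` in these matchers worked because `?` routes the error through `From`, but clippy's `try_err` lint prefers a plain `return Err(e)`, which is what the hunks above switch to. A minimal sketch of the two forms:

```rust
#[derive(Debug, PartialEq)]
enum ExpandError {
    InvalidRepeat,
}

fn check_before(valid: bool) -> Result<(), ExpandError> {
    if !valid {
        // Works via the blanket `impl From<T> for T`, but is indirect.
        Err(ExpandError::InvalidRepeat)?;
    }
    Ok(())
}

fn check_after(valid: bool) -> Result<(), ExpandError> {
    if !valid {
        return Err(ExpandError::InvalidRepeat);
    }
    Ok(())
}

fn main() {
    assert_eq!(check_before(false), check_after(false));
    assert_eq!(check_before(true), check_after(true));
}
```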
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index c9f42b3dd..eb8b79e9a 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -124,7 +124,7 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken { | |||
124 | 124 | ||
125 | let idx = closing as usize; | 125 | let idx = closing as usize; |
126 | let kind = kinds[idx]; | 126 | let kind = kinds[idx]; |
127 | let text = if texts.len() > 0 { &texts[idx..texts.len() - (1 - idx)] } else { "" }; | 127 | let text = if !texts.is_empty() { &texts[idx..texts.len() - (1 - idx)] } else { "" }; |
128 | TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } | 128 | TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text) } |
129 | } | 129 | } |
130 | 130 | ||
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs index b72d2e9e6..b77b683b5 100644 --- a/crates/ra_parser/src/grammar/expressions/atom.rs +++ b/crates/ra_parser/src/grammar/expressions/atom.rs | |||
@@ -230,10 +230,8 @@ fn lambda_expr(p: &mut Parser) -> CompletedMarker { | |||
230 | p.eat(T![async]); | 230 | p.eat(T![async]); |
231 | p.eat(T![move]); | 231 | p.eat(T![move]); |
232 | params::param_list_closure(p); | 232 | params::param_list_closure(p); |
233 | if opt_fn_ret_type(p) { | 233 | if opt_fn_ret_type(p) && !p.at(T!['{']) { |
234 | if !p.at(T!['{']) { | 234 | p.error("expected `{`"); |
235 | p.error("expected `{`"); | ||
236 | } | ||
237 | } | 235 | } |
238 | 236 | ||
239 | if p.at_ts(EXPR_FIRST) { | 237 | if p.at_ts(EXPR_FIRST) { |
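The lambda_expr change collapses an `if` whose body is only another `if` into one condition joined with `&&` (clippy's `collapsible_if`). A minimal sketch with hypothetical boolean parameters instead of the parser state:

```rust
fn missing_body_error(has_ret_type: bool, at_l_curly: bool) -> Option<&'static str> {
    // Before:
    //     if has_ret_type {
    //         if !at_l_curly {
    //             return Some("expected `{`");
    //         }
    //     }
    // After:
    if has_ret_type && !at_l_curly {
        return Some("expected `{`");
    }
    None
}

fn main() {
    assert_eq!(missing_body_error(true, false), Some("expected `{`"));
    assert_eq!(missing_body_error(true, true), None);
    assert_eq!(missing_body_error(false, false), None);
}
```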
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index 54284c933..f8b43866c 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs | |||
@@ -21,7 +21,7 @@ use super::*; | |||
21 | // struct S; | 21 | // struct S; |
22 | pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { | 22 | pub(super) fn mod_contents(p: &mut Parser, stop_on_r_curly: bool) { |
23 | attributes::inner_attributes(p); | 23 | attributes::inner_attributes(p); |
24 | while !p.at(EOF) && !(stop_on_r_curly && p.at(T!['}'])) { | 24 | while !(stop_on_r_curly && p.at(T!['}']) || p.at(EOF)) { |
25 | item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) | 25 | item_or_macro(p, stop_on_r_curly, ItemFlavor::Mod) |
26 | } | 26 | } |
27 | } | 27 | } |
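The new loop condition in `mod_contents` is equivalent to the old one by De Morgan's law: `!eof && !(stop && at_r_curly)` equals `!((stop && at_r_curly) || eof)`; only the order of the tests changes. A quick exhaustive check over all flag combinations:

```rust
fn main() {
    for &eof in &[false, true] {
        for &stop_on_r_curly in &[false, true] {
            for &at_r_curly in &[false, true] {
                let old = !eof && !(stop_on_r_curly && at_r_curly);
                let new = !(stop_on_r_curly && at_r_curly || eof);
                assert_eq!(old, new);
            }
        }
    }
}
```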
diff --git a/crates/ra_parser/src/grammar/paths.rs b/crates/ra_parser/src/grammar/paths.rs index f5bf3d7ce..332acc1a0 100644 --- a/crates/ra_parser/src/grammar/paths.rs +++ b/crates/ra_parser/src/grammar/paths.rs | |||
@@ -94,7 +94,7 @@ fn path_segment(p: &mut Parser, mode: Mode, first: bool) { | |||
94 | 94 | ||
95 | fn opt_path_type_args(p: &mut Parser, mode: Mode) { | 95 | fn opt_path_type_args(p: &mut Parser, mode: Mode) { |
96 | match mode { | 96 | match mode { |
97 | Mode::Use => return, | 97 | Mode::Use => {} |
98 | Mode::Type => { | 98 | Mode::Type => { |
99 | // test path_fn_trait_args | 99 | // test path_fn_trait_args |
100 | // type F = Box<Fn(i32) -> ()>; | 100 | // type F = Box<Fn(i32) -> ()>; |
diff --git a/crates/ra_parser/src/parser.rs b/crates/ra_parser/src/parser.rs index 1071c46dc..76e2d4f7d 100644 --- a/crates/ra_parser/src/parser.rs +++ b/crates/ra_parser/src/parser.rs | |||
@@ -126,13 +126,13 @@ impl<'t> Parser<'t> { | |||
126 | } | 126 | } |
127 | 127 | ||
128 | fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool { | 128 | fn at_composite2(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind) -> bool { |
129 | let t1 = self.token_source.lookahead_nth(n + 0); | 129 | let t1 = self.token_source.lookahead_nth(n); |
130 | let t2 = self.token_source.lookahead_nth(n + 1); | 130 | let t2 = self.token_source.lookahead_nth(n + 1); |
131 | t1.kind == k1 && t1.is_jointed_to_next && t2.kind == k2 | 131 | t1.kind == k1 && t1.is_jointed_to_next && t2.kind == k2 |
132 | } | 132 | } |
133 | 133 | ||
134 | fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool { | 134 | fn at_composite3(&self, n: usize, k1: SyntaxKind, k2: SyntaxKind, k3: SyntaxKind) -> bool { |
135 | let t1 = self.token_source.lookahead_nth(n + 0); | 135 | let t1 = self.token_source.lookahead_nth(n); |
136 | let t2 = self.token_source.lookahead_nth(n + 1); | 136 | let t2 = self.token_source.lookahead_nth(n + 1); |
137 | let t3 = self.token_source.lookahead_nth(n + 2); | 137 | let t3 = self.token_source.lookahead_nth(n + 2); |
138 | (t1.kind == k1 && t1.is_jointed_to_next) | 138 | (t1.kind == k1 && t1.is_jointed_to_next) |
diff --git a/crates/ra_prof/Cargo.toml b/crates/ra_prof/Cargo.toml index 382e6219a..e06a6d7d2 100644 --- a/crates/ra_prof/Cargo.toml +++ b/crates/ra_prof/Cargo.toml | |||
@@ -9,13 +9,13 @@ publish = false | |||
9 | doctest = false | 9 | doctest = false |
10 | 10 | ||
11 | [dependencies] | 11 | [dependencies] |
12 | once_cell = "1.0.1" | 12 | once_cell = "1.3.1" |
13 | itertools = "0.8.0" | 13 | itertools = "0.8.2" |
14 | backtrace = "0.3.28" | 14 | backtrace = "0.3.44" |
15 | 15 | ||
16 | [target.'cfg(not(target_env = "msvc"))'.dependencies] | 16 | [target.'cfg(not(target_env = "msvc"))'.dependencies] |
17 | jemallocator = { version = "0.3.2", optional = true } | 17 | jemallocator = { version = "0.3.2", optional = true } |
18 | jemalloc-ctl = { version = "0.3.2", optional = true } | 18 | jemalloc-ctl = { version = "0.3.3", optional = true } |
19 | 19 | ||
20 | [features] | 20 | [features] |
21 | jemalloc = [ "jemallocator", "jemalloc-ctl" ] | 21 | jemalloc = [ "jemallocator", "jemalloc-ctl" ] |
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs index c267bc85f..660d85b42 100644 --- a/crates/ra_prof/src/lib.rs +++ b/crates/ra_prof/src/lib.rs | |||
@@ -214,7 +214,7 @@ impl Drop for Profiler { | |||
214 | let start = stack.starts.pop().unwrap(); | 214 | let start = stack.starts.pop().unwrap(); |
215 | let duration = start.elapsed(); | 215 | let duration = start.elapsed(); |
216 | let level = stack.starts.len(); | 216 | let level = stack.starts.len(); |
217 | stack.messages.push(Message { level, duration, label: label }); | 217 | stack.messages.push(Message { level, duration, label }); |
218 | if level == 0 { | 218 | if level == 0 { |
219 | let stdout = stderr(); | 219 | let stdout = stderr(); |
220 | let longer_than = stack.filter_data.longer_than; | 220 | let longer_than = stack.filter_data.longer_than; |
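In ra_prof, `label: label` becomes plain `label` via the struct field init shorthand (clippy's `redundant_field_names`). A minimal sketch with a stand-in `Message` struct:

```rust
struct Message {
    level: usize,
    label: String,
}

fn main() {
    let level = 0;
    let label = String::from("parse");
    // Before: Message { level: level, label: label }
    let msg = Message { level, label };
    assert_eq!(msg.level, 0);
    assert_eq!(msg.label, "parse");
}
```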
diff --git a/crates/ra_project_model/Cargo.toml b/crates/ra_project_model/Cargo.toml index 653d5bd14..6252241bf 100644 --- a/crates/ra_project_model/Cargo.toml +++ b/crates/ra_project_model/Cargo.toml | |||
@@ -8,16 +8,16 @@ authors = ["rust-analyzer developers"] | |||
8 | doctest = false | 8 | doctest = false |
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | log = "0.4.5" | 11 | log = "0.4.8" |
12 | rustc-hash = "1.0" | 12 | rustc-hash = "1.1.0" |
13 | 13 | ||
14 | cargo_metadata = "0.9.0" | 14 | cargo_metadata = "0.9.1" |
15 | 15 | ||
16 | ra_arena = { path = "../ra_arena" } | 16 | ra_arena = { path = "../ra_arena" } |
17 | ra_db = { path = "../ra_db" } | 17 | ra_db = { path = "../ra_db" } |
18 | ra_cfg = { path = "../ra_cfg" } | 18 | ra_cfg = { path = "../ra_cfg" } |
19 | 19 | ||
20 | serde = { version = "1.0.89", features = ["derive"] } | 20 | serde = { version = "1.0.104", features = ["derive"] } |
21 | serde_json = "1.0.39" | 21 | serde_json = "1.0.48" |
22 | 22 | ||
23 | anyhow = "1.0.26" | 23 | anyhow = "1.0.26" |
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs index 22d226a74..4fea459d5 100644 --- a/crates/ra_project_model/src/cargo_workspace.rs +++ b/crates/ra_project_model/src/cargo_workspace.rs | |||
@@ -164,7 +164,7 @@ impl CargoWorkspace { | |||
164 | // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures` | 164 | // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures` |
165 | // https://github.com/oli-obk/cargo_metadata/issues/79 | 165 | // https://github.com/oli-obk/cargo_metadata/issues/79 |
166 | meta.features(CargoOpt::NoDefaultFeatures); | 166 | meta.features(CargoOpt::NoDefaultFeatures); |
167 | } else if cargo_features.features.len() > 0 { | 167 | } else if !cargo_features.features.is_empty() { |
168 | meta.features(CargoOpt::SomeFeatures(cargo_features.features.clone())); | 168 | meta.features(CargoOpt::SomeFeatures(cargo_features.features.clone())); |
169 | } | 169 | } |
170 | if let Some(parent) = cargo_toml.parent() { | 170 | if let Some(parent) = cargo_toml.parent() { |
@@ -197,7 +197,7 @@ impl CargoWorkspace { | |||
197 | let pkg_data = &mut packages[pkg]; | 197 | let pkg_data = &mut packages[pkg]; |
198 | pkg_by_id.insert(id, pkg); | 198 | pkg_by_id.insert(id, pkg); |
199 | for meta_tgt in meta_pkg.targets { | 199 | for meta_tgt in meta_pkg.targets { |
200 | let is_proc_macro = meta_tgt.kind.as_slice() == &["proc-macro"]; | 200 | let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"]; |
201 | let tgt = targets.alloc(TargetData { | 201 | let tgt = targets.alloc(TargetData { |
202 | pkg, | 202 | pkg, |
203 | name: meta_tgt.name, | 203 | name: meta_tgt.name, |
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs index 250255813..e35f7fbbc 100644 --- a/crates/ra_project_model/src/lib.rs +++ b/crates/ra_project_model/src/lib.rs | |||
@@ -197,8 +197,9 @@ impl ProjectWorkspace { | |||
197 | if let (Some(&from), Some(&to)) = | 197 | if let (Some(&from), Some(&to)) = |
198 | (crates.get(&from_crate_id), crates.get(&to_crate_id)) | 198 | (crates.get(&from_crate_id), crates.get(&to_crate_id)) |
199 | { | 199 | { |
200 | if let Err(_) = | 200 | if crate_graph |
201 | crate_graph.add_dep(from, CrateName::new(&dep.name).unwrap(), to) | 201 | .add_dep(from, CrateName::new(&dep.name).unwrap(), to) |
202 | .is_err() | ||
202 | { | 203 | { |
203 | log::error!( | 204 | log::error!( |
204 | "cyclic dependency {:?} -> {:?}", | 205 | "cyclic dependency {:?} -> {:?}", |
@@ -237,8 +238,7 @@ impl ProjectWorkspace { | |||
237 | if let (Some(&from), Some(&to)) = | 238 | if let (Some(&from), Some(&to)) = |
238 | (sysroot_crates.get(&from), sysroot_crates.get(&to)) | 239 | (sysroot_crates.get(&from), sysroot_crates.get(&to)) |
239 | { | 240 | { |
240 | if let Err(_) = | 241 | if crate_graph.add_dep(from, CrateName::new(name).unwrap(), to).is_err() |
241 | crate_graph.add_dep(from, CrateName::new(name).unwrap(), to) | ||
242 | { | 242 | { |
243 | log::error!("cyclic dependency between sysroot crates") | 243 | log::error!("cyclic dependency between sysroot crates") |
244 | } | 244 | } |
@@ -279,11 +279,14 @@ impl ProjectWorkspace { | |||
279 | } | 279 | } |
280 | if tgt.is_proc_macro(&cargo) { | 280 | if tgt.is_proc_macro(&cargo) { |
281 | if let Some(proc_macro) = libproc_macro { | 281 | if let Some(proc_macro) = libproc_macro { |
282 | if let Err(_) = crate_graph.add_dep( | 282 | if crate_graph |
283 | crate_id, | 283 | .add_dep( |
284 | CrateName::new("proc_macro").unwrap(), | 284 | crate_id, |
285 | proc_macro, | 285 | CrateName::new("proc_macro").unwrap(), |
286 | ) { | 286 | proc_macro, |
287 | ) | ||
288 | .is_err() | ||
289 | { | ||
287 | log::error!( | 290 | log::error!( |
288 | "cyclic dependency on proc_macro for {}", | 291 | "cyclic dependency on proc_macro for {}", |
289 | pkg.name(&cargo) | 292 | pkg.name(&cargo) |
@@ -299,15 +302,19 @@ impl ProjectWorkspace { | |||
299 | // Set deps to the core, std and to the lib target of the current package | 302 | // Set deps to the core, std and to the lib target of the current package |
300 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { | 303 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { |
301 | if let Some(to) = lib_tgt { | 304 | if let Some(to) = lib_tgt { |
302 | if to != from { | 305 | if to != from |
303 | if let Err(_) = crate_graph.add_dep( | 306 | && crate_graph |
304 | from, | 307 | .add_dep( |
305 | // For root projects with dashes in their name, | 308 | from, |
306 | // cargo metadata does not do any normalization, | 309 | // For root projects with dashes in their name, |
307 | // so we do it ourselves currently | 310 | // cargo metadata does not do any normalization, |
308 | CrateName::normalize_dashes(pkg.name(&cargo)), | 311 | // so we do it ourselves currently |
309 | to, | 312 | CrateName::normalize_dashes(pkg.name(&cargo)), |
310 | ) { | 313 | to, |
314 | ) | ||
315 | .is_err() | ||
316 | { | ||
317 | { | ||
311 | log::error!( | 318 | log::error!( |
312 | "cyclic dependency between targets of {}", | 319 | "cyclic dependency between targets of {}", |
313 | pkg.name(&cargo) | 320 | pkg.name(&cargo) |
@@ -318,22 +325,25 @@ impl ProjectWorkspace { | |||
318 | // core is added as a dependency before std in order to | 325 | // core is added as a dependency before std in order to |
319 | // mimic rustcs dependency order | 326 | // mimic rustcs dependency order |
320 | if let Some(core) = libcore { | 327 | if let Some(core) = libcore { |
321 | if let Err(_) = | 328 | if crate_graph |
322 | crate_graph.add_dep(from, CrateName::new("core").unwrap(), core) | 329 | .add_dep(from, CrateName::new("core").unwrap(), core) |
330 | .is_err() | ||
323 | { | 331 | { |
324 | log::error!("cyclic dependency on core for {}", pkg.name(&cargo)) | 332 | log::error!("cyclic dependency on core for {}", pkg.name(&cargo)) |
325 | } | 333 | } |
326 | } | 334 | } |
327 | if let Some(alloc) = liballoc { | 335 | if let Some(alloc) = liballoc { |
328 | if let Err(_) = | 336 | if crate_graph |
329 | crate_graph.add_dep(from, CrateName::new("alloc").unwrap(), alloc) | 337 | .add_dep(from, CrateName::new("alloc").unwrap(), alloc) |
338 | .is_err() | ||
330 | { | 339 | { |
331 | log::error!("cyclic dependency on alloc for {}", pkg.name(&cargo)) | 340 | log::error!("cyclic dependency on alloc for {}", pkg.name(&cargo)) |
332 | } | 341 | } |
333 | } | 342 | } |
334 | if let Some(std) = libstd { | 343 | if let Some(std) = libstd { |
335 | if let Err(_) = | 344 | if crate_graph |
336 | crate_graph.add_dep(from, CrateName::new("std").unwrap(), std) | 345 | .add_dep(from, CrateName::new("std").unwrap(), std) |
346 | .is_err() | ||
337 | { | 347 | { |
338 | log::error!("cyclic dependency on std for {}", pkg.name(&cargo)) | 348 | log::error!("cyclic dependency on std for {}", pkg.name(&cargo)) |
339 | } | 349 | } |
@@ -347,11 +357,10 @@ impl ProjectWorkspace { | |||
347 | for dep in pkg.dependencies(&cargo) { | 357 | for dep in pkg.dependencies(&cargo) { |
348 | if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { | 358 | if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { |
349 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { | 359 | for &from in pkg_crates.get(&pkg).into_iter().flatten() { |
350 | if let Err(_) = crate_graph.add_dep( | 360 | if crate_graph |
351 | from, | 361 | .add_dep(from, CrateName::new(&dep.name).unwrap(), to) |
352 | CrateName::new(&dep.name).unwrap(), | 362 | .is_err() |
353 | to, | 363 | { |
354 | ) { | ||
355 | log::error!( | 364 | log::error!( |
356 | "cyclic dependency {} -> {}", | 365 | "cyclic dependency {} -> {}", |
357 | pkg.name(&cargo), | 366 | pkg.name(&cargo), |
@@ -409,7 +418,7 @@ fn find_cargo_toml(path: &Path) -> Result<PathBuf> { | |||
409 | } | 418 | } |
410 | curr = path.parent(); | 419 | curr = path.parent(); |
411 | } | 420 | } |
412 | Err(CargoTomlNotFoundError(path.to_path_buf()))? | 421 | Err(CargoTomlNotFoundError(path.to_path_buf()).into()) |
413 | } | 422 | } |
414 | 423 | ||
415 | pub fn get_rustc_cfg_options() -> CfgOptions { | 424 | pub fn get_rustc_cfg_options() -> CfgOptions { |
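The last hunk above also drops the `Err(...)?` idiom: using `?` on a freshly built `Err` only performs the error conversion and returns, which clippy's `try_err` lint rewrites to the plainer `Err(e.into())`. A sketch under an assumed boxed-error alias (the crate's real `Result` alias and error message may differ):

    use std::{error::Error, fmt, path::{Path, PathBuf}};

    #[derive(Debug)]
    struct CargoTomlNotFoundError(PathBuf);

    impl fmt::Display for CargoTomlNotFoundError {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "can't find Cargo.toml starting from {}", self.0.display())
        }
    }

    impl Error for CargoTomlNotFoundError {}

    type Result<T> = std::result::Result<T, Box<dyn Error>>;

    fn find_cargo_toml(path: &Path) -> Result<PathBuf> {
        // ...the real function walks parent directories before giving up...
        // Before: Err(CargoTomlNotFoundError(path.to_path_buf()))?
        Err(CargoTomlNotFoundError(path.to_path_buf()).into())
    }

    fn main() {
        assert!(find_cargo_toml(Path::new("/nowhere")).is_err());
    }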
diff --git a/crates/ra_syntax/Cargo.toml b/crates/ra_syntax/Cargo.toml index cb72972c5..7891628dc 100644 --- a/crates/ra_syntax/Cargo.toml +++ b/crates/ra_syntax/Cargo.toml | |||
@@ -11,12 +11,12 @@ repository = "https://github.com/rust-analyzer/rust-analyzer" | |||
11 | doctest = false | 11 | doctest = false |
12 | 12 | ||
13 | [dependencies] | 13 | [dependencies] |
14 | itertools = "0.8.0" | 14 | itertools = "0.8.2" |
15 | rowan = "0.9.0" | 15 | rowan = "0.9.0" |
16 | rustc_lexer = "0.1.0" | 16 | rustc_lexer = "0.1.0" |
17 | rustc-hash = "1.0.1" | 17 | rustc-hash = "1.1.0" |
18 | arrayvec = "0.5.1" | 18 | arrayvec = "0.5.1" |
19 | once_cell = "1.2.0" | 19 | once_cell = "1.3.1" |
20 | 20 | ||
21 | ra_text_edit = { path = "../ra_text_edit" } | 21 | ra_text_edit = { path = "../ra_text_edit" } |
22 | ra_parser = { path = "../ra_parser" } | 22 | ra_parser = { path = "../ra_parser" } |
@@ -24,9 +24,9 @@ ra_parser = { path = "../ra_parser" } | |||
24 | # This crate transitively depends on `smol_str` via `rowan`. | 24 | # This crate transitively depends on `smol_str` via `rowan`. |
25 | # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here | 25 | # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here |
26 | # to reduce number of compilations | 26 | # to reduce number of compilations |
27 | smol_str = { version = "0.1.12", features = ["serde"] } | 27 | smol_str = { version = "0.1.15", features = ["serde"] } |
28 | serde = { version = "1", features = ["derive"] } | 28 | serde = { version = "1.0.104", features = ["derive"] } |
29 | 29 | ||
30 | [dev-dependencies] | 30 | [dev-dependencies] |
31 | test_utils = { path = "../test_utils" } | 31 | test_utils = { path = "../test_utils" } |
32 | walkdir = "2.2.0" | 32 | walkdir = "2.3.1" |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index acf677e7d..21fca99a6 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -95,16 +95,17 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { | |||
95 | lhs: SyntaxElement, | 95 | lhs: SyntaxElement, |
96 | rhs: SyntaxElement, | 96 | rhs: SyntaxElement, |
97 | ) { | 97 | ) { |
98 | if lhs.kind() == rhs.kind() && lhs.text_range().len() == rhs.text_range().len() { | 98 | if lhs.kind() == rhs.kind() |
99 | if match (&lhs, &rhs) { | 99 | && lhs.text_range().len() == rhs.text_range().len() |
100 | && match (&lhs, &rhs) { | ||
100 | (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { | 101 | (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => { |
101 | lhs.green() == rhs.green() || lhs.text() == rhs.text() | 102 | lhs.green() == rhs.green() || lhs.text() == rhs.text() |
102 | } | 103 | } |
103 | (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), | 104 | (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(), |
104 | _ => false, | 105 | _ => false, |
105 | } { | ||
106 | return; | ||
107 | } | 106 | } |
107 | { | ||
108 | return; | ||
108 | } | 109 | } |
109 | if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) { | 110 | if let (Some(lhs), Some(rhs)) = (lhs.as_node(), rhs.as_node()) { |
110 | if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() { | 111 | if lhs.children_with_tokens().count() == rhs.children_with_tokens().count() { |
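This hunk folds a nested `if` into one condition, with the inner `match` joining the chain of `&&` operands; the body and behaviour are unchanged. A self-contained sketch of the same shape (simplified string comparison, not rust-analyzer's syntax types):

    fn same_text(lhs: &str, rhs: &str, strict: bool) {
        // One condition instead of `if a { if b { return; } }`.
        if lhs.len() == rhs.len()
            && match strict {
                true => lhs == rhs,
                false => lhs.eq_ignore_ascii_case(rhs),
            }
        {
            return;
        }
        println!("{} and {} differ", lhs, rhs);
    }

    fn main() {
        same_text("Foo", "foo", false); // equal ignoring case: early return
        same_text("Foo", "Bar", true);  // prints the difference
    }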
diff --git a/crates/ra_syntax/src/ast/expr_extensions.rs b/crates/ra_syntax/src/ast/expr_extensions.rs index 2e50a095c..77cceb382 100644 --- a/crates/ra_syntax/src/ast/expr_extensions.rs +++ b/crates/ra_syntax/src/ast/expr_extensions.rs | |||
@@ -30,7 +30,7 @@ pub enum ElseBranch { | |||
30 | 30 | ||
31 | impl ast::IfExpr { | 31 | impl ast::IfExpr { |
32 | pub fn then_branch(&self) -> Option<ast::BlockExpr> { | 32 | pub fn then_branch(&self) -> Option<ast::BlockExpr> { |
33 | self.blocks().nth(0) | 33 | self.blocks().next() |
34 | } | 34 | } |
35 | pub fn else_branch(&self) -> Option<ElseBranch> { | 35 | pub fn else_branch(&self) -> Option<ElseBranch> { |
36 | let res = match self.blocks().nth(1) { | 36 | let res = match self.blocks().nth(1) { |
@@ -208,7 +208,7 @@ impl ast::BinExpr { | |||
208 | } | 208 | } |
209 | 209 | ||
210 | pub fn lhs(&self) -> Option<ast::Expr> { | 210 | pub fn lhs(&self) -> Option<ast::Expr> { |
211 | children(self).nth(0) | 211 | children(self).next() |
212 | } | 212 | } |
213 | 213 | ||
214 | pub fn rhs(&self) -> Option<ast::Expr> { | 214 | pub fn rhs(&self) -> Option<ast::Expr> { |
@@ -271,7 +271,7 @@ impl ast::RangeExpr { | |||
271 | 271 | ||
272 | impl ast::IndexExpr { | 272 | impl ast::IndexExpr { |
273 | pub fn base(&self) -> Option<ast::Expr> { | 273 | pub fn base(&self) -> Option<ast::Expr> { |
274 | children(self).nth(0) | 274 | children(self).next() |
275 | } | 275 | } |
276 | pub fn index(&self) -> Option<ast::Expr> { | 276 | pub fn index(&self) -> Option<ast::Expr> { |
277 | children(self).nth(1) | 277 | children(self).nth(1) |
@@ -287,7 +287,7 @@ impl ast::ArrayExpr { | |||
287 | pub fn kind(&self) -> ArrayExprKind { | 287 | pub fn kind(&self) -> ArrayExprKind { |
288 | if self.is_repeat() { | 288 | if self.is_repeat() { |
289 | ArrayExprKind::Repeat { | 289 | ArrayExprKind::Repeat { |
290 | initializer: children(self).nth(0), | 290 | initializer: children(self).next(), |
291 | repeat: children(self).nth(1), | 291 | repeat: children(self).nth(1), |
292 | } | 292 | } |
293 | } else { | 293 | } else { |
@@ -328,10 +328,10 @@ impl ast::Literal { | |||
328 | } | 328 | } |
329 | 329 | ||
330 | pub fn kind(&self) -> LiteralKind { | 330 | pub fn kind(&self) -> LiteralKind { |
331 | const INT_SUFFIXES: [&'static str; 12] = [ | 331 | const INT_SUFFIXES: [&str; 12] = [ |
332 | "u64", "u32", "u16", "u8", "usize", "isize", "i64", "i32", "i16", "i8", "u128", "i128", | 332 | "u64", "u32", "u16", "u8", "usize", "isize", "i64", "i32", "i16", "i8", "u128", "i128", |
333 | ]; | 333 | ]; |
334 | const FLOAT_SUFFIXES: [&'static str; 2] = ["f32", "f64"]; | 334 | const FLOAT_SUFFIXES: [&str; 2] = ["f32", "f64"]; |
335 | 335 | ||
336 | let token = self.token(); | 336 | let token = self.token(); |
337 | 337 | ||
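Two small cleanups in this file recur later in the diff (heavy_tests/main.rs and test_utils/src/lib.rs): constants already carry a `'static` lifetime, so `[&'static str; N]` can be written `[&str; N]`, and `iter.nth(0)` is simply `iter.next()`. Illustrative sketch, not the crate's AST helpers:

    // was: const FLOAT_SUFFIXES: [&'static str; 2] = ["f32", "f64"];
    const FLOAT_SUFFIXES: [&str; 2] = ["f32", "f64"];

    fn main() {
        let mut suffixes = FLOAT_SUFFIXES.iter();
        // Before: suffixes.nth(0) -- `.next()` yields the same first element.
        assert_eq!(suffixes.next(), Some(&"f32"));
        assert_eq!(suffixes.next(), Some(&"f64"));
    }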
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index 89d1403e7..7c20fcc10 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -152,7 +152,7 @@ pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::Mat | |||
152 | format!(" {}{}\n", arm.syntax(), comma) | 152 | format!(" {}{}\n", arm.syntax(), comma) |
153 | }) | 153 | }) |
154 | .collect::<String>(); | 154 | .collect::<String>(); |
155 | return from_text(&format!("{}", arms_str)); | 155 | return from_text(&arms_str); |
156 | 156 | ||
157 | fn from_text(text: &str) -> ast::MatchArmList { | 157 | fn from_text(text: &str) -> ast::MatchArmList { |
158 | ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) | 158 | ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text)) |
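Here `format!("{}", arms_str)` on a value that is already a `String` only allocates a copy, the pattern clippy's `useless_format` lint flags; the same cleanup appears again below in analysis_stats.rs as `msg.to_string()`. Minimal sketch with made-up data:

    fn from_text(text: &str) -> usize {
        text.len()
    }

    fn main() {
        let arms_str = String::from(" () => (),\n");
        // Before: from_text(&format!("{}", arms_str));
        let n = from_text(&arms_str);
        // When an owned String really is needed, `.to_string()` is the direct form.
        let owned = arms_str.to_string();
        assert_eq!(n, owned.len());
    }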
diff --git a/crates/ra_syntax/src/parsing/text_token_source.rs b/crates/ra_syntax/src/parsing/text_token_source.rs index e793f93a4..e2433913c 100644 --- a/crates/ra_syntax/src/parsing/text_token_source.rs +++ b/crates/ra_syntax/src/parsing/text_token_source.rs | |||
@@ -48,7 +48,7 @@ impl<'t> TokenSource for TextTokenSource<'t> { | |||
48 | 48 | ||
49 | fn is_keyword(&self, kw: &str) -> bool { | 49 | fn is_keyword(&self, kw: &str) -> bool { |
50 | let pos = self.curr.1; | 50 | let pos = self.curr.1; |
51 | if !(pos < self.tokens.len()) { | 51 | if pos >= self.tokens.len() { |
52 | return false; | 52 | return false; |
53 | } | 53 | } |
54 | let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len); | 54 | let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len); |
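Flipping the negated bound check, `!(pos < len)` into `pos >= len`, says the same thing without the extra negation (the rewrite clippy's `nonminimal_bool` lint suggests). Tiny stand-in example, not the parser's real token source:

    fn is_keyword(pos: usize, tokens: &[&str], kw: &str) -> bool {
        if pos >= tokens.len() {
            return false;
        }
        tokens[pos] == kw
    }

    fn main() {
        assert!(is_keyword(0, &["fn", "main"], "fn"));
        assert!(!is_keyword(5, &["fn", "main"], "fn"));
    }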
diff --git a/crates/ra_text_edit/Cargo.toml b/crates/ra_text_edit/Cargo.toml index 4490ae43b..a32149299 100644 --- a/crates/ra_text_edit/Cargo.toml +++ b/crates/ra_text_edit/Cargo.toml | |||
@@ -9,7 +9,7 @@ publish = false | |||
9 | doctest = false | 9 | doctest = false |
10 | 10 | ||
11 | [dependencies] | 11 | [dependencies] |
12 | text_unit = "0.1.6" | 12 | text_unit = "0.1.9" |
13 | 13 | ||
14 | [dev-dependencies] | 14 | [dev-dependencies] |
15 | test_utils = { path = "../test_utils" } | 15 | test_utils = { path = "../test_utils" } |
diff --git a/crates/ra_tt/Cargo.toml b/crates/ra_tt/Cargo.toml index c9601fdcc..f7230a9ca 100644 --- a/crates/ra_tt/Cargo.toml +++ b/crates/ra_tt/Cargo.toml | |||
@@ -10,4 +10,4 @@ doctest = false | |||
10 | [dependencies] | 10 | [dependencies] |
11 | # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here | 11 | # ideally, `serde` should be enabled by `rust-analyzer`, but we enable it here |
12 | # to reduce number of compilations | 12 | # to reduce number of compilations |
13 | smol_str = { version = "0.1.12", features = ["serde"] } | 13 | smol_str = { version = "0.1.15", features = ["serde"] } |
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml index 3dae43d2a..c5d6e3831 100644 --- a/crates/rust-analyzer/Cargo.toml +++ b/crates/rust-analyzer/Cargo.toml | |||
@@ -13,32 +13,32 @@ name = "rust-analyzer" | |||
13 | path = "./src/bin/main.rs" | 13 | path = "./src/bin/main.rs" |
14 | 14 | ||
15 | [dependencies] | 15 | [dependencies] |
16 | anyhow = "1.0" | 16 | anyhow = "1.0.26" |
17 | crossbeam-channel = "0.4" | 17 | crossbeam-channel = "0.4.0" |
18 | either = "1.5" | 18 | either = "1.5.3" |
19 | env_logger = { version = "0.7.1", default-features = false } | 19 | env_logger = { version = "0.7.1", default-features = false } |
20 | globset = "0.4.4" | 20 | globset = "0.4.4" |
21 | itertools = "0.8.0" | 21 | itertools = "0.8.2" |
22 | jod-thread = "0.1.0" | 22 | jod-thread = "0.1.0" |
23 | log = "0.4.3" | 23 | log = "0.4.8" |
24 | lsp-types = { version = "0.70.0", features = ["proposed"] } | 24 | lsp-types = { version = "0.70.1", features = ["proposed"] } |
25 | parking_lot = "0.10.0" | 25 | parking_lot = "0.10.0" |
26 | pico-args = "0.3.0" | 26 | pico-args = "0.3.1" |
27 | rand = { version = "0.7.0", features = ["small_rng"] } | 27 | rand = { version = "0.7.3", features = ["small_rng"] } |
28 | relative-path = "1.0.0" | 28 | relative-path = "1.0.0" |
29 | rustc-hash = "1.0" | 29 | rustc-hash = "1.1.0" |
30 | serde = { version = "1.0.83", features = ["derive"] } | 30 | serde = { version = "1.0.104", features = ["derive"] } |
31 | serde_json = "1.0.34" | 31 | serde_json = "1.0.48" |
32 | threadpool = "1.7.1" | 32 | threadpool = "1.7.1" |
33 | 33 | ||
34 | lsp-server = "0.3.0" | 34 | lsp-server = "0.3.1" |
35 | ra_cargo_watch = { path = "../ra_cargo_watch" } | 35 | ra_cargo_watch = { path = "../ra_cargo_watch" } |
36 | ra_ide = { path = "../ra_ide" } | 36 | ra_ide = { path = "../ra_ide" } |
37 | ra_prof = { path = "../ra_prof" } | 37 | ra_prof = { path = "../ra_prof" } |
38 | ra_project_model = { path = "../ra_project_model" } | 38 | ra_project_model = { path = "../ra_project_model" } |
39 | ra_syntax = { path = "../ra_syntax" } | 39 | ra_syntax = { path = "../ra_syntax" } |
40 | ra_text_edit = { path = "../ra_text_edit" } | 40 | ra_text_edit = { path = "../ra_text_edit" } |
41 | ra_vfs = "0.5.0" | 41 | ra_vfs = "0.5.2" |
42 | 42 | ||
43 | # This should only be used in CLI | 43 | # This should only be used in CLI |
44 | ra_db = { path = "../ra_db" } | 44 | ra_db = { path = "../ra_db" } |
@@ -48,10 +48,10 @@ hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } | |||
48 | 48 | ||
49 | 49 | ||
50 | [target.'cfg(windows)'.dependencies] | 50 | [target.'cfg(windows)'.dependencies] |
51 | winapi = "0.3" | 51 | winapi = "0.3.8" |
52 | 52 | ||
53 | [dev-dependencies] | 53 | [dev-dependencies] |
54 | tempfile = "3" | 54 | tempfile = "3.1.0" |
55 | test_utils = { path = "../test_utils" } | 55 | test_utils = { path = "../test_utils" } |
56 | 56 | ||
57 | [features] | 57 | [features] |
diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 69e709a25..dd15b3458 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs | |||
@@ -4,7 +4,7 @@ | |||
4 | mod args; | 4 | mod args; |
5 | 5 | ||
6 | use lsp_server::Connection; | 6 | use lsp_server::Connection; |
7 | use ra_prof; | 7 | |
8 | use rust_analyzer::{cli, from_json, show_message, Result, ServerConfig}; | 8 | use rust_analyzer::{cli, from_json, show_message, Result, ServerConfig}; |
9 | 9 | ||
10 | use crate::args::HelpPrinted; | 10 | use crate::args::HelpPrinted; |
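The deleted `use ra_prof;` was a bare crate import: under the 2018 edition a Cargo dependency is in scope without it, so a crate only referenced through fully qualified paths needs no `use` at all. `std` shows the same behaviour in a stand-alone sketch:

    fn main() {
        // No `use std;` required for a path-qualified call.
        let exe = std::env::args().next();
        println!("{:?}", exe);
    }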
diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 99ab6e443..4d59db1ee 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs | |||
@@ -130,7 +130,7 @@ pub fn analysis_stats( | |||
130 | write!(msg, " ({:?} {})", path, syntax_range).unwrap(); | 130 | write!(msg, " ({:?} {})", path, syntax_range).unwrap(); |
131 | } | 131 | } |
132 | if verbosity.is_spammy() { | 132 | if verbosity.is_spammy() { |
133 | bar.println(format!("{}", msg)); | 133 | bar.println(msg.to_string()); |
134 | } | 134 | } |
135 | bar.set_message(&msg); | 135 | bar.set_message(&msg); |
136 | let f_id = FunctionId::from(f); | 136 | let f_id = FunctionId::from(f); |
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index dc16a234d..71917a62e 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs | |||
@@ -206,17 +206,17 @@ pub fn main_loop( | |||
206 | let event = select! { | 206 | let event = select! { |
207 | recv(&connection.receiver) -> msg => match msg { | 207 | recv(&connection.receiver) -> msg => match msg { |
208 | Ok(msg) => Event::Msg(msg), | 208 | Ok(msg) => Event::Msg(msg), |
209 | Err(RecvError) => Err("client exited without shutdown")?, | 209 | Err(RecvError) => return Err("client exited without shutdown".into()), |
210 | }, | 210 | }, |
211 | recv(task_receiver) -> task => Event::Task(task.unwrap()), | 211 | recv(task_receiver) -> task => Event::Task(task.unwrap()), |
212 | recv(world_state.task_receiver) -> task => match task { | 212 | recv(world_state.task_receiver) -> task => match task { |
213 | Ok(task) => Event::Vfs(task), | 213 | Ok(task) => Event::Vfs(task), |
214 | Err(RecvError) => Err("vfs died")?, | 214 | Err(RecvError) => return Err("vfs died".into()), |
215 | }, | 215 | }, |
216 | recv(libdata_receiver) -> data => Event::Lib(data.unwrap()), | 216 | recv(libdata_receiver) -> data => Event::Lib(data.unwrap()), |
217 | recv(world_state.check_watcher.task_recv) -> task => match task { | 217 | recv(world_state.check_watcher.task_recv) -> task => match task { |
218 | Ok(task) => Event::CheckWatcher(task), | 218 | Ok(task) => Event::CheckWatcher(task), |
219 | Err(RecvError) => Err("check watcher died")?, | 219 | Err(RecvError) => return Err("check watcher died".into()), |
220 | } | 220 | } |
221 | }; | 221 | }; |
222 | if let Event::Msg(Message::Request(req)) = &event { | 222 | if let Event::Msg(Message::Request(req)) = &event { |
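In the select! arms above, the bail-outs become explicit `return Err(...)` calls instead of leaning on `Err("...")?` to convert and return; the message still becomes the error type via `.into()`. Sketch under an assumed boxed-error alias (the real main loop defines its own `Result`):

    use std::error::Error;

    type Result<T> = std::result::Result<T, Box<dyn Error>>;

    fn next_event(connected: bool) -> Result<&'static str> {
        if !connected {
            // Before: Err("client exited without shutdown")?
            return Err("client exited without shutdown".into());
        }
        Ok("message")
    }

    fn main() {
        assert!(next_event(false).is_err());
        assert_eq!(next_event(true).unwrap(), "message");
    }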
diff --git a/crates/rust-analyzer/tests/heavy_tests/main.rs b/crates/rust-analyzer/tests/heavy_tests/main.rs index 3af63d9cf..970185dec 100644 --- a/crates/rust-analyzer/tests/heavy_tests/main.rs +++ b/crates/rust-analyzer/tests/heavy_tests/main.rs | |||
@@ -17,7 +17,7 @@ use test_utils::skip_slow_tests; | |||
17 | 17 | ||
18 | use crate::support::{project, Project}; | 18 | use crate::support::{project, Project}; |
19 | 19 | ||
20 | const PROFILE: &'static str = ""; | 20 | const PROFILE: &str = ""; |
21 | // const PROFILE: &'static str = "*@3>100"; | 21 | // const PROFILE: &'static str = "*@3>100"; |
22 | 22 | ||
23 | #[test] | 23 | #[test] |
diff --git a/crates/rust-analyzer/tests/heavy_tests/support.rs b/crates/rust-analyzer/tests/heavy_tests/support.rs index 5b90b3218..e28ae61fe 100644 --- a/crates/rust-analyzer/tests/heavy_tests/support.rs +++ b/crates/rust-analyzer/tests/heavy_tests/support.rs | |||
@@ -52,7 +52,7 @@ impl<'a> Project<'a> { | |||
52 | let tmp_dir = self.tmp_dir.unwrap_or_else(|| TempDir::new().unwrap()); | 52 | let tmp_dir = self.tmp_dir.unwrap_or_else(|| TempDir::new().unwrap()); |
53 | static INIT: Once = Once::new(); | 53 | static INIT: Once = Once::new(); |
54 | INIT.call_once(|| { | 54 | INIT.call_once(|| { |
55 | let _ = env_logger::builder().is_test(true).try_init().unwrap(); | 55 | env_logger::builder().is_test(true).try_init().unwrap(); |
56 | ra_prof::set_filter(if crate::PROFILE.is_empty() { | 56 | ra_prof::set_filter(if crate::PROFILE.is_empty() { |
57 | ra_prof::Filter::disabled() | 57 | ra_prof::Filter::disabled() |
58 | } else { | 58 | } else { |
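The test support change just drops a redundant `let _ =`: after `.unwrap()` the expression evaluates to `()`, so there is nothing left to bind. Trivial sketch (`try_init` below is a stand-in, not env_logger's builder):

    fn try_init() -> Result<(), String> {
        Ok(())
    }

    fn main() {
        // Before: let _ = try_init().unwrap();
        try_init().unwrap();
    }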
diff --git a/crates/test_utils/Cargo.toml b/crates/test_utils/Cargo.toml index a71366cc4..971592b73 100644 --- a/crates/test_utils/Cargo.toml +++ b/crates/test_utils/Cargo.toml | |||
@@ -9,5 +9,5 @@ doctest = false | |||
9 | 9 | ||
10 | [dependencies] | 10 | [dependencies] |
11 | difference = "2.0.0" | 11 | difference = "2.0.0" |
12 | text_unit = "0.1.2" | 12 | text_unit = "0.1.9" |
13 | serde_json = "1.0.34" | 13 | serde_json = "1.0.48" |
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs index 336c594a6..e6e8d7110 100644 --- a/crates/test_utils/src/lib.rs +++ b/crates/test_utils/src/lib.rs | |||
@@ -279,7 +279,7 @@ pub fn find_mismatch<'a>(expected: &'a Value, actual: &'a Value) -> Option<(&'a | |||
279 | return Some((expected, actual)); | 279 | return Some((expected, actual)); |
280 | } | 280 | } |
281 | 281 | ||
282 | l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).nth(0) | 282 | l.values().zip(r.values()).filter_map(|(l, r)| find_mismatch(l, r)).next() |
283 | } | 283 | } |
284 | (&Null, &Null) => None, | 284 | (&Null, &Null) => None, |
285 | // magic string literal "{...}" acts as wildcard for any sub-JSON | 285 | // magic string literal "{...}" acts as wildcard for any sub-JSON |
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml index 72dd5e581..a8b9b010d 100644 --- a/xtask/Cargo.toml +++ b/xtask/Cargo.toml | |||
@@ -9,8 +9,8 @@ publish = false | |||
9 | doctest = false | 9 | doctest = false |
10 | 10 | ||
11 | [dependencies] | 11 | [dependencies] |
12 | walkdir = "2.1.3" | 12 | walkdir = "2.3.1" |
13 | pico-args = "0.3.0" | 13 | pico-args = "0.3.1" |
14 | quote = "1.0.2" | 14 | quote = "1.0.2" |
15 | proc-macro2 = "1.0.1" | 15 | proc-macro2 = "1.0.8" |
16 | anyhow = "1.0.19" | 16 | anyhow = "1.0.26" |
diff --git a/xtask/src/install.rs b/xtask/src/install.rs index cc6fecc85..3df021acc 100644 --- a/xtask/src/install.rs +++ b/xtask/src/install.rs | |||
@@ -94,8 +94,7 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { | |||
94 | }) | 94 | }) |
95 | }; | 95 | }; |
96 | 96 | ||
97 | let installed_extensions; | 97 | let installed_extensions = if cfg!(unix) { |
98 | if cfg!(unix) { | ||
99 | run!("npm --version").context("`npm` is required to build the VS Code plugin")?; | 98 | run!("npm --version").context("`npm` is required to build the VS Code plugin")?; |
100 | run!("npm install")?; | 99 | run!("npm install")?; |
101 | 100 | ||
@@ -103,7 +102,7 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { | |||
103 | 102 | ||
104 | let code = find_code(|bin| run!("{} --version", bin).is_ok())?; | 103 | let code = find_code(|bin| run!("{} --version", bin).is_ok())?; |
105 | run!("{} --install-extension rust-analyzer.vsix --force", code)?; | 104 | run!("{} --install-extension rust-analyzer.vsix --force", code)?; |
106 | installed_extensions = run!("{} --list-extensions", code; echo = false)?; | 105 | run!("{} --list-extensions", code; echo = false)? |
107 | } else { | 106 | } else { |
108 | run!("cmd.exe /c npm --version") | 107 | run!("cmd.exe /c npm --version") |
109 | .context("`npm` is required to build the VS Code plugin")?; | 108 | .context("`npm` is required to build the VS Code plugin")?; |
@@ -113,8 +112,8 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { | |||
113 | 112 | ||
114 | let code = find_code(|bin| run!("cmd.exe /c {}.cmd --version", bin).is_ok())?; | 113 | let code = find_code(|bin| run!("cmd.exe /c {}.cmd --version", bin).is_ok())?; |
115 | run!(r"cmd.exe /c {}.cmd --install-extension rust-analyzer.vsix --force", code)?; | 114 | run!(r"cmd.exe /c {}.cmd --install-extension rust-analyzer.vsix --force", code)?; |
116 | installed_extensions = run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)?; | 115 | run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)? |
117 | } | 116 | }; |
118 | 117 | ||
119 | if !installed_extensions.contains("rust-analyzer") { | 118 | if !installed_extensions.contains("rust-analyzer") { |
120 | bail!( | 119 | bail!( |