-rw-r--r--  Cargo.lock | 56
-rw-r--r--  crates/assists/Cargo.toml | 2
-rw-r--r--  crates/assists/src/handlers/extract_variable.rs | 2
-rw-r--r--  crates/assists/src/handlers/replace_derive_with_manual_impl.rs | 31
-rw-r--r--  crates/assists/src/utils/import_assets.rs | 37
-rw-r--r--  crates/completion/Cargo.toml | 2
-rw-r--r--  crates/completion/src/completions/attribute.rs | 2
-rw-r--r--  crates/completion/src/completions/postfix.rs | 2
-rw-r--r--  crates/completion/src/completions/postfix/format_like.rs | 13
-rw-r--r--  crates/completion/src/completions/unqualified_path.rs | 15
-rw-r--r--  crates/completion/src/lib.rs | 2
-rw-r--r--  crates/hir/Cargo.toml | 2
-rw-r--r--  crates/hir_def/Cargo.toml | 2
-rw-r--r--  crates/hir_def/src/import_map.rs | 224
-rw-r--r--  crates/hir_def/src/nameres.rs | 4
-rw-r--r--  crates/hir_expand/src/builtin_macro.rs | 3
-rw-r--r--  crates/hir_ty/Cargo.toml | 2
-rw-r--r--  crates/hir_ty/src/infer/expr.rs | 2
-rw-r--r--  crates/ide/Cargo.toml | 2
-rw-r--r--  crates/ide/src/diagnostics.rs | 22
-rw-r--r--  crates/ide/src/lib.rs | 2
-rw-r--r--  crates/ide_db/Cargo.toml | 2
-rw-r--r--  crates/ide_db/src/imports_locator.rs | 23
-rw-r--r--  crates/mbe/src/lib.rs | 58
-rw-r--r--  crates/mbe/src/mbe_expander/matcher.rs | 32
-rw-r--r--  crates/mbe/src/mbe_expander/transcriber.rs | 29
-rw-r--r--  crates/mbe/src/parser.rs | 68
-rw-r--r--  crates/proc_macro_srv/src/rustc_server.rs | 43
-rw-r--r--  crates/project_model/Cargo.toml | 2
-rw-r--r--  crates/project_model/src/cargo_workspace.rs | 18
-rw-r--r--  crates/rust-analyzer/Cargo.toml | 3
-rw-r--r--  crates/rust-analyzer/src/diff.rs | 53
-rw-r--r--  crates/rust-analyzer/src/handlers.rs | 25
-rw-r--r--  crates/rust-analyzer/src/lib.rs | 1
-rw-r--r--  crates/rust-analyzer/tests/rust-analyzer/main.rs | 31
-rw-r--r--  crates/ssr/Cargo.toml | 2
-rw-r--r--  crates/ssr/src/matching.rs | 4
-rw-r--r--  crates/syntax/Cargo.toml | 2
-rw-r--r--  crates/syntax/src/ast/node_ext.rs | 8
-rw-r--r--  editors/code/package.json | 8
-rw-r--r--  editors/code/src/client.ts | 11
-rw-r--r--  editors/code/src/commands.ts | 10
-rw-r--r--  editors/code/src/config.ts | 3
-rw-r--r--  editors/code/src/ctx.ts | 2
44 files changed, 606 insertions, 261 deletions
diff --git a/Cargo.lock b/Cargo.lock
index fd04ec3c5..1aa0c072d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -26,9 +26,9 @@ dependencies = [
26 26
27[[package]] 27[[package]]
28name = "anyhow" 28name = "anyhow"
29version = "1.0.36" 29version = "1.0.37"
30source = "registry+https://github.com/rust-lang/crates.io-index" 30source = "registry+https://github.com/rust-lang/crates.io-index"
31checksum = "68803225a7b13e47191bab76f2687382b60d259e8cf37f6e1893658b84bb9479" 31checksum = "ee67c11feeac938fae061b232e38e0b6d94f97a9df10e6271319325ac4c56a86"
32 32
33[[package]] 33[[package]]
34name = "anymap" 34name = "anymap"
@@ -53,7 +53,7 @@ dependencies = [
53 "either", 53 "either",
54 "hir", 54 "hir",
55 "ide_db", 55 "ide_db",
56 "itertools", 56 "itertools 0.10.0",
57 "profile", 57 "profile",
58 "rustc-hash", 58 "rustc-hash",
59 "stdx", 59 "stdx",
@@ -211,7 +211,7 @@ dependencies = [
211 "chalk-derive", 211 "chalk-derive",
212 "chalk-ir", 212 "chalk-ir",
213 "ena", 213 "ena",
214 "itertools", 214 "itertools 0.9.0",
215 "petgraph", 215 "petgraph",
216 "rustc-hash", 216 "rustc-hash",
217 "tracing", 217 "tracing",
@@ -250,7 +250,7 @@ dependencies = [
250 "expect-test", 250 "expect-test",
251 "hir", 251 "hir",
252 "ide_db", 252 "ide_db",
253 "itertools", 253 "itertools 0.10.0",
254 "log", 254 "log",
255 "profile", 255 "profile",
256 "rustc-hash", 256 "rustc-hash",
@@ -349,6 +349,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
349checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" 349checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
350 350
351[[package]] 351[[package]]
352name = "dissimilar"
353version = "1.0.2"
354source = "registry+https://github.com/rust-lang/crates.io-index"
355checksum = "fc4b29f4b9bb94bf267d57269fd0706d343a160937108e9619fe380645428abb"
356
357[[package]]
352name = "drop_bomb" 358name = "drop_bomb"
353version = "0.1.5" 359version = "0.1.5"
354source = "registry+https://github.com/rust-lang/crates.io-index" 360source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -534,7 +540,7 @@ dependencies = [
534 "hir_def", 540 "hir_def",
535 "hir_expand", 541 "hir_expand",
536 "hir_ty", 542 "hir_ty",
537 "itertools", 543 "itertools 0.10.0",
538 "log", 544 "log",
539 "profile", 545 "profile",
540 "rustc-hash", 546 "rustc-hash",
@@ -557,7 +563,7 @@ dependencies = [
557 "fst", 563 "fst",
558 "hir_expand", 564 "hir_expand",
559 "indexmap", 565 "indexmap",
560 "itertools", 566 "itertools 0.10.0",
561 "log", 567 "log",
562 "mbe", 568 "mbe",
563 "once_cell", 569 "once_cell",
@@ -601,7 +607,7 @@ dependencies = [
601 "expect-test", 607 "expect-test",
602 "hir_def", 608 "hir_def",
603 "hir_expand", 609 "hir_expand",
604 "itertools", 610 "itertools 0.10.0",
605 "log", 611 "log",
606 "once_cell", 612 "once_cell",
607 "profile", 613 "profile",
@@ -637,7 +643,7 @@ dependencies = [
637 "hir", 643 "hir",
638 "ide_db", 644 "ide_db",
639 "indexmap", 645 "indexmap",
640 "itertools", 646 "itertools 0.10.0",
641 "log", 647 "log",
642 "oorandom", 648 "oorandom",
643 "profile", 649 "profile",
@@ -661,7 +667,7 @@ dependencies = [
661 "expect-test", 667 "expect-test",
662 "fst", 668 "fst",
663 "hir", 669 "hir",
664 "itertools", 670 "itertools 0.10.0",
665 "log", 671 "log",
666 "once_cell", 672 "once_cell",
667 "profile", 673 "profile",
@@ -742,10 +748,19 @@ dependencies = [
742] 748]
743 749
744[[package]] 750[[package]]
751name = "itertools"
752version = "0.10.0"
753source = "registry+https://github.com/rust-lang/crates.io-index"
754checksum = "37d572918e350e82412fe766d24b15e6682fb2ed2bbe018280caa810397cb319"
755dependencies = [
756 "either",
757]
758
759[[package]]
745name = "itoa" 760name = "itoa"
746version = "0.4.6" 761version = "0.4.7"
747source = "registry+https://github.com/rust-lang/crates.io-index" 762source = "registry+https://github.com/rust-lang/crates.io-index"
748checksum = "dc6f3ad7b9d11a0c00842ff8de1b60ee58661048eb8049ed33c73594f359d7e6" 763checksum = "dd25036021b0de88a0aff6b850051563c6516d0bf53f8638938edbb9de732736"
749 764
750[[package]] 765[[package]]
751name = "jod-thread" 766name = "jod-thread"
@@ -1219,7 +1234,7 @@ dependencies = [
1219 "base_db", 1234 "base_db",
1220 "cargo_metadata", 1235 "cargo_metadata",
1221 "cfg", 1236 "cfg",
1222 "itertools", 1237 "itertools 0.10.0",
1223 "log", 1238 "log",
1224 "paths", 1239 "paths",
1225 "proc_macro_api", 1240 "proc_macro_api",
@@ -1334,6 +1349,7 @@ dependencies = [
1334 "anyhow", 1349 "anyhow",
1335 "cfg", 1350 "cfg",
1336 "crossbeam-channel 0.5.0", 1351 "crossbeam-channel 0.5.0",
1352 "dissimilar",
1337 "env_logger", 1353 "env_logger",
1338 "expect-test", 1354 "expect-test",
1339 "flycheck", 1355 "flycheck",
@@ -1342,7 +1358,7 @@ dependencies = [
1342 "hir_ty", 1358 "hir_ty",
1343 "ide", 1359 "ide",
1344 "ide_db", 1360 "ide_db",
1345 "itertools", 1361 "itertools 0.10.0",
1346 "jod-thread", 1362 "jod-thread",
1347 "log", 1363 "log",
1348 "lsp-server", 1364 "lsp-server",
@@ -1518,9 +1534,9 @@ dependencies = [
1518 1534
1519[[package]] 1535[[package]]
1520name = "serde_json" 1536name = "serde_json"
1521version = "1.0.60" 1537version = "1.0.61"
1522source = "registry+https://github.com/rust-lang/crates.io-index" 1538source = "registry+https://github.com/rust-lang/crates.io-index"
1523checksum = "1500e84d27fe482ed1dc791a56eddc2f230046a040fa908c08bda1d9fb615779" 1539checksum = "4fceb2595057b6891a4ee808f70054bd2d12f0e97f1cbb78689b59f676df325a"
1524dependencies = [ 1540dependencies = [
1525 "indexmap", 1541 "indexmap",
1526 "itoa", 1542 "itoa",
@@ -1586,7 +1602,7 @@ dependencies = [
1586 "expect-test", 1602 "expect-test",
1587 "hir", 1603 "hir",
1588 "ide_db", 1604 "ide_db",
1589 "itertools", 1605 "itertools 0.10.0",
1590 "rustc-hash", 1606 "rustc-hash",
1591 "syntax", 1607 "syntax",
1592 "test_utils", 1608 "test_utils",
@@ -1599,9 +1615,9 @@ version = "0.0.0"
1599 1615
1600[[package]] 1616[[package]]
1601name = "syn" 1617name = "syn"
1602version = "1.0.55" 1618version = "1.0.56"
1603source = "registry+https://github.com/rust-lang/crates.io-index" 1619source = "registry+https://github.com/rust-lang/crates.io-index"
1604checksum = "a571a711dddd09019ccc628e1b17fe87c59b09d513c06c026877aa708334f37a" 1620checksum = "a9802ddde94170d186eeee5005b798d9c159fa970403f1be19976d0cfb939b72"
1605dependencies = [ 1621dependencies = [
1606 "proc-macro2", 1622 "proc-macro2",
1607 "quote", 1623 "quote",
@@ -1627,7 +1643,7 @@ dependencies = [
1627 "arrayvec", 1643 "arrayvec",
1628 "expect-test", 1644 "expect-test",
1629 "indexmap", 1645 "indexmap",
1630 "itertools", 1646 "itertools 0.10.0",
1631 "once_cell", 1647 "once_cell",
1632 "parser", 1648 "parser",
1633 "profile", 1649 "profile",
diff --git a/crates/assists/Cargo.toml b/crates/assists/Cargo.toml
index 3fd8327d6..ed8ad666f 100644
--- a/crates/assists/Cargo.toml
+++ b/crates/assists/Cargo.toml
@@ -11,7 +11,7 @@ doctest = false
11 11
12[dependencies] 12[dependencies]
13rustc-hash = "1.1.0" 13rustc-hash = "1.1.0"
14itertools = "0.9.0" 14itertools = "0.10.0"
15either = "1.6.1" 15either = "1.6.1"
16 16
17stdx = { path = "../stdx", version = "0.0.0" } 17stdx = { path = "../stdx", version = "0.0.0" }
diff --git a/crates/assists/src/handlers/extract_variable.rs b/crates/assists/src/handlers/extract_variable.rs
index d2ae137cd..9957012fe 100644
--- a/crates/assists/src/handlers/extract_variable.rs
+++ b/crates/assists/src/handlers/extract_variable.rs
@@ -91,7 +91,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext) -> Option
91 // extra newlines in the indent block 91 // extra newlines in the indent block
92 let text = indent.text(); 92 let text = indent.text();
93 if text.starts_with('\n') { 93 if text.starts_with('\n') {
94 buf.push_str("\n"); 94 buf.push('\n');
95 buf.push_str(text.trim_start_matches('\n')); 95 buf.push_str(text.trim_start_matches('\n'));
96 } else { 96 } else {
97 buf.push_str(text); 97 buf.push_str(text);
diff --git a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
index 4d6a1956b..cb7a5c104 100644
--- a/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
+++ b/crates/assists/src/handlers/replace_derive_with_manual_impl.rs
@@ -62,21 +62,22 @@ pub(crate) fn replace_derive_with_manual_impl(
62 let current_module = ctx.sema.scope(annotated_name.syntax()).module()?; 62 let current_module = ctx.sema.scope(annotated_name.syntax()).module()?;
63 let current_crate = current_module.krate(); 63 let current_crate = current_module.krate();
64 64
65 let found_traits = 65 let found_traits = imports_locator::find_exact_imports(
66 imports_locator::find_exact_imports(&ctx.sema, current_crate, trait_token.text()) 66 &ctx.sema,
67 .filter_map( 67 current_crate,
68 |candidate: either::Either<hir::ModuleDef, hir::MacroDef>| match candidate { 68 trait_token.text().to_string(),
69 either::Either::Left(hir::ModuleDef::Trait(trait_)) => Some(trait_), 69 )
70 _ => None, 70 .filter_map(|candidate: either::Either<hir::ModuleDef, hir::MacroDef>| match candidate {
71 }, 71 either::Either::Left(hir::ModuleDef::Trait(trait_)) => Some(trait_),
72 ) 72 _ => None,
73 .flat_map(|trait_| { 73 })
74 current_module 74 .flat_map(|trait_| {
75 .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_)) 75 current_module
76 .as_ref() 76 .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_))
77 .map(mod_path_to_ast) 77 .as_ref()
78 .zip(Some(trait_)) 78 .map(mod_path_to_ast)
79 }); 79 .zip(Some(trait_))
80 });
80 81
81 let mut no_traits_found = true; 82 let mut no_traits_found = true;
82 for (trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) { 83 for (trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) {
diff --git a/crates/assists/src/utils/import_assets.rs b/crates/assists/src/utils/import_assets.rs
index ff5c0e78e..4ce82c1ba 100644
--- a/crates/assists/src/utils/import_assets.rs
+++ b/crates/assists/src/utils/import_assets.rs
@@ -179,25 +179,24 @@ impl ImportAssets {
179 } 179 }
180 }; 180 };
181 181
182 let mut res = 182 let mut res = imports_locator::find_exact_imports(
183 imports_locator::find_exact_imports(sema, current_crate, &self.get_search_query()) 183 sema,
184 .filter_map(filter) 184 current_crate,
185 .filter_map(|candidate| { 185 self.get_search_query().to_string(),
186 let item: hir::ItemInNs = candidate.either(Into::into, Into::into); 186 )
187 if let Some(prefix_kind) = prefixed { 187 .filter_map(filter)
188 self.module_with_name_to_import.find_use_path_prefixed( 188 .filter_map(|candidate| {
189 db, 189 let item: hir::ItemInNs = candidate.either(Into::into, Into::into);
190 item, 190 if let Some(prefix_kind) = prefixed {
191 prefix_kind, 191 self.module_with_name_to_import.find_use_path_prefixed(db, item, prefix_kind)
192 ) 192 } else {
193 } else { 193 self.module_with_name_to_import.find_use_path(db, item)
194 self.module_with_name_to_import.find_use_path(db, item) 194 }
195 } 195 .map(|path| (path, item))
196 .map(|path| (path, item)) 196 })
197 }) 197 .filter(|(use_path, _)| use_path.len() > 1)
198 .filter(|(use_path, _)| use_path.len() > 1) 198 .take(20)
199 .take(20) 199 .collect::<Vec<_>>();
200 .collect::<Vec<_>>();
201 res.sort_by_key(|(path, _)| path.clone()); 200 res.sort_by_key(|(path, _)| path.clone());
202 res 201 res
203 } 202 }
diff --git a/crates/completion/Cargo.toml b/crates/completion/Cargo.toml
index 35e169a28..78e93e78e 100644
--- a/crates/completion/Cargo.toml
+++ b/crates/completion/Cargo.toml
@@ -10,7 +10,7 @@ edition = "2018"
10doctest = false 10doctest = false
11 11
12[dependencies] 12[dependencies]
13itertools = "0.9.0" 13itertools = "0.10.0"
14log = "0.4.8" 14log = "0.4.8"
15rustc-hash = "1.1.0" 15rustc-hash = "1.1.0"
16either = "1.6.1" 16either = "1.6.1"
diff --git a/crates/completion/src/completions/attribute.rs b/crates/completion/src/completions/attribute.rs
index 19ce2482f..8695eed39 100644
--- a/crates/completion/src/completions/attribute.rs
+++ b/crates/completion/src/completions/attribute.rs
@@ -234,7 +234,7 @@ fn parse_comma_sep_input(derive_input: ast::TokenTree) -> Result<FxHashSet<Strin
234 current_derive = String::new(); 234 current_derive = String::new();
235 } 235 }
236 } else { 236 } else {
237 current_derive.push_str(token.to_string().trim()); 237 current_derive.push_str(token.text().trim());
238 } 238 }
239 } 239 }
240 240
diff --git a/crates/completion/src/completions/postfix.rs b/crates/completion/src/completions/postfix.rs
index d6db82a93..3883d6d21 100644
--- a/crates/completion/src/completions/postfix.rs
+++ b/crates/completion/src/completions/postfix.rs
@@ -502,7 +502,7 @@ fn main() {
502 #[test] 502 #[test]
503 fn postfix_completion_for_format_like_strings() { 503 fn postfix_completion_for_format_like_strings() {
504 check_edit( 504 check_edit(
505 "fmt", 505 "format",
506 r#"fn main() { "{some_var:?}".<|> }"#, 506 r#"fn main() { "{some_var:?}".<|> }"#,
507 r#"fn main() { format!("{:?}", some_var) }"#, 507 r#"fn main() { format!("{:?}", some_var) }"#,
508 ); 508 );
diff --git a/crates/completion/src/completions/postfix/format_like.rs b/crates/completion/src/completions/postfix/format_like.rs
index 88ba86acb..def4b13fb 100644
--- a/crates/completion/src/completions/postfix/format_like.rs
+++ b/crates/completion/src/completions/postfix/format_like.rs
@@ -22,7 +22,7 @@ use syntax::ast::{self, AstToken};
22 22
23/// Mapping ("postfix completion item" => "macro to use") 23/// Mapping ("postfix completion item" => "macro to use")
24static KINDS: &[(&str, &str)] = &[ 24static KINDS: &[(&str, &str)] = &[
25 ("fmt", "format!"), 25 ("format", "format!"),
26 ("panic", "panic!"), 26 ("panic", "panic!"),
27 ("println", "println!"), 27 ("println", "println!"),
28 ("eprintln", "eprintln!"), 28 ("eprintln", "eprintln!"),
@@ -108,7 +108,8 @@ impl FormatStrParser {
108 // "{MyStruct { val_a: 0, val_b: 1 }}". 108 // "{MyStruct { val_a: 0, val_b: 1 }}".
109 let mut inexpr_open_count = 0; 109 let mut inexpr_open_count = 0;
110 110
111 for chr in self.input.chars() { 111 let mut chars = self.input.chars().peekable();
112 while let Some(chr) = chars.next() {
112 match (self.state, chr) { 113 match (self.state, chr) {
113 (State::NotExpr, '{') => { 114 (State::NotExpr, '{') => {
114 self.output.push(chr); 115 self.output.push(chr);
@@ -157,6 +158,11 @@ impl FormatStrParser {
157 inexpr_open_count -= 1; 158 inexpr_open_count -= 1;
158 } 159 }
159 } 160 }
161 (State::Expr, ':') if chars.peek().copied() == Some(':') => {
 162 // path separator
163 current_expr.push_str("::");
164 chars.next();
165 }
160 (State::Expr, ':') => { 166 (State::Expr, ':') => {
161 if inexpr_open_count == 0 { 167 if inexpr_open_count == 0 {
162 // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}" 168 // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}"
@@ -249,6 +255,9 @@ mod tests {
249 expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]], 255 expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
250 ), 256 ),
251 ("{ 2 + 2 }", expect![["{}; 2 + 2"]]), 257 ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
258 ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
259 ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
260 ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
252 ]; 261 ];
253 262
254 for (input, output) in test_vector { 263 for (input, output) in test_vector {
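Note: the new `(State::Expr, ':')` arm above peeks one character ahead so that a `::` inside a placeholder is kept as a path separator instead of being read as the start of a format specifier. A minimal self-contained sketch of that lookahead; the function name and `main` harness are illustrative only, not the real `FormatStrParser`:

// Splits the inside of a `{...}` placeholder into an expression part and a
// format-specifier part, treating `::` as a path separator via one-char lookahead.
fn split_expr_and_spec(input: &str) -> (String, String) {
    let mut chars = input.chars().peekable();
    let mut expr = String::new();
    let mut spec = String::new();
    while let Some(c) = chars.next() {
        match c {
            ':' if chars.peek() == Some(&':') => {
                expr.push_str("::"); // consume both colons of a path separator
                chars.next();
            }
            ':' => {
                // a lone ':' starts the specifier, e.g. "foo::bar():?" -> spec ":?"
                spec.push(':');
                spec.extend(chars.by_ref());
                break;
            }
            _ => expr.push(c),
        }
    }
    (expr, spec)
}

fn main() {
    assert_eq!(
        split_expr_and_spec("foo::bar():?"),
        ("foo::bar()".to_string(), ":?".to_string())
    );
    assert_eq!(split_expr_and_spec("strsim::jaro_winkle(a)").0, "strsim::jaro_winkle(a)");
}

With this split, "{foo::bar():?}" expands to format!("{:?}", foo::bar()), which is what the new test vectors above expect.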
diff --git a/crates/completion/src/completions/unqualified_path.rs b/crates/completion/src/completions/unqualified_path.rs
index d09849752..81a6d00e2 100644
--- a/crates/completion/src/completions/unqualified_path.rs
+++ b/crates/completion/src/completions/unqualified_path.rs
@@ -101,8 +101,9 @@ fn complete_enum_variants(acc: &mut Completions, ctx: &CompletionContext, ty: &T
101// 101//
102// .Fuzzy search details 102// .Fuzzy search details
103// 103//
104// To avoid an excessive amount of the results returned, completion input is checked for inclusion in the identifiers only 104// To avoid an excessive amount of the results returned, completion input is checked for inclusion in the names only
105// (i.e. in `HashMap` in the `std::collections::HashMap` path), also not in the module indentifiers. 105// (i.e. in `HashMap` in the `std::collections::HashMap` path).
 106// For the same reasons, avoids searching for any imports for inputs shorter than 2 symbols.
106// 107//
107// .Merge Behavior 108// .Merge Behavior
108// 109//
@@ -126,15 +127,20 @@ fn fuzzy_completion(acc: &mut Completions, ctx: &CompletionContext) -> Option<()
126 let _p = profile::span("fuzzy_completion"); 127 let _p = profile::span("fuzzy_completion");
127 let potential_import_name = ctx.token.to_string(); 128 let potential_import_name = ctx.token.to_string();
128 129
130 if potential_import_name.len() < 2 {
131 return None;
132 }
133
129 let current_module = ctx.scope.module()?; 134 let current_module = ctx.scope.module()?;
130 let anchor = ctx.name_ref_syntax.as_ref()?; 135 let anchor = ctx.name_ref_syntax.as_ref()?;
131 let import_scope = ImportScope::find_insert_use_container(anchor.syntax(), &ctx.sema)?; 136 let import_scope = ImportScope::find_insert_use_container(anchor.syntax(), &ctx.sema)?;
132 137
138 let user_input_lowercased = potential_import_name.to_lowercase();
133 let mut all_mod_paths = imports_locator::find_similar_imports( 139 let mut all_mod_paths = imports_locator::find_similar_imports(
134 &ctx.sema, 140 &ctx.sema,
135 ctx.krate?, 141 ctx.krate?,
136 Some(100), 142 Some(40),
137 &potential_import_name, 143 potential_import_name,
138 true, 144 true,
139 ) 145 )
140 .filter_map(|import_candidate| { 146 .filter_map(|import_candidate| {
@@ -150,7 +156,6 @@ fn fuzzy_completion(acc: &mut Completions, ctx: &CompletionContext) -> Option<()
150 .filter(|(mod_path, _)| mod_path.len() > 1) 156 .filter(|(mod_path, _)| mod_path.len() > 1)
151 .collect::<Vec<_>>(); 157 .collect::<Vec<_>>();
152 158
153 let user_input_lowercased = potential_import_name.to_lowercase();
154 all_mod_paths.sort_by_cached_key(|(mod_path, _)| { 159 all_mod_paths.sort_by_cached_key(|(mod_path, _)| {
155 compute_fuzzy_completion_order_key(mod_path, &user_input_lowercased) 160 compute_fuzzy_completion_order_key(mod_path, &user_input_lowercased)
156 }); 161 });
diff --git a/crates/completion/src/lib.rs b/crates/completion/src/lib.rs
index 8e27bb153..c57d05bbe 100644
--- a/crates/completion/src/lib.rs
+++ b/crates/completion/src/lib.rs
@@ -137,7 +137,7 @@ pub fn resolve_completion_edits(
137 config: &CompletionConfig, 137 config: &CompletionConfig,
138 position: FilePosition, 138 position: FilePosition,
139 full_import_path: &str, 139 full_import_path: &str,
140 imported_name: &str, 140 imported_name: String,
141) -> Option<Vec<TextEdit>> { 141) -> Option<Vec<TextEdit>> {
142 let ctx = CompletionContext::new(db, position, config)?; 142 let ctx = CompletionContext::new(db, position, config)?;
143 let anchor = ctx.name_ref_syntax.as_ref()?; 143 let anchor = ctx.name_ref_syntax.as_ref()?;
diff --git a/crates/hir/Cargo.toml b/crates/hir/Cargo.toml
index 6dc5ad63b..d4ea7327e 100644
--- a/crates/hir/Cargo.toml
+++ b/crates/hir/Cargo.toml
@@ -14,7 +14,7 @@ log = "0.4.8"
14rustc-hash = "1.1.0" 14rustc-hash = "1.1.0"
15either = "1.5.3" 15either = "1.5.3"
16arrayvec = "0.5.1" 16arrayvec = "0.5.1"
17itertools = "0.9.0" 17itertools = "0.10.0"
18 18
19stdx = { path = "../stdx", version = "0.0.0" } 19stdx = { path = "../stdx", version = "0.0.0" }
20syntax = { path = "../syntax", version = "0.0.0" } 20syntax = { path = "../syntax", version = "0.0.0" }
diff --git a/crates/hir_def/Cargo.toml b/crates/hir_def/Cargo.toml
index a88b5f57e..e8b581e2f 100644
--- a/crates/hir_def/Cargo.toml
+++ b/crates/hir_def/Cargo.toml
@@ -17,7 +17,7 @@ either = "1.5.3"
17anymap = "0.12.1" 17anymap = "0.12.1"
18drop_bomb = "0.1.4" 18drop_bomb = "0.1.4"
19fst = { version = "0.4", default-features = false } 19fst = { version = "0.4", default-features = false }
20itertools = "0.9.0" 20itertools = "0.10.0"
21indexmap = "1.4.0" 21indexmap = "1.4.0"
22smallvec = "1.4.0" 22smallvec = "1.4.0"
23 23
diff --git a/crates/hir_def/src/import_map.rs b/crates/hir_def/src/import_map.rs
index c0f108848..30b22f51d 100644
--- a/crates/hir_def/src/import_map.rs
+++ b/crates/hir_def/src/import_map.rs
@@ -238,32 +238,53 @@ pub enum ImportKind {
238 BuiltinType, 238 BuiltinType,
239} 239}
240 240
241/// A way to match import map contents against the search query.
242#[derive(Debug)]
243pub enum SearchMode {
244 /// Import map entry should strictly match the query string.
245 Equals,
246 /// Import map entry should contain the query string.
247 Contains,
248 /// Import map entry should contain all letters from the query string,
 249 /// in the same order, but not necessarily adjacent.
250 Fuzzy,
251}
252
241#[derive(Debug)] 253#[derive(Debug)]
242pub struct Query { 254pub struct Query {
243 query: String, 255 query: String,
244 lowercased: String, 256 lowercased: String,
245 anchor_end: bool, 257 name_only: bool,
258 search_mode: SearchMode,
246 case_sensitive: bool, 259 case_sensitive: bool,
247 limit: usize, 260 limit: usize,
248 exclude_import_kinds: FxHashSet<ImportKind>, 261 exclude_import_kinds: FxHashSet<ImportKind>,
249} 262}
250 263
251impl Query { 264impl Query {
252 pub fn new(query: &str) -> Self { 265 pub fn new(query: String) -> Self {
266 let lowercased = query.to_lowercase();
253 Self { 267 Self {
254 lowercased: query.to_lowercase(), 268 query,
255 query: query.to_string(), 269 lowercased,
256 anchor_end: false, 270 name_only: false,
271 search_mode: SearchMode::Contains,
257 case_sensitive: false, 272 case_sensitive: false,
258 limit: usize::max_value(), 273 limit: usize::max_value(),
259 exclude_import_kinds: FxHashSet::default(), 274 exclude_import_kinds: FxHashSet::default(),
260 } 275 }
261 } 276 }
262 277
263 /// Only returns items whose paths end with the (case-insensitive) query string as their last 278 /// Matches entries' names only, ignoring the rest of
264 /// segment. 279 /// the qualifier.
265 pub fn anchor_end(self) -> Self { 280 /// Example: for `std::marker::PhantomData`, the name is `PhantomData`.
266 Self { anchor_end: true, ..self } 281 pub fn name_only(self) -> Self {
282 Self { name_only: true, ..self }
283 }
284
285 /// Specifies the way to search for the entries using the query.
286 pub fn search_mode(self, search_mode: SearchMode) -> Self {
287 Self { search_mode, ..self }
267 } 288 }
268 289
269 /// Limits the returned number of items to `limit`. 290 /// Limits the returned number of items to `limit`.
@@ -283,6 +304,40 @@ impl Query {
283 } 304 }
284} 305}
285 306
307fn contains_query(query: &Query, input_path: &ImportPath, enforce_lowercase: bool) -> bool {
308 let mut input = if query.name_only {
309 input_path.segments.last().unwrap().to_string()
310 } else {
311 input_path.to_string()
312 };
313 if enforce_lowercase || !query.case_sensitive {
314 input.make_ascii_lowercase();
315 }
316
317 let query_string =
318 if !enforce_lowercase && query.case_sensitive { &query.query } else { &query.lowercased };
319
320 match query.search_mode {
321 SearchMode::Equals => &input == query_string,
322 SearchMode::Contains => input.contains(query_string),
323 SearchMode::Fuzzy => {
324 let mut unchecked_query_chars = query_string.chars();
325 let mut mismatching_query_char = unchecked_query_chars.next();
326
327 for input_char in input.chars() {
328 match mismatching_query_char {
329 None => return true,
330 Some(matching_query_char) if matching_query_char == input_char => {
331 mismatching_query_char = unchecked_query_chars.next();
332 }
333 _ => (),
334 }
335 }
336 mismatching_query_char.is_none()
337 }
338 }
339}
340
286/// Searches dependencies of `krate` for an importable path matching `query`. 341/// Searches dependencies of `krate` for an importable path matching `query`.
287/// 342///
288/// This returns a list of items that could be imported from dependencies of `krate`. 343/// This returns a list of items that could be imported from dependencies of `krate`.
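Note: the `SearchMode::Fuzzy` arm of `contains_query` above is a plain subsequence check: every character of the (lowercased) query has to occur in the candidate path, in order, with arbitrary gaps allowed. A standalone sketch of just that check; the function name and `main` harness are illustrative, not part of the patch:

// Subsequence check in the spirit of `SearchMode::Fuzzy`: each query character
// must appear in the candidate, in order, but not necessarily adjacent.
fn fuzzy_matches(candidate: &str, query: &str) -> bool {
    let mut query_chars = query.chars();
    let mut pending = query_chars.next();
    for c in candidate.chars() {
        match pending {
            None => return true,
            Some(expected) if expected == c => pending = query_chars.next(),
            _ => {}
        }
    }
    pending.is_none()
}

fn main() {
    assert!(fuzzy_matches("dep::fmt::display::fmt", "fmt"));
    assert!(fuzzy_matches("dep::format", "fmt")); // 'f', 'm', 't' appear in order
    assert!(!fuzzy_matches("dep::no", "fmt"));
}

This is why the fuzzy query `fmt` in the `search_mode` test below matches both `dep::format` and `dep::fmt::Display::fmt`.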
@@ -312,39 +367,29 @@ pub fn search_dependencies<'a>(
312 let importables = &import_map.importables[indexed_value.value as usize..]; 367 let importables = &import_map.importables[indexed_value.value as usize..];
313 368
314 // Path shared by the importable items in this group. 369 // Path shared by the importable items in this group.
315 let path = &import_map.map[&importables[0]].path; 370 let common_importables_path = &import_map.map[&importables[0]].path;
316 371 if !contains_query(&query, common_importables_path, true) {
317 if query.anchor_end { 372 continue;
318 // Last segment must match query.
319 let last = path.segments.last().unwrap().to_string();
320 if last.to_lowercase() != query.lowercased {
321 continue;
322 }
323 } 373 }
324 374
375 let common_importables_path_fst = fst_path(common_importables_path);
325 // Add the items from this `ModPath` group. Those are all subsequent items in 376 // Add the items from this `ModPath` group. Those are all subsequent items in
326 // `importables` whose paths match `path`. 377 // `importables` whose paths match `path`.
327 let iter = importables 378 let iter = importables
328 .iter() 379 .iter()
329 .copied() 380 .copied()
330 .take_while(|item| { 381 .take_while(|item| {
331 let item_path = &import_map.map[item].path; 382 common_importables_path_fst == fst_path(&import_map.map[item].path)
332 fst_path(item_path) == fst_path(path)
333 }) 383 })
334 .filter(|&item| match item_import_kind(item) { 384 .filter(|&item| match item_import_kind(item) {
335 Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind), 385 Some(import_kind) => !query.exclude_import_kinds.contains(&import_kind),
336 None => true, 386 None => true,
387 })
388 .filter(|item| {
389 !query.case_sensitive // we've already checked the common importables path case-insensitively
390 || contains_query(&query, &import_map.map[item].path, false)
337 }); 391 });
338 392 res.extend(iter);
339 if query.case_sensitive {
340 // FIXME: This does not do a subsequence match.
341 res.extend(iter.filter(|item| {
342 let item_path = &import_map.map[item].path;
343 item_path.to_string().contains(&query.query)
344 }));
345 } else {
346 res.extend(iter);
347 }
348 393
349 if res.len() >= query.limit { 394 if res.len() >= query.limit {
350 res.truncate(query.limit); 395 res.truncate(query.limit);
@@ -387,8 +432,9 @@ fn item_import_kind(item: ItemInNs) -> Option<ImportKind> {
387mod tests { 432mod tests {
388 use base_db::{fixture::WithFixture, SourceDatabase, Upcast}; 433 use base_db::{fixture::WithFixture, SourceDatabase, Upcast};
389 use expect_test::{expect, Expect}; 434 use expect_test::{expect, Expect};
435 use stdx::format_to;
390 436
391 use crate::{test_db::TestDB, AssocContainerId, Lookup}; 437 use crate::{data::FunctionData, test_db::TestDB, AssocContainerId, Lookup};
392 438
393 use super::*; 439 use super::*;
394 440
@@ -407,14 +453,32 @@ mod tests {
407 .into_iter() 453 .into_iter()
408 .filter_map(|item| { 454 .filter_map(|item| {
409 let mark = match item { 455 let mark = match item {
456 ItemInNs::Types(ModuleDefId::FunctionId(_))
457 | ItemInNs::Values(ModuleDefId::FunctionId(_)) => "f",
410 ItemInNs::Types(_) => "t", 458 ItemInNs::Types(_) => "t",
411 ItemInNs::Values(_) => "v", 459 ItemInNs::Values(_) => "v",
412 ItemInNs::Macros(_) => "m", 460 ItemInNs::Macros(_) => "m",
413 }; 461 };
414 let item = assoc_to_trait(&db, item);
415 item.krate(db.upcast()).map(|krate| { 462 item.krate(db.upcast()).map(|krate| {
416 let map = db.import_map(krate); 463 let map = db.import_map(krate);
417 let path = map.path_of(item).unwrap(); 464
465 let path = match assoc_to_trait(&db, item) {
466 Some(trait_) => {
467 let mut full_path = map.path_of(trait_).unwrap().to_string();
468 if let ItemInNs::Types(ModuleDefId::FunctionId(function_id))
469 | ItemInNs::Values(ModuleDefId::FunctionId(function_id)) = item
470 {
471 format_to!(
472 full_path,
473 "::{}",
474 FunctionData::fn_data_query(&db, function_id).name,
475 );
476 }
477 full_path
478 }
479 None => map.path_of(item).unwrap().to_string(),
480 };
481
418 format!( 482 format!(
419 "{}::{} ({})\n", 483 "{}::{} ({})\n",
420 crate_graph[krate].display_name.as_ref().unwrap(), 484 crate_graph[krate].display_name.as_ref().unwrap(),
@@ -427,15 +491,15 @@ mod tests {
427 expect.assert_eq(&actual) 491 expect.assert_eq(&actual)
428 } 492 }
429 493
430 fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> ItemInNs { 494 fn assoc_to_trait(db: &dyn DefDatabase, item: ItemInNs) -> Option<ItemInNs> {
431 let assoc: AssocItemId = match item { 495 let assoc: AssocItemId = match item {
432 ItemInNs::Types(it) | ItemInNs::Values(it) => match it { 496 ItemInNs::Types(it) | ItemInNs::Values(it) => match it {
433 ModuleDefId::TypeAliasId(it) => it.into(), 497 ModuleDefId::TypeAliasId(it) => it.into(),
434 ModuleDefId::FunctionId(it) => it.into(), 498 ModuleDefId::FunctionId(it) => it.into(),
435 ModuleDefId::ConstId(it) => it.into(), 499 ModuleDefId::ConstId(it) => it.into(),
436 _ => return item, 500 _ => return None,
437 }, 501 },
438 _ => return item, 502 _ => return None,
439 }; 503 };
440 504
441 let container = match assoc { 505 let container = match assoc {
@@ -445,8 +509,8 @@ mod tests {
445 }; 509 };
446 510
447 match container { 511 match container {
448 AssocContainerId::TraitId(it) => ItemInNs::Types(it.into()), 512 AssocContainerId::TraitId(it) => Some(ItemInNs::Types(it.into())),
449 _ => item, 513 _ => None,
450 } 514 }
451 } 515 }
452 516
@@ -685,7 +749,7 @@ mod tests {
685 } 749 }
686 750
687 #[test] 751 #[test]
688 fn search() { 752 fn search_mode() {
689 let ra_fixture = r#" 753 let ra_fixture = r#"
690 //- /main.rs crate:main deps:dep 754 //- /main.rs crate:main deps:dep
691 //- /dep.rs crate:dep deps:tdep 755 //- /dep.rs crate:dep deps:tdep
@@ -713,28 +777,96 @@ mod tests {
713 check_search( 777 check_search(
714 ra_fixture, 778 ra_fixture,
715 "main", 779 "main",
716 Query::new("fmt"), 780 Query::new("fmt".to_string()).search_mode(SearchMode::Fuzzy),
717 expect![[r#" 781 expect![[r#"
718 dep::fmt (t) 782 dep::fmt (t)
719 dep::Fmt (t) 783 dep::Fmt (t)
720 dep::Fmt (v) 784 dep::Fmt (v)
721 dep::Fmt (m) 785 dep::Fmt (m)
722 dep::fmt::Display (t) 786 dep::fmt::Display (t)
723 dep::format (v) 787 dep::format (f)
788 dep::fmt::Display::fmt (f)
789 "#]],
790 );
791
792 check_search(
793 ra_fixture,
794 "main",
795 Query::new("fmt".to_string()).search_mode(SearchMode::Equals),
796 expect![[r#"
797 dep::fmt (t)
798 dep::Fmt (t)
799 dep::Fmt (v)
800 dep::Fmt (m)
801 dep::fmt::Display::fmt (f)
802 "#]],
803 );
804
805 check_search(
806 ra_fixture,
807 "main",
808 Query::new("fmt".to_string()).search_mode(SearchMode::Contains),
809 expect![[r#"
810 dep::fmt (t)
811 dep::Fmt (t)
812 dep::Fmt (v)
813 dep::Fmt (m)
724 dep::fmt::Display (t) 814 dep::fmt::Display (t)
815 dep::fmt::Display::fmt (f)
725 "#]], 816 "#]],
726 ); 817 );
818 }
819
820 #[test]
821 fn name_only() {
822 let ra_fixture = r#"
823 //- /main.rs crate:main deps:dep
824 //- /dep.rs crate:dep deps:tdep
825 use tdep::fmt as fmt_dep;
826 pub mod fmt {
827 pub trait Display {
828 fn fmt();
829 }
830 }
831 #[macro_export]
832 macro_rules! Fmt {
833 () => {};
834 }
835 pub struct Fmt;
836
837 pub fn format() {}
838 pub fn no() {}
839
840 //- /tdep.rs crate:tdep
841 pub mod fmt {
842 pub struct NotImportableFromMain;
843 }
844 "#;
727 845
728 check_search( 846 check_search(
729 ra_fixture, 847 ra_fixture,
730 "main", 848 "main",
731 Query::new("fmt").anchor_end(), 849 Query::new("fmt".to_string()),
732 expect![[r#" 850 expect![[r#"
733 dep::fmt (t) 851 dep::fmt (t)
734 dep::Fmt (t) 852 dep::Fmt (t)
735 dep::Fmt (v) 853 dep::Fmt (v)
736 dep::Fmt (m) 854 dep::Fmt (m)
737 dep::fmt::Display (t) 855 dep::fmt::Display (t)
856 dep::fmt::Display::fmt (f)
857 "#]],
858 );
859
860 check_search(
861 ra_fixture,
862 "main",
863 Query::new("fmt".to_string()).name_only(),
864 expect![[r#"
865 dep::fmt (t)
866 dep::Fmt (t)
867 dep::Fmt (v)
868 dep::Fmt (m)
869 dep::fmt::Display::fmt (f)
738 "#]], 870 "#]],
739 ); 871 );
740 } 872 }
@@ -752,7 +884,7 @@ mod tests {
752 check_search( 884 check_search(
753 ra_fixture, 885 ra_fixture,
754 "main", 886 "main",
755 Query::new("FMT"), 887 Query::new("FMT".to_string()),
756 expect![[r#" 888 expect![[r#"
757 dep::fmt (t) 889 dep::fmt (t)
758 dep::fmt (v) 890 dep::fmt (v)
@@ -764,7 +896,7 @@ mod tests {
764 check_search( 896 check_search(
765 ra_fixture, 897 ra_fixture,
766 "main", 898 "main",
767 Query::new("FMT").case_sensitive(), 899 Query::new("FMT".to_string()).case_sensitive(),
768 expect![[r#" 900 expect![[r#"
769 dep::FMT (t) 901 dep::FMT (t)
770 dep::FMT (v) 902 dep::FMT (v)
@@ -793,7 +925,7 @@ mod tests {
793 pub fn no() {} 925 pub fn no() {}
794 "#, 926 "#,
795 "main", 927 "main",
796 Query::new("").limit(2), 928 Query::new("".to_string()).limit(2),
797 expect![[r#" 929 expect![[r#"
798 dep::fmt (t) 930 dep::fmt (t)
799 dep::Fmt (t) 931 dep::Fmt (t)
@@ -814,7 +946,7 @@ mod tests {
814 check_search( 946 check_search(
815 ra_fixture, 947 ra_fixture,
816 "main", 948 "main",
817 Query::new("FMT"), 949 Query::new("FMT".to_string()),
818 expect![[r#" 950 expect![[r#"
819 dep::fmt (t) 951 dep::fmt (t)
820 dep::fmt (v) 952 dep::fmt (v)
@@ -826,7 +958,7 @@ mod tests {
826 check_search( 958 check_search(
827 ra_fixture, 959 ra_fixture,
828 "main", 960 "main",
829 Query::new("FMT").exclude_import_kind(ImportKind::Adt), 961 Query::new("FMT".to_string()).exclude_import_kind(ImportKind::Adt),
830 expect![[r#""#]], 962 expect![[r#""#]],
831 ); 963 );
832 } 964 }
diff --git a/crates/hir_def/src/nameres.rs b/crates/hir_def/src/nameres.rs
index ffd0381d4..5682e122d 100644
--- a/crates/hir_def/src/nameres.rs
+++ b/crates/hir_def/src/nameres.rs
@@ -249,7 +249,7 @@ impl CrateDefMap {
249 buf.push_str(" _"); 249 buf.push_str(" _");
250 } 250 }
251 251
252 buf.push_str("\n"); 252 buf.push('\n');
253 } 253 }
254 254
255 for (name, child) in map.modules[module].children.iter() { 255 for (name, child) in map.modules[module].children.iter() {
@@ -454,7 +454,7 @@ mod diagnostics {
454 }); 454 });
455 for token in tokens { 455 for token in tokens {
456 if token.kind() == SyntaxKind::IDENT 456 if token.kind() == SyntaxKind::IDENT
457 && token.to_string() == *name 457 && token.text() == name.as_str()
458 { 458 {
459 precise_location = Some(token.text_range()); 459 precise_location = Some(token.text_range());
460 break 'outer; 460 break 'outer;
diff --git a/crates/hir_expand/src/builtin_macro.rs b/crates/hir_expand/src/builtin_macro.rs
index 6382521fb..80b60d59f 100644
--- a/crates/hir_expand/src/builtin_macro.rs
+++ b/crates/hir_expand/src/builtin_macro.rs
@@ -259,7 +259,8 @@ fn format_args_expand(
259 } 259 }
260 for arg in &mut args { 260 for arg in &mut args {
261 // Remove `key =`. 261 // Remove `key =`.
262 if matches!(arg.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=') { 262 if matches!(arg.get(1), Some(tt::TokenTree::Leaf(tt::Leaf::Punct(p))) if p.char == '=' && p.spacing != tt::Spacing::Joint)
263 {
263 arg.drain(..2); 264 arg.drain(..2);
264 } 265 }
265 } 266 }
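Note: the added `p.spacing != tt::Spacing::Joint` guard keeps `format_args!` expansion from treating the first `=` of a joint punct sequence such as `==` or `=>` as the `=` of a named `key = value` argument, so only a standalone `=` triggers the `key =` removal. A simplified illustration with local stand-in types (not the real `tt` crate):

// Local stand-ins to show the distinction the guard relies on: in `a == b`
// the first `=` is emitted as a Joint punct, while the `=` of a named
// `key = value` argument is Alone.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Spacing { Alone, Joint }

#[derive(Clone, Copy, Debug)]
struct Punct { char: char, spacing: Spacing }

/// Returns true only for the `=` of a `key = value` named argument.
fn is_named_arg_eq(p: &Punct) -> bool {
    p.char == '=' && p.spacing != Spacing::Joint
}

fn main() {
    let named = Punct { char: '=', spacing: Spacing::Alone }; // `key = value`
    let eq_eq = Punct { char: '=', spacing: Spacing::Joint }; // first `=` of `==`
    assert!(is_named_arg_eq(&named));
    assert!(!is_named_arg_eq(&eq_eq));
}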
diff --git a/crates/hir_ty/Cargo.toml b/crates/hir_ty/Cargo.toml
index 965c1780a..2dfccd191 100644
--- a/crates/hir_ty/Cargo.toml
+++ b/crates/hir_ty/Cargo.toml
@@ -10,7 +10,7 @@ edition = "2018"
10doctest = false 10doctest = false
11 11
12[dependencies] 12[dependencies]
13itertools = "0.9.0" 13itertools = "0.10.0"
14arrayvec = "0.5.1" 14arrayvec = "0.5.1"
15smallvec = "1.2.0" 15smallvec = "1.2.0"
16ena = "0.14.0" 16ena = "0.14.0"
diff --git a/crates/hir_ty/src/infer/expr.rs b/crates/hir_ty/src/infer/expr.rs
index 70a3f3075..f2fc69b2f 100644
--- a/crates/hir_ty/src/infer/expr.rs
+++ b/crates/hir_ty/src/infer/expr.rs
@@ -648,6 +648,8 @@ impl<'a> InferenceContext<'a> {
648 } 648 }
649 Expr::Array(array) => { 649 Expr::Array(array) => {
650 let elem_ty = match &expected.ty { 650 let elem_ty = match &expected.ty {
651 // FIXME: remove when https://github.com/rust-lang/rust/issues/80501 is fixed
652 #[allow(unreachable_patterns)]
651 ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => { 653 ty_app!(TypeCtor::Array, st) | ty_app!(TypeCtor::Slice, st) => {
652 st.as_single().clone() 654 st.as_single().clone()
653 } 655 }
diff --git a/crates/ide/Cargo.toml b/crates/ide/Cargo.toml
index 4d483580d..f1544dbe0 100644
--- a/crates/ide/Cargo.toml
+++ b/crates/ide/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
12[dependencies] 12[dependencies]
13either = "1.5.3" 13either = "1.5.3"
14indexmap = "1.4.0" 14indexmap = "1.4.0"
15itertools = "0.9.0" 15itertools = "0.10.0"
16log = "0.4.8" 16log = "0.4.8"
17rustc-hash = "1.1.0" 17rustc-hash = "1.1.0"
18oorandom = "11.1.2" 18oorandom = "11.1.2"
diff --git a/crates/ide/src/diagnostics.rs b/crates/ide/src/diagnostics.rs
index 038273750..79d126ff2 100644
--- a/crates/ide/src/diagnostics.rs
+++ b/crates/ide/src/diagnostics.rs
@@ -199,6 +199,12 @@ fn check_unnecessary_braces_in_use_statement(
199) -> Option<()> { 199) -> Option<()> {
200 let use_tree_list = ast::UseTreeList::cast(node.clone())?; 200 let use_tree_list = ast::UseTreeList::cast(node.clone())?;
201 if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() { 201 if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
202 // If there is a comment inside the bracketed `use`,
 203 // assume it is a commented-out module path and don't show the diagnostic.
204 if use_tree_list.has_inner_comment() {
205 return Some(());
206 }
207
202 let use_range = use_tree_list.syntax().text_range(); 208 let use_range = use_tree_list.syntax().text_range();
203 let edit = 209 let edit =
204 text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree) 210 text_edit_for_remove_unnecessary_braces_with_self_in_use_statement(&single_use_tree)
@@ -638,6 +644,22 @@ mod a {
638} 644}
639"#, 645"#,
640 ); 646 );
647 check_no_diagnostics(
648 r#"
649use a;
650use a::{
651 c,
652 // d::e
653};
654
655mod a {
656 mod c {}
657 mod d {
658 mod e {}
659 }
660}
661"#,
662 );
641 check_fix( 663 check_fix(
642 r" 664 r"
643 mod b {} 665 mod b {}
diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs
index 41eb139d1..b3331f03f 100644
--- a/crates/ide/src/lib.rs
+++ b/crates/ide/src/lib.rs
@@ -475,7 +475,7 @@ impl Analysis {
475 config: &CompletionConfig, 475 config: &CompletionConfig,
476 position: FilePosition, 476 position: FilePosition,
477 full_import_path: &str, 477 full_import_path: &str,
478 imported_name: &str, 478 imported_name: String,
479 ) -> Cancelable<Vec<TextEdit>> { 479 ) -> Cancelable<Vec<TextEdit>> {
480 Ok(self 480 Ok(self
481 .with_db(|db| { 481 .with_db(|db| {
diff --git a/crates/ide_db/Cargo.toml b/crates/ide_db/Cargo.toml
index 0ad6e1000..ebe53c8ee 100644
--- a/crates/ide_db/Cargo.toml
+++ b/crates/ide_db/Cargo.toml
@@ -19,7 +19,7 @@ fst = { version = "0.4", default-features = false }
19rustc-hash = "1.1.0" 19rustc-hash = "1.1.0"
20once_cell = "1.3.1" 20once_cell = "1.3.1"
21either = "1.6.1" 21either = "1.6.1"
22itertools = "0.9.0" 22itertools = "0.10.0"
23 23
24stdx = { path = "../stdx", version = "0.0.0" } 24stdx = { path = "../stdx", version = "0.0.0" }
25syntax = { path = "../syntax", version = "0.0.0" } 25syntax = { path = "../syntax", version = "0.0.0" }
diff --git a/crates/ide_db/src/imports_locator.rs b/crates/ide_db/src/imports_locator.rs
index b2980a5d6..0f4c2ca47 100644
--- a/crates/ide_db/src/imports_locator.rs
+++ b/crates/ide_db/src/imports_locator.rs
@@ -15,19 +15,23 @@ use rustc_hash::FxHashSet;
15pub fn find_exact_imports<'a>( 15pub fn find_exact_imports<'a>(
16 sema: &Semantics<'a, RootDatabase>, 16 sema: &Semantics<'a, RootDatabase>,
17 krate: Crate, 17 krate: Crate,
18 name_to_import: &str, 18 name_to_import: String,
19) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> { 19) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> {
20 let _p = profile::span("find_exact_imports"); 20 let _p = profile::span("find_exact_imports");
21 find_imports( 21 find_imports(
22 sema, 22 sema,
23 krate, 23 krate,
24 { 24 {
25 let mut local_query = symbol_index::Query::new(name_to_import.to_string()); 25 let mut local_query = symbol_index::Query::new(name_to_import.clone());
26 local_query.exact(); 26 local_query.exact();
27 local_query.limit(40); 27 local_query.limit(40);
28 local_query 28 local_query
29 }, 29 },
30 import_map::Query::new(name_to_import).anchor_end().case_sensitive().limit(40), 30 import_map::Query::new(name_to_import)
31 .limit(40)
32 .name_only()
33 .search_mode(import_map::SearchMode::Equals)
34 .case_sensitive(),
31 ) 35 )
32} 36}
33 37
@@ -35,17 +39,18 @@ pub fn find_similar_imports<'a>(
35 sema: &Semantics<'a, RootDatabase>, 39 sema: &Semantics<'a, RootDatabase>,
36 krate: Crate, 40 krate: Crate,
37 limit: Option<usize>, 41 limit: Option<usize>,
38 name_to_import: &str, 42 fuzzy_search_string: String,
39 ignore_modules: bool, 43 name_only: bool,
40) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> { 44) -> impl Iterator<Item = Either<ModuleDef, MacroDef>> {
41 let _p = profile::span("find_similar_imports"); 45 let _p = profile::span("find_similar_imports");
42 46
43 let mut external_query = import_map::Query::new(name_to_import); 47 let mut external_query = import_map::Query::new(fuzzy_search_string.clone())
44 if ignore_modules { 48 .search_mode(import_map::SearchMode::Fuzzy);
45 external_query = external_query.exclude_import_kind(import_map::ImportKind::Module); 49 if name_only {
50 external_query = external_query.name_only();
46 } 51 }
47 52
48 let mut local_query = symbol_index::Query::new(name_to_import.to_string()); 53 let mut local_query = symbol_index::Query::new(fuzzy_search_string);
49 54
50 if let Some(limit) = limit { 55 if let Some(limit) = limit {
51 local_query.limit(limit); 56 local_query.limit(limit);
diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs
index 3ad609a00..b3472879d 100644
--- a/crates/mbe/src/lib.rs
+++ b/crates/mbe/src/lib.rs
@@ -14,10 +14,10 @@ mod tests;
14 14
15use std::fmt; 15use std::fmt;
16 16
17pub use tt::{Delimiter, Punct}; 17pub use tt::{Delimiter, DelimiterKind, Punct};
18 18
19use crate::{ 19use crate::{
20 parser::{parse_pattern, Op}, 20 parser::{parse_pattern, parse_template, Op},
21 tt_iter::TtIter, 21 tt_iter::TtIter,
22}; 22};
23 23
@@ -78,8 +78,24 @@ pub struct MacroRules {
78 78
79#[derive(Clone, Debug, PartialEq, Eq)] 79#[derive(Clone, Debug, PartialEq, Eq)]
80struct Rule { 80struct Rule {
81 lhs: tt::Subtree, 81 lhs: MetaTemplate,
82 rhs: tt::Subtree, 82 rhs: MetaTemplate,
83}
84
85#[derive(Clone, Debug, PartialEq, Eq)]
86struct MetaTemplate {
87 delimiter: Option<Delimiter>,
88 tokens: Vec<Result<Op, ExpandError>>,
89}
90
91impl<'a> MetaTemplate {
92 fn iter(&self) -> impl Iterator<Item = &Result<Op, ExpandError>> {
93 self.tokens.iter()
94 }
95
96 fn delimiter_kind(&self) -> Option<DelimiterKind> {
97 self.delimiter.map(|it| it.kind)
98 }
83} 99}
84 100
85#[derive(Clone, Copy, Debug, PartialEq, Eq)] 101#[derive(Clone, Copy, Debug, PartialEq, Eq)]
@@ -167,7 +183,7 @@ impl MacroRules {
167 rules.push(rule); 183 rules.push(rule);
168 if let Err(()) = src.expect_char(';') { 184 if let Err(()) = src.expect_char(';') {
169 if src.len() > 0 { 185 if src.len() > 0 {
170 return Err(ParseError::Expected("expected `:`".to_string())); 186 return Err(ParseError::Expected("expected `;`".to_string()));
171 } 187 }
172 break; 188 break;
173 } 189 }
@@ -201,23 +217,23 @@ impl MacroRules {
201 217
202impl Rule { 218impl Rule {
203 fn parse(src: &mut TtIter) -> Result<Rule, ParseError> { 219 fn parse(src: &mut TtIter) -> Result<Rule, ParseError> {
204 let mut lhs = src 220 let lhs = src
205 .expect_subtree() 221 .expect_subtree()
206 .map_err(|()| ParseError::Expected("expected subtree".to_string()))? 222 .map_err(|()| ParseError::Expected("expected subtree".to_string()))?;
207 .clone();
208 lhs.delimiter = None;
209 src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?; 223 src.expect_char('=').map_err(|()| ParseError::Expected("expected `=`".to_string()))?;
210 src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?; 224 src.expect_char('>').map_err(|()| ParseError::Expected("expected `>`".to_string()))?;
211 let mut rhs = src 225 let rhs = src
212 .expect_subtree() 226 .expect_subtree()
213 .map_err(|()| ParseError::Expected("expected subtree".to_string()))? 227 .map_err(|()| ParseError::Expected("expected subtree".to_string()))?;
214 .clone(); 228
215 rhs.delimiter = None; 229 let lhs = MetaTemplate { tokens: parse_pattern(&lhs), delimiter: None };
230 let rhs = MetaTemplate { tokens: parse_template(&rhs), delimiter: None };
231
216 Ok(crate::Rule { lhs, rhs }) 232 Ok(crate::Rule { lhs, rhs })
217 } 233 }
218} 234}
219 235
220fn to_parse_error(e: ExpandError) -> ParseError { 236fn to_parse_error(e: &ExpandError) -> ParseError {
221 let msg = match e { 237 let msg = match e {
222 ExpandError::InvalidRepeat => "invalid repeat".to_string(), 238 ExpandError::InvalidRepeat => "invalid repeat".to_string(),
223 _ => "invalid macro definition".to_string(), 239 _ => "invalid macro definition".to_string(),
@@ -225,22 +241,22 @@ fn to_parse_error(e: ExpandError) -> ParseError {
225 ParseError::Expected(msg) 241 ParseError::Expected(msg)
226} 242}
227 243
228fn validate(pattern: &tt::Subtree) -> Result<(), ParseError> { 244fn validate(pattern: &MetaTemplate) -> Result<(), ParseError> {
229 for op in parse_pattern(pattern) { 245 for op in pattern.iter() {
230 let op = op.map_err(to_parse_error)?; 246 let op = op.as_ref().map_err(|e| to_parse_error(&e))?;
231 247
232 match op { 248 match op {
233 Op::TokenTree(tt::TokenTree::Subtree(subtree)) => validate(subtree)?, 249 Op::Subtree(subtree) => validate(&subtree)?,
234 Op::Repeat { subtree, separator, .. } => { 250 Op::Repeat { subtree, separator, .. } => {
235 // Checks that no repetition which could match an empty token 251 // Checks that no repetition which could match an empty token
236 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558 252 // https://github.com/rust-lang/rust/blob/a58b1ed44f5e06976de2bdc4d7dc81c36a96934f/src/librustc_expand/mbe/macro_rules.rs#L558
237 253
238 if separator.is_none() { 254 if separator.is_none() {
239 if parse_pattern(subtree).all(|child_op| { 255 if subtree.iter().all(|child_op| {
240 match child_op.map_err(to_parse_error) { 256 match child_op.as_ref().map_err(to_parse_error) {
241 Ok(Op::Var { kind, .. }) => { 257 Ok(Op::Var { kind, .. }) => {
242 // vis is optional 258 // vis is optional
243 if kind.map_or(false, |it| it == "vis") { 259 if kind.as_ref().map_or(false, |it| it == "vis") {
244 return true; 260 return true;
245 } 261 }
246 } 262 }
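Note: the `Rule`/`MetaTemplate` change above parses each rule's token trees once, in `Rule::parse`, into owned `Op`s that matching and transcription later just iterate over, instead of re-parsing a `tt::Subtree` on every use. A simplified, self-contained sketch of that layout; every type below is a stand-in for the real `mbe`/`tt` items:

// "Parse once, iterate many times": errors are stored per-op and only
// surfaced when the rule is actually used.
#[derive(Clone, Debug)]
enum Op {
    Var { name: String, kind: Option<String> },
    Leaf(String),
    Subtree(MetaTemplate),
}

#[derive(Clone, Debug)]
struct MetaTemplate {
    tokens: Vec<Result<Op, String>>, // `String` stands in for `ExpandError`
}

impl MetaTemplate {
    fn iter(&self) -> impl Iterator<Item = &Result<Op, String>> {
        self.tokens.iter()
    }
}

fn main() {
    // Roughly what `Rule::parse` would produce for a pattern like `$i:ident ,`.
    let lhs = MetaTemplate {
        tokens: vec![
            Ok(Op::Var { name: "i".into(), kind: Some("ident".into()) }),
            Ok(Op::Leaf(",".into())),
        ],
    };
    // Both the matcher and the transcriber can now walk the same pre-parsed ops.
    for op in lhs.iter() {
        println!("{:?}", op);
    }
}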
diff --git a/crates/mbe/src/mbe_expander/matcher.rs b/crates/mbe/src/mbe_expander/matcher.rs
index 44722c0f1..ab5f87c48 100644
--- a/crates/mbe/src/mbe_expander/matcher.rs
+++ b/crates/mbe/src/mbe_expander/matcher.rs
@@ -2,10 +2,10 @@
2 2
3use crate::{ 3use crate::{
4 mbe_expander::{Binding, Bindings, Fragment}, 4 mbe_expander::{Binding, Bindings, Fragment},
5 parser::{parse_pattern, Op, RepeatKind, Separator}, 5 parser::{Op, RepeatKind, Separator},
6 subtree_source::SubtreeTokenSource, 6 subtree_source::SubtreeTokenSource,
7 tt_iter::TtIter, 7 tt_iter::TtIter,
8 ExpandError, 8 ExpandError, MetaTemplate,
9}; 9};
10 10
11use super::ExpandResult; 11use super::ExpandResult;
@@ -83,7 +83,7 @@ impl Match {
83// sense to try using it. Matching errors are added to the `Match`. It might 83// sense to try using it. Matching errors are added to the `Match`. It might
84// make sense to make pattern parsing a separate step? 84// make sense to make pattern parsing a separate step?
85 85
86pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Match, ExpandError> { 86pub(super) fn match_(pattern: &MetaTemplate, src: &tt::Subtree) -> Result<Match, ExpandError> {
87 assert!(pattern.delimiter == None); 87 assert!(pattern.delimiter == None);
88 88
89 let mut res = Match::default(); 89 let mut res = Match::default();
@@ -101,12 +101,12 @@ pub(super) fn match_(pattern: &tt::Subtree, src: &tt::Subtree) -> Result<Match,
101 101
102fn match_subtree( 102fn match_subtree(
103 res: &mut Match, 103 res: &mut Match,
104 pattern: &tt::Subtree, 104 pattern: &MetaTemplate,
105 src: &mut TtIter, 105 src: &mut TtIter,
106) -> Result<(), ExpandError> { 106) -> Result<(), ExpandError> {
107 for op in parse_pattern(pattern) { 107 for op in pattern.iter() {
108 match op? { 108 match op.as_ref().map_err(|err| err.clone())? {
109 Op::TokenTree(tt::TokenTree::Leaf(lhs)) => { 109 Op::Leaf(lhs) => {
110 let rhs = match src.expect_leaf() { 110 let rhs = match src.expect_leaf() {
111 Ok(l) => l, 111 Ok(l) => l,
112 Err(()) => { 112 Err(()) => {
@@ -132,7 +132,7 @@ fn match_subtree(
132 } 132 }
133 } 133 }
134 } 134 }
135 Op::TokenTree(tt::TokenTree::Subtree(lhs)) => { 135 Op::Subtree(lhs) => {
136 let rhs = match src.expect_subtree() { 136 let rhs = match src.expect_subtree() {
137 Ok(s) => s, 137 Ok(s) => s,
138 Err(()) => { 138 Err(()) => {
@@ -172,7 +172,7 @@ fn match_subtree(
172 } 172 }
173 } 173 }
174 Op::Repeat { subtree, kind, separator } => { 174 Op::Repeat { subtree, kind, separator } => {
175 match_repeat(res, subtree, kind, separator, src)?; 175 match_repeat(res, subtree, *kind, separator, src)?;
176 } 176 }
177 } 177 }
178 } 178 }
@@ -372,9 +372,9 @@ impl<'a> TtIter<'a> {
372 372
373pub(super) fn match_repeat( 373pub(super) fn match_repeat(
374 res: &mut Match, 374 res: &mut Match,
375 pattern: &tt::Subtree, 375 pattern: &MetaTemplate,
376 kind: RepeatKind, 376 kind: RepeatKind,
377 separator: Option<Separator>, 377 separator: &Option<Separator>,
378 src: &mut TtIter, 378 src: &mut TtIter,
379) -> Result<(), ExpandError> { 379) -> Result<(), ExpandError> {
380 // Dirty hack to make macro-expansion terminate. 380 // Dirty hack to make macro-expansion terminate.
@@ -489,12 +489,12 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
489 result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) }) 489 result.map(|tt| if kind == "expr" { tt.map(Fragment::Ast) } else { tt.map(Fragment::Tokens) })
490} 490}
491 491
492fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &tt::Subtree) -> Result<(), ExpandError> { 492fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) -> Result<(), ExpandError> {
493 for op in parse_pattern(pattern) { 493 for op in pattern.iter() {
494 match op? { 494 match op.as_ref().map_err(|e| e.clone())? {
495 Op::Var { name, .. } => buf.push(name.clone()), 495 Op::Var { name, .. } => buf.push(name.clone()),
496 Op::TokenTree(tt::TokenTree::Leaf(_)) => (), 496 Op::Leaf(_) => (),
497 Op::TokenTree(tt::TokenTree::Subtree(subtree)) => collect_vars(buf, subtree)?, 497 Op::Subtree(subtree) => collect_vars(buf, subtree)?,
498 Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?, 498 Op::Repeat { subtree, .. } => collect_vars(buf, subtree)?,
499 } 499 }
500 } 500 }
diff --git a/crates/mbe/src/mbe_expander/transcriber.rs b/crates/mbe/src/mbe_expander/transcriber.rs
index 57592dc92..720531237 100644
--- a/crates/mbe/src/mbe_expander/transcriber.rs
+++ b/crates/mbe/src/mbe_expander/transcriber.rs
@@ -6,8 +6,8 @@ use syntax::SmolStr;
6use super::ExpandResult; 6use super::ExpandResult;
7use crate::{ 7use crate::{
8 mbe_expander::{Binding, Bindings, Fragment}, 8 mbe_expander::{Binding, Bindings, Fragment},
9 parser::{parse_template, Op, RepeatKind, Separator}, 9 parser::{Op, RepeatKind, Separator},
10 ExpandError, 10 ExpandError, MetaTemplate,
11}; 11};
12 12
13impl Bindings { 13impl Bindings {
@@ -50,7 +50,10 @@ impl Bindings {
50 } 50 }
51} 51}
52 52
53pub(super) fn transcribe(template: &tt::Subtree, bindings: &Bindings) -> ExpandResult<tt::Subtree> { 53pub(super) fn transcribe(
54 template: &MetaTemplate,
55 bindings: &Bindings,
56) -> ExpandResult<tt::Subtree> {
54 assert!(template.delimiter == None); 57 assert!(template.delimiter == None);
55 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() }; 58 let mut ctx = ExpandCtx { bindings: &bindings, nesting: Vec::new() };
56 let mut arena: Vec<tt::TokenTree> = Vec::new(); 59 let mut arena: Vec<tt::TokenTree> = Vec::new();
@@ -76,35 +79,35 @@ struct ExpandCtx<'a> {
76 79
77fn expand_subtree( 80fn expand_subtree(
78 ctx: &mut ExpandCtx, 81 ctx: &mut ExpandCtx,
79 template: &tt::Subtree, 82 template: &MetaTemplate,
80 arena: &mut Vec<tt::TokenTree>, 83 arena: &mut Vec<tt::TokenTree>,
81) -> ExpandResult<tt::Subtree> { 84) -> ExpandResult<tt::Subtree> {
82 // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation 85 // remember how many elements are in the arena now - when returning, we want to drain exactly how many elements we added. This way, the recursive uses of the arena get their own "view" of the arena, but will reuse the allocation
83 let start_elements = arena.len(); 86 let start_elements = arena.len();
84 let mut err = None; 87 let mut err = None;
85 for op in parse_template(template) { 88 for op in template.iter() {
86 let op = match op { 89 let op = match op {
87 Ok(op) => op, 90 Ok(op) => op,
88 Err(e) => { 91 Err(e) => {
89 err = Some(e); 92 err = Some(e.clone());
90 break; 93 break;
91 } 94 }
92 }; 95 };
93 match op { 96 match op {
94 Op::TokenTree(tt @ tt::TokenTree::Leaf(..)) => arena.push(tt.clone()), 97 Op::Leaf(tt) => arena.push(tt.clone().into()),
95 Op::TokenTree(tt::TokenTree::Subtree(tt)) => { 98 Op::Subtree(tt) => {
96 let ExpandResult { value: tt, err: e } = expand_subtree(ctx, tt, arena); 99 let ExpandResult { value: tt, err: e } = expand_subtree(ctx, &tt, arena);
97 err = err.or(e); 100 err = err.or(e);
98 arena.push(tt.into()); 101 arena.push(tt.into());
99 } 102 }
100 Op::Var { name, .. } => { 103 Op::Var { name, .. } => {
101 let ExpandResult { value: fragment, err: e } = expand_var(ctx, name); 104 let ExpandResult { value: fragment, err: e } = expand_var(ctx, &name);
102 err = err.or(e); 105 err = err.or(e);
103 push_fragment(arena, fragment); 106 push_fragment(arena, fragment);
104 } 107 }
105 Op::Repeat { subtree, kind, separator } => { 108 Op::Repeat { subtree, kind, separator } => {
106 let ExpandResult { value: fragment, err: e } = 109 let ExpandResult { value: fragment, err: e } =
107 expand_repeat(ctx, subtree, kind, separator, arena); 110 expand_repeat(ctx, subtree, *kind, separator, arena);
108 err = err.or(e); 111 err = err.or(e);
109 push_fragment(arena, fragment) 112 push_fragment(arena, fragment)
110 } 113 }
@@ -161,9 +164,9 @@ fn expand_var(ctx: &mut ExpandCtx, v: &SmolStr) -> ExpandResult<Fragment> {
161 164
162fn expand_repeat( 165fn expand_repeat(
163 ctx: &mut ExpandCtx, 166 ctx: &mut ExpandCtx,
164 template: &tt::Subtree, 167 template: &MetaTemplate,
165 kind: RepeatKind, 168 kind: RepeatKind,
166 separator: Option<Separator>, 169 separator: &Option<Separator>,
167 arena: &mut Vec<tt::TokenTree>, 170 arena: &mut Vec<tt::TokenTree>,
168) -> ExpandResult<Fragment> { 171) -> ExpandResult<Fragment> {
169 let mut buf: Vec<tt::TokenTree> = Vec::new(); 172 let mut buf: Vec<tt::TokenTree> = Vec::new();
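The transcriber now takes a pre-parsed MetaTemplate and walks its owned ops via template.iter() instead of re-running parse_template on a tt::Subtree during every expansion; because the ops are owned, errors are cloned out rather than borrowed. A minimal, self-contained sketch of that shape, using invented ToyOp/ToyTemplate/ToyError types rather than rust-analyzer's real ones:

    // Sketch only: ToyOp/ToyTemplate/ToyError are invented stand-ins for Op,
    // MetaTemplate and ExpandError. The point is the shape: parse once into owned
    // ops, then every expansion just iterates the cached ops.
    use std::collections::HashMap;

    #[derive(Clone, Debug)]
    enum ToyOp {
        Leaf(String),          // a literal token
        Var { name: String },  // a `$name` placeholder
        Subtree(ToyTemplate),  // a nested, already-parsed template
    }

    #[derive(Clone, Debug)]
    struct ToyTemplate {
        ops: Vec<Result<ToyOp, ToyError>>,
    }

    #[derive(Clone, Debug)]
    struct ToyError(String);

    impl ToyTemplate {
        // Parse up front, once per macro definition.
        fn parse(src: &str) -> ToyTemplate {
            let ops = src
                .split_whitespace()
                .map(|tok| match tok.strip_prefix('$') {
                    Some(name) => Ok(ToyOp::Var { name: name.to_string() }),
                    None => Ok(ToyOp::Leaf(tok.to_string())),
                })
                .collect();
            ToyTemplate { ops }
        }

        fn iter(&self) -> impl Iterator<Item = &Result<ToyOp, ToyError>> {
            self.ops.iter()
        }
    }

    // Expansion walks the cached ops; errors are cloned, mirroring `e.clone()` above.
    fn transcribe(template: &ToyTemplate, bindings: &HashMap<String, String>) -> Result<String, ToyError> {
        let mut out = Vec::new();
        for op in template.iter() {
            match op {
                Ok(ToyOp::Leaf(text)) => out.push(text.clone()),
                Ok(ToyOp::Var { name }) => out.push(bindings.get(name).cloned().unwrap_or_default()),
                Ok(ToyOp::Subtree(sub)) => out.push(transcribe(sub, bindings)?),
                Err(e) => return Err(e.clone()),
            }
        }
        Ok(out.join(" "))
    }

    fn main() {
        let template = ToyTemplate::parse("fn $name ( ) { }");
        let bindings: HashMap<_, _> = [("name".to_string(), "foo".to_string())].into();
        assert_eq!(transcribe(&template, &bindings).unwrap(), "fn foo ( ) { }");
    }
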
diff --git a/crates/mbe/src/parser.rs b/crates/mbe/src/parser.rs
index d681905f5..2f3ebc831 100644
--- a/crates/mbe/src/parser.rs
+++ b/crates/mbe/src/parser.rs
@@ -4,16 +4,17 @@
4use smallvec::SmallVec; 4use smallvec::SmallVec;
5use syntax::SmolStr; 5use syntax::SmolStr;
6 6
7use crate::{tt_iter::TtIter, ExpandError}; 7use crate::{tt_iter::TtIter, ExpandError, MetaTemplate};
8 8
9#[derive(Debug)] 9#[derive(Clone, Debug, PartialEq, Eq)]
10pub(crate) enum Op<'a> { 10pub(crate) enum Op {
11 Var { name: &'a SmolStr, kind: Option<&'a SmolStr> }, 11 Var { name: SmolStr, kind: Option<SmolStr> },
12 Repeat { subtree: &'a tt::Subtree, kind: RepeatKind, separator: Option<Separator> }, 12 Repeat { subtree: MetaTemplate, kind: RepeatKind, separator: Option<Separator> },
13 TokenTree(&'a tt::TokenTree), 13 Leaf(tt::Leaf),
14 Subtree(MetaTemplate),
14} 15}
15 16
16#[derive(Clone, Debug, PartialEq, Eq)] 17#[derive(Copy, Clone, Debug, PartialEq, Eq)]
17pub(crate) enum RepeatKind { 18pub(crate) enum RepeatKind {
18 ZeroOrMore, 19 ZeroOrMore,
19 OneOrMore, 20 OneOrMore,
@@ -45,16 +46,12 @@ impl PartialEq for Separator {
45 } 46 }
46} 47}
47 48
48pub(crate) fn parse_template( 49pub(crate) fn parse_template(template: &tt::Subtree) -> Vec<Result<Op, ExpandError>> {
49 template: &tt::Subtree, 50 parse_inner(&template, Mode::Template)
50) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
51 parse_inner(template, Mode::Template)
52} 51}
53 52
54pub(crate) fn parse_pattern( 53pub(crate) fn parse_pattern(pattern: &tt::Subtree) -> Vec<Result<Op, ExpandError>> {
55 pattern: &tt::Subtree, 54 parse_inner(&pattern, Mode::Pattern)
56) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> {
57 parse_inner(pattern, Mode::Pattern)
58} 55}
59 56
60#[derive(Clone, Copy)] 57#[derive(Clone, Copy)]
@@ -63,12 +60,13 @@ enum Mode {
63 Template, 60 Template,
64} 61}
65 62
66fn parse_inner(src: &tt::Subtree, mode: Mode) -> impl Iterator<Item = Result<Op<'_>, ExpandError>> { 63fn parse_inner(tt: &tt::Subtree, mode: Mode) -> Vec<Result<Op, ExpandError>> {
67 let mut src = TtIter::new(src); 64 let mut src = TtIter::new(&tt);
68 std::iter::from_fn(move || { 65 std::iter::from_fn(move || {
69 let first = src.next()?; 66 let first = src.next()?;
70 Some(next_op(first, &mut src, mode)) 67 Some(next_op(first, &mut src, mode))
71 }) 68 })
69 .collect()
72} 70}
73 71
74macro_rules! err { 72macro_rules! err {
@@ -83,21 +81,20 @@ macro_rules! bail {
83 }; 81 };
84} 82}
85 83
86fn next_op<'a>( 84fn next_op<'a>(first: &tt::TokenTree, src: &mut TtIter<'a>, mode: Mode) -> Result<Op, ExpandError> {
87 first: &'a tt::TokenTree,
88 src: &mut TtIter<'a>,
89 mode: Mode,
90) -> Result<Op<'a>, ExpandError> {
91 let res = match first { 85 let res = match first {
92 tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { char: '$', .. })) => { 86 tt::TokenTree::Leaf(leaf @ tt::Leaf::Punct(tt::Punct { char: '$', .. })) => {
93 // Note that the '$' itself is a valid token inside macro_rules. 87 // Note that the '$' itself is a valid token inside macro_rules.
94 let second = match src.next() { 88 let second = match src.next() {
95 None => return Ok(Op::TokenTree(first)), 89 None => return Ok(Op::Leaf(leaf.clone())),
96 Some(it) => it, 90 Some(it) => it,
97 }; 91 };
98 match second { 92 match second {
99 tt::TokenTree::Subtree(subtree) => { 93 tt::TokenTree::Subtree(subtree) => {
100 let (separator, kind) = parse_repeat(src)?; 94 let (separator, kind) = parse_repeat(src)?;
95 let delimiter = subtree.delimiter;
96 let tokens = parse_inner(&subtree, mode);
97 let subtree = MetaTemplate { tokens, delimiter };
101 Op::Repeat { subtree, separator, kind } 98 Op::Repeat { subtree, separator, kind }
102 } 99 }
103 tt::TokenTree::Leaf(leaf) => match leaf { 100 tt::TokenTree::Leaf(leaf) => match leaf {
@@ -107,18 +104,18 @@ fn next_op<'a>(
107 if punct.char != '_' { 104 if punct.char != '_' {
108 return Err(ExpandError::UnexpectedToken); 105 return Err(ExpandError::UnexpectedToken);
109 } 106 }
110 let name = &UNDERSCORE; 107 let name = UNDERSCORE.clone();
111 let kind = eat_fragment_kind(src, mode)?; 108 let kind = eat_fragment_kind(src, mode)?;
112 Op::Var { name, kind } 109 Op::Var { name, kind }
113 } 110 }
114 tt::Leaf::Ident(ident) => { 111 tt::Leaf::Ident(ident) => {
115 let name = &ident.text; 112 let name = ident.text.clone();
116 let kind = eat_fragment_kind(src, mode)?; 113 let kind = eat_fragment_kind(src, mode)?;
117 Op::Var { name, kind } 114 Op::Var { name, kind }
118 } 115 }
119 tt::Leaf::Literal(lit) => { 116 tt::Leaf::Literal(lit) => {
120 if is_boolean_literal(lit) { 117 if is_boolean_literal(&lit) {
121 let name = &lit.text; 118 let name = lit.text.clone();
122 let kind = eat_fragment_kind(src, mode)?; 119 let kind = eat_fragment_kind(src, mode)?;
123 Op::Var { name, kind } 120 Op::Var { name, kind }
124 } else { 121 } else {
@@ -128,19 +125,22 @@ fn next_op<'a>(
128 }, 125 },
129 } 126 }
130 } 127 }
131 tt => Op::TokenTree(tt), 128 tt::TokenTree::Leaf(tt) => Op::Leaf(tt.clone()),
129 tt::TokenTree::Subtree(subtree) => {
130 let delimiter = subtree.delimiter;
131 let tokens = parse_inner(&subtree, mode);
132 let subtree = MetaTemplate { tokens, delimiter };
133 Op::Subtree(subtree)
134 }
132 }; 135 };
133 Ok(res) 136 Ok(res)
134} 137}
135 138
136fn eat_fragment_kind<'a>( 139fn eat_fragment_kind<'a>(src: &mut TtIter<'a>, mode: Mode) -> Result<Option<SmolStr>, ExpandError> {
137 src: &mut TtIter<'a>,
138 mode: Mode,
139) -> Result<Option<&'a SmolStr>, ExpandError> {
140 if let Mode::Pattern = mode { 140 if let Mode::Pattern = mode {
141 src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?; 141 src.expect_char(':').map_err(|()| err!("bad fragment specifier 1"))?;
142 let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?; 142 let ident = src.expect_ident().map_err(|()| err!("bad fragment specifier 1"))?;
143 return Ok(Some(&ident.text)); 143 return Ok(Some(ident.text.clone()));
144 }; 144 };
145 Ok(None) 145 Ok(None)
146} 146}
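The other half of the change is in the parser: Op drops its lifetime parameter, variable names and fragment kinds become owned SmolStrs, repeat and subtree bodies become MetaTemplates, and parse_template/parse_pattern now collect into a Vec. A small sketch of why ownership matters here, using invented BorrowedOp/OwnedOp types and plain Strings in place of SmolStr: the owned ops can outlive the tokens they were parsed from, which is what makes caching them in a MetaTemplate possible.

    // BorrowedOp is how the old API looked (tied to the source's lifetime);
    // OwnedOp is the shape after this patch. Both names are invented for illustration.

    #[allow(dead_code)]
    #[derive(Debug)]
    enum BorrowedOp<'a> {
        Var { name: &'a str },
        Leaf(&'a str),
    }

    #[derive(Clone, Debug, PartialEq, Eq)]
    enum OwnedOp {
        Var { name: String },
        Leaf(String),
    }

    fn parse_owned(src: &str) -> Vec<OwnedOp> {
        src.split_whitespace()
            .map(|tok| match tok.strip_prefix('$') {
                Some(name) => OwnedOp::Var { name: name.to_string() },
                None => OwnedOp::Leaf(tok.to_string()),
            })
            .collect()
    }

    fn main() {
        // The parsed ops are still usable after the source tokens are gone,
        // so they can be stored in a long-lived template (the MetaTemplate role).
        let ops = {
            let source = String::from("fn $name ( ) { }");
            parse_owned(&source)
        };
        assert_eq!(ops[1], OwnedOp::Var { name: "name".into() });
    }
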
diff --git a/crates/proc_macro_srv/src/rustc_server.rs b/crates/proc_macro_srv/src/rustc_server.rs
index 503f4c101..b54aa1f3b 100644
--- a/crates/proc_macro_srv/src/rustc_server.rs
+++ b/crates/proc_macro_srv/src/rustc_server.rs
@@ -204,17 +204,18 @@ pub mod token_stream {
204 let content = subtree 204 let content = subtree
205 .token_trees 205 .token_trees
206 .iter() 206 .iter()
207 .map(|tkn| { 207 .fold((String::new(), true), |(last, last_to_joint), tkn| {
208 let s = to_text(tkn); 208 let s = [last, to_text(tkn)].join(if last_to_joint { "" } else { " " });
209 let mut is_joint = false;
209 if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn { 210 if let tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) = tkn {
210 if punct.spacing == tt::Spacing::Alone { 211 if punct.spacing == tt::Spacing::Joint {
211 return s + " "; 212 is_joint = true;
212 } 213 }
213 } 214 }
214 s 215 (s, is_joint)
215 }) 216 })
216 .collect::<Vec<_>>() 217 .0;
217 .concat(); 218
218 let (open, close) = match subtree.delimiter.map(|it| it.kind) { 219 let (open, close) = match subtree.delimiter.map(|it| it.kind) {
219 None => ("", ""), 220 None => ("", ""),
220 Some(tt::DelimiterKind::Brace) => ("{", "}"), 221 Some(tt::DelimiterKind::Brace) => ("{", "}"),
@@ -710,4 +711,32 @@ mod tests {
710 assert_eq!(srv.character('c').text, "'c'"); 711 assert_eq!(srv.character('c').text, "'c'");
711 assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\""); 712 assert_eq!(srv.byte_string(b"1234586\x88").text, "b\"1234586\\x88\"");
712 } 713 }
714
715 #[test]
716 fn test_rustc_server_to_string() {
717 let s = TokenStream {
718 subtree: tt::Subtree {
719 delimiter: None,
720 token_trees: vec![
721 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
722 text: "struct".into(),
723 id: tt::TokenId::unspecified(),
724 })),
725 tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
726 text: "T".into(),
727 id: tt::TokenId::unspecified(),
728 })),
729 tt::TokenTree::Subtree(tt::Subtree {
730 delimiter: Some(tt::Delimiter {
731 id: tt::TokenId::unspecified(),
732 kind: tt::DelimiterKind::Brace,
733 }),
734 token_trees: vec![],
735 }),
736 ],
737 },
738 };
739
740 assert_eq!(s.to_string(), "struct T {}");
741 }
713} 742}
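The to_string change replaces the old rule, which appended a space after every Alone punct, with a fold that remembers whether the previous token was a Joint punct and only then glues the next token on without a space; the new test pins down the "struct T {}" rendering. A stand-alone sketch of the same fold with a simplified token type (ToyToken and the local Spacing enum are invented here, not the proc-macro server's types):

    // A space separates consecutive tokens unless the previous token was a punct
    // with `Joint` spacing, so `:`+`:` prints as "::" while idents stay separated.
    #[derive(Clone, Copy)]
    enum Spacing {
        Alone,
        Joint,
    }

    enum ToyToken {
        Ident(&'static str),
        Punct(char, Spacing),
    }

    fn to_text(t: &ToyToken) -> String {
        match t {
            ToyToken::Ident(s) => s.to_string(),
            ToyToken::Punct(c, _) => c.to_string(),
        }
    }

    fn render(tokens: &[ToyToken]) -> String {
        tokens
            .iter()
            .fold((String::new(), true), |(last, last_was_joint), tkn| {
                // Join without a space if the previous punct was `Joint` (or at the start).
                let s = [last, to_text(tkn)].join(if last_was_joint { "" } else { " " });
                let is_joint = matches!(tkn, ToyToken::Punct(_, Spacing::Joint));
                (s, is_joint)
            })
            .0
    }

    fn main() {
        use Spacing::*;
        let tokens = [
            ToyToken::Ident("std"),
            ToyToken::Punct(':', Joint),
            ToyToken::Punct(':', Alone),
            ToyToken::Ident("mem"),
        ];
        // The joint `:` glues to the next `:`; everything else gets a separating space.
        assert_eq!(render(&tokens), "std :: mem");
    }
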
diff --git a/crates/project_model/Cargo.toml b/crates/project_model/Cargo.toml
index c55e85709..a65e42261 100644
--- a/crates/project_model/Cargo.toml
+++ b/crates/project_model/Cargo.toml
@@ -16,7 +16,7 @@ cargo_metadata = "=0.12.0"
16serde = { version = "1.0.106", features = ["derive"] } 16serde = { version = "1.0.106", features = ["derive"] }
17serde_json = "1.0.48" 17serde_json = "1.0.48"
18anyhow = "1.0.26" 18anyhow = "1.0.26"
19itertools = "0.9.0" 19itertools = "0.10.0"
20 20
21arena = { path = "../arena", version = "0.0.0" } 21arena = { path = "../arena", version = "0.0.0" }
22cfg = { path = "../cfg", version = "0.0.0" } 22cfg = { path = "../cfg", version = "0.0.0" }
diff --git a/crates/project_model/src/cargo_workspace.rs b/crates/project_model/src/cargo_workspace.rs
index 3e0b40cbc..1700cb8a7 100644
--- a/crates/project_model/src/cargo_workspace.rs
+++ b/crates/project_model/src/cargo_workspace.rs
@@ -1,6 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use std::{ 3use std::{
4 convert::TryInto,
4 ffi::OsStr, 5 ffi::OsStr,
5 ops, 6 ops,
6 path::{Path, PathBuf}, 7 path::{Path, PathBuf},
@@ -196,8 +197,23 @@ impl CargoWorkspace {
196 if let Some(target) = target { 197 if let Some(target) = target {
197 meta.other_options(vec![String::from("--filter-platform"), target]); 198 meta.other_options(vec![String::from("--filter-platform"), target]);
198 } 199 }
200
199 let mut meta = meta.exec().with_context(|| { 201 let mut meta = meta.exec().with_context(|| {
200 format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display()) 202 let cwd: Option<AbsPathBuf> =
203 std::env::current_dir().ok().and_then(|p| p.try_into().ok());
204
205 let workdir = cargo_toml
206 .parent()
207 .map(|p| p.to_path_buf())
208 .or(cwd)
209 .map(|dir| dir.to_string_lossy().to_string())
210 .unwrap_or_else(|| "<failed to get path>".into());
211
212 format!(
213 "Failed to run `cargo metadata --manifest-path {}` in `{}`",
214 cargo_toml.display(),
215 workdir
216 )
201 })?; 217 })?;
202 218
203 let mut out_dir_by_id = FxHashMap::default(); 219 let mut out_dir_by_id = FxHashMap::default();
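The cargo metadata failure message now also names the directory it ran in, taken from the manifest's parent with the process's current directory as a fallback (converted through AbsPathBuf in the real code). A std-only sketch of that fallback chain, with the anyhow with_context wrapper left out:

    // Pick a directory to show in the error: manifest parent -> current dir -> placeholder.
    // This is a simplified illustration; the real code also converts to AbsPathBuf.
    use std::path::Path;

    fn describe_workdir(cargo_toml: &Path) -> String {
        cargo_toml
            .parent()
            .map(|p| p.to_path_buf())
            .or_else(|| std::env::current_dir().ok())
            .map(|dir| dir.to_string_lossy().to_string())
            .unwrap_or_else(|| "<failed to get path>".into())
    }

    fn main() {
        let manifest = Path::new("/work/project/Cargo.toml");
        let msg = format!(
            "Failed to run `cargo metadata --manifest-path {}` in `{}`",
            manifest.display(),
            describe_workdir(manifest)
        );
        println!("{}", msg);
    }
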
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index 53e70eaf7..0a63593fb 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -17,8 +17,9 @@ path = "src/bin/main.rs"
17[dependencies] 17[dependencies]
18anyhow = "1.0.26" 18anyhow = "1.0.26"
19crossbeam-channel = "0.5.0" 19crossbeam-channel = "0.5.0"
20dissimilar = "1.0.2"
20env_logger = { version = "0.8.1", default-features = false } 21env_logger = { version = "0.8.1", default-features = false }
21itertools = "0.9.0" 22itertools = "0.10.0"
22jod-thread = "0.1.0" 23jod-thread = "0.1.0"
23log = "0.4.8" 24log = "0.4.8"
24lsp-types = { version = "0.86.0", features = ["proposed"] } 25lsp-types = { version = "0.86.0", features = ["proposed"] }
diff --git a/crates/rust-analyzer/src/diff.rs b/crates/rust-analyzer/src/diff.rs
new file mode 100644
index 000000000..231be5807
--- /dev/null
+++ b/crates/rust-analyzer/src/diff.rs
@@ -0,0 +1,53 @@
1//! Generate minimal `TextEdit`s from different text versions
2use dissimilar::Chunk;
3use ide::{TextEdit, TextRange, TextSize};
4
5pub(crate) fn diff(left: &str, right: &str) -> TextEdit {
6 let chunks = dissimilar::diff(left, right);
7 textedit_from_chunks(chunks)
8}
9
10fn textedit_from_chunks(chunks: Vec<dissimilar::Chunk>) -> TextEdit {
11 let mut builder = TextEdit::builder();
12 let mut pos = TextSize::default();
13
14 let mut chunks = chunks.into_iter().peekable();
15 while let Some(chunk) = chunks.next() {
16 if let (Chunk::Delete(deleted), Some(&Chunk::Insert(inserted))) = (chunk, chunks.peek()) {
17 chunks.next().unwrap();
18 let deleted_len = TextSize::of(deleted);
19 builder.replace(TextRange::at(pos, deleted_len), inserted.into());
20 pos += deleted_len;
21 continue;
22 }
23
24 match chunk {
25 Chunk::Equal(text) => {
26 pos += TextSize::of(text);
27 }
28 Chunk::Delete(deleted) => {
29 let deleted_len = TextSize::of(deleted);
30 builder.delete(TextRange::at(pos, deleted_len));
31 pos += deleted_len;
32 }
33 Chunk::Insert(inserted) => {
34 builder.insert(pos, inserted.into());
35 }
36 }
37 }
38 builder.finish()
39}
40
41#[cfg(test)]
42mod tests {
43 use super::*;
44
45 #[test]
46 fn diff_applies() {
47 let mut original = String::from("fn foo(a:u32){\n}");
48 let result = "fn foo(a: u32) {}";
49 let edit = diff(&original, result);
50 edit.apply(&mut original);
51 assert_eq!(original, result);
52 }
53}
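diff.rs is new: it turns the chunk stream produced by the dissimilar crate into a TextEdit, pairing a Delete that is immediately followed by an Insert into a single replace and advancing the cursor only through Equal and Delete chunks (inserted text has no extent in the original). A stand-alone sketch of that pairing logic, with simplified Chunk/Edit types standing in for the dissimilar and ide ones:

    // Simplified stand-ins: Chunk mirrors dissimilar::Chunk, Edit mirrors a single
    // TextEdit entry as (start, end, new_text) byte offsets into the original text.
    #[derive(Clone, Copy)]
    enum Chunk<'a> {
        Equal(&'a str),
        Delete(&'a str),
        Insert(&'a str),
    }

    #[derive(Debug, PartialEq)]
    struct Edit {
        start: usize,
        end: usize,
        new_text: String,
    }

    fn edits_from_chunks(chunks: &[Chunk]) -> Vec<Edit> {
        let mut edits = Vec::new();
        let mut pos = 0usize;
        let mut iter = chunks.iter().copied().peekable();
        while let Some(chunk) = iter.next() {
            // A Delete immediately followed by an Insert becomes one replace.
            if let (Chunk::Delete(deleted), Some(Chunk::Insert(inserted))) = (chunk, iter.peek().copied()) {
                iter.next();
                edits.push(Edit { start: pos, end: pos + deleted.len(), new_text: inserted.to_string() });
                pos += deleted.len();
                continue;
            }
            match chunk {
                // Only text present in the original moves the cursor forward.
                Chunk::Equal(text) => pos += text.len(),
                Chunk::Delete(deleted) => {
                    edits.push(Edit { start: pos, end: pos + deleted.len(), new_text: String::new() });
                    pos += deleted.len();
                }
                Chunk::Insert(inserted) => {
                    edits.push(Edit { start: pos, end: pos, new_text: inserted.to_string() });
                }
            }
        }
        edits
    }

    fn main() {
        // "fn foo(a:u32){\n}" -> "fn foo(a: u32) {}" expressed as chunks.
        let chunks = [
            Chunk::Equal("fn foo(a:"),
            Chunk::Insert(" "),
            Chunk::Equal("u32)"),
            Chunk::Delete("{\n}"),
            Chunk::Insert(" {}"),
        ];
        let edits = edits_from_chunks(&chunks);
        assert_eq!(edits.len(), 2);
        assert_eq!(edits[1], Edit { start: 13, end: 16, new_text: " {}".to_string() });
    }
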
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 374fb5302..948cfc17c 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -31,6 +31,7 @@ use serde_json::to_value;
31use stdx::{format_to, split_once}; 31use stdx::{format_to, split_once};
32use syntax::{algo, ast, AstNode, TextRange, TextSize}; 32use syntax::{algo, ast, AstNode, TextRange, TextSize};
33 33
34use crate::diff::diff;
34use crate::{ 35use crate::{
35 cargo_target_spec::CargoTargetSpec, 36 cargo_target_spec::CargoTargetSpec,
36 config::RustfmtConfig, 37 config::RustfmtConfig,
@@ -681,7 +682,7 @@ pub(crate) fn handle_completion_resolve(
681 &snap.config.completion, 682 &snap.config.completion,
682 FilePosition { file_id, offset }, 683 FilePosition { file_id, offset },
683 &resolve_data.full_import_path, 684 &resolve_data.full_import_path,
684 &resolve_data.imported_name, 685 resolve_data.imported_name,
685 )? 686 )?
686 .into_iter() 687 .into_iter()
687 .flat_map(|edit| { 688 .flat_map(|edit| {
@@ -840,7 +841,7 @@ pub(crate) fn handle_formatting(
840 let crate_ids = snap.analysis.crate_for(file_id)?; 841 let crate_ids = snap.analysis.crate_for(file_id)?;
841 842
842 let file_line_index = snap.analysis.file_line_index(file_id)?; 843 let file_line_index = snap.analysis.file_line_index(file_id)?;
843 let end_position = to_proto::position(&file_line_index, TextSize::of(file.as_str())); 844 let file_line_endings = snap.file_line_endings(file_id);
844 845
845 let mut rustfmt = match &snap.config.rustfmt { 846 let mut rustfmt = match &snap.config.rustfmt {
846 RustfmtConfig::Rustfmt { extra_args } => { 847 RustfmtConfig::Rustfmt { extra_args } => {
@@ -861,16 +862,18 @@ pub(crate) fn handle_formatting(
861 } 862 }
862 }; 863 };
863 864
864 let mut rustfmt = rustfmt.stdin(Stdio::piped()).stdout(Stdio::piped()).spawn()?; 865 let mut rustfmt =
866 rustfmt.stdin(Stdio::piped()).stdout(Stdio::piped()).stderr(Stdio::piped()).spawn()?;
865 867
866 rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; 868 rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
867 869
868 let output = rustfmt.wait_with_output()?; 870 let output = rustfmt.wait_with_output()?;
869 let captured_stdout = String::from_utf8(output.stdout)?; 871 let captured_stdout = String::from_utf8(output.stdout)?;
872 let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();
870 873
871 if !output.status.success() { 874 if !output.status.success() {
872 match output.status.code() { 875 match output.status.code() {
873 Some(1) => { 876 Some(1) if !captured_stderr.contains("not installed") => {
874 // While `rustfmt` doesn't have a specific exit code for parse errors this is the 877 // While `rustfmt` doesn't have a specific exit code for parse errors this is the
875 // likely cause exiting with 1. Most Language Servers swallow parse errors on 878 // likely cause exiting with 1. Most Language Servers swallow parse errors on
876 // formatting because otherwise an error is surfaced to the user on top of the 879 // formatting because otherwise an error is surfaced to the user on top of the
@@ -886,8 +889,9 @@ pub(crate) fn handle_formatting(
886 format!( 889 format!(
887 r#"rustfmt exited with: 890 r#"rustfmt exited with:
888 Status: {} 891 Status: {}
889 stdout: {}"#, 892 stdout: {}
890 output.status, captured_stdout, 893 stderr: {}"#,
894 output.status, captured_stdout, captured_stderr,
891 ), 895 ),
892 ) 896 )
893 .into()); 897 .into());
@@ -899,10 +903,11 @@ pub(crate) fn handle_formatting(
899 // The document is already formatted correctly -- no edits needed. 903 // The document is already formatted correctly -- no edits needed.
900 Ok(None) 904 Ok(None)
901 } else { 905 } else {
902 Ok(Some(vec![lsp_types::TextEdit { 906 Ok(Some(to_proto::text_edit_vec(
903 range: Range::new(Position::new(0, 0), end_position), 907 &file_line_index,
904 new_text: captured_stdout, 908 file_line_endings,
905 }])) 909 diff(&file, &captured_stdout),
910 )))
906 } 911 }
907} 912}
908 913
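In handle_formatting, rustfmt's stderr is now piped and captured so that a missing rustfmt component (stderr containing "not installed") is reported instead of being mistaken for a parse error, and the whole-document edit is replaced by minimal edits computed with diff(). A std-only sketch of the capture part; it shells out to whatever rustfmt is on PATH, so treat it as illustrative rather than the handler itself:

    // Spawn the formatter with stdout *and* stderr piped, feed the file on stdin,
    // then report stderr alongside stdout on failure. Error handling is trimmed.
    use std::io::Write;
    use std::process::{Command, Stdio};

    fn format_via_rustfmt(source: &str) -> std::io::Result<Result<String, String>> {
        let mut child = Command::new("rustfmt")
            .stdin(Stdio::piped())
            .stdout(Stdio::piped())
            .stderr(Stdio::piped()) // newly piped, so diagnostics are not lost
            .spawn()?;
        child.stdin.as_mut().unwrap().write_all(source.as_bytes())?;
        let output = child.wait_with_output()?;
        let stdout = String::from_utf8_lossy(&output.stdout).into_owned();
        let stderr = String::from_utf8_lossy(&output.stderr).into_owned();
        if output.status.success() {
            Ok(Ok(stdout))
        } else {
            // Surface stderr in the message, as the patched handler does.
            Ok(Err(format!("rustfmt exited with {}: {}", output.status, stderr)))
        }
    }

    fn main() -> std::io::Result<()> {
        match format_via_rustfmt("fn foo(a:u32){\n}")? {
            Ok(formatted) => print!("{}", formatted),
            Err(err) => eprintln!("{}", err),
        }
        Ok(())
    }
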
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index d538ad69a..c9494e300 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -34,6 +34,7 @@ mod request_metrics;
34mod lsp_utils; 34mod lsp_utils;
35mod thread_pool; 35mod thread_pool;
36mod document; 36mod document;
37mod diff;
37pub mod lsp_ext; 38pub mod lsp_ext;
38pub mod config; 39pub mod config;
39 40
diff --git a/crates/rust-analyzer/tests/rust-analyzer/main.rs b/crates/rust-analyzer/tests/rust-analyzer/main.rs
index e51eb2626..84db0856d 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/main.rs
+++ b/crates/rust-analyzer/tests/rust-analyzer/main.rs
@@ -190,15 +190,10 @@ pub use std::collections::HashMap;
190 }, 190 },
191 json!([ 191 json!([
192 { 192 {
193 "newText": r#"mod bar; 193 "newText": "",
194
195fn main() {}
196
197pub use std::collections::HashMap;
198"#,
199 "range": { 194 "range": {
200 "end": { "character": 0, "line": 6 }, 195 "end": { "character": 0, "line": 3 },
201 "start": { "character": 0, "line": 0 } 196 "start": { "character": 11, "line": 2 }
202 } 197 }
203 } 198 }
204 ]), 199 ]),
@@ -248,17 +243,17 @@ pub use std::collections::HashMap;
248 }, 243 },
249 json!([ 244 json!([
250 { 245 {
251 "newText": r#"mod bar; 246 "newText": "",
252 247 "range": {
253async fn test() {} 248 "end": { "character": 0, "line": 3 },
254 249 "start": { "character": 17, "line": 2 }
255fn main() {} 250 }
256 251 },
257pub use std::collections::HashMap; 252 {
258"#, 253 "newText": "",
259 "range": { 254 "range": {
260 "end": { "character": 0, "line": 9 }, 255 "end": { "character": 0, "line": 6 },
261 "start": { "character": 0, "line": 0 } 256 "start": { "character": 11, "line": 5 }
262 } 257 }
263 } 258 }
264 ]), 259 ]),
diff --git a/crates/ssr/Cargo.toml b/crates/ssr/Cargo.toml
index 98ed25fb6..339eda86a 100644
--- a/crates/ssr/Cargo.toml
+++ b/crates/ssr/Cargo.toml
@@ -12,7 +12,7 @@ doctest = false
12 12
13[dependencies] 13[dependencies]
14rustc-hash = "1.1.0" 14rustc-hash = "1.1.0"
15itertools = "0.9.0" 15itertools = "0.10.0"
16 16
17text_edit = { path = "../text_edit", version = "0.0.0" } 17text_edit = { path = "../text_edit", version = "0.0.0" }
18syntax = { path = "../syntax", version = "0.0.0" } 18syntax = { path = "../syntax", version = "0.0.0" }
diff --git a/crates/ssr/src/matching.rs b/crates/ssr/src/matching.rs
index 99b187311..6cf831431 100644
--- a/crates/ssr/src/matching.rs
+++ b/crates/ssr/src/matching.rs
@@ -473,7 +473,9 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
473 } 473 }
474 SyntaxElement::Node(n) => { 474 SyntaxElement::Node(n) => {
475 if let Some(first_token) = n.first_token() { 475 if let Some(first_token) = n.first_token() {
476 if Some(first_token.to_string()) == next_pattern_token { 476 if Some(first_token.text().as_str())
477 == next_pattern_token.as_deref()
478 {
477 if let Some(SyntaxElement::Node(p)) = pattern.next() { 479 if let Some(SyntaxElement::Node(p)) = pattern.next() {
478 // We have a subtree that starts with the next token in our pattern. 480 // We have a subtree that starts with the next token in our pattern.
479 self.attempt_match_token_tree(phase, &p, &n)?; 481 self.attempt_match_token_tree(phase, &p, &n)?;
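The ssr change compares the token text as a &str against next_pattern_token.as_deref() instead of allocating a fresh String with to_string for every candidate token. A tiny sketch of the two comparisons:

    // Comparing Option<String> against a token's text: the old form allocates a
    // String per comparison, the new form borrows both sides as Option<&str>.
    fn main() {
        let next_pattern_token: Option<String> = Some("foo".to_string());
        let first_token_text: &str = "foo"; // stands in for `first_token.text().as_str()`

        // Before: allocates a fresh String just for the comparison.
        assert!(Some(first_token_text.to_string()) == next_pattern_token);

        // After: borrow the pattern token as Option<&str>; no allocation.
        assert!(Some(first_token_text) == next_pattern_token.as_deref());
    }
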
diff --git a/crates/syntax/Cargo.toml b/crates/syntax/Cargo.toml
index 21015591c..5d8389ade 100644
--- a/crates/syntax/Cargo.toml
+++ b/crates/syntax/Cargo.toml
@@ -11,7 +11,7 @@ edition = "2018"
11doctest = false 11doctest = false
12 12
13[dependencies] 13[dependencies]
14itertools = "0.9.0" 14itertools = "0.10.0"
15rowan = "0.10.0" 15rowan = "0.10.0"
16rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" } 16rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" }
17rustc-hash = "1.1.0" 17rustc-hash = "1.1.0"
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index c45cb514a..2aa472fb4 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -193,6 +193,14 @@ impl ast::UseTreeList {
193 .and_then(ast::UseTree::cast) 193 .and_then(ast::UseTree::cast)
194 .expect("UseTreeLists are always nested in UseTrees") 194 .expect("UseTreeLists are always nested in UseTrees")
195 } 195 }
196
197 pub fn has_inner_comment(&self) -> bool {
198 self.syntax()
199 .children_with_tokens()
200 .filter_map(|it| it.into_token())
201 .find_map(ast::Comment::cast)
202 .is_some()
203 }
196} 204}
197 205
198impl ast::Impl { 206impl ast::Impl {
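has_inner_comment checks whether any direct child token of the UseTreeList casts to ast::Comment. A stand-alone sketch of that filter_map/find_map pattern, with invented ToyElement/ToyToken stand-ins for the syntax-tree types:

    // ToyElement models a child of a node (either a nested node or a token);
    // cast_comment models `ast::Comment::cast` by checking for a `//` prefix.
    #[derive(Clone)]
    enum ToyElement {
        Node(&'static str),
        Token(ToyToken),
    }

    #[derive(Clone)]
    struct ToyToken {
        text: &'static str,
    }

    impl ToyElement {
        fn into_token(self) -> Option<ToyToken> {
            match self {
                ToyElement::Token(t) => Some(t),
                ToyElement::Node(_) => None,
            }
        }
    }

    fn cast_comment(token: ToyToken) -> Option<ToyToken> {
        token.text.starts_with("//").then(|| token)
    }

    fn has_inner_comment(children: &[ToyElement]) -> bool {
        children
            .iter()
            .cloned()
            .filter_map(ToyElement::into_token)
            .find_map(cast_comment)
            .is_some()
    }

    fn main() {
        // Models the children of a use-tree list like `{ bar, // keep this
        //                                               baz }`.
        let children = [
            ToyElement::Token(ToyToken { text: "{" }),
            ToyElement::Node("bar"),
            ToyElement::Token(ToyToken { text: "// keep this" }),
            ToyElement::Node("baz"),
            ToyElement::Token(ToyToken { text: "}" }),
        ];
        assert!(has_inner_comment(&children));
    }
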
diff --git a/editors/code/package.json b/editors/code/package.json
index 13749a084..587f11b90 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -283,6 +283,14 @@
283 "default": null, 283 "default": null,
284 "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default). If this is set, then `#rust-analyzer.updates.channel#` setting is not used" 284 "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default). If this is set, then `#rust-analyzer.updates.channel#` setting is not used"
285 }, 285 },
286 "rust-analyzer.server.extraEnv": {
287 "type": [
288 "null",
289 "object"
290 ],
291 "default": null,
292 "markdownDescription": "Extra environment variables that will be passed to the rust-analyzer executable. Useful for passing e.g. `RA_LOG` for debugging."
293 },
286 "rust-analyzer.trace.server": { 294 "rust-analyzer.trace.server": {
287 "type": "string", 295 "type": "string",
288 "scope": "window", 296 "scope": "window",
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 63ab82dde..539e487ec 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -6,6 +6,10 @@ import { DocumentSemanticsTokensSignature, DocumentSemanticsTokensEditsSignature
6import { assert } from './util'; 6import { assert } from './util';
7import { WorkspaceEdit } from 'vscode'; 7import { WorkspaceEdit } from 'vscode';
8 8
9export interface Env {
10 [name: string]: string;
11}
12
9function renderCommand(cmd: ra.CommandLink) { 13function renderCommand(cmd: ra.CommandLink) {
10 return `[${cmd.title}](command:${cmd.command}?${encodeURIComponent(JSON.stringify(cmd.arguments))} '${cmd.tooltip!}')`; 14 return `[${cmd.title}](command:${cmd.command}?${encodeURIComponent(JSON.stringify(cmd.arguments))} '${cmd.tooltip!}')`;
11} 15}
@@ -27,14 +31,17 @@ async function semanticHighlightingWorkaround<R, F extends (...args: any[]) => v
27 return res; 31 return res;
28} 32}
29 33
30export function createClient(serverPath: string, cwd: string): lc.LanguageClient { 34export function createClient(serverPath: string, cwd: string, extraEnv: Env): lc.LanguageClient {
31 // '.' Is the fallback if no folder is open 35 // '.' Is the fallback if no folder is open
32 // TODO?: Workspace folders support Uri's (eg: file://test.txt). 36 // TODO?: Workspace folders support Uri's (eg: file://test.txt).
33 // It might be a good idea to test if the uri points to a file. 37 // It might be a good idea to test if the uri points to a file.
34 38
39 const newEnv = Object.assign({}, process.env);
40 Object.assign(newEnv, extraEnv);
41
35 const run: lc.Executable = { 42 const run: lc.Executable = {
36 command: serverPath, 43 command: serverPath,
37 options: { cwd }, 44 options: { cwd, env: newEnv },
38 }; 45 };
39 const serverOptions: lc.ServerOptions = { 46 const serverOptions: lc.ServerOptions = {
40 run, 47 run,
diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts
index 9d4823a34..b12e134ca 100644
--- a/editors/code/src/commands.ts
+++ b/editors/code/src/commands.ts
@@ -469,8 +469,14 @@ export function resolveCodeAction(ctx: Ctx): Cmd {
469 if (!item.edit) { 469 if (!item.edit) {
470 return; 470 return;
471 } 471 }
472 const edit = client.protocol2CodeConverter.asWorkspaceEdit(item.edit); 472 const itemEdit = item.edit;
473 await vscode.workspace.applyEdit(edit); 473 const edit = client.protocol2CodeConverter.asWorkspaceEdit(itemEdit);
474 // filter out all text edits and recreate the WorkspaceEdit without them so we can apply
475 // snippet edits on our own
476 const itemEditWithoutTextEdits = { ...item, documentChanges: itemEdit.documentChanges?.filter(change => "kind" in change) };
477 const editWithoutTextEdits = client.protocol2CodeConverter.asWorkspaceEdit(itemEditWithoutTextEdits);
478 await applySnippetWorkspaceEdit(edit);
479 await vscode.workspace.applyEdit(editWithoutTextEdits);
474 }; 480 };
475} 481}
476 482
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 848e92af9..fe9f3b4a8 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -1,4 +1,5 @@
1import * as vscode from 'vscode'; 1import * as vscode from 'vscode';
2import { Env } from './client';
2import { log } from "./util"; 3import { log } from "./util";
3 4
4export type UpdatesChannel = "stable" | "nightly"; 5export type UpdatesChannel = "stable" | "nightly";
@@ -13,6 +14,7 @@ export class Config {
13 readonly rootSection = "rust-analyzer"; 14 readonly rootSection = "rust-analyzer";
14 private readonly requiresReloadOpts = [ 15 private readonly requiresReloadOpts = [
15 "serverPath", 16 "serverPath",
17 "server",
16 "cargo", 18 "cargo",
17 "procMacro", 19 "procMacro",
18 "files", 20 "files",
@@ -92,6 +94,7 @@ export class Config {
92 } 94 }
93 95
94 get serverPath() { return this.get<null | string>("serverPath"); } 96 get serverPath() { return this.get<null | string>("serverPath"); }
97 get serverExtraEnv() { return this.get<Env | null>("server.extraEnv") ?? {}; }
95 get channel() { return this.get<UpdatesChannel>("updates.channel"); } 98 get channel() { return this.get<UpdatesChannel>("updates.channel"); }
96 get askBeforeDownload() { return this.get<boolean>("updates.askBeforeDownload"); } 99 get askBeforeDownload() { return this.get<boolean>("updates.askBeforeDownload"); }
97 get traceExtension() { return this.get<boolean>("trace.extension"); } 100 get traceExtension() { return this.get<boolean>("trace.extension"); }
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index d39864d33..e7585184b 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -24,7 +24,7 @@ export class Ctx {
24 serverPath: string, 24 serverPath: string,
25 cwd: string, 25 cwd: string,
26 ): Promise<Ctx> { 26 ): Promise<Ctx> {
27 const client = createClient(serverPath, cwd); 27 const client = createClient(serverPath, cwd, config.serverExtraEnv);
28 28
29 const statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left); 29 const statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
30 extCtx.subscriptions.push(statusBar); 30 extCtx.subscriptions.push(statusBar);