-rw-r--r--  .github/workflows/ci.yaml | 6
-rw-r--r--  .github/workflows/release.yaml | 9
-rw-r--r--  .github/workflows/rustdoc.yaml | 2
-rw-r--r--  Cargo.lock | 46
-rw-r--r--  crates/ra_cargo_watch/Cargo.toml | 4
-rw-r--r--  crates/ra_db/src/fixture.rs | 70
-rw-r--r--  crates/ra_db/src/input.rs | 195
-rw-r--r--  crates/ra_db/src/lib.rs | 18
-rw-r--r--  crates/ra_hir/Cargo.toml | 2
-rw-r--r--  crates/ra_hir/src/code_model.rs | 92
-rw-r--r--  crates/ra_hir/src/db.rs | 13
-rw-r--r--  crates/ra_hir/src/semantics.rs | 35
-rw-r--r--  crates/ra_hir_def/Cargo.toml | 2
-rw-r--r--  crates/ra_hir_def/src/data.rs | 41
-rw-r--r--  crates/ra_hir_def/src/db.rs | 12
-rw-r--r--  crates/ra_hir_def/src/find_path.rs | 2
-rw-r--r--  crates/ra_hir_def/src/lang_item.rs | 5
-rw-r--r--  crates/ra_hir_def/src/nameres.rs | 13
-rw-r--r--  crates/ra_hir_def/src/nameres/collector.rs | 9
-rw-r--r--  crates/ra_hir_def/src/test_db.rs | 10
-rw-r--r--  crates/ra_hir_expand/src/builtin_macro.rs | 186
-rw-r--r--  crates/ra_hir_expand/src/db.rs | 71
-rw-r--r--  crates/ra_hir_expand/src/test_db.rs | 9
-rw-r--r--  crates/ra_hir_ty/Cargo.toml | 2
-rw-r--r--  crates/ra_hir_ty/src/db.rs | 29
-rw-r--r--  crates/ra_hir_ty/src/infer.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/infer/path.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/lib.rs | 4
-rw-r--r--  crates/ra_hir_ty/src/lower.rs | 86
-rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs | 28
-rw-r--r--  crates/ra_hir_ty/src/test_db.rs | 7
-rw-r--r--  crates/ra_hir_ty/src/tests/macros.rs | 65
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs | 41
-rw-r--r--  crates/ra_hir_ty/src/traits.rs | 119
-rw-r--r--  crates/ra_ide/Cargo.toml | 2
-rw-r--r--  crates/ra_ide/src/completion.rs | 29
-rw-r--r--  crates/ra_ide/src/completion/complete_dot.rs | 173
-rw-r--r--  crates/ra_ide/src/completion/complete_fn_param.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_keyword.rs | 3
-rw-r--r--  crates/ra_ide/src/completion/complete_macro_in_item_position.rs | 3
-rw-r--r--  crates/ra_ide/src/completion/complete_path.rs | 144
-rw-r--r--  crates/ra_ide/src/completion/complete_pattern.rs | 20
-rw-r--r--  crates/ra_ide/src/completion/complete_postfix.rs | 69
-rw-r--r--  crates/ra_ide/src/completion/complete_record_literal.rs | 27
-rw-r--r--  crates/ra_ide/src/completion/complete_record_pattern.rs | 30
-rw-r--r--  crates/ra_ide/src/completion/complete_scope.rs | 76
-rw-r--r--  crates/ra_ide/src/completion/complete_snippet.rs | 2
-rw-r--r--  crates/ra_ide/src/completion/complete_trait_impl.rs | 146
-rw-r--r--  crates/ra_ide/src/completion/completion_context.rs | 116
-rw-r--r--  crates/ra_ide/src/completion/completion_item.rs | 34
-rw-r--r--  crates/ra_ide/src/completion/presentation.rs | 76
-rw-r--r--  crates/ra_ide/src/completion/test_utils.rs | 29
-rw-r--r--  crates/ra_ide/src/display.rs | 14
-rw-r--r--  crates/ra_ide/src/hover.rs | 114
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs | 32
-rw-r--r--  crates/ra_ide/src/lib.rs | 39
-rw-r--r--  crates/ra_ide/src/mock_analysis.rs | 12
-rw-r--r--  crates/ra_ide/src/parent_module.rs | 2
-rw-r--r--  crates/ra_ide/src/references/rename.rs | 208
-rw-r--r--  crates/ra_ide/src/typing.rs | 160
-rw-r--r--  crates/ra_ide/src/typing/on_enter.rs | 216
-rw-r--r--  crates/ra_ide_db/Cargo.toml | 2
-rw-r--r--  crates/ra_ide_db/src/change.rs | 13
-rw-r--r--  crates/ra_ide_db/src/lib.rs | 19
-rw-r--r--  crates/ra_ide_db/src/search.rs | 28
-rw-r--r--  crates/ra_ide_db/src/symbol_index.rs | 4
-rw-r--r--  crates/ra_mbe/src/lib.rs | 3
-rw-r--r--  crates/ra_mbe/src/mbe_expander/matcher.rs | 4
-rw-r--r--  crates/ra_mbe/src/syntax_bridge.rs | 221
-rw-r--r--  crates/ra_mbe/src/tests.rs | 12
-rw-r--r--  crates/ra_parser/src/grammar/expressions.rs | 10
-rw-r--r--  crates/ra_prof/src/lib.rs | 57
-rw-r--r--  crates/ra_project_model/src/lib.rs | 36
-rw-r--r--  crates/ra_syntax/src/ast/make.rs | 6
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs | 3
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.txt | 38
-rw-r--r--  crates/rust-analyzer/Cargo.toml | 2
-rw-r--r--  crates/rust-analyzer/src/cli/analysis_bench.rs | 12
-rw-r--r--  crates/rust-analyzer/src/cli/load_cargo.rs | 18
-rw-r--r--  crates/rust-analyzer/src/config.rs | 4
-rw-r--r--  crates/rust-analyzer/src/conv.rs | 39
-rw-r--r--  crates/rust-analyzer/src/feature_flags.rs (renamed from crates/ra_ide_db/src/feature_flags.rs) | 6
-rw-r--r--  crates/rust-analyzer/src/lib.rs | 1
-rw-r--r--  crates/rust-analyzer/src/main_loop.rs | 22
-rw-r--r--  crates/rust-analyzer/src/main_loop/handlers.rs | 30
-rw-r--r--  crates/rust-analyzer/src/world.rs | 61
-rw-r--r--  crates/test_utils/src/lib.rs | 13
-rw-r--r--  docs/user/readme.adoc | 21
-rw-r--r--  editors/code/package-lock.json | 18
-rw-r--r--  editors/code/package.json | 42
-rw-r--r--  editors/code/src/client.ts | 1
-rw-r--r--  editors/code/src/config.ts | 12
-rw-r--r--  editors/code/src/ctx.ts | 12
-rw-r--r--  editors/code/src/inlay_hints.ts | 290
-rw-r--r--  editors/code/src/installation/interfaces.ts | 15
-rw-r--r--  editors/code/src/installation/server.ts | 28
-rw-r--r--  editors/code/src/rust-analyzer-api.ts | 22
-rw-r--r--  editors/code/src/util.ts | 12
-rw-r--r--  xtask/src/dist.rs | 20
99 files changed, 3128 insertions, 1048 deletions
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 633015956..ee74d7486 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -13,7 +13,7 @@ jobs:
13 runs-on: ubuntu-latest 13 runs-on: ubuntu-latest
14 steps: 14 steps:
15 - name: Checkout repository 15 - name: Checkout repository
16 uses: actions/checkout@v1 16 uses: actions/checkout@v2
17 17
18 - run: cargo install cargo-audit 18 - run: cargo install cargo-audit
19 - run: cargo audit 19 - run: cargo audit
@@ -37,7 +37,7 @@ jobs:
37 37
38 steps: 38 steps:
39 - name: Checkout repository 39 - name: Checkout repository
40 uses: actions/checkout@v1 40 uses: actions/checkout@v2
41 41
42 # We need to disable the existing toolchain to avoid updating rust-docs 42 # We need to disable the existing toolchain to avoid updating rust-docs
43 # which takes a long time. The fastest way to do this is to rename the 43 # which takes a long time. The fastest way to do this is to rename the
@@ -94,7 +94,7 @@ jobs:
94 CC: gcc-4.9 94 CC: gcc-4.9
95 steps: 95 steps:
96 - name: Checkout repository 96 - name: Checkout repository
97 uses: actions/checkout@v1 97 uses: actions/checkout@v2
98 98
99 - name: Install Nodejs 99 - name: Install Nodejs
100 uses: actions/setup-node@v1 100 uses: actions/setup-node@v1
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index d58be0fd7..0434b6128 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -23,7 +23,7 @@ jobs:
23 23
24 steps: 24 steps:
25 - name: Checkout repository 25 - name: Checkout repository
26 uses: actions/checkout@v1 26 uses: actions/checkout@v2
27 27
28 # We need to disable the existing toolchain to avoid updating rust-docs 28 # We need to disable the existing toolchain to avoid updating rust-docs
29 # which takes a long time. The fastest way to do this is to rename the 29 # which takes a long time. The fastest way to do this is to rename the
@@ -42,6 +42,7 @@ jobs:
42 override: true 42 override: true
43 43
44 - name: Install Nodejs 44 - name: Install Nodejs
45 if: matrix.os == 'ubuntu-latest'
45 uses: actions/setup-node@v1 46 uses: actions/setup-node@v1
46 with: 47 with:
47 node-version: 12.x 48 node-version: 12.x
@@ -70,14 +71,14 @@ jobs:
70 with: 71 with:
71 node-version: 12.x 72 node-version: 12.x
72 73
73 - run: echo "::set-env name=TAG::$(date --iso)" 74 - run: echo "::set-env name=TAG::$(date --iso --utc)"
74 if: github.event_name == 'push' 75 if: github.event_name == 'push'
75 - run: echo "::set-env name=TAG::nightly" 76 - run: echo "::set-env name=TAG::nightly"
76 if: github.event_name == 'schedule' 77 if: github.event_name == 'schedule'
77 - run: 'echo "TAG: $TAG"' 78 - run: 'echo "TAG: $TAG"'
78 79
79 - name: Checkout repository 80 - name: Checkout repository
80 uses: actions/checkout@v1 81 uses: actions/checkout@v2
81 82
82 - uses: actions/download-artifact@v1 83 - uses: actions/download-artifact@v1
83 with: 84 with:
@@ -107,4 +108,4 @@ jobs:
107 if: github.event_name == 'push' 108 if: github.event_name == 'push'
108 working-directory: ./editors/code 109 working-directory: ./editors/code
109 # token from https://dev.azure.com/rust-analyzer/ 110 # token from https://dev.azure.com/rust-analyzer/
110 run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer.vsix 111 run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer.vsix
diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml
index e75e92695..c84ce5d48 100644
--- a/.github/workflows/rustdoc.yaml
+++ b/.github/workflows/rustdoc.yaml
@@ -13,7 +13,7 @@ jobs:
13 13
14 steps: 14 steps:
15 - name: Checkout repository 15 - name: Checkout repository
16 uses: actions/checkout@v1 16 uses: actions/checkout@v2
17 17
18 - name: Install Rust toolchain 18 - name: Install Rust toolchain
19 uses: actions-rs/toolchain@v1 19 uses: actions-rs/toolchain@v1
diff --git a/Cargo.lock b/Cargo.lock
index 80e778bcf..330bdd1cb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2,9 +2,9 @@
2# It is not intended for manual editing. 2# It is not intended for manual editing.
3[[package]] 3[[package]]
4name = "aho-corasick" 4name = "aho-corasick"
5version = "0.7.9" 5version = "0.7.10"
6source = "registry+https://github.com/rust-lang/crates.io-index" 6source = "registry+https://github.com/rust-lang/crates.io-index"
7checksum = "d5e63fd144e18ba274ae7095c0197a870a7b9468abc801dd62f190d80817d2ec" 7checksum = "8716408b8bc624ed7f65d223ddb9ac2d044c0547b6fa4b0d554f3a9540496ada"
8dependencies = [ 8dependencies = [
9 "memchr", 9 "memchr",
10] 10]
@@ -88,21 +88,6 @@ dependencies = [
88] 88]
89 89
90[[package]] 90[[package]]
91name = "byteorder"
92version = "1.3.4"
93source = "registry+https://github.com/rust-lang/crates.io-index"
94checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
95
96[[package]]
97name = "c2-chacha"
98version = "0.2.3"
99source = "registry+https://github.com/rust-lang/crates.io-index"
100checksum = "214238caa1bf3a496ec3392968969cab8549f96ff30652c9e56885329315f6bb"
101dependencies = [
102 "ppv-lite86",
103]
104
105[[package]]
106name = "cargo_metadata" 91name = "cargo_metadata"
107version = "0.9.1" 92version = "0.9.1"
108source = "registry+https://github.com/rust-lang/crates.io-index" 93source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -213,15 +198,14 @@ dependencies = [
213 198
214[[package]] 199[[package]]
215name = "console" 200name = "console"
216version = "0.9.2" 201version = "0.10.0"
217source = "registry+https://github.com/rust-lang/crates.io-index" 202source = "registry+https://github.com/rust-lang/crates.io-index"
218checksum = "45e0f3986890b3acbc782009e2629dfe2baa430ac091519ce3be26164a2ae6c0" 203checksum = "6728a28023f207181b193262711102bfbaf47cc9d13bc71d0736607ef8efe88c"
219dependencies = [ 204dependencies = [
220 "clicolors-control", 205 "clicolors-control",
221 "encode_unicode", 206 "encode_unicode",
222 "lazy_static", 207 "lazy_static",
223 "libc", 208 "libc",
224 "regex",
225 "termios", 209 "termios",
226 "winapi 0.3.8", 210 "winapi 0.3.8",
227] 211]
@@ -402,12 +386,9 @@ dependencies = [
402 386
403[[package]] 387[[package]]
404name = "fst" 388name = "fst"
405version = "0.3.5" 389version = "0.4.0"
406source = "registry+https://github.com/rust-lang/crates.io-index" 390source = "registry+https://github.com/rust-lang/crates.io-index"
407checksum = "927fb434ff9f0115b215dc0efd2e4fbdd7448522a92a1aa37c77d6a2f8f1ebd6" 391checksum = "3f7c13470d799474d44e2b9c6a0925807def7af4d120cd4de761433be76f7579"
408dependencies = [
409 "byteorder",
410]
411 392
412[[package]] 393[[package]]
413name = "fuchsia-zircon" 394name = "fuchsia-zircon"
@@ -509,9 +490,9 @@ dependencies = [
509 490
510[[package]] 491[[package]]
511name = "insta" 492name = "insta"
512version = "0.13.1" 493version = "0.15.0"
513source = "registry+https://github.com/rust-lang/crates.io-index" 494source = "registry+https://github.com/rust-lang/crates.io-index"
514checksum = "8df742abee84dbf27d20869c9adf77b0d8f7ea3eead13c2c9e3998d136a97058" 495checksum = "8de3f029212a3fe78a6090f1f2b993877ca245a9ded863f3fcbd6eae084fc1ed"
515dependencies = [ 496dependencies = [
516 "console", 497 "console",
517 "difference", 498 "difference",
@@ -655,9 +636,9 @@ dependencies = [
655 636
656[[package]] 637[[package]]
657name = "lsp-types" 638name = "lsp-types"
658version = "0.72.0" 639version = "0.73.0"
659source = "registry+https://github.com/rust-lang/crates.io-index" 640source = "registry+https://github.com/rust-lang/crates.io-index"
660checksum = "face91691e558746745dc9dc6c67a4e2a24e044926e274d8378e6f19659329f0" 641checksum = "93d0cf64ea141b43d9e055f6b9df13f0bce32b103d84237509ce0a571ab9b159"
661dependencies = [ 642dependencies = [
662 "base64", 643 "base64",
663 "bitflags", 644 "bitflags",
@@ -960,6 +941,7 @@ name = "ra_hir"
960version = "0.1.0" 941version = "0.1.0"
961dependencies = [ 942dependencies = [
962 "either", 943 "either",
944 "itertools",
963 "log", 945 "log",
964 "ra_db", 946 "ra_db",
965 "ra_hir_def", 947 "ra_hir_def",
@@ -1180,11 +1162,11 @@ dependencies = [
1180 1162
1181[[package]] 1163[[package]]
1182name = "rand_chacha" 1164name = "rand_chacha"
1183version = "0.2.1" 1165version = "0.2.2"
1184source = "registry+https://github.com/rust-lang/crates.io-index" 1166source = "registry+https://github.com/rust-lang/crates.io-index"
1185checksum = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" 1167checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
1186dependencies = [ 1168dependencies = [
1187 "c2-chacha", 1169 "ppv-lite86",
1188 "rand_core", 1170 "rand_core",
1189] 1171]
1190 1172
diff --git a/crates/ra_cargo_watch/Cargo.toml b/crates/ra_cargo_watch/Cargo.toml
index 731d50371..741345a21 100644
--- a/crates/ra_cargo_watch/Cargo.toml
+++ b/crates/ra_cargo_watch/Cargo.toml
@@ -6,10 +6,10 @@ authors = ["rust-analyzer developers"]
6 6
7[dependencies] 7[dependencies]
8crossbeam-channel = "0.4.0" 8crossbeam-channel = "0.4.0"
9lsp-types = { version = "0.72.0", features = ["proposed"] } 9lsp-types = { version = "0.73.0", features = ["proposed"] }
10log = "0.4.8" 10log = "0.4.8"
11cargo_metadata = "0.9.1" 11cargo_metadata = "0.9.1"
12serde_json = "1.0.48" 12serde_json = "1.0.48"
13 13
14[dev-dependencies] 14[dev-dependencies]
15insta = "0.13.1" 15insta = "0.15.0"
diff --git a/crates/ra_db/src/fixture.rs b/crates/ra_db/src/fixture.rs
index da7af110c..3dc86ca2d 100644
--- a/crates/ra_db/src/fixture.rs
+++ b/crates/ra_db/src/fixture.rs
@@ -5,7 +5,7 @@ use std::sync::Arc;
5 5
6use ra_cfg::CfgOptions; 6use ra_cfg::CfgOptions;
7use rustc_hash::FxHashMap; 7use rustc_hash::FxHashMap;
8use test_utils::{extract_offset, parse_fixture, CURSOR_MARKER}; 8use test_utils::{extract_offset, parse_fixture, parse_single_fixture, CURSOR_MARKER};
9 9
10use crate::{ 10use crate::{
11 input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf, 11 input::CrateName, CrateGraph, CrateId, Edition, Env, FileId, FilePosition, RelativePathBuf,
@@ -45,22 +45,45 @@ pub trait WithFixture: Default + SourceDatabaseExt + 'static {
45 45
46impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {} 46impl<DB: SourceDatabaseExt + Default + 'static> WithFixture for DB {}
47 47
48fn with_single_file(db: &mut dyn SourceDatabaseExt, text: &str) -> FileId { 48fn with_single_file(db: &mut dyn SourceDatabaseExt, ra_fixture: &str) -> FileId {
49 let file_id = FileId(0); 49 let file_id = FileId(0);
50 let rel_path: RelativePathBuf = "/main.rs".into(); 50 let rel_path: RelativePathBuf = "/main.rs".into();
51 51
52 let mut source_root = SourceRoot::new_local(); 52 let mut source_root = SourceRoot::new_local();
53 source_root.insert_file(rel_path.clone(), file_id); 53 source_root.insert_file(rel_path.clone(), file_id);
54 54
55 let mut crate_graph = CrateGraph::default(); 55 let fixture = parse_single_fixture(ra_fixture);
56 crate_graph.add_crate_root( 56
57 file_id, 57 let crate_graph = if let Some(entry) = fixture {
58 Edition::Edition2018, 58 let meta = match parse_meta(&entry.meta) {
59 CfgOptions::default(), 59 ParsedMeta::File(it) => it,
60 Env::default(), 60 _ => panic!("with_single_file only support file meta"),
61 ); 61 };
62 62
63 db.set_file_text(file_id, Arc::new(text.to_string())); 63 let mut crate_graph = CrateGraph::default();
64 crate_graph.add_crate_root(
65 file_id,
66 meta.edition,
67 meta.krate,
68 meta.cfg,
69 meta.env,
70 Default::default(),
71 );
72 crate_graph
73 } else {
74 let mut crate_graph = CrateGraph::default();
75 crate_graph.add_crate_root(
76 file_id,
77 Edition::Edition2018,
78 None,
79 CfgOptions::default(),
80 Env::default(),
81 Default::default(),
82 );
83 crate_graph
84 };
85
86 db.set_file_text(file_id, Arc::new(ra_fixture.to_string()));
64 db.set_file_relative_path(file_id, rel_path); 87 db.set_file_relative_path(file_id, rel_path);
65 db.set_file_source_root(file_id, WORKSPACE); 88 db.set_file_source_root(file_id, WORKSPACE);
66 db.set_source_root(WORKSPACE, Arc::new(source_root)); 89 db.set_source_root(WORKSPACE, Arc::new(source_root));
@@ -98,8 +121,14 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
98 assert!(meta.path.starts_with(&source_root_prefix)); 121 assert!(meta.path.starts_with(&source_root_prefix));
99 122
100 if let Some(krate) = meta.krate { 123 if let Some(krate) = meta.krate {
101 let crate_id = 124 let crate_id = crate_graph.add_crate_root(
102 crate_graph.add_crate_root(file_id, meta.edition, meta.cfg, Env::default()); 125 file_id,
126 meta.edition,
127 Some(krate.clone()),
128 meta.cfg,
129 meta.env,
130 Default::default(),
131 );
103 let prev = crates.insert(krate.clone(), crate_id); 132 let prev = crates.insert(krate.clone(), crate_id);
104 assert!(prev.is_none()); 133 assert!(prev.is_none());
105 for dep in meta.deps { 134 for dep in meta.deps {
@@ -132,8 +161,10 @@ fn with_files(db: &mut dyn SourceDatabaseExt, fixture: &str) -> Option<FilePosit
132 crate_graph.add_crate_root( 161 crate_graph.add_crate_root(
133 crate_root, 162 crate_root,
134 Edition::Edition2018, 163 Edition::Edition2018,
164 None,
135 CfgOptions::default(), 165 CfgOptions::default(),
136 Env::default(), 166 Env::default(),
167 Default::default(),
137 ); 168 );
138 } else { 169 } else {
139 for (from, to) in crate_deps { 170 for (from, to) in crate_deps {
@@ -160,9 +191,10 @@ struct FileMeta {
160 deps: Vec<String>, 191 deps: Vec<String>,
161 cfg: CfgOptions, 192 cfg: CfgOptions,
162 edition: Edition, 193 edition: Edition,
194 env: Env,
163} 195}
164 196
165//- /lib.rs crate:foo deps:bar,baz 197//- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo)
166fn parse_meta(meta: &str) -> ParsedMeta { 198fn parse_meta(meta: &str) -> ParsedMeta {
167 let components = meta.split_ascii_whitespace().collect::<Vec<_>>(); 199 let components = meta.split_ascii_whitespace().collect::<Vec<_>>();
168 200
@@ -179,6 +211,7 @@ fn parse_meta(meta: &str) -> ParsedMeta {
179 let mut deps = Vec::new(); 211 let mut deps = Vec::new();
180 let mut edition = Edition::Edition2018; 212 let mut edition = Edition::Edition2018;
181 let mut cfg = CfgOptions::default(); 213 let mut cfg = CfgOptions::default();
214 let mut env = Env::default();
182 for component in components[1..].iter() { 215 for component in components[1..].iter() {
183 let (key, value) = split1(component, ':').unwrap(); 216 let (key, value) = split1(component, ':').unwrap();
184 match key { 217 match key {
@@ -193,11 +226,18 @@ fn parse_meta(meta: &str) -> ParsedMeta {
193 } 226 }
194 } 227 }
195 } 228 }
229 "env" => {
230 for key in value.split(',') {
231 if let Some((k, v)) = split1(key, '=') {
232 env.set(k.into(), v.into());
233 }
234 }
235 }
196 _ => panic!("bad component: {:?}", component), 236 _ => panic!("bad component: {:?}", component),
197 } 237 }
198 } 238 }
199 239
200 ParsedMeta::File(FileMeta { path, krate, deps, edition, cfg }) 240 ParsedMeta::File(FileMeta { path, krate, deps, edition, cfg, env })
201} 241}
202 242
203fn split1(haystack: &str, delim: char) -> Option<(&str, &str)> { 243fn split1(haystack: &str, delim: char) -> Option<(&str, &str)> {
diff --git a/crates/ra_db/src/input.rs b/crates/ra_db/src/input.rs
index eaff99fd3..06d40db96 100644
--- a/crates/ra_db/src/input.rs
+++ b/crates/ra_db/src/input.rs
@@ -6,7 +6,7 @@
6//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how 6//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
7//! actual IO is done and lowered to input. 7//! actual IO is done and lowered to input.
8 8
9use std::{fmt, str::FromStr}; 9use std::{fmt, ops, str::FromStr};
10 10
11use ra_cfg::CfgOptions; 11use ra_cfg::CfgOptions;
12use ra_syntax::SmolStr; 12use ra_syntax::SmolStr;
@@ -86,7 +86,7 @@ pub struct CrateId(pub u32);
86pub struct CrateName(SmolStr); 86pub struct CrateName(SmolStr);
87 87
88impl CrateName { 88impl CrateName {
89 /// Crates a crate name, checking for dashes in the string provided. 89 /// Creates a crate name, checking for dashes in the string provided.
90 /// Dashes are not allowed in the crate names, 90 /// Dashes are not allowed in the crate names,
91 /// hence the input string is returned as `Err` for those cases. 91 /// hence the input string is returned as `Err` for those cases.
92 pub fn new(name: &str) -> Result<CrateName, &str> { 92 pub fn new(name: &str) -> Result<CrateName, &str> {
@@ -97,19 +97,24 @@ impl CrateName {
97 } 97 }
98 } 98 }
99 99
100 /// Crates a crate name, unconditionally replacing the dashes with underscores. 100 /// Creates a crate name, unconditionally replacing the dashes with underscores.
101 pub fn normalize_dashes(name: &str) -> CrateName { 101 pub fn normalize_dashes(name: &str) -> CrateName {
102 Self(SmolStr::new(name.replace('-', "_"))) 102 Self(SmolStr::new(name.replace('-', "_")))
103 } 103 }
104} 104}
105 105
106#[derive(Debug, Clone, PartialEq, Eq)] 106#[derive(Debug, Clone, PartialEq, Eq)]
107struct CrateData { 107pub struct CrateData {
108 file_id: FileId, 108 pub root_file_id: FileId,
109 edition: Edition, 109 pub edition: Edition,
110 cfg_options: CfgOptions, 110 /// The name to display to the end user.
111 env: Env, 111 /// This actual crate name can be different in a particular dependent crate
112 dependencies: Vec<Dependency>, 112 /// or may even be missing for some cases, such as a dummy crate for the code snippet.
113 pub display_name: Option<String>,
114 pub cfg_options: CfgOptions,
115 pub env: Env,
116 pub extern_source: ExternSource,
117 pub dependencies: Vec<Dependency>,
113} 118}
114 119
115#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 120#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -118,11 +123,22 @@ pub enum Edition {
118 Edition2015, 123 Edition2015,
119} 124}
120 125
126#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
127pub struct ExternSourceId(pub u32);
128
121#[derive(Default, Debug, Clone, PartialEq, Eq)] 129#[derive(Default, Debug, Clone, PartialEq, Eq)]
122pub struct Env { 130pub struct Env {
123 entries: FxHashMap<String, String>, 131 entries: FxHashMap<String, String>,
124} 132}
125 133
134// FIXME: Redesign vfs for solve the following limitation ?
135// Note: Some env variables (e.g. OUT_DIR) are located outside of the
136// crate. We store a map to allow remap it to ExternSourceId
137#[derive(Default, Debug, Clone, PartialEq, Eq)]
138pub struct ExternSource {
139 extern_paths: FxHashMap<String, ExternSourceId>,
140}
141
126#[derive(Debug, Clone, PartialEq, Eq)] 142#[derive(Debug, Clone, PartialEq, Eq)]
127pub struct Dependency { 143pub struct Dependency {
128 pub crate_id: CrateId, 144 pub crate_id: CrateId,
@@ -134,20 +150,26 @@ impl CrateGraph {
134 &mut self, 150 &mut self,
135 file_id: FileId, 151 file_id: FileId,
136 edition: Edition, 152 edition: Edition,
153 display_name: Option<String>,
137 cfg_options: CfgOptions, 154 cfg_options: CfgOptions,
138 env: Env, 155 env: Env,
156 extern_source: ExternSource,
139 ) -> CrateId { 157 ) -> CrateId {
140 let data = CrateData::new(file_id, edition, cfg_options, env); 158 let data = CrateData {
159 root_file_id: file_id,
160 edition,
161 display_name,
162 cfg_options,
163 env,
164 extern_source,
165 dependencies: Vec::new(),
166 };
141 let crate_id = CrateId(self.arena.len() as u32); 167 let crate_id = CrateId(self.arena.len() as u32);
142 let prev = self.arena.insert(crate_id, data); 168 let prev = self.arena.insert(crate_id, data);
143 assert!(prev.is_none()); 169 assert!(prev.is_none());
144 crate_id 170 crate_id
145 } 171 }
146 172
147 pub fn cfg_options(&self, crate_id: CrateId) -> &CfgOptions {
148 &self.arena[&crate_id].cfg_options
149 }
150
151 pub fn add_dep( 173 pub fn add_dep(
152 &mut self, 174 &mut self,
153 from: CrateId, 175 from: CrateId,
@@ -169,24 +191,13 @@ impl CrateGraph {
169 self.arena.keys().copied() 191 self.arena.keys().copied()
170 } 192 }
171 193
172 pub fn crate_root(&self, crate_id: CrateId) -> FileId {
173 self.arena[&crate_id].file_id
174 }
175
176 pub fn edition(&self, crate_id: CrateId) -> Edition {
177 self.arena[&crate_id].edition
178 }
179
180 // FIXME: this only finds one crate with the given root; we could have multiple 194 // FIXME: this only finds one crate with the given root; we could have multiple
181 pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> { 195 pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
182 let (&crate_id, _) = self.arena.iter().find(|(_crate_id, data)| data.file_id == file_id)?; 196 let (&crate_id, _) =
197 self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?;
183 Some(crate_id) 198 Some(crate_id)
184 } 199 }
185 200
186 pub fn dependencies(&self, crate_id: CrateId) -> impl Iterator<Item = &Dependency> {
187 self.arena[&crate_id].dependencies.iter()
188 }
189
190 /// Extends this crate graph by adding a complete disjoint second crate 201 /// Extends this crate graph by adding a complete disjoint second crate
191 /// graph. 202 /// graph.
192 /// 203 ///
@@ -209,8 +220,8 @@ impl CrateGraph {
209 return false; 220 return false;
210 } 221 }
211 222
212 for dep in self.dependencies(from) { 223 for dep in &self[from].dependencies {
213 let crate_id = dep.crate_id(); 224 let crate_id = dep.crate_id;
214 if crate_id == target { 225 if crate_id == target {
215 return true; 226 return true;
216 } 227 }
@@ -223,6 +234,13 @@ impl CrateGraph {
223 } 234 }
224} 235}
225 236
237impl ops::Index<CrateId> for CrateGraph {
238 type Output = CrateData;
239 fn index(&self, crate_id: CrateId) -> &CrateData {
240 &self.arena[&crate_id]
241 }
242}
243
226impl CrateId { 244impl CrateId {
227 pub fn shift(self, amount: u32) -> CrateId { 245 pub fn shift(self, amount: u32) -> CrateId {
228 CrateId(self.0 + amount) 246 CrateId(self.0 + amount)
@@ -230,10 +248,6 @@ impl CrateId {
230} 248}
231 249
232impl CrateData { 250impl CrateData {
233 fn new(file_id: FileId, edition: Edition, cfg_options: CfgOptions, env: Env) -> CrateData {
234 CrateData { file_id, edition, dependencies: Vec::new(), cfg_options, env }
235 }
236
237 fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) { 251 fn add_dep(&mut self, name: SmolStr, crate_id: CrateId) {
238 self.dependencies.push(Dependency { name, crate_id }) 252 self.dependencies.push(Dependency { name, crate_id })
239 } 253 }
@@ -261,9 +275,34 @@ impl fmt::Display for Edition {
261 } 275 }
262} 276}
263 277
264impl Dependency { 278impl Env {
265 pub fn crate_id(&self) -> CrateId { 279 pub fn set(&mut self, env: &str, value: String) {
266 self.crate_id 280 self.entries.insert(env.to_owned(), value);
281 }
282
283 pub fn get(&self, env: &str) -> Option<String> {
284 self.entries.get(env).cloned()
285 }
286}
287
288impl ExternSource {
289 pub fn extern_path(&self, path: &str) -> Option<(ExternSourceId, RelativePathBuf)> {
290 self.extern_paths.iter().find_map(|(root_path, id)| {
291 if path.starts_with(root_path) {
292 let mut rel_path = &path[root_path.len()..];
293 if rel_path.starts_with("/") {
294 rel_path = &rel_path[1..];
295 }
296 let rel_path = RelativePathBuf::from_path(rel_path).ok()?;
297 Some((id.clone(), rel_path))
298 } else {
299 None
300 }
301 })
302 }
303
304 pub fn set_extern_path(&mut self, root_path: &str, root: ExternSourceId) {
305 self.extern_paths.insert(root_path.to_owned(), root);
267 } 306 }
268} 307}
269 308
@@ -290,12 +329,30 @@ mod tests {
290 #[test] 329 #[test]
291 fn it_should_panic_because_of_cycle_dependencies() { 330 fn it_should_panic_because_of_cycle_dependencies() {
292 let mut graph = CrateGraph::default(); 331 let mut graph = CrateGraph::default();
293 let crate1 = 332 let crate1 = graph.add_crate_root(
294 graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default()); 333 FileId(1u32),
295 let crate2 = 334 Edition2018,
296 graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); 335 None,
297 let crate3 = 336 CfgOptions::default(),
298 graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); 337 Env::default(),
338 Default::default(),
339 );
340 let crate2 = graph.add_crate_root(
341 FileId(2u32),
342 Edition2018,
343 None,
344 CfgOptions::default(),
345 Env::default(),
346 Default::default(),
347 );
348 let crate3 = graph.add_crate_root(
349 FileId(3u32),
350 Edition2018,
351 None,
352 CfgOptions::default(),
353 Env::default(),
354 Default::default(),
355 );
299 assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); 356 assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
300 assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); 357 assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
301 assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err()); 358 assert!(graph.add_dep(crate3, CrateName::new("crate1").unwrap(), crate1).is_err());
@@ -304,12 +361,30 @@ mod tests {
304 #[test] 361 #[test]
305 fn it_works() { 362 fn it_works() {
306 let mut graph = CrateGraph::default(); 363 let mut graph = CrateGraph::default();
307 let crate1 = 364 let crate1 = graph.add_crate_root(
308 graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default()); 365 FileId(1u32),
309 let crate2 = 366 Edition2018,
310 graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); 367 None,
311 let crate3 = 368 CfgOptions::default(),
312 graph.add_crate_root(FileId(3u32), Edition2018, CfgOptions::default(), Env::default()); 369 Env::default(),
370 Default::default(),
371 );
372 let crate2 = graph.add_crate_root(
373 FileId(2u32),
374 Edition2018,
375 None,
376 CfgOptions::default(),
377 Env::default(),
378 Default::default(),
379 );
380 let crate3 = graph.add_crate_root(
381 FileId(3u32),
382 Edition2018,
383 None,
384 CfgOptions::default(),
385 Env::default(),
386 Default::default(),
387 );
313 assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok()); 388 assert!(graph.add_dep(crate1, CrateName::new("crate2").unwrap(), crate2).is_ok());
314 assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok()); 389 assert!(graph.add_dep(crate2, CrateName::new("crate3").unwrap(), crate3).is_ok());
315 } 390 }
@@ -317,16 +392,28 @@ mod tests {
317 #[test] 392 #[test]
318 fn dashes_are_normalized() { 393 fn dashes_are_normalized() {
319 let mut graph = CrateGraph::default(); 394 let mut graph = CrateGraph::default();
320 let crate1 = 395 let crate1 = graph.add_crate_root(
321 graph.add_crate_root(FileId(1u32), Edition2018, CfgOptions::default(), Env::default()); 396 FileId(1u32),
322 let crate2 = 397 Edition2018,
323 graph.add_crate_root(FileId(2u32), Edition2018, CfgOptions::default(), Env::default()); 398 None,
399 CfgOptions::default(),
400 Env::default(),
401 Default::default(),
402 );
403 let crate2 = graph.add_crate_root(
404 FileId(2u32),
405 Edition2018,
406 None,
407 CfgOptions::default(),
408 Env::default(),
409 Default::default(),
410 );
324 assert!(graph 411 assert!(graph
325 .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2) 412 .add_dep(crate1, CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
326 .is_ok()); 413 .is_ok());
327 assert_eq!( 414 assert_eq!(
328 graph.dependencies(crate1).collect::<Vec<_>>(), 415 graph[crate1].dependencies,
329 vec![&Dependency { crate_id: crate2, name: "crate_name_with_dashes".into() }] 416 vec![Dependency { crate_id: crate2, name: "crate_name_with_dashes".into() }]
330 ); 417 );
331 } 418 }
332} 419}
diff --git a/crates/ra_db/src/lib.rs b/crates/ra_db/src/lib.rs
index fb002d717..d500d5e85 100644
--- a/crates/ra_db/src/lib.rs
+++ b/crates/ra_db/src/lib.rs
@@ -11,7 +11,8 @@ use ra_syntax::{ast, Parse, SourceFile, TextRange, TextUnit};
11pub use crate::{ 11pub use crate::{
12 cancellation::Canceled, 12 cancellation::Canceled,
13 input::{ 13 input::{
14 CrateGraph, CrateId, CrateName, Dependency, Edition, Env, FileId, SourceRoot, SourceRootId, 14 CrateGraph, CrateId, CrateName, Dependency, Edition, Env, ExternSource, ExternSourceId,
15 FileId, SourceRoot, SourceRootId,
15 }, 16 },
16}; 17};
17pub use relative_path::{RelativePath, RelativePathBuf}; 18pub use relative_path::{RelativePath, RelativePathBuf};
@@ -87,6 +88,12 @@ pub trait FileLoader {
87 fn resolve_relative_path(&self, anchor: FileId, relative_path: &RelativePath) 88 fn resolve_relative_path(&self, anchor: FileId, relative_path: &RelativePath)
88 -> Option<FileId>; 89 -> Option<FileId>;
89 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>>; 90 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>>;
91
92 fn resolve_extern_path(
93 &self,
94 extern_id: ExternSourceId,
95 relative_path: &RelativePath,
96 ) -> Option<FileId>;
90} 97}
91 98
92/// Database which stores all significant input facts: source code and project 99/// Database which stores all significant input facts: source code and project
@@ -164,4 +171,13 @@ impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
164 let source_root = self.0.file_source_root(file_id); 171 let source_root = self.0.file_source_root(file_id);
165 self.0.source_root_crates(source_root) 172 self.0.source_root_crates(source_root)
166 } 173 }
174
175 fn resolve_extern_path(
176 &self,
177 extern_id: ExternSourceId,
178 relative_path: &RelativePath,
179 ) -> Option<FileId> {
180 let source_root = self.0.source_root(SourceRootId(extern_id.0));
181 source_root.file_by_relative_path(&relative_path)
182 }
167} 183}
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml
index 0555a0de7..266c4cff3 100644
--- a/crates/ra_hir/Cargo.toml
+++ b/crates/ra_hir/Cargo.toml
@@ -12,6 +12,8 @@ log = "0.4.8"
12rustc-hash = "1.1.0" 12rustc-hash = "1.1.0"
13either = "1.5.3" 13either = "1.5.3"
14 14
15itertools = "0.8.2"
16
15ra_syntax = { path = "../ra_syntax" } 17ra_syntax = { path = "../ra_syntax" }
16ra_db = { path = "../ra_db" } 18ra_db = { path = "../ra_db" }
17ra_prof = { path = "../ra_prof" } 19ra_prof = { path = "../ra_prof" }
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 2944926e6..41d4e2ed3 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -54,10 +54,11 @@ pub struct CrateDependency {
54 54
55impl Crate { 55impl Crate {
56 pub fn dependencies(self, db: &impl DefDatabase) -> Vec<CrateDependency> { 56 pub fn dependencies(self, db: &impl DefDatabase) -> Vec<CrateDependency> {
57 db.crate_graph() 57 db.crate_graph()[self.id]
58 .dependencies(self.id) 58 .dependencies
59 .iter()
59 .map(|dep| { 60 .map(|dep| {
60 let krate = Crate { id: dep.crate_id() }; 61 let krate = Crate { id: dep.crate_id };
61 let name = dep.as_name(); 62 let name = dep.as_name();
62 CrateDependency { krate, name } 63 CrateDependency { krate, name }
63 }) 64 })
@@ -69,7 +70,9 @@ impl Crate {
69 let crate_graph = db.crate_graph(); 70 let crate_graph = db.crate_graph();
70 crate_graph 71 crate_graph
71 .iter() 72 .iter()
72 .filter(|&krate| crate_graph.dependencies(krate).any(|it| it.crate_id == self.id)) 73 .filter(|&krate| {
74 crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
75 })
73 .map(|id| Crate { id }) 76 .map(|id| Crate { id })
74 .collect() 77 .collect()
75 } 78 }
@@ -80,12 +83,11 @@ impl Crate {
80 } 83 }
81 84
82 pub fn root_file(self, db: &impl DefDatabase) -> FileId { 85 pub fn root_file(self, db: &impl DefDatabase) -> FileId {
83 db.crate_graph().crate_root(self.id) 86 db.crate_graph()[self.id].root_file_id
84 } 87 }
85 88
86 pub fn edition(self, db: &impl DefDatabase) -> Edition { 89 pub fn edition(self, db: &impl DefDatabase) -> Edition {
87 let crate_graph = db.crate_graph(); 90 db.crate_graph()[self.id].edition
88 crate_graph.edition(self.id)
89 } 91 }
90 92
91 pub fn all(db: &impl DefDatabase) -> Vec<Crate> { 93 pub fn all(db: &impl DefDatabase) -> Vec<Crate> {
@@ -204,10 +206,26 @@ impl Module {
204 } 206 }
205 207
206 /// Returns a `ModuleScope`: a set of items, visible in this module. 208 /// Returns a `ModuleScope`: a set of items, visible in this module.
207 pub fn scope(self, db: &impl HirDatabase) -> Vec<(Name, ScopeDef)> { 209 pub fn scope(
210 self,
211 db: &impl HirDatabase,
212 visible_from: Option<Module>,
213 ) -> Vec<(Name, ScopeDef)> {
208 db.crate_def_map(self.id.krate)[self.id.local_id] 214 db.crate_def_map(self.id.krate)[self.id.local_id]
209 .scope 215 .scope
210 .entries() 216 .entries()
217 .filter_map(|(name, def)| {
218 if let Some(m) = visible_from {
219 let filtered = def.filter_visibility(|vis| vis.is_visible_from(db, m.id));
220 if filtered.is_none() && !def.is_none() {
221 None
222 } else {
223 Some((name, filtered))
224 }
225 } else {
226 Some((name, def))
227 }
228 })
211 .map(|(name, def)| (name.clone(), def.into())) 229 .map(|(name, def)| (name.clone(), def.into()))
212 .collect() 230 .collect()
213 } 231 }
@@ -480,6 +498,14 @@ impl Adt {
480 pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> { 498 pub fn krate(self, db: &impl HirDatabase) -> Option<Crate> {
481 Some(self.module(db).krate()) 499 Some(self.module(db).krate())
482 } 500 }
501
502 pub fn name(&self, db: &impl HirDatabase) -> Name {
503 match self {
504 Adt::Struct(s) => s.name(db),
505 Adt::Union(u) => u.name(db),
506 Adt::Enum(e) => e.name(db),
507 }
508 }
483} 509}
484 510
485#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] 511#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
@@ -507,6 +533,14 @@ impl VariantDef {
507 } 533 }
508 } 534 }
509 535
536 pub fn name(&self, db: &impl HirDatabase) -> Name {
537 match self {
538 VariantDef::Struct(s) => s.name(db),
539 VariantDef::Union(u) => u.name(db),
540 VariantDef::EnumVariant(e) => e.name(db),
541 }
542 }
543
510 pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> { 544 pub(crate) fn variant_data(self, db: &impl DefDatabase) -> Arc<VariantData> {
511 match self { 545 match self {
512 VariantDef::Struct(it) => it.variant_data(db), 546 VariantDef::Struct(it) => it.variant_data(db),
@@ -534,6 +568,14 @@ impl DefWithBody {
534 DefWithBody::Static(s) => s.module(db), 568 DefWithBody::Static(s) => s.module(db),
535 } 569 }
536 } 570 }
571
572 pub fn name(self, db: &impl HirDatabase) -> Option<Name> {
573 match self {
574 DefWithBody::Function(f) => Some(f.name(db)),
575 DefWithBody::Static(s) => s.name(db),
576 DefWithBody::Const(c) => c.name(db),
577 }
578 }
537} 579}
538 580
539#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 581#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -571,6 +613,14 @@ impl Function {
571 } 613 }
572} 614}
573 615
616impl HasVisibility for Function {
617 fn visibility(&self, db: &impl HirDatabase) -> Visibility {
618 let function_data = db.function_data(self.id);
619 let visibility = &function_data.visibility;
620 visibility.resolve(db, &self.id.resolver(db))
621 }
622}
623
574#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 624#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
575pub struct Const { 625pub struct Const {
576 pub(crate) id: ConstId, 626 pub(crate) id: ConstId,
@@ -590,6 +640,14 @@ impl Const {
590 } 640 }
591} 641}
592 642
643impl HasVisibility for Const {
644 fn visibility(&self, db: &impl HirDatabase) -> Visibility {
645 let function_data = db.const_data(self.id);
646 let visibility = &function_data.visibility;
647 visibility.resolve(db, &self.id.resolver(db))
648 }
649}
650
593#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 651#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
594pub struct Static { 652pub struct Static {
595 pub(crate) id: StaticId, 653 pub(crate) id: StaticId,
@@ -664,6 +722,14 @@ impl TypeAlias {
664 } 722 }
665} 723}
666 724
725impl HasVisibility for TypeAlias {
726 fn visibility(&self, db: &impl HirDatabase) -> Visibility {
727 let function_data = db.type_alias_data(self.id);
728 let visibility = &function_data.visibility;
729 visibility.resolve(db, &self.id.resolver(db))
730 }
731}
732
667#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 733#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
668pub struct MacroDef { 734pub struct MacroDef {
669 pub(crate) id: MacroDefId, 735 pub(crate) id: MacroDefId,
@@ -751,6 +817,16 @@ impl AssocItem {
751 } 817 }
752} 818}
753 819
820impl HasVisibility for AssocItem {
821 fn visibility(&self, db: &impl HirDatabase) -> Visibility {
822 match self {
823 AssocItem::Function(f) => f.visibility(db),
824 AssocItem::Const(c) => c.visibility(db),
825 AssocItem::TypeAlias(t) => t.visibility(db),
826 }
827 }
828}
829
754#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] 830#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
755pub enum GenericDef { 831pub enum GenericDef {
756 Function(Function), 832 Function(Function),
diff --git a/crates/ra_hir/src/db.rs b/crates/ra_hir/src/db.rs
index a77bf6de6..fcba95091 100644
--- a/crates/ra_hir/src/db.rs
+++ b/crates/ra_hir/src/db.rs
@@ -1,7 +1,7 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3pub use hir_def::db::{ 3pub use hir_def::db::{
4 AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ComputeCrateDefMapQuery, ConstDataQuery, 4 AttrsQuery, BodyQuery, BodyWithSourceMapQuery, ConstDataQuery, CrateDefMapQueryQuery,
5 CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery, 5 CrateLangItemsQuery, DefDatabase, DefDatabaseStorage, DocumentationQuery, EnumDataQuery,
6 ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternConstQuery, 6 ExprScopesQuery, FunctionDataQuery, GenericParamsQuery, ImplDataQuery, InternConstQuery,
7 InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery, 7 InternDatabase, InternDatabaseStorage, InternEnumQuery, InternFunctionQuery, InternImplQuery,
@@ -14,12 +14,11 @@ pub use hir_expand::db::{
14 MacroExpandQuery, ParseMacroQuery, 14 MacroExpandQuery, ParseMacroQuery,
15}; 15};
16pub use hir_ty::db::{ 16pub use hir_ty::db::{
17 AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, DoInferQuery, 17 AssociatedTyDataQuery, AssociatedTyValueQuery, CallableItemSignatureQuery, FieldTypesQuery,
18 FieldTypesQuery, GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, 18 GenericDefaultsQuery, GenericPredicatesForParamQuery, GenericPredicatesQuery, HirDatabase,
19 HirDatabase, HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, 19 HirDatabaseStorage, ImplDatumQuery, ImplSelfTyQuery, ImplTraitQuery, ImplsForTraitQuery,
20 ImplsForTraitQuery, ImplsInCrateQuery, InternAssocTyValueQuery, InternChalkImplQuery, 20 ImplsInCrateQuery, InferQueryQuery, InternAssocTyValueQuery, InternChalkImplQuery,
21 InternTypeCtorQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TraitSolverQuery, 21 InternTypeCtorQuery, StructDatumQuery, TraitDatumQuery, TraitSolveQuery, TyQuery, ValueTyQuery,
22 TyQuery, ValueTyQuery,
23}; 22};
24 23
25#[test] 24#[test]
diff --git a/crates/ra_hir/src/semantics.rs b/crates/ra_hir/src/semantics.rs
index 965d185a4..3782a9984 100644
--- a/crates/ra_hir/src/semantics.rs
+++ b/crates/ra_hir/src/semantics.rs
@@ -6,7 +6,7 @@ use std::{cell::RefCell, fmt, iter::successors};
6 6
7use hir_def::{ 7use hir_def::{
8 resolver::{self, HasResolver, Resolver}, 8 resolver::{self, HasResolver, Resolver},
9 TraitId, 9 AsMacroCall, TraitId,
10}; 10};
11use hir_expand::ExpansionInfo; 11use hir_expand::ExpansionInfo;
12use ra_db::{FileId, FileRange}; 12use ra_db::{FileId, FileRange};
@@ -70,6 +70,20 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
70 Some(node) 70 Some(node)
71 } 71 }
72 72
73 pub fn expand_hypothetical(
74 &self,
75 actual_macro_call: &ast::MacroCall,
76 hypothetical_args: &ast::TokenTree,
77 token_to_map: SyntaxToken,
78 ) -> Option<(SyntaxNode, SyntaxToken)> {
79 let macro_call =
80 self.find_file(actual_macro_call.syntax().clone()).with_value(actual_macro_call);
81 let sa = self.analyze2(macro_call.map(|it| it.syntax()), None);
82 let macro_call_id = macro_call
83 .as_call_id(self.db, |path| sa.resolver.resolve_path_as_macro(self.db, &path))?;
84 hir_expand::db::expand_hypothetical(self.db, macro_call_id, hypothetical_args, token_to_map)
85 }
86
73 pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken { 87 pub fn descend_into_macros(&self, token: SyntaxToken) -> SyntaxToken {
74 let parent = token.parent(); 88 let parent = token.parent();
75 let parent = self.find_file(parent); 89 let parent = self.find_file(parent);
@@ -104,6 +118,25 @@ impl<'db, DB: HirDatabase> Semantics<'db, DB> {
104 node.ancestors_with_macros(self.db).map(|it| it.value) 118 node.ancestors_with_macros(self.db).map(|it| it.value)
105 } 119 }
106 120
121 pub fn ancestors_at_offset_with_macros(
122 &self,
123 node: &SyntaxNode,
124 offset: TextUnit,
125 ) -> impl Iterator<Item = SyntaxNode> + '_ {
126 use itertools::Itertools;
127 node.token_at_offset(offset)
128 .map(|token| self.ancestors_with_macros(token.parent()))
129 .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
130 }
131
132 pub fn find_node_at_offset_with_macros<N: AstNode>(
133 &self,
134 node: &SyntaxNode,
135 offset: TextUnit,
136 ) -> Option<N> {
137 self.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
138 }
139
107 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> { 140 pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<Type> {
108 self.analyze(expr.syntax()).type_of(self.db, &expr) 141 self.analyze(expr.syntax()).type_of(self.db, &expr)
109 } 142 }
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml
index fa25cc4fb..30a12337e 100644
--- a/crates/ra_hir_def/Cargo.toml
+++ b/crates/ra_hir_def/Cargo.toml
@@ -26,4 +26,4 @@ ra_cfg = { path = "../ra_cfg" }
26tt = { path = "../ra_tt", package = "ra_tt" } 26tt = { path = "../ra_tt", package = "ra_tt" }
27 27
28[dev-dependencies] 28[dev-dependencies]
29insta = "0.13.1" 29insta = "0.15.0"
diff --git a/crates/ra_hir_def/src/data.rs b/crates/ra_hir_def/src/data.rs
index 9fc43f3fb..a72eb5369 100644
--- a/crates/ra_hir_def/src/data.rs
+++ b/crates/ra_hir_def/src/data.rs
@@ -7,13 +7,16 @@ use hir_expand::{
7 AstId, InFile, 7 AstId, InFile,
8}; 8};
9use ra_prof::profile; 9use ra_prof::profile;
10use ra_syntax::ast::{self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner}; 10use ra_syntax::ast::{
11 self, AstNode, ImplItem, ModuleItemOwner, NameOwner, TypeAscriptionOwner, VisibilityOwner,
12};
11 13
12use crate::{ 14use crate::{
13 db::DefDatabase, 15 db::DefDatabase,
14 path::{path, GenericArgs, Path}, 16 path::{path, GenericArgs, Path},
15 src::HasSource, 17 src::HasSource,
16 type_ref::{Mutability, TypeBound, TypeRef}, 18 type_ref::{Mutability, TypeBound, TypeRef},
19 visibility::RawVisibility,
17 AssocContainerId, AssocItemId, ConstId, ConstLoc, Expander, FunctionId, FunctionLoc, HasModule, 20 AssocContainerId, AssocItemId, ConstId, ConstLoc, Expander, FunctionId, FunctionLoc, HasModule,
18 ImplId, Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc, 21 ImplId, Intern, Lookup, ModuleId, StaticId, TraitId, TypeAliasId, TypeAliasLoc,
19}; 22};
@@ -26,6 +29,7 @@ pub struct FunctionData {
26 /// True if the first param is `self`. This is relevant to decide whether this 29 /// True if the first param is `self`. This is relevant to decide whether this
27 /// can be called as a method. 30 /// can be called as a method.
28 pub has_self_param: bool, 31 pub has_self_param: bool,
32 pub visibility: RawVisibility,
29} 33}
30 34
31impl FunctionData { 35impl FunctionData {
@@ -72,7 +76,9 @@ impl FunctionData {
72 ret_type 76 ret_type
73 }; 77 };
74 78
75 let sig = FunctionData { name, params, ret_type, has_self_param }; 79 let visibility = RawVisibility::from_ast(db, src.map(|s| s.visibility()));
80
81 let sig = FunctionData { name, params, ret_type, has_self_param, visibility };
76 Arc::new(sig) 82 Arc::new(sig)
77 } 83 }
78} 84}
@@ -91,6 +97,7 @@ fn desugar_future_path(orig: TypeRef) -> Path {
91pub struct TypeAliasData { 97pub struct TypeAliasData {
92 pub name: Name, 98 pub name: Name,
93 pub type_ref: Option<TypeRef>, 99 pub type_ref: Option<TypeRef>,
100 pub visibility: RawVisibility,
94} 101}
95 102
96impl TypeAliasData { 103impl TypeAliasData {
@@ -98,10 +105,11 @@ impl TypeAliasData {
98 db: &impl DefDatabase, 105 db: &impl DefDatabase,
99 typ: TypeAliasId, 106 typ: TypeAliasId,
100 ) -> Arc<TypeAliasData> { 107 ) -> Arc<TypeAliasData> {
101 let node = typ.lookup(db).source(db).value; 108 let node = typ.lookup(db).source(db);
102 let name = node.name().map_or_else(Name::missing, |n| n.as_name()); 109 let name = node.value.name().map_or_else(Name::missing, |n| n.as_name());
103 let type_ref = node.type_ref().map(TypeRef::from_ast); 110 let type_ref = node.value.type_ref().map(TypeRef::from_ast);
104 Arc::new(TypeAliasData { name, type_ref }) 111 let visibility = RawVisibility::from_ast(db, node.map(|n| n.visibility()));
112 Arc::new(TypeAliasData { name, type_ref, visibility })
105 } 113 }
106} 114}
107 115
@@ -217,23 +225,28 @@ pub struct ConstData {
217 /// const _: () = (); 225 /// const _: () = ();
218 pub name: Option<Name>, 226 pub name: Option<Name>,
219 pub type_ref: TypeRef, 227 pub type_ref: TypeRef,
228 pub visibility: RawVisibility,
220} 229}
221 230
222impl ConstData { 231impl ConstData {
223 pub(crate) fn const_data_query(db: &impl DefDatabase, konst: ConstId) -> Arc<ConstData> { 232 pub(crate) fn const_data_query(db: &impl DefDatabase, konst: ConstId) -> Arc<ConstData> {
224 let node = konst.lookup(db).source(db).value; 233 let node = konst.lookup(db).source(db);
225 Arc::new(ConstData::new(&node)) 234 Arc::new(ConstData::new(db, node))
226 } 235 }
227 236
228 pub(crate) fn static_data_query(db: &impl DefDatabase, konst: StaticId) -> Arc<ConstData> { 237 pub(crate) fn static_data_query(db: &impl DefDatabase, konst: StaticId) -> Arc<ConstData> {
229 let node = konst.lookup(db).source(db).value; 238 let node = konst.lookup(db).source(db);
230 Arc::new(ConstData::new(&node)) 239 Arc::new(ConstData::new(db, node))
231 } 240 }
232 241
233 fn new<N: NameOwner + TypeAscriptionOwner>(node: &N) -> ConstData { 242 fn new<N: NameOwner + TypeAscriptionOwner + VisibilityOwner>(
234 let name = node.name().map(|n| n.as_name()); 243 db: &impl DefDatabase,
235 let type_ref = TypeRef::from_ast_opt(node.ascribed_type()); 244 node: InFile<N>,
236 ConstData { name, type_ref } 245 ) -> ConstData {
246 let name = node.value.name().map(|n| n.as_name());
247 let type_ref = TypeRef::from_ast_opt(node.value.ascribed_type());
248 let visibility = RawVisibility::from_ast(db, node.map(|n| n.visibility()));
249 ConstData { name, type_ref, visibility }
237 } 250 }
238} 251}
239 252
diff --git a/crates/ra_hir_def/src/db.rs b/crates/ra_hir_def/src/db.rs
index 2338b2cbe..dcd377aae 100644
--- a/crates/ra_hir_def/src/db.rs
+++ b/crates/ra_hir_def/src/db.rs
@@ -47,11 +47,11 @@ pub trait DefDatabase: InternDatabase + AstDatabase {
47 #[salsa::invoke(RawItems::raw_items_query)] 47 #[salsa::invoke(RawItems::raw_items_query)]
48 fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>; 48 fn raw_items(&self, file_id: HirFileId) -> Arc<RawItems>;
49 49
50 #[salsa::transparent] 50 #[salsa::invoke(crate_def_map_wait)]
51 fn crate_def_map(&self, krate: CrateId) -> Arc<CrateDefMap>; 51 fn crate_def_map(&self, krate: CrateId) -> Arc<CrateDefMap>;
52 52
53 #[salsa::invoke(CrateDefMap::compute_crate_def_map_query)] 53 #[salsa::invoke(CrateDefMap::crate_def_map_query)]
54 fn compute_crate_def_map(&self, krate: CrateId) -> Arc<CrateDefMap>; 54 fn crate_def_map_query(&self, krate: CrateId) -> Arc<CrateDefMap>;
55 55
56 #[salsa::invoke(StructData::struct_data_query)] 56 #[salsa::invoke(StructData::struct_data_query)]
57 fn struct_data(&self, id: StructId) -> Arc<StructData>; 57 fn struct_data(&self, id: StructId) -> Arc<StructData>;
@@ -109,7 +109,7 @@ pub trait DefDatabase: InternDatabase + AstDatabase {
109 fn documentation(&self, def: AttrDefId) -> Option<Documentation>; 109 fn documentation(&self, def: AttrDefId) -> Option<Documentation>;
110} 110}
111 111
112fn crate_def_map(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> { 112fn crate_def_map_wait(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
113 let _p = profile("wait_crate_def_map"); 113 let _p = profile("crate_def_map:wait");
114 db.compute_crate_def_map(krate) 114 db.crate_def_map_query(krate)
115} 115}
diff --git a/crates/ra_hir_def/src/find_path.rs b/crates/ra_hir_def/src/find_path.rs
index 43b9b124a..07ca74ec3 100644
--- a/crates/ra_hir_def/src/find_path.rs
+++ b/crates/ra_hir_def/src/find_path.rs
@@ -176,7 +176,7 @@ fn find_importable_locations(
176 // directly (only through reexports in direct dependencies). 176 // directly (only through reexports in direct dependencies).
177 for krate in Some(from.krate) 177 for krate in Some(from.krate)
178 .into_iter() 178 .into_iter()
179 .chain(crate_graph.dependencies(from.krate).map(|dep| dep.crate_id)) 179 .chain(crate_graph[from.krate].dependencies.iter().map(|dep| dep.crate_id))
180 { 180 {
181 result.extend( 181 result.extend(
182 importable_locations_in_crate(db, item, krate) 182 importable_locations_in_crate(db, item, krate)
diff --git a/crates/ra_hir_def/src/lang_item.rs b/crates/ra_hir_def/src/lang_item.rs
index 5a336ea1f..6de49730e 100644
--- a/crates/ra_hir_def/src/lang_item.rs
+++ b/crates/ra_hir_def/src/lang_item.rs
@@ -116,8 +116,9 @@ impl LangItems {
116 if let Some(target) = start_crate_target { 116 if let Some(target) = start_crate_target {
117 return Some(*target); 117 return Some(*target);
118 } 118 }
119 db.crate_graph() 119 db.crate_graph()[start_crate]
120 .dependencies(start_crate) 120 .dependencies
121 .iter()
121 .find_map(|dep| db.lang_item(dep.crate_id, item.clone())) 122 .find_map(|dep| db.lang_item(dep.crate_id, item.clone()))
122 } 123 }
123 124
diff --git a/crates/ra_hir_def/src/nameres.rs b/crates/ra_hir_def/src/nameres.rs
index 166d1c421..81eac52ad 100644
--- a/crates/ra_hir_def/src/nameres.rs
+++ b/crates/ra_hir_def/src/nameres.rs
@@ -176,16 +176,11 @@ pub struct ModuleData {
176} 176}
177 177
178impl CrateDefMap { 178impl CrateDefMap {
179 pub(crate) fn compute_crate_def_map_query( 179 pub(crate) fn crate_def_map_query(db: &impl DefDatabase, krate: CrateId) -> Arc<CrateDefMap> {
180 // Note that this doesn't have `+ AstDatabase`! 180 let _p = profile("crate_def_map_query")
181 // This gurantess that `CrateDefMap` is stable across reparses. 181 .detail(|| db.crate_graph()[krate].display_name.clone().unwrap_or_default());
182 db: &impl DefDatabase,
183 krate: CrateId,
184 ) -> Arc<CrateDefMap> {
185 let _p = profile("crate_def_map");
186 let def_map = { 182 let def_map = {
187 let crate_graph = db.crate_graph(); 183 let edition = db.crate_graph()[krate].edition;
188 let edition = crate_graph.edition(krate);
189 let mut modules: Arena<LocalModuleId, ModuleData> = Arena::default(); 184 let mut modules: Arena<LocalModuleId, ModuleData> = Arena::default();
190 let root = modules.alloc(ModuleData::default()); 185 let root = modules.alloc(ModuleData::default());
191 CrateDefMap { 186 CrateDefMap {
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index 51c65a5d7..d0459d9b0 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -34,7 +34,7 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
34 let crate_graph = db.crate_graph(); 34 let crate_graph = db.crate_graph();
35 35
36 // populate external prelude 36 // populate external prelude
37 for dep in crate_graph.dependencies(def_map.krate) { 37 for dep in &crate_graph[def_map.krate].dependencies {
38 let dep_def_map = db.crate_def_map(dep.crate_id); 38 let dep_def_map = db.crate_def_map(dep.crate_id);
39 log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id); 39 log::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);
40 def_map.extern_prelude.insert( 40 def_map.extern_prelude.insert(
@@ -51,7 +51,7 @@ pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> C
51 } 51 }
52 } 52 }
53 53
54 let cfg_options = crate_graph.cfg_options(def_map.krate); 54 let cfg_options = &crate_graph[def_map.krate].cfg_options;
55 55
56 let mut collector = DefCollector { 56 let mut collector = DefCollector {
57 db, 57 db,
@@ -128,8 +128,7 @@ where
128 DB: DefDatabase, 128 DB: DefDatabase,
129{ 129{
130 fn collect(&mut self) { 130 fn collect(&mut self) {
131 let crate_graph = self.db.crate_graph(); 131 let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
132 let file_id = crate_graph.crate_root(self.def_map.krate);
133 let raw_items = self.db.raw_items(file_id.into()); 132 let raw_items = self.db.raw_items(file_id.into());
134 let module_id = self.def_map.root; 133 let module_id = self.def_map.root;
135 self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id }; 134 self.def_map.modules[module_id].origin = ModuleOrigin::CrateRoot { definition: file_id };
@@ -955,7 +954,7 @@ mod tests {
955 let krate = db.test_crate(); 954 let krate = db.test_crate();
956 955
957 let def_map = { 956 let def_map = {
958 let edition = db.crate_graph().edition(krate); 957 let edition = db.crate_graph()[krate].edition;
959 let mut modules: Arena<LocalModuleId, ModuleData> = Arena::default(); 958 let mut modules: Arena<LocalModuleId, ModuleData> = Arena::default();
960 let root = modules.alloc(ModuleData::default()); 959 let root = modules.alloc(ModuleData::default());
961 CrateDefMap { 960 CrateDefMap {
diff --git a/crates/ra_hir_def/src/test_db.rs b/crates/ra_hir_def/src/test_db.rs
index 1568820e9..0756916a8 100644
--- a/crates/ra_hir_def/src/test_db.rs
+++ b/crates/ra_hir_def/src/test_db.rs
@@ -6,7 +6,7 @@ use std::{
6}; 6};
7 7
8use crate::db::DefDatabase; 8use crate::db::DefDatabase;
9use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath}; 9use ra_db::{salsa, CrateId, ExternSourceId, FileId, FileLoader, FileLoaderDelegate, RelativePath};
10 10
11#[salsa::database( 11#[salsa::database(
12 ra_db::SourceDatabaseExtStorage, 12 ra_db::SourceDatabaseExtStorage,
@@ -52,6 +52,14 @@ impl FileLoader for TestDB {
52 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { 52 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
53 FileLoaderDelegate(self).relevant_crates(file_id) 53 FileLoaderDelegate(self).relevant_crates(file_id)
54 } 54 }
55
56 fn resolve_extern_path(
57 &self,
58 extern_id: ExternSourceId,
59 relative_path: &RelativePath,
60 ) -> Option<FileId> {
61 FileLoaderDelegate(self).resolve_extern_path(extern_id, relative_path)
62 }
55} 63}
56 64
57impl TestDB { 65impl TestDB {
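
The test databases now forward the new `resolve_extern_path` hook of `FileLoader`. Conceptually, the hook maps an external source root (for example a build script's OUT_DIR) plus a relative path to a known file; a toy standalone version with stand-in types could look like this:

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct FileId(u32);
    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct ExternSourceId(u32);

    trait FileLoader {
        /// Resolve a path like "foo.rs" relative to an external source root
        /// (e.g. OUT_DIR) to a file the database already knows about.
        fn resolve_extern_path(&self, anchor: ExternSourceId, relative_path: &str) -> Option<FileId>;
    }

    struct TestDb {
        // (source root, path within that root) -> file
        extern_files: HashMap<(ExternSourceId, String), FileId>,
    }

    impl FileLoader for TestDb {
        fn resolve_extern_path(&self, anchor: ExternSourceId, relative_path: &str) -> Option<FileId> {
            self.extern_files.get(&(anchor, relative_path.to_string())).copied()
        }
    }

    fn main() {
        let out_dir = ExternSourceId(0);
        let mut extern_files = HashMap::new();
        extern_files.insert((out_dir, "foo.rs".to_string()), FileId(42));
        let db = TestDb { extern_files };
        assert_eq!(db.resolve_extern_path(out_dir, "foo.rs"), Some(FileId(42)));
    }
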
diff --git a/crates/ra_hir_expand/src/builtin_macro.rs b/crates/ra_hir_expand/src/builtin_macro.rs
index 9fc33e4b1..a90007f26 100644
--- a/crates/ra_hir_expand/src/builtin_macro.rs
+++ b/crates/ra_hir_expand/src/builtin_macro.rs
@@ -7,6 +7,7 @@ use crate::{
7 7
8use crate::{quote, EagerMacroId, LazyMacroId, MacroCallId}; 8use crate::{quote, EagerMacroId, LazyMacroId, MacroCallId};
9use either::Either; 9use either::Either;
10use mbe::parse_to_token_tree;
10use ra_db::{FileId, RelativePath}; 11use ra_db::{FileId, RelativePath};
11use ra_parser::FragmentKind; 12use ra_parser::FragmentKind;
12 13
@@ -89,15 +90,15 @@ register_builtin! {
89 (line, Line) => line_expand, 90 (line, Line) => line_expand,
90 (stringify, Stringify) => stringify_expand, 91 (stringify, Stringify) => stringify_expand,
91 (format_args, FormatArgs) => format_args_expand, 92 (format_args, FormatArgs) => format_args_expand,
92 (env, Env) => env_expand,
93 (option_env, OptionEnv) => option_env_expand,
94 // format_args_nl only differs in that it adds a newline in the end, 93 // format_args_nl only differs in that it adds a newline in the end,
95 // so we use the same stub expansion for now 94 // so we use the same stub expansion for now
96 (format_args_nl, FormatArgsNl) => format_args_expand, 95 (format_args_nl, FormatArgsNl) => format_args_expand,
97 96
98 EAGER: 97 EAGER:
99 (concat, Concat) => concat_expand, 98 (concat, Concat) => concat_expand,
100 (include, Include) => include_expand 99 (include, Include) => include_expand,
100 (env, Env) => env_expand,
101 (option_env, OptionEnv) => option_env_expand
101} 102}
102 103
103fn line_expand( 104fn line_expand(
@@ -136,28 +137,6 @@ fn stringify_expand(
136 Ok(expanded) 137 Ok(expanded)
137} 138}
138 139
139fn env_expand(
140 _db: &dyn AstDatabase,
141 _id: LazyMacroId,
142 _tt: &tt::Subtree,
143) -> Result<tt::Subtree, mbe::ExpandError> {
144 // dummy implementation for type-checking purposes
145 let expanded = quote! { "" };
146
147 Ok(expanded)
148}
149
150fn option_env_expand(
151 _db: &dyn AstDatabase,
152 _id: LazyMacroId,
153 _tt: &tt::Subtree,
154) -> Result<tt::Subtree, mbe::ExpandError> {
155 // dummy implementation for type-checking purposes
156 let expanded = quote! { std::option::Option::None::<&str> };
157
158 Ok(expanded)
159}
160
161fn column_expand( 140fn column_expand(
162 _db: &dyn AstDatabase, 141 _db: &dyn AstDatabase,
163 _id: LazyMacroId, 142 _id: LazyMacroId,
@@ -274,44 +253,101 @@ fn concat_expand(
274 253
275fn relative_file(db: &dyn AstDatabase, call_id: MacroCallId, path: &str) -> Option<FileId> { 254fn relative_file(db: &dyn AstDatabase, call_id: MacroCallId, path: &str) -> Option<FileId> {
276 let call_site = call_id.as_file().original_file(db); 255 let call_site = call_id.as_file().original_file(db);
277 let path = RelativePath::new(&path);
278 256
279 db.resolve_relative_path(call_site, &path) 257 // Handle trivial case
258 if let Some(res) = db.resolve_relative_path(call_site, &RelativePath::new(&path)) {
 259 // Prevent a file from `include!`-ing itself
260 return if res == call_site { None } else { Some(res) };
261 }
262
 263 // Otherwise, try extern source paths
264 let krate = db.relevant_crates(call_site).get(0)?.clone();
265 let (extern_source_id, relative_file) =
266 db.crate_graph()[krate].extern_source.extern_path(path)?;
267
268 db.resolve_extern_path(extern_source_id, &relative_file)
280} 269}
281 270
282fn include_expand( 271fn parse_string(tt: &tt::Subtree) -> Result<String, mbe::ExpandError> {
283 db: &dyn AstDatabase, 272 tt.token_trees
284 arg_id: EagerMacroId,
285 tt: &tt::Subtree,
286) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
287 let path = tt
288 .token_trees
289 .get(0) 273 .get(0)
290 .and_then(|tt| match tt { 274 .and_then(|tt| match tt {
291 tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it), 275 tt::TokenTree::Leaf(tt::Leaf::Literal(it)) => unquote_str(&it),
292 _ => None, 276 _ => None,
293 }) 277 })
294 .ok_or_else(|| mbe::ExpandError::ConversionError)?; 278 .ok_or_else(|| mbe::ExpandError::ConversionError)
279}
295 280
281fn include_expand(
282 db: &dyn AstDatabase,
283 arg_id: EagerMacroId,
284 tt: &tt::Subtree,
285) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
286 let path = parse_string(tt)?;
296 let file_id = 287 let file_id =
297 relative_file(db, arg_id.into(), &path).ok_or_else(|| mbe::ExpandError::ConversionError)?; 288 relative_file(db, arg_id.into(), &path).ok_or_else(|| mbe::ExpandError::ConversionError)?;
298 289
299 // FIXME: 290 // FIXME:
300 // Handle include as expression 291 // Handle include as expression
301 let node = 292 let res = parse_to_token_tree(&db.file_text(file_id.into()))
302 db.parse_or_expand(file_id.into()).ok_or_else(|| mbe::ExpandError::ConversionError)?; 293 .ok_or_else(|| mbe::ExpandError::ConversionError)?
303 let res = 294 .0;
304 mbe::syntax_node_to_token_tree(&node).ok_or_else(|| mbe::ExpandError::ConversionError)?.0;
305 295
306 Ok((res, FragmentKind::Items)) 296 Ok((res, FragmentKind::Items))
307} 297}
308 298
299fn get_env_inner(db: &dyn AstDatabase, arg_id: EagerMacroId, key: &str) -> Option<String> {
300 let call_id: MacroCallId = arg_id.into();
301 let original_file = call_id.as_file().original_file(db);
302
303 let krate = db.relevant_crates(original_file).get(0)?.clone();
304 db.crate_graph()[krate].env.get(key)
305}
306
307fn env_expand(
308 db: &dyn AstDatabase,
309 arg_id: EagerMacroId,
310 tt: &tt::Subtree,
311) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
312 let key = parse_string(tt)?;
313
314 // FIXME:
315 // If the environment variable is not defined int rustc, then a compilation error will be emitted.
316 // We might do the same if we fully support all other stuffs.
317 // But for now on, we should return some dummy string for better type infer purpose.
318 // However, we cannot use an empty string here, because for
319 // `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become
320 // `include!("foo.rs"), which might go to infinite loop
321 let s = get_env_inner(db, arg_id, &key).unwrap_or("__RA_UNIMPLEMENTATED__".to_string());
322 let expanded = quote! { #s };
323
324 Ok((expanded, FragmentKind::Expr))
325}
326
327fn option_env_expand(
328 db: &dyn AstDatabase,
329 arg_id: EagerMacroId,
330 tt: &tt::Subtree,
331) -> Result<(tt::Subtree, FragmentKind), mbe::ExpandError> {
332 let key = parse_string(tt)?;
333 let expanded = match get_env_inner(db, arg_id, &key) {
334 None => quote! { std::option::Option::None::<&str> },
335 Some(s) => quote! { std::option::Some(#s) },
336 };
337
338 Ok((expanded, FragmentKind::Expr))
339}
340
309#[cfg(test)] 341#[cfg(test)]
310mod tests { 342mod tests {
311 use super::*; 343 use super::*;
312 use crate::{name::AsName, test_db::TestDB, AstNode, MacroCallId, MacroCallKind, MacroCallLoc}; 344 use crate::{
345 name::AsName, test_db::TestDB, AstNode, EagerCallLoc, MacroCallId, MacroCallKind,
346 MacroCallLoc,
347 };
313 use ra_db::{fixture::WithFixture, SourceDatabase}; 348 use ra_db::{fixture::WithFixture, SourceDatabase};
314 use ra_syntax::ast::NameOwner; 349 use ra_syntax::ast::NameOwner;
350 use std::sync::Arc;
315 351
316 fn expand_builtin_macro(ra_fixture: &str) -> String { 352 fn expand_builtin_macro(ra_fixture: &str) -> String {
317 let (db, file_id) = TestDB::with_single_file(&ra_fixture); 353 let (db, file_id) = TestDB::with_single_file(&ra_fixture);
@@ -322,27 +358,61 @@ mod tests {
322 let ast_id_map = db.ast_id_map(file_id.into()); 358 let ast_id_map = db.ast_id_map(file_id.into());
323 359
324 let expander = find_by_name(&macro_calls[0].name().unwrap().as_name()).unwrap(); 360 let expander = find_by_name(&macro_calls[0].name().unwrap().as_name()).unwrap();
325 let expander = expander.left().unwrap();
326 361
327 // the first one should be a macro_rules 362 let file_id = match expander {
328 let def = MacroDefId { 363 Either::Left(expander) => {
329 krate: Some(CrateId(0)), 364 // the first one should be a macro_rules
330 ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))), 365 let def = MacroDefId {
331 kind: MacroDefKind::BuiltIn(expander), 366 krate: Some(CrateId(0)),
332 }; 367 ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
368 kind: MacroDefKind::BuiltIn(expander),
369 };
333 370
334 let loc = MacroCallLoc { 371 let loc = MacroCallLoc {
335 def, 372 def,
336 kind: MacroCallKind::FnLike(AstId::new( 373 kind: MacroCallKind::FnLike(AstId::new(
337 file_id.into(), 374 file_id.into(),
338 ast_id_map.ast_id(&macro_calls[1]), 375 ast_id_map.ast_id(&macro_calls[1]),
339 )), 376 )),
340 }; 377 };
341 378
342 let id: MacroCallId = db.intern_macro(loc).into(); 379 let id: MacroCallId = db.intern_macro(loc).into();
343 let parsed = db.parse_or_expand(id.as_file()).unwrap(); 380 id.as_file()
381 }
382 Either::Right(expander) => {
383 // the first one should be a macro_rules
384 let def = MacroDefId {
385 krate: Some(CrateId(0)),
386 ast_id: Some(AstId::new(file_id.into(), ast_id_map.ast_id(&macro_calls[0]))),
387 kind: MacroDefKind::BuiltInEager(expander),
388 };
389
390 let args = macro_calls[1].token_tree().unwrap();
391 let parsed_args = mbe::ast_to_token_tree(&args).unwrap().0;
392
393 let arg_id = db.intern_eager_expansion({
394 EagerCallLoc {
395 def,
396 fragment: FragmentKind::Expr,
397 subtree: Arc::new(parsed_args.clone()),
398 file_id: file_id.into(),
399 }
400 });
401
402 let (subtree, fragment) = expander.expand(&db, arg_id, &parsed_args).unwrap();
403 let eager = EagerCallLoc {
404 def,
405 fragment,
406 subtree: Arc::new(subtree),
407 file_id: file_id.into(),
408 };
344 409
345 parsed.text().to_string() 410 let id: MacroCallId = db.intern_eager_expansion(eager.into()).into();
411 id.as_file()
412 }
413 };
414
415 db.parse_or_expand(file_id).unwrap().to_string()
346 } 416 }
347 417
348 #[test] 418 #[test]
@@ -394,7 +464,7 @@ mod tests {
394 "#, 464 "#,
395 ); 465 );
396 466
397 assert_eq!(expanded, "\"\""); 467 assert_eq!(expanded, "\"__RA_UNIMPLEMENTATED__\"");
398 } 468 }
399 469
400 #[test] 470 #[test]
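
With this change, `env!` and `option_env!` become eager macros that read the per-crate environment recorded in the crate graph, and `env!` falls back to a dummy string so that `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` never degenerates into `include!("foo.rs")`. A standalone sketch of that lookup-with-fallback logic, with stand-in types instead of the real expander signatures:

    use std::collections::HashMap;

    struct Env {
        vars: HashMap<String, String>,
    }

    impl Env {
        fn get(&self, key: &str) -> Option<String> {
            self.vars.get(key).cloned()
        }
    }

    /// Mirrors the spirit of `env_expand`: return the value if the build system
    /// provided one, otherwise a non-empty placeholder (an empty string would make
    /// the concat/include pattern point back at the including file).
    fn expand_env(env: &Env, key: &str) -> String {
        env.get(key).unwrap_or_else(|| "__RA_UNIMPLEMENTATED__".to_string())
    }

    /// Mirrors `option_env_expand`: surface absence as `None` instead of a dummy.
    fn expand_option_env(env: &Env, key: &str) -> Option<String> {
        env.get(key)
    }

    fn main() {
        let env = Env { vars: HashMap::from([("foo".to_string(), "bar".to_string())]) };
        assert_eq!(expand_env(&env, "foo"), "bar");
        assert_eq!(expand_env(&env, "OUT_DIR"), "__RA_UNIMPLEMENTATED__");
        assert_eq!(expand_option_env(&env, "OUT_DIR"), None);
    }
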
diff --git a/crates/ra_hir_expand/src/db.rs b/crates/ra_hir_expand/src/db.rs
index f3a84cacc..29dde3d80 100644
--- a/crates/ra_hir_expand/src/db.rs
+++ b/crates/ra_hir_expand/src/db.rs
@@ -72,6 +72,30 @@ pub trait AstDatabase: SourceDatabase {
72 fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId; 72 fn intern_eager_expansion(&self, eager: EagerCallLoc) -> EagerMacroId;
73} 73}
74 74
75/// This expands the given macro call, but with different arguments. This is
76/// used for completion, where we want to see what 'would happen' if we insert a
77/// token. The `token_to_map` is mapped down into the expansion, and the mapped
78/// token is returned.
79pub fn expand_hypothetical(
80 db: &impl AstDatabase,
81 actual_macro_call: MacroCallId,
82 hypothetical_args: &ra_syntax::ast::TokenTree,
83 token_to_map: ra_syntax::SyntaxToken,
84) -> Option<(SyntaxNode, ra_syntax::SyntaxToken)> {
85 let macro_file = MacroFile { macro_call_id: actual_macro_call };
86 let (tt, tmap_1) = mbe::syntax_node_to_token_tree(hypothetical_args.syntax()).unwrap();
87 let range =
88 token_to_map.text_range().checked_sub(hypothetical_args.syntax().text_range().start())?;
89 let token_id = tmap_1.token_by_range(range)?;
90 let macro_def = expander(db, actual_macro_call)?;
91 let (node, tmap_2) =
92 parse_macro_with_arg(db, macro_file, Some(std::sync::Arc::new((tt, tmap_1))))?;
93 let token_id = macro_def.0.map_id_down(token_id);
94 let range = tmap_2.range_by_token(token_id)?.by_kind(token_to_map.kind())?;
95 let token = ra_syntax::algo::find_covering_element(&node.syntax_node(), range).into_token()?;
96 Some((node.syntax_node(), token))
97}
98
75pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> { 99pub(crate) fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
76 let map = 100 let map =
77 db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it)); 101 db.parse_or_expand(file_id).map_or_else(AstIdMap::default, |it| AstIdMap::from_source(&it));
@@ -130,15 +154,42 @@ pub(crate) fn macro_expand(
130 db: &dyn AstDatabase, 154 db: &dyn AstDatabase,
131 id: MacroCallId, 155 id: MacroCallId,
132) -> Result<Arc<tt::Subtree>, String> { 156) -> Result<Arc<tt::Subtree>, String> {
157 macro_expand_with_arg(db, id, None)
158}
159
160fn expander(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(TokenExpander, mbe::TokenMap)>> {
161 let lazy_id = match id {
162 MacroCallId::LazyMacro(id) => id,
163 MacroCallId::EagerMacro(_id) => {
164 return None;
165 }
166 };
167
168 let loc = db.lookup_intern_macro(lazy_id);
169 let macro_rules = db.macro_def(loc.def)?;
170 Some(macro_rules)
171}
172
173fn macro_expand_with_arg(
174 db: &dyn AstDatabase,
175 id: MacroCallId,
176 arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
177) -> Result<Arc<tt::Subtree>, String> {
133 let lazy_id = match id { 178 let lazy_id = match id {
134 MacroCallId::LazyMacro(id) => id, 179 MacroCallId::LazyMacro(id) => id,
135 MacroCallId::EagerMacro(id) => { 180 MacroCallId::EagerMacro(id) => {
136 return Ok(db.lookup_intern_eager_expansion(id).subtree); 181 if arg.is_some() {
182 return Err(
183 "hypothetical macro expansion not implemented for eager macro".to_owned()
184 );
185 } else {
186 return Ok(db.lookup_intern_eager_expansion(id).subtree);
187 }
137 } 188 }
138 }; 189 };
139 190
140 let loc = db.lookup_intern_macro(lazy_id); 191 let loc = db.lookup_intern_macro(lazy_id);
141 let macro_arg = db.macro_arg(id).ok_or("Fail to args in to tt::TokenTree")?; 192 let macro_arg = arg.or_else(|| db.macro_arg(id)).ok_or("Fail to args in to tt::TokenTree")?;
142 193
143 let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?; 194 let macro_rules = db.macro_def(loc.def).ok_or("Fail to find macro definition")?;
144 let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?; 195 let tt = macro_rules.0.expand(db, lazy_id, &macro_arg.0).map_err(|err| format!("{:?}", err))?;
@@ -163,11 +214,23 @@ pub(crate) fn parse_macro(
163 db: &dyn AstDatabase, 214 db: &dyn AstDatabase,
164 macro_file: MacroFile, 215 macro_file: MacroFile,
165) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> { 216) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
217 parse_macro_with_arg(db, macro_file, None)
218}
219
220pub fn parse_macro_with_arg(
221 db: &dyn AstDatabase,
222 macro_file: MacroFile,
223 arg: Option<Arc<(tt::Subtree, mbe::TokenMap)>>,
224) -> Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)> {
166 let _p = profile("parse_macro_query"); 225 let _p = profile("parse_macro_query");
167 226
168 let macro_call_id = macro_file.macro_call_id; 227 let macro_call_id = macro_file.macro_call_id;
169 let tt = db 228 let expansion = if let Some(arg) = arg {
170 .macro_expand(macro_call_id) 229 macro_expand_with_arg(db, macro_call_id, Some(arg))
230 } else {
231 db.macro_expand(macro_call_id)
232 };
233 let tt = expansion
171 .map_err(|err| { 234 .map_err(|err| {
172 // Note: 235 // Note:
173 // The final goal we would like to make all parse_macro success, 236 // The final goal we would like to make all parse_macro success,
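
`expand_hypothetical` re-expands a macro call with substituted arguments and follows a single token through two token maps: from a range in the hypothetical argument text to a token id, and from that id to a range in the expansion (the real code additionally maps the id down through the macro definition). A toy standalone model of that round trip, with invented map types in place of `mbe::TokenMap`:

    use std::collections::HashMap;
    use std::ops::Range;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct TokenId(u32);

    /// Argument side: byte range in the hypothetical argument text -> token id.
    struct ArgMap {
        by_range: HashMap<(usize, usize), TokenId>,
    }

    /// Expansion side: token id -> byte range in the expanded text.
    struct ExpansionMap {
        by_token: HashMap<TokenId, Range<usize>>,
    }

    fn follow_token(
        arg_map: &ArgMap,
        expansion_map: &ExpansionMap,
        token_range: Range<usize>,
    ) -> Option<Range<usize>> {
        // 1. Find which token the range of interest corresponds to.
        let id = *arg_map.by_range.get(&(token_range.start, token_range.end))?;
        // 2. Find where that token ended up in the expansion.
        expansion_map.by_token.get(&id).cloned()
    }

    fn main() {
        // Pretend `m!(a.foo)` expands to `a.foo`, and `foo` is token #2.
        let arg_map = ArgMap { by_range: HashMap::from([((2, 5), TokenId(2))]) };
        let expansion_map = ExpansionMap { by_token: HashMap::from([(TokenId(2), 2..5)]) };
        assert_eq!(follow_token(&arg_map, &expansion_map, 2..5), Some(2..5));
    }
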
diff --git a/crates/ra_hir_expand/src/test_db.rs b/crates/ra_hir_expand/src/test_db.rs
index 918736e2a..c1fb762de 100644
--- a/crates/ra_hir_expand/src/test_db.rs
+++ b/crates/ra_hir_expand/src/test_db.rs
@@ -5,7 +5,7 @@ use std::{
5 sync::{Arc, Mutex}, 5 sync::{Arc, Mutex},
6}; 6};
7 7
8use ra_db::{salsa, CrateId, FileId, FileLoader, FileLoaderDelegate, RelativePath}; 8use ra_db::{salsa, CrateId, ExternSourceId, FileId, FileLoader, FileLoaderDelegate, RelativePath};
9 9
10#[salsa::database( 10#[salsa::database(
11 ra_db::SourceDatabaseExtStorage, 11 ra_db::SourceDatabaseExtStorage,
@@ -51,4 +51,11 @@ impl FileLoader for TestDB {
51 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { 51 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
52 FileLoaderDelegate(self).relevant_crates(file_id) 52 FileLoaderDelegate(self).relevant_crates(file_id)
53 } 53 }
54 fn resolve_extern_path(
55 &self,
56 anchor: ExternSourceId,
57 relative_path: &RelativePath,
58 ) -> Option<FileId> {
59 FileLoaderDelegate(self).resolve_extern_path(anchor, relative_path)
60 }
54} 61}
diff --git a/crates/ra_hir_ty/Cargo.toml b/crates/ra_hir_ty/Cargo.toml
index f2558b579..9962112db 100644
--- a/crates/ra_hir_ty/Cargo.toml
+++ b/crates/ra_hir_ty/Cargo.toml
@@ -26,4 +26,4 @@ chalk-rust-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "177d713
26chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "177d71340acc7a7204a33115fc63075d86452179" } 26chalk-ir = { git = "https://github.com/rust-lang/chalk.git", rev = "177d71340acc7a7204a33115fc63075d86452179" }
27 27
28[dev-dependencies] 28[dev-dependencies]
29insta = "0.13.1" 29insta = "0.15.0"
diff --git a/crates/ra_hir_ty/src/db.rs b/crates/ra_hir_ty/src/db.rs
index c43619d1c..74b309005 100644
--- a/crates/ra_hir_ty/src/db.rs
+++ b/crates/ra_hir_ty/src/db.rs
@@ -16,15 +16,16 @@ use crate::{
16 Binders, CallableDef, GenericPredicate, InferenceResult, PolyFnSig, Substs, TraitRef, Ty, 16 Binders, CallableDef, GenericPredicate, InferenceResult, PolyFnSig, Substs, TraitRef, Ty,
17 TyDefId, TypeCtor, ValueTyDefId, 17 TyDefId, TypeCtor, ValueTyDefId,
18}; 18};
19use hir_expand::name::Name;
19 20
20#[salsa::query_group(HirDatabaseStorage)] 21#[salsa::query_group(HirDatabaseStorage)]
21#[salsa::requires(salsa::Database)] 22#[salsa::requires(salsa::Database)]
22pub trait HirDatabase: DefDatabase { 23pub trait HirDatabase: DefDatabase {
23 #[salsa::transparent] 24 #[salsa::invoke(infer_wait)]
24 fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>; 25 fn infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
25 26
26 #[salsa::invoke(crate::do_infer_query)] 27 #[salsa::invoke(crate::infer::infer_query)]
27 fn do_infer(&self, def: DefWithBodyId) -> Arc<InferenceResult>; 28 fn infer_query(&self, def: DefWithBodyId) -> Arc<InferenceResult>;
28 29
29 #[salsa::invoke(crate::lower::ty_query)] 30 #[salsa::invoke(crate::lower::ty_query)]
30 #[salsa::cycle(crate::lower::ty_recover)] 31 #[salsa::cycle(crate::lower::ty_recover)]
@@ -65,14 +66,6 @@ pub trait HirDatabase: DefDatabase {
65 #[salsa::invoke(crate::traits::impls_for_trait_query)] 66 #[salsa::invoke(crate::traits::impls_for_trait_query)]
66 fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>; 67 fn impls_for_trait(&self, krate: CrateId, trait_: TraitId) -> Arc<[ImplId]>;
67 68
68 /// This provides the Chalk trait solver instance. Because Chalk always
69 /// works from a specific crate, this query is keyed on the crate; and
70 /// because Chalk does its own internal caching, the solver is wrapped in a
71 /// Mutex and the query does an untracked read internally, to make sure the
72 /// cached state is thrown away when input facts change.
73 #[salsa::invoke(crate::traits::trait_solver_query)]
74 fn trait_solver(&self, krate: CrateId) -> crate::traits::TraitSolver;
75
76 // Interned IDs for Chalk integration 69 // Interned IDs for Chalk integration
77 #[salsa::interned] 70 #[salsa::interned]
78 fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId; 71 fn intern_type_ctor(&self, type_ctor: TypeCtor) -> crate::TypeCtorId;
@@ -110,9 +103,17 @@ pub trait HirDatabase: DefDatabase {
110 ) -> Option<crate::traits::Solution>; 103 ) -> Option<crate::traits::Solution>;
111} 104}
112 105
113fn infer(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { 106fn infer_wait(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
114 let _p = profile("wait_infer"); 107 let _p = profile("infer:wait").detail(|| match def {
115 db.do_infer(def) 108 DefWithBodyId::FunctionId(it) => db.function_data(it).name.to_string(),
109 DefWithBodyId::StaticId(it) => {
110 db.static_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
111 }
112 DefWithBodyId::ConstId(it) => {
113 db.const_data(it).name.clone().unwrap_or_else(Name::missing).to_string()
114 }
115 });
116 db.infer_query(def)
116} 117}
117 118
118#[test] 119#[test]
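
`infer` is now a plain wrapper (`infer_wait`) that only opens a profiling span labelled with the definition's name and then delegates to the memoized `infer_query`. A standalone sketch of that wrapper-around-a-query shape, without salsa and with a print standing in for the profiler:

    #[derive(Debug, Clone, Copy)]
    enum DefWithBody {
        Function(&'static str),
        Static(&'static str),
        Const(&'static str),
    }

    #[derive(Debug)]
    struct InferenceResult {
        // ... type of every expression and pattern, etc. ...
    }

    /// The "real" query: in the actual crate this is the memoized salsa query.
    fn infer_query(def: DefWithBody) -> InferenceResult {
        let _ = def;
        InferenceResult {}
    }

    /// The wrapper the rest of the code calls: it only adds a labelled profile span.
    fn infer(def: DefWithBody) -> InferenceResult {
        let label = match def {
            DefWithBody::Function(name) | DefWithBody::Static(name) | DefWithBody::Const(name) => name,
        };
        eprintln!("infer:wait ({})", label); // stand-in for profile("infer:wait").detail(..)
        infer_query(def)
    }

    fn main() {
        let _result = infer(DefWithBody::Function("main"));
    }
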
diff --git a/crates/ra_hir_ty/src/infer.rs b/crates/ra_hir_ty/src/infer.rs
index 437086ff6..947833412 100644
--- a/crates/ra_hir_ty/src/infer.rs
+++ b/crates/ra_hir_ty/src/infer.rs
@@ -63,8 +63,8 @@ mod pat;
63mod coerce; 63mod coerce;
64 64
65/// The entry point of type inference. 65/// The entry point of type inference.
66pub fn do_infer_query(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> { 66pub(crate) fn infer_query(db: &impl HirDatabase, def: DefWithBodyId) -> Arc<InferenceResult> {
67 let _p = profile("infer"); 67 let _p = profile("infer_query");
68 let resolver = def.resolver(db); 68 let resolver = def.resolver(db);
69 let mut ctx = InferenceContext::new(db, def, resolver); 69 let mut ctx = InferenceContext::new(db, def, resolver);
70 70
diff --git a/crates/ra_hir_ty/src/infer/path.rs b/crates/ra_hir_ty/src/infer/path.rs
index 471d60342..c733b9e1d 100644
--- a/crates/ra_hir_ty/src/infer/path.rs
+++ b/crates/ra_hir_ty/src/infer/path.rs
@@ -40,7 +40,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
40 let ty = self.make_ty(type_ref); 40 let ty = self.make_ty(type_ref);
41 let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1); 41 let remaining_segments_for_ty = path.segments().take(path.segments().len() - 1);
42 let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver); 42 let ctx = crate::lower::TyLoweringContext::new(self.db, &resolver);
43 let ty = Ty::from_type_relative_path(&ctx, ty, remaining_segments_for_ty); 43 let (ty, _) = Ty::from_type_relative_path(&ctx, ty, None, remaining_segments_for_ty);
44 self.resolve_ty_assoc_item( 44 self.resolve_ty_assoc_item(
45 ty, 45 ty,
46 &path.segments().last().expect("path had at least one segment").name, 46 &path.segments().last().expect("path had at least one segment").name,
@@ -115,7 +115,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
115 let remaining_segments_for_ty = 115 let remaining_segments_for_ty =
116 remaining_segments.take(remaining_segments.len() - 1); 116 remaining_segments.take(remaining_segments.len() - 1);
117 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver); 117 let ctx = crate::lower::TyLoweringContext::new(self.db, &self.resolver);
118 let ty = Ty::from_partly_resolved_hir_path( 118 let (ty, _) = Ty::from_partly_resolved_hir_path(
119 &ctx, 119 &ctx,
120 def, 120 def,
121 resolved_segment, 121 resolved_segment,
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index ca194f806..4127f1a8d 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -26,7 +26,7 @@ pub mod traits;
26pub mod method_resolution; 26pub mod method_resolution;
27mod op; 27mod op;
28mod lower; 28mod lower;
29mod infer; 29pub(crate) mod infer;
30pub mod display; 30pub mod display;
31pub(crate) mod utils; 31pub(crate) mod utils;
32pub mod db; 32pub mod db;
@@ -57,7 +57,7 @@ use crate::{
57use display::HirDisplay; 57use display::HirDisplay;
58 58
59pub use autoderef::autoderef; 59pub use autoderef::autoderef;
60pub use infer::{do_infer_query, InferTy, InferenceResult}; 60pub use infer::{InferTy, InferenceResult};
61pub use lower::CallableDef; 61pub use lower::CallableDef;
62pub use lower::{ 62pub use lower::{
63 callable_item_sig, ImplTraitLoweringMode, TyDefId, TyLoweringContext, ValueTyDefId, 63 callable_item_sig, ImplTraitLoweringMode, TyDefId, TyLoweringContext, ValueTyDefId,
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs
index 092977e93..b96dc126c 100644
--- a/crates/ra_hir_ty/src/lower.rs
+++ b/crates/ra_hir_ty/src/lower.rs
@@ -91,7 +91,14 @@ pub enum TypeParamLoweringMode {
91 91
92impl Ty { 92impl Ty {
93 pub fn from_hir(ctx: &TyLoweringContext<'_, impl HirDatabase>, type_ref: &TypeRef) -> Self { 93 pub fn from_hir(ctx: &TyLoweringContext<'_, impl HirDatabase>, type_ref: &TypeRef) -> Self {
94 match type_ref { 94 Ty::from_hir_ext(ctx, type_ref).0
95 }
96 pub fn from_hir_ext(
97 ctx: &TyLoweringContext<'_, impl HirDatabase>,
98 type_ref: &TypeRef,
99 ) -> (Self, Option<TypeNs>) {
100 let mut res = None;
101 let ty = match type_ref {
95 TypeRef::Never => Ty::simple(TypeCtor::Never), 102 TypeRef::Never => Ty::simple(TypeCtor::Never),
96 TypeRef::Tuple(inner) => { 103 TypeRef::Tuple(inner) => {
97 let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect(); 104 let inner_tys: Arc<[Ty]> = inner.iter().map(|tr| Ty::from_hir(ctx, tr)).collect();
@@ -100,7 +107,11 @@ impl Ty {
100 Substs(inner_tys), 107 Substs(inner_tys),
101 ) 108 )
102 } 109 }
103 TypeRef::Path(path) => Ty::from_hir_path(ctx, path), 110 TypeRef::Path(path) => {
111 let (ty, res_) = Ty::from_hir_path(ctx, path);
112 res = res_;
113 ty
114 }
104 TypeRef::RawPtr(inner, mutability) => { 115 TypeRef::RawPtr(inner, mutability) => {
105 let inner_ty = Ty::from_hir(ctx, inner); 116 let inner_ty = Ty::from_hir(ctx, inner);
106 Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty) 117 Ty::apply_one(TypeCtor::RawPtr(*mutability), inner_ty)
@@ -183,7 +194,8 @@ impl Ty {
183 } 194 }
184 } 195 }
185 TypeRef::Error => Ty::Unknown, 196 TypeRef::Error => Ty::Unknown,
186 } 197 };
198 (ty, res)
187 } 199 }
188 200
189 /// This is only for `generic_predicates_for_param`, where we can't just 201 /// This is only for `generic_predicates_for_param`, where we can't just
@@ -217,17 +229,19 @@ impl Ty {
217 pub(crate) fn from_type_relative_path( 229 pub(crate) fn from_type_relative_path(
218 ctx: &TyLoweringContext<'_, impl HirDatabase>, 230 ctx: &TyLoweringContext<'_, impl HirDatabase>,
219 ty: Ty, 231 ty: Ty,
232 // We need the original resolution to lower `Self::AssocTy` correctly
233 res: Option<TypeNs>,
220 remaining_segments: PathSegments<'_>, 234 remaining_segments: PathSegments<'_>,
221 ) -> Ty { 235 ) -> (Ty, Option<TypeNs>) {
222 if remaining_segments.len() == 1 { 236 if remaining_segments.len() == 1 {
223 // resolve unselected assoc types 237 // resolve unselected assoc types
224 let segment = remaining_segments.first().unwrap(); 238 let segment = remaining_segments.first().unwrap();
225 Ty::select_associated_type(ctx, ty, segment) 239 (Ty::select_associated_type(ctx, ty, res, segment), None)
226 } else if remaining_segments.len() > 1 { 240 } else if remaining_segments.len() > 1 {
227 // FIXME report error (ambiguous associated type) 241 // FIXME report error (ambiguous associated type)
228 Ty::Unknown 242 (Ty::Unknown, None)
229 } else { 243 } else {
230 ty 244 (ty, res)
231 } 245 }
232 } 246 }
233 247
@@ -236,14 +250,14 @@ impl Ty {
236 resolution: TypeNs, 250 resolution: TypeNs,
237 resolved_segment: PathSegment<'_>, 251 resolved_segment: PathSegment<'_>,
238 remaining_segments: PathSegments<'_>, 252 remaining_segments: PathSegments<'_>,
239 ) -> Ty { 253 ) -> (Ty, Option<TypeNs>) {
240 let ty = match resolution { 254 let ty = match resolution {
241 TypeNs::TraitId(trait_) => { 255 TypeNs::TraitId(trait_) => {
242 // if this is a bare dyn Trait, we'll directly put the required ^0 for the self type in there 256 // if this is a bare dyn Trait, we'll directly put the required ^0 for the self type in there
243 let self_ty = if remaining_segments.len() == 0 { Some(Ty::Bound(0)) } else { None }; 257 let self_ty = if remaining_segments.len() == 0 { Some(Ty::Bound(0)) } else { None };
244 let trait_ref = 258 let trait_ref =
245 TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty); 259 TraitRef::from_resolved_path(ctx, trait_, resolved_segment, self_ty);
246 return if remaining_segments.len() == 1 { 260 let ty = if remaining_segments.len() == 1 {
247 let segment = remaining_segments.first().unwrap(); 261 let segment = remaining_segments.first().unwrap();
248 let associated_ty = associated_type_by_name_including_super_traits( 262 let associated_ty = associated_type_by_name_including_super_traits(
249 ctx.db, 263 ctx.db,
@@ -269,6 +283,7 @@ impl Ty {
269 } else { 283 } else {
270 Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)])) 284 Ty::Dyn(Arc::new([GenericPredicate::Implemented(trait_ref)]))
271 }; 285 };
286 return (ty, None);
272 } 287 }
273 TypeNs::GenericParam(param_id) => { 288 TypeNs::GenericParam(param_id) => {
274 let generics = 289 let generics =
@@ -306,22 +321,25 @@ impl Ty {
306 TypeNs::BuiltinType(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()), 321 TypeNs::BuiltinType(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()),
307 TypeNs::TypeAliasId(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()), 322 TypeNs::TypeAliasId(it) => Ty::from_hir_path_inner(ctx, resolved_segment, it.into()),
308 // FIXME: report error 323 // FIXME: report error
309 TypeNs::EnumVariantId(_) => return Ty::Unknown, 324 TypeNs::EnumVariantId(_) => return (Ty::Unknown, None),
310 }; 325 };
311 326
312 Ty::from_type_relative_path(ctx, ty, remaining_segments) 327 Ty::from_type_relative_path(ctx, ty, Some(resolution), remaining_segments)
313 } 328 }
314 329
315 pub(crate) fn from_hir_path(ctx: &TyLoweringContext<'_, impl HirDatabase>, path: &Path) -> Ty { 330 pub(crate) fn from_hir_path(
331 ctx: &TyLoweringContext<'_, impl HirDatabase>,
332 path: &Path,
333 ) -> (Ty, Option<TypeNs>) {
316 // Resolve the path (in type namespace) 334 // Resolve the path (in type namespace)
317 if let Some(type_ref) = path.type_anchor() { 335 if let Some(type_ref) = path.type_anchor() {
318 let ty = Ty::from_hir(ctx, &type_ref); 336 let (ty, res) = Ty::from_hir_ext(ctx, &type_ref);
319 return Ty::from_type_relative_path(ctx, ty, path.segments()); 337 return Ty::from_type_relative_path(ctx, ty, res, path.segments());
320 } 338 }
321 let (resolution, remaining_index) = 339 let (resolution, remaining_index) =
322 match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) { 340 match ctx.resolver.resolve_path_in_type_ns(ctx.db, path.mod_path()) {
323 Some(it) => it, 341 Some(it) => it,
324 None => return Ty::Unknown, 342 None => return (Ty::Unknown, None),
325 }; 343 };
326 let (resolved_segment, remaining_segments) = match remaining_index { 344 let (resolved_segment, remaining_segments) = match remaining_index {
327 None => ( 345 None => (
@@ -336,31 +354,27 @@ impl Ty {
336 fn select_associated_type( 354 fn select_associated_type(
337 ctx: &TyLoweringContext<'_, impl HirDatabase>, 355 ctx: &TyLoweringContext<'_, impl HirDatabase>,
338 self_ty: Ty, 356 self_ty: Ty,
357 res: Option<TypeNs>,
339 segment: PathSegment<'_>, 358 segment: PathSegment<'_>,
340 ) -> Ty { 359 ) -> Ty {
341 let def = match ctx.resolver.generic_def() { 360 let traits_from_env: Vec<_> = match res {
342 Some(def) => def, 361 Some(TypeNs::SelfType(impl_id)) => match ctx.db.impl_trait(impl_id) {
343 None => return Ty::Unknown, // this can't actually happen 362 None => return Ty::Unknown,
344 }; 363 Some(trait_ref) => vec![trait_ref.value.trait_],
345 let param_id = match self_ty { 364 },
346 Ty::Placeholder(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id, 365 Some(TypeNs::GenericParam(param_id)) => {
347 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => { 366 let predicates = ctx.db.generic_predicates_for_param(param_id);
348 let generics = generics(ctx.db, def); 367 predicates
349 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) { 368 .iter()
350 id 369 .filter_map(|pred| match &pred.value {
351 } else { 370 GenericPredicate::Implemented(tr) => Some(tr.trait_),
352 return Ty::Unknown; 371 _ => None,
353 }; 372 })
354 param_id 373 .collect()
355 } 374 }
356 _ => return Ty::Unknown, // Error: Ambiguous associated type 375 _ => return Ty::Unknown,
357 }; 376 };
358 let predicates = ctx.db.generic_predicates_for_param(param_id); 377 let traits = traits_from_env.into_iter().flat_map(|t| all_super_traits(ctx.db, t));
359 let traits_from_env = predicates.iter().filter_map(|pred| match &pred.value {
360 GenericPredicate::Implemented(tr) => Some(tr.trait_),
361 _ => None,
362 });
363 let traits = traits_from_env.flat_map(|t| all_super_traits(ctx.db, t));
364 for t in traits { 378 for t in traits {
365 if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name) 379 if let Some(associated_ty) = ctx.db.trait_data(t).associated_type_by_name(&segment.name)
366 { 380 {
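
The lowering code now threads the original path resolution (`Option<TypeNs>`) through, so an unselected associated type such as `Self::Item` is resolved via the trait the surrounding impl implements, and `T::Item` via the parameter's bounds. A standalone sketch of that candidate-trait selection, with stand-in types; the real code also walks super traits and builds a projection type:

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct TraitId(u32);

    /// Simplified stand-in for the relevant `TypeNs` variants.
    enum Resolution {
        /// `Self` inside `impl Trait for S { .. }`
        SelfType { implemented_trait: Option<TraitId> },
        /// a generic parameter `T: Bound1 + Bound2`
        GenericParam { bounds: Vec<TraitId> },
        /// anything else lowers to an unknown type in the real code
        Other,
    }

    struct TraitData {
        id: TraitId,
        assoc_types: Vec<&'static str>,
    }

    /// Which traits may supply the associated type, given where the self type came from?
    fn candidate_traits(res: &Resolution) -> Vec<TraitId> {
        match res {
            Resolution::SelfType { implemented_trait } => implemented_trait.into_iter().copied().collect(),
            Resolution::GenericParam { bounds } => bounds.clone(),
            Resolution::Other => Vec::new(),
        }
    }

    fn select_associated_type(res: &Resolution, traits: &[TraitData], segment: &str) -> Option<TraitId> {
        candidate_traits(res).into_iter().find(|t| {
            traits
                .iter()
                .any(|data| data.id == *t && data.assoc_types.iter().any(|name| *name == segment))
        })
    }

    fn main() {
        let the_trait = TraitData { id: TraitId(0), assoc_types: vec!["Item"] };
        // `Self::Item` inside `impl Trait for S` resolves via the implemented trait.
        let res = Resolution::SelfType { implemented_trait: Some(TraitId(0)) };
        assert_eq!(select_associated_type(&res, &[the_trait], "Item"), Some(TraitId(0)));
    }
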
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs
index b7e8855fb..7f5e1469e 100644
--- a/crates/ra_hir_ty/src/method_resolution.rs
+++ b/crates/ra_hir_ty/src/method_resolution.rs
@@ -516,9 +516,31 @@ pub(crate) fn inherent_impl_substs(
516 let self_ty_with_vars = 516 let self_ty_with_vars =
517 Canonical { num_vars: vars.len() + self_ty.num_vars, value: self_ty_with_vars }; 517 Canonical { num_vars: vars.len() + self_ty.num_vars, value: self_ty_with_vars };
518 let substs = super::infer::unify(&self_ty_with_vars, self_ty); 518 let substs = super::infer::unify(&self_ty_with_vars, self_ty);
519 // we only want the substs for the vars we added, not the ones from self_ty 519 // We only want the substs for the vars we added, not the ones from self_ty.
520 let result = substs.map(|s| s.suffix(vars.len())); 520 // Also, if any of the vars we added are still in there, we replace them by
521 result 521 // Unknown. I think this can only really happen if self_ty contained
522 // Unknown, and in that case we want the result to contain Unknown in those
523 // places again.
524 substs.map(|s| fallback_bound_vars(s.suffix(vars.len()), self_ty.num_vars))
525}
526
527/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
528/// num_vars_to_keep) by `Ty::Unknown`.
529fn fallback_bound_vars(s: Substs, num_vars_to_keep: usize) -> Substs {
530 s.fold_binders(
531 &mut |ty, binders| {
532 if let Ty::Bound(idx) = &ty {
533 if *idx >= binders as u32 {
534 Ty::Unknown
535 } else {
536 ty
537 }
538 } else {
539 ty
540 }
541 },
542 num_vars_to_keep,
543 )
522} 544}
523 545
524fn transform_receiver_ty( 546fn transform_receiver_ty(
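
`fallback_bound_vars` keeps the substitution entries introduced for the impl's own variables and turns any bound variable that leaked in from `self_ty` into `Unknown`. The same idea on a toy type representation, without nested binders (the real code uses `fold_binders` to adjust the cutoff under each binder):

    #[derive(Clone, Debug, PartialEq)]
    enum Ty {
        Unknown,
        Bound(u32),
        Apply(&'static str, Vec<Ty>),
    }

    /// Replace every bound variable whose index is past `num_vars_to_keep` with
    /// `Ty::Unknown`, recursing through type applications.
    fn fallback_bound_vars(ty: Ty, num_vars_to_keep: u32) -> Ty {
        match ty {
            Ty::Bound(idx) if idx >= num_vars_to_keep => Ty::Unknown,
            Ty::Apply(name, args) => Ty::Apply(
                name,
                args.into_iter().map(|t| fallback_bound_vars(t, num_vars_to_keep)).collect(),
            ),
            other => other,
        }
    }

    fn main() {
        // Keep ^0 (an impl variable), but ^1 leaked in from an unknown self type.
        let subst = Ty::Apply("HashSet", vec![Ty::Bound(0), Ty::Bound(1)]);
        assert_eq!(
            fallback_bound_vars(subst, 1),
            Ty::Apply("HashSet", vec![Ty::Bound(0), Ty::Unknown])
        );
    }
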
diff --git a/crates/ra_hir_ty/src/test_db.rs b/crates/ra_hir_ty/src/test_db.rs
index c794f7b84..0be2fea4b 100644
--- a/crates/ra_hir_ty/src/test_db.rs
+++ b/crates/ra_hir_ty/src/test_db.rs
@@ -67,6 +67,13 @@ impl FileLoader for TestDB {
67 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { 67 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
68 FileLoaderDelegate(self).relevant_crates(file_id) 68 FileLoaderDelegate(self).relevant_crates(file_id)
69 } 69 }
70 fn resolve_extern_path(
71 &self,
72 extern_id: ra_db::ExternSourceId,
73 relative_path: &RelativePath,
74 ) -> Option<FileId> {
75 FileLoaderDelegate(self).resolve_extern_path(extern_id, relative_path)
76 }
70} 77}
71 78
72impl TestDB { 79impl TestDB {
diff --git a/crates/ra_hir_ty/src/tests/macros.rs b/crates/ra_hir_ty/src/tests/macros.rs
index 42814941f..32457bbf7 100644
--- a/crates/ra_hir_ty/src/tests/macros.rs
+++ b/crates/ra_hir_ty/src/tests/macros.rs
@@ -484,6 +484,51 @@ fn bar() -> u32 {0}
484} 484}
485 485
486#[test] 486#[test]
487fn infer_builtin_macros_include_concat_with_bad_env_should_failed() {
488 let (db, pos) = TestDB::with_position(
489 r#"
490//- /main.rs
491#[rustc_builtin_macro]
492macro_rules! include {() => {}}
493
494#[rustc_builtin_macro]
495macro_rules! concat {() => {}}
496
497#[rustc_builtin_macro]
498macro_rules! env {() => {}}
499
500include!(concat!(env!("OUT_DIR"), "/foo.rs"));
501
502fn main() {
503 bar()<|>;
504}
505
506//- /foo.rs
507fn bar() -> u32 {0}
508"#,
509 );
510 assert_eq!("{unknown}", type_at_pos(&db, pos));
511}
512
513#[test]
514fn infer_builtin_macros_include_itself_should_failed() {
515 let (db, pos) = TestDB::with_position(
516 r#"
517//- /main.rs
518#[rustc_builtin_macro]
519macro_rules! include {() => {}}
520
521include!("main.rs");
522
523fn main() {
524 0<|>
525}
526"#,
527 );
528 assert_eq!("i32", type_at_pos(&db, pos));
529}
530
531#[test]
487fn infer_builtin_macros_concat_with_lazy() { 532fn infer_builtin_macros_concat_with_lazy() {
488 assert_snapshot!( 533 assert_snapshot!(
489 infer(r#" 534 infer(r#"
@@ -505,6 +550,26 @@ fn main() {
505} 550}
506 551
507#[test] 552#[test]
553fn infer_builtin_macros_env() {
554 assert_snapshot!(
555 infer(r#"
556//- /main.rs env:foo=bar
557#[rustc_builtin_macro]
558macro_rules! env {() => {}}
559
560fn main() {
561 let x = env!("foo");
562}
563"#),
564 @r###"
565 ![0; 5) '"bar"': &str
566 [88; 116) '{ ...o"); }': ()
567 [98; 99) 'x': &str
568 "###
569 );
570}
571
572#[test]
508fn infer_derive_clone_simple() { 573fn infer_derive_clone_simple() {
509 let (db, pos) = TestDB::with_position( 574 let (db, pos) = TestDB::with_position(
510 r#" 575 r#"
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 547010b35..f009a708c 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -1803,6 +1803,47 @@ fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
1803} 1803}
1804 1804
1805#[test] 1805#[test]
1806fn unselected_projection_on_trait_self() {
1807 assert_snapshot!(infer(
1808 r#"
1809//- /main.rs
1810trait Trait {
1811 type Item;
1812
1813 fn f(&self, x: Self::Item);
1814}
1815
1816struct S;
1817
1818impl Trait for S {
1819 type Item = u32;
1820 fn f(&self, x: Self::Item) { let y = x; }
1821}
1822
1823struct S2;
1824
1825impl Trait for S2 {
1826 type Item = i32;
1827 fn f(&self, x: <Self>::Item) { let y = x; }
1828}
1829"#,
1830 ), @r###"
1831 [54; 58) 'self': &Self
1832 [60; 61) 'x': {unknown}
1833 [140; 144) 'self': &S
1834 [146; 147) 'x': u32
1835 [161; 175) '{ let y = x; }': ()
1836 [167; 168) 'y': u32
1837 [171; 172) 'x': u32
1838 [242; 246) 'self': &S2
1839 [248; 249) 'x': i32
1840 [265; 279) '{ let y = x; }': ()
1841 [271; 272) 'y': i32
1842 [275; 276) 'x': i32
1843 "###);
1844}
1845
1846#[test]
1806fn trait_impl_self_ty() { 1847fn trait_impl_self_ty() {
1807 let t = type_at( 1848 let t = type_at(
1808 r#" 1849 r#"
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index bc6ee2600..6e1c8e42a 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -1,12 +1,9 @@
1//! Trait solving using Chalk. 1//! Trait solving using Chalk.
2use std::{ 2use std::{panic, sync::Arc};
3 panic,
4 sync::{Arc, Mutex},
5};
6 3
7use chalk_ir::cast::Cast; 4use chalk_ir::cast::Cast;
8use hir_def::{expr::ExprId, DefWithBodyId, ImplId, TraitId, TypeAliasId}; 5use hir_def::{expr::ExprId, DefWithBodyId, ImplId, TraitId, TypeAliasId};
9use ra_db::{impl_intern_key, salsa, Canceled, CrateId}; 6use ra_db::{impl_intern_key, salsa, CrateId};
10use ra_prof::profile; 7use ra_prof::profile;
11use rustc_hash::FxHashSet; 8use rustc_hash::FxHashSet;
12 9
@@ -19,74 +16,6 @@ use self::chalk::{from_chalk, Interner, ToChalk};
19pub(crate) mod chalk; 16pub(crate) mod chalk;
20mod builtin; 17mod builtin;
21 18
22#[derive(Debug, Clone)]
23pub struct TraitSolver {
24 krate: CrateId,
25 inner: Arc<Mutex<chalk_solve::Solver<Interner>>>,
26}
27
28/// We need eq for salsa
29impl PartialEq for TraitSolver {
30 fn eq(&self, other: &TraitSolver) -> bool {
31 Arc::ptr_eq(&self.inner, &other.inner)
32 }
33}
34
35impl Eq for TraitSolver {}
36
37impl TraitSolver {
38 fn solve(
39 &self,
40 db: &impl HirDatabase,
41 goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
42 ) -> Option<chalk_solve::Solution<Interner>> {
43 let context = ChalkContext { db, krate: self.krate };
44 log::debug!("solve goal: {:?}", goal);
45 let mut solver = match self.inner.lock() {
46 Ok(it) => it,
47 // Our cancellation works via unwinding, but, as chalk is not
48 // panic-safe, we need to make sure to propagate the cancellation.
49 // Ideally, we should also make chalk panic-safe.
50 Err(_) => ra_db::Canceled::throw(),
51 };
52
53 let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
54
55 let solution = panic::catch_unwind({
56 let solver = panic::AssertUnwindSafe(&mut solver);
57 let context = panic::AssertUnwindSafe(&context);
58 move || {
59 solver.0.solve_limited(context.0, goal, || {
60 context.0.db.check_canceled();
61 let remaining = fuel.get();
62 fuel.set(remaining - 1);
63 if remaining == 0 {
64 log::debug!("fuel exhausted");
65 }
66 remaining > 0
67 })
68 }
69 });
70
71 let solution = match solution {
72 Ok(it) => it,
73 Err(err) => {
74 if err.downcast_ref::<Canceled>().is_some() {
75 panic::resume_unwind(err)
76 } else {
77 log::error!("chalk panicked :-(");
78 // Reset the solver, as it is not panic-safe.
79 *solver = create_chalk_solver();
80 None
81 }
82 }
83 };
84
85 log::debug!("solve({:?}) => {:?}", goal, solution);
86 solution
87 }
88}
89
90/// This controls the maximum size of types Chalk considers. If we set this too 19/// This controls the maximum size of types Chalk considers. If we set this too
91/// high, we can run into slow edge cases; if we set it too low, Chalk won't 20/// high, we can run into slow edge cases; if we set it too low, Chalk won't
92/// find some solutions. 21/// find some solutions.
@@ -100,16 +29,6 @@ struct ChalkContext<'a, DB> {
100 krate: CrateId, 29 krate: CrateId,
101} 30}
102 31
103pub(crate) fn trait_solver_query(
104 db: &(impl HirDatabase + salsa::Database),
105 krate: CrateId,
106) -> TraitSolver {
107 db.salsa_runtime().report_untracked_read();
108 // krate parameter is just so we cache a unique solver per crate
109 log::debug!("Creating new solver for crate {:?}", krate);
110 TraitSolver { krate, inner: Arc::new(Mutex::new(create_chalk_solver())) }
111}
112
113fn create_chalk_solver() -> chalk_solve::Solver<Interner> { 32fn create_chalk_solver() -> chalk_solve::Solver<Interner> {
114 let solver_choice = 33 let solver_choice =
115 chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None }; 34 chalk_solve::SolverChoice::SLG { max_size: CHALK_SOLVER_MAX_SIZE, expected_answers: None };
@@ -128,7 +47,7 @@ pub(crate) fn impls_for_trait_query(
128 // will only ever get called for a few crates near the root of the tree (the 47 // will only ever get called for a few crates near the root of the tree (the
129 // ones the user is editing), so this may actually be a waste of memory. I'm 48 // ones the user is editing), so this may actually be a waste of memory. I'm
130 // doing it like this mainly for simplicity for now. 49 // doing it like this mainly for simplicity for now.
131 for dep in db.crate_graph().dependencies(krate) { 50 for dep in &db.crate_graph()[krate].dependencies {
132 impls.extend(db.impls_for_trait(dep.crate_id, trait_).iter()); 51 impls.extend(db.impls_for_trait(dep.crate_id, trait_).iter());
133 } 52 }
134 let crate_impl_defs = db.impls_in_crate(krate); 53 let crate_impl_defs = db.impls_in_crate(krate);
@@ -221,7 +140,10 @@ pub(crate) fn trait_solve_query(
221 krate: CrateId, 140 krate: CrateId,
222 goal: Canonical<InEnvironment<Obligation>>, 141 goal: Canonical<InEnvironment<Obligation>>,
223) -> Option<Solution> { 142) -> Option<Solution> {
224 let _p = profile("trait_solve_query"); 143 let _p = profile("trait_solve_query").detail(|| match &goal.value.value {
144 Obligation::Trait(it) => db.trait_data(it.trait_).name.to_string(),
145 Obligation::Projection(_) => "projection".to_string(),
146 });
225 log::debug!("trait_solve_query({})", goal.value.value.display(db)); 147 log::debug!("trait_solve_query({})", goal.value.value.display(db));
226 148
227 if let Obligation::Projection(pred) = &goal.value.value { 149 if let Obligation::Projection(pred) = &goal.value.value {
@@ -236,10 +158,35 @@ pub(crate) fn trait_solve_query(
236 // We currently don't deal with universes (I think / hope they're not yet 158 // We currently don't deal with universes (I think / hope they're not yet
237 // relevant for our use cases?) 159 // relevant for our use cases?)
238 let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 }; 160 let u_canonical = chalk_ir::UCanonical { canonical, universes: 1 };
239 let solution = db.trait_solver(krate).solve(db, &u_canonical); 161 let solution = solve(db, krate, &u_canonical);
240 solution.map(|solution| solution_from_chalk(db, solution)) 162 solution.map(|solution| solution_from_chalk(db, solution))
241} 163}
242 164
165fn solve(
166 db: &impl HirDatabase,
167 krate: CrateId,
168 goal: &chalk_ir::UCanonical<chalk_ir::InEnvironment<chalk_ir::Goal<Interner>>>,
169) -> Option<chalk_solve::Solution<Interner>> {
170 let context = ChalkContext { db, krate };
171 log::debug!("solve goal: {:?}", goal);
172 let mut solver = create_chalk_solver();
173
174 let fuel = std::cell::Cell::new(CHALK_SOLVER_FUEL);
175
176 let solution = solver.solve_limited(&context, goal, || {
177 context.db.check_canceled();
178 let remaining = fuel.get();
179 fuel.set(remaining - 1);
180 if remaining == 0 {
181 log::debug!("fuel exhausted");
182 }
183 remaining > 0
184 });
185
186 log::debug!("solve({:?}) => {:?}", goal, solution);
187 solution
188}
189
243fn solution_from_chalk( 190fn solution_from_chalk(
244 db: &impl HirDatabase, 191 db: &impl HirDatabase,
245 solution: chalk_solve::Solution<Interner>, 192 solution: chalk_solve::Solution<Interner>,
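
Instead of a cached, mutex-wrapped solver per crate, `trait_solve_query` now builds a fresh Chalk solver per call and bounds its work with a fuel counter checked from the `solve_limited` callback (the real callback also checks for cancellation). A standalone sketch of the fuel pattern with a toy solver loop:

    use std::cell::Cell;

    const SOLVER_FUEL: i32 = 100;

    /// Toy stand-in for `solver.solve_limited(ctx, goal, should_continue)`:
    /// keeps iterating as long as the callback returns true.
    fn solve_limited(mut steps_needed: u32, mut should_continue: impl FnMut() -> bool) -> Option<u32> {
        let mut steps = 0;
        while steps_needed > 0 {
            if !should_continue() {
                return None; // ran out of fuel (or was cancelled)
            }
            steps_needed -= 1;
            steps += 1;
        }
        Some(steps)
    }

    fn solve(steps_needed: u32) -> Option<u32> {
        let fuel = Cell::new(SOLVER_FUEL);
        solve_limited(steps_needed, || {
            let remaining = fuel.get();
            fuel.set(remaining - 1);
            if remaining == 0 {
                eprintln!("fuel exhausted");
            }
            remaining > 0
        })
    }

    fn main() {
        assert_eq!(solve(10), Some(10)); // cheap goal: solved
        assert_eq!(solve(1_000), None); // expensive goal: given up on
    }
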
diff --git a/crates/ra_ide/Cargo.toml b/crates/ra_ide/Cargo.toml
index 410d8de62..7235c944c 100644
--- a/crates/ra_ide/Cargo.toml
+++ b/crates/ra_ide/Cargo.toml
@@ -35,4 +35,4 @@ ra_assists = { path = "../ra_assists" }
35hir = { path = "../ra_hir", package = "ra_hir" } 35hir = { path = "../ra_hir", package = "ra_hir" }
36 36
37[dev-dependencies] 37[dev-dependencies]
38insta = "0.13.1" 38insta = "0.15.0"
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index c378c2c62..93e53c921 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -16,11 +16,11 @@ mod complete_scope;
16mod complete_postfix; 16mod complete_postfix;
17mod complete_macro_in_item_position; 17mod complete_macro_in_item_position;
18mod complete_trait_impl; 18mod complete_trait_impl;
19#[cfg(test)]
20mod test_utils;
19 21
20use ra_ide_db::RootDatabase; 22use ra_ide_db::RootDatabase;
21 23
22#[cfg(test)]
23use crate::completion::completion_item::do_completion;
24use crate::{ 24use crate::{
25 completion::{ 25 completion::{
26 completion_context::CompletionContext, 26 completion_context::CompletionContext,
@@ -33,6 +33,23 @@ pub use crate::completion::completion_item::{
33 CompletionItem, CompletionItemKind, InsertTextFormat, 33 CompletionItem, CompletionItemKind, InsertTextFormat,
34}; 34};
35 35
36#[derive(Clone, Debug, PartialEq, Eq)]
37pub struct CompletionOptions {
38 pub enable_postfix_completions: bool,
39 pub add_call_parenthesis: bool,
40 pub add_call_argument_snippets: bool,
41}
42
43impl Default for CompletionOptions {
44 fn default() -> Self {
45 CompletionOptions {
46 enable_postfix_completions: true,
47 add_call_parenthesis: true,
48 add_call_argument_snippets: true,
49 }
50 }
51}
52
36/// Main entry point for completion. We run completion as a two-phase process. 53/// Main entry point for completion. We run completion as a two-phase process.
37/// 54///
38/// First, we look at the position and collect a so-called `CompletionContext. 55/// First, we look at the position and collect a so-called `CompletionContext.
@@ -55,8 +72,12 @@ pub use crate::completion::completion_item::{
55/// `foo` *should* be present among the completion variants. Filtering by 72/// `foo` *should* be present among the completion variants. Filtering by
56/// identifier prefix/fuzzy match should be done higher in the stack, together 73/// identifier prefix/fuzzy match should be done higher in the stack, together
57/// with ordering of completions (currently this is done by the client). 74/// with ordering of completions (currently this is done by the client).
58pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<Completions> { 75pub(crate) fn completions(
59 let ctx = CompletionContext::new(db, position)?; 76 db: &RootDatabase,
77 position: FilePosition,
78 opts: &CompletionOptions,
79) -> Option<Completions> {
80 let ctx = CompletionContext::new(db, position, opts)?;
60 81
61 let mut acc = Completions::default(); 82 let mut acc = Completions::default();
62 83
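
Completion now takes an explicit `CompletionOptions` value instead of hard-coded behaviour; the defaults keep everything enabled. A small usage sketch showing how a caller might override one knob while keeping the rest, using struct-update syntax (the surrounding analysis types are omitted):

    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct CompletionOptions {
        pub enable_postfix_completions: bool,
        pub add_call_parenthesis: bool,
        pub add_call_argument_snippets: bool,
    }

    impl Default for CompletionOptions {
        fn default() -> Self {
            CompletionOptions {
                enable_postfix_completions: true,
                add_call_parenthesis: true,
                add_call_argument_snippets: true,
            }
        }
    }

    fn main() {
        // e.g. a client that renders its own snippets can switch them off:
        let opts = CompletionOptions { add_call_argument_snippets: false, ..Default::default() };
        assert!(opts.enable_postfix_completions);
        assert!(!opts.add_call_argument_snippets);
    }
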
diff --git a/crates/ra_ide/src/completion/complete_dot.rs b/crates/ra_ide/src/completion/complete_dot.rs
index 9145aa183..81e5037aa 100644
--- a/crates/ra_ide/src/completion/complete_dot.rs
+++ b/crates/ra_ide/src/completion/complete_dot.rs
@@ -38,7 +38,7 @@ pub(super) fn complete_dot(acc: &mut Completions, ctx: &CompletionContext) {
38fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { 38fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
39 for receiver in receiver.autoderef(ctx.db) { 39 for receiver in receiver.autoderef(ctx.db) {
40 for (field, ty) in receiver.fields(ctx.db) { 40 for (field, ty) in receiver.fields(ctx.db) {
41 if ctx.module.map_or(false, |m| !field.is_visible_from(ctx.db, m)) { 41 if ctx.scope().module().map_or(false, |m| !field.is_visible_from(ctx.db, m)) {
42 // Skip private field. FIXME: If the definition location of the 42 // Skip private field. FIXME: If the definition location of the
43 // field is editable, we should show the completion 43 // field is editable, we should show the completion
44 continue; 44 continue;
@@ -53,11 +53,14 @@ fn complete_fields(acc: &mut Completions, ctx: &CompletionContext, receiver: &Ty
53} 53}
54 54
55fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) { 55fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &Type) {
56 if let Some(krate) = ctx.module.map(|it| it.krate()) { 56 if let Some(krate) = ctx.krate {
57 let mut seen_methods = FxHashSet::default(); 57 let mut seen_methods = FxHashSet::default();
58 let traits_in_scope = ctx.scope().traits_in_scope(); 58 let traits_in_scope = ctx.scope().traits_in_scope();
59 receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| { 59 receiver.iterate_method_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, func| {
60 if func.has_self_param(ctx.db) && seen_methods.insert(func.name(ctx.db)) { 60 if func.has_self_param(ctx.db)
61 && ctx.scope().module().map_or(true, |m| func.is_visible_from(ctx.db, m))
62 && seen_methods.insert(func.name(ctx.db))
63 {
61 acc.add_function(ctx, func); 64 acc.add_function(ctx, func);
62 } 65 }
63 None::<()> 66 None::<()>
@@ -67,7 +70,7 @@ fn complete_methods(acc: &mut Completions, ctx: &CompletionContext, receiver: &T
67 70
68#[cfg(test)] 71#[cfg(test)]
69mod tests { 72mod tests {
70 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 73 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
71 use insta::assert_debug_snapshot; 74 use insta::assert_debug_snapshot;
72 75
73 fn do_ref_completion(code: &str) -> Vec<CompletionItem> { 76 fn do_ref_completion(code: &str) -> Vec<CompletionItem> {
@@ -308,6 +311,39 @@ mod tests {
308 } 311 }
309 312
310 #[test] 313 #[test]
314 fn test_method_completion_private() {
315 assert_debug_snapshot!(
316 do_ref_completion(
317 r"
318 struct A {}
319 mod m {
320 impl super::A {
321 fn private_method(&self) {}
322 pub(super) fn the_method(&self) {}
323 }
324 }
325 fn foo(a: A) {
326 a.<|>
327 }
328 ",
329 ),
330 @r###"
331 [
332 CompletionItem {
333 label: "the_method()",
334 source_range: [256; 256),
335 delete: [256; 256),
336 insert: "the_method()$0",
337 kind: Method,
338 lookup: "the_method",
339 detail: "pub(super) fn the_method(&self)",
340 },
341 ]
342 "###
343 );
344 }
345
346 #[test]
311 fn test_trait_method_completion() { 347 fn test_trait_method_completion() {
312 assert_debug_snapshot!( 348 assert_debug_snapshot!(
313 do_ref_completion( 349 do_ref_completion(
@@ -584,4 +620,133 @@ mod tests {
584 "### 620 "###
585 ); 621 );
586 } 622 }
623
624 #[test]
625 fn works_in_simple_macro_1() {
626 assert_debug_snapshot!(
627 do_ref_completion(
628 r"
629 macro_rules! m { ($e:expr) => { $e } }
630 struct A { the_field: u32 }
631 fn foo(a: A) {
632 m!(a.x<|>)
633 }
634 ",
635 ),
636 @r###"
637 [
638 CompletionItem {
639 label: "the_field",
640 source_range: [156; 157),
641 delete: [156; 157),
642 insert: "the_field",
643 kind: Field,
644 detail: "u32",
645 },
646 ]
647 "###
648 );
649 }
650
651 #[test]
652 fn works_in_simple_macro_recursive() {
653 assert_debug_snapshot!(
654 do_ref_completion(
655 r"
656 macro_rules! m { ($e:expr) => { $e } }
657 struct A { the_field: u32 }
658 fn foo(a: A) {
659 m!(a.x<|>)
660 }
661 ",
662 ),
663 @r###"
664 [
665 CompletionItem {
666 label: "the_field",
667 source_range: [156; 157),
668 delete: [156; 157),
669 insert: "the_field",
670 kind: Field,
671 detail: "u32",
672 },
673 ]
674 "###
675 );
676 }
677
678 #[test]
679 fn works_in_simple_macro_2() {
680 // this doesn't work yet because the macro doesn't expand without the token -- maybe it can be fixed with better recovery
681 assert_debug_snapshot!(
682 do_ref_completion(
683 r"
684 macro_rules! m { ($e:expr) => { $e } }
685 struct A { the_field: u32 }
686 fn foo(a: A) {
687 m!(a.<|>)
688 }
689 ",
690 ),
691 @r###"[]"###
692 );
693 }
694
695 #[test]
696 fn works_in_simple_macro_recursive_1() {
697 assert_debug_snapshot!(
698 do_ref_completion(
699 r"
700 macro_rules! m { ($e:expr) => { $e } }
701 struct A { the_field: u32 }
702 fn foo(a: A) {
703 m!(m!(m!(a.x<|>)))
704 }
705 ",
706 ),
707 @r###"
708 [
709 CompletionItem {
710 label: "the_field",
711 source_range: [162; 163),
712 delete: [162; 163),
713 insert: "the_field",
714 kind: Field,
715 detail: "u32",
716 },
717 ]
718 "###
719 );
720 }
721
722 #[test]
723 fn test_method_completion_3547() {
724 assert_debug_snapshot!(
725 do_ref_completion(
726 r"
727 struct HashSet<T> {}
728 impl<T> HashSet<T> {
729 pub fn the_method(&self) {}
730 }
731 fn foo() {
732 let s: HashSet<_>;
733 s.<|>
734 }
735 ",
736 ),
737 @r###"
738 [
739 CompletionItem {
740 label: "the_method()",
741 source_range: [201; 201),
742 delete: [201; 201),
743 insert: "the_method()$0",
744 kind: Method,
745 lookup: "the_method",
746 detail: "pub fn the_method(&self)",
747 },
748 ]
749 "###
750 );
751 }
587} 752}
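
The new guard in complete_methods pairs a visibility test (`func.is_visible_from(ctx.db, m)`) with name de-duplication (`seen_methods.insert(..)`), which is why `private_method` no longer appears in the snapshot above. A minimal, self-contained sketch of that filtering follows; `Vis`, `Method`, `complete_methods` and the `caller_can_see_private` flag are toy stand-ins invented for illustration, not rust-analyzer's real `hir` API.

use std::collections::HashSet;

// Toy visibility and method records, for illustration only.
#[derive(Clone, Copy, PartialEq)]
enum Vis { Private, PubSuper }

struct Method { name: &'static str, vis: Vis }

// Keep a method if it is visible from the completing module and has not been
// suggested already (mirrors the `is_visible_from` + `seen_methods.insert` guard).
fn complete_methods(methods: &[Method], caller_can_see_private: bool) -> Vec<&'static str> {
    let mut seen = HashSet::new();
    methods
        .iter()
        .filter(|m| caller_can_see_private || m.vis != Vis::Private)
        .filter(|m| seen.insert(m.name))
        .map(|m| m.name)
        .collect()
}

fn main() {
    let methods = [
        Method { name: "private_method", vis: Vis::Private },
        Method { name: "the_method", vis: Vis::PubSuper },
        Method { name: "the_method", vis: Vis::PubSuper }, // e.g. from a second impl block
    ];
    // Completing from outside the defining module: only `the_method`, and only once.
    assert_eq!(complete_methods(&methods, false), vec!["the_method"]);
}
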
diff --git a/crates/ra_ide/src/completion/complete_fn_param.rs b/crates/ra_ide/src/completion/complete_fn_param.rs
index 502458706..9226ac055 100644
--- a/crates/ra_ide/src/completion/complete_fn_param.rs
+++ b/crates/ra_ide/src/completion/complete_fn_param.rs
@@ -52,7 +52,7 @@ pub(super) fn complete_fn_param(acc: &mut Completions, ctx: &CompletionContext)
52 52
53#[cfg(test)] 53#[cfg(test)]
54mod tests { 54mod tests {
55 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 55 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
56 use insta::assert_debug_snapshot; 56 use insta::assert_debug_snapshot;
57 57
58 fn do_magic_completion(code: &str) -> Vec<CompletionItem> { 58 fn do_magic_completion(code: &str) -> Vec<CompletionItem> {
diff --git a/crates/ra_ide/src/completion/complete_keyword.rs b/crates/ra_ide/src/completion/complete_keyword.rs
index eb7cd9ac2..1e053ea4a 100644
--- a/crates/ra_ide/src/completion/complete_keyword.rs
+++ b/crates/ra_ide/src/completion/complete_keyword.rs
@@ -79,6 +79,7 @@ pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionConte
79} 79}
80 80
81fn is_in_loop_body(leaf: &SyntaxToken) -> bool { 81fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
82 // FIXME move this to CompletionContext and make it handle macros
82 for node in leaf.parent().ancestors() { 83 for node in leaf.parent().ancestors() {
83 if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR { 84 if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
84 break; 85 break;
@@ -116,7 +117,7 @@ fn complete_return(
116 117
117#[cfg(test)] 118#[cfg(test)]
118mod tests { 119mod tests {
119 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 120 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
120 use insta::assert_debug_snapshot; 121 use insta::assert_debug_snapshot;
121 122
122 fn do_keyword_completion(code: &str) -> Vec<CompletionItem> { 123 fn do_keyword_completion(code: &str) -> Vec<CompletionItem> {
diff --git a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
index 1866d9e6c..270e96df0 100644
--- a/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
+++ b/crates/ra_ide/src/completion/complete_macro_in_item_position.rs
@@ -15,9 +15,10 @@ pub(super) fn complete_macro_in_item_position(acc: &mut Completions, ctx: &Compl
15 15
16#[cfg(test)] 16#[cfg(test)]
17mod tests { 17mod tests {
18 use crate::completion::{do_completion, CompletionItem, CompletionKind};
19 use insta::assert_debug_snapshot; 18 use insta::assert_debug_snapshot;
20 19
20 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
21
21 fn do_reference_completion(code: &str) -> Vec<CompletionItem> { 22 fn do_reference_completion(code: &str) -> Vec<CompletionItem> {
22 do_completion(code, CompletionKind::Reference) 23 do_completion(code, CompletionKind::Reference)
23 } 24 }
diff --git a/crates/ra_ide/src/completion/complete_path.rs b/crates/ra_ide/src/completion/complete_path.rs
index 1a9699466..d588ee364 100644
--- a/crates/ra_ide/src/completion/complete_path.rs
+++ b/crates/ra_ide/src/completion/complete_path.rs
@@ -1,6 +1,6 @@
1//! Completion of paths, including when writing a single name. 1//! Completion of paths, i.e. `some::prefix::<|>`.
2 2
3use hir::{Adt, PathResolution, ScopeDef}; 3use hir::{Adt, HasVisibility, PathResolution, ScopeDef};
4use ra_syntax::AstNode; 4use ra_syntax::AstNode;
5use test_utils::tested_by; 5use test_utils::tested_by;
6 6
@@ -15,9 +15,10 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
15 Some(PathResolution::Def(def)) => def, 15 Some(PathResolution::Def(def)) => def,
16 _ => return, 16 _ => return,
17 }; 17 };
18 let context_module = ctx.scope().module();
18 match def { 19 match def {
19 hir::ModuleDef::Module(module) => { 20 hir::ModuleDef::Module(module) => {
20 let module_scope = module.scope(ctx.db); 21 let module_scope = module.scope(ctx.db, context_module);
21 for (name, def) in module_scope { 22 for (name, def) in module_scope {
22 if ctx.use_item_syntax.is_some() { 23 if ctx.use_item_syntax.is_some() {
23 if let ScopeDef::Unknown = def { 24 if let ScopeDef::Unknown = def {
@@ -47,10 +48,13 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
47 }; 48 };
48 // Iterate assoc types separately 49 // Iterate assoc types separately
49 // FIXME: complete T::AssocType 50 // FIXME: complete T::AssocType
50 let krate = ctx.module.map(|m| m.krate()); 51 let krate = ctx.krate;
51 if let Some(krate) = krate { 52 if let Some(krate) = krate {
52 let traits_in_scope = ctx.scope().traits_in_scope(); 53 let traits_in_scope = ctx.scope().traits_in_scope();
53 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| { 54 ty.iterate_path_candidates(ctx.db, krate, &traits_in_scope, None, |_ty, item| {
55 if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
56 return None;
57 }
54 match item { 58 match item {
55 hir::AssocItem::Function(func) => { 59 hir::AssocItem::Function(func) => {
56 if !func.has_self_param(ctx.db) { 60 if !func.has_self_param(ctx.db) {
@@ -64,6 +68,9 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
64 }); 68 });
65 69
66 ty.iterate_impl_items(ctx.db, krate, |item| { 70 ty.iterate_impl_items(ctx.db, krate, |item| {
71 if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
72 return None;
73 }
67 match item { 74 match item {
68 hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {} 75 hir::AssocItem::Function(_) | hir::AssocItem::Const(_) => {}
69 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty), 76 hir::AssocItem::TypeAlias(ty) => acc.add_type_alias(ctx, ty),
@@ -74,6 +81,9 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
74 } 81 }
75 hir::ModuleDef::Trait(t) => { 82 hir::ModuleDef::Trait(t) => {
76 for item in t.items(ctx.db) { 83 for item in t.items(ctx.db) {
84 if context_module.map_or(false, |m| !item.is_visible_from(ctx.db, m)) {
85 continue;
86 }
77 match item { 87 match item {
78 hir::AssocItem::Function(func) => { 88 hir::AssocItem::Function(func) => {
79 if !func.has_self_param(ctx.db) { 89 if !func.has_self_param(ctx.db) {
@@ -93,7 +103,7 @@ pub(super) fn complete_path(acc: &mut Completions, ctx: &CompletionContext) {
93mod tests { 103mod tests {
94 use test_utils::covers; 104 use test_utils::covers;
95 105
96 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 106 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
97 use insta::assert_debug_snapshot; 107 use insta::assert_debug_snapshot;
98 108
99 fn do_reference_completion(code: &str) -> Vec<CompletionItem> { 109 fn do_reference_completion(code: &str) -> Vec<CompletionItem> {
@@ -170,6 +180,41 @@ mod tests {
170 } 180 }
171 181
172 #[test] 182 #[test]
183 fn path_visibility() {
184 assert_debug_snapshot!(
185 do_reference_completion(
186 r"
187 use self::my::<|>;
188
189 mod my {
190 struct Bar;
191 pub struct Foo;
192 pub use Bar as PublicBar;
193 }
194 "
195 ),
196 @r###"
197 [
198 CompletionItem {
199 label: "Foo",
200 source_range: [31; 31),
201 delete: [31; 31),
202 insert: "Foo",
203 kind: Struct,
204 },
205 CompletionItem {
206 label: "PublicBar",
207 source_range: [31; 31),
208 delete: [31; 31),
209 insert: "PublicBar",
210 kind: Struct,
211 },
212 ]
213 "###
214 );
215 }
216
217 #[test]
173 fn completes_use_item_starting_with_self() { 218 fn completes_use_item_starting_with_self() {
174 assert_debug_snapshot!( 219 assert_debug_snapshot!(
175 do_reference_completion( 220 do_reference_completion(
@@ -177,7 +222,7 @@ mod tests {
177 use self::m::<|>; 222 use self::m::<|>;
178 223
179 mod m { 224 mod m {
180 struct Bar; 225 pub struct Bar;
181 } 226 }
182 " 227 "
183 ), 228 ),
@@ -502,6 +547,60 @@ mod tests {
502 } 547 }
503 548
504 #[test] 549 #[test]
550 fn associated_item_visibility() {
551 assert_debug_snapshot!(
552 do_reference_completion(
553 "
554 //- /lib.rs
555 struct S;
556
557 mod m {
558 impl super::S {
559 pub(super) fn public_method() { }
560 fn private_method() { }
561 pub(super) type PublicType = u32;
562 type PrivateType = u32;
563 pub(super) const PUBLIC_CONST: u32 = 1;
564 const PRIVATE_CONST: u32 = 1;
565 }
566 }
567
568 fn foo() { let _ = S::<|> }
569 "
570 ),
571 @r###"
572 [
573 CompletionItem {
574 label: "PUBLIC_CONST",
575 source_range: [302; 302),
576 delete: [302; 302),
577 insert: "PUBLIC_CONST",
578 kind: Const,
579 detail: "pub(super) const PUBLIC_CONST: u32 = 1;",
580 },
581 CompletionItem {
582 label: "PublicType",
583 source_range: [302; 302),
584 delete: [302; 302),
585 insert: "PublicType",
586 kind: TypeAlias,
587 detail: "pub(super) type PublicType = u32;",
588 },
589 CompletionItem {
590 label: "public_method()",
591 source_range: [302; 302),
592 delete: [302; 302),
593 insert: "public_method()$0",
594 kind: Function,
595 lookup: "public_method",
596 detail: "pub(super) fn public_method()",
597 },
598 ]
599 "###
600 );
601 }
602
603 #[test]
505 fn completes_enum_associated_method() { 604 fn completes_enum_associated_method() {
506 assert_debug_snapshot!( 605 assert_debug_snapshot!(
507 do_reference_completion( 606 do_reference_completion(
@@ -835,4 +934,37 @@ mod tests {
835 "### 934 "###
836 ); 935 );
837 } 936 }
937
938 #[test]
939 fn completes_in_simple_macro_call() {
940 let completions = do_reference_completion(
941 r#"
942 macro_rules! m { ($e:expr) => { $e } }
943 fn main() { m!(self::f<|>); }
944 fn foo() {}
945 "#,
946 );
947 assert_debug_snapshot!(completions, @r###"
948 [
949 CompletionItem {
950 label: "foo()",
951 source_range: [93; 94),
952 delete: [93; 94),
953 insert: "foo()$0",
954 kind: Function,
955 lookup: "foo",
956 detail: "fn foo()",
957 },
958 CompletionItem {
959 label: "main()",
960 source_range: [93; 94),
961 delete: [93; 94),
962 insert: "main()$0",
963 kind: Function,
964 lookup: "main",
965 detail: "fn main()",
966 },
967 ]
968 "###);
969 }
838} 970}
diff --git a/crates/ra_ide/src/completion/complete_pattern.rs b/crates/ra_ide/src/completion/complete_pattern.rs
index c2c6ca002..6a1a66ef1 100644
--- a/crates/ra_ide/src/completion/complete_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_pattern.rs
@@ -27,7 +27,7 @@ pub(super) fn complete_pattern(acc: &mut Completions, ctx: &CompletionContext) {
27 27
28#[cfg(test)] 28#[cfg(test)]
29mod tests { 29mod tests {
30 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 30 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
31 use insta::assert_debug_snapshot; 31 use insta::assert_debug_snapshot;
32 32
33 fn complete(code: &str) -> Vec<CompletionItem> { 33 fn complete(code: &str) -> Vec<CompletionItem> {
@@ -86,4 +86,22 @@ mod tests {
86 ] 86 ]
87 "###); 87 "###);
88 } 88 }
89
90 #[test]
91 fn completes_in_simple_macro_call() {
92 // FIXME: doesn't work yet because of missing error recovery in macro expansion
93 let completions = complete(
94 r"
95 macro_rules! m { ($e:expr) => { $e } }
96 enum E { X }
97
98 fn foo() {
99 m!(match E::X {
100 <|>
101 })
102 }
103 ",
104 );
105 assert_debug_snapshot!(completions, @r###"[]"###);
106 }
89} 107}
diff --git a/crates/ra_ide/src/completion/complete_postfix.rs b/crates/ra_ide/src/completion/complete_postfix.rs
index 8a74f993a..0ba382165 100644
--- a/crates/ra_ide/src/completion/complete_postfix.rs
+++ b/crates/ra_ide/src/completion/complete_postfix.rs
@@ -12,7 +12,7 @@ use crate::{
12}; 12};
13 13
14pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) { 14pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
15 if !ctx.db.feature_flags.get("completion.enable-postfix") { 15 if !ctx.options.enable_postfix_completions {
16 return; 16 return;
17 } 17 }
18 18
@@ -67,8 +67,8 @@ pub(super) fn complete_postfix(acc: &mut Completions, ctx: &CompletionContext) {
67 67
68fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder { 68fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet: &str) -> Builder {
69 let edit = { 69 let edit = {
70 let receiver_range = 70 let receiver_syntax = ctx.dot_receiver.as_ref().expect("no receiver available").syntax();
71 ctx.dot_receiver.as_ref().expect("no receiver available").syntax().text_range(); 71 let receiver_range = ctx.sema.original_range(receiver_syntax).range;
72 let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end()); 72 let delete_range = TextRange::from_to(receiver_range.start(), ctx.source_range().end());
73 TextEdit::replace(delete_range, snippet.to_string()) 73 TextEdit::replace(delete_range, snippet.to_string())
74 }; 74 };
@@ -81,7 +81,7 @@ fn postfix_snippet(ctx: &CompletionContext, label: &str, detail: &str, snippet:
81mod tests { 81mod tests {
82 use insta::assert_debug_snapshot; 82 use insta::assert_debug_snapshot;
83 83
84 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 84 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
85 85
86 fn do_postfix_completion(code: &str) -> Vec<CompletionItem> { 86 fn do_postfix_completion(code: &str) -> Vec<CompletionItem> {
87 do_completion(code, CompletionKind::Postfix) 87 do_completion(code, CompletionKind::Postfix)
@@ -279,4 +279,65 @@ mod tests {
279 "### 279 "###
280 ); 280 );
281 } 281 }
282
283 #[test]
284 fn works_in_simple_macro() {
285 assert_debug_snapshot!(
286 do_postfix_completion(
287 r#"
288 macro_rules! m { ($e:expr) => { $e } }
289 fn main() {
290 let bar: u8 = 12;
291 m!(bar.b<|>)
292 }
293 "#,
294 ),
295 @r###"
296 [
297 CompletionItem {
298 label: "box",
299 source_range: [149; 150),
300 delete: [145; 150),
301 insert: "Box::new(bar)",
302 detail: "Box::new(expr)",
303 },
304 CompletionItem {
305 label: "dbg",
306 source_range: [149; 150),
307 delete: [145; 150),
308 insert: "dbg!(bar)",
309 detail: "dbg!(expr)",
310 },
311 CompletionItem {
312 label: "match",
313 source_range: [149; 150),
314 delete: [145; 150),
315 insert: "match bar {\n ${1:_} => {$0\\},\n}",
316 detail: "match expr {}",
317 },
318 CompletionItem {
319 label: "not",
320 source_range: [149; 150),
321 delete: [145; 150),
322 insert: "!bar",
323 detail: "!expr",
324 },
325 CompletionItem {
326 label: "ref",
327 source_range: [149; 150),
328 delete: [145; 150),
329 insert: "&bar",
330 detail: "&expr",
331 },
332 CompletionItem {
333 label: "refm",
334 source_range: [149; 150),
335 delete: [145; 150),
336 insert: "&mut bar",
337 detail: "&mut expr",
338 },
339 ]
340 "###
341 );
342 }
282} 343}
diff --git a/crates/ra_ide/src/completion/complete_record_literal.rs b/crates/ra_ide/src/completion/complete_record_literal.rs
index f98353d76..83ed1d52c 100644
--- a/crates/ra_ide/src/completion/complete_record_literal.rs
+++ b/crates/ra_ide/src/completion/complete_record_literal.rs
@@ -18,7 +18,7 @@ pub(super) fn complete_record_literal(acc: &mut Completions, ctx: &CompletionCon
18 18
19#[cfg(test)] 19#[cfg(test)]
20mod tests { 20mod tests {
21 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 21 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
22 use insta::assert_debug_snapshot; 22 use insta::assert_debug_snapshot;
23 23
24 fn complete(code: &str) -> Vec<CompletionItem> { 24 fn complete(code: &str) -> Vec<CompletionItem> {
@@ -153,4 +153,29 @@ mod tests {
153 ] 153 ]
154 "###); 154 "###);
155 } 155 }
156
157 #[test]
158 fn test_record_literal_field_in_simple_macro() {
159 let completions = complete(
160 r"
161 macro_rules! m { ($e:expr) => { $e } }
162 struct A { the_field: u32 }
163 fn foo() {
164 m!(A { the<|> })
165 }
166 ",
167 );
168 assert_debug_snapshot!(completions, @r###"
169 [
170 CompletionItem {
171 label: "the_field",
172 source_range: [137; 140),
173 delete: [137; 140),
174 insert: "the_field",
175 kind: Field,
176 detail: "u32",
177 },
178 ]
179 "###);
180 }
156} 181}
diff --git a/crates/ra_ide/src/completion/complete_record_pattern.rs b/crates/ra_ide/src/completion/complete_record_pattern.rs
index 9bdeae49f..962376428 100644
--- a/crates/ra_ide/src/completion/complete_record_pattern.rs
+++ b/crates/ra_ide/src/completion/complete_record_pattern.rs
@@ -17,7 +17,7 @@ pub(super) fn complete_record_pattern(acc: &mut Completions, ctx: &CompletionCon
17 17
18#[cfg(test)] 18#[cfg(test)]
19mod tests { 19mod tests {
20 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 20 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
21 use insta::assert_debug_snapshot; 21 use insta::assert_debug_snapshot;
22 22
23 fn complete(code: &str) -> Vec<CompletionItem> { 23 fn complete(code: &str) -> Vec<CompletionItem> {
@@ -87,4 +87,32 @@ mod tests {
87 ] 87 ]
88 "###); 88 "###);
89 } 89 }
90
91 #[test]
92 fn test_record_pattern_field_in_simple_macro() {
93 let completions = complete(
94 r"
95 macro_rules! m { ($e:expr) => { $e } }
96 struct S { foo: u32 }
97
98 fn process(f: S) {
99 m!(match f {
100 S { f<|>: 92 } => (),
101 })
102 }
103 ",
104 );
105 assert_debug_snapshot!(completions, @r###"
106 [
107 CompletionItem {
108 label: "foo",
109 source_range: [171; 172),
110 delete: [171; 172),
111 insert: "foo",
112 kind: Field,
113 detail: "u32",
114 },
115 ]
116 "###);
117 }
90} 118}
diff --git a/crates/ra_ide/src/completion/complete_scope.rs b/crates/ra_ide/src/completion/complete_scope.rs
index 2b9a0e556..bd4adf23a 100644
--- a/crates/ra_ide/src/completion/complete_scope.rs
+++ b/crates/ra_ide/src/completion/complete_scope.rs
@@ -1,4 +1,4 @@
1//! FIXME: write short doc here 1//! Completion of names from the current scope, e.g. locals and imported items.
2 2
3use crate::completion::{CompletionContext, Completions}; 3use crate::completion::{CompletionContext, Completions};
4 4
@@ -14,10 +14,10 @@ pub(super) fn complete_scope(acc: &mut Completions, ctx: &CompletionContext) {
14mod tests { 14mod tests {
15 use insta::assert_debug_snapshot; 15 use insta::assert_debug_snapshot;
16 16
17 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 17 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
18 18
19 fn do_reference_completion(code: &str) -> Vec<CompletionItem> { 19 fn do_reference_completion(ra_fixture: &str) -> Vec<CompletionItem> {
20 do_completion(code, CompletionKind::Reference) 20 do_completion(ra_fixture, CompletionKind::Reference)
21 } 21 }
22 22
23 #[test] 23 #[test]
@@ -797,4 +797,72 @@ mod tests {
797 "### 797 "###
798 ) 798 )
799 } 799 }
800
801 #[test]
802 fn completes_in_simple_macro_1() {
803 assert_debug_snapshot!(
804 do_reference_completion(
805 r"
806 macro_rules! m { ($e:expr) => { $e } }
807 fn quux(x: i32) {
808 let y = 92;
809 m!(<|>);
810 }
811 "
812 ),
813 @"[]"
814 );
815 }
816
817 #[test]
818 fn completes_in_simple_macro_2() {
819 assert_debug_snapshot!(
820 do_reference_completion(
821 r"
822 macro_rules! m { ($e:expr) => { $e } }
823 fn quux(x: i32) {
824 let y = 92;
825 m!(x<|>);
826 }
827 "
828 ),
829 @r###"
830 [
831 CompletionItem {
832 label: "m!",
833 source_range: [145; 146),
834 delete: [145; 146),
835 insert: "m!($0)",
836 kind: Macro,
837 detail: "macro_rules! m",
838 },
839 CompletionItem {
840 label: "quux(…)",
841 source_range: [145; 146),
842 delete: [145; 146),
843 insert: "quux(${1:x})$0",
844 kind: Function,
845 lookup: "quux",
846 detail: "fn quux(x: i32)",
847 },
848 CompletionItem {
849 label: "x",
850 source_range: [145; 146),
851 delete: [145; 146),
852 insert: "x",
853 kind: Binding,
854 detail: "i32",
855 },
856 CompletionItem {
857 label: "y",
858 source_range: [145; 146),
859 delete: [145; 146),
860 insert: "y",
861 kind: Binding,
862 detail: "i32",
863 },
864 ]
865 "###
866 );
867 }
800} 868}
diff --git a/crates/ra_ide/src/completion/complete_snippet.rs b/crates/ra_ide/src/completion/complete_snippet.rs
index 731b4fd82..f731e9b9a 100644
--- a/crates/ra_ide/src/completion/complete_snippet.rs
+++ b/crates/ra_ide/src/completion/complete_snippet.rs
@@ -42,7 +42,7 @@ fn ${1:feature}() {
42 42
43#[cfg(test)] 43#[cfg(test)]
44mod tests { 44mod tests {
45 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 45 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
46 use insta::assert_debug_snapshot; 46 use insta::assert_debug_snapshot;
47 47
48 fn do_snippet_completion(code: &str) -> Vec<CompletionItem> { 48 fn do_snippet_completion(code: &str) -> Vec<CompletionItem> {
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs
index 18a1d2995..7fefa2c7a 100644
--- a/crates/ra_ide/src/completion/complete_trait_impl.rs
+++ b/crates/ra_ide/src/completion/complete_trait_impl.rs
@@ -34,7 +34,7 @@
34use hir::{self, Docs, HasSource}; 34use hir::{self, Docs, HasSource};
35use ra_assists::utils::get_missing_impl_items; 35use ra_assists::utils::get_missing_impl_items;
36use ra_syntax::{ 36use ra_syntax::{
37 ast::{self, edit}, 37 ast::{self, edit, ImplDef},
38 AstNode, SyntaxKind, SyntaxNode, TextRange, 38 AstNode, SyntaxKind, SyntaxNode, TextRange,
39}; 39};
40use ra_text_edit::TextEdit; 40use ra_text_edit::TextEdit;
@@ -47,22 +47,22 @@ use crate::{
47}; 47};
48 48
49pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) { 49pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) {
50 let trigger = ctx.token.ancestors().find(|p| match p.kind() { 50 if let Some((trigger, impl_def)) = completion_match(ctx) {
51 SyntaxKind::FN_DEF
52 | SyntaxKind::TYPE_ALIAS_DEF
53 | SyntaxKind::CONST_DEF
54 | SyntaxKind::BLOCK_EXPR => true,
55 _ => false,
56 });
57
58 let impl_def = trigger
59 .as_ref()
60 .and_then(|node| node.parent())
61 .and_then(|node| node.parent())
62 .and_then(ast::ImplDef::cast);
63
64 if let (Some(trigger), Some(impl_def)) = (trigger, impl_def) {
65 match trigger.kind() { 51 match trigger.kind() {
52 SyntaxKind::NAME_REF => {
53 get_missing_impl_items(&ctx.sema, &impl_def).iter().for_each(|item| match item {
54 hir::AssocItem::Function(fn_item) => {
55 add_function_impl(&trigger, acc, ctx, &fn_item)
56 }
57 hir::AssocItem::TypeAlias(type_item) => {
58 add_type_alias_impl(&trigger, acc, ctx, &type_item)
59 }
60 hir::AssocItem::Const(const_item) => {
61 add_const_impl(&trigger, acc, ctx, &const_item)
62 }
63 })
64 }
65
66 SyntaxKind::FN_DEF => { 66 SyntaxKind::FN_DEF => {
67 for missing_fn in get_missing_impl_items(&ctx.sema, &impl_def).iter().filter_map( 67 for missing_fn in get_missing_impl_items(&ctx.sema, &impl_def).iter().filter_map(
68 |item| match item { 68 |item| match item {
@@ -101,6 +101,21 @@ pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext
101 } 101 }
102} 102}
103 103
104fn completion_match(ctx: &CompletionContext) -> Option<(SyntaxNode, ImplDef)> {
105 let (trigger, impl_def_offset) = ctx.token.ancestors().find_map(|p| match p.kind() {
106 SyntaxKind::FN_DEF
107 | SyntaxKind::TYPE_ALIAS_DEF
108 | SyntaxKind::CONST_DEF
109 | SyntaxKind::BLOCK_EXPR => Some((p, 2)),
110 SyntaxKind::NAME_REF => Some((p, 5)),
111 _ => None,
112 })?;
113 let impl_def = (0..impl_def_offset - 1)
114 .try_fold(trigger.parent()?, |t, _| t.parent())
115 .and_then(ast::ImplDef::cast)?;
116 Some((trigger, impl_def))
117}
118
104fn add_function_impl( 119fn add_function_impl(
105 fn_def_node: &SyntaxNode, 120 fn_def_node: &SyntaxNode,
106 acc: &mut Completions, 121 acc: &mut Completions,
@@ -202,7 +217,7 @@ fn make_const_compl_syntax(const_: &ast::ConstDef) -> String {
202 217
203#[cfg(test)] 218#[cfg(test)]
204mod tests { 219mod tests {
205 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 220 use crate::completion::{test_utils::do_completion, CompletionItem, CompletionKind};
206 use insta::assert_debug_snapshot; 221 use insta::assert_debug_snapshot;
207 222
208 fn complete(code: &str) -> Vec<CompletionItem> { 223 fn complete(code: &str) -> Vec<CompletionItem> {
@@ -210,6 +225,103 @@ mod tests {
210 } 225 }
211 226
212 #[test] 227 #[test]
228 fn name_ref_function_type_const() {
229 let completions = complete(
230 r"
231 trait Test {
232 type TestType;
233 const TEST_CONST: u16;
234 fn test();
235 }
236
237 struct T1;
238
239 impl Test for T1 {
240 t<|>
241 }
242 ",
243 );
244 assert_debug_snapshot!(completions, @r###"
245 [
246 CompletionItem {
247 label: "const TEST_CONST: u16 = ",
248 source_range: [209; 210),
249 delete: [209; 210),
250 insert: "const TEST_CONST: u16 = ",
251 kind: Const,
252 lookup: "TEST_CONST",
253 },
254 CompletionItem {
255 label: "fn test()",
256 source_range: [209; 210),
257 delete: [209; 210),
258 insert: "fn test() {}",
259 kind: Function,
260 lookup: "test",
261 },
262 CompletionItem {
263 label: "type TestType = ",
264 source_range: [209; 210),
265 delete: [209; 210),
266 insert: "type TestType = ",
267 kind: TypeAlias,
268 lookup: "TestType",
269 },
270 ]
271 "###);
272 }
273
274 #[test]
275 fn no_nested_fn_completions() {
276 let completions = complete(
277 r"
278 trait Test {
279 fn test();
280 fn test2();
281 }
282
283 struct T1;
284
285 impl Test for T1 {
286 fn test() {
287 t<|>
288 }
289 }
290 ",
291 );
292 assert_debug_snapshot!(completions, @r###"[]"###);
293 }
294
295 #[test]
296 fn name_ref_single_function() {
297 let completions = complete(
298 r"
299 trait Test {
300 fn test();
301 }
302
303 struct T1;
304
305 impl Test for T1 {
306 t<|>
307 }
308 ",
309 );
310 assert_debug_snapshot!(completions, @r###"
311 [
312 CompletionItem {
313 label: "fn test()",
314 source_range: [139; 140),
315 delete: [139; 140),
316 insert: "fn test() {}",
317 kind: Function,
318 lookup: "test",
319 },
320 ]
321 "###);
322 }
323
324 #[test]
213 fn single_function() { 325 fn single_function() {
214 let completions = complete( 326 let completions = complete(
215 r" 327 r"
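
The refactored `completion_match` maps each trigger kind to how many parent steps separate it from the surrounding `ImplDef` (2 for `FN_DEF`/`TYPE_ALIAS_DEF`/`CONST_DEF`/`BLOCK_EXPR`, 5 for `NAME_REF`) and then climbs that many parents with `try_fold`, so a too-short chain yields `None` instead of panicking. The toy arena below illustrates just that climb; `Node` and `nth_ancestor` are invented for the example and are not the real syntax-tree types.

// Toy node arena standing in for the syntax tree; `parent` is an index into the arena.
struct Node { kind: &'static str, parent: Option<usize> }

// Climb `steps` parent links starting from `start`, like
// `(0..impl_def_offset - 1).try_fold(trigger.parent()?, |t, _| t.parent())` above.
fn nth_ancestor(arena: &[Node], start: usize, steps: usize) -> Option<usize> {
    (0..steps).try_fold(start, |idx, _| arena[idx].parent)
}

fn main() {
    let arena = vec![
        Node { kind: "IMPL_DEF", parent: None },      // 0
        Node { kind: "ITEM_LIST", parent: Some(0) },  // 1
        Node { kind: "FN_DEF", parent: Some(1) },     // 2
    ];
    // Two steps up from the FN_DEF trigger lands on the IMPL_DEF.
    assert_eq!(nth_ancestor(&arena, 2, 2).map(|i| arena[i].kind), Some("IMPL_DEF"));
    // Asking for more ancestors than exist short-circuits to None instead of panicking.
    assert_eq!(nth_ancestor(&arena, 2, 5).map(|i| arena[i].kind), None);
}
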
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs
index 9aa5a705d..3646fb8dc 100644
--- a/crates/ra_ide/src/completion/completion_context.rs
+++ b/crates/ra_ide/src/completion/completion_context.rs
@@ -5,13 +5,13 @@ use ra_db::SourceDatabase;
5use ra_ide_db::RootDatabase; 5use ra_ide_db::RootDatabase;
6use ra_syntax::{ 6use ra_syntax::{
7 algo::{find_covering_element, find_node_at_offset}, 7 algo::{find_covering_element, find_node_at_offset},
8 ast, AstNode, SourceFile, 8 ast, AstNode,
9 SyntaxKind::*, 9 SyntaxKind::*,
10 SyntaxNode, SyntaxToken, TextRange, TextUnit, 10 SyntaxNode, SyntaxToken, TextRange, TextUnit,
11}; 11};
12use ra_text_edit::AtomTextEdit; 12use ra_text_edit::AtomTextEdit;
13 13
14use crate::FilePosition; 14use crate::{completion::CompletionOptions, FilePosition};
15 15
16/// `CompletionContext` is created early during completion to figure out, where 16/// `CompletionContext` is created early during completion to figure out, where
17/// exactly is the cursor, syntax-wise. 17/// exactly is the cursor, syntax-wise.
@@ -19,9 +19,13 @@ use crate::FilePosition;
19pub(crate) struct CompletionContext<'a> { 19pub(crate) struct CompletionContext<'a> {
20 pub(super) sema: Semantics<'a, RootDatabase>, 20 pub(super) sema: Semantics<'a, RootDatabase>,
21 pub(super) db: &'a RootDatabase, 21 pub(super) db: &'a RootDatabase,
22 pub(super) options: &'a CompletionOptions,
22 pub(super) offset: TextUnit, 23 pub(super) offset: TextUnit,
24 /// The token before the cursor, in the original file.
25 pub(super) original_token: SyntaxToken,
26 /// The token before the cursor, in the macro-expanded file.
23 pub(super) token: SyntaxToken, 27 pub(super) token: SyntaxToken,
24 pub(super) module: Option<hir::Module>, 28 pub(super) krate: Option<hir::Crate>,
25 pub(super) name_ref_syntax: Option<ast::NameRef>, 29 pub(super) name_ref_syntax: Option<ast::NameRef>,
26 pub(super) function_syntax: Option<ast::FnDef>, 30 pub(super) function_syntax: Option<ast::FnDef>,
27 pub(super) use_item_syntax: Option<ast::UseItem>, 31 pub(super) use_item_syntax: Option<ast::UseItem>,
@@ -54,6 +58,7 @@ impl<'a> CompletionContext<'a> {
54 pub(super) fn new( 58 pub(super) fn new(
55 db: &'a RootDatabase, 59 db: &'a RootDatabase,
56 position: FilePosition, 60 position: FilePosition,
61 options: &'a CompletionOptions,
57 ) -> Option<CompletionContext<'a>> { 62 ) -> Option<CompletionContext<'a>> {
58 let sema = Semantics::new(db); 63 let sema = Semantics::new(db);
59 64
@@ -67,15 +72,21 @@ impl<'a> CompletionContext<'a> {
67 let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string()); 72 let edit = AtomTextEdit::insert(position.offset, "intellijRulezz".to_string());
68 parse.reparse(&edit).tree() 73 parse.reparse(&edit).tree()
69 }; 74 };
75 let fake_ident_token =
76 file_with_fake_ident.syntax().token_at_offset(position.offset).right_biased().unwrap();
70 77
71 let module = sema.to_module_def(position.file_id); 78 let krate = sema.to_module_def(position.file_id).map(|m| m.krate());
72 let token = original_file.syntax().token_at_offset(position.offset).left_biased()?; 79 let original_token =
80 original_file.syntax().token_at_offset(position.offset).left_biased()?;
81 let token = sema.descend_into_macros(original_token.clone());
73 let mut ctx = CompletionContext { 82 let mut ctx = CompletionContext {
74 sema, 83 sema,
75 db, 84 db,
85 options,
86 original_token,
76 token, 87 token,
77 offset: position.offset, 88 offset: position.offset,
78 module, 89 krate,
79 name_ref_syntax: None, 90 name_ref_syntax: None,
80 function_syntax: None, 91 function_syntax: None,
81 use_item_syntax: None, 92 use_item_syntax: None,
@@ -95,15 +106,57 @@ impl<'a> CompletionContext<'a> {
95 has_type_args: false, 106 has_type_args: false,
96 dot_receiver_is_ambiguous_float_literal: false, 107 dot_receiver_is_ambiguous_float_literal: false,
97 }; 108 };
98 ctx.fill(&original_file, file_with_fake_ident, position.offset); 109
110 let mut original_file = original_file.syntax().clone();
111 let mut hypothetical_file = file_with_fake_ident.syntax().clone();
112 let mut offset = position.offset;
113 let mut fake_ident_token = fake_ident_token;
114
115 // Are we inside a macro call?
116 while let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
117 find_node_at_offset::<ast::MacroCall>(&original_file, offset),
118 find_node_at_offset::<ast::MacroCall>(&hypothetical_file, offset),
119 ) {
120 if actual_macro_call.path().as_ref().map(|s| s.syntax().text())
121 != macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text())
122 {
123 break;
124 }
125 let hypothetical_args = match macro_call_with_fake_ident.token_tree() {
126 Some(tt) => tt,
127 None => break,
128 };
129 if let (Some(actual_expansion), Some(hypothetical_expansion)) = (
130 ctx.sema.expand(&actual_macro_call),
131 ctx.sema.expand_hypothetical(
132 &actual_macro_call,
133 &hypothetical_args,
134 fake_ident_token,
135 ),
136 ) {
137 let new_offset = hypothetical_expansion.1.text_range().start();
138 if new_offset >= actual_expansion.text_range().end() {
139 break;
140 }
141 original_file = actual_expansion;
142 hypothetical_file = hypothetical_expansion.0;
143 fake_ident_token = hypothetical_expansion.1;
144 offset = new_offset;
145 } else {
146 break;
147 }
148 }
149
150 ctx.fill(&original_file, hypothetical_file, offset);
99 Some(ctx) 151 Some(ctx)
100 } 152 }
101 153
102 // The range of the identifier that is being completed. 154 // The range of the identifier that is being completed.
103 pub(crate) fn source_range(&self) -> TextRange { 155 pub(crate) fn source_range(&self) -> TextRange {
156 // check kind of macro-expanded token, but use range of original token
104 match self.token.kind() { 157 match self.token.kind() {
105 // workaroud when completion is triggered by trigger characters. 158 // workaroud when completion is triggered by trigger characters.
106 IDENT => self.token.text_range(), 159 IDENT => self.original_token.text_range(),
107 _ => TextRange::offset_len(self.offset, 0.into()), 160 _ => TextRange::offset_len(self.offset, 0.into()),
108 } 161 }
109 } 162 }
@@ -114,27 +167,24 @@ impl<'a> CompletionContext<'a> {
114 167
115 fn fill( 168 fn fill(
116 &mut self, 169 &mut self,
117 original_file: &ast::SourceFile, 170 original_file: &SyntaxNode,
118 file_with_fake_ident: ast::SourceFile, 171 file_with_fake_ident: SyntaxNode,
119 offset: TextUnit, 172 offset: TextUnit,
120 ) { 173 ) {
121 // First, let's try to complete a reference to some declaration. 174 // First, let's try to complete a reference to some declaration.
122 if let Some(name_ref) = 175 if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(&file_with_fake_ident, offset) {
123 find_node_at_offset::<ast::NameRef>(file_with_fake_ident.syntax(), offset)
124 {
125 // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`. 176 // Special case, `trait T { fn foo(i_am_a_name_ref) {} }`.
126 // See RFC#1685. 177 // See RFC#1685.
127 if is_node::<ast::Param>(name_ref.syntax()) { 178 if is_node::<ast::Param>(name_ref.syntax()) {
128 self.is_param = true; 179 self.is_param = true;
129 return; 180 return;
130 } 181 }
131 self.classify_name_ref(original_file, name_ref); 182 self.classify_name_ref(original_file, name_ref, offset);
132 } 183 }
133 184
134 // Otherwise, see if this is a declaration. We can use heuristics to 185 // Otherwise, see if this is a declaration. We can use heuristics to
135 // suggest declaration names, see `CompletionKind::Magic`. 186 // suggest declaration names, see `CompletionKind::Magic`.
136 if let Some(name) = find_node_at_offset::<ast::Name>(file_with_fake_ident.syntax(), offset) 187 if let Some(name) = find_node_at_offset::<ast::Name>(&file_with_fake_ident, offset) {
137 {
138 if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) { 188 if let Some(bind_pat) = name.syntax().ancestors().find_map(ast::BindPat::cast) {
139 let parent = bind_pat.syntax().parent(); 189 let parent = bind_pat.syntax().parent();
140 if parent.clone().and_then(ast::MatchArm::cast).is_some() 190 if parent.clone().and_then(ast::MatchArm::cast).is_some()
@@ -148,23 +198,29 @@ impl<'a> CompletionContext<'a> {
148 return; 198 return;
149 } 199 }
150 if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() { 200 if name.syntax().ancestors().find_map(ast::RecordFieldPatList::cast).is_some() {
151 self.record_lit_pat = find_node_at_offset(original_file.syntax(), self.offset); 201 self.record_lit_pat =
202 self.sema.find_node_at_offset_with_macros(&original_file, offset);
152 } 203 }
153 } 204 }
154 } 205 }
155 206
156 fn classify_name_ref(&mut self, original_file: &SourceFile, name_ref: ast::NameRef) { 207 fn classify_name_ref(
208 &mut self,
209 original_file: &SyntaxNode,
210 name_ref: ast::NameRef,
211 offset: TextUnit,
212 ) {
157 self.name_ref_syntax = 213 self.name_ref_syntax =
158 find_node_at_offset(original_file.syntax(), name_ref.syntax().text_range().start()); 214 find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
159 let name_range = name_ref.syntax().text_range(); 215 let name_range = name_ref.syntax().text_range();
160 if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() { 216 if name_ref.syntax().parent().and_then(ast::RecordField::cast).is_some() {
161 self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); 217 self.record_lit_syntax =
218 self.sema.find_node_at_offset_with_macros(&original_file, offset);
162 } 219 }
163 220
164 self.impl_def = self 221 self.impl_def = self
165 .token 222 .sema
166 .parent() 223 .ancestors_with_macros(self.token.parent())
167 .ancestors()
168 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) 224 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
169 .find_map(ast::ImplDef::cast); 225 .find_map(ast::ImplDef::cast);
170 226
@@ -183,12 +239,12 @@ impl<'a> CompletionContext<'a> {
183 _ => (), 239 _ => (),
184 } 240 }
185 241
186 self.use_item_syntax = self.token.parent().ancestors().find_map(ast::UseItem::cast); 242 self.use_item_syntax =
243 self.sema.ancestors_with_macros(self.token.parent()).find_map(ast::UseItem::cast);
187 244
188 self.function_syntax = self 245 self.function_syntax = self
189 .token 246 .sema
190 .parent() 247 .ancestors_with_macros(self.token.parent())
191 .ancestors()
192 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE) 248 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
193 .find_map(ast::FnDef::cast); 249 .find_map(ast::FnDef::cast);
194 250
@@ -242,7 +298,7 @@ impl<'a> CompletionContext<'a> {
242 298
243 if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) { 299 if let Some(off) = name_ref.syntax().text_range().start().checked_sub(2.into()) {
244 if let Some(if_expr) = 300 if let Some(if_expr) =
245 find_node_at_offset::<ast::IfExpr>(original_file.syntax(), off) 301 self.sema.find_node_at_offset_with_macros::<ast::IfExpr>(original_file, off)
246 { 302 {
247 if if_expr.syntax().text_range().end() 303 if if_expr.syntax().text_range().end()
248 < name_ref.syntax().text_range().start() 304 < name_ref.syntax().text_range().start()
@@ -259,7 +315,7 @@ impl<'a> CompletionContext<'a> {
259 self.dot_receiver = field_expr 315 self.dot_receiver = field_expr
260 .expr() 316 .expr()
261 .map(|e| e.syntax().text_range()) 317 .map(|e| e.syntax().text_range())
262 .and_then(|r| find_node_with_range(original_file.syntax(), r)); 318 .and_then(|r| find_node_with_range(original_file, r));
263 self.dot_receiver_is_ambiguous_float_literal = 319 self.dot_receiver_is_ambiguous_float_literal =
264 if let Some(ast::Expr::Literal(l)) = &self.dot_receiver { 320 if let Some(ast::Expr::Literal(l)) = &self.dot_receiver {
265 match l.kind() { 321 match l.kind() {
@@ -275,7 +331,7 @@ impl<'a> CompletionContext<'a> {
275 self.dot_receiver = method_call_expr 331 self.dot_receiver = method_call_expr
276 .expr() 332 .expr()
277 .map(|e| e.syntax().text_range()) 333 .map(|e| e.syntax().text_range())
278 .and_then(|r| find_node_with_range(original_file.syntax(), r)); 334 .and_then(|r| find_node_with_range(original_file, r));
279 self.is_call = true; 335 self.is_call = true;
280 } 336 }
281 } 337 }
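
The loop added to `CompletionContext::new` repeatedly expands the innermost macro call around the cursor, in both the real file and the copy containing the fake ident, and moves the completion offset into the expansion until no further progress can be made; the real work is done by `Semantics::expand` and `Semantics::expand_hypothetical` on syntax trees. The snippet below is a deliberately crude, self-contained model of that fixed-point descent, where "expansion" just unwraps one `m!( .. )` layer of a string; it only shows the loop-until-stable shape and is not the actual API.

// Toy model: "expanding" strips one `m!(` ... `)` wrapper and shifts the cursor
// offset accordingly; repeat while the cursor still sits inside a macro call.
fn descend_into_macros(mut text: String, mut offset: usize) -> (String, usize) {
    loop {
        let start = match text.find("m!(") {
            Some(s) => s,
            None => break,
        };
        let end = match text.rfind(')') {
            Some(e) => e,
            None => break,
        };
        // Only descend while the cursor is inside the macro's token tree.
        if offset <= start + 3 || offset > end {
            break;
        }
        text = text[start + 3..end].to_string();
        offset -= start + 3;
    }
    (text, offset)
}

fn main() {
    // Cursor just after `x` inside nested calls: m!(m!(a.x))
    let src = "m!(m!(a.x))".to_string();
    let cursor = 9; // byte offset of the completion point
    let (expanded, new_offset) = descend_into_macros(src, cursor);
    assert_eq!(expanded, "a.x");
    assert_eq!(new_offset, 3);
    println!("{} @ {}", expanded, new_offset);
}
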
diff --git a/crates/ra_ide/src/completion/completion_item.rs b/crates/ra_ide/src/completion/completion_item.rs
index 61867c0ff..ef0eb43b2 100644
--- a/crates/ra_ide/src/completion/completion_item.rs
+++ b/crates/ra_ide/src/completion/completion_item.rs
@@ -13,7 +13,7 @@ pub struct CompletionItem {
13 /// Used only internally in tests, to check only specific kind of 13 /// Used only internally in tests, to check only specific kind of
14 /// completion (postfix, keyword, reference, etc). 14 /// completion (postfix, keyword, reference, etc).
15 #[allow(unused)] 15 #[allow(unused)]
16 completion_kind: CompletionKind, 16 pub(crate) completion_kind: CompletionKind,
17 /// Label in the completion pop up which identifies completion. 17 /// Label in the completion pop up which identifies completion.
18 label: String, 18 label: String,
19 /// Range of identifier that is being completed. 19 /// Range of identifier that is being completed.
@@ -47,6 +47,10 @@ pub struct CompletionItem {
47 47
48 /// Whether this item is marked as deprecated 48 /// Whether this item is marked as deprecated
49 deprecated: bool, 49 deprecated: bool,
50
51 /// If completing a function call, ask the editor to show parameter popup
52 /// after completion.
53 trigger_call_info: bool,
50} 54}
51 55
52// We use custom debug for CompletionItem to make `insta`'s diffs more readable. 56// We use custom debug for CompletionItem to make `insta`'s diffs more readable.
@@ -139,6 +143,7 @@ impl CompletionItem {
139 kind: None, 143 kind: None,
140 text_edit: None, 144 text_edit: None,
141 deprecated: None, 145 deprecated: None,
146 trigger_call_info: None,
142 } 147 }
143 } 148 }
144 /// What user sees in pop-up in the UI. 149 /// What user sees in pop-up in the UI.
@@ -177,6 +182,10 @@ impl CompletionItem {
177 pub fn deprecated(&self) -> bool { 182 pub fn deprecated(&self) -> bool {
178 self.deprecated 183 self.deprecated
179 } 184 }
185
186 pub fn trigger_call_info(&self) -> bool {
187 self.trigger_call_info
188 }
180} 189}
181 190
182/// A helper to make `CompletionItem`s. 191/// A helper to make `CompletionItem`s.
@@ -193,6 +202,7 @@ pub(crate) struct Builder {
193 kind: Option<CompletionItemKind>, 202 kind: Option<CompletionItemKind>,
194 text_edit: Option<TextEdit>, 203 text_edit: Option<TextEdit>,
195 deprecated: Option<bool>, 204 deprecated: Option<bool>,
205 trigger_call_info: Option<bool>,
196} 206}
197 207
198impl Builder { 208impl Builder {
@@ -221,6 +231,7 @@ impl Builder {
221 kind: self.kind, 231 kind: self.kind,
222 completion_kind: self.completion_kind, 232 completion_kind: self.completion_kind,
223 deprecated: self.deprecated.unwrap_or(false), 233 deprecated: self.deprecated.unwrap_or(false),
234 trigger_call_info: self.trigger_call_info.unwrap_or(false),
224 } 235 }
225 } 236 }
226 pub(crate) fn lookup_by(mut self, lookup: impl Into<String>) -> Builder { 237 pub(crate) fn lookup_by(mut self, lookup: impl Into<String>) -> Builder {
@@ -271,6 +282,10 @@ impl Builder {
271 self.deprecated = Some(deprecated); 282 self.deprecated = Some(deprecated);
272 self 283 self
273 } 284 }
285 pub(crate) fn trigger_call_info(mut self) -> Builder {
286 self.trigger_call_info = Some(true);
287 self
288 }
274} 289}
275 290
276impl<'a> Into<CompletionItem> for Builder { 291impl<'a> Into<CompletionItem> for Builder {
@@ -303,20 +318,3 @@ impl Into<Vec<CompletionItem>> for Completions {
303 self.buf 318 self.buf
304 } 319 }
305} 320}
306
307#[cfg(test)]
308pub(crate) fn do_completion(code: &str, kind: CompletionKind) -> Vec<CompletionItem> {
309 use crate::completion::completions;
310 use crate::mock_analysis::{analysis_and_position, single_file_with_position};
311 let (analysis, position) = if code.contains("//-") {
312 analysis_and_position(code)
313 } else {
314 single_file_with_position(code)
315 };
316 let completions = completions(&analysis.db, position).unwrap();
317 let completion_items: Vec<CompletionItem> = completions.into();
318 let mut kind_completions: Vec<CompletionItem> =
319 completion_items.into_iter().filter(|c| c.completion_kind == kind).collect();
320 kind_completions.sort_by_key(|c| c.label.clone());
321 kind_completions
322}
diff --git a/crates/ra_ide/src/completion/presentation.rs b/crates/ra_ide/src/completion/presentation.rs
index dac232a85..5213def20 100644
--- a/crates/ra_ide/src/completion/presentation.rs
+++ b/crates/ra_ide/src/completion/presentation.rs
@@ -103,11 +103,8 @@ impl Completions {
103 } 103 }
104 }; 104 };
105 105
106 // If not an import, add parenthesis automatically. 106 // Add `<>` for generic types
107 if ctx.is_path_type 107 if ctx.is_path_type && !ctx.has_type_args && ctx.options.add_call_parenthesis {
108 && !ctx.has_type_args
109 && ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis")
110 {
111 let has_non_default_type_params = match resolution { 108 let has_non_default_type_params = match resolution {
112 ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db), 109 ScopeDef::ModuleDef(Adt(it)) => it.has_non_default_type_params(ctx.db),
113 ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db), 110 ScopeDef::ModuleDef(TypeAlias(it)) => it.has_non_default_type_params(ctx.db),
@@ -211,26 +208,29 @@ impl Completions {
211 .set_deprecated(is_deprecated(func, ctx.db)) 208 .set_deprecated(is_deprecated(func, ctx.db))
212 .detail(function_signature.to_string()); 209 .detail(function_signature.to_string());
213 210
214 // Add `<>` for generic types 211 // If not an import, add parenthesis automatically.
215 if ctx.use_item_syntax.is_none() 212 if ctx.use_item_syntax.is_none() && !ctx.is_call && ctx.options.add_call_parenthesis {
216 && !ctx.is_call
217 && ctx.db.feature_flags.get("completion.insertion.add-call-parenthesis")
218 {
219 tested_by!(inserts_parens_for_function_calls); 213 tested_by!(inserts_parens_for_function_calls);
220 214
221 let (snippet, label) = if params.is_empty() || has_self_param && params.len() == 1 { 215 let (snippet, label) = if params.is_empty() || has_self_param && params.len() == 1 {
222 (format!("{}()$0", name), format!("{}()", name)) 216 (format!("{}()$0", name), format!("{}()", name))
223 } else { 217 } else {
224 let to_skip = if has_self_param { 1 } else { 0 }; 218 builder = builder.trigger_call_info();
225 let function_params_snippet = 219 let snippet = if ctx.options.add_call_argument_snippets {
226 join( 220 let to_skip = if has_self_param { 1 } else { 0 };
221 let function_params_snippet = join(
227 function_signature.parameter_names.iter().skip(to_skip).enumerate().map( 222 function_signature.parameter_names.iter().skip(to_skip).enumerate().map(
228 |(index, param_name)| format!("${{{}:{}}}", index + 1, param_name), 223 |(index, param_name)| format!("${{{}:{}}}", index + 1, param_name),
229 ), 224 ),
230 ) 225 )
231 .separator(", ") 226 .separator(", ")
232 .to_string(); 227 .to_string();
233 (format!("{}({})$0", name, function_params_snippet), format!("{}(…)", name)) 228 format!("{}({})$0", name, function_params_snippet)
229 } else {
230 format!("{}($0)", name)
231 };
232
233 (snippet, format!("{}(…)", name))
234 }; 234 };
235 builder = builder.lookup_by(name).label(label).insert_snippet(snippet); 235 builder = builder.lookup_by(name).label(label).insert_snippet(snippet);
236 } 236 }
@@ -307,12 +307,22 @@ mod tests {
307 use insta::assert_debug_snapshot; 307 use insta::assert_debug_snapshot;
308 use test_utils::covers; 308 use test_utils::covers;
309 309
310 use crate::completion::{do_completion, CompletionItem, CompletionKind}; 310 use crate::completion::{
311 test_utils::{do_completion, do_completion_with_options},
312 CompletionItem, CompletionKind, CompletionOptions,
313 };
311 314
312 fn do_reference_completion(ra_fixture: &str) -> Vec<CompletionItem> { 315 fn do_reference_completion(ra_fixture: &str) -> Vec<CompletionItem> {
313 do_completion(ra_fixture, CompletionKind::Reference) 316 do_completion(ra_fixture, CompletionKind::Reference)
314 } 317 }
315 318
319 fn do_reference_completion_with_options(
320 ra_fixture: &str,
321 options: CompletionOptions,
322 ) -> Vec<CompletionItem> {
323 do_completion_with_options(ra_fixture, CompletionKind::Reference, &options)
324 }
325
316 #[test] 326 #[test]
317 fn enum_detail_includes_names_for_record() { 327 fn enum_detail_includes_names_for_record() {
318 assert_debug_snapshot!( 328 assert_debug_snapshot!(
@@ -533,7 +543,7 @@ mod tests {
533 } 543 }
534 544
535 #[test] 545 #[test]
536 fn parens_for_method_call() { 546 fn arg_snippets_for_method_call() {
537 assert_debug_snapshot!( 547 assert_debug_snapshot!(
538 do_reference_completion( 548 do_reference_completion(
539 r" 549 r"
@@ -563,6 +573,40 @@ mod tests {
563 } 573 }
564 574
565 #[test] 575 #[test]
576 fn no_arg_snippets_for_method_call() {
577 assert_debug_snapshot!(
578 do_reference_completion_with_options(
579 r"
580 struct S {}
581 impl S {
582 fn foo(&self, x: i32) {}
583 }
584 fn bar(s: &S) {
585 s.f<|>
586 }
587 ",
588 CompletionOptions {
589 add_call_argument_snippets: false,
590 .. Default::default()
591 }
592 ),
593 @r###"
594 [
595 CompletionItem {
596 label: "foo(…)",
597 source_range: [171; 172),
598 delete: [171; 172),
599 insert: "foo($0)",
600 kind: Method,
601 lookup: "foo",
602 detail: "fn foo(&self, x: i32)",
603 },
604 ]
605 "###
606 )
607 }
608
609 #[test]
566 fn dont_render_function_parens_in_use_item() { 610 fn dont_render_function_parens_in_use_item() {
567 assert_debug_snapshot!( 611 assert_debug_snapshot!(
568 do_reference_completion( 612 do_reference_completion(
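
With the new `add_call_argument_snippets` option, the inserted text for a call either enumerates a `${n:name}` tab stop per parameter or leaves a single `$0` between the parentheses (compare the `foo(${1:x})$0` and `foo($0)` snapshots above), while `trigger_call_info` asks the editor to pop up parameter info whenever the call has arguments to fill in. A small self-contained sketch of the snippet assembly follows; `call_snippet` is a hypothetical helper, not the crate's `Completions::add_function`.

// Build the insert text for a function completion, mirroring the two branches above:
// with argument snippets each parameter becomes `${n:name}`, otherwise the caret
// simply lands between the parentheses.
fn call_snippet(name: &str, params: &[&str], add_call_argument_snippets: bool) -> String {
    if params.is_empty() {
        return format!("{}()$0", name);
    }
    if add_call_argument_snippets {
        let args = params
            .iter()
            .enumerate()
            .map(|(i, p)| format!("${{{}:{}}}", i + 1, p))
            .collect::<Vec<_>>()
            .join(", ");
        format!("{}({})$0", name, args)
    } else {
        format!("{}($0)", name)
    }
}

fn main() {
    assert_eq!(call_snippet("foo", &["x"], true), "foo(${1:x})$0");
    assert_eq!(call_snippet("foo", &["x"], false), "foo($0)");
    assert_eq!(call_snippet("new", &[], true), "new()$0");
}
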
diff --git a/crates/ra_ide/src/completion/test_utils.rs b/crates/ra_ide/src/completion/test_utils.rs
new file mode 100644
index 000000000..136857315
--- /dev/null
+++ b/crates/ra_ide/src/completion/test_utils.rs
@@ -0,0 +1,29 @@
1//! Runs completion for testing purposes.
2
3use crate::{
4 completion::{completion_item::CompletionKind, CompletionOptions},
5 mock_analysis::{analysis_and_position, single_file_with_position},
6 CompletionItem,
7};
8
9pub(crate) fn do_completion(code: &str, kind: CompletionKind) -> Vec<CompletionItem> {
10 do_completion_with_options(code, kind, &CompletionOptions::default())
11}
12
13pub(crate) fn do_completion_with_options(
14 code: &str,
15 kind: CompletionKind,
16 options: &CompletionOptions,
17) -> Vec<CompletionItem> {
18 let (analysis, position) = if code.contains("//-") {
19 analysis_and_position(code)
20 } else {
21 single_file_with_position(code)
22 };
23 let completions = analysis.completions(position, options).unwrap().unwrap();
24 let completion_items: Vec<CompletionItem> = completions.into();
25 let mut kind_completions: Vec<CompletionItem> =
26 completion_items.into_iter().filter(|c| c.completion_kind == kind).collect();
27 kind_completions.sort_by_key(|c| c.label().to_owned());
28 kind_completions
29}
diff --git a/crates/ra_ide/src/display.rs b/crates/ra_ide/src/display.rs
index 1c26a8697..eaeaaa2b4 100644
--- a/crates/ra_ide/src/display.rs
+++ b/crates/ra_ide/src/display.rs
@@ -68,17 +68,23 @@ pub(crate) fn macro_label(node: &ast::MacroCall) -> String {
68} 68}
69 69
70pub(crate) fn rust_code_markup<CODE: AsRef<str>>(val: CODE) -> String { 70pub(crate) fn rust_code_markup<CODE: AsRef<str>>(val: CODE) -> String {
71 rust_code_markup_with_doc::<_, &str>(val, None) 71 rust_code_markup_with_doc::<_, &str>(val, None, None)
72} 72}
73 73
74pub(crate) fn rust_code_markup_with_doc<CODE, DOC>(val: CODE, doc: Option<DOC>) -> String 74pub(crate) fn rust_code_markup_with_doc<CODE, DOC>(
75 val: CODE,
76 doc: Option<DOC>,
77 mod_path: Option<String>,
78) -> String
75where 79where
76 CODE: AsRef<str>, 80 CODE: AsRef<str>,
77 DOC: AsRef<str>, 81 DOC: AsRef<str>,
78{ 82{
83 let mod_path =
84 mod_path.filter(|path| !path.is_empty()).map(|path| path + "\n").unwrap_or_default();
79 if let Some(doc) = doc { 85 if let Some(doc) = doc {
80 format!("```rust\n{}\n```\n\n{}", val.as_ref(), doc.as_ref()) 86 format!("```rust\n{}{}\n```\n\n{}", mod_path, val.as_ref(), doc.as_ref())
81 } else { 87 } else {
82 format!("```rust\n{}\n```", val.as_ref()) 88 format!("```rust\n{}{}\n```", mod_path, val.as_ref())
83 } 89 }
84} 90}
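
The updated `rust_code_markup_with_doc` prepends the module path, when present and non-empty, as its own line inside the fenced block, before the signature and any doc text. Below is a standalone sketch with the same shape as the hunk above, using plain `&str` parameters instead of the generic `AsRef<str>` bounds.

// Render a hover body: optional module path line, then the signature, inside a
// ```rust fence, followed by the doc comment if any.
fn rust_code_markup_with_doc(val: &str, doc: Option<&str>, mod_path: Option<&str>) -> String {
    let mod_path = mod_path
        .filter(|path| !path.is_empty())
        .map(|path| format!("{}\n", path))
        .unwrap_or_default();
    match doc {
        Some(doc) => format!("```rust\n{}{}\n```\n\n{}", mod_path, val, doc),
        None => format!("```rust\n{}{}\n```", mod_path, val),
    }
}

fn main() {
    let rendered = rust_code_markup_with_doc("fn new() -> Thing", None, Some("wrapper::Thing"));
    assert_eq!(rendered, "```rust\nwrapper::Thing\nfn new() -> Thing\n```");
    println!("{}", rendered);
}
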
diff --git a/crates/ra_ide/src/hover.rs b/crates/ra_ide/src/hover.rs
index e9c682557..25e038a55 100644
--- a/crates/ra_ide/src/hover.rs
+++ b/crates/ra_ide/src/hover.rs
@@ -1,6 +1,10 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::{Adt, HasSource, HirDisplay, Semantics}; 3use hir::{
4 Adt, AsAssocItem, AssocItemContainer, FieldSource, HasSource, HirDisplay, ModuleDef,
5 ModuleSource, Semantics,
6};
7use ra_db::SourceDatabase;
4use ra_ide_db::{ 8use ra_ide_db::{
5 defs::{classify_name, classify_name_ref, Definition}, 9 defs::{classify_name, classify_name_ref, Definition},
6 RootDatabase, 10 RootDatabase,
@@ -16,6 +20,8 @@ use crate::{
16 display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel}, 20 display::{macro_label, rust_code_markup, rust_code_markup_with_doc, ShortLabel},
17 FilePosition, RangeInfo, 21 FilePosition, RangeInfo,
18}; 22};
23use itertools::Itertools;
24use std::iter::once;
19 25
20/// Contains the results when hovering over an item 26/// Contains the results when hovering over an item
21#[derive(Debug, Clone)] 27#[derive(Debug, Clone)]
@@ -83,44 +89,86 @@ impl HoverResult {
83 } 89 }
84} 90}
85 91
86fn hover_text(docs: Option<String>, desc: Option<String>) -> Option<String> { 92fn hover_text(
87 match (desc, docs) { 93 docs: Option<String>,
88 (Some(desc), docs) => Some(rust_code_markup_with_doc(desc, docs)), 94 desc: Option<String>,
89 (None, Some(docs)) => Some(docs), 95 mod_path: Option<String>,
96) -> Option<String> {
97 match (desc, docs, mod_path) {
98 (Some(desc), docs, mod_path) => Some(rust_code_markup_with_doc(desc, docs, mod_path)),
99 (None, Some(docs), _) => Some(docs),
100 _ => None,
101 }
102}
103
104fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option<String> {
105 match def {
106 Definition::StructField(f) => Some(f.parent_def(db).name(db)),
107 Definition::Local(l) => l.parent(db).name(db),
108 Definition::ModuleDef(md) => match md {
109 ModuleDef::Function(f) => match f.as_assoc_item(db)?.container(db) {
110 AssocItemContainer::Trait(t) => Some(t.name(db)),
111 AssocItemContainer::ImplDef(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)),
112 },
113 ModuleDef::EnumVariant(e) => Some(e.parent_enum(db).name(db)),
114 _ => None,
115 },
116 Definition::SelfType(i) => i.target_ty(db).as_adt().map(|adt| adt.name(db)),
90 _ => None, 117 _ => None,
91 } 118 }
119 .map(|name| name.to_string())
120}
121
122fn determine_mod_path(db: &RootDatabase, def: &Definition) -> Option<String> {
123 let mod_path = def.module(db).map(|module| {
124 once(db.crate_graph()[module.krate().into()].display_name.clone())
125 .chain(
126 module
127 .path_to_root(db)
128 .into_iter()
129 .rev()
130 .map(|it| it.name(db).map(|name| name.to_string())),
131 )
132 .chain(once(definition_owner_name(db, def)))
133 .flatten()
134 .join("::")
135 });
136 mod_path
92} 137}
93 138
94fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<String> { 139fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<String> {
140 let mod_path = determine_mod_path(db, &def);
95 return match def { 141 return match def {
96 Definition::Macro(it) => { 142 Definition::Macro(it) => {
97 let src = it.source(db); 143 let src = it.source(db);
98 hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value))) 144 hover_text(src.value.doc_comment_text(), Some(macro_label(&src.value)), mod_path)
99 } 145 }
100 Definition::StructField(it) => { 146 Definition::StructField(it) => {
101 let src = it.source(db); 147 let src = it.source(db);
102 match src.value { 148 match src.value {
103 hir::FieldSource::Named(it) => hover_text(it.doc_comment_text(), it.short_label()), 149 FieldSource::Named(it) => {
150 hover_text(it.doc_comment_text(), it.short_label(), mod_path)
151 }
104 _ => None, 152 _ => None,
105 } 153 }
106 } 154 }
107 Definition::ModuleDef(it) => match it { 155 Definition::ModuleDef(it) => match it {
108 hir::ModuleDef::Module(it) => match it.definition_source(db).value { 156 ModuleDef::Module(it) => match it.definition_source(db).value {
109 hir::ModuleSource::Module(it) => { 157 ModuleSource::Module(it) => {
110 hover_text(it.doc_comment_text(), it.short_label()) 158 hover_text(it.doc_comment_text(), it.short_label(), mod_path)
111 } 159 }
112 _ => None, 160 _ => None,
113 }, 161 },
114 hir::ModuleDef::Function(it) => from_def_source(db, it), 162 ModuleDef::Function(it) => from_def_source(db, it, mod_path),
115 hir::ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it), 163 ModuleDef::Adt(Adt::Struct(it)) => from_def_source(db, it, mod_path),
116 hir::ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it), 164 ModuleDef::Adt(Adt::Union(it)) => from_def_source(db, it, mod_path),
117 hir::ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it), 165 ModuleDef::Adt(Adt::Enum(it)) => from_def_source(db, it, mod_path),
118 hir::ModuleDef::EnumVariant(it) => from_def_source(db, it), 166 ModuleDef::EnumVariant(it) => from_def_source(db, it, mod_path),
119 hir::ModuleDef::Const(it) => from_def_source(db, it), 167 ModuleDef::Const(it) => from_def_source(db, it, mod_path),
120 hir::ModuleDef::Static(it) => from_def_source(db, it), 168 ModuleDef::Static(it) => from_def_source(db, it, mod_path),
121 hir::ModuleDef::Trait(it) => from_def_source(db, it), 169 ModuleDef::Trait(it) => from_def_source(db, it, mod_path),
122 hir::ModuleDef::TypeAlias(it) => from_def_source(db, it), 170 ModuleDef::TypeAlias(it) => from_def_source(db, it, mod_path),
123 hir::ModuleDef::BuiltinType(it) => Some(it.to_string()), 171 ModuleDef::BuiltinType(it) => Some(it.to_string()),
124 }, 172 },
125 Definition::Local(it) => { 173 Definition::Local(it) => {
126 Some(rust_code_markup(it.ty(db).display_truncated(db, None).to_string())) 174 Some(rust_code_markup(it.ty(db).display_truncated(db, None).to_string()))
@@ -131,13 +179,13 @@ fn hover_text_from_name_kind(db: &RootDatabase, def: Definition) -> Option<Strin
131 } 179 }
132 }; 180 };
133 181
134 fn from_def_source<A, D>(db: &RootDatabase, def: D) -> Option<String> 182 fn from_def_source<A, D>(db: &RootDatabase, def: D, mod_path: Option<String>) -> Option<String>
135 where 183 where
136 D: HasSource<Ast = A>, 184 D: HasSource<Ast = A>,
137 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel, 185 A: ast::DocCommentsOwner + ast::NameOwner + ShortLabel,
138 { 186 {
139 let src = def.source(db); 187 let src = def.source(db);
140 hover_text(src.value.doc_comment_text(), src.value.short_label()) 188 hover_text(src.value.doc_comment_text(), src.value.short_label(), mod_path)
141 } 189 }
142} 190}
143 191
@@ -345,7 +393,7 @@ mod tests {
345 }; 393 };
346 } 394 }
347 "#, 395 "#,
348 &["field_a: u32"], 396 &["Foo\nfield_a: u32"],
349 ); 397 );
350 398
351 // Hovering over the field in the definition 399 // Hovering over the field in the definition
@@ -362,7 +410,7 @@ mod tests {
362 }; 410 };
363 } 411 }
364 "#, 412 "#,
365 &["field_a: u32"], 413 &["Foo\nfield_a: u32"],
366 ); 414 );
367 } 415 }
368 416
@@ -415,7 +463,7 @@ fn main() {
415 ", 463 ",
416 ); 464 );
417 let hover = analysis.hover(position).unwrap().unwrap(); 465 let hover = analysis.hover(position).unwrap().unwrap();
418 assert_eq!(trim_markup_opt(hover.info.first()), Some("Some")); 466 assert_eq!(trim_markup_opt(hover.info.first()), Some("Option\nSome"));
419 467
420 let (analysis, position) = single_file_with_position( 468 let (analysis, position) = single_file_with_position(
421 " 469 "
@@ -442,6 +490,7 @@ fn main() {
442 } 490 }
443 "#, 491 "#,
444 &[" 492 &["
493Option
445None 494None
446``` 495```
447 496
@@ -462,6 +511,7 @@ The None variant
462 } 511 }
463 "#, 512 "#,
464 &[" 513 &["
514Option
465Some 515Some
466``` 516```
467 517
@@ -528,21 +578,23 @@ fn func(foo: i32) { if true { <|>foo; }; }
528 fn test_hover_infer_associated_method_exact() { 578 fn test_hover_infer_associated_method_exact() {
529 let (analysis, position) = single_file_with_position( 579 let (analysis, position) = single_file_with_position(
530 " 580 "
531 struct Thing { x: u32 } 581 mod wrapper {
582 struct Thing { x: u32 }
532 583
533 impl Thing { 584 impl Thing {
534 fn new() -> Thing { 585 fn new() -> Thing {
535 Thing { x: 0 } 586 Thing { x: 0 }
587 }
536 } 588 }
537 } 589 }
538 590
539 fn main() { 591 fn main() {
540 let foo_test = Thing::new<|>(); 592 let foo_test = wrapper::Thing::new<|>();
541 } 593 }
542 ", 594 ",
543 ); 595 );
544 let hover = analysis.hover(position).unwrap().unwrap(); 596 let hover = analysis.hover(position).unwrap().unwrap();
545 assert_eq!(trim_markup_opt(hover.info.first()), Some("fn new() -> Thing")); 597 assert_eq!(trim_markup_opt(hover.info.first()), Some("wrapper::Thing\nfn new() -> Thing"));
546 assert_eq!(hover.info.is_exact(), true); 598 assert_eq!(hover.info.is_exact(), true);
547 } 599 }
548 600
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index 69098a630..cf0cbdbd0 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -119,6 +119,12 @@ fn should_not_display_type_hint(db: &RootDatabase, bind_pat: &ast::BindPat, pat_
119 return true; 119 return true;
120 } 120 }
121 121
122 if let Some(Adt::Struct(s)) = pat_ty.as_adt() {
123 if s.fields(db).is_empty() && s.name(db).to_string() == bind_pat.syntax().to_string() {
124 return true;
125 }
126 }
127
122 for node in bind_pat.syntax().ancestors() { 128 for node in bind_pat.syntax().ancestors() {
123 match_ast! { 129 match_ast! {
124 match node { 130 match node {
@@ -943,4 +949,30 @@ fn main() {
943 "### 949 "###
944 ); 950 );
945 } 951 }
952
953 #[test]
954 fn unit_structs_have_no_type_hints() {
955 let (analysis, file_id) = single_file(
956 r#"
957enum CustomResult<T, E> {
958 Ok(T),
959 Err(E),
960}
961use CustomResult::*;
962
963struct SyntheticSyntax;
964
965fn main() {
966 match Ok(()) {
967 Ok(_) => (),
968 Err(SyntheticSyntax) => (),
969 }
970}"#,
971 );
972
973 assert_debug_snapshot!(analysis.inlay_hints(file_id, Some(8)).unwrap(), @r###"
974 []
975 "###
976 );
977 }
946} 978}
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index 4dfe0553e..015fae195 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -62,7 +62,7 @@ use crate::display::ToNav;
62pub use crate::{ 62pub use crate::{
63 assists::{Assist, AssistId}, 63 assists::{Assist, AssistId},
64 call_hierarchy::CallItem, 64 call_hierarchy::CallItem,
65 completion::{CompletionItem, CompletionItemKind, InsertTextFormat}, 65 completion::{CompletionItem, CompletionItemKind, CompletionOptions, InsertTextFormat},
66 diagnostics::Severity, 66 diagnostics::Severity,
67 display::{file_structure, FunctionSignature, NavigationTarget, StructureNode}, 67 display::{file_structure, FunctionSignature, NavigationTarget, StructureNode},
68 expand_macro::ExpandedMacro, 68 expand_macro::ExpandedMacro,
@@ -84,7 +84,6 @@ pub use ra_db::{
84}; 84};
85pub use ra_ide_db::{ 85pub use ra_ide_db::{
86 change::{AnalysisChange, LibraryData}, 86 change::{AnalysisChange, LibraryData},
87 feature_flags::FeatureFlags,
88 line_index::{LineCol, LineIndex}, 87 line_index::{LineCol, LineIndex},
89 line_index_utils::translate_offset_with_edit, 88 line_index_utils::translate_offset_with_edit,
90 search::SearchScope, 89 search::SearchScope,
@@ -131,13 +130,13 @@ pub struct AnalysisHost {
131 130
132impl Default for AnalysisHost { 131impl Default for AnalysisHost {
133 fn default() -> AnalysisHost { 132 fn default() -> AnalysisHost {
134 AnalysisHost::new(None, FeatureFlags::default()) 133 AnalysisHost::new(None)
135 } 134 }
136} 135}
137 136
138impl AnalysisHost { 137impl AnalysisHost {
139 pub fn new(lru_capcity: Option<usize>, feature_flags: FeatureFlags) -> AnalysisHost { 138 pub fn new(lru_capacity: Option<usize>) -> AnalysisHost {
140 AnalysisHost { db: RootDatabase::new(lru_capcity, feature_flags) } 139 AnalysisHost { db: RootDatabase::new(lru_capacity) }
141 } 140 }
142 /// Returns a snapshot of the current state, which you can query for 141 /// Returns a snapshot of the current state, which you can query for
143 /// semantic information. 142 /// semantic information.
@@ -145,10 +144,6 @@ impl AnalysisHost {
145 Analysis { db: self.db.snapshot() } 144 Analysis { db: self.db.snapshot() }
146 } 145 }
147 146
148 pub fn feature_flags(&self) -> &FeatureFlags {
149 &self.db.feature_flags
150 }
151
152 /// Applies changes to the current state of the world. If there are 147 /// Applies changes to the current state of the world. If there are
153 /// outstanding snapshots, they will be canceled. 148 /// outstanding snapshots, they will be canceled.
154 pub fn apply_change(&mut self, change: AnalysisChange) { 149 pub fn apply_change(&mut self, change: AnalysisChange) {
@@ -211,18 +206,20 @@ impl Analysis {
211 // Default to enable test for single file. 206 // Default to enable test for single file.
212 let mut cfg_options = CfgOptions::default(); 207 let mut cfg_options = CfgOptions::default();
213 cfg_options.insert_atom("test".into()); 208 cfg_options.insert_atom("test".into());
214 crate_graph.add_crate_root(file_id, Edition::Edition2018, cfg_options, Env::default()); 209 crate_graph.add_crate_root(
210 file_id,
211 Edition::Edition2018,
212 None,
213 cfg_options,
214 Env::default(),
215 Default::default(),
216 );
215 change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text)); 217 change.add_file(source_root, file_id, "main.rs".into(), Arc::new(text));
216 change.set_crate_graph(crate_graph); 218 change.set_crate_graph(crate_graph);
217 host.apply_change(change); 219 host.apply_change(change);
218 (host.analysis(), file_id) 220 (host.analysis(), file_id)
219 } 221 }
220 222
221 /// Features for Analysis.
222 pub fn feature_flags(&self) -> &FeatureFlags {
223 &self.db.feature_flags
224 }
225
226 /// Debug info about the current state of the analysis. 223 /// Debug info about the current state of the analysis.
227 pub fn status(&self) -> Cancelable<String> { 224 pub fn status(&self) -> Cancelable<String> {
228 self.with_db(|db| status::status(&*db)) 225 self.with_db(|db| status::status(&*db))
@@ -415,12 +412,12 @@ impl Analysis {
415 412
416 /// Returns the edition of the given crate. 413 /// Returns the edition of the given crate.
417 pub fn crate_edition(&self, crate_id: CrateId) -> Cancelable<Edition> { 414 pub fn crate_edition(&self, crate_id: CrateId) -> Cancelable<Edition> {
418 self.with_db(|db| db.crate_graph().edition(crate_id)) 415 self.with_db(|db| db.crate_graph()[crate_id].edition)
419 } 416 }
420 417
421 /// Returns the root file of the given crate. 418 /// Returns the root file of the given crate.
422 pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> { 419 pub fn crate_root(&self, crate_id: CrateId) -> Cancelable<FileId> {
423 self.with_db(|db| db.crate_graph().crate_root(crate_id)) 420 self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
424 } 421 }
425 422
426 /// Returns the set of possible targets to run for the current file. 423 /// Returns the set of possible targets to run for the current file.
@@ -444,8 +441,12 @@ impl Analysis {
444 } 441 }
445 442
446 /// Computes completions at the given position. 443 /// Computes completions at the given position.
447 pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> { 444 pub fn completions(
448 self.with_db(|db| completion::completions(db, position).map(Into::into)) 445 &self,
446 position: FilePosition,
447 options: &CompletionOptions,
448 ) -> Cancelable<Option<Vec<CompletionItem>>> {
449 self.with_db(|db| completion::completions(db, position, options).map(Into::into))
449 } 450 }
450 451
451 /// Computes assists (aka code actions aka intentions) for the given 452 /// Computes assists (aka code actions aka intentions) for the given
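A minimal caller-side sketch (not part of the diff) of how the reworked ra_ide entry points fit together; it assumes `CompletionOptions` implements `Default`, which is not shown in this hunk:

    use ra_ide::{Analysis, AnalysisHost, CompletionItem, CompletionOptions, FilePosition};

    // `AnalysisHost::new` now takes only the LRU capacity; feature flags are gone.
    fn make_host() -> AnalysisHost {
        AnalysisHost::new(None)
    }

    // Completions are configured per request instead of via global feature flags.
    fn complete_at(analysis: &Analysis, position: FilePosition) -> Option<Vec<CompletionItem>> {
        let options = CompletionOptions::default(); // assumed `Default` impl
        analysis.completions(position, &options).ok().flatten()
    }
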
diff --git a/crates/ra_ide/src/mock_analysis.rs b/crates/ra_ide/src/mock_analysis.rs
index f4cd6deb7..25816cf6f 100644
--- a/crates/ra_ide/src/mock_analysis.rs
+++ b/crates/ra_ide/src/mock_analysis.rs
@@ -99,13 +99,21 @@ impl MockAnalysis {
99 root_crate = Some(crate_graph.add_crate_root( 99 root_crate = Some(crate_graph.add_crate_root(
100 file_id, 100 file_id,
101 Edition2018, 101 Edition2018,
102 None,
102 cfg_options, 103 cfg_options,
103 Env::default(), 104 Env::default(),
105 Default::default(),
104 )); 106 ));
105 } else if path.ends_with("/lib.rs") { 107 } else if path.ends_with("/lib.rs") {
106 let other_crate =
107 crate_graph.add_crate_root(file_id, Edition2018, cfg_options, Env::default());
108 let crate_name = path.parent().unwrap().file_name().unwrap(); 108 let crate_name = path.parent().unwrap().file_name().unwrap();
109 let other_crate = crate_graph.add_crate_root(
110 file_id,
111 Edition2018,
112 Some(crate_name.to_owned()),
113 cfg_options,
114 Env::default(),
115 Default::default(),
116 );
109 if let Some(root_crate) = root_crate { 117 if let Some(root_crate) = root_crate {
110 crate_graph 118 crate_graph
111 .add_dep(root_crate, CrateName::new(crate_name).unwrap(), other_crate) 119 .add_dep(root_crate, CrateName::new(crate_name).unwrap(), other_crate)
diff --git a/crates/ra_ide/src/parent_module.rs b/crates/ra_ide/src/parent_module.rs
index 2c4bdb039..76d130b9b 100644
--- a/crates/ra_ide/src/parent_module.rs
+++ b/crates/ra_ide/src/parent_module.rs
@@ -133,8 +133,10 @@ mod tests {
133 let crate_id = crate_graph.add_crate_root( 133 let crate_id = crate_graph.add_crate_root(
134 root_file, 134 root_file,
135 Edition2018, 135 Edition2018,
136 None,
136 CfgOptions::default(), 137 CfgOptions::default(),
137 Env::default(), 138 Env::default(),
139 Default::default(),
138 ); 140 );
139 let mut change = AnalysisChange::new(); 141 let mut change = AnalysisChange::new();
140 change.set_crate_graph(crate_graph); 142 change.set_crate_graph(crate_graph);
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index 5b4bcf434..7d1190af9 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -9,7 +9,8 @@ use ra_syntax::{
9use ra_text_edit::TextEdit; 9use ra_text_edit::TextEdit;
10 10
11use crate::{ 11use crate::{
12 FileId, FilePosition, FileSystemEdit, RangeInfo, SourceChange, SourceFileEdit, TextRange, 12 FilePosition, FileSystemEdit, RangeInfo, Reference, ReferenceKind, SourceChange,
13 SourceFileEdit, TextRange,
13}; 14};
14 15
15use super::find_all_refs; 16use super::find_all_refs;
@@ -46,12 +47,29 @@ fn find_name_and_module_at_offset(
46 Some((ast_name, ast_module)) 47 Some((ast_name, ast_module))
47} 48}
48 49
49fn source_edit_from_file_id_range( 50fn source_edit_from_reference(reference: Reference, new_name: &str) -> SourceFileEdit {
50 file_id: FileId, 51 let mut replacement_text = String::new();
51 range: TextRange, 52 let file_id = reference.file_range.file_id;
52 new_name: &str, 53 let range = match reference.kind {
53) -> SourceFileEdit { 54 ReferenceKind::StructFieldShorthandForField => {
54 SourceFileEdit { file_id, edit: TextEdit::replace(range, new_name.into()) } 55 replacement_text.push_str(new_name);
56 replacement_text.push_str(": ");
57 TextRange::from_to(
58 reference.file_range.range.start(),
59 reference.file_range.range.start(),
60 )
61 }
62 ReferenceKind::StructFieldShorthandForLocal => {
63 replacement_text.push_str(": ");
64 replacement_text.push_str(new_name);
65 TextRange::from_to(reference.file_range.range.end(), reference.file_range.range.end())
66 }
67 _ => {
68 replacement_text.push_str(new_name);
69 reference.file_range.range
70 }
71 };
72 SourceFileEdit { file_id, edit: TextEdit::replace(range, replacement_text) }
55} 73}
56 74
57fn rename_mod( 75fn rename_mod(
@@ -99,13 +117,10 @@ fn rename_mod(
99 source_file_edits.push(edit); 117 source_file_edits.push(edit);
100 118
101 if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) { 119 if let Some(RangeInfo { range: _, info: refs }) = find_all_refs(sema.db, position, None) {
102 let ref_edits = refs.references.into_iter().map(|reference| { 120 let ref_edits = refs
103 source_edit_from_file_id_range( 121 .references
104 reference.file_range.file_id, 122 .into_iter()
105 reference.file_range.range, 123 .map(|reference| source_edit_from_reference(reference, new_name));
106 new_name,
107 )
108 });
109 source_file_edits.extend(ref_edits); 124 source_file_edits.extend(ref_edits);
110 } 125 }
111 126
@@ -121,13 +136,7 @@ fn rename_reference(
121 136
122 let edit = refs 137 let edit = refs
123 .into_iter() 138 .into_iter()
124 .map(|reference| { 139 .map(|reference| source_edit_from_reference(reference, new_name))
125 source_edit_from_file_id_range(
126 reference.file_range.file_id,
127 reference.file_range.range,
128 new_name,
129 )
130 })
131 .collect::<Vec<_>>(); 140 .collect::<Vec<_>>();
132 141
133 if edit.is_empty() { 142 if edit.is_empty() {
@@ -286,6 +295,163 @@ mod tests {
286 } 295 }
287 296
288 #[test] 297 #[test]
298 fn test_rename_struct_field() {
299 test_rename(
300 r#"
301 struct Foo {
302 i<|>: i32,
303 }
304
305 impl Foo {
306 fn new(i: i32) -> Self {
307 Self { i: i }
308 }
309 }
310 "#,
311 "j",
312 r#"
313 struct Foo {
314 j: i32,
315 }
316
317 impl Foo {
318 fn new(i: i32) -> Self {
319 Self { j: i }
320 }
321 }
322 "#,
323 );
324 }
325
326 #[test]
327 fn test_rename_struct_field_for_shorthand() {
328 test_rename(
329 r#"
330 struct Foo {
331 i<|>: i32,
332 }
333
334 impl Foo {
335 fn new(i: i32) -> Self {
336 Self { i }
337 }
338 }
339 "#,
340 "j",
341 r#"
342 struct Foo {
343 j: i32,
344 }
345
346 impl Foo {
347 fn new(i: i32) -> Self {
348 Self { j: i }
349 }
350 }
351 "#,
352 );
353 }
354
355 #[test]
356 fn test_rename_local_for_field_shorthand() {
357 test_rename(
358 r#"
359 struct Foo {
360 i: i32,
361 }
362
363 impl Foo {
364 fn new(i<|>: i32) -> Self {
365 Self { i }
366 }
367 }
368 "#,
369 "j",
370 r#"
371 struct Foo {
372 i: i32,
373 }
374
375 impl Foo {
376 fn new(j: i32) -> Self {
377 Self { i: j }
378 }
379 }
380 "#,
381 );
382 }
383
384 #[test]
385 fn test_field_shorthand_correct_struct() {
386 test_rename(
387 r#"
388 struct Foo {
389 i<|>: i32,
390 }
391
392 struct Bar {
393 i: i32,
394 }
395
396 impl Bar {
397 fn new(i: i32) -> Self {
398 Self { i }
399 }
400 }
401 "#,
402 "j",
403 r#"
404 struct Foo {
405 j: i32,
406 }
407
408 struct Bar {
409 i: i32,
410 }
411
412 impl Bar {
413 fn new(i: i32) -> Self {
414 Self { i }
415 }
416 }
417 "#,
418 );
419 }
420
421 #[test]
422 fn test_shadow_local_for_struct_shorthand() {
423 test_rename(
424 r#"
425 struct Foo {
426 i: i32,
427 }
428
429 fn baz(i<|>: i32) -> Self {
430 let x = Foo { i };
431 {
432 let i = 0;
433 Foo { i }
434 }
435 }
436 "#,
437 "j",
438 r#"
439 struct Foo {
440 i: i32,
441 }
442
443 fn baz(j: i32) -> Self {
444 let x = Foo { i: j };
445 {
446 let i = 0;
447 Foo { i }
448 }
449 }
450 "#,
451 );
452 }
453
454 #[test]
289 fn test_rename_mod() { 455 fn test_rename_mod() {
290 let (analysis, position) = analysis_and_position( 456 let (analysis, position) = analysis_and_position(
291 " 457 "
diff --git a/crates/ra_ide/src/typing.rs b/crates/ra_ide/src/typing.rs
index 7f1b9150f..53c65f8bc 100644
--- a/crates/ra_ide/src/typing.rs
+++ b/crates/ra_ide/src/typing.rs
@@ -13,77 +13,21 @@
13//! Language server executes such typing assists synchronously. That is, they 13//! Language server executes such typing assists synchronously. That is, they
14//! block user's typing and should be pretty fast for this reason! 14//! block user's typing and should be pretty fast for this reason!
15 15
16mod on_enter;
17
16use ra_db::{FilePosition, SourceDatabase}; 18use ra_db::{FilePosition, SourceDatabase};
17use ra_fmt::leading_indent; 19use ra_fmt::leading_indent;
18use ra_ide_db::RootDatabase; 20use ra_ide_db::RootDatabase;
19use ra_syntax::{ 21use ra_syntax::{
20 algo::find_node_at_offset, 22 algo::find_node_at_offset,
21 ast::{self, AstToken}, 23 ast::{self, AstToken},
22 AstNode, SmolStr, SourceFile, 24 AstNode, SourceFile, TextRange, TextUnit,
23 SyntaxKind::*,
24 SyntaxToken, TextRange, TextUnit, TokenAtOffset,
25}; 25};
26use ra_text_edit::TextEdit; 26use ra_text_edit::TextEdit;
27 27
28use crate::{source_change::SingleFileChange, SourceChange, SourceFileEdit}; 28use crate::{source_change::SingleFileChange, SourceChange};
29
30pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
31 let parse = db.parse(position.file_id);
32 let file = parse.tree();
33 let comment = file
34 .syntax()
35 .token_at_offset(position.offset)
36 .left_biased()
37 .and_then(ast::Comment::cast)?;
38
39 if comment.kind().shape.is_block() {
40 return None;
41 }
42
43 let prefix = comment.prefix();
44 let comment_range = comment.syntax().text_range();
45 if position.offset < comment_range.start() + TextUnit::of_str(prefix) {
46 return None;
47 }
48
49 // Continuing non-doc line comments (like this one :) ) is annoying
50 if prefix == "//" && comment_range.end() == position.offset {
51 return None;
52 }
53
54 let indent = node_indent(&file, comment.syntax())?;
55 let inserted = format!("\n{}{} ", indent, prefix);
56 let cursor_position = position.offset + TextUnit::of_str(&inserted);
57 let edit = TextEdit::insert(position.offset, inserted);
58 29
59 Some( 30pub(crate) use on_enter::on_enter;
60 SourceChange::source_file_edit(
61 "on enter",
62 SourceFileEdit { edit, file_id: position.file_id },
63 )
64 .with_cursor(FilePosition { offset: cursor_position, file_id: position.file_id }),
65 )
66}
67
68fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
69 let ws = match file.syntax().token_at_offset(token.text_range().start()) {
70 TokenAtOffset::Between(l, r) => {
71 assert!(r == *token);
72 l
73 }
74 TokenAtOffset::Single(n) => {
75 assert!(n == *token);
76 return Some("".into());
77 }
78 TokenAtOffset::None => unreachable!(),
79 };
80 if ws.kind() != WHITESPACE {
81 return None;
82 }
83 let text = ws.text();
84 let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
85 Some(text[pos..].into())
86}
87 31
88pub(crate) const TRIGGER_CHARS: &str = ".=>"; 32pub(crate) const TRIGGER_CHARS: &str = ".=>";
89 33
@@ -196,102 +140,10 @@ fn on_arrow_typed(file: &SourceFile, offset: TextUnit) -> Option<SingleFileChang
196 140
197#[cfg(test)] 141#[cfg(test)]
198mod tests { 142mod tests {
199 use test_utils::{add_cursor, assert_eq_text, extract_offset}; 143 use test_utils::{assert_eq_text, extract_offset};
200
201 use crate::mock_analysis::single_file;
202 144
203 use super::*; 145 use super::*;
204 146
205 #[test]
206 fn test_on_enter() {
207 fn apply_on_enter(before: &str) -> Option<String> {
208 let (offset, before) = extract_offset(before);
209 let (analysis, file_id) = single_file(&before);
210 let result = analysis.on_enter(FilePosition { offset, file_id }).unwrap()?;
211
212 assert_eq!(result.source_file_edits.len(), 1);
213 let actual = result.source_file_edits[0].edit.apply(&before);
214 let actual = add_cursor(&actual, result.cursor_position.unwrap().offset);
215 Some(actual)
216 }
217
218 fn do_check(before: &str, after: &str) {
219 let actual = apply_on_enter(before).unwrap();
220 assert_eq_text!(after, &actual);
221 }
222
223 fn do_check_noop(text: &str) {
224 assert!(apply_on_enter(text).is_none())
225 }
226
227 do_check(
228 r"
229/// Some docs<|>
230fn foo() {
231}
232",
233 r"
234/// Some docs
235/// <|>
236fn foo() {
237}
238",
239 );
240 do_check(
241 r"
242impl S {
243 /// Some<|> docs.
244 fn foo() {}
245}
246",
247 r"
248impl S {
249 /// Some
250 /// <|> docs.
251 fn foo() {}
252}
253",
254 );
255 do_check(
256 r"
257fn main() {
258 // Fix<|> me
259 let x = 1 + 1;
260}
261",
262 r"
263fn main() {
264 // Fix
265 // <|> me
266 let x = 1 + 1;
267}
268",
269 );
270 do_check(
271 r"
272///<|> Some docs
273fn foo() {
274}
275",
276 r"
277///
278/// <|> Some docs
279fn foo() {
280}
281",
282 );
283 do_check_noop(
284 r"
285fn main() {
286 // Fix me<|>
287 let x = 1 + 1;
288}
289",
290 );
291
292 do_check_noop(r"<|>//! docz");
293 }
294
295 fn do_type_char(char_typed: char, before: &str) -> Option<(String, SingleFileChange)> { 147 fn do_type_char(char_typed: char, before: &str) -> Option<(String, SingleFileChange)> {
296 let (offset, before) = extract_offset(before); 148 let (offset, before) = extract_offset(before);
297 let edit = TextEdit::insert(offset, char_typed.to_string()); 149 let edit = TextEdit::insert(offset, char_typed.to_string());
diff --git a/crates/ra_ide/src/typing/on_enter.rs b/crates/ra_ide/src/typing/on_enter.rs
new file mode 100644
index 000000000..6bcf2d72b
--- /dev/null
+++ b/crates/ra_ide/src/typing/on_enter.rs
@@ -0,0 +1,216 @@
1//! Handles the `Enter` key press. At the moment, this only continues
2//! comments, but should handle indentation some time in the future as well.
3
4use ra_db::{FilePosition, SourceDatabase};
5use ra_ide_db::RootDatabase;
6use ra_syntax::{
7 ast::{self, AstToken},
8 AstNode, SmolStr, SourceFile,
9 SyntaxKind::*,
10 SyntaxToken, TextUnit, TokenAtOffset,
11};
12use ra_text_edit::TextEdit;
13
14use crate::{SourceChange, SourceFileEdit};
15
16pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> {
17 let parse = db.parse(position.file_id);
18 let file = parse.tree();
19 let comment = file
20 .syntax()
21 .token_at_offset(position.offset)
22 .left_biased()
23 .and_then(ast::Comment::cast)?;
24
25 if comment.kind().shape.is_block() {
26 return None;
27 }
28
29 let prefix = comment.prefix();
30 let comment_range = comment.syntax().text_range();
31 if position.offset < comment_range.start() + TextUnit::of_str(prefix) {
32 return None;
33 }
34
35 // Continuing single-line non-doc comments (like this one :) ) is annoying
36 if prefix == "//" && comment_range.end() == position.offset && !followed_by_comment(&comment) {
37 return None;
38 }
39
40 let indent = node_indent(&file, comment.syntax())?;
41 let inserted = format!("\n{}{} ", indent, prefix);
42 let cursor_position = position.offset + TextUnit::of_str(&inserted);
43 let edit = TextEdit::insert(position.offset, inserted);
44
45 Some(
46 SourceChange::source_file_edit(
47 "on enter",
48 SourceFileEdit { edit, file_id: position.file_id },
49 )
50 .with_cursor(FilePosition { offset: cursor_position, file_id: position.file_id }),
51 )
52}
53
54fn followed_by_comment(comment: &ast::Comment) -> bool {
55 let ws = match comment.syntax().next_token().and_then(ast::Whitespace::cast) {
56 Some(it) => it,
57 None => return false,
58 };
59 if ws.spans_multiple_lines() {
60 return false;
61 }
62 ws.syntax().next_token().and_then(ast::Comment::cast).is_some()
63}
64
65fn node_indent(file: &SourceFile, token: &SyntaxToken) -> Option<SmolStr> {
66 let ws = match file.syntax().token_at_offset(token.text_range().start()) {
67 TokenAtOffset::Between(l, r) => {
68 assert!(r == *token);
69 l
70 }
71 TokenAtOffset::Single(n) => {
72 assert!(n == *token);
73 return Some("".into());
74 }
75 TokenAtOffset::None => unreachable!(),
76 };
77 if ws.kind() != WHITESPACE {
78 return None;
79 }
80 let text = ws.text();
81 let pos = text.rfind('\n').map(|it| it + 1).unwrap_or(0);
82 Some(text[pos..].into())
83}
84
85#[cfg(test)]
86mod tests {
87 use test_utils::{add_cursor, assert_eq_text, extract_offset};
88
89 use crate::mock_analysis::single_file;
90
91 use super::*;
92
93 fn apply_on_enter(before: &str) -> Option<String> {
94 let (offset, before) = extract_offset(before);
95 let (analysis, file_id) = single_file(&before);
96 let result = analysis.on_enter(FilePosition { offset, file_id }).unwrap()?;
97
98 assert_eq!(result.source_file_edits.len(), 1);
99 let actual = result.source_file_edits[0].edit.apply(&before);
100 let actual = add_cursor(&actual, result.cursor_position.unwrap().offset);
101 Some(actual)
102 }
103
104 fn do_check(ra_fixture_before: &str, ra_fixture_after: &str) {
105 let actual = apply_on_enter(ra_fixture_before).unwrap();
106 assert_eq_text!(ra_fixture_after, &actual);
107 }
108
109 fn do_check_noop(ra_fixture_text: &str) {
110 assert!(apply_on_enter(ra_fixture_text).is_none())
111 }
112
113 #[test]
114 fn continues_doc_comment() {
115 do_check(
116 r"
117/// Some docs<|>
118fn foo() {
119}
120",
121 r"
122/// Some docs
123/// <|>
124fn foo() {
125}
126",
127 );
128
129 do_check(
130 r"
131impl S {
132 /// Some<|> docs.
133 fn foo() {}
134}
135",
136 r"
137impl S {
138 /// Some
139 /// <|> docs.
140 fn foo() {}
141}
142",
143 );
144
145 do_check(
146 r"
147///<|> Some docs
148fn foo() {
149}
150",
151 r"
152///
153/// <|> Some docs
154fn foo() {
155}
156",
157 );
158 }
159
160 #[test]
161 fn does_not_continue_before_doc_comment() {
162 do_check_noop(r"<|>//! docz");
163 }
164
165 #[test]
166 fn continues_code_comment_in_the_middle_of_line() {
167 do_check(
168 r"
169fn main() {
170 // Fix<|> me
171 let x = 1 + 1;
172}
173",
174 r"
175fn main() {
176 // Fix
177 // <|> me
178 let x = 1 + 1;
179}
180",
181 );
182 }
183
184 #[test]
185 fn continues_code_comment_in_the_middle_several_lines() {
186 do_check(
187 r"
188fn main() {
189 // Fix<|>
190 // me
191 let x = 1 + 1;
192}
193",
194 r"
195fn main() {
196 // Fix
197 // <|>
198 // me
199 let x = 1 + 1;
200}
201",
202 );
203 }
204
205 #[test]
206 fn does_not_continue_end_of_code_comment() {
207 do_check_noop(
208 r"
209fn main() {
210 // Fix me<|>
211 let x = 1 + 1;
212}
213",
214 );
215 }
216}
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml
index 52f0f23df..de4f5bce0 100644
--- a/crates/ra_ide_db/Cargo.toml
+++ b/crates/ra_ide_db/Cargo.toml
@@ -13,7 +13,7 @@ wasm = []
13[dependencies] 13[dependencies]
14log = "0.4.8" 14log = "0.4.8"
15rayon = "1.3.0" 15rayon = "1.3.0"
16fst = { version = "0.3.5", default-features = false } 16fst = { version = "0.4", default-features = false }
17rustc-hash = "1.1.0" 17rustc-hash = "1.1.0"
18superslice = "1.0.0" 18superslice = "1.0.0"
19once_cell = "1.3.1" 19once_cell = "1.3.1"
diff --git a/crates/ra_ide_db/src/change.rs b/crates/ra_ide_db/src/change.rs
index 7e9310005..628cf6416 100644
--- a/crates/ra_ide_db/src/change.rs
+++ b/crates/ra_ide_db/src/change.rs
@@ -5,7 +5,7 @@ use std::{fmt, sync::Arc, time};
5 5
6use ra_db::{ 6use ra_db::{
7 salsa::{Database, Durability, SweepStrategy}, 7 salsa::{Database, Durability, SweepStrategy},
8 CrateGraph, CrateId, FileId, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot, 8 CrateGraph, FileId, RelativePathBuf, SourceDatabase, SourceDatabaseExt, SourceRoot,
9 SourceRootId, 9 SourceRootId,
10}; 10};
11use ra_prof::{memory_usage, profile, Bytes}; 11use ra_prof::{memory_usage, profile, Bytes};
@@ -88,10 +88,6 @@ impl AnalysisChange {
88 self.crate_graph = Some(graph); 88 self.crate_graph = Some(graph);
89 } 89 }
90 90
91 pub fn set_debug_crate_name(&mut self, crate_id: CrateId, name: String) {
92 self.debug_data.crate_names.insert(crate_id, name);
93 }
94
95 pub fn set_debug_root_path(&mut self, source_root_id: SourceRootId, path: String) { 91 pub fn set_debug_root_path(&mut self, source_root_id: SourceRootId, path: String) {
96 self.debug_data.root_paths.insert(source_root_id, path); 92 self.debug_data.root_paths.insert(source_root_id, path);
97 } 93 }
@@ -279,7 +275,7 @@ impl RootDatabase {
279 self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep); 275 self.query(hir::db::BodyWithSourceMapQuery).sweep(sweep);
280 276
281 self.query(hir::db::ExprScopesQuery).sweep(sweep); 277 self.query(hir::db::ExprScopesQuery).sweep(sweep);
282 self.query(hir::db::DoInferQuery).sweep(sweep); 278 self.query(hir::db::InferQueryQuery).sweep(sweep);
283 self.query(hir::db::BodyQuery).sweep(sweep); 279 self.query(hir::db::BodyQuery).sweep(sweep);
284 } 280 }
285 281
@@ -318,7 +314,7 @@ impl RootDatabase {
318 314
319 // DefDatabase 315 // DefDatabase
320 hir::db::RawItemsQuery 316 hir::db::RawItemsQuery
321 hir::db::ComputeCrateDefMapQuery 317 hir::db::CrateDefMapQueryQuery
322 hir::db::StructDataQuery 318 hir::db::StructDataQuery
323 hir::db::UnionDataQuery 319 hir::db::UnionDataQuery
324 hir::db::EnumDataQuery 320 hir::db::EnumDataQuery
@@ -350,7 +346,7 @@ impl RootDatabase {
350 hir::db::InternImplQuery 346 hir::db::InternImplQuery
351 347
352 // HirDatabase 348 // HirDatabase
353 hir::db::DoInferQuery 349 hir::db::InferQueryQuery
354 hir::db::TyQuery 350 hir::db::TyQuery
355 hir::db::ValueTyQuery 351 hir::db::ValueTyQuery
356 hir::db::ImplSelfTyQuery 352 hir::db::ImplSelfTyQuery
@@ -362,7 +358,6 @@ impl RootDatabase {
362 hir::db::GenericDefaultsQuery 358 hir::db::GenericDefaultsQuery
363 hir::db::ImplsInCrateQuery 359 hir::db::ImplsInCrateQuery
364 hir::db::ImplsForTraitQuery 360 hir::db::ImplsForTraitQuery
365 hir::db::TraitSolverQuery
366 hir::db::InternTypeCtorQuery 361 hir::db::InternTypeCtorQuery
367 hir::db::InternChalkImplQuery 362 hir::db::InternChalkImplQuery
368 hir::db::InternAssocTyValueQuery 363 hir::db::InternAssocTyValueQuery
diff --git a/crates/ra_ide_db/src/lib.rs b/crates/ra_ide_db/src/lib.rs
index 79f48c9e3..fc1b19def 100644
--- a/crates/ra_ide_db/src/lib.rs
+++ b/crates/ra_ide_db/src/lib.rs
@@ -5,7 +5,6 @@
5pub mod marks; 5pub mod marks;
6pub mod line_index; 6pub mod line_index;
7pub mod line_index_utils; 7pub mod line_index_utils;
8pub mod feature_flags;
9pub mod symbol_index; 8pub mod symbol_index;
10pub mod change; 9pub mod change;
11pub mod defs; 10pub mod defs;
@@ -22,7 +21,7 @@ use ra_db::{
22}; 21};
23use rustc_hash::FxHashMap; 22use rustc_hash::FxHashMap;
24 23
25use crate::{feature_flags::FeatureFlags, line_index::LineIndex, symbol_index::SymbolsDatabase}; 24use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
26 25
27#[salsa::database( 26#[salsa::database(
28 ra_db::SourceDatabaseStorage, 27 ra_db::SourceDatabaseStorage,
@@ -37,7 +36,6 @@ use crate::{feature_flags::FeatureFlags, line_index::LineIndex, symbol_index::Sy
37#[derive(Debug)] 36#[derive(Debug)]
38pub struct RootDatabase { 37pub struct RootDatabase {
39 runtime: salsa::Runtime<RootDatabase>, 38 runtime: salsa::Runtime<RootDatabase>,
40 pub feature_flags: Arc<FeatureFlags>,
41 pub(crate) debug_data: Arc<DebugData>, 39 pub(crate) debug_data: Arc<DebugData>,
42 pub last_gc: crate::wasm_shims::Instant, 40 pub last_gc: crate::wasm_shims::Instant,
43 pub last_gc_check: crate::wasm_shims::Instant, 41 pub last_gc_check: crate::wasm_shims::Instant,
@@ -57,6 +55,13 @@ impl FileLoader for RootDatabase {
57 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> { 55 fn relevant_crates(&self, file_id: FileId) -> Arc<Vec<CrateId>> {
58 FileLoaderDelegate(self).relevant_crates(file_id) 56 FileLoaderDelegate(self).relevant_crates(file_id)
59 } 57 }
58 fn resolve_extern_path(
59 &self,
60 extern_id: ra_db::ExternSourceId,
61 relative_path: &RelativePath,
62 ) -> Option<FileId> {
63 FileLoaderDelegate(self).resolve_extern_path(extern_id, relative_path)
64 }
60} 65}
61 66
62impl salsa::Database for RootDatabase { 67impl salsa::Database for RootDatabase {
@@ -82,17 +87,16 @@ impl salsa::Database for RootDatabase {
82 87
83impl Default for RootDatabase { 88impl Default for RootDatabase {
84 fn default() -> RootDatabase { 89 fn default() -> RootDatabase {
85 RootDatabase::new(None, FeatureFlags::default()) 90 RootDatabase::new(None)
86 } 91 }
87} 92}
88 93
89impl RootDatabase { 94impl RootDatabase {
90 pub fn new(lru_capacity: Option<usize>, feature_flags: FeatureFlags) -> RootDatabase { 95 pub fn new(lru_capacity: Option<usize>) -> RootDatabase {
91 let mut db = RootDatabase { 96 let mut db = RootDatabase {
92 runtime: salsa::Runtime::default(), 97 runtime: salsa::Runtime::default(),
93 last_gc: crate::wasm_shims::Instant::now(), 98 last_gc: crate::wasm_shims::Instant::now(),
94 last_gc_check: crate::wasm_shims::Instant::now(), 99 last_gc_check: crate::wasm_shims::Instant::now(),
95 feature_flags: Arc::new(feature_flags),
96 debug_data: Default::default(), 100 debug_data: Default::default(),
97 }; 101 };
98 db.set_crate_graph_with_durability(Default::default(), Durability::HIGH); 102 db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
@@ -112,7 +116,6 @@ impl salsa::ParallelDatabase for RootDatabase {
112 runtime: self.runtime.snapshot(self), 116 runtime: self.runtime.snapshot(self),
113 last_gc: self.last_gc, 117 last_gc: self.last_gc,
114 last_gc_check: self.last_gc_check, 118 last_gc_check: self.last_gc_check,
115 feature_flags: Arc::clone(&self.feature_flags),
116 debug_data: Arc::clone(&self.debug_data), 119 debug_data: Arc::clone(&self.debug_data),
117 }) 120 })
118 } 121 }
@@ -131,12 +134,10 @@ fn line_index(db: &impl LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
131#[derive(Debug, Default, Clone)] 134#[derive(Debug, Default, Clone)]
132pub(crate) struct DebugData { 135pub(crate) struct DebugData {
133 pub(crate) root_paths: FxHashMap<SourceRootId, String>, 136 pub(crate) root_paths: FxHashMap<SourceRootId, String>,
134 pub(crate) crate_names: FxHashMap<CrateId, String>,
135} 137}
136 138
137impl DebugData { 139impl DebugData {
138 pub(crate) fn merge(&mut self, other: DebugData) { 140 pub(crate) fn merge(&mut self, other: DebugData) {
139 self.root_paths.extend(other.root_paths.into_iter()); 141 self.root_paths.extend(other.root_paths.into_iter());
140 self.crate_names.extend(other.crate_names.into_iter());
141 } 142 }
142} 143}
diff --git a/crates/ra_ide_db/src/search.rs b/crates/ra_ide_db/src/search.rs
index 6f198df04..cf78d3e41 100644
--- a/crates/ra_ide_db/src/search.rs
+++ b/crates/ra_ide_db/src/search.rs
@@ -17,7 +17,7 @@ use rustc_hash::FxHashMap;
17use test_utils::tested_by; 17use test_utils::tested_by;
18 18
19use crate::{ 19use crate::{
20 defs::{classify_name_ref, Definition}, 20 defs::{classify_name_ref, Definition, NameRefClass},
21 RootDatabase, 21 RootDatabase,
22}; 22};
23 23
@@ -30,6 +30,8 @@ pub struct Reference {
30 30
31#[derive(Debug, Clone, PartialEq)] 31#[derive(Debug, Clone, PartialEq)]
32pub enum ReferenceKind { 32pub enum ReferenceKind {
33 StructFieldShorthandForField,
34 StructFieldShorthandForLocal,
33 StructLiteral, 35 StructLiteral,
34 Other, 36 Other,
35} 37}
@@ -237,9 +239,8 @@ impl Definition {
237 // FIXME: reuse sb 239 // FIXME: reuse sb
238 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 240 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
239 241
240 if let Some(d) = classify_name_ref(&sema, &name_ref) { 242 match classify_name_ref(&sema, &name_ref) {
241 let d = d.definition(); 243 Some(NameRefClass::Definition(def)) if &def == self => {
242 if &d == self {
243 let kind = if is_record_lit_name_ref(&name_ref) 244 let kind = if is_record_lit_name_ref(&name_ref)
244 || is_call_expr_name_ref(&name_ref) 245 || is_call_expr_name_ref(&name_ref)
245 { 246 {
@@ -252,9 +253,26 @@ impl Definition {
252 refs.push(Reference { 253 refs.push(Reference {
253 file_range, 254 file_range,
254 kind, 255 kind,
255 access: reference_access(&d, &name_ref), 256 access: reference_access(&def, &name_ref),
256 }); 257 });
257 } 258 }
259 Some(NameRefClass::FieldShorthand { local, field }) => {
260 match self {
261 Definition::StructField(_) if &field == self => refs.push(Reference {
262 file_range: sema.original_range(name_ref.syntax()),
263 kind: ReferenceKind::StructFieldShorthandForField,
264 access: reference_access(&field, &name_ref),
265 }),
266 Definition::Local(l) if &local == l => refs.push(Reference {
267 file_range: sema.original_range(name_ref.syntax()),
268 kind: ReferenceKind::StructFieldShorthandForLocal,
269 access: reference_access(&Definition::Local(local), &name_ref),
270 }),
271
272 _ => {} // not a usage
273 };
274 }
275 _ => {} // not a usage
258 } 276 }
259 } 277 }
260 } 278 }
diff --git a/crates/ra_ide_db/src/symbol_index.rs b/crates/ra_ide_db/src/symbol_index.rs
index e6b3126b6..884359ee3 100644
--- a/crates/ra_ide_db/src/symbol_index.rs
+++ b/crates/ra_ide_db/src/symbol_index.rs
@@ -163,7 +163,7 @@ pub fn index_resolve(db: &RootDatabase, name_ref: &ast::NameRef) -> Vec<FileSymb
163#[derive(Default)] 163#[derive(Default)]
164pub struct SymbolIndex { 164pub struct SymbolIndex {
165 symbols: Vec<FileSymbol>, 165 symbols: Vec<FileSymbol>,
166 map: fst::Map, 166 map: fst::Map<Vec<u8>>,
167} 167}
168 168
169impl fmt::Debug for SymbolIndex { 169impl fmt::Debug for SymbolIndex {
@@ -221,7 +221,7 @@ impl SymbolIndex {
221 builder.insert(key, value).unwrap(); 221 builder.insert(key, value).unwrap();
222 } 222 }
223 223
224 let map = fst::Map::from_bytes(builder.into_inner().unwrap()).unwrap(); 224 let map = fst::Map::new(builder.into_inner().unwrap()).unwrap();
225 SymbolIndex { symbols, map } 225 SymbolIndex { symbols, map }
226 } 226 }
227 227
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs
index 2c6ae5658..43afe24cc 100644
--- a/crates/ra_mbe/src/lib.rs
+++ b/crates/ra_mbe/src/lib.rs
@@ -31,7 +31,8 @@ pub enum ExpandError {
31} 31}
32 32
33pub use crate::syntax_bridge::{ 33pub use crate::syntax_bridge::{
34 ast_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node, TokenMap, 34 ast_to_token_tree, parse_to_token_tree, syntax_node_to_token_tree, token_tree_to_syntax_node,
35 TokenMap,
35}; 36};
36 37
37/// This struct contains AST for a single `macro_rules` definition. What might 38/// This struct contains AST for a single `macro_rules` definition. What might
diff --git a/crates/ra_mbe/src/mbe_expander/matcher.rs b/crates/ra_mbe/src/mbe_expander/matcher.rs
index ffba03898..49c53183a 100644
--- a/crates/ra_mbe/src/mbe_expander/matcher.rs
+++ b/crates/ra_mbe/src/mbe_expander/matcher.rs
@@ -247,6 +247,7 @@ impl<'a> TtIter<'a> {
247 ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind); 247 ra_parser::parse_fragment(&mut src, &mut sink, fragment_kind);
248 248
249 if !sink.cursor.is_root() || sink.error { 249 if !sink.cursor.is_root() || sink.error {
250 // FIXME better recovery in this case would help completion inside macros immensely
250 return Err(()); 251 return Err(());
251 } 252 }
252 253
@@ -375,7 +376,8 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> Result<Option<Fragment>, Ex
375 return Ok(Some(Fragment::Tokens(tt))); 376 return Ok(Some(Fragment::Tokens(tt)));
376 } 377 }
377 }; 378 };
378 let tt = input.expect_fragment(fragment).map_err(|()| err!())?; 379 let tt =
380 input.expect_fragment(fragment).map_err(|()| err!("fragment did not parse as {}", kind))?;
379 let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) }; 381 let fragment = if kind == "expr" { Fragment::Ast(tt) } else { Fragment::Tokens(tt) };
380 Ok(Some(fragment)) 382 Ok(Some(fragment))
381} 383}
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index fb9fa5314..fcb73fbc7 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -2,8 +2,10 @@
2 2
3use ra_parser::{FragmentKind, ParseError, TreeSink}; 3use ra_parser::{FragmentKind, ParseError, TreeSink};
4use ra_syntax::{ 4use ra_syntax::{
5 ast, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind, SyntaxKind::*, SyntaxNode, 5 ast::{self, make::tokens::doc_comment},
6 SyntaxTreeBuilder, TextRange, TextUnit, T, 6 tokenize, AstToken, NodeOrToken, Parse, SmolStr, SyntaxKind,
7 SyntaxKind::*,
8 SyntaxNode, SyntaxTreeBuilder, TextRange, TextUnit, Token, T,
7}; 9};
8use rustc_hash::FxHashMap; 10use rustc_hash::FxHashMap;
9use std::iter::successors; 11use std::iter::successors;
@@ -48,9 +50,11 @@ pub fn ast_to_token_tree(ast: &impl ast::AstNode) -> Option<(tt::Subtree, TokenM
48/// will consume). 50/// will consume).
49pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> { 51pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> Option<(tt::Subtree, TokenMap)> {
50 let global_offset = node.text_range().start(); 52 let global_offset = node.text_range().start();
51 let mut c = Convertor { map: TokenMap::default(), global_offset, next_id: 0 }; 53 let mut c = Convertor {
54 id_alloc: { TokenIdAlloc { map: TokenMap::default(), global_offset, next_id: 0 } },
55 };
52 let subtree = c.go(node)?; 56 let subtree = c.go(node)?;
53 Some((subtree, c.map)) 57 Some((subtree, c.id_alloc.map))
54} 58}
55 59
56// The following items are what `rustc` macro can be parsed into : 60// The following items are what `rustc` macro can be parsed into :
@@ -89,6 +93,28 @@ pub fn token_tree_to_syntax_node(
89 Ok((parse, range_map)) 93 Ok((parse, range_map))
90} 94}
91 95
96/// Convert a string to a `TokenTree`
97pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
98 let (tokens, errors) = tokenize(text);
99 if !errors.is_empty() {
100 return None;
101 }
102
103 let mut conv = RawConvertor {
104 text,
105 offset: TextUnit::default(),
106 inner: tokens.iter(),
107 id_alloc: TokenIdAlloc {
108 map: Default::default(),
109 global_offset: TextUnit::default(),
110 next_id: 0,
111 },
112 };
113
114 let subtree = conv.go()?;
115 Some((subtree, conv.id_alloc.map))
116}
117
92impl TokenMap { 118impl TokenMap {
93 pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> { 119 pub fn token_by_range(&self, relative_range: TextRange) -> Option<tt::TokenId> {
94 let &(token_id, _) = self.entries.iter().find(|(_, range)| match range { 120 let &(token_id, _) = self.entries.iter().find(|(_, range)| match range {
@@ -118,6 +144,14 @@ impl TokenMap {
118 self.entries 144 self.entries
119 .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range))); 145 .push((token_id, TokenTextRange::Delimiter(open_relative_range, close_relative_range)));
120 } 146 }
147
148 fn update_close_delim(&mut self, token_id: tt::TokenId, close_relative_range: TextRange) {
149 if let Some(entry) = self.entries.iter_mut().find(|(tid, _)| *tid == token_id) {
150 if let TokenTextRange::Delimiter(dim, _) = entry.1 {
151 entry.1 = TokenTextRange::Delimiter(dim, close_relative_range);
152 }
153 }
154 }
121} 155}
122 156
123/// Returns the textual content of a doc comment block as a quoted string 157/// Returns the textual content of a doc comment block as a quoted string
@@ -188,12 +222,161 @@ fn convert_doc_comment(token: &ra_syntax::SyntaxToken) -> Option<Vec<tt::TokenTr
188 } 222 }
189} 223}
190 224
191struct Convertor { 225struct TokenIdAlloc {
192 map: TokenMap, 226 map: TokenMap,
193 global_offset: TextUnit, 227 global_offset: TextUnit,
194 next_id: u32, 228 next_id: u32,
195} 229}
196 230
231impl TokenIdAlloc {
232 fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
233 let relative_range = absolute_range - self.global_offset;
234 let token_id = tt::TokenId(self.next_id);
235 self.next_id += 1;
236 self.map.insert(token_id, relative_range);
237 token_id
238 }
239
240 fn delim(&mut self, open_abs_range: TextRange, close_abs_range: TextRange) -> tt::TokenId {
241 let open_relative_range = open_abs_range - self.global_offset;
242 let close_relative_range = close_abs_range - self.global_offset;
243 let token_id = tt::TokenId(self.next_id);
244 self.next_id += 1;
245
246 self.map.insert_delim(token_id, open_relative_range, close_relative_range);
247 token_id
248 }
249
250 fn open_delim(&mut self, open_abs_range: TextRange) -> tt::TokenId {
251 let token_id = tt::TokenId(self.next_id);
252 self.next_id += 1;
253 self.map.insert_delim(token_id, open_abs_range, open_abs_range);
254 token_id
255 }
256
257 fn close_delim(&mut self, id: tt::TokenId, close_abs_range: TextRange) {
258 self.map.update_close_delim(id, close_abs_range);
259 }
260}
261
262/// A raw token (straight from the lexer) convertor
263struct RawConvertor<'a> {
264 text: &'a str,
265 offset: TextUnit,
266 id_alloc: TokenIdAlloc,
267 inner: std::slice::Iter<'a, Token>,
268}
269
270impl RawConvertor<'_> {
271 fn go(&mut self) -> Option<tt::Subtree> {
272 let mut subtree = tt::Subtree::default();
273 subtree.delimiter = None;
274 while self.peek().is_some() {
275 self.collect_leaf(&mut subtree.token_trees);
276 }
277 if subtree.token_trees.is_empty() {
278 return None;
279 }
280 if subtree.token_trees.len() == 1 {
281 if let tt::TokenTree::Subtree(first) = &subtree.token_trees[0] {
282 return Some(first.clone());
283 }
284 }
285 Some(subtree)
286 }
287
288 fn bump(&mut self) -> Option<(Token, TextRange)> {
289 let token = self.inner.next()?;
290 let range = TextRange::offset_len(self.offset, token.len);
291 self.offset += token.len;
292 Some((*token, range))
293 }
294
295 fn peek(&self) -> Option<Token> {
296 self.inner.as_slice().get(0).cloned()
297 }
298
299 fn collect_leaf(&mut self, result: &mut Vec<tt::TokenTree>) {
300 let (token, range) = match self.bump() {
301 None => return,
302 Some(it) => it,
303 };
304
305 let k: SyntaxKind = token.kind;
306 if k == COMMENT {
307 let node = doc_comment(&self.text[range]);
308 if let Some(tokens) = convert_doc_comment(&node) {
309 result.extend(tokens);
310 }
311 return;
312 }
313
314 result.push(if k.is_punct() {
315 let delim = match k {
316 T!['('] => Some((tt::DelimiterKind::Parenthesis, T![')'])),
317 T!['{'] => Some((tt::DelimiterKind::Brace, T!['}'])),
318 T!['['] => Some((tt::DelimiterKind::Bracket, T![']'])),
319 _ => None,
320 };
321
322 if let Some((kind, closed)) = delim {
323 let mut subtree = tt::Subtree::default();
324 let id = self.id_alloc.open_delim(range);
325 subtree.delimiter = Some(tt::Delimiter { kind, id });
326
327 while self.peek().map(|it| it.kind != closed).unwrap_or(false) {
328 self.collect_leaf(&mut subtree.token_trees);
329 }
330 let last_range = match self.bump() {
331 None => return,
332 Some(it) => it.1,
333 };
334 self.id_alloc.close_delim(id, last_range);
335 subtree.into()
336 } else {
337 let spacing = match self.peek() {
338 Some(next)
339 if next.kind.is_trivia()
340 || next.kind == T!['[']
341 || next.kind == T!['{']
342 || next.kind == T!['('] =>
343 {
344 tt::Spacing::Alone
345 }
346 Some(next) if next.kind.is_punct() => tt::Spacing::Joint,
347 _ => tt::Spacing::Alone,
348 };
349 let char =
350 self.text[range].chars().next().expect("Token from lexer must be single char");
351
352 tt::Leaf::from(tt::Punct { char, spacing, id: self.id_alloc.alloc(range) }).into()
353 }
354 } else {
355 macro_rules! make_leaf {
356 ($i:ident) => {
357 tt::$i { id: self.id_alloc.alloc(range), text: self.text[range].into() }.into()
358 };
359 }
360 let leaf: tt::Leaf = match k {
361 T![true] | T![false] => make_leaf!(Literal),
362 IDENT | LIFETIME => make_leaf!(Ident),
363 k if k.is_keyword() => make_leaf!(Ident),
364 k if k.is_literal() => make_leaf!(Literal),
365 _ => return,
366 };
367
368 leaf.into()
369 });
370 }
371}
372
373// FIXME: There is some duplicated logic between RawConvertor and Convertor.
374// It would be nice to refactor to convert SyntaxNode into ra_parser::Token and thus
375// use RawConvertor directly, but performance-wise that may not be a good idea.
376struct Convertor {
377 id_alloc: TokenIdAlloc,
378}
379
197impl Convertor { 380impl Convertor {
198 fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> { 381 fn go(&mut self, tt: &SyntaxNode) -> Option<tt::Subtree> {
199 // This tree is empty 382 // This tree is empty
@@ -236,7 +419,7 @@ impl Convertor {
236 }; 419 };
237 let delimiter = delimiter_kind.map(|kind| tt::Delimiter { 420 let delimiter = delimiter_kind.map(|kind| tt::Delimiter {
238 kind, 421 kind,
239 id: self.alloc_delim(first_child.text_range(), last_child.text_range()), 422 id: self.id_alloc.delim(first_child.text_range(), last_child.text_range()),
240 }); 423 });
241 424
242 let mut token_trees = Vec::new(); 425 let mut token_trees = Vec::new();
@@ -273,7 +456,7 @@ impl Convertor {
273 tt::Leaf::from(tt::Punct { 456 tt::Leaf::from(tt::Punct {
274 char, 457 char,
275 spacing, 458 spacing,
276 id: self.alloc(token.text_range()), 459 id: self.id_alloc.alloc(token.text_range()),
277 }) 460 })
278 .into(), 461 .into(),
279 ); 462 );
@@ -282,7 +465,7 @@ impl Convertor {
282 macro_rules! make_leaf { 465 macro_rules! make_leaf {
283 ($i:ident) => { 466 ($i:ident) => {
284 tt::$i { 467 tt::$i {
285 id: self.alloc(token.text_range()), 468 id: self.id_alloc.alloc(token.text_range()),
286 text: token.text().clone(), 469 text: token.text().clone(),
287 } 470 }
288 .into() 471 .into()
@@ -313,28 +496,6 @@ impl Convertor {
313 let res = tt::Subtree { delimiter, token_trees }; 496 let res = tt::Subtree { delimiter, token_trees };
314 Some(res) 497 Some(res)
315 } 498 }
316
317 fn alloc(&mut self, absolute_range: TextRange) -> tt::TokenId {
318 let relative_range = absolute_range - self.global_offset;
319 let token_id = tt::TokenId(self.next_id);
320 self.next_id += 1;
321 self.map.insert(token_id, relative_range);
322 token_id
323 }
324
325 fn alloc_delim(
326 &mut self,
327 open_abs_range: TextRange,
328 close_abs_range: TextRange,
329 ) -> tt::TokenId {
330 let open_relative_range = open_abs_range - self.global_offset;
331 let close_relative_range = close_abs_range - self.global_offset;
332 let token_id = tt::TokenId(self.next_id);
333 self.next_id += 1;
334
335 self.map.insert_delim(token_id, open_relative_range, close_relative_range);
336 token_id
337 }
338} 499}
339 500
340struct TtTreeSink<'a> { 501struct TtTreeSink<'a> {
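A usage sketch (not part of the diff) for the new `parse_to_token_tree` entry point, relying only on the signature introduced above:

    // Lex a fragment straight to a `tt::Subtree`, skipping the `SyntaxNode`
    // round-trip that `ast_to_token_tree` requires.
    fn tokenize_fragment() {
        let (subtree, token_map) =
            ra_mbe::parse_to_token_tree("foo + bar").expect("input lexes without errors");
        assert!(!subtree.token_trees.is_empty());
        // `token_map` records, for every allocated `tt::TokenId`, its range in the input text.
        let _ = token_map;
    }
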
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 066ce150b..6d5d1e9e6 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -1499,12 +1499,20 @@ impl MacroFixture {
1499 } 1499 }
1500} 1500}
1501 1501
1502pub(crate) fn parse_macro(macro_definition: &str) -> MacroFixture { 1502pub(crate) fn parse_macro(ra_fixture: &str) -> MacroFixture {
1503 let source_file = ast::SourceFile::parse(macro_definition).ok().unwrap(); 1503 let source_file = ast::SourceFile::parse(ra_fixture).ok().unwrap();
1504 let macro_definition = 1504 let macro_definition =
1505 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); 1505 source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
1506 1506
1507 let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap(); 1507 let (definition_tt, _) = ast_to_token_tree(&macro_definition.token_tree().unwrap()).unwrap();
1508
1509 let parsed = parse_to_token_tree(
1510 &ra_fixture[macro_definition.token_tree().unwrap().syntax().text_range()],
1511 )
1512 .unwrap()
1513 .0;
1514 assert_eq!(definition_tt, parsed);
1515
1508 let rules = MacroRules::parse(&definition_tt).unwrap(); 1516 let rules = MacroRules::parse(&definition_tt).unwrap();
1509 MacroFixture { rules } 1517 MacroFixture { rules }
1510} 1518}
diff --git a/crates/ra_parser/src/grammar/expressions.rs b/crates/ra_parser/src/grammar/expressions.rs
index 4163a2cf5..0c170ac5e 100644
--- a/crates/ra_parser/src/grammar/expressions.rs
+++ b/crates/ra_parser/src/grammar/expressions.rs
@@ -278,7 +278,7 @@ fn current_op(p: &Parser) -> (u8, SyntaxKind) {
278} 278}
279 279
280// Parses expression with binding power of at least bp. 280// Parses expression with binding power of at least bp.
281fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) { 281fn expr_bp(p: &mut Parser, mut r: Restrictions, bp: u8) -> (Option<CompletedMarker>, BlockLike) {
282 let mut lhs = match lhs(p, r) { 282 let mut lhs = match lhs(p, r) {
283 Some((lhs, blocklike)) => { 283 Some((lhs, blocklike)) => {
284 // test stmt_bin_expr_ambiguity 284 // test stmt_bin_expr_ambiguity
@@ -311,6 +311,12 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
311 let m = lhs.precede(p); 311 let m = lhs.precede(p);
312 p.bump(op); 312 p.bump(op);
313 313
314 // test binop_resets_statementness
315 // fn foo() {
316 // v = {1}&2;
317 // }
318 r = Restrictions { prefer_stmt: false, ..r };
319
314 if is_range { 320 if is_range {
315 // test postfix_range 321 // test postfix_range
316 // fn foo() { 322 // fn foo() {
@@ -327,7 +333,7 @@ fn expr_bp(p: &mut Parser, r: Restrictions, bp: u8) -> (Option<CompletedMarker>,
327 } 333 }
328 } 334 }
329 335
330 expr_bp(p, r, op_bp + 1); 336 expr_bp(p, Restrictions { prefer_stmt: false, ..r }, op_bp + 1);
331 lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR }); 337 lhs = m.complete(p, if is_range { RANGE_EXPR } else { BIN_EXPR });
332 } 338 }
333 (Some(lhs), BlockLike::NotBlock) 339 (Some(lhs), BlockLike::NotBlock)
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs
index 6853a4794..9e167db96 100644
--- a/crates/ra_prof/src/lib.rs
+++ b/crates/ra_prof/src/lib.rs
@@ -88,7 +88,7 @@ pub type Label = &'static str;
88pub fn profile(label: Label) -> Profiler { 88pub fn profile(label: Label) -> Profiler {
89 assert!(!label.is_empty()); 89 assert!(!label.is_empty());
90 if !PROFILING_ENABLED.load(Ordering::Relaxed) { 90 if !PROFILING_ENABLED.load(Ordering::Relaxed) {
91 return Profiler { label: None }; 91 return Profiler { label: None, detail: None };
92 } 92 }
93 93
94 PROFILE_STACK.with(|stack| { 94 PROFILE_STACK.with(|stack| {
@@ -101,15 +101,15 @@ pub fn profile(label: Label) -> Profiler {
101 }; 101 };
102 } 102 }
103 if stack.starts.len() > stack.filter_data.depth { 103 if stack.starts.len() > stack.filter_data.depth {
104 return Profiler { label: None }; 104 return Profiler { label: None, detail: None };
105 } 105 }
106 let allowed = &stack.filter_data.allowed; 106 let allowed = &stack.filter_data.allowed;
107 if stack.starts.is_empty() && !allowed.is_empty() && !allowed.contains(label) { 107 if stack.starts.is_empty() && !allowed.is_empty() && !allowed.contains(label) {
108 return Profiler { label: None }; 108 return Profiler { label: None, detail: None };
109 } 109 }
110 110
111 stack.starts.push(Instant::now()); 111 stack.starts.push(Instant::now());
112 Profiler { label: Some(label) } 112 Profiler { label: Some(label), detail: None }
113 }) 113 })
114} 114}
115 115
@@ -130,6 +130,16 @@ pub fn print_time(label: Label) -> impl Drop {
130 130
131pub struct Profiler { 131pub struct Profiler {
132 label: Option<Label>, 132 label: Option<Label>,
133 detail: Option<String>,
134}
135
136impl Profiler {
137 pub fn detail(mut self, detail: impl FnOnce() -> String) -> Profiler {
138 if self.label.is_some() {
139 self.detail = Some(detail())
140 }
141 self
142 }
133} 143}
134 144
135pub struct Filter { 145pub struct Filter {
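A short sketch (not part of the diff) of how the new `detail` hook is intended to be used; the closure only runs when the profiler actually recorded the label:

    fn expand_macro_profiled(macro_name: &str) {
        // The detail string is attached to the entry and printed after the label,
        // e.g. `    2ms - expand_macro @ name=vec`.
        let _p = ra_prof::profile("expand_macro").detail(|| format!("name={}", macro_name));
        // ... actual expansion work ...
    }
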
@@ -183,6 +193,7 @@ struct Message {
183 level: usize, 193 level: usize,
184 duration: Duration, 194 duration: Duration,
185 label: Label, 195 label: Label,
196 detail: Option<String>,
186} 197}
187 198
188impl ProfileStack { 199impl ProfileStack {
@@ -208,13 +219,13 @@ thread_local!(static PROFILE_STACK: RefCell<ProfileStack> = RefCell::new(Profile
208impl Drop for Profiler { 219impl Drop for Profiler {
209 fn drop(&mut self) { 220 fn drop(&mut self) {
210 match self { 221 match self {
211 Profiler { label: Some(label) } => { 222 Profiler { label: Some(label), detail } => {
212 PROFILE_STACK.with(|stack| { 223 PROFILE_STACK.with(|stack| {
213 let mut stack = stack.borrow_mut(); 224 let mut stack = stack.borrow_mut();
214 let start = stack.starts.pop().unwrap(); 225 let start = stack.starts.pop().unwrap();
215 let duration = start.elapsed(); 226 let duration = start.elapsed();
216 let level = stack.starts.len(); 227 let level = stack.starts.len();
217 stack.messages.push(Message { level, duration, label }); 228 stack.messages.push(Message { level, duration, label, detail: detail.take() });
218 if level == 0 { 229 if level == 0 {
219 let stdout = stderr(); 230 let stdout = stderr();
220 let longer_than = stack.filter_data.longer_than; 231 let longer_than = stack.filter_data.longer_than;
@@ -228,7 +239,7 @@ impl Drop for Profiler {
228 } 239 }
229 }); 240 });
230 } 241 }
231 Profiler { label: None } => (), 242 Profiler { label: None, .. } => (),
232 } 243 }
233 } 244 }
234} 245}
@@ -251,8 +262,16 @@ fn print_for_idx(
251) { 262) {
252 let current = &msgs[current_idx]; 263 let current = &msgs[current_idx];
253 let current_indent = " ".repeat(current.level); 264 let current_indent = " ".repeat(current.level);
254 writeln!(out, "{}{:5}ms - {}", current_indent, current.duration.as_millis(), current.label) 265 let detail = current.detail.as_ref().map(|it| format!(" @ {}", it)).unwrap_or_default();
255 .expect("printing profiling info"); 266 writeln!(
267 out,
268 "{}{:5}ms - {}{}",
269 current_indent,
270 current.duration.as_millis(),
271 current.label,
272 detail,
273 )
274 .expect("printing profiling info");
256 275
257 let longer_than_millis = longer_than.as_millis(); 276 let longer_than_millis = longer_than.as_millis();
258 let children_indices = &children_map[current_idx]; 277 let children_indices = &children_map[current_idx];
@@ -417,9 +436,9 @@ mod tests {
417 fn test_longer_than() { 436 fn test_longer_than() {
418 let mut result = vec![]; 437 let mut result = vec![];
419 let msgs = vec![ 438 let msgs = vec![
420 Message { level: 1, duration: Duration::from_nanos(3), label: "bar" }, 439 Message { level: 1, duration: Duration::from_nanos(3), label: "bar", detail: None },
421 Message { level: 1, duration: Duration::from_nanos(2), label: "bar" }, 440 Message { level: 1, duration: Duration::from_nanos(2), label: "bar", detail: None },
422 Message { level: 0, duration: Duration::from_millis(1), label: "foo" }, 441 Message { level: 0, duration: Duration::from_millis(1), label: "foo", detail: None },
423 ]; 442 ];
424 print(&msgs, Duration::from_millis(0), &mut result); 443 print(&msgs, Duration::from_millis(0), &mut result);
425 // The calls to `bar` are so short that they'll be rounded to 0ms and should get collapsed 444 // The calls to `bar` are so short that they'll be rounded to 0ms and should get collapsed
@@ -434,8 +453,8 @@ mod tests {
434 fn test_unaccounted_for_topmost() { 453 fn test_unaccounted_for_topmost() {
435 let mut result = vec![]; 454 let mut result = vec![];
436 let msgs = vec![ 455 let msgs = vec![
437 Message { level: 1, duration: Duration::from_millis(2), label: "bar" }, 456 Message { level: 1, duration: Duration::from_millis(2), label: "bar", detail: None },
438 Message { level: 0, duration: Duration::from_millis(5), label: "foo" }, 457 Message { level: 0, duration: Duration::from_millis(5), label: "foo", detail: None },
439 ]; 458 ];
440 print(&msgs, Duration::from_millis(0), &mut result); 459 print(&msgs, Duration::from_millis(0), &mut result);
441 assert_eq!( 460 assert_eq!(
@@ -453,11 +472,11 @@ mod tests {
453 fn test_unaccounted_for_multiple_levels() { 472 fn test_unaccounted_for_multiple_levels() {
454 let mut result = vec![]; 473 let mut result = vec![];
455 let msgs = vec![ 474 let msgs = vec![
456 Message { level: 2, duration: Duration::from_millis(3), label: "baz" }, 475 Message { level: 2, duration: Duration::from_millis(3), label: "baz", detail: None },
457 Message { level: 1, duration: Duration::from_millis(5), label: "bar" }, 476 Message { level: 1, duration: Duration::from_millis(5), label: "bar", detail: None },
458 Message { level: 2, duration: Duration::from_millis(2), label: "baz" }, 477 Message { level: 2, duration: Duration::from_millis(2), label: "baz", detail: None },
459 Message { level: 1, duration: Duration::from_millis(4), label: "bar" }, 478 Message { level: 1, duration: Duration::from_millis(4), label: "bar", detail: None },
460 Message { level: 0, duration: Duration::from_millis(9), label: "foo" }, 479 Message { level: 0, duration: Duration::from_millis(9), label: "foo", detail: None },
461 ]; 480 ];
462 print(&msgs, Duration::from_millis(0), &mut result); 481 print(&msgs, Duration::from_millis(0), &mut result);
463 assert_eq!( 482 assert_eq!(
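The `Profiler::detail` builder added above attaches a lazily computed string that is printed after the label. A minimal usage sketch, assuming only the `profile` entry point and `.detail` shown in this hunk (the function name and argument are made up):

    use ra_prof::profile;

    fn process_file(path: &str) {
        // The closure runs only when this frame actually got a label
        // (profiling enabled, depth/allow filters passed), so building the
        // detail string costs nothing in the common case.
        let _p = profile("process_file").detail(|| path.to_string());
        // ... the work being measured ...
    }

In the printed report the line then reads e.g. `    2ms - process_file @ src/lib.rs`, per the ` @ {}` formatting added to `print_for_idx`.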
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index bcf12460d..a6274709d 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -14,7 +14,7 @@ use std::{
14 14
15use anyhow::{bail, Context, Result}; 15use anyhow::{bail, Context, Result};
16use ra_cfg::CfgOptions; 16use ra_cfg::CfgOptions;
17use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId}; 17use ra_db::{CrateGraph, CrateName, Edition, Env, ExternSource, ExternSourceId, FileId};
18use rustc_hash::FxHashMap; 18use rustc_hash::FxHashMap;
19use serde_json::from_reader; 19use serde_json::from_reader;
20 20
@@ -162,10 +162,10 @@ impl ProjectWorkspace {
162 pub fn to_crate_graph( 162 pub fn to_crate_graph(
163 &self, 163 &self,
164 default_cfg_options: &CfgOptions, 164 default_cfg_options: &CfgOptions,
165 outdirs: &FxHashMap<String, (ExternSourceId, String)>,
165 load: &mut dyn FnMut(&Path) -> Option<FileId>, 166 load: &mut dyn FnMut(&Path) -> Option<FileId>,
166 ) -> (CrateGraph, FxHashMap<CrateId, String>) { 167 ) -> CrateGraph {
167 let mut crate_graph = CrateGraph::default(); 168 let mut crate_graph = CrateGraph::default();
168 let mut names = FxHashMap::default();
169 match self { 169 match self {
170 ProjectWorkspace::Json { project } => { 170 ProjectWorkspace::Json { project } => {
171 let mut crates = FxHashMap::default(); 171 let mut crates = FxHashMap::default();
@@ -186,13 +186,18 @@ impl ProjectWorkspace {
186 } 186 }
187 opts 187 opts
188 }; 188 };
189
190 // FIXME: no crate name in the json definition, so we cannot add OUT_DIR to the env
189 crates.insert( 191 crates.insert(
190 crate_id, 192 crate_id,
191 crate_graph.add_crate_root( 193 crate_graph.add_crate_root(
192 file_id, 194 file_id,
193 edition, 195 edition,
196 // FIXME: json definitions should also store the crate name
197 None,
194 cfg_options, 198 cfg_options,
195 Env::default(), 199 Env::default(),
200 Default::default(),
196 ), 201 ),
197 ); 202 );
198 } 203 }
@@ -230,14 +235,22 @@ impl ProjectWorkspace {
230 opts 235 opts
231 }; 236 };
232 237
238 let mut env = Env::default();
239 let mut extern_source = ExternSource::default();
240 if let Some((id, path)) = outdirs.get(krate.name(&sysroot)) {
241 env.set("OUT_DIR", path.clone());
242 extern_source.set_extern_path(&path, *id);
243 }
244
233 let crate_id = crate_graph.add_crate_root( 245 let crate_id = crate_graph.add_crate_root(
234 file_id, 246 file_id,
235 Edition::Edition2018, 247 Edition::Edition2018,
248 Some(krate.name(&sysroot).to_string()),
236 cfg_options, 249 cfg_options,
237 Env::default(), 250 env,
251 extern_source,
238 ); 252 );
239 sysroot_crates.insert(krate, crate_id); 253 sysroot_crates.insert(krate, crate_id);
240 names.insert(crate_id, krate.name(&sysroot).to_string());
241 } 254 }
242 } 255 }
243 for from in sysroot.crates() { 256 for from in sysroot.crates() {
@@ -274,13 +287,20 @@ impl ProjectWorkspace {
274 opts.insert_features(pkg.features(&cargo).iter().map(Into::into)); 287 opts.insert_features(pkg.features(&cargo).iter().map(Into::into));
275 opts 288 opts
276 }; 289 };
290 let mut env = Env::default();
291 let mut extern_source = ExternSource::default();
292 if let Some((id, path)) = outdirs.get(pkg.name(&cargo)) {
293 env.set("OUT_DIR", path.clone());
294 extern_source.set_extern_path(&path, *id);
295 }
277 let crate_id = crate_graph.add_crate_root( 296 let crate_id = crate_graph.add_crate_root(
278 file_id, 297 file_id,
279 edition, 298 edition,
299 Some(pkg.name(&cargo).to_string()),
280 cfg_options, 300 cfg_options,
281 Env::default(), 301 env,
302 extern_source,
282 ); 303 );
283 names.insert(crate_id, pkg.name(&cargo).to_string());
284 if tgt.kind(&cargo) == TargetKind::Lib { 304 if tgt.kind(&cargo) == TargetKind::Lib {
285 lib_tgt = Some(crate_id); 305 lib_tgt = Some(crate_id);
286 pkg_to_lib_crate.insert(pkg, crate_id); 306 pkg_to_lib_crate.insert(pkg, crate_id);
@@ -381,7 +401,7 @@ impl ProjectWorkspace {
381 } 401 }
382 } 402 }
383 } 403 }
384 (crate_graph, names) 404 crate_graph
385 } 405 }
386 406
387 pub fn workspace_root_for(&self, path: &Path) -> Option<&Path> { 407 pub fn workspace_root_for(&self, path: &Path) -> Option<&Path> {
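With the new signature, `to_crate_graph` takes an `outdirs` map from package name to `(ExternSourceId, String)` and returns only the `CrateGraph` (crate names now travel on the graph itself). A sketch of a call site under those assumptions; the helper and the `"foo"`/`"/out/foo"` entry are illustrative, and the real wiring lives in `world.rs` and `load_cargo.rs` further down:

    use ra_cfg::CfgOptions;
    use ra_db::{CrateGraph, ExternSourceId, FileId};
    use ra_project_model::ProjectWorkspace;
    use ra_vfs::Vfs;
    use rustc_hash::FxHashMap;

    fn build_graph(ws: &ProjectWorkspace, cfg: &CfgOptions, vfs: &mut Vfs) -> CrateGraph {
        // Hypothetical build-script output dir for a package named "foo";
        // it becomes OUT_DIR in that crate's env and an extern source root.
        let mut outdirs = FxHashMap::default();
        outdirs.insert("foo".to_string(), (ExternSourceId(0), "/out/foo".to_string()));

        ws.to_crate_graph(cfg, &outdirs, &mut |path| vfs.load(path).map(|f| FileId(f.0)))
    }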
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 53d6fa562..ae8829807 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -267,6 +267,12 @@ pub mod tokens {
267 sf.syntax().first_child_or_token().unwrap().into_token().unwrap() 267 sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
268 } 268 }
269 269
270 pub fn doc_comment(text: &str) -> SyntaxToken {
271 assert!(!text.trim().is_empty());
272 let sf = SourceFile::parse(text).ok().unwrap();
273 sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
274 }
275
270 pub fn literal(text: &str) -> SyntaxToken { 276 pub fn literal(text: &str) -> SyntaxToken {
271 assert_eq!(text.trim(), text); 277 assert_eq!(text.trim(), text);
272 let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text)); 278 let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
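The new `doc_comment` helper simply parses the given text and hands back its first token, asserting the text is non-empty after trimming. A tiny usage sketch, assuming the usual `ra_syntax::ast::make` module path and `SyntaxToken` re-export:

    use ra_syntax::{ast::make, SyntaxToken};

    fn new_doc_comment_token() -> SyntaxToken {
        // Parses the text as a source file and returns its first token.
        make::tokens::doc_comment("/// A freshly made doc comment")
    }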
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs b/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs
new file mode 100644
index 000000000..05acc30f1
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.rs
@@ -0,0 +1,3 @@
1fn foo() {
2 v = {1}&2;
3}
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.txt b/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.txt
new file mode 100644
index 000000000..d568a1d45
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0158_binop_resets_statementness.txt
@@ -0,0 +1,38 @@
1SOURCE_FILE@[0; 28)
2 FN_DEF@[0; 27)
3 FN_KW@[0; 2) "fn"
4 WHITESPACE@[2; 3) " "
5 NAME@[3; 6)
6 IDENT@[3; 6) "foo"
7 PARAM_LIST@[6; 8)
8 L_PAREN@[6; 7) "("
9 R_PAREN@[7; 8) ")"
10 WHITESPACE@[8; 9) " "
11 BLOCK_EXPR@[9; 27)
12 BLOCK@[9; 27)
13 L_CURLY@[9; 10) "{"
14 WHITESPACE@[10; 15) "\n "
15 EXPR_STMT@[15; 25)
16 BIN_EXPR@[15; 24)
17 PATH_EXPR@[15; 16)
18 PATH@[15; 16)
19 PATH_SEGMENT@[15; 16)
20 NAME_REF@[15; 16)
21 IDENT@[15; 16) "v"
22 WHITESPACE@[16; 17) " "
23 EQ@[17; 18) "="
24 WHITESPACE@[18; 19) " "
25 BIN_EXPR@[19; 24)
26 BLOCK_EXPR@[19; 22)
27 BLOCK@[19; 22)
28 L_CURLY@[19; 20) "{"
29 LITERAL@[20; 21)
30 INT_NUMBER@[20; 21) "1"
31 R_CURLY@[21; 22) "}"
32 AMP@[22; 23) "&"
33 LITERAL@[23; 24)
34 INT_NUMBER@[23; 24) "2"
35 SEMI@[24; 25) ";"
36 WHITESPACE@[25; 26) "\n"
37 R_CURLY@[26; 27) "}"
38 WHITESPACE@[27; 28) "\n"
diff --git a/crates/rust-analyzer/Cargo.toml b/crates/rust-analyzer/Cargo.toml
index b14ebb268..d44f0ef1d 100644
--- a/crates/rust-analyzer/Cargo.toml
+++ b/crates/rust-analyzer/Cargo.toml
@@ -20,7 +20,7 @@ globset = "0.4.4"
20itertools = "0.8.2" 20itertools = "0.8.2"
21jod-thread = "0.1.0" 21jod-thread = "0.1.0"
22log = "0.4.8" 22log = "0.4.8"
23lsp-types = { version = "0.72.0", features = ["proposed"] } 23lsp-types = { version = "0.73.0", features = ["proposed"] }
24parking_lot = "0.10.0" 24parking_lot = "0.10.0"
25pico-args = "0.3.1" 25pico-args = "0.3.1"
26rand = { version = "0.7.3", features = ["small_rng"] } 26rand = { version = "0.7.3", features = ["small_rng"] }
diff --git a/crates/rust-analyzer/src/cli/analysis_bench.rs b/crates/rust-analyzer/src/cli/analysis_bench.rs
index 91855e592..28a23934f 100644
--- a/crates/rust-analyzer/src/cli/analysis_bench.rs
+++ b/crates/rust-analyzer/src/cli/analysis_bench.rs
@@ -12,7 +12,7 @@ use ra_db::{
12 salsa::{Database, Durability}, 12 salsa::{Database, Durability},
13 FileId, SourceDatabaseExt, 13 FileId, SourceDatabaseExt,
14}; 14};
15use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol}; 15use ra_ide::{Analysis, AnalysisChange, AnalysisHost, CompletionOptions, FilePosition, LineCol};
16 16
17use crate::cli::{load_cargo::load_cargo, Verbosity}; 17use crate::cli::{load_cargo::load_cargo, Verbosity};
18 18
@@ -94,17 +94,19 @@ pub fn analysis_bench(verbosity: Verbosity, path: &Path, what: BenchWhat) -> Res
94 .analysis() 94 .analysis()
95 .file_line_index(file_id)? 95 .file_line_index(file_id)?
96 .offset(LineCol { line: pos.line - 1, col_utf16: pos.column }); 96 .offset(LineCol { line: pos.line - 1, col_utf16: pos.column });
97 let file_postion = FilePosition { file_id, offset }; 97 let file_position = FilePosition { file_id, offset };
98 98
99 if is_completion { 99 if is_completion {
100 let res = 100 let options = CompletionOptions::default();
101 do_work(&mut host, file_id, |analysis| analysis.completions(file_postion)); 101 let res = do_work(&mut host, file_id, |analysis| {
102 analysis.completions(file_position, &options)
103 });
102 if verbosity.is_verbose() { 104 if verbosity.is_verbose() {
103 println!("\n{:#?}", res); 105 println!("\n{:#?}", res);
104 } 106 }
105 } else { 107 } else {
106 let res = 108 let res =
107 do_work(&mut host, file_id, |analysis| analysis.goto_definition(file_postion)); 109 do_work(&mut host, file_id, |analysis| analysis.goto_definition(file_position));
108 if verbosity.is_verbose() { 110 if verbosity.is_verbose() {
109 println!("\n{:#?}", res); 111 println!("\n{:#?}", res);
110 } 112 }
diff --git a/crates/rust-analyzer/src/cli/load_cargo.rs b/crates/rust-analyzer/src/cli/load_cargo.rs
index 8cd08ecb6..2ce69c9b3 100644
--- a/crates/rust-analyzer/src/cli/load_cargo.rs
+++ b/crates/rust-analyzer/src/cli/load_cargo.rs
@@ -6,7 +6,7 @@ use std::path::Path;
6use anyhow::Result; 6use anyhow::Result;
7use crossbeam_channel::{unbounded, Receiver}; 7use crossbeam_channel::{unbounded, Receiver};
8use ra_db::{CrateGraph, FileId, SourceRootId}; 8use ra_db::{CrateGraph, FileId, SourceRootId};
9use ra_ide::{AnalysisChange, AnalysisHost, FeatureFlags}; 9use ra_ide::{AnalysisChange, AnalysisHost};
10use ra_project_model::{get_rustc_cfg_options, PackageRoot, ProjectWorkspace}; 10use ra_project_model::{get_rustc_cfg_options, PackageRoot, ProjectWorkspace};
11use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch}; 11use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch};
12use rustc_hash::{FxHashMap, FxHashSet}; 12use rustc_hash::{FxHashMap, FxHashSet};
@@ -52,12 +52,14 @@ pub(crate) fn load_cargo(
52 opts 52 opts
53 }; 53 };
54 54
55 let (crate_graph, _crate_names) = 55 // FIXME: outdirs?
56 ws.to_crate_graph(&default_cfg_options, &mut |path: &Path| { 56 let outdirs = FxHashMap::default();
57 let vfs_file = vfs.load(path); 57
58 log::debug!("vfs file {:?} -> {:?}", path, vfs_file); 58 let crate_graph = ws.to_crate_graph(&default_cfg_options, &outdirs, &mut |path: &Path| {
59 vfs_file.map(vfs_file_to_id) 59 let vfs_file = vfs.load(path);
60 }); 60 log::debug!("vfs file {:?} -> {:?}", path, vfs_file);
61 vfs_file.map(vfs_file_to_id)
62 });
61 log::debug!("crate graph: {:?}", crate_graph); 63 log::debug!("crate graph: {:?}", crate_graph);
62 64
63 let source_roots = roots 65 let source_roots = roots
@@ -83,7 +85,7 @@ pub(crate) fn load(
83 receiver: Receiver<VfsTask>, 85 receiver: Receiver<VfsTask>,
84) -> AnalysisHost { 86) -> AnalysisHost {
85 let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); 87 let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
86 let mut host = AnalysisHost::new(lru_cap, FeatureFlags::default()); 88 let mut host = AnalysisHost::new(lru_cap);
87 let mut analysis_change = AnalysisChange::new(); 89 let mut analysis_change = AnalysisChange::new();
88 analysis_change.set_crate_graph(crate_graph); 90 analysis_change.set_crate_graph(crate_graph);
89 91
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 3314269ec..a8bf29ddf 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -44,6 +44,9 @@ pub struct ServerConfig {
44 /// Fine grained feature flags to disable specific features. 44 /// Fine grained feature flags to disable specific features.
45 pub feature_flags: FxHashMap<String, bool>, 45 pub feature_flags: FxHashMap<String, bool>,
46 46
47 /// Fine grained controls for additional `OUT_DIR` env variables
48 pub additional_out_dirs: FxHashMap<String, String>,
49
47 pub rustfmt_args: Vec<String>, 50 pub rustfmt_args: Vec<String>,
48 51
49 /// Cargo feature configurations. 52 /// Cargo feature configurations.
@@ -64,6 +67,7 @@ impl Default for ServerConfig {
64 cargo_watch_all_targets: true, 67 cargo_watch_all_targets: true,
65 with_sysroot: true, 68 with_sysroot: true,
66 feature_flags: FxHashMap::default(), 69 feature_flags: FxHashMap::default(),
70 additional_out_dirs: FxHashMap::default(),
67 cargo_features: Default::default(), 71 cargo_features: Default::default(),
68 rustfmt_args: Vec::new(), 72 rustfmt_args: Vec::new(),
69 } 73 }
diff --git a/crates/rust-analyzer/src/conv.rs b/crates/rust-analyzer/src/conv.rs
index eeeb33e8f..a2d68c344 100644
--- a/crates/rust-analyzer/src/conv.rs
+++ b/crates/rust-analyzer/src/conv.rs
@@ -3,10 +3,10 @@
3 3
4use lsp_types::{ 4use lsp_types::{
5 self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation, 5 self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation,
6 Location, LocationLink, MarkupContent, MarkupKind, Position, Range, RenameFile, ResourceOp, 6 Location, LocationLink, MarkupContent, MarkupKind, ParameterInformation, ParameterLabel,
7 SemanticTokenModifier, SemanticTokenType, SymbolKind, TextDocumentEdit, TextDocumentIdentifier, 7 Position, Range, RenameFile, ResourceOp, SemanticTokenModifier, SemanticTokenType,
8 TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, 8 SignatureInformation, SymbolKind, TextDocumentEdit, TextDocumentIdentifier, TextDocumentItem,
9 WorkspaceEdit, 9 TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit,
10}; 10};
11use ra_ide::{ 11use ra_ide::{
12 translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition, 12 translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
@@ -150,6 +150,16 @@ impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem {
150 additional_text_edits: Some(additional_text_edits), 150 additional_text_edits: Some(additional_text_edits),
151 documentation: self.documentation().map(|it| it.conv()), 151 documentation: self.documentation().map(|it| it.conv()),
152 deprecated: Some(self.deprecated()), 152 deprecated: Some(self.deprecated()),
153 command: if self.trigger_call_info() {
154 let cmd = lsp_types::Command {
155 title: "triggerParameterHints".into(),
156 command: "editor.action.triggerParameterHints".into(),
157 arguments: None,
158 };
159 Some(cmd)
160 } else {
161 None
162 },
153 ..Default::default() 163 ..Default::default()
154 }; 164 };
155 165
@@ -210,17 +220,20 @@ impl Conv for ra_ide::Documentation {
210 } 220 }
211} 221}
212 222
213impl Conv for ra_ide::FunctionSignature { 223impl ConvWith<bool> for ra_ide::FunctionSignature {
214 type Output = lsp_types::SignatureInformation; 224 type Output = lsp_types::SignatureInformation;
215 fn conv(self) -> Self::Output { 225 fn conv_with(self, concise: bool) -> Self::Output {
216 use lsp_types::{ParameterInformation, ParameterLabel, SignatureInformation}; 226 let (label, documentation, params) = if concise {
217 227 let mut params = self.parameters;
218 let label = self.to_string(); 228 if self.has_self_param {
219 229 params.remove(0);
220 let documentation = self.doc.map(|it| it.conv()); 230 }
231 (params.join(", "), None, params)
232 } else {
233 (self.to_string(), self.doc.map(|it| it.conv()), self.parameters)
234 };
221 235
222 let parameters: Vec<ParameterInformation> = self 236 let parameters: Vec<ParameterInformation> = params
223 .parameters
224 .into_iter() 237 .into_iter()
225 .map(|param| ParameterInformation { 238 .map(|param| ParameterInformation {
226 label: ParameterLabel::Simple(param), 239 label: ParameterLabel::Simple(param),
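The effect of the `concise` flag above: documentation is dropped, the self entry is removed from the parameter list, and the comma-joined parameters become the label (handlers.rs below also shifts `active_parameter` down by one so it still points at the right parameter). A standalone sketch of just that label computation, with hypothetical parameter strings:

    fn concise_label(mut params: Vec<String>, has_self_param: bool) -> String {
        if has_self_param {
            // conv.rs drops the leading "&self" entry in concise mode
            params.remove(0);
        }
        params.join(", ")
    }

    // concise_label(vec!["&self".into(), "n: usize".into(), "s: &str".into()], true)
    //     == "n: usize, s: &str"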
diff --git a/crates/ra_ide_db/src/feature_flags.rs b/crates/rust-analyzer/src/feature_flags.rs
index 76655f572..dbb3f50a0 100644
--- a/crates/ra_ide_db/src/feature_flags.rs
+++ b/crates/rust-analyzer/src/feature_flags.rs
@@ -2,6 +2,10 @@
2 2
3use rustc_hash::FxHashMap; 3use rustc_hash::FxHashMap;
4 4
5// FIXME: looks like a much better design is to pass options to each call,
6// rather than having global ambient feature flags -- that way, the clients
7// can issue two successive calls with different options.
8
5/// Feature flags hold fine-grained toggles for all *user-visible* features of 9/// Feature flags hold fine-grained toggles for all *user-visible* features of
6/// rust-analyzer. 10/// rust-analyzer.
7/// 11///
@@ -54,7 +58,9 @@ impl Default for FeatureFlags {
54 FeatureFlags::new(&[ 58 FeatureFlags::new(&[
55 ("lsp.diagnostics", true), 59 ("lsp.diagnostics", true),
56 ("completion.insertion.add-call-parenthesis", true), 60 ("completion.insertion.add-call-parenthesis", true),
61 ("completion.insertion.add-argument-snippets", true),
57 ("completion.enable-postfix", true), 62 ("completion.enable-postfix", true),
63 ("call-info.full", true),
58 ("notifications.workspace-loaded", true), 64 ("notifications.workspace-loaded", true),
59 ("notifications.cargo-toml-not-found", true), 65 ("notifications.cargo-toml-not-found", true),
60 ]) 66 ])
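Both new flags default to true; the server side reads them through the existing `FeatureFlags::get`, as the handlers further down do. A small sketch (the function name is made up):

    use crate::feature_flags::FeatureFlags;

    // "call-info.full" is on by default; turning it off selects the concise
    // signature-help rendering shown in conv.rs above.
    fn wants_concise_call_info(flags: &FeatureFlags) -> bool {
        !flags.get("call-info.full")
    }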
diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs
index a0f968823..e50e47b19 100644
--- a/crates/rust-analyzer/src/lib.rs
+++ b/crates/rust-analyzer/src/lib.rs
@@ -37,6 +37,7 @@ mod config;
37mod world; 37mod world;
38mod diagnostics; 38mod diagnostics;
39mod semantic_tokens; 39mod semantic_tokens;
40mod feature_flags;
40 41
41use serde::de::DeserializeOwned; 42use serde::de::DeserializeOwned;
42 43
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 580ad1f2c..4f7aac754 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -18,7 +18,7 @@ use crossbeam_channel::{select, unbounded, RecvError, Sender};
18use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; 18use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response};
19use lsp_types::{ClientCapabilities, NumberOrString}; 19use lsp_types::{ClientCapabilities, NumberOrString};
20use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckTask}; 20use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckTask};
21use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; 21use ra_ide::{Canceled, FileId, LibraryData, SourceRootId};
22use ra_prof::profile; 22use ra_prof::profile;
23use ra_vfs::{VfsFile, VfsTask, Watch}; 23use ra_vfs::{VfsFile, VfsTask, Watch};
24use relative_path::RelativePathBuf; 24use relative_path::RelativePathBuf;
@@ -28,6 +28,7 @@ use threadpool::ThreadPool;
28 28
29use crate::{ 29use crate::{
30 diagnostics::DiagnosticTask, 30 diagnostics::DiagnosticTask,
31 feature_flags::FeatureFlags,
31 main_loop::{ 32 main_loop::{
32 pending_requests::{PendingRequest, PendingRequests}, 33 pending_requests::{PendingRequest, PendingRequests},
33 subscriptions::Subscriptions, 34 subscriptions::Subscriptions,
@@ -44,6 +45,8 @@ pub struct LspError {
44} 45}
45 46
46impl LspError { 47impl LspError {
48 pub const UNKNOWN_FILE: i32 = -32900;
49
47 pub fn new(code: i32, message: String) -> LspError { 50 pub fn new(code: i32, message: String) -> LspError {
48 LspError { code, message } 51 LspError { code, message }
49 } 52 }
@@ -193,6 +196,7 @@ pub fn main_loop(
193 Watch(!config.use_client_watching), 196 Watch(!config.use_client_watching),
194 options, 197 options,
195 feature_flags, 198 feature_flags,
199 config.additional_out_dirs,
196 ) 200 )
197 }; 201 };
198 202
@@ -421,7 +425,7 @@ fn loop_turn(
421 { 425 {
422 loop_state.workspace_loaded = true; 426 loop_state.workspace_loaded = true;
423 let n_packages: usize = world_state.workspaces.iter().map(|it| it.n_packages()).sum(); 427 let n_packages: usize = world_state.workspaces.iter().map(|it| it.n_packages()).sum();
424 if world_state.feature_flags().get("notifications.workspace-loaded") { 428 if world_state.feature_flags.get("notifications.workspace-loaded") {
425 let msg = format!("workspace loaded, {} rust packages", n_packages); 429 let msg = format!("workspace loaded, {} rust packages", n_packages);
426 show_message(req::MessageType::Info, msg, &connection.sender); 430 show_message(req::MessageType::Info, msg, &connection.sender);
427 } 431 }
@@ -631,6 +635,9 @@ fn on_notification(
631 } 635 }
632 Err(not) => not, 636 Err(not) => not,
633 }; 637 };
638 if not.method.starts_with("$/") {
639 return Ok(());
640 }
634 log::error!("unhandled notification: {:?}", not); 641 log::error!("unhandled notification: {:?}", not);
635 Ok(()) 642 Ok(())
636} 643}
@@ -802,7 +809,14 @@ where
802 let response = match result { 809 let response = match result {
803 Ok(resp) => Response::new_ok(id, &resp), 810 Ok(resp) => Response::new_ok(id, &resp),
804 Err(e) => match e.downcast::<LspError>() { 811 Err(e) => match e.downcast::<LspError>() {
805 Ok(lsp_error) => Response::new_err(id, lsp_error.code, lsp_error.message), 812 Ok(lsp_error) => {
813 if lsp_error.code == LspError::UNKNOWN_FILE {
814 // Work-around for https://github.com/rust-analyzer/rust-analyzer/issues/1521
815 Response::new_ok(id, ())
816 } else {
817 Response::new_err(id, lsp_error.code, lsp_error.message)
818 }
819 }
806 Err(e) => { 820 Err(e) => {
807 if is_canceled(&e) { 821 if is_canceled(&e) {
808 Response::new_err( 822 Response::new_err(
@@ -827,7 +841,7 @@ fn update_file_notifications_on_threadpool(
827 subscriptions: Vec<FileId>, 841 subscriptions: Vec<FileId>,
828) { 842) {
829 log::trace!("updating notifications for {:?}", subscriptions); 843 log::trace!("updating notifications for {:?}", subscriptions);
830 let publish_diagnostics = world.feature_flags().get("lsp.diagnostics"); 844 let publish_diagnostics = world.feature_flags.get("lsp.diagnostics");
831 pool.execute(move || { 845 pool.execute(move || {
832 for file_id in subscriptions { 846 for file_id in subscriptions {
833 if publish_diagnostics { 847 if publish_diagnostics {
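Handlers signal the "file outside the workspace" case with the new `LspError::UNKNOWN_FILE` code (raised in `world.rs` below), and the response path above converts exactly that error into an empty `Ok` response instead of surfacing it to the client. A sketch of constructing such an error with the crate's own `LspError::new`; the helper name and `uri` parameter are illustrative:

    use crate::LspError;

    fn unknown_file_error(uri: &str) -> LspError {
        // The response path above special-cases this code and answers with an
        // empty `Ok` instead of an error (work-around for issue #1521).
        LspError::new(
            LspError::UNKNOWN_FILE,
            format!("Rust file outside current workspace is not supported yet: {}", uri),
        )
    }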
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs
index b5db1fd38..fcb40432d 100644
--- a/crates/rust-analyzer/src/main_loop/handlers.rs
+++ b/crates/rust-analyzer/src/main_loop/handlers.rs
@@ -20,8 +20,8 @@ use lsp_types::{
20 TextEdit, WorkspaceEdit, 20 TextEdit, WorkspaceEdit,
21}; 21};
22use ra_ide::{ 22use ra_ide::{
23 Assist, AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, 23 Assist, AssistId, CompletionOptions, FileId, FilePosition, FileRange, Query, RangeInfo,
24 SearchScope, 24 Runnable, RunnableKind, SearchScope,
25}; 25};
26use ra_prof::profile; 26use ra_prof::profile;
27use ra_syntax::{AstNode, SyntaxKind, TextRange, TextUnit}; 27use ra_syntax::{AstNode, SyntaxKind, TextRange, TextUnit};
@@ -85,11 +85,11 @@ pub fn handle_expand_macro(
85pub fn handle_selection_range( 85pub fn handle_selection_range(
86 world: WorldSnapshot, 86 world: WorldSnapshot,
87 params: req::SelectionRangeParams, 87 params: req::SelectionRangeParams,
88) -> Result<Vec<req::SelectionRange>> { 88) -> Result<Option<Vec<req::SelectionRange>>> {
89 let _p = profile("handle_selection_range"); 89 let _p = profile("handle_selection_range");
90 let file_id = params.text_document.try_conv_with(&world)?; 90 let file_id = params.text_document.try_conv_with(&world)?;
91 let line_index = world.analysis().file_line_index(file_id)?; 91 let line_index = world.analysis().file_line_index(file_id)?;
92 params 92 let res: Result<Vec<req::SelectionRange>> = params
93 .positions 93 .positions
94 .into_iter() 94 .into_iter()
95 .map_conv_with(&line_index) 95 .map_conv_with(&line_index)
@@ -120,7 +120,9 @@ pub fn handle_selection_range(
120 } 120 }
121 Ok(range) 121 Ok(range)
122 }) 122 })
123 .collect() 123 .collect();
124
125 Ok(Some(res?))
124} 126}
125 127
126pub fn handle_find_matching_brace( 128pub fn handle_find_matching_brace(
@@ -422,7 +424,15 @@ pub fn handle_completion(
422 return Ok(None); 424 return Ok(None);
423 } 425 }
424 426
425 let items = match world.analysis().completions(position)? { 427 let options = CompletionOptions {
428 enable_postfix_completions: world.feature_flags.get("completion.enable-postfix"),
429 add_call_parenthesis: world.feature_flags.get("completion.insertion.add-call-parenthesis"),
430 add_call_argument_snippets: world
431 .feature_flags
432 .get("completion.insertion.add-argument-snippets"),
433 };
434
435 let items = match world.analysis().completions(position, &options)? {
426 None => return Ok(None), 436 None => return Ok(None),
427 Some(items) => items, 437 Some(items) => items,
428 }; 438 };
@@ -459,8 +469,12 @@ pub fn handle_signature_help(
459 let _p = profile("handle_signature_help"); 469 let _p = profile("handle_signature_help");
460 let position = params.try_conv_with(&world)?; 470 let position = params.try_conv_with(&world)?;
461 if let Some(call_info) = world.analysis().call_info(position)? { 471 if let Some(call_info) = world.analysis().call_info(position)? {
462 let active_parameter = call_info.active_parameter.map(|it| it as i64); 472 let concise = !world.feature_flags.get("call-info.full");
463 let sig_info = call_info.signature.conv(); 473 let mut active_parameter = call_info.active_parameter.map(|it| it as i64);
474 if concise && call_info.signature.has_self_param {
475 active_parameter = active_parameter.map(|it| it.saturating_sub(1));
476 }
477 let sig_info = call_info.signature.conv_with(concise);
464 478
465 Ok(Some(req::SignatureHelp { 479 Ok(Some(req::SignatureHelp {
466 signatures: vec![sig_info], 480 signatures: vec![sig_info],
diff --git a/crates/rust-analyzer/src/world.rs b/crates/rust-analyzer/src/world.rs
index 96efab844..9ef368529 100644
--- a/crates/rust-analyzer/src/world.rs
+++ b/crates/rust-analyzer/src/world.rs
@@ -9,13 +9,11 @@ use std::{
9}; 9};
10 10
11use crossbeam_channel::{unbounded, Receiver}; 11use crossbeam_channel::{unbounded, Receiver};
12use lsp_server::ErrorCode;
13use lsp_types::Url; 12use lsp_types::Url;
14use parking_lot::RwLock; 13use parking_lot::RwLock;
15use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckWatcher}; 14use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckWatcher};
16use ra_ide::{ 15use ra_ide::{
17 Analysis, AnalysisChange, AnalysisHost, CrateGraph, FeatureFlags, FileId, LibraryData, 16 Analysis, AnalysisChange, AnalysisHost, CrateGraph, FileId, LibraryData, SourceRootId,
18 SourceRootId,
19}; 17};
20use ra_project_model::{get_rustc_cfg_options, ProjectWorkspace}; 18use ra_project_model::{get_rustc_cfg_options, ProjectWorkspace};
21use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot, VfsTask, Watch}; 19use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot, VfsTask, Watch};
@@ -23,10 +21,13 @@ use relative_path::RelativePathBuf;
23 21
24use crate::{ 22use crate::{
25 diagnostics::{CheckFixes, DiagnosticCollection}, 23 diagnostics::{CheckFixes, DiagnosticCollection},
24 feature_flags::FeatureFlags,
26 main_loop::pending_requests::{CompletedRequest, LatestRequests}, 25 main_loop::pending_requests::{CompletedRequest, LatestRequests},
27 vfs_glob::{Glob, RustPackageFilterBuilder}, 26 vfs_glob::{Glob, RustPackageFilterBuilder},
28 LspError, Result, 27 LspError, Result,
29}; 28};
29use ra_db::ExternSourceId;
30use rustc_hash::{FxHashMap, FxHashSet};
30 31
31#[derive(Debug, Clone)] 32#[derive(Debug, Clone)]
32pub struct Options { 33pub struct Options {
@@ -46,6 +47,7 @@ pub struct Options {
46#[derive(Debug)] 47#[derive(Debug)]
47pub struct WorldState { 48pub struct WorldState {
48 pub options: Options, 49 pub options: Options,
50 pub feature_flags: Arc<FeatureFlags>,
49 //FIXME: this belongs to `LoopState` rather than to `WorldState` 51 //FIXME: this belongs to `LoopState` rather than to `WorldState`
50 pub roots_to_scan: usize, 52 pub roots_to_scan: usize,
51 pub roots: Vec<PathBuf>, 53 pub roots: Vec<PathBuf>,
@@ -61,6 +63,7 @@ pub struct WorldState {
61/// An immutable snapshot of the world's state at a point in time. 63/// An immutable snapshot of the world's state at a point in time.
62pub struct WorldSnapshot { 64pub struct WorldSnapshot {
63 pub options: Options, 65 pub options: Options,
66 pub feature_flags: Arc<FeatureFlags>,
64 pub workspaces: Arc<Vec<ProjectWorkspace>>, 67 pub workspaces: Arc<Vec<ProjectWorkspace>>,
65 pub analysis: Analysis, 68 pub analysis: Analysis,
66 pub latest_requests: Arc<RwLock<LatestRequests>>, 69 pub latest_requests: Arc<RwLock<LatestRequests>>,
@@ -77,6 +80,7 @@ impl WorldState {
77 watch: Watch, 80 watch: Watch,
78 options: Options, 81 options: Options,
79 feature_flags: FeatureFlags, 82 feature_flags: FeatureFlags,
83 additional_out_dirs: FxHashMap<String, String>,
80 ) -> WorldState { 84 ) -> WorldState {
81 let mut change = AnalysisChange::new(); 85 let mut change = AnalysisChange::new();
82 86
@@ -98,6 +102,19 @@ impl WorldState {
98 RootEntry::new(pkg_root.path().clone(), filter.into_vfs_filter()) 102 RootEntry::new(pkg_root.path().clone(), filter.into_vfs_filter())
99 })); 103 }));
100 } 104 }
105
106 let extern_dirs: FxHashSet<_> =
107 additional_out_dirs.iter().map(|(_, path)| (PathBuf::from(path))).collect();
108 let mut extern_source_roots = FxHashMap::default();
109
110 roots.extend(additional_out_dirs.iter().map(|(_, path)| {
111 let mut filter = RustPackageFilterBuilder::default().set_member(false);
112 for glob in exclude_globs.iter() {
113 filter = filter.exclude(glob.clone());
114 }
115 RootEntry::new(PathBuf::from(&path), filter.into_vfs_filter())
116 }));
117
101 let (task_sender, task_receiver) = unbounded(); 118 let (task_sender, task_receiver) = unbounded();
102 let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); 119 let task_sender = Box::new(move |t| task_sender.send(t).unwrap());
103 let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch); 120 let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch);
@@ -107,6 +124,11 @@ impl WorldState {
107 let is_local = folder_roots.iter().any(|it| vfs_root_path.starts_with(it)); 124 let is_local = folder_roots.iter().any(|it| vfs_root_path.starts_with(it));
108 change.add_root(SourceRootId(r.0), is_local); 125 change.add_root(SourceRootId(r.0), is_local);
109 change.set_debug_root_path(SourceRootId(r.0), vfs_root_path.display().to_string()); 126 change.set_debug_root_path(SourceRootId(r.0), vfs_root_path.display().to_string());
127
128 // FIXME: add path2root to vfs to simplify this logic
129 if extern_dirs.contains(&vfs_root_path) {
130 extern_source_roots.insert(vfs_root_path, ExternSourceId(r.0));
131 }
110 } 132 }
111 133
112 // FIXME: Read default cfgs from config 134 // FIXME: Read default cfgs from config
@@ -123,13 +145,21 @@ impl WorldState {
123 let vfs_file = vfs.load(path); 145 let vfs_file = vfs.load(path);
124 vfs_file.map(|f| FileId(f.0)) 146 vfs_file.map(|f| FileId(f.0))
125 }; 147 };
126 for ws in workspaces.iter() { 148
127 let (graph, crate_names) = ws.to_crate_graph(&default_cfg_options, &mut load); 149 let mut outdirs = FxHashMap::default();
128 let shift = crate_graph.extend(graph); 150 for (name, path) in additional_out_dirs {
129 for (crate_id, name) in crate_names { 151 let path = PathBuf::from(&path);
130 change.set_debug_crate_name(crate_id.shift(shift), name) 152 if let Some(id) = extern_source_roots.get(&path) {
153 outdirs.insert(name, (id.clone(), path.to_string_lossy().replace("\\", "/")));
131 } 154 }
132 } 155 }
156
157 workspaces
158 .iter()
159 .map(|ws| ws.to_crate_graph(&default_cfg_options, &outdirs, &mut load))
160 .for_each(|graph| {
161 crate_graph.extend(graph);
162 });
133 change.set_crate_graph(crate_graph); 163 change.set_crate_graph(crate_graph);
134 164
135 // FIXME: Figure out the multi-workspace situation 165 // FIXME: Figure out the multi-workspace situation
@@ -148,10 +178,11 @@ impl WorldState {
148 CheckWatcher::dummy() 178 CheckWatcher::dummy()
149 }); 179 });
150 180
151 let mut analysis_host = AnalysisHost::new(lru_capacity, feature_flags); 181 let mut analysis_host = AnalysisHost::new(lru_capacity);
152 analysis_host.apply_change(change); 182 analysis_host.apply_change(change);
153 WorldState { 183 WorldState {
154 options, 184 options,
185 feature_flags: Arc::new(feature_flags),
155 roots_to_scan, 186 roots_to_scan,
156 roots: folder_roots, 187 roots: folder_roots,
157 workspaces: Arc::new(workspaces), 188 workspaces: Arc::new(workspaces),
@@ -218,6 +249,7 @@ impl WorldState {
218 pub fn snapshot(&self) -> WorldSnapshot { 249 pub fn snapshot(&self) -> WorldSnapshot {
219 WorldSnapshot { 250 WorldSnapshot {
220 options: self.options.clone(), 251 options: self.options.clone(),
252 feature_flags: Arc::clone(&self.feature_flags),
221 workspaces: Arc::clone(&self.workspaces), 253 workspaces: Arc::clone(&self.workspaces),
222 analysis: self.analysis_host.analysis(), 254 analysis: self.analysis_host.analysis(),
223 vfs: Arc::clone(&self.vfs), 255 vfs: Arc::clone(&self.vfs),
@@ -237,10 +269,6 @@ impl WorldState {
237 pub fn complete_request(&mut self, request: CompletedRequest) { 269 pub fn complete_request(&mut self, request: CompletedRequest) {
238 self.latest_requests.write().record(request) 270 self.latest_requests.write().record(request)
239 } 271 }
240
241 pub fn feature_flags(&self) -> &FeatureFlags {
242 self.analysis_host.feature_flags()
243 }
244} 272}
245 273
246impl WorldSnapshot { 274impl WorldSnapshot {
@@ -252,8 +280,9 @@ impl WorldSnapshot {
252 let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; 280 let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?;
253 let file = self.vfs.read().path2file(&path).ok_or_else(|| { 281 let file = self.vfs.read().path2file(&path).ok_or_else(|| {
254 // Show warning as this file is outside current workspace 282 // Show warning as this file is outside current workspace
283 // FIXME: just handle such files, and remove `LspError::UNKNOWN_FILE`.
255 LspError { 284 LspError {
256 code: ErrorCode::InvalidRequest as i32, 285 code: LspError::UNKNOWN_FILE,
257 message: "Rust file outside current workspace is not supported yet.".to_string(), 286 message: "Rust file outside current workspace is not supported yet.".to_string(),
258 } 287 }
259 })?; 288 })?;
@@ -307,8 +336,4 @@ impl WorldSnapshot {
307 let path = self.vfs.read().file2path(VfsFile(file_id.0)); 336 let path = self.vfs.read().file2path(VfsFile(file_id.0));
308 self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path)) 337 self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path))
309 } 338 }
310
311 pub fn feature_flags(&self) -> &FeatureFlags {
312 self.analysis.feature_flags()
313 }
314} 339}
diff --git a/crates/test_utils/src/lib.rs b/crates/test_utils/src/lib.rs
index 69deddcb5..a0d8f4d37 100644
--- a/crates/test_utils/src/lib.rs
+++ b/crates/test_utils/src/lib.rs
@@ -202,6 +202,19 @@ pub fn parse_fixture(fixture: &str) -> Vec<FixtureEntry> {
202 res 202 res
203} 203}
204 204
205/// Same as `parse_fixture`, except it allows an empty fixture
206pub fn parse_single_fixture(fixture: &str) -> Option<FixtureEntry> {
207 if !fixture.lines().any(|it| it.trim_start().starts_with("//-")) {
208 return None;
209 }
210
211 let fixtures = parse_fixture(fixture);
212 if fixtures.len() > 1 {
213 panic!("too many fixtures");
214 }
215 fixtures.into_iter().nth(0)
216}
217
205// Comparison functionality borrowed from cargo: 218// Comparison functionality borrowed from cargo:
206 219
207/// Compare a line with an expected pattern. 220/// Compare a line with an expected pattern.
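A quick sketch of the new helper's contract, assuming the usual `//- /path` fixture header format used throughout the test suite:

    use test_utils::parse_single_fixture;

    #[test]
    fn single_fixture_contract() {
        // No `//-` marker at all: treated as "no fixture", returns None.
        assert!(parse_single_fixture("fn main() {}").is_none());
        // Exactly one fixture entry is returned; more than one panics.
        assert!(parse_single_fixture("//- /main.rs\nfn main() {}\n").is_some());
    }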
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
index f1386a8f9..4e99dd0a6 100644
--- a/docs/user/readme.adoc
+++ b/docs/user/readme.adoc
@@ -2,6 +2,13 @@
2:toc: preamble 2:toc: preamble
3:sectanchors: 3:sectanchors:
4:page-layout: post 4:page-layout: post
5// https://gist.github.com/dcode/0cfbf2699a1fe9b46ff04c41721dda74#admonitions
6:tip-caption: :bulb:
7:note-caption: :information_source:
8:important-caption: :heavy_exclamation_mark:
9:caution-caption: :fire:
10:warning-caption: :warning:
11
5 12
6 13
7// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository 14// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository
@@ -30,7 +37,7 @@ $ rustup component add rust-src
30 37
31=== VS Code 38=== VS Code
32 39
33This the best supported editor at the moment. 40This is the best supported editor at the moment.
34rust-analyzer plugin for VS Code is maintained 41rust-analyzer plugin for VS Code is maintained
35https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree]. 42https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree].
36 43
@@ -40,6 +47,16 @@ By default, the plugin will prompt you to download the matching version of the s
40 47
41image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[] 48image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
42 49
50[NOTE]
51====
52To disable this notification, put the following in `settings.json`:
53
54[source,json]
55----
56{ "rust-analyzer.updates.askBeforeDownload": false }
57----
58====
59
43The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`. 60The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`.
44 61
45Note that we only support the latest version of VS Code. 62Note that we only support the latest version of VS Code.
@@ -124,7 +141,7 @@ let g:LanguageClient_serverCommands = {
124 141
125NeoVim 0.5 (not yet released) has built-in language server support. 142NeoVim 0.5 (not yet released) has built-in language server support.
126For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lsp#rust_analyzer[neovim/nvim-lsp]. 143For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lsp#rust_analyzer[neovim/nvim-lsp].
127Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`. 144Once `neovim/nvim-lsp` is installed, use `+lua require'nvim_lsp'.rust_analyzer.setup({})+` in your `init.vim`.
128 145
129=== Sublime Text 3 146=== Sublime Text 3
130 147
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index b07caf034..b07964546 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -114,9 +114,9 @@
114 } 114 }
115 }, 115 },
116 "@types/vscode": { 116 "@types/vscode": {
117 "version": "1.42.0", 117 "version": "1.43.0",
118 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.42.0.tgz", 118 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.43.0.tgz",
119 "integrity": "sha512-ds6TceMsh77Fs0Mq0Vap6Y72JbGWB8Bay4DrnJlf5d9ui2RSe1wis13oQm+XhguOeH1HUfLGzaDAoupTUtgabw==", 119 "integrity": "sha512-kIaR9qzd80rJOxePKpCB/mdy00mz8Apt2QA5Y6rdrKFn13QNFNeP3Hzmsf37Bwh/3cS7QjtAeGSK7wSqAU0sYQ==",
120 "dev": true 120 "dev": true
121 }, 121 },
122 "@typescript-eslint/eslint-plugin": { 122 "@typescript-eslint/eslint-plugin": {
@@ -171,9 +171,9 @@
171 } 171 }
172 }, 172 },
173 "acorn": { 173 "acorn": {
174 "version": "7.1.0", 174 "version": "7.1.1",
175 "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", 175 "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.1.tgz",
176 "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", 176 "integrity": "sha512-add7dgA5ppRPxCFJoAGfMDi7PIBXq1RtGo7BhbLaxwrXPOmw8gq48Y9ozT01hUKy9byMjlR20EJhu5zlkErEkg==",
177 "dev": true 177 "dev": true
178 }, 178 },
179 "acorn-jsx": { 179 "acorn-jsx": {
@@ -1328,9 +1328,9 @@
1328 } 1328 }
1329 }, 1329 },
1330 "rollup": { 1330 "rollup": {
1331 "version": "1.32.0", 1331 "version": "1.32.1",
1332 "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.32.0.tgz", 1332 "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.32.1.tgz",
1333 "integrity": "sha512-ab2tF5pdDqm2zuI8j02ceyrJSScl9V2C24FgWQ1v1kTFTu1UrG5H0hpP++mDZlEFyZX4k0chtGEHU2i+pAzBgA==", 1333 "integrity": "sha512-/2HA0Ec70TvQnXdzynFffkjA6XN+1e2pEv/uKS5Ulca40g2L7KuOE3riasHoNVHOsFD5KKZgDsMk1CP3Tw9s+A==",
1334 "dev": true, 1334 "dev": true,
1335 "requires": { 1335 "requires": {
1336 "@types/estree": "*", 1336 "@types/estree": "*",
diff --git a/editors/code/package.json b/editors/code/package.json
index 830358605..1fe8e9f8a 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -21,7 +21,7 @@
21 "Programming Languages" 21 "Programming Languages"
22 ], 22 ],
23 "engines": { 23 "engines": {
24 "vscode": "^1.42.0" 24 "vscode": "^1.43.0"
25 }, 25 },
26 "enableProposedApi": true, 26 "enableProposedApi": true,
27 "scripts": { 27 "scripts": {
@@ -41,11 +41,11 @@
41 "@rollup/plugin-node-resolve": "^7.1.1", 41 "@rollup/plugin-node-resolve": "^7.1.1",
42 "@types/node": "^12.12.29", 42 "@types/node": "^12.12.29",
43 "@types/node-fetch": "^2.5.5", 43 "@types/node-fetch": "^2.5.5",
44 "@types/vscode": "^1.42.0", 44 "@types/vscode": "^1.43.0",
45 "@typescript-eslint/eslint-plugin": "^2.22.0", 45 "@typescript-eslint/eslint-plugin": "^2.22.0",
46 "@typescript-eslint/parser": "^2.22.0", 46 "@typescript-eslint/parser": "^2.22.0",
47 "eslint": "^6.8.0", 47 "eslint": "^6.8.0",
48 "rollup": "^1.32.0", 48 "rollup": "^1.32.1",
49 "tslib": "^1.11.1", 49 "tslib": "^1.11.1",
50 "typescript": "^3.8.3", 50 "typescript": "^3.8.3",
51 "typescript-formatter": "^7.2.2", 51 "typescript-formatter": "^7.2.2",
@@ -191,26 +191,44 @@
191 "properties": { 191 "properties": {
192 "lsp.diagnostics": { 192 "lsp.diagnostics": {
193 "type": "boolean", 193 "type": "boolean",
194 "description": "Whether to show diagnostics from `cargo check`" 194 "markdownDescription": "Whether to show diagnostics from `cargo check`"
195 }, 195 },
196 "completion.insertion.add-call-parenthesis": { 196 "completion.insertion.add-call-parenthesis": {
197 "type": "boolean", 197 "type": "boolean",
198 "description": "Whether to add parenthesis when completing functions" 198 "description": "Whether to add parenthesis when completing functions"
199 }, 199 },
200 "completion.insertion.add-argument-snippets": {
201 "type": "boolean",
202 "description": "Whether to add argument snippets when completing functions"
203 },
200 "completion.enable-postfix": { 204 "completion.enable-postfix": {
201 "type": "boolean", 205 "type": "boolean",
202 "description": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc." 206 "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc."
207 },
208 "call-info.full": {
209 "type": "boolean",
210 "description": "Show function name and docs in parameter hints"
203 }, 211 },
204 "notifications.workspace-loaded": { 212 "notifications.workspace-loaded": {
205 "type": "boolean", 213 "type": "boolean",
206 "description": "Whether to show `workspace loaded` message" 214 "markdownDescription": "Whether to show `workspace loaded` message"
207 }, 215 },
208 "notifications.cargo-toml-not-found": { 216 "notifications.cargo-toml-not-found": {
209 "type": "boolean", 217 "type": "boolean",
210 "description": "Whether to show `can't find Cargo.toml` error message" 218 "markdownDescription": "Whether to show `can't find Cargo.toml` error message"
211 } 219 }
212 } 220 }
213 }, 221 },
222 "rust-analyzer.updates.askBeforeDownload": {
223 "type": "boolean",
224 "default": true,
225 "description": "Whether to ask for permission before downloading any files from the Internet"
226 },
227 "rust-analyzer.additionalOutDirs": {
228 "type": "object",
229 "default": {},
230 "markdownDescription": "Fine grained controls for OUT_DIR `env!(\"OUT_DIR\")` variable. e.g. `{\"foo\":\"/path/to/foo\"}`, "
231 },
214 "rust-analyzer.serverPath": { 232 "rust-analyzer.serverPath": {
215 "type": [ 233 "type": [
216 "null", 234 "null",
@@ -243,24 +261,24 @@
243 "rust-analyzer.cargo-watch.enable": { 261 "rust-analyzer.cargo-watch.enable": {
244 "type": "boolean", 262 "type": "boolean",
245 "default": true, 263 "default": true,
246 "description": "Run `cargo check` for diagnostics on save" 264 "markdownDescription": "Run `cargo check` for diagnostics on save"
247 }, 265 },
248 "rust-analyzer.cargo-watch.arguments": { 266 "rust-analyzer.cargo-watch.arguments": {
249 "type": "array", 267 "type": "array",
250 "items": { 268 "items": {
251 "type": "string" 269 "type": "string"
252 }, 270 },
253 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )", 271 "markdownDescription": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )",
254 "default": [] 272 "default": []
255 }, 273 },
256 "rust-analyzer.cargo-watch.command": { 274 "rust-analyzer.cargo-watch.command": {
257 "type": "string", 275 "type": "string",
258 "description": "`cargo-watch` command. (e.g: `clippy` will run as `cargo watch -x clippy` )", 276 "markdownDescription": "`cargo-watch` command. (e.g: `clippy` will run as `cargo watch -x clippy` )",
259 "default": "check" 277 "default": "check"
260 }, 278 },
261 "rust-analyzer.cargo-watch.allTargets": { 279 "rust-analyzer.cargo-watch.allTargets": {
262 "type": "boolean", 280 "type": "boolean",
263 "description": "Check all targets and tests (will be passed as `--all-targets`)", 281 "markdownDescription": "Check all targets and tests (will be passed as `--all-targets`)",
264 "default": true 282 "default": true
265 }, 283 },
266 "rust-analyzer.trace.server": { 284 "rust-analyzer.trace.server": {
@@ -312,7 +330,7 @@
312 "rust-analyzer.cargoFeatures.noDefaultFeatures": { 330 "rust-analyzer.cargoFeatures.noDefaultFeatures": {
313 "type": "boolean", 331 "type": "boolean",
314 "default": false, 332 "default": false,
315 "description": "Do not activate the `default` feature" 333 "markdownDescription": "Do not activate the `default` feature"
316 }, 334 },
317 "rust-analyzer.cargoFeatures.allFeatures": { 335 "rust-analyzer.cargoFeatures.allFeatures": {
318 "type": "boolean", 336 "type": "boolean",
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 540f7c9ea..6ce3b9235 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -37,6 +37,7 @@ export async function createClient(config: Config, serverPath: string): Promise<
37 excludeGlobs: config.excludeGlobs, 37 excludeGlobs: config.excludeGlobs,
38 useClientWatching: config.useClientWatching, 38 useClientWatching: config.useClientWatching,
39 featureFlags: config.featureFlags, 39 featureFlags: config.featureFlags,
40 additionalOutDirs: config.additionalOutDirs,
40 withSysroot: config.withSysroot, 41 withSysroot: config.withSysroot,
41 cargoFeatures: config.cargoFeatures, 42 cargoFeatures: config.cargoFeatures,
42 rustfmtArgs: config.rustfmtArgs, 43 rustfmtArgs: config.rustfmtArgs,
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index bf915102c..3ade7e900 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -1,6 +1,6 @@
1import * as os from "os"; 1import * as os from "os";
2import * as vscode from 'vscode'; 2import * as vscode from 'vscode';
3import { BinarySource } from "./installation/interfaces"; 3import { ArtifactSource } from "./installation/interfaces";
4import { log } from "./util"; 4import { log } from "./util";
5 5
6const RA_LSP_DEBUG = process.env.__RA_LSP_SERVER_DEBUG; 6const RA_LSP_DEBUG = process.env.__RA_LSP_SERVER_DEBUG;
@@ -114,12 +114,12 @@ export class Config {
114 } 114 }
115 } 115 }
116 116
117 get serverSource(): null | BinarySource { 117 get serverSource(): null | ArtifactSource {
118 const serverPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("serverPath"); 118 const serverPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("serverPath");
119 119
120 if (serverPath) { 120 if (serverPath) {
121 return { 121 return {
122 type: BinarySource.Type.ExplicitPath, 122 type: ArtifactSource.Type.ExplicitPath,
123 path: Config.replaceTildeWithHomeDir(serverPath) 123 path: Config.replaceTildeWithHomeDir(serverPath)
124 }; 124 };
125 } 125 }
@@ -129,11 +129,12 @@ export class Config {
129 if (!prebuiltBinaryName) return null; 129 if (!prebuiltBinaryName) return null;
130 130
131 return { 131 return {
132 type: BinarySource.Type.GithubRelease, 132 type: ArtifactSource.Type.GithubRelease,
133 dir: this.ctx.globalStoragePath, 133 dir: this.ctx.globalStoragePath,
134 file: prebuiltBinaryName, 134 file: prebuiltBinaryName,
135 storage: this.ctx.globalState, 135 storage: this.ctx.globalState,
136 version: Config.extensionVersion, 136 tag: Config.extensionVersion,
137 askBeforeDownload: this.cfg.get("updates.askBeforeDownload") as boolean,
137 repo: { 138 repo: {
138 name: "rust-analyzer", 139 name: "rust-analyzer",
139 owner: "rust-analyzer", 140 owner: "rust-analyzer",
@@ -153,6 +154,7 @@ export class Config {
153 get excludeGlobs() { return this.cfg.get("excludeGlobs") as string[]; } 154 get excludeGlobs() { return this.cfg.get("excludeGlobs") as string[]; }
154 get useClientWatching() { return this.cfg.get("useClientWatching") as boolean; } 155 get useClientWatching() { return this.cfg.get("useClientWatching") as boolean; }
155 get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; } 156 get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; }
157 get additionalOutDirs() { return this.cfg.get("additionalOutDirs") as Record<string, string>; }
156 get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; } 158 get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; }
157 159
158 get cargoWatchOptions(): CargoWatchOptions { 160 get cargoWatchOptions(): CargoWatchOptions {
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index b4e983a0c..25ef38aed 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -3,7 +3,7 @@ import * as lc from 'vscode-languageclient';
3 3
4import { Config } from './config'; 4import { Config } from './config';
5import { createClient } from './client'; 5import { createClient } from './client';
6import { isRustDocument } from './util'; 6import { isRustEditor, RustEditor } from './util';
7 7
8export class Ctx { 8export class Ctx {
9 private constructor( 9 private constructor(
@@ -22,17 +22,15 @@ export class Ctx {
22 return res; 22 return res;
23 } 23 }
24 24
25 get activeRustEditor(): vscode.TextEditor | undefined { 25 get activeRustEditor(): RustEditor | undefined {
26 const editor = vscode.window.activeTextEditor; 26 const editor = vscode.window.activeTextEditor;
27 return editor && isRustDocument(editor.document) 27 return editor && isRustEditor(editor)
28 ? editor 28 ? editor
29 : undefined; 29 : undefined;
30 } 30 }
31 31
32 get visibleRustEditors(): vscode.TextEditor[] { 32 get visibleRustEditors(): RustEditor[] {
33 return vscode.window.visibleTextEditors.filter( 33 return vscode.window.visibleTextEditors.filter(isRustEditor);
34 editor => isRustDocument(editor.document),
35 );
36 } 34 }
37 35
38 registerCommand(name: string, factory: (ctx: Ctx) => Cmd) { 36 registerCommand(name: string, factory: (ctx: Ctx) => Cmd) {
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts
index 08d3a64a7..e1a82e03e 100644
--- a/editors/code/src/inlay_hints.ts
+++ b/editors/code/src/inlay_hints.ts
@@ -1,156 +1,214 @@
+import * as lc from "vscode-languageclient";
 import * as vscode from 'vscode';
 import * as ra from './rust-analyzer-api';
 
-import { Ctx } from './ctx';
-import { log, sendRequestWithRetry, isRustDocument } from './util';
+import { Ctx, Disposable } from './ctx';
+import { sendRequestWithRetry, isRustDocument, RustDocument, RustEditor } from './util';
 
-export function activateInlayHints(ctx: Ctx) {
-    const hintsUpdater = new HintsUpdater(ctx);
-    vscode.window.onDidChangeVisibleTextEditors(
-        async _ => hintsUpdater.refresh(),
-        null,
-        ctx.subscriptions
-    );
 
-    vscode.workspace.onDidChangeTextDocument(
-        async event => {
-            if (event.contentChanges.length === 0) return;
-            if (!isRustDocument(event.document)) return;
-            await hintsUpdater.refresh();
+export function activateInlayHints(ctx: Ctx) {
+    const maybeUpdater = {
+        updater: null as null | HintsUpdater,
+        onConfigChange() {
+            if (!ctx.config.displayInlayHints) {
+                return this.dispose();
+            }
+            if (!this.updater) this.updater = new HintsUpdater(ctx);
         },
-        null,
-        ctx.subscriptions
-    );
+        dispose() {
+            this.updater?.dispose();
+            this.updater = null;
+        }
+    };
+
+    ctx.pushCleanup(maybeUpdater);
 
     vscode.workspace.onDidChangeConfiguration(
-        async _ => hintsUpdater.setEnabled(ctx.config.displayInlayHints),
-        null,
-        ctx.subscriptions
+        maybeUpdater.onConfigChange, maybeUpdater, ctx.subscriptions
     );
 
-    ctx.pushCleanup({
-        dispose() {
-            hintsUpdater.clear();
+    maybeUpdater.onConfigChange();
+}
+
+
+const typeHints = {
+    decorationType: vscode.window.createTextEditorDecorationType({
+        after: {
+            color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
+            fontStyle: "normal",
+        }
-    });
+    }),
 
-    // XXX: we don't await this, thus Promise rejections won't be handled, but
-    // this should never throw in fact...
-    void hintsUpdater.setEnabled(ctx.config.displayInlayHints);
-}
+    toDecoration(hint: ra.InlayHint.TypeHint, conv: lc.Protocol2CodeConverter): vscode.DecorationOptions {
+        return {
+            range: conv.asRange(hint.range),
+            renderOptions: { after: { contentText: `: ${hint.label}` } }
+        };
+    }
+};
+
+const paramHints = {
+    decorationType: vscode.window.createTextEditorDecorationType({
+        before: {
+            color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
+            fontStyle: "normal",
+        }
+    }),
 
-const typeHintDecorationType = vscode.window.createTextEditorDecorationType({
-    after: {
-        color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
-        fontStyle: "normal",
-    },
-});
-
-const parameterHintDecorationType = vscode.window.createTextEditorDecorationType({
-    before: {
-        color: new vscode.ThemeColor('rust_analyzer.inlayHint'),
-        fontStyle: "normal",
-    },
-});
-
-class HintsUpdater {
-    private pending = new Map<string, vscode.CancellationTokenSource>();
-    private ctx: Ctx;
-    private enabled: boolean;
-
-    constructor(ctx: Ctx) {
-        this.ctx = ctx;
-        this.enabled = false;
+    toDecoration(hint: ra.InlayHint.ParamHint, conv: lc.Protocol2CodeConverter): vscode.DecorationOptions {
+        return {
+            range: conv.asRange(hint.range),
+            renderOptions: { before: { contentText: `${hint.label}: ` } }
+        };
     }
+};
 
-    async setEnabled(enabled: boolean): Promise<void> {
-        log.debug({ enabled, prev: this.enabled });
+class HintsUpdater implements Disposable {
+    private sourceFiles = new Map<string, RustSourceFile>(); // map Uri -> RustSourceFile
+    private readonly disposables: Disposable[] = [];
 
-        if (this.enabled === enabled) return;
-        this.enabled = enabled;
+    constructor(private readonly ctx: Ctx) {
+        vscode.window.onDidChangeVisibleTextEditors(
+            this.onDidChangeVisibleTextEditors,
+            this,
+            this.disposables
+        );
 
-        if (this.enabled) {
-            return await this.refresh();
-        } else {
-            return this.clear();
-        }
+        vscode.workspace.onDidChangeTextDocument(
+            this.onDidChangeTextDocument,
+            this,
+            this.disposables
+        );
+
+        // Set up initial cache shape
+        ctx.visibleRustEditors.forEach(editor => this.sourceFiles.set(
+            editor.document.uri.toString(),
+            {
+                document: editor.document,
+                inlaysRequest: null,
+                cachedDecorations: null
+            }
+        ));
+
+        this.syncCacheAndRenderHints();
     }
 
-    clear() {
-        this.ctx.visibleRustEditors.forEach(it => {
-            this.setTypeDecorations(it, []);
-            this.setParameterDecorations(it, []);
-        });
+    dispose() {
+        this.sourceFiles.forEach(file => file.inlaysRequest?.cancel());
+        this.ctx.visibleRustEditors.forEach(editor => this.renderDecorations(editor, { param: [], type: [] }));
+        this.disposables.forEach(d => d.dispose());
+    }
+
+    onDidChangeTextDocument({ contentChanges, document }: vscode.TextDocumentChangeEvent) {
+        if (contentChanges.length === 0 || !isRustDocument(document)) return;
+        this.syncCacheAndRenderHints();
     }
 
-    async refresh() {
-        if (!this.enabled) return;
-        await Promise.all(this.ctx.visibleRustEditors.map(it => this.refreshEditor(it)));
+    private syncCacheAndRenderHints() {
+        // FIXME: make inlayHints request pass an array of files?
+        this.sourceFiles.forEach((file, uri) => this.fetchHints(file).then(hints => {
+            if (!hints) return;
+
+            file.cachedDecorations = this.hintsToDecorations(hints);
+
+            for (const editor of this.ctx.visibleRustEditors) {
+                if (editor.document.uri.toString() === uri) {
+                    this.renderDecorations(editor, file.cachedDecorations);
+                }
+            }
+        }));
     }
 
-    private async refreshEditor(editor: vscode.TextEditor): Promise<void> {
-        const newHints = await this.queryHints(editor.document.uri.toString());
-        if (newHints == null) return;
-
-        const newTypeDecorations = newHints
-            .filter(hint => hint.kind === ra.InlayKind.TypeHint)
-            .map(hint => ({
-                range: this.ctx.client.protocol2CodeConverter.asRange(hint.range),
-                renderOptions: {
-                    after: {
-                        contentText: `: ${hint.label}`,
-                    },
-                },
-            }));
-        this.setTypeDecorations(editor, newTypeDecorations);
-
-        const newParameterDecorations = newHints
-            .filter(hint => hint.kind === ra.InlayKind.ParameterHint)
-            .map(hint => ({
-                range: this.ctx.client.protocol2CodeConverter.asRange(hint.range),
-                renderOptions: {
-                    before: {
-                        contentText: `${hint.label}: `,
-                    },
-                },
-            }));
-        this.setParameterDecorations(editor, newParameterDecorations);
+    onDidChangeVisibleTextEditors() {
+        const newSourceFiles = new Map<string, RustSourceFile>();
+
+        // Rerendering all, even up-to-date editors for simplicity
+        this.ctx.visibleRustEditors.forEach(async editor => {
+            const uri = editor.document.uri.toString();
+            const file = this.sourceFiles.get(uri) ?? {
+                document: editor.document,
+                inlaysRequest: null,
+                cachedDecorations: null
+            };
+            newSourceFiles.set(uri, file);
+
+            // No text documents changed, so we may try to use the cache
+            if (!file.cachedDecorations) {
+                file.inlaysRequest?.cancel();
+
+                const hints = await this.fetchHints(file);
+                if (!hints) return;
+
+                file.cachedDecorations = this.hintsToDecorations(hints);
+            }
+
+            this.renderDecorations(editor, file.cachedDecorations);
+        });
+
+        // Cancel requests for no longer visible (disposed) source files
+        this.sourceFiles.forEach((file, uri) => {
+            if (!newSourceFiles.has(uri)) file.inlaysRequest?.cancel();
+        });
+
+        this.sourceFiles = newSourceFiles;
     }
 
-    private setTypeDecorations(
-        editor: vscode.TextEditor,
-        decorations: vscode.DecorationOptions[],
-    ) {
-        editor.setDecorations(
-            typeHintDecorationType,
-            this.enabled ? decorations : [],
-        );
+    private renderDecorations(editor: RustEditor, decorations: InlaysDecorations) {
+        editor.setDecorations(typeHints.decorationType, decorations.type);
+        editor.setDecorations(paramHints.decorationType, decorations.param);
     }
 
-    private setParameterDecorations(
-        editor: vscode.TextEditor,
-        decorations: vscode.DecorationOptions[],
-    ) {
-        editor.setDecorations(
-            parameterHintDecorationType,
-            this.enabled ? decorations : [],
-        );
+    private hintsToDecorations(hints: ra.InlayHint[]): InlaysDecorations {
+        const decorations: InlaysDecorations = { type: [], param: [] };
+        const conv = this.ctx.client.protocol2CodeConverter;
+
+        for (const hint of hints) {
+            switch (hint.kind) {
+                case ra.InlayHint.Kind.TypeHint: {
+                    decorations.type.push(typeHints.toDecoration(hint, conv));
+                    continue;
+                }
+                case ra.InlayHint.Kind.ParamHint: {
+                    decorations.param.push(paramHints.toDecoration(hint, conv));
+                    continue;
+                }
+            }
+        }
+        return decorations;
     }
 
-    private async queryHints(documentUri: string): Promise<ra.InlayHint[] | null> {
-        this.pending.get(documentUri)?.cancel();
+    private async fetchHints(file: RustSourceFile): Promise<null | ra.InlayHint[]> {
+        file.inlaysRequest?.cancel();
 
         const tokenSource = new vscode.CancellationTokenSource();
-        this.pending.set(documentUri, tokenSource);
+        file.inlaysRequest = tokenSource;
 
-        const request = { textDocument: { uri: documentUri } };
+        const request = { textDocument: { uri: file.document.uri.toString() } };
 
         return sendRequestWithRetry(this.ctx.client, ra.inlayHints, request, tokenSource.token)
             .catch(_ => null)
             .finally(() => {
-                if (!tokenSource.token.isCancellationRequested) {
-                    this.pending.delete(documentUri);
+                if (file.inlaysRequest === tokenSource) {
+                    file.inlaysRequest = null;
                 }
             });
     }
 }
+
+interface InlaysDecorations {
+    type: vscode.DecorationOptions[];
+    param: vscode.DecorationOptions[];
+}
+
+interface RustSourceFile {
+    /*
+     * Source of the token to cancel in-flight inlay hints request if any.
+     */
+    inlaysRequest: null | vscode.CancellationTokenSource;
+    /**
+     * Last applied decorations.
+     */
+    cachedDecorations: null | InlaysDecorations;
+
+    document: RustDocument;
+}
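Aside (illustrative, not part of the patch): the core of fetchHints above is "at most one in-flight request per file; starting a new request cancels the previous one, and the slot is only cleared if it still belongs to this request". A minimal standalone sketch of that pattern, where fetchFromServer is a hypothetical stand-in for the real LSP call:

    import * as vscode from 'vscode';

    // One cancellation source per key; newer requests cancel older ones.
    const inflight = new Map<string, vscode.CancellationTokenSource>();

    async function fetchForFile(
        uri: string,
        fetchFromServer: (token: vscode.CancellationToken) => Promise<string[]>,
    ): Promise<string[] | null> {
        inflight.get(uri)?.cancel();

        const tokenSource = new vscode.CancellationTokenSource();
        inflight.set(uri, tokenSource);

        return fetchFromServer(tokenSource.token)
            .catch(() => null)
            .finally(() => {
                // Only clear the slot if it still belongs to this request.
                if (inflight.get(uri) === tokenSource) inflight.delete(uri);
            });
    }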
diff --git a/editors/code/src/installation/interfaces.ts b/editors/code/src/installation/interfaces.ts
index e40839e4b..50b635921 100644
--- a/editors/code/src/installation/interfaces.ts
+++ b/editors/code/src/installation/interfaces.ts
@@ -14,14 +14,14 @@ export interface ArtifactReleaseInfo {
 }
 
 /**
- * Represents the source of a binary artifact which is either specified by the user
+ * Represents the source of an artifact which is either specified by the user
  * explicitly, or bundled by this extension from GitHub releases.
  */
-export type BinarySource = BinarySource.ExplicitPath | BinarySource.GithubRelease;
+export type ArtifactSource = ArtifactSource.ExplicitPath | ArtifactSource.GithubRelease;
 
-export namespace BinarySource {
+export namespace ArtifactSource {
     /**
-     * Type tag for `BinarySource` discriminated union.
+     * Type tag for `ArtifactSource` discriminated union.
      */
     export const enum Type { ExplicitPath, GithubRelease }
 
@@ -56,13 +56,18 @@ export namespace BinarySource {
         /**
          * Tag of github release that denotes a version required by this extension.
          */
-        version: string;
+        tag: string;
 
         /**
          * Object that provides `get()/update()` operations to store metadata
          * about the actual binary, e.g. its actual version.
          */
         storage: vscode.Memento;
+
+        /**
+         * Ask for the user permission before downloading the artifact.
+         */
+        askBeforeDownload: boolean;
     }
 
 }
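Aside (illustrative, not part of the patch): ArtifactSource is a discriminated union keyed on its Type tag, so a switch narrows each arm, which is how server.ts below consumes it. A sketch with a hypothetical describe helper:

    import { ArtifactSource } from './interfaces';

    // Hypothetical helper: the switch narrows `source` per arm.
    function describe(source: ArtifactSource): string {
        switch (source.type) {
            case ArtifactSource.Type.ExplicitPath:
                return `local binary at ${source.path}`;
            case ArtifactSource.Type.GithubRelease:
                return `release ${source.tag} of ${source.repo.owner}/${source.repo.name}` +
                    (source.askBeforeDownload ? " (will ask before downloading)" : "");
        }
    }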
diff --git a/editors/code/src/installation/server.ts b/editors/code/src/installation/server.ts
index 6a6cf4f8c..ef1c45ff6 100644
--- a/editors/code/src/installation/server.ts
+++ b/editors/code/src/installation/server.ts
@@ -3,12 +3,12 @@ import * as path from "path";
 import { promises as dns } from "dns";
 import { spawnSync } from "child_process";
 
-import { BinarySource } from "./interfaces";
+import { ArtifactSource } from "./interfaces";
 import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
 import { downloadArtifact } from "./download_artifact";
 import { log, assert } from "../util";
 
-export async function ensureServerBinary(source: null | BinarySource): Promise<null | string> {
+export async function ensureServerBinary(source: null | ArtifactSource): Promise<null | string> {
     if (!source) {
         vscode.window.showErrorMessage(
             "Unfortunately we don't ship binaries for your platform yet. " +
@@ -22,7 +22,7 @@ export async function ensureServerBinary(source: null | BinarySource): Promise<n
     }
 
     switch (source.type) {
-        case BinarySource.Type.ExplicitPath: {
+        case ArtifactSource.Type.ExplicitPath: {
             if (isBinaryAvailable(source.path)) {
                 return source.path;
             }
@@ -34,11 +34,11 @@ export async function ensureServerBinary(source: null | BinarySource): Promise<n
             );
             return null;
         }
-        case BinarySource.Type.GithubRelease: {
+        case ArtifactSource.Type.GithubRelease: {
             const prebuiltBinaryPath = path.join(source.dir, source.file);
 
             const installedVersion: null | string = getServerVersion(source.storage);
-            const requiredVersion: string = source.version;
+            const requiredVersion: string = source.tag;
 
             log.debug("Installed version:", installedVersion, "required:", requiredVersion);
 
@@ -46,12 +46,14 @@ export async function ensureServerBinary(source: null | BinarySource): Promise<n
                 return prebuiltBinaryPath;
             }
 
-            const userResponse = await vscode.window.showInformationMessage(
-                `Language server version ${source.version} for rust-analyzer is not installed. ` +
-                "Do you want to download it now?",
-                "Download now", "Cancel"
-            );
-            if (userResponse !== "Download now") return null;
+            if (source.askBeforeDownload) {
+                const userResponse = await vscode.window.showInformationMessage(
+                    `Language server version ${source.tag} for rust-analyzer is not installed. ` +
+                    "Do you want to download it now?",
+                    "Download now", "Cancel"
+                );
+                if (userResponse !== "Download now") return null;
+            }
 
             if (!await downloadServer(source)) return null;
 
@@ -60,9 +62,9 @@ export async function ensureServerBinary(source: null | BinarySource): Promise<n
     }
 }
 
-async function downloadServer(source: BinarySource.GithubRelease): Promise<boolean> {
+async function downloadServer(source: ArtifactSource.GithubRelease): Promise<boolean> {
     try {
-        const releaseInfo = await fetchArtifactReleaseInfo(source.repo, source.file, source.version);
+        const releaseInfo = await fetchArtifactReleaseInfo(source.repo, source.file, source.tag);
 
         await downloadArtifact(releaseInfo, source.file, source.dir, "language server");
         await setServerVersion(source.storage, releaseInfo.releaseName);
diff --git a/editors/code/src/rust-analyzer-api.ts b/editors/code/src/rust-analyzer-api.ts
index c5a010e94..bd6e3ada0 100644
--- a/editors/code/src/rust-analyzer-api.ts
+++ b/editors/code/src/rust-analyzer-api.ts
@@ -86,14 +86,20 @@ export interface Runnable {
 export const runnables = request<RunnablesParams, Vec<Runnable>>("runnables");
 
 
-export const enum InlayKind {
-    TypeHint = "TypeHint",
-    ParameterHint = "ParameterHint",
-}
-export interface InlayHint {
-    range: lc.Range;
-    kind: InlayKind;
-    label: string;
+
+export type InlayHint = InlayHint.TypeHint | InlayHint.ParamHint;
+
+export namespace InlayHint {
+    export const enum Kind {
+        TypeHint = "TypeHint",
+        ParamHint = "ParameterHint",
+    }
+    interface Common {
+        range: lc.Range;
+        label: string;
+    }
+    export type TypeHint = Common & { kind: Kind.TypeHint };
+    export type ParamHint = Common & { kind: Kind.ParamHint };
 }
 export interface InlayHintsParams {
     textDocument: lc.TextDocumentIdentifier;
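Aside (illustrative, not part of the patch): moving the kind tag into per-variant types makes InlayHint a discriminated union, so a single comparison on kind narrows the hint, as hintsToDecorations above relies on. A sketch with a hypothetical render helper:

    import * as ra from './rust-analyzer-api';

    // Hypothetical helper: checking `kind` narrows to TypeHint or ParamHint.
    function render(hint: ra.InlayHint): string {
        return hint.kind === ra.InlayHint.Kind.TypeHint
            ? `: ${hint.label}`   // shown after the expression
            : `${hint.label}: `;  // shown before the argument
    }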
diff --git a/editors/code/src/util.ts b/editors/code/src/util.ts
index 7c95769bb..95a5f1227 100644
--- a/editors/code/src/util.ts
+++ b/editors/code/src/util.ts
@@ -1,7 +1,6 @@
 import * as lc from "vscode-languageclient";
 import * as vscode from "vscode";
 import { strict as nativeAssert } from "assert";
-import { TextDocument } from "vscode";
 
 export function assert(condition: boolean, explanation: string): asserts condition {
     try {
@@ -67,9 +66,16 @@ function sleep(ms: number) {
     return new Promise(resolve => setTimeout(resolve, ms));
 }
 
-export function isRustDocument(document: TextDocument) {
+export type RustDocument = vscode.TextDocument & { languageId: "rust" };
+export type RustEditor = vscode.TextEditor & { document: RustDocument; id: string };
+
+export function isRustDocument(document: vscode.TextDocument): document is RustDocument {
     return document.languageId === 'rust'
         // SCM diff views have the same URI as the on-disk document but not the same content
         && document.uri.scheme !== 'git'
         && document.uri.scheme !== 'svn';
-}
\ No newline at end of file
+}
+
+export function isRustEditor(editor: vscode.TextEditor): editor is RustEditor {
+    return isRustDocument(editor.document);
+}
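Aside (illustrative, not part of the patch): because isRustEditor is a type guard, Array.prototype.filter narrows vscode.TextEditor[] to RustEditor[] without a cast, which is exactly what Ctx.visibleRustEditors above relies on. A minimal sketch:

    import * as vscode from 'vscode';
    import { isRustEditor, RustEditor } from './util';

    // The type-guard overload of filter() narrows the element type.
    const rustEditors: RustEditor[] = vscode.window.visibleTextEditors.filter(isRustEditor);

    for (const editor of rustEditors) {
        // `editor.document` is statically known to be a Rust document here.
        console.log(editor.document.uri.toString());
    }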
diff --git a/xtask/src/dist.rs b/xtask/src/dist.rs
index 12bad820f..bb3593b11 100644
--- a/xtask/src/dist.rs
+++ b/xtask/src/dist.rs
@@ -27,7 +27,13 @@ fn dist_client(nightly: bool) -> Result<()> {
     let _restore =
         Restore { path: package_json_path.clone(), contents: original_package_json.clone() };
 
-    let mut package_json = original_package_json.replace(r#""enableProposedApi": true,"#, r#""#);
+    let date = run!("date --utc +%Y%m%d")?;
+    let version_suffix = if nightly { "-nightly" } else { "" };
+
+    let mut package_json = original_package_json.replace(
+        r#""version": "0.2.20200211-dev""#,
+        &format!(r#""version": "0.1.{}{}""#, date, version_suffix),
+    );
 
     if nightly {
         package_json = package_json.replace(
@@ -35,7 +41,7 @@ fn dist_client(nightly: bool) -> Result<()> {
35 r#""displayName": "rust-analyzer nightly""#, 41 r#""displayName": "rust-analyzer nightly""#,
36 ); 42 );
37 } else { 43 } else {
38 package_json = original_package_json.replace(r#""enableProposedApi": true,"#, r#""#); 44 package_json = package_json.replace(r#""enableProposedApi": true,"#, r#""#);
39 } 45 }
40 fs2::write(package_json_path, package_json)?; 46 fs2::write(package_json_path, package_json)?;
41 47
@@ -47,10 +53,16 @@ fn dist_client(nightly: bool) -> Result<()> {
 fn dist_server() -> Result<()> {
     if cfg!(target_os = "linux") {
         std::env::set_var("CC", "clang");
-        run!("cargo build --package rust-analyzer --bin rust-analyzer --release --target x86_64-unknown-linux-musl")?;
+        run!(
+            "cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release
+             --target x86_64-unknown-linux-musl
+            "
+            // We'd want to add, but that requires setting the right linker somehow
+            // --features=jemalloc
+        )?;
         run!("strip ./target/x86_64-unknown-linux-musl/release/rust-analyzer")?;
     } else {
-        run!("cargo build --package rust-analyzer --bin rust-analyzer --release")?;
+        run!("cargo build --manifest-path ./crates/rust-analyzer/Cargo.toml --bin rust-analyzer --release")?;
     }
 
     let (src, dst) = if cfg!(target_os = "linux") {