Diffstat
-rw-r--r--  .github/workflows/release.yaml | 32
-rw-r--r--  .vscode/launch.json | 26
-rw-r--r--  Cargo.lock | 20
-rw-r--r--  Cargo.toml | 5
-rw-r--r--  crates/ra_assists/src/handlers/auto_import.rs | 588
-rw-r--r--  crates/ra_assists/src/handlers/fill_match_arms.rs | 6
-rw-r--r--  crates/ra_assists/src/handlers/merge_match_arms.rs | 8
-rw-r--r--  crates/ra_assists/src/handlers/move_guard.rs | 6
-rw-r--r--  crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs | 29
-rw-r--r--  crates/ra_cli/Cargo.toml | 2
-rw-r--r--  crates/ra_cli/src/analysis_bench.rs | 54
-rw-r--r--  crates/ra_cli/src/analysis_stats.rs | 83
-rw-r--r--  crates/ra_cli/src/main.rs | 56
-rw-r--r--  crates/ra_hir/src/code_model.rs | 70
-rw-r--r--  crates/ra_hir/src/lib.rs | 8
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs | 16
-rw-r--r--  crates/ra_hir_def/Cargo.toml | 1
-rw-r--r--  crates/ra_hir_def/src/body.rs | 54
-rw-r--r--  crates/ra_hir_def/src/body/lower.rs | 31
-rw-r--r--  crates/ra_hir_def/src/body/scope.rs | 6
-rw-r--r--  crates/ra_hir_def/src/expr.rs | 11
-rw-r--r--  crates/ra_hir_def/src/lib.rs | 63
-rw-r--r--  crates/ra_hir_def/src/nameres/collector.rs | 79
-rw-r--r--  crates/ra_hir_def/src/resolver.rs | 6
-rw-r--r--  crates/ra_hir_expand/src/lib.rs | 22
-rw-r--r--  crates/ra_hir_ty/src/display.rs | 373
-rw-r--r--  crates/ra_hir_ty/src/infer/expr.rs | 10
-rw-r--r--  crates/ra_hir_ty/src/infer/pat.rs | 12
-rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs | 2
-rw-r--r--  crates/ra_hir_ty/src/lib.rs | 383
-rw-r--r--  crates/ra_hir_ty/src/lower.rs | 40
-rw-r--r--  crates/ra_hir_ty/src/marks.rs | 2
-rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs | 9
-rw-r--r--  crates/ra_hir_ty/src/tests/coercion.rs | 22
-rw-r--r--  crates/ra_hir_ty/src/tests/method_resolution.rs | 32
-rw-r--r--  crates/ra_hir_ty/src/tests/never_type.rs | 17
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs | 48
-rw-r--r--  crates/ra_hir_ty/src/traits.rs | 3
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk.rs | 4
-rw-r--r--  crates/ra_ide/src/goto_type_definition.rs | 22
-rw-r--r--  crates/ra_ide/src/inlay_hints.rs | 5
-rw-r--r--  crates/ra_ide/src/join_lines.rs | 85
-rw-r--r--  crates/ra_ide/src/lib.rs | 2
-rw-r--r--  crates/ra_ide/src/references.rs | 74
-rw-r--r--  crates/ra_ide/src/references/rename.rs | 19
-rw-r--r--  crates/ra_ide/src/runnables.rs | 109
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html | 1
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html | 1
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs | 1
-rw-r--r--  crates/ra_ide_db/Cargo.toml | 1
-rw-r--r--  crates/ra_lsp_server/src/cargo_target_spec.rs | 16
-rw-r--r--  crates/ra_lsp_server/src/config.rs | 3
-rw-r--r--  crates/ra_lsp_server/src/lib.rs | 8
-rw-r--r--  crates/ra_lsp_server/src/main.rs | 24
-rw-r--r--  crates/ra_lsp_server/src/main_loop.rs | 1
-rw-r--r--  crates/ra_lsp_server/src/main_loop/handlers.rs | 10
-rw-r--r--  crates/ra_lsp_server/src/world.rs | 1
-rw-r--r--  crates/ra_lsp_server/tests/heavy_tests/main.rs | 2
-rw-r--r--  crates/ra_parser/src/grammar/expressions/atom.rs | 4
-rw-r--r--  crates/ra_parser/src/grammar/params.rs | 9
-rw-r--r--  crates/ra_parser/src/grammar/patterns.rs | 67
-rw-r--r--  crates/ra_parser/src/syntax_kind/generated.rs | 2
-rw-r--r--  crates/ra_prof/src/lib.rs | 7
-rw-r--r--  crates/ra_project_model/Cargo.toml | 2
-rw-r--r--  crates/ra_project_model/src/cargo_workspace.rs | 11
-rw-r--r--  crates/ra_project_model/src/lib.rs | 57
-rw-r--r--  crates/ra_project_model/src/sysroot.rs | 15
-rw-r--r--  crates/ra_syntax/src/ast.rs | 8
-rw-r--r--  crates/ra_syntax/src/ast/extensions.rs | 54
-rw-r--r--  crates/ra_syntax/src/ast/generated.rs | 87
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0030_cond.txt | 86
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.txt | 38
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs | 3
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.txt | 62
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rs | 8
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.txt | 112
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs | 1
-rw-r--r--  crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt | 26
-rw-r--r--  crates/ra_text_edit/src/lib.rs | 4
-rw-r--r--  crates/ra_text_edit/src/text_edit.rs | 6
-rw-r--r--  docs/dev/README.md | 11
-rw-r--r--  docs/dev/debugging.md | 64
-rw-r--r--  docs/user/README.md | 250
-rw-r--r--  docs/user/readme.adoc | 154
-rw-r--r--  editors/code/package-lock.json | 20
-rw-r--r--  editors/code/package.json | 39
-rw-r--r--  editors/code/rollup.config.js | 3
-rw-r--r--  editors/code/src/client.ts | 32
-rw-r--r--  editors/code/src/config.ts | 299
-rw-r--r--  editors/code/src/ctx.ts | 16
-rw-r--r--  editors/code/src/inlay_hints.ts | 51
-rw-r--r--  editors/code/src/installation/download_artifact.ts | 58
-rw-r--r--  editors/code/src/installation/download_file.ts | 43
-rw-r--r--  editors/code/src/installation/fetch_artifact_release_info.ts (renamed from editors/code/src/installation/fetch_latest_artifact_metadata.ts) | 20
-rw-r--r--  editors/code/src/installation/interfaces.ts | 15
-rw-r--r--  editors/code/src/installation/language_server.ts | 141
-rw-r--r--  editors/code/src/installation/server.ts | 124
-rw-r--r--  editors/code/src/status_display.ts | 4
-rw-r--r--  xtask/src/ast_src.rs | 16
-rw-r--r--  xtask/src/cmd.rs | 53
-rw-r--r--  xtask/src/install.rs | 138
-rw-r--r--  xtask/src/lib.rs | 94
-rw-r--r--  xtask/src/main.rs | 7
-rw-r--r--  xtask/src/not_bash.rs | 165
-rw-r--r--  xtask/src/pre_commit.rs | 8
105 files changed, 3371 insertions, 1747 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index 77c92512a..eae4fbcb5 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -95,9 +95,6 @@ jobs:
95 - name: Copy vscode extension 95 - name: Copy vscode extension
96 run: mkdir -p ./dist/code && cp ./editors/code/*.vsix ./dist/ 96 run: mkdir -p ./dist/code && cp ./editors/code/*.vsix ./dist/
97 97
98 - name: Copy emacs mode
99 run: cp ./editors/emacs/rust-analyzer.el ./dist/rust-analyzer.el
100
101 - name: Upload artifacts 98 - name: Upload artifacts
102 uses: actions/upload-artifact@v1 99 uses: actions/upload-artifact@v1
103 with: 100 with:
@@ -109,6 +106,17 @@ jobs:
109 runs-on: ubuntu-latest 106 runs-on: ubuntu-latest
110 needs: ['build-server', 'build-clients'] 107 needs: ['build-server', 'build-clients']
111 steps: 108 steps:
109 - name: Install Nodejs
110 uses: actions/setup-node@v1
111 with:
112 node-version: 12.x
113
114 - run: echo "::set-env name=TAG::$(date --iso)"
115 - run: 'echo "TAG: $TAG"'
116
117 - name: Checkout repository
118 uses: actions/checkout@v1
119
112 - uses: actions/download-artifact@v1 120 - uses: actions/download-artifact@v1
113 with: 121 with:
114 name: editor-plugins 122 name: editor-plugins
@@ -127,9 +135,6 @@ jobs:
127 path: dist 135 path: dist
128 - run: ls -all ./dist 136 - run: ls -all ./dist
129 137
130 - run: echo "::set-env name=TAG::$(date --iso)"
131 - run: 'echo "TAG: $TAG"'
132
133 - name: Create Release 138 - name: Create Release
134 id: create_release 139 id: create_release
135 # uses: actions/create-release@v1 140 # uses: actions/create-release@v1
@@ -179,11 +184,10 @@ jobs:
179 asset_name: rust-analyzer-0.1.0.vsix 184 asset_name: rust-analyzer-0.1.0.vsix
180 asset_content_type: application/octet-stream 185 asset_content_type: application/octet-stream
181 186
182 - uses: actions/upload-release-asset@v1.0.1 187 - run: npm ci
183 env: 188 working-directory: ./editors/code
184 GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 189
185 with: 190 - name: Publish Extension
186 upload_url: ${{ steps.create_release.outputs.upload_url }} 191 working-directory: ./editors/code
187 asset_path: ./dist/rust-analyzer.el 192 # token from https://dev.azure.com/rust-analyzer/
188 asset_name: rust-analyzer.el 193 run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }}
189 asset_content_type: text/plain
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 55a2f10f2..b1bd98d4a 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -2,39 +2,61 @@
2 // Use IntelliSense to learn about possible attributes. 2 // Use IntelliSense to learn about possible attributes.
3 // Hover to view descriptions of existing attributes. 3 // Hover to view descriptions of existing attributes.
4 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 4 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5
6 // NOTE: --disable-extensions
7 // Disable all installed extensions to increase performance of the debug instance
8 // and prevent potential conflicts with other installed extensions.
9
5 "version": "0.2.0", 10 "version": "0.2.0",
6 "configurations": [ 11 "configurations": [
7 { 12 {
13 // Used for testing the extension with the installed LSP server.
8 "name": "Run Extension", 14 "name": "Run Extension",
9 "type": "extensionHost", 15 "type": "extensionHost",
10 "request": "launch", 16 "request": "launch",
11 "runtimeExecutable": "${execPath}", 17 "runtimeExecutable": "${execPath}",
12 "args": [ 18 "args": [
19 "--disable-extensions",
13 "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 20 "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
14 ], 21 ],
15 "outFiles": [ 22 "outFiles": [
16 "${workspaceFolder}/editors/code/out/**/*.js" 23 "${workspaceFolder}/editors/code/out/**/*.js"
17 ], 24 ],
18 "preLaunchTask": "Build Extension" 25 "preLaunchTask": "Build Extension",
26 "skipFiles": [
27 "<node_internals>/**/*.js"
28 ]
19 }, 29 },
20 { 30 {
31 // Used for testing the extension with a local build of the LSP server (in `target/debug`).
21 "name": "Run Extension (Dev Server)", 32 "name": "Run Extension (Dev Server)",
22 "type": "extensionHost", 33 "type": "extensionHost",
23 "request": "launch", 34 "request": "launch",
24 "runtimeExecutable": "${execPath}", 35 "runtimeExecutable": "${execPath}",
25 "args": [ 36 "args": [
37 "--disable-extensions",
26 "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 38 "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
27 ], 39 ],
28 "outFiles": [ 40 "outFiles": [
29 "${workspaceFolder}/editors/code/out/**/*.js" 41 "${workspaceFolder}/editors/code/out/**/*.js"
30 ], 42 ],
31 "preLaunchTask": "Build Extension", 43 "preLaunchTask": "Build Extension",
44 "skipFiles": [
45 "<node_internals>/**/*.js"
46 ],
32 "env": { 47 "env": {
33 "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server" 48 "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server"
34 } 49 }
35 }, 50 },
36 { 51 {
37 "name": "Debug Lsp Server", 52 // Used to attach LLDB to a running LSP server.
53 // NOTE: Might require root permissions. For this run:
54 //
55 // `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`
56 //
57 // Don't forget to set `debug = 2` in `Cargo.toml` before building the server
58
59 "name": "Attach To Server",
38 "type": "lldb", 60 "type": "lldb",
39 "request": "attach", 61 "request": "attach",
40 "program": "${workspaceFolder}/target/debug/ra_lsp_server", 62 "program": "${workspaceFolder}/target/debug/ra_lsp_server",
diff --git a/Cargo.lock b/Cargo.lock
index e29ff898d..f44e514dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -110,9 +110,9 @@ dependencies = [
110 110
111[[package]] 111[[package]]
112name = "byteorder" 112name = "byteorder"
113version = "1.3.2" 113version = "1.3.4"
114source = "registry+https://github.com/rust-lang/crates.io-index" 114source = "registry+https://github.com/rust-lang/crates.io-index"
115checksum = "a7c3dd8985a7111efc5c80b44e23ecdd8c007de8ade3b96595387e812b957cf5" 115checksum = "08c48aae112d48ed9f069b33538ea9e3e90aa263cfa3d1c24309612b1f7472de"
116 116
117[[package]] 117[[package]]
118name = "c2-chacha" 118name = "c2-chacha"
@@ -1015,6 +1015,7 @@ name = "ra_cli"
1015version = "0.1.0" 1015version = "0.1.0"
1016dependencies = [ 1016dependencies = [
1017 "env_logger", 1017 "env_logger",
1018 "itertools",
1018 "pico-args", 1019 "pico-args",
1019 "ra_batch", 1020 "ra_batch",
1020 "ra_db", 1021 "ra_db",
@@ -1024,6 +1025,7 @@ dependencies = [
1024 "ra_ide", 1025 "ra_ide",
1025 "ra_prof", 1026 "ra_prof",
1026 "ra_syntax", 1027 "ra_syntax",
1028 "rand 0.7.3",
1027] 1029]
1028 1030
1029[[package]] 1031[[package]]
@@ -1070,6 +1072,7 @@ dependencies = [
1070 "drop_bomb", 1072 "drop_bomb",
1071 "either", 1073 "either",
1072 "insta", 1074 "insta",
1075 "itertools",
1073 "log", 1076 "log",
1074 "once_cell", 1077 "once_cell",
1075 "ra_arena", 1078 "ra_arena",
@@ -1173,7 +1176,6 @@ dependencies = [
1173 "ra_prof", 1176 "ra_prof",
1174 "ra_syntax", 1177 "ra_syntax",
1175 "ra_text_edit", 1178 "ra_text_edit",
1176 "rand 0.7.3",
1177 "rayon", 1179 "rayon",
1178 "rustc-hash", 1180 "rustc-hash",
1179 "superslice", 1181 "superslice",
@@ -1246,6 +1248,7 @@ dependencies = [
1246name = "ra_project_model" 1248name = "ra_project_model"
1247version = "0.1.0" 1249version = "0.1.0"
1248dependencies = [ 1250dependencies = [
1251 "anyhow",
1249 "cargo_metadata", 1252 "cargo_metadata",
1250 "log", 1253 "log",
1251 "ra_arena", 1254 "ra_arena",
@@ -1563,12 +1566,9 @@ checksum = "4c691c0e608126e00913e33f0ccf3727d5fc84573623b8d65b2df340b5201783"
1563 1566
1564[[package]] 1567[[package]]
1565name = "rustc-hash" 1568name = "rustc-hash"
1566version = "1.0.1" 1569version = "1.1.0"
1567source = "registry+https://github.com/rust-lang/crates.io-index" 1570source = "registry+https://github.com/rust-lang/crates.io-index"
1568checksum = "7540fc8b0c49f096ee9c961cda096467dce8084bec6bdca2fc83895fd9b28cb8" 1571checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
1569dependencies = [
1570 "byteorder",
1571]
1572 1572
1573[[package]] 1573[[package]]
1574name = "rustc_lexer" 1574name = "rustc_lexer"
@@ -1675,9 +1675,9 @@ dependencies = [
1675 1675
1676[[package]] 1676[[package]]
1677name = "serde_json" 1677name = "serde_json"
1678version = "1.0.46" 1678version = "1.0.47"
1679source = "registry+https://github.com/rust-lang/crates.io-index" 1679source = "registry+https://github.com/rust-lang/crates.io-index"
1680checksum = "21b01d7f0288608a01dca632cf1df859df6fd6ffa885300fc275ce2ba6221953" 1680checksum = "15913895b61e0be854afd32fd4163fcd2a3df34142cf2cb961b310ce694cbf90"
1681dependencies = [ 1681dependencies = [
1682 "itoa", 1682 "itoa",
1683 "ryu", 1683 "ryu",
diff --git a/Cargo.toml b/Cargo.toml
index e5620b1b7..c034e2424 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,3 +31,8 @@ opt-level = 0
31 31
32[patch.'crates-io'] 32[patch.'crates-io']
33# rowan = { path = "../rowan" } 33# rowan = { path = "../rowan" }
34
35[patch.'https://github.com/rust-lang/chalk.git']
36# chalk-solve = { path = "../chalk/chalk-solve" }
37# chalk-rust-ir = { path = "../chalk/chalk-rust-ir" }
38# chalk-ir = { path = "../chalk/chalk-ir" }
diff --git a/crates/ra_assists/src/handlers/auto_import.rs b/crates/ra_assists/src/handlers/auto_import.rs
index 1fb701da5..c4aea2a06 100644
--- a/crates/ra_assists/src/handlers/auto_import.rs
+++ b/crates/ra_assists/src/handlers/auto_import.rs
@@ -1,10 +1,18 @@
1use ra_ide_db::imports_locator::ImportsLocator;
2use ra_syntax::ast::{self, AstNode};
3
4use crate::{ 1use crate::{
5 assist_ctx::{Assist, AssistCtx}, 2 assist_ctx::{Assist, AssistCtx},
6 insert_use_statement, AssistId, 3 insert_use_statement, AssistId,
7}; 4};
5use hir::{
6 db::HirDatabase, AsAssocItem, AssocItemContainer, ModPath, Module, ModuleDef, PathResolution,
7 SourceAnalyzer, Trait, Type,
8};
9use ra_ide_db::{imports_locator::ImportsLocator, RootDatabase};
10use ra_prof::profile;
11use ra_syntax::{
12 ast::{self, AstNode},
13 SyntaxNode,
14};
15use rustc_hash::FxHashSet;
8use std::collections::BTreeSet; 16use std::collections::BTreeSet;
9 17
10// Assist: auto_import 18// Assist: auto_import
@@ -27,52 +35,24 @@ use std::collections::BTreeSet;
27// # pub mod std { pub mod collections { pub struct HashMap { } } } 35// # pub mod std { pub mod collections { pub struct HashMap { } } }
28// ``` 36// ```
29pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> { 37pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
30 let path_under_caret: ast::Path = ctx.find_node_at_offset()?; 38 let auto_import_assets = AutoImportAssets::new(&ctx)?;
31 if path_under_caret.syntax().ancestors().find_map(ast::UseItem::cast).is_some() { 39 let proposed_imports = auto_import_assets.search_for_imports(ctx.db);
32 return None;
33 }
34
35 let module = path_under_caret.syntax().ancestors().find_map(ast::Module::cast);
36 let position = match module.and_then(|it| it.item_list()) {
37 Some(item_list) => item_list.syntax().clone(),
38 None => {
39 let current_file =
40 path_under_caret.syntax().ancestors().find_map(ast::SourceFile::cast)?;
41 current_file.syntax().clone()
42 }
43 };
44 let source_analyzer = ctx.source_analyzer(&position, None);
45 let module_with_name_to_import = source_analyzer.module()?;
46
47 let name_ref_to_import =
48 path_under_caret.syntax().descendants().find_map(ast::NameRef::cast)?;
49 if source_analyzer
50 .resolve_path(ctx.db, &name_ref_to_import.syntax().ancestors().find_map(ast::Path::cast)?)
51 .is_some()
52 {
53 return None;
54 }
55
56 let name_to_import = name_ref_to_import.syntax().to_string();
57 let proposed_imports = ImportsLocator::new(ctx.db)
58 .find_imports(&name_to_import)
59 .into_iter()
60 .filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def))
61 .filter(|use_path| !use_path.segments.is_empty())
62 .take(20)
63 .collect::<BTreeSet<_>>();
64
65 if proposed_imports.is_empty() { 40 if proposed_imports.is_empty() {
66 return None; 41 return None;
67 } 42 }
68 43
69 let mut group = ctx.add_assist_group(format!("Import {}", name_to_import)); 44 let assist_group_name = if proposed_imports.len() == 1 {
45 format!("Import `{}`", proposed_imports.iter().next().unwrap())
46 } else {
47 auto_import_assets.get_import_group_message()
48 };
49 let mut group = ctx.add_assist_group(assist_group_name);
70 for import in proposed_imports { 50 for import in proposed_imports {
71 group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| { 51 group.add_assist(AssistId("auto_import"), format!("Import `{}`", &import), |edit| {
72 edit.target(path_under_caret.syntax().text_range()); 52 edit.target(auto_import_assets.syntax_under_caret.text_range());
73 insert_use_statement( 53 insert_use_statement(
74 &position, 54 &auto_import_assets.syntax_under_caret,
75 path_under_caret.syntax(), 55 &auto_import_assets.syntax_under_caret,
76 &import, 56 &import,
77 edit.text_edit_builder(), 57 edit.text_edit_builder(),
78 ); 58 );
@@ -81,11 +61,232 @@ pub(crate) fn auto_import(ctx: AssistCtx) -> Option<Assist> {
81 group.finish() 61 group.finish()
82} 62}
83 63
64struct AutoImportAssets {
65 import_candidate: ImportCandidate,
66 module_with_name_to_import: Module,
67 syntax_under_caret: SyntaxNode,
68}
69
70impl AutoImportAssets {
71 fn new(ctx: &AssistCtx) -> Option<Self> {
72 if let Some(path_under_caret) = ctx.find_node_at_offset::<ast::Path>() {
73 Self::for_regular_path(path_under_caret, &ctx)
74 } else {
75 Self::for_method_call(ctx.find_node_at_offset()?, &ctx)
76 }
77 }
78
79 fn for_method_call(method_call: ast::MethodCallExpr, ctx: &AssistCtx) -> Option<Self> {
80 let syntax_under_caret = method_call.syntax().to_owned();
81 let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
82 let module_with_name_to_import = source_analyzer.module()?;
83 Some(Self {
84 import_candidate: ImportCandidate::for_method_call(
85 &method_call,
86 &source_analyzer,
87 ctx.db,
88 )?,
89 module_with_name_to_import,
90 syntax_under_caret,
91 })
92 }
93
94 fn for_regular_path(path_under_caret: ast::Path, ctx: &AssistCtx) -> Option<Self> {
95 let syntax_under_caret = path_under_caret.syntax().to_owned();
96 if syntax_under_caret.ancestors().find_map(ast::UseItem::cast).is_some() {
97 return None;
98 }
99
100 let source_analyzer = ctx.source_analyzer(&syntax_under_caret, None);
101 let module_with_name_to_import = source_analyzer.module()?;
102 Some(Self {
103 import_candidate: ImportCandidate::for_regular_path(
104 &path_under_caret,
105 &source_analyzer,
106 ctx.db,
107 )?,
108 module_with_name_to_import,
109 syntax_under_caret,
110 })
111 }
112
113 fn get_search_query(&self) -> &str {
114 match &self.import_candidate {
115 ImportCandidate::UnqualifiedName(name) => name,
116 ImportCandidate::QualifierStart(qualifier_start) => qualifier_start,
117 ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => trait_assoc_item_name,
118 ImportCandidate::TraitMethod(_, trait_method_name) => trait_method_name,
119 }
120 }
121
122 fn get_import_group_message(&self) -> String {
123 match &self.import_candidate {
124 ImportCandidate::UnqualifiedName(name) => format!("Import {}", name),
125 ImportCandidate::QualifierStart(qualifier_start) => {
126 format!("Import {}", qualifier_start)
127 }
128 ImportCandidate::TraitAssocItem(_, trait_assoc_item_name) => {
129 format!("Import a trait for item {}", trait_assoc_item_name)
130 }
131 ImportCandidate::TraitMethod(_, trait_method_name) => {
132 format!("Import a trait for method {}", trait_method_name)
133 }
134 }
135 }
136
137 fn search_for_imports(&self, db: &RootDatabase) -> BTreeSet<ModPath> {
138 let _p = profile("auto_import::search_for_imports");
139 let current_crate = self.module_with_name_to_import.krate();
140 ImportsLocator::new(db)
141 .find_imports(&self.get_search_query())
142 .into_iter()
143 .filter_map(|module_def| match &self.import_candidate {
144 ImportCandidate::TraitAssocItem(assoc_item_type, _) => {
145 let located_assoc_item = match module_def {
146 ModuleDef::Function(located_function) => located_function
147 .as_assoc_item(db)
148 .map(|assoc| assoc.container(db))
149 .and_then(Self::assoc_to_trait),
150 ModuleDef::Const(located_const) => located_const
151 .as_assoc_item(db)
152 .map(|assoc| assoc.container(db))
153 .and_then(Self::assoc_to_trait),
154 _ => None,
155 }?;
156
157 let mut trait_candidates = FxHashSet::default();
158 trait_candidates.insert(located_assoc_item.into());
159
160 assoc_item_type
161 .iterate_path_candidates(
162 db,
163 current_crate,
164 &trait_candidates,
165 None,
166 |_, assoc| Self::assoc_to_trait(assoc.container(db)),
167 )
168 .map(ModuleDef::from)
169 }
170 ImportCandidate::TraitMethod(function_callee, _) => {
171 let located_assoc_item =
172 if let ModuleDef::Function(located_function) = module_def {
173 located_function
174 .as_assoc_item(db)
175 .map(|assoc| assoc.container(db))
176 .and_then(Self::assoc_to_trait)
177 } else {
178 None
179 }?;
180
181 let mut trait_candidates = FxHashSet::default();
182 trait_candidates.insert(located_assoc_item.into());
183
184 function_callee
185 .iterate_method_candidates(
186 db,
187 current_crate,
188 &trait_candidates,
189 None,
190 |_, function| {
191 Self::assoc_to_trait(function.as_assoc_item(db)?.container(db))
192 },
193 )
194 .map(ModuleDef::from)
195 }
196 _ => Some(module_def),
197 })
198 .filter_map(|module_def| self.module_with_name_to_import.find_use_path(db, module_def))
199 .filter(|use_path| !use_path.segments.is_empty())
200 .take(20)
201 .collect::<BTreeSet<_>>()
202 }
203
204 fn assoc_to_trait(assoc: AssocItemContainer) -> Option<Trait> {
205 if let AssocItemContainer::Trait(extracted_trait) = assoc {
206 Some(extracted_trait)
207 } else {
208 None
209 }
210 }
211}
212
213#[derive(Debug)]
214enum ImportCandidate {
215 /// Simple name like 'HashMap'
216 UnqualifiedName(String),
217 /// First part of the qualified name.
218 /// For 'std::collections::HashMap', that will be 'std'.
219 QualifierStart(String),
220 /// A trait associated function (with no self parameter) or associated constant.
221 /// For 'test_mod::TestEnum::test_function', `Type` is the `test_mod::TestEnum` expression type
222 /// and `String` is the `test_function`
223 TraitAssocItem(Type, String),
224 /// A trait method with self parameter.
225 /// For 'test_enum.test_method()', `Type` is the `test_enum` expression type
226 /// and `String` is the `test_method`
227 TraitMethod(Type, String),
228}
229
230impl ImportCandidate {
231 fn for_method_call(
232 method_call: &ast::MethodCallExpr,
233 source_analyzer: &SourceAnalyzer,
234 db: &impl HirDatabase,
235 ) -> Option<Self> {
236 if source_analyzer.resolve_method_call(method_call).is_some() {
237 return None;
238 }
239 Some(Self::TraitMethod(
240 source_analyzer.type_of(db, &method_call.expr()?)?,
241 method_call.name_ref()?.syntax().to_string(),
242 ))
243 }
244
245 fn for_regular_path(
246 path_under_caret: &ast::Path,
247 source_analyzer: &SourceAnalyzer,
248 db: &impl HirDatabase,
249 ) -> Option<Self> {
250 if source_analyzer.resolve_path(db, path_under_caret).is_some() {
251 return None;
252 }
253
254 let segment = path_under_caret.segment()?;
255 if let Some(qualifier) = path_under_caret.qualifier() {
256 let qualifier_start = qualifier.syntax().descendants().find_map(ast::NameRef::cast)?;
257 let qualifier_start_path =
258 qualifier_start.syntax().ancestors().find_map(ast::Path::cast)?;
259 if let Some(qualifier_start_resolution) =
260 source_analyzer.resolve_path(db, &qualifier_start_path)
261 {
262 let qualifier_resolution = if qualifier_start_path == qualifier {
263 qualifier_start_resolution
264 } else {
265 source_analyzer.resolve_path(db, &qualifier)?
266 };
267 if let PathResolution::Def(ModuleDef::Adt(assoc_item_path)) = qualifier_resolution {
268 Some(ImportCandidate::TraitAssocItem(
269 assoc_item_path.ty(db),
270 segment.syntax().to_string(),
271 ))
272 } else {
273 None
274 }
275 } else {
276 Some(ImportCandidate::QualifierStart(qualifier_start.syntax().to_string()))
277 }
278 } else {
279 Some(ImportCandidate::UnqualifiedName(
280 segment.syntax().descendants().find_map(ast::NameRef::cast)?.syntax().to_string(),
281 ))
282 }
283 }
284}
285
84#[cfg(test)] 286#[cfg(test)]
85mod tests { 287mod tests {
86 use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
87
88 use super::*; 288 use super::*;
289 use crate::helpers::{check_assist, check_assist_not_applicable, check_assist_target};
89 290
90 #[test] 291 #[test]
91 fn applicable_when_found_an_import() { 292 fn applicable_when_found_an_import() {
@@ -290,4 +491,303 @@ mod tests {
290 ", 491 ",
291 ); 492 );
292 } 493 }
494
495 #[test]
496 fn not_applicable_for_imported_function() {
497 check_assist_not_applicable(
498 auto_import,
499 r"
500 pub mod test_mod {
501 pub fn test_function() {}
502 }
503
504 use test_mod::test_function;
505 fn main() {
506 test_function<|>
507 }
508 ",
509 );
510 }
511
512 #[test]
513 fn associated_struct_function() {
514 check_assist(
515 auto_import,
516 r"
517 mod test_mod {
518 pub struct TestStruct {}
519 impl TestStruct {
520 pub fn test_function() {}
521 }
522 }
523
524 fn main() {
525 TestStruct::test_function<|>
526 }
527 ",
528 r"
529 use test_mod::TestStruct;
530
531 mod test_mod {
532 pub struct TestStruct {}
533 impl TestStruct {
534 pub fn test_function() {}
535 }
536 }
537
538 fn main() {
539 TestStruct::test_function<|>
540 }
541 ",
542 );
543 }
544
545 #[test]
546 fn associated_struct_const() {
547 check_assist(
548 auto_import,
549 r"
550 mod test_mod {
551 pub struct TestStruct {}
552 impl TestStruct {
553 const TEST_CONST: u8 = 42;
554 }
555 }
556
557 fn main() {
558 TestStruct::TEST_CONST<|>
559 }
560 ",
561 r"
562 use test_mod::TestStruct;
563
564 mod test_mod {
565 pub struct TestStruct {}
566 impl TestStruct {
567 const TEST_CONST: u8 = 42;
568 }
569 }
570
571 fn main() {
572 TestStruct::TEST_CONST<|>
573 }
574 ",
575 );
576 }
577
578 #[test]
579 fn associated_trait_function() {
580 check_assist(
581 auto_import,
582 r"
583 mod test_mod {
584 pub trait TestTrait {
585 fn test_function();
586 }
587 pub struct TestStruct {}
588 impl TestTrait for TestStruct {
589 fn test_function() {}
590 }
591 }
592
593 fn main() {
594 test_mod::TestStruct::test_function<|>
595 }
596 ",
597 r"
598 use test_mod::TestTrait;
599
600 mod test_mod {
601 pub trait TestTrait {
602 fn test_function();
603 }
604 pub struct TestStruct {}
605 impl TestTrait for TestStruct {
606 fn test_function() {}
607 }
608 }
609
610 fn main() {
611 test_mod::TestStruct::test_function<|>
612 }
613 ",
614 );
615 }
616
617 #[test]
618 fn not_applicable_for_imported_trait_for_function() {
619 check_assist_not_applicable(
620 auto_import,
621 r"
622 mod test_mod {
623 pub trait TestTrait {
624 fn test_function();
625 }
626 pub trait TestTrait2 {
627 fn test_function();
628 }
629 pub enum TestEnum {
630 One,
631 Two,
632 }
633 impl TestTrait2 for TestEnum {
634 fn test_function() {}
635 }
636 impl TestTrait for TestEnum {
637 fn test_function() {}
638 }
639 }
640
641 use test_mod::TestTrait2;
642 fn main() {
643 test_mod::TestEnum::test_function<|>;
644 }
645 ",
646 )
647 }
648
649 #[test]
650 fn associated_trait_const() {
651 check_assist(
652 auto_import,
653 r"
654 mod test_mod {
655 pub trait TestTrait {
656 const TEST_CONST: u8;
657 }
658 pub struct TestStruct {}
659 impl TestTrait for TestStruct {
660 const TEST_CONST: u8 = 42;
661 }
662 }
663
664 fn main() {
665 test_mod::TestStruct::TEST_CONST<|>
666 }
667 ",
668 r"
669 use test_mod::TestTrait;
670
671 mod test_mod {
672 pub trait TestTrait {
673 const TEST_CONST: u8;
674 }
675 pub struct TestStruct {}
676 impl TestTrait for TestStruct {
677 const TEST_CONST: u8 = 42;
678 }
679 }
680
681 fn main() {
682 test_mod::TestStruct::TEST_CONST<|>
683 }
684 ",
685 );
686 }
687
688 #[test]
689 fn not_applicable_for_imported_trait_for_const() {
690 check_assist_not_applicable(
691 auto_import,
692 r"
693 mod test_mod {
694 pub trait TestTrait {
695 const TEST_CONST: u8;
696 }
697 pub trait TestTrait2 {
698 const TEST_CONST: f64;
699 }
700 pub enum TestEnum {
701 One,
702 Two,
703 }
704 impl TestTrait2 for TestEnum {
705 const TEST_CONST: f64 = 42.0;
706 }
707 impl TestTrait for TestEnum {
708 const TEST_CONST: u8 = 42;
709 }
710 }
711
712 use test_mod::TestTrait2;
713 fn main() {
714 test_mod::TestEnum::TEST_CONST<|>;
715 }
716 ",
717 )
718 }
719
720 #[test]
721 fn trait_method() {
722 check_assist(
723 auto_import,
724 r"
725 mod test_mod {
726 pub trait TestTrait {
727 fn test_method(&self);
728 }
729 pub struct TestStruct {}
730 impl TestTrait for TestStruct {
731 fn test_method(&self) {}
732 }
733 }
734
735 fn main() {
736 let test_struct = test_mod::TestStruct {};
737 test_struct.test_meth<|>od()
738 }
739 ",
740 r"
741 use test_mod::TestTrait;
742
743 mod test_mod {
744 pub trait TestTrait {
745 fn test_method(&self);
746 }
747 pub struct TestStruct {}
748 impl TestTrait for TestStruct {
749 fn test_method(&self) {}
750 }
751 }
752
753 fn main() {
754 let test_struct = test_mod::TestStruct {};
755 test_struct.test_meth<|>od()
756 }
757 ",
758 );
759 }
760
761 #[test]
762 fn not_applicable_for_imported_trait_for_method() {
763 check_assist_not_applicable(
764 auto_import,
765 r"
766 mod test_mod {
767 pub trait TestTrait {
768 fn test_method(&self);
769 }
770 pub trait TestTrait2 {
771 fn test_method(&self);
772 }
773 pub enum TestEnum {
774 One,
775 Two,
776 }
777 impl TestTrait2 for TestEnum {
778 fn test_method(&self) {}
779 }
780 impl TestTrait for TestEnum {
781 fn test_method(&self) {}
782 }
783 }
784
785 use test_mod::TestTrait2;
786 fn main() {
787 let one = test_mod::TestEnum::One;
788 one.test<|>_method();
789 }
790 ",
791 )
792 }
293} 793}
diff --git a/crates/ra_assists/src/handlers/fill_match_arms.rs b/crates/ra_assists/src/handlers/fill_match_arms.rs
index 0908fc246..ae2437ed3 100644
--- a/crates/ra_assists/src/handlers/fill_match_arms.rs
+++ b/crates/ra_assists/src/handlers/fill_match_arms.rs
@@ -75,10 +75,10 @@ pub(crate) fn fill_match_arms(ctx: AssistCtx) -> Option<Assist> {
75} 75}
76 76
77fn is_trivial(arm: &ast::MatchArm) -> bool { 77fn is_trivial(arm: &ast::MatchArm) -> bool {
78 arm.pats().any(|pat| match pat { 78 match arm.pat() {
79 ast::Pat::PlaceholderPat(..) => true, 79 Some(ast::Pat::PlaceholderPat(..)) => true,
80 _ => false, 80 _ => false,
81 }) 81 }
82} 82}
83 83
84fn resolve_enum_def( 84fn resolve_enum_def(
diff --git a/crates/ra_assists/src/handlers/merge_match_arms.rs b/crates/ra_assists/src/handlers/merge_match_arms.rs
index 670614dd8..b2a194cb5 100644
--- a/crates/ra_assists/src/handlers/merge_match_arms.rs
+++ b/crates/ra_assists/src/handlers/merge_match_arms.rs
@@ -75,7 +75,7 @@ pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> {
75 } else { 75 } else {
76 arms_to_merge 76 arms_to_merge
77 .iter() 77 .iter()
78 .flat_map(ast::MatchArm::pats) 78 .filter_map(ast::MatchArm::pat)
79 .map(|x| x.syntax().to_string()) 79 .map(|x| x.syntax().to_string())
80 .collect::<Vec<String>>() 80 .collect::<Vec<String>>()
81 .join(" | ") 81 .join(" | ")
@@ -96,10 +96,10 @@ pub(crate) fn merge_match_arms(ctx: AssistCtx) -> Option<Assist> {
96} 96}
97 97
98fn contains_placeholder(a: &ast::MatchArm) -> bool { 98fn contains_placeholder(a: &ast::MatchArm) -> bool {
99 a.pats().any(|x| match x { 99 match a.pat() {
100 ra_syntax::ast::Pat::PlaceholderPat(..) => true, 100 Some(ra_syntax::ast::Pat::PlaceholderPat(..)) => true,
101 _ => false, 101 _ => false,
102 }) 102 }
103} 103}
104 104
105fn next_arm(arm: &ast::MatchArm) -> Option<ast::MatchArm> { 105fn next_arm(arm: &ast::MatchArm) -> Option<ast::MatchArm> {
diff --git a/crates/ra_assists/src/handlers/move_guard.rs b/crates/ra_assists/src/handlers/move_guard.rs
index 2b91ce7c4..a61a2ba3e 100644
--- a/crates/ra_assists/src/handlers/move_guard.rs
+++ b/crates/ra_assists/src/handlers/move_guard.rs
@@ -90,7 +90,7 @@ pub(crate) fn move_guard_to_arm_body(ctx: AssistCtx) -> Option<Assist> {
90// ``` 90// ```
91pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> { 91pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
92 let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?; 92 let match_arm: MatchArm = ctx.find_node_at_offset::<MatchArm>()?;
93 let last_match_pat = match_arm.pats().last()?; 93 let match_pat = match_arm.pat()?;
94 94
95 let arm_body = match_arm.expr()?; 95 let arm_body = match_arm.expr()?;
96 let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?; 96 let if_expr: IfExpr = IfExpr::cast(arm_body.syntax().clone())?;
@@ -122,8 +122,8 @@ pub(crate) fn move_arm_cond_to_match_guard(ctx: AssistCtx) -> Option<Assist> {
122 _ => edit.replace(if_expr.syntax().text_range(), then_block.syntax().text()), 122 _ => edit.replace(if_expr.syntax().text_range(), then_block.syntax().text()),
123 } 123 }
124 124
125 edit.insert(last_match_pat.syntax().text_range().end(), buf); 125 edit.insert(match_pat.syntax().text_range().end(), buf);
126 edit.set_cursor(last_match_pat.syntax().text_range().end() + TextUnit::from(1)); 126 edit.set_cursor(match_pat.syntax().text_range().end() + TextUnit::from(1));
127 }, 127 },
128 ) 128 )
129} 129}
diff --git a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
index b70c88ec2..eac452413 100644
--- a/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
+++ b/crates/ra_assists/src/handlers/replace_qualified_name_with_use.rs
@@ -431,7 +431,12 @@ fn best_action_for_target(
431 .find(|n| n.text_range().start() < anchor.text_range().start()) 431 .find(|n| n.text_range().start() < anchor.text_range().start())
432 .or_else(|| Some(anchor)); 432 .or_else(|| Some(anchor));
433 433
434 ImportAction::add_new_use(anchor, false) 434 let add_after_anchor = anchor
435 .clone()
436 .and_then(ast::Attr::cast)
437 .map(|attr| attr.kind() == ast::AttrKind::Inner)
438 .unwrap_or(false);
439 ImportAction::add_new_use(anchor, add_after_anchor)
435 } 440 }
436 } 441 }
437} 442}
@@ -962,4 +967,26 @@ mod foo {
962 ", 967 ",
963 ); 968 );
964 } 969 }
970
971 #[test]
972 fn inserts_imports_after_inner_attributes() {
973 check_assist(
974 replace_qualified_name_with_use,
975 "
976#![allow(dead_code)]
977
978fn main() {
979 std::fmt::Debug<|>
980}
981 ",
982 "
983#![allow(dead_code)]
984use std::fmt::Debug;
985
986fn main() {
987 Debug<|>
988}
989 ",
990 );
991 }
965} 992}
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml
index bcd408421..53d4876f6 100644
--- a/crates/ra_cli/Cargo.toml
+++ b/crates/ra_cli/Cargo.toml
@@ -6,8 +6,10 @@ authors = ["rust-analyzer developers"]
6publish = false 6publish = false
7 7
8[dependencies] 8[dependencies]
9itertools = "0.8.0"
9pico-args = "0.3.0" 10pico-args = "0.3.0"
10env_logger = { version = "0.7.1", default-features = false } 11env_logger = { version = "0.7.1", default-features = false }
12rand = { version = "0.7.0", features = ["small_rng"] }
11 13
12ra_syntax = { path = "../ra_syntax" } 14ra_syntax = { path = "../ra_syntax" }
13ra_ide = { path = "../ra_ide" } 15ra_ide = { path = "../ra_ide" }
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs
index 5485a38ff..4835a68ce 100644
--- a/crates/ra_cli/src/analysis_bench.rs
+++ b/crates/ra_cli/src/analysis_bench.rs
@@ -2,6 +2,7 @@
2 2
3use std::{ 3use std::{
4 path::{Path, PathBuf}, 4 path::{Path, PathBuf},
5 str::FromStr,
5 sync::Arc, 6 sync::Arc,
6 time::Instant, 7 time::Instant,
7}; 8};
@@ -14,12 +15,35 @@ use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol};
14 15
15use crate::Result; 16use crate::Result;
16 17
18pub(crate) struct Position {
19 path: PathBuf,
20 line: u32,
21 column: u32,
22}
23
24impl FromStr for Position {
25 type Err = Box<dyn std::error::Error + Send + Sync>;
26 fn from_str(s: &str) -> Result<Self> {
27 let (path_line, column) = rsplit_at_char(s, ':')?;
28 let (path, line) = rsplit_at_char(path_line, ':')?;
29 Ok(Position { path: path.into(), line: line.parse()?, column: column.parse()? })
30 }
31}
32
33fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
34 let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
35 Ok((&s[..idx], &s[idx + 1..]))
36}
37
17pub(crate) enum Op { 38pub(crate) enum Op {
18 Highlight { path: PathBuf }, 39 Highlight { path: PathBuf },
19 Complete { path: PathBuf, line: u32, column: u32 }, 40 Complete(Position),
41 GotoDef(Position),
20} 42}
21 43
22pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> { 44pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
45 ra_prof::init();
46
23 let start = Instant::now(); 47 let start = Instant::now();
24 eprint!("loading: "); 48 eprint!("loading: ");
25 let (mut host, roots) = ra_batch::load_cargo(path)?; 49 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -29,7 +53,7 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
29 let file_id = { 53 let file_id = {
30 let path = match &op { 54 let path = match &op {
31 Op::Highlight { path } => path, 55 Op::Highlight { path } => path,
32 Op::Complete { path, .. } => path, 56 Op::Complete(pos) | Op::GotoDef(pos) => &pos.path,
33 }; 57 };
34 let path = std::env::current_dir()?.join(path).canonicalize()?; 58 let path = std::env::current_dir()?.join(path).canonicalize()?;
35 roots 59 roots
@@ -49,7 +73,7 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
49 .ok_or_else(|| format!("Can't find {:?}", path))? 73 .ok_or_else(|| format!("Can't find {:?}", path))?
50 }; 74 };
51 75
52 match op { 76 match &op {
53 Op::Highlight { .. } => { 77 Op::Highlight { .. } => {
54 let res = do_work(&mut host, file_id, |analysis| { 78 let res = do_work(&mut host, file_id, |analysis| {
55 analysis.diagnostics(file_id).unwrap(); 79 analysis.diagnostics(file_id).unwrap();
@@ -59,16 +83,30 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
59 println!("\n{}", res); 83 println!("\n{}", res);
60 } 84 }
61 } 85 }
62 Op::Complete { line, column, .. } => { 86 Op::Complete(pos) | Op::GotoDef(pos) => {
87 let is_completion = match op {
88 Op::Complete(..) => true,
89 _ => false,
90 };
91
63 let offset = host 92 let offset = host
64 .analysis() 93 .analysis()
65 .file_line_index(file_id)? 94 .file_line_index(file_id)?
66 .offset(LineCol { line, col_utf16: column }); 95 .offset(LineCol { line: pos.line - 1, col_utf16: pos.column });
67 let file_postion = FilePosition { file_id, offset }; 96 let file_postion = FilePosition { file_id, offset };
68 97
69 let res = do_work(&mut host, file_id, |analysis| analysis.completions(file_postion)); 98 if is_completion {
70 if verbose { 99 let res =
71 println!("\n{:#?}", res); 100 do_work(&mut host, file_id, |analysis| analysis.completions(file_postion));
101 if verbose {
102 println!("\n{:#?}", res);
103 }
104 } else {
105 let res =
106 do_work(&mut host, file_id, |analysis| analysis.goto_definition(file_postion));
107 if verbose {
108 println!("\n{:#?}", res);
109 }
72 } 110 }
73 } 111 }
74 } 112 }
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index 833235bff..6d2dd34c6 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -2,6 +2,9 @@
2 2
3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; 3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
4 4
5use itertools::Itertools;
6use rand::{seq::SliceRandom, thread_rng};
7
5use hir::{ 8use hir::{
6 db::{DefDatabase, HirDatabase}, 9 db::{DefDatabase, HirDatabase},
7 AssocItem, Crate, HasSource, HirDisplay, ModuleDef, 10 AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
@@ -19,6 +22,7 @@ pub fn run(
19 path: &Path, 22 path: &Path,
20 only: Option<&str>, 23 only: Option<&str>,
21 with_deps: bool, 24 with_deps: bool,
25 randomize: bool,
22) -> Result<()> { 26) -> Result<()> {
23 let db_load_time = Instant::now(); 27 let db_load_time = Instant::now();
24 let (mut host, roots) = ra_batch::load_cargo(path)?; 28 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -41,7 +45,11 @@ pub fn run(
41 }) 45 })
42 .collect::<HashSet<_>>(); 46 .collect::<HashSet<_>>();
43 47
44 for krate in Crate::all(db) { 48 let mut krates = Crate::all(db);
49 if randomize {
50 krates.shuffle(&mut thread_rng());
51 }
52 for krate in krates {
45 let module = krate.root_module(db).expect("crate without root module"); 53 let module = krate.root_module(db).expect("crate without root module");
46 let file_id = module.definition_source(db).file_id; 54 let file_id = module.definition_source(db).file_id;
47 if members.contains(&db.file_source_root(file_id.original_file(db))) { 55 if members.contains(&db.file_source_root(file_id.original_file(db))) {
@@ -50,6 +58,10 @@ pub fn run(
50 } 58 }
51 } 59 }
52 60
61 if randomize {
62 visit_queue.shuffle(&mut thread_rng());
63 }
64
53 println!("Crates in this dir: {}", num_crates); 65 println!("Crates in this dir: {}", num_crates);
54 let mut num_decls = 0; 66 let mut num_decls = 0;
55 let mut funcs = Vec::new(); 67 let mut funcs = Vec::new();
@@ -79,10 +91,14 @@ pub fn run(
79 println!("Total functions: {}", funcs.len()); 91 println!("Total functions: {}", funcs.len());
80 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage()); 92 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage());
81 93
94 if randomize {
95 funcs.shuffle(&mut thread_rng());
96 }
97
82 let inference_time = Instant::now(); 98 let inference_time = Instant::now();
83 let mut bar = match verbosity { 99 let mut bar = match verbosity {
84 Verbosity::Verbose | Verbosity::Normal => ProgressReport::new(funcs.len() as u64), 100 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
85 Verbosity::Quiet => ProgressReport::hidden(), 101 _ => ProgressReport::new(funcs.len() as u64),
86 }; 102 };
87 103
88 bar.tick(); 104 bar.tick();
@@ -92,7 +108,20 @@ pub fn run(
92 let mut num_type_mismatches = 0; 108 let mut num_type_mismatches = 0;
93 for f in funcs { 109 for f in funcs {
94 let name = f.name(db); 110 let name = f.name(db);
95 let mut msg = format!("processing: {}", name); 111 let full_name = f
112 .module(db)
113 .path_to_root(db)
114 .into_iter()
115 .rev()
116 .filter_map(|it| it.name(db))
117 .chain(Some(f.name(db)))
118 .join("::");
119 if let Some(only_name) = only {
120 if name.to_string() != only_name && full_name != only_name {
121 continue;
122 }
123 }
124 let mut msg = format!("processing: {}", full_name);
96 if verbosity.is_verbose() { 125 if verbosity.is_verbose() {
97 let src = f.source(db); 126 let src = f.source(db);
98 let original_file = src.file_id.original_file(db); 127 let original_file = src.file_id.original_file(db);
@@ -100,15 +129,15 @@ pub fn run(
100 let syntax_range = src.value.syntax().text_range(); 129 let syntax_range = src.value.syntax().text_range();
101 write!(msg, " ({:?} {})", path, syntax_range).unwrap(); 130 write!(msg, " ({:?} {})", path, syntax_range).unwrap();
102 } 131 }
103 bar.set_message(&msg); 132 if verbosity.is_spammy() {
104 if let Some(only_name) = only { 133 bar.println(format!("{}", msg));
105 if name.to_string() != only_name {
106 continue;
107 }
108 } 134 }
135 bar.set_message(&msg);
109 let f_id = FunctionId::from(f); 136 let f_id = FunctionId::from(f);
110 let body = db.body(f_id.into()); 137 let body = db.body(f_id.into());
111 let inference_result = db.infer(f_id.into()); 138 let inference_result = db.infer(f_id.into());
139 let (previous_exprs, previous_unknown, previous_partially_unknown) =
140 (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
112 for (expr_id, _) in body.exprs.iter() { 141 for (expr_id, _) in body.exprs.iter() {
113 let ty = &inference_result[expr_id]; 142 let ty = &inference_result[expr_id];
114 num_exprs += 1; 143 num_exprs += 1;
@@ -125,6 +154,33 @@ pub fn run(
125 num_exprs_partially_unknown += 1; 154 num_exprs_partially_unknown += 1;
126 } 155 }
127 } 156 }
157 if only.is_some() && verbosity.is_spammy() {
158 // in super-verbose mode for just one function, we print every single expression
159 let (_, sm) = db.body_with_source_map(f_id.into());
160 let src = sm.expr_syntax(expr_id);
161 if let Some(src) = src {
162 let original_file = src.file_id.original_file(db);
163 let line_index = host.analysis().file_line_index(original_file).unwrap();
164 let text_range = src.value.either(
165 |it| it.syntax_node_ptr().range(),
166 |it| it.syntax_node_ptr().range(),
167 );
168 let (start, end) = (
169 line_index.line_col(text_range.start()),
170 line_index.line_col(text_range.end()),
171 );
172 bar.println(format!(
173 "{}:{}-{}:{}: {}",
174 start.line + 1,
175 start.col_utf16,
176 end.line + 1,
177 end.col_utf16,
178 ty.display(db)
179 ));
180 } else {
181 bar.println(format!("unknown location: {}", ty.display(db)));
182 }
183 }
128 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { 184 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
129 num_type_mismatches += 1; 185 num_type_mismatches += 1;
130 if verbosity.is_verbose() { 186 if verbosity.is_verbose() {
@@ -164,6 +220,15 @@ pub fn run(
164 } 220 }
165 } 221 }
166 } 222 }
223 if verbosity.is_spammy() {
224 bar.println(format!(
225 "In {}: {} exprs, {} unknown, {} partial",
226 full_name,
227 num_exprs - previous_exprs,
228 num_exprs_unknown - previous_unknown,
229 num_exprs_partially_unknown - previous_partially_unknown
230 ));
231 }
167 bar.inc(1); 232 bar.inc(1);
168 } 233 }
169 bar.finish_and_clear(); 234 bar.finish_and_clear();
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 806612c2c..750cbab86 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -16,6 +16,7 @@ type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
16 16
17#[derive(Clone, Copy)] 17#[derive(Clone, Copy)]
18pub enum Verbosity { 18pub enum Verbosity {
19 Spammy,
19 Verbose, 20 Verbose,
20 Normal, 21 Normal,
21 Quiet, 22 Quiet,
@@ -24,7 +25,13 @@ pub enum Verbosity {
24impl Verbosity { 25impl Verbosity {
25 fn is_verbose(self) -> bool { 26 fn is_verbose(self) -> bool {
26 match self { 27 match self {
27 Verbosity::Verbose => true, 28 Verbosity::Verbose | Verbosity::Spammy => true,
29 _ => false,
30 }
31 }
32 fn is_spammy(self) -> bool {
33 match self {
34 Verbosity::Spammy => true,
28 _ => false, 35 _ => false,
29 } 36 }
30 } 37 }
@@ -86,14 +93,18 @@ fn main() -> Result<()> {
86 return Ok(()); 93 return Ok(());
87 } 94 }
88 let verbosity = match ( 95 let verbosity = match (
96 matches.contains(["-vv", "--spammy"]),
89 matches.contains(["-v", "--verbose"]), 97 matches.contains(["-v", "--verbose"]),
90 matches.contains(["-q", "--quiet"]), 98 matches.contains(["-q", "--quiet"]),
91 ) { 99 ) {
92 (false, false) => Verbosity::Normal, 100 (true, _, true) => Err("Invalid flags: -q conflicts with -vv")?,
93 (false, true) => Verbosity::Quiet, 101 (true, _, false) => Verbosity::Spammy,
94 (true, false) => Verbosity::Verbose, 102 (false, false, false) => Verbosity::Normal,
95 (true, true) => Err("Invalid flags: -q conflicts with -v")?, 103 (false, false, true) => Verbosity::Quiet,
104 (false, true, false) => Verbosity::Verbose,
105 (false, true, true) => Err("Invalid flags: -q conflicts with -v")?,
96 }; 106 };
107 let randomize = matches.contains("--randomize");
97 let memory_usage = matches.contains("--memory-usage"); 108 let memory_usage = matches.contains("--memory-usage");
98 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; 109 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?;
99 let with_deps: bool = matches.contains("--with-deps"); 110 let with_deps: bool = matches.contains("--with-deps");
@@ -111,6 +122,7 @@ fn main() -> Result<()> {
111 path.as_ref(), 122 path.as_ref(),
112 only.as_ref().map(String::as_ref), 123 only.as_ref().map(String::as_ref),
113 with_deps, 124 with_deps,
125 randomize,
114 )?; 126 )?;
115 } 127 }
116 "analysis-bench" => { 128 "analysis-bench" => {
@@ -120,25 +132,16 @@ fn main() -> Result<()> {
120 } 132 }
121 let verbose = matches.contains(["-v", "--verbose"]); 133 let verbose = matches.contains(["-v", "--verbose"]);
122 let path: String = matches.opt_value_from_str("--path")?.unwrap_or_default(); 134 let path: String = matches.opt_value_from_str("--path")?.unwrap_or_default();
123 let highlight_path = matches.opt_value_from_str("--highlight")?; 135 let highlight_path: Option<String> = matches.opt_value_from_str("--highlight")?;
124 let complete_path = matches.opt_value_from_str("--complete")?; 136 let complete_path: Option<String> = matches.opt_value_from_str("--complete")?;
125 if highlight_path.is_some() && complete_path.is_some() { 137 let goto_def_path: Option<String> = matches.opt_value_from_str("--goto-def")?;
126 panic!("either --highlight or --complete must be set, not both") 138 let op = match (highlight_path, complete_path, goto_def_path) {
127 } 139 (Some(path), None, None) => analysis_bench::Op::Highlight { path: path.into() },
128 let op = if let Some(path) = highlight_path { 140 (None, Some(position), None) => analysis_bench::Op::Complete(position.parse()?),
129 let path: String = path; 141 (None, None, Some(position)) => analysis_bench::Op::GotoDef(position.parse()?),
130 analysis_bench::Op::Highlight { path: path.into() } 142 _ => panic!(
131 } else if let Some(path_line_col) = complete_path { 143 "exactly one of `--highlight`, `--complete` or `--goto-def` must be set"
132 let path_line_col: String = path_line_col; 144 ),
133 let (path_line, column) = rsplit_at_char(path_line_col.as_str(), ':')?;
134 let (path, line) = rsplit_at_char(path_line, ':')?;
135 analysis_bench::Op::Complete {
136 path: path.into(),
137 line: line.parse()?,
138 column: column.parse()?,
139 }
140 } else {
141 panic!("either --highlight or --complete must be set")
142 }; 145 };
143 matches.finish().or_else(handle_extra_flags)?; 146 matches.finish().or_else(handle_extra_flags)?;
144 analysis_bench::run(verbose, path.as_ref(), op)?; 147 analysis_bench::run(verbose, path.as_ref(), op)?;
@@ -171,8 +174,3 @@ fn read_stdin() -> Result<String> {
171 std::io::stdin().read_to_string(&mut buff)?; 174 std::io::stdin().read_to_string(&mut buff)?;
172 Ok(buff) 175 Ok(buff)
173} 176}
174
175fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
176 let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
177 Ok((&s[..idx], &s[idx + 1..]))
178}
diff --git a/crates/ra_hir/src/code_model.rs b/crates/ra_hir/src/code_model.rs
index 4d9641728..a56b8ab04 100644
--- a/crates/ra_hir/src/code_model.rs
+++ b/crates/ra_hir/src/code_model.rs
@@ -10,9 +10,9 @@ use hir_def::{
10 per_ns::PerNs, 10 per_ns::PerNs,
11 resolver::HasResolver, 11 resolver::HasResolver,
12 type_ref::{Mutability, TypeRef}, 12 type_ref::{Mutability, TypeRef},
13 AdtId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule, ImplId, 13 AdtId, AssocContainerId, ConstId, DefWithBodyId, EnumId, FunctionId, GenericDefId, HasModule,
14 LocalEnumVariantId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId, StructId, 14 ImplId, LocalEnumVariantId, LocalModuleId, LocalStructFieldId, Lookup, ModuleId, StaticId,
15 TraitId, TypeAliasId, TypeParamId, UnionId, 15 StructId, TraitId, TypeAliasId, TypeParamId, UnionId,
16}; 16};
17use hir_expand::{ 17use hir_expand::{
18 diagnostics::DiagnosticSink, 18 diagnostics::DiagnosticSink,
@@ -25,7 +25,10 @@ use hir_ty::{
25}; 25};
26use ra_db::{CrateId, Edition, FileId}; 26use ra_db::{CrateId, Edition, FileId};
27use ra_prof::profile; 27use ra_prof::profile;
28use ra_syntax::ast::{self, AttrsOwner}; 28use ra_syntax::{
29 ast::{self, AttrsOwner},
30 AstNode,
31};
29 32
30use crate::{ 33use crate::{
31 db::{DefDatabase, HirDatabase}, 34 db::{DefDatabase, HirDatabase},
@@ -119,7 +122,9 @@ impl_froms!(
119 BuiltinType 122 BuiltinType
120); 123);
121 124
122pub use hir_def::{attr::Attrs, item_scope::ItemInNs, visibility::Visibility, AssocItemId}; 125pub use hir_def::{
126 attr::Attrs, item_scope::ItemInNs, visibility::Visibility, AssocItemId, AssocItemLoc,
127};
123use rustc_hash::FxHashSet; 128use rustc_hash::FxHashSet;
124 129
125impl Module { 130impl Module {
@@ -639,17 +644,49 @@ pub struct MacroDef {
639 pub(crate) id: MacroDefId, 644 pub(crate) id: MacroDefId,
640} 645}
641 646
647/// Invariant: `inner.as_assoc_item(db).is_some()`
648/// We do not actively enforce this invariant.
642#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] 649#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
643pub enum AssocItem { 650pub enum AssocItem {
644 Function(Function), 651 Function(Function),
645 Const(Const), 652 Const(Const),
646 TypeAlias(TypeAlias), 653 TypeAlias(TypeAlias),
647} 654}
648// FIXME: not every function, ... is actually an assoc item. maybe we should make 655pub enum AssocItemContainer {
649// sure that you can only turn actual assoc items into AssocItems. This would 656 Trait(Trait),
650// require not implementing From, and instead having some checked way of 657 ImplBlock(ImplBlock),
651// casting them, and somehow making the constructors private, which would be annoying. 658}
652impl_froms!(AssocItem: Function, Const, TypeAlias); 659pub trait AsAssocItem {
660 fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem>;
661}
662
663impl AsAssocItem for Function {
664 fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem> {
665 as_assoc_item(db, AssocItem::Function, self.id)
666 }
667}
668impl AsAssocItem for Const {
669 fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem> {
670 as_assoc_item(db, AssocItem::Const, self.id)
671 }
672}
673impl AsAssocItem for TypeAlias {
674 fn as_assoc_item(self, db: &impl DefDatabase) -> Option<AssocItem> {
675 as_assoc_item(db, AssocItem::TypeAlias, self.id)
676 }
677}
678fn as_assoc_item<ID, DEF, CTOR, AST>(db: &impl DefDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
679where
680 ID: Lookup<Data = AssocItemLoc<AST>>,
681 DEF: From<ID>,
682 CTOR: FnOnce(DEF) -> AssocItem,
683 AST: AstNode,
684{
685 match id.lookup(db).container {
686 AssocContainerId::TraitId(_) | AssocContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
687 AssocContainerId::ContainerId(_) => None,
688 }
689}
653 690
654impl AssocItem { 691impl AssocItem {
655 pub fn module(self, db: &impl DefDatabase) -> Module { 692 pub fn module(self, db: &impl DefDatabase) -> Module {
@@ -659,6 +696,18 @@ impl AssocItem {
659 AssocItem::TypeAlias(t) => t.module(db), 696 AssocItem::TypeAlias(t) => t.module(db),
660 } 697 }
661 } 698 }
699 pub fn container(self, db: &impl DefDatabase) -> AssocItemContainer {
700 let container = match self {
701 AssocItem::Function(it) => it.id.lookup(db).container,
702 AssocItem::Const(it) => it.id.lookup(db).container,
703 AssocItem::TypeAlias(it) => it.id.lookup(db).container,
704 };
705 match container {
706 AssocContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
707 AssocContainerId::ImplId(id) => AssocItemContainer::ImplBlock(id.into()),
708 AssocContainerId::ContainerId(_) => panic!("invalid AssocItem"),
709 }
710 }
662} 711}
663 712
664#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)] 713#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
@@ -769,6 +818,7 @@ impl TypeParam {
769 } 818 }
770} 819}
771 820
821// FIXME: rename from `ImplBlock` to `Impl`
772#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] 822#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
773pub struct ImplBlock { 823pub struct ImplBlock {
774 pub(crate) id: ImplId, 824 pub(crate) id: ImplId,
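
The new AsAssocItem trait replaces the old unconditional impl_froms! conversions: a free function, const, or type alias now yields None instead of silently becoming an AssocItem. Below is a minimal, self-contained sketch of that checked-conversion pattern; the Container enum and the simplified Function/AssocItem types are stand-ins for illustration, not the real hir API.

#[derive(Clone, Copy, Debug)]
enum Container {
    Trait,
    Impl,
    Module, // a plain module is not an associated-item container
}

#[derive(Clone, Copy, Debug)]
struct Function {
    container: Container,
}

#[derive(Debug)]
enum AssocItem {
    Function(Function),
}

trait AsAssocItem {
    fn as_assoc_item(self) -> Option<AssocItem>;
}

impl AsAssocItem for Function {
    fn as_assoc_item(self) -> Option<AssocItem> {
        match self.container {
            // only items inside a trait or an impl are associated items
            Container::Trait | Container::Impl => Some(AssocItem::Function(self)),
            Container::Module => None,
        }
    }
}

fn main() {
    let free_fn = Function { container: Container::Module };
    let method = Function { container: Container::Impl };
    let trait_fn = Function { container: Container::Trait };
    assert!(free_fn.as_assoc_item().is_none());
    assert!(method.as_assoc_item().is_some());
    assert!(trait_fn.as_assoc_item().is_some());
}
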
diff --git a/crates/ra_hir/src/lib.rs b/crates/ra_hir/src/lib.rs
index e1c7b7a20..5cd965f7a 100644
--- a/crates/ra_hir/src/lib.rs
+++ b/crates/ra_hir/src/lib.rs
@@ -39,10 +39,10 @@ mod has_source;
39 39
40pub use crate::{ 40pub use crate::{
41 code_model::{ 41 code_model::{
42 Adt, AssocItem, AttrDef, Const, Crate, CrateDependency, DefWithBody, Docs, Enum, 42 Adt, AsAssocItem, AssocItem, AssocItemContainer, AttrDef, Const, Crate, CrateDependency,
43 EnumVariant, FieldSource, Function, GenericDef, HasAttrs, HasVisibility, ImplBlock, Local, 43 DefWithBody, Docs, Enum, EnumVariant, FieldSource, Function, GenericDef, HasAttrs,
44 MacroDef, Module, ModuleDef, ScopeDef, Static, Struct, StructField, Trait, Type, TypeAlias, 44 HasVisibility, ImplBlock, Local, MacroDef, Module, ModuleDef, ScopeDef, Static, Struct,
45 TypeParam, Union, VariantDef, 45 StructField, Trait, Type, TypeAlias, TypeParam, Union, VariantDef,
46 }, 46 },
47 has_source::HasSource, 47 has_source::HasSource,
48 source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer}, 48 source_analyzer::{PathResolution, ScopeEntryWithSyntax, SourceAnalyzer},
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index bb9a35c5d..94d5b4cfd 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -15,11 +15,9 @@ use hir_def::{
15 }, 15 },
16 expr::{ExprId, PatId}, 16 expr::{ExprId, PatId},
17 resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs}, 17 resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
18 DefWithBodyId, TraitId, 18 AsMacroCall, DefWithBodyId, TraitId,
19};
20use hir_expand::{
21 hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
22}; 19};
20use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
23use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; 21use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
24use ra_syntax::{ 22use ra_syntax::{
25 ast::{self, AstNode}, 23 ast::{self, AstNode},
@@ -363,12 +361,10 @@ impl SourceAnalyzer {
363 db: &impl HirDatabase, 361 db: &impl HirDatabase,
364 macro_call: InFile<&ast::MacroCall>, 362 macro_call: InFile<&ast::MacroCall>,
365 ) -> Option<Expansion> { 363 ) -> Option<Expansion> {
366        let def = self.resolve_macro_call(db, macro_call)?.id;
367        let ast_id = AstId::new(
368            macro_call.file_id,
369            db.ast_id_map(macro_call.file_id).ast_id(macro_call.value),
370        );
371        Some(Expansion { macro_call_id: def.as_call_id(db, MacroCallKind::FnLike(ast_id)) })
364        let macro_call_id = macro_call.as_call_id(db, |path| {
365            self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into())
366        })?;
367        Some(Expansion { macro_call_id })
372 } 368 }
373} 369}
374 370
diff --git a/crates/ra_hir_def/Cargo.toml b/crates/ra_hir_def/Cargo.toml
index 1efa00fe0..6b9be9948 100644
--- a/crates/ra_hir_def/Cargo.toml
+++ b/crates/ra_hir_def/Cargo.toml
@@ -14,6 +14,7 @@ rustc-hash = "1.0"
14either = "1.5" 14either = "1.5"
15anymap = "0.12" 15anymap = "0.12"
16drop_bomb = "0.1.4" 16drop_bomb = "0.1.4"
17itertools = "0.8.2"
17 18
18ra_arena = { path = "../ra_arena" } 19ra_arena = { path = "../ra_arena" }
19ra_db = { path = "../ra_db" } 20ra_db = { path = "../ra_db" }
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index 142c52d35..010d35e55 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -7,9 +7,7 @@ use std::{mem, ops::Index, sync::Arc};
7 7
8use drop_bomb::DropBomb; 8use drop_bomb::DropBomb;
9use either::Either; 9use either::Either;
10use hir_expand::{ 10use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId};
11 ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroCallKind, MacroDefId,
12};
13use ra_arena::{map::ArenaMap, Arena}; 11use ra_arena::{map::ArenaMap, Arena};
14use ra_prof::profile; 12use ra_prof::profile;
15use ra_syntax::{ast, AstNode, AstPtr}; 13use ra_syntax::{ast, AstNode, AstPtr};
@@ -23,7 +21,7 @@ use crate::{
23 nameres::CrateDefMap, 21 nameres::CrateDefMap,
24 path::{ModPath, Path}, 22 path::{ModPath, Path},
25 src::HasSource, 23 src::HasSource,
26 DefWithBodyId, HasModule, Lookup, ModuleId, 24 AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
27}; 25};
28 26
29pub(crate) struct Expander { 27pub(crate) struct Expander {
@@ -51,30 +49,26 @@ impl Expander {
51 db: &DB, 49 db: &DB,
52 macro_call: ast::MacroCall, 50 macro_call: ast::MacroCall,
53 ) -> Option<(Mark, T)> { 51 ) -> Option<(Mark, T)> {
54 let ast_id = AstId::new( 52 let macro_call = InFile::new(self.current_file_id, &macro_call);
55 self.current_file_id, 53
56 db.ast_id_map(self.current_file_id).ast_id(&macro_call), 54 if let Some(call_id) =
57 ); 55 macro_call.as_call_id(db, |path| self.resolve_path_as_macro(db, &path))
58 56 {
59 if let Some(path) = macro_call.path().and_then(|path| self.parse_mod_path(path)) { 57 let file_id = call_id.as_file();
60 if let Some(def) = self.resolve_path_as_macro(db, &path) { 58 if let Some(node) = db.parse_or_expand(file_id) {
61 let call_id = def.as_call_id(db, MacroCallKind::FnLike(ast_id)); 59 if let Some(expr) = T::cast(node) {
62 let file_id = call_id.as_file(); 60 log::debug!("macro expansion {:#?}", expr.syntax());
63 if let Some(node) = db.parse_or_expand(file_id) { 61
64 if let Some(expr) = T::cast(node) { 62 let mark = Mark {
65 log::debug!("macro expansion {:#?}", expr.syntax()); 63 file_id: self.current_file_id,
66 64 ast_id_map: mem::take(&mut self.ast_id_map),
67 let mark = Mark { 65 bomb: DropBomb::new("expansion mark dropped"),
68 file_id: self.current_file_id, 66 };
69 ast_id_map: mem::take(&mut self.ast_id_map), 67 self.hygiene = Hygiene::new(db, file_id);
70 bomb: DropBomb::new("expansion mark dropped"), 68 self.current_file_id = file_id;
71 }; 69 self.ast_id_map = db.ast_id_map(file_id);
72 self.hygiene = Hygiene::new(db, file_id); 70
73 self.current_file_id = file_id; 71 return Some((mark, expr));
74 self.ast_id_map = db.ast_id_map(file_id);
75
76 return Some((mark, expr));
77 }
78 } 72 }
79 } 73 }
80 } 74 }
@@ -99,10 +93,6 @@ impl Expander {
99 Path::from_src(path, &self.hygiene) 93 Path::from_src(path, &self.hygiene)
100 } 94 }
101 95
102 fn parse_mod_path(&mut self, path: ast::Path) -> Option<ModPath> {
103 ModPath::from_src(path, &self.hygiene)
104 }
105
106 fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> { 96 fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> {
107 self.crate_def_map 97 self.crate_def_map
108 .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other) 98 .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other)
diff --git a/crates/ra_hir_def/src/body/lower.rs b/crates/ra_hir_def/src/body/lower.rs
index e656f9a41..1fc892362 100644
--- a/crates/ra_hir_def/src/body/lower.rs
+++ b/crates/ra_hir_def/src/body/lower.rs
@@ -8,7 +8,7 @@ use ra_arena::Arena;
8use ra_syntax::{ 8use ra_syntax::{
9 ast::{ 9 ast::{
10 self, ArgListOwner, ArrayExprKind, LiteralKind, LoopBodyOwner, ModuleItemOwner, NameOwner, 10 self, ArgListOwner, ArrayExprKind, LiteralKind, LoopBodyOwner, ModuleItemOwner, NameOwner,
11 TypeAscriptionOwner, 11 SlicePatComponents, TypeAscriptionOwner,
12 }, 12 },
13 AstNode, AstPtr, 13 AstNode, AstPtr,
14}; 14};
@@ -164,9 +164,9 @@ where
164 let match_expr = self.collect_expr_opt(condition.expr()); 164 let match_expr = self.collect_expr_opt(condition.expr());
165 let placeholder_pat = self.missing_pat(); 165 let placeholder_pat = self.missing_pat();
166 let arms = vec![ 166 let arms = vec![
167 MatchArm { pats: vec![pat], expr: then_branch, guard: None }, 167 MatchArm { pat, expr: then_branch, guard: None },
168 MatchArm { 168 MatchArm {
169 pats: vec![placeholder_pat], 169 pat: placeholder_pat,
170 expr: else_branch.unwrap_or_else(|| self.empty_block()), 170 expr: else_branch.unwrap_or_else(|| self.empty_block()),
171 guard: None, 171 guard: None,
172 }, 172 },
@@ -203,8 +203,8 @@ where
203 let placeholder_pat = self.missing_pat(); 203 let placeholder_pat = self.missing_pat();
204 let break_ = self.alloc_expr_desugared(Expr::Break { expr: None }); 204 let break_ = self.alloc_expr_desugared(Expr::Break { expr: None });
205 let arms = vec![ 205 let arms = vec![
206 MatchArm { pats: vec![pat], expr: body, guard: None }, 206 MatchArm { pat, expr: body, guard: None },
207 MatchArm { pats: vec![placeholder_pat], expr: break_, guard: None }, 207 MatchArm { pat: placeholder_pat, expr: break_, guard: None },
208 ]; 208 ];
209 let match_expr = 209 let match_expr =
210 self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms }); 210 self.alloc_expr_desugared(Expr::Match { expr: match_expr, arms });
@@ -250,7 +250,7 @@ where
250 match_arm_list 250 match_arm_list
251 .arms() 251 .arms()
252 .map(|arm| MatchArm { 252 .map(|arm| MatchArm {
253 pats: arm.pats().map(|p| self.collect_pat(p)).collect(), 253 pat: self.collect_pat_opt(arm.pat()),
254 expr: self.collect_expr_opt(arm.expr()), 254 expr: self.collect_expr_opt(arm.expr()),
255 guard: arm 255 guard: arm
256 .guard() 256 .guard()
@@ -587,11 +587,16 @@ where
587 let path = p.path().and_then(|path| self.expander.parse_path(path)); 587 let path = p.path().and_then(|path| self.expander.parse_path(path));
588 path.map(Pat::Path).unwrap_or(Pat::Missing) 588 path.map(Pat::Path).unwrap_or(Pat::Missing)
589 } 589 }
590 ast::Pat::OrPat(p) => {
591 let pats = p.pats().map(|p| self.collect_pat(p)).collect();
592 Pat::Or(pats)
593 }
594 ast::Pat::ParenPat(p) => return self.collect_pat_opt(p.pat()),
590 ast::Pat::TuplePat(p) => { 595 ast::Pat::TuplePat(p) => {
591 let args = p.args().map(|p| self.collect_pat(p)).collect(); 596 let args = p.args().map(|p| self.collect_pat(p)).collect();
592 Pat::Tuple(args) 597 Pat::Tuple(args)
593 } 598 }
594 ast::Pat::PlaceholderPat(_) => Pat::Wild, 599 ast::Pat::PlaceholderPat(_) | ast::Pat::DotDotPat(_) => Pat::Wild,
595 ast::Pat::RecordPat(p) => { 600 ast::Pat::RecordPat(p) => {
596 let path = p.path().and_then(|path| self.expander.parse_path(path)); 601 let path = p.path().and_then(|path| self.expander.parse_path(path));
597 let record_field_pat_list = 602 let record_field_pat_list =
@@ -616,12 +621,20 @@ where
616 621
617 Pat::Record { path, args: fields } 622 Pat::Record { path, args: fields }
618 } 623 }
624 ast::Pat::SlicePat(p) => {
625 let SlicePatComponents { prefix, slice, suffix } = p.components();
626
627 Pat::Slice {
628 prefix: prefix.into_iter().map(|p| self.collect_pat(p)).collect(),
629 slice: slice.map(|p| self.collect_pat(p)),
630 suffix: suffix.into_iter().map(|p| self.collect_pat(p)).collect(),
631 }
632 }
619 633
620 // FIXME: implement 634 // FIXME: implement
621 ast::Pat::DotDotPat(_) => Pat::Missing,
622 ast::Pat::BoxPat(_) => Pat::Missing, 635 ast::Pat::BoxPat(_) => Pat::Missing,
623 ast::Pat::LiteralPat(_) => Pat::Missing, 636 ast::Pat::LiteralPat(_) => Pat::Missing,
624 ast::Pat::SlicePat(_) | ast::Pat::RangePat(_) => Pat::Missing, 637 ast::Pat::RangePat(_) => Pat::Missing,
625 }; 638 };
626 let ptr = AstPtr::new(&pat); 639 let ptr = AstPtr::new(&pat);
627 self.alloc_pat(pattern, Either::Left(ptr)) 640 self.alloc_pat(pattern, Either::Left(ptr))
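
The lowering above now handles several pattern forms that previously fell back to Pat::Missing or could not be represented: or-patterns, parenthesized patterns, `..` rest patterns, and slice patterns split into prefix, named slice, and suffix. The plain Rust below shows the surface syntax these cases correspond to; it illustrates the language features, not the lowering code itself.

fn main() {
    // or-pattern wrapped in a parenthesized pattern
    let n = 2;
    match n {
        (1 | 2 | 3) => println!("small"),
        _ => println!("other"),
    }

    // `..` rest pattern, lowered like a wildcard for the skipped elements
    let (first, .., last) = (10, 20, 30, 40);
    println!("{} {}", first, last);

    // slice pattern with a prefix, a named middle slice, and a suffix
    let xs = [1, 2, 3, 4, 5];
    let [head, middle @ .., tail] = xs;
    println!("{} {:?} {}", head, middle, tail);
}
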
diff --git a/crates/ra_hir_def/src/body/scope.rs b/crates/ra_hir_def/src/body/scope.rs
index a63552327..a58a7b21f 100644
--- a/crates/ra_hir_def/src/body/scope.rs
+++ b/crates/ra_hir_def/src/body/scope.rs
@@ -158,9 +158,7 @@ fn compute_expr_scopes(expr: ExprId, body: &Body, scopes: &mut ExprScopes, scope
158 compute_expr_scopes(*expr, body, scopes, scope); 158 compute_expr_scopes(*expr, body, scopes, scope);
159 for arm in arms { 159 for arm in arms {
160 let scope = scopes.new_scope(scope); 160 let scope = scopes.new_scope(scope);
161 for pat in &arm.pats { 161 scopes.add_bindings(body, scope, arm.pat);
162 scopes.add_bindings(body, scope, *pat);
163 }
164 scopes.set_scope(arm.expr, scope); 162 scopes.set_scope(arm.expr, scope);
165 compute_expr_scopes(arm.expr, body, scopes, scope); 163 compute_expr_scopes(arm.expr, body, scopes, scope);
166 } 164 }
@@ -194,7 +192,7 @@ mod tests {
194 let (off, code) = extract_offset(code); 192 let (off, code) = extract_offset(code);
195 let code = { 193 let code = {
196 let mut buf = String::new(); 194 let mut buf = String::new();
197 let off = u32::from(off) as usize; 195 let off = off.to_usize();
198 buf.push_str(&code[..off]); 196 buf.push_str(&code[..off]);
199 buf.push_str("marker"); 197 buf.push_str("marker");
200 buf.push_str(&code[off..]); 198 buf.push_str(&code[off..]);
diff --git a/crates/ra_hir_def/src/expr.rs b/crates/ra_hir_def/src/expr.rs
index a75ef9970..9707c5527 100644
--- a/crates/ra_hir_def/src/expr.rs
+++ b/crates/ra_hir_def/src/expr.rs
@@ -202,7 +202,7 @@ pub enum Array {
202 202
203#[derive(Debug, Clone, Eq, PartialEq)] 203#[derive(Debug, Clone, Eq, PartialEq)]
204pub struct MatchArm { 204pub struct MatchArm {
205 pub pats: Vec<PatId>, 205 pub pat: PatId,
206 pub guard: Option<ExprId>, 206 pub guard: Option<ExprId>,
207 pub expr: ExprId, 207 pub expr: ExprId,
208} 208}
@@ -382,6 +382,7 @@ pub enum Pat {
382 Missing, 382 Missing,
383 Wild, 383 Wild,
384 Tuple(Vec<PatId>), 384 Tuple(Vec<PatId>),
385 Or(Vec<PatId>),
385 Record { 386 Record {
386 path: Option<Path>, 387 path: Option<Path>,
387 args: Vec<RecordFieldPat>, 388 args: Vec<RecordFieldPat>,
@@ -393,7 +394,7 @@ pub enum Pat {
393 }, 394 },
394 Slice { 395 Slice {
395 prefix: Vec<PatId>, 396 prefix: Vec<PatId>,
396 rest: Option<PatId>, 397 slice: Option<PatId>,
397 suffix: Vec<PatId>, 398 suffix: Vec<PatId>,
398 }, 399 },
399 Path(Path), 400 Path(Path),
@@ -420,12 +421,12 @@ impl Pat {
420 Pat::Bind { subpat, .. } => { 421 Pat::Bind { subpat, .. } => {
421 subpat.iter().copied().for_each(f); 422 subpat.iter().copied().for_each(f);
422 } 423 }
423 Pat::Tuple(args) | Pat::TupleStruct { args, .. } => { 424 Pat::Or(args) | Pat::Tuple(args) | Pat::TupleStruct { args, .. } => {
424 args.iter().copied().for_each(f); 425 args.iter().copied().for_each(f);
425 } 426 }
426 Pat::Ref { pat, .. } => f(*pat), 427 Pat::Ref { pat, .. } => f(*pat),
427 Pat::Slice { prefix, rest, suffix } => { 428 Pat::Slice { prefix, slice, suffix } => {
428 let total_iter = prefix.iter().chain(rest.iter()).chain(suffix.iter()); 429 let total_iter = prefix.iter().chain(slice.iter()).chain(suffix.iter());
429 total_iter.copied().for_each(f); 430 total_iter.copied().for_each(f);
430 } 431 }
431 Pat::Record { args, .. } => { 432 Pat::Record { args, .. } => {
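
With MatchArm now holding a single pat, and with the new Or variant and the Slice field renamed from rest to slice, the child-pattern traversal in impl Pat has to cover the extra cases. Here is a simplified, self-contained model of that shape; it uses owned values instead of the arena-based PatId representation used above.

#[derive(Debug)]
enum Pat {
    Wild,
    Or(Vec<Pat>),
    Slice {
        prefix: Vec<Pat>,
        slice: Option<Box<Pat>>,
        suffix: Vec<Pat>,
    },
}

/// Calls `f` for every pattern in the tree, mirroring the traversal above.
fn walk(pat: &Pat, f: &mut impl FnMut(&Pat)) {
    f(pat);
    match pat {
        Pat::Wild => {}
        Pat::Or(pats) => {
            for p in pats {
                walk(p, f);
            }
        }
        Pat::Slice { prefix, slice, suffix } => {
            for p in prefix.iter().chain(slice.as_deref()).chain(suffix.iter()) {
                walk(p, f);
            }
        }
    }
}

fn main() {
    let pat = Pat::Or(vec![
        Pat::Wild,
        Pat::Slice { prefix: vec![Pat::Wild], slice: Some(Box::new(Pat::Wild)), suffix: vec![] },
    ]);
    let mut count = 0;
    walk(&pat, &mut |_| count += 1);
    assert_eq!(count, 5); // Or, Wild, Slice, prefix Wild, slice Wild
}
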
diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs
index feb3a300d..aa0b558b8 100644
--- a/crates/ra_hir_def/src/lib.rs
+++ b/crates/ra_hir_def/src/lib.rs
@@ -46,7 +46,10 @@ mod marks;
46 46
47use std::hash::Hash; 47use std::hash::Hash;
48 48
49use hir_expand::{ast_id_map::FileAstId, AstId, HirFileId, InFile, MacroDefId}; 49use hir_expand::{
50 ast_id_map::FileAstId, db::AstDatabase, hygiene::Hygiene, AstId, HirFileId, InFile,
51 MacroCallId, MacroCallKind, MacroDefId,
52};
50use ra_arena::{impl_arena_id, RawId}; 53use ra_arena::{impl_arena_id, RawId};
51use ra_db::{impl_intern_key, salsa, CrateId}; 54use ra_db::{impl_intern_key, salsa, CrateId};
52use ra_syntax::{ast, AstNode}; 55use ra_syntax::{ast, AstNode};
@@ -413,3 +416,61 @@ impl HasModule for StaticLoc {
413 self.container.module(db) 416 self.container.module(db)
414 } 417 }
415} 418}
419
420/// A helper trait for converting to MacroCallId
421pub trait AsMacroCall {
422 fn as_call_id(
423 &self,
424 db: &(impl db::DefDatabase + AstDatabase),
425 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
426 ) -> Option<MacroCallId>;
427}
428
429impl AsMacroCall for InFile<&ast::MacroCall> {
430 fn as_call_id(
431 &self,
432 db: &(impl db::DefDatabase + AstDatabase),
433 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
434 ) -> Option<MacroCallId> {
435 let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
436 let h = Hygiene::new(db, self.file_id);
437 let path = path::ModPath::from_src(self.value.path()?, &h)?;
438
439 AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, resolver)
440 }
441}
442
443/// Helper wrapper for `AstId` with `ModPath`
444#[derive(Clone, Debug, Eq, PartialEq)]
445struct AstIdWithPath<T: ast::AstNode> {
446 pub ast_id: AstId<T>,
447 pub path: path::ModPath,
448}
449
450impl<T: ast::AstNode> AstIdWithPath<T> {
451 pub fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
452 AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
453 }
454}
455
456impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
457 fn as_call_id(
458 &self,
459 db: &impl AstDatabase,
460 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
461 ) -> Option<MacroCallId> {
462 let def = resolver(self.path.clone())?;
463 Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id.clone())))
464 }
465}
466
467impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
468 fn as_call_id(
469 &self,
470 db: &impl AstDatabase,
471 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
472 ) -> Option<MacroCallId> {
473 let def = resolver(self.path.clone())?;
474 Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id.clone())))
475 }
476}
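
The new AsMacroCall trait separates "turn this syntax into a macro call id" from "resolve this path to a macro definition": callers pass the resolution step in as a closure. Below is a compact, self-contained sketch of that shape; the id types, the string path, and the derived call id are simplified stand-ins for the real salsa-backed interning.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MacroDefId(u32);

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct MacroCallId(u32);

/// A macro invocation paired with the path it was written with.
struct MacroCall {
    path: String,
}

trait AsMacroCall {
    fn as_call_id(&self, resolver: impl Fn(&str) -> Option<MacroDefId>) -> Option<MacroCallId>;
}

impl AsMacroCall for MacroCall {
    fn as_call_id(&self, resolver: impl Fn(&str) -> Option<MacroDefId>) -> Option<MacroCallId> {
        // the caller decides how paths resolve; this helper only builds the call id
        let def = resolver(&self.path)?;
        // the real code interns (definition, call kind, ast id); a derived id stands in here
        Some(MacroCallId(def.0))
    }
}

fn main() {
    let call = MacroCall { path: "vec".to_string() };
    let known = |path: &str| if path == "vec" { Some(MacroDefId(1)) } else { None };
    assert_eq!(call.as_call_id(known), Some(MacroCallId(1)));

    let unknown = |_: &str| None;
    assert_eq!(call.as_call_id(unknown), None);
}
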
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index 6352c71ef..51c65a5d7 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -7,7 +7,7 @@ use hir_expand::{
7 builtin_derive::find_builtin_derive, 7 builtin_derive::find_builtin_derive,
8 builtin_macro::find_builtin_macro, 8 builtin_macro::find_builtin_macro,
9 name::{name, AsName, Name}, 9 name::{name, AsName, Name},
10 HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, 10 HirFileId, MacroCallId, MacroDefId, MacroDefKind,
11}; 11};
12use ra_cfg::CfgOptions; 12use ra_cfg::CfgOptions;
13use ra_db::{CrateId, FileId}; 13use ra_db::{CrateId, FileId};
@@ -25,8 +25,9 @@ use crate::{
25 path::{ImportAlias, ModPath, PathKind}, 25 path::{ImportAlias, ModPath, PathKind},
26 per_ns::PerNs, 26 per_ns::PerNs,
27 visibility::Visibility, 27 visibility::Visibility,
28 AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, 28 AdtId, AsMacroCall, AstId, AstIdWithPath, ConstLoc, ContainerId, EnumLoc, EnumVariantId,
29 LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, 29 FunctionLoc, ImplLoc, Intern, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc,
30 TraitLoc, TypeAliasLoc, UnionLoc,
30}; 31};
31 32
32pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { 33pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
@@ -99,11 +100,16 @@ struct ImportDirective {
99#[derive(Clone, Debug, Eq, PartialEq)] 100#[derive(Clone, Debug, Eq, PartialEq)]
100struct MacroDirective { 101struct MacroDirective {
101 module_id: LocalModuleId, 102 module_id: LocalModuleId,
102 ast_id: AstId<ast::MacroCall>, 103 ast_id: AstIdWithPath<ast::MacroCall>,
103 path: ModPath,
104 legacy: Option<MacroCallId>, 104 legacy: Option<MacroCallId>,
105} 105}
106 106
107#[derive(Clone, Debug, Eq, PartialEq)]
108struct DeriveDirective {
109 module_id: LocalModuleId,
110 ast_id: AstIdWithPath<ast::ModuleItem>,
111}
112
107/// Walks the module tree recursively 113/// Walks the module tree recursively
108struct DefCollector<'a, DB> { 114struct DefCollector<'a, DB> {
109 db: &'a DB, 115 db: &'a DB,
@@ -112,7 +118,7 @@ struct DefCollector<'a, DB> {
112 unresolved_imports: Vec<ImportDirective>, 118 unresolved_imports: Vec<ImportDirective>,
113 resolved_imports: Vec<ImportDirective>, 119 resolved_imports: Vec<ImportDirective>,
114 unexpanded_macros: Vec<MacroDirective>, 120 unexpanded_macros: Vec<MacroDirective>,
115 unexpanded_attribute_macros: Vec<(LocalModuleId, AstId<ast::ModuleItem>, ModPath)>, 121 unexpanded_attribute_macros: Vec<DeriveDirective>,
116 mod_dirs: FxHashMap<LocalModuleId, ModDir>, 122 mod_dirs: FxHashMap<LocalModuleId, ModDir>,
117 cfg_options: &'a CfgOptions, 123 cfg_options: &'a CfgOptions,
118} 124}
@@ -146,7 +152,7 @@ where
146 ReachedFixedPoint::Yes => break, 152 ReachedFixedPoint::Yes => break,
147 ReachedFixedPoint::No => i += 1, 153 ReachedFixedPoint::No => i += 1,
148 } 154 }
149 if i == 1000 { 155 if i == 10000 {
150 log::error!("name resolution is stuck"); 156 log::error!("name resolution is stuck");
151 break; 157 break;
152 } 158 }
@@ -515,16 +521,16 @@ where
515 return false; 521 return false;
516 } 522 }
517 523
518 let resolved_res = self.def_map.resolve_path_fp_with_macro( 524 if let Some(call_id) = directive.ast_id.as_call_id(self.db, |path| {
519 self.db, 525 let resolved_res = self.def_map.resolve_path_fp_with_macro(
520 ResolveMode::Other, 526 self.db,
521 directive.module_id, 527 ResolveMode::Other,
522 &directive.path, 528 directive.module_id,
523 BuiltinShadowMode::Module, 529 &path,
524 ); 530 BuiltinShadowMode::Module,
525 531 );
526 if let Some(def) = resolved_res.resolved_def.take_macros() { 532 resolved_res.resolved_def.take_macros()
527 let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(directive.ast_id)); 533 }) {
528 resolved.push((directive.module_id, call_id)); 534 resolved.push((directive.module_id, call_id));
529 res = ReachedFixedPoint::No; 535 res = ReachedFixedPoint::No;
530 return false; 536 return false;
@@ -532,12 +538,11 @@ where
532 538
533 true 539 true
534 }); 540 });
535 attribute_macros.retain(|(module_id, ast_id, path)| { 541 attribute_macros.retain(|directive| {
536 let resolved_res = self.resolve_attribute_macro(path); 542 if let Some(call_id) =
537 543 directive.ast_id.as_call_id(self.db, |path| self.resolve_attribute_macro(&path))
538 if let Some(def) = resolved_res { 544 {
539 let call_id = def.as_call_id(self.db, MacroCallKind::Attr(*ast_id)); 545 resolved.push((directive.module_id, call_id));
540 resolved.push((*module_id, call_id));
541 res = ReachedFixedPoint::No; 546 res = ReachedFixedPoint::No;
542 return false; 547 return false;
543 } 548 }
@@ -833,20 +838,22 @@ where
833 }; 838 };
834 let path = ModPath::from_tt_ident(ident); 839 let path = ModPath::from_tt_ident(ident);
835 840
836 let ast_id = AstId::new(self.file_id, def.kind.ast_id()); 841 let ast_id = AstIdWithPath::new(self.file_id, def.kind.ast_id(), path);
837 self.def_collector.unexpanded_attribute_macros.push((self.module_id, ast_id, path)); 842 self.def_collector
843 .unexpanded_attribute_macros
844 .push(DeriveDirective { module_id: self.module_id, ast_id });
838 } 845 }
839 } 846 }
840 } 847 }
841 848
842 fn collect_macro(&mut self, mac: &raw::MacroData) { 849 fn collect_macro(&mut self, mac: &raw::MacroData) {
843 let ast_id = AstId::new(self.file_id, mac.ast_id); 850 let mut ast_id = AstIdWithPath::new(self.file_id, mac.ast_id, mac.path.clone());
844 851
845 // Case 0: builtin macros 852 // Case 0: builtin macros
846 if mac.builtin { 853 if mac.builtin {
847 if let Some(name) = &mac.name { 854 if let Some(name) = &mac.name {
848 let krate = self.def_collector.def_map.krate; 855 let krate = self.def_collector.def_map.krate;
849 if let Some(macro_id) = find_builtin_macro(name, krate, ast_id) { 856 if let Some(macro_id) = find_builtin_macro(name, krate, ast_id.ast_id) {
850 self.def_collector.define_macro( 857 self.def_collector.define_macro(
851 self.module_id, 858 self.module_id,
852 name.clone(), 859 name.clone(),
@@ -862,7 +869,7 @@ where
862 if is_macro_rules(&mac.path) { 869 if is_macro_rules(&mac.path) {
863 if let Some(name) = &mac.name { 870 if let Some(name) = &mac.name {
864 let macro_id = MacroDefId { 871 let macro_id = MacroDefId {
865 ast_id: Some(ast_id), 872 ast_id: Some(ast_id.ast_id),
866 krate: Some(self.def_collector.def_map.krate), 873 krate: Some(self.def_collector.def_map.krate),
867 kind: MacroDefKind::Declarative, 874 kind: MacroDefKind::Declarative,
868 }; 875 };
@@ -872,15 +879,13 @@ where
872 } 879 }
873 880
874 // Case 2: try to resolve in legacy scope and expand macro_rules 881 // Case 2: try to resolve in legacy scope and expand macro_rules
875 if let Some(macro_def) = mac.path.as_ident().and_then(|name| { 882 if let Some(macro_call_id) = ast_id.as_call_id(self.def_collector.db, |path| {
876 self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) 883 path.as_ident().and_then(|name| {
884 self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
885 })
877 }) { 886 }) {
878 let macro_call_id =
879 macro_def.as_call_id(self.def_collector.db, MacroCallKind::FnLike(ast_id));
880
881 self.def_collector.unexpanded_macros.push(MacroDirective { 887 self.def_collector.unexpanded_macros.push(MacroDirective {
882 module_id: self.module_id, 888 module_id: self.module_id,
883 path: mac.path.clone(),
884 ast_id, 889 ast_id,
885 legacy: Some(macro_call_id), 890 legacy: Some(macro_call_id),
886 }); 891 });
@@ -890,14 +895,12 @@ where
890 895
891 // Case 3: resolve in module scope, expand during name resolution. 896 // Case 3: resolve in module scope, expand during name resolution.
892 // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only. 897 // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only.
893 let mut path = mac.path.clone(); 898 if ast_id.path.is_ident() {
894 if path.is_ident() { 899 ast_id.path.kind = PathKind::Super(0);
895 path.kind = PathKind::Super(0);
896 } 900 }
897 901
898 self.def_collector.unexpanded_macros.push(MacroDirective { 902 self.def_collector.unexpanded_macros.push(MacroDirective {
899 module_id: self.module_id, 903 module_id: self.module_id,
900 path,
901 ast_id, 904 ast_id,
902 legacy: None, 905 legacy: None,
903 }); 906 });
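
The collector change also raises the safety cap on the name-resolution loop from 1000 to 10000 iterations. The loop follows the usual fixed-point pattern: keep re-running the resolution step until nothing changes, and bail out with a log message if it never settles. A minimal sketch of that pattern (the step closure and counter are illustrative, not the collector's actual types):

/// Runs `step` until it reports no further progress, with an upper bound as a
/// safety valve against resolution that never reaches a fixed point.
fn run_to_fixed_point(mut step: impl FnMut() -> bool) {
    let mut i = 0;
    loop {
        let made_progress = step();
        if !made_progress {
            break; // fixed point reached
        }
        i += 1;
        if i == 10000 {
            eprintln!("name resolution is stuck");
            break;
        }
    }
}

fn main() {
    let mut pending = 3;
    run_to_fixed_point(|| {
        if pending > 0 {
            pending -= 1;
            true // something was resolved this round
        } else {
            false
        }
    });
    assert_eq!(pending, 0);
}
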
diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs
index 05cf4646a..e2b228e80 100644
--- a/crates/ra_hir_def/src/resolver.rs
+++ b/crates/ra_hir_def/src/resolver.rs
@@ -542,11 +542,7 @@ impl Resolver {
542 542
543 fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver { 543 fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver {
544 let params = db.generic_params(def); 544 let params = db.generic_params(def);
545 if params.types.is_empty() { 545 self.push_scope(Scope::GenericParams { def, params })
546 self
547 } else {
548 self.push_scope(Scope::GenericParams { def, params })
549 }
550 } 546 }
551 547
552 fn push_impl_block_scope(self, impl_block: ImplId) -> Resolver { 548 fn push_impl_block_scope(self, impl_block: ImplId) -> Resolver {
diff --git a/crates/ra_hir_expand/src/lib.rs b/crates/ra_hir_expand/src/lib.rs
index 7cf3b59a7..9506f2e1c 100644
--- a/crates/ra_hir_expand/src/lib.rs
+++ b/crates/ra_hir_expand/src/lib.rs
@@ -323,11 +323,18 @@ impl<T: Clone> InFile<&T> {
323 } 323 }
324} 324}
325 325
326impl<T> InFile<Option<T>> {
327 pub fn transpose(self) -> Option<InFile<T>> {
328 let value = self.value?;
329 Some(InFile::new(self.file_id, value))
330 }
331}
332
326impl InFile<SyntaxNode> { 333impl InFile<SyntaxNode> {
327 pub fn ancestors_with_macros<'a>( 334 pub fn ancestors_with_macros(
328 self, 335 self,
329 db: &'a impl crate::db::AstDatabase, 336 db: &impl crate::db::AstDatabase,
330 ) -> impl Iterator<Item = InFile<SyntaxNode>> + 'a { 337 ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
331 std::iter::successors(Some(self), move |node| match node.value.parent() { 338 std::iter::successors(Some(self), move |node| match node.value.parent() {
332 Some(parent) => Some(node.with_value(parent)), 339 Some(parent) => Some(node.with_value(parent)),
333 None => { 340 None => {
@@ -338,6 +345,15 @@ impl InFile<SyntaxNode> {
338 } 345 }
339} 346}
340 347
348impl InFile<SyntaxToken> {
349 pub fn ancestors_with_macros(
350 self,
351 db: &impl crate::db::AstDatabase,
352 ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
353 self.map(|it| it.parent()).ancestors_with_macros(db)
354 }
355}
356
341impl<N: AstNode> InFile<N> { 357impl<N: AstNode> InFile<N> {
342 pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> { 358 pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
343 self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n)) 359 self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
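
The added InFile<Option<T>>::transpose mirrors the standard Option/Result transpose helpers: it moves the Option outward so callers can apply `?` to it. A stand-alone illustration with a simplified InFile follows; the real type carries a HirFileId rather than the bare number used here.

#[derive(Debug, PartialEq)]
struct InFile<T> {
    file_id: u32,
    value: T,
}

impl<T> InFile<T> {
    fn new(file_id: u32, value: T) -> InFile<T> {
        InFile { file_id, value }
    }
}

impl<T> InFile<Option<T>> {
    /// Moves the inner `Option` outward: `InFile<Option<T>>` becomes `Option<InFile<T>>`.
    fn transpose(self) -> Option<InFile<T>> {
        let value = self.value?;
        Some(InFile::new(self.file_id, value))
    }
}

fn main() {
    assert_eq!(
        InFile::new(7, Some("fn main() {}")).transpose(),
        Some(InFile::new(7, "fn main() {}"))
    );
    assert!(InFile::new(7, None::<&str>).transpose().is_none());
}
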
diff --git a/crates/ra_hir_ty/src/display.rs b/crates/ra_hir_ty/src/display.rs
index d1ff85f0f..14e089cf4 100644
--- a/crates/ra_hir_ty/src/display.rs
+++ b/crates/ra_hir_ty/src/display.rs
@@ -2,7 +2,12 @@
2 2
3use std::fmt; 3use std::fmt;
4 4
5use crate::db::HirDatabase; 5use crate::{
6 db::HirDatabase, utils::generics, ApplicationTy, CallableDef, FnSig, GenericPredicate,
7 Obligation, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
8};
9use hir_def::{generics::TypeParamProvenance, AdtId, AssocContainerId, Lookup};
10use hir_expand::name::Name;
6 11
7pub struct HirFormatter<'a, 'b, DB> { 12pub struct HirFormatter<'a, 'b, DB> {
8 pub db: &'a DB, 13 pub db: &'a DB,
@@ -97,3 +102,369 @@ where
97 }) 102 })
98 } 103 }
99} 104}
105
106const TYPE_HINT_TRUNCATION: &str = "…";
107
108impl HirDisplay for &Ty {
109 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
110 HirDisplay::hir_fmt(*self, f)
111 }
112}
113
114impl HirDisplay for ApplicationTy {
115 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
116 if f.should_truncate() {
117 return write!(f, "{}", TYPE_HINT_TRUNCATION);
118 }
119
120 match self.ctor {
121 TypeCtor::Bool => write!(f, "bool")?,
122 TypeCtor::Char => write!(f, "char")?,
123 TypeCtor::Int(t) => write!(f, "{}", t)?,
124 TypeCtor::Float(t) => write!(f, "{}", t)?,
125 TypeCtor::Str => write!(f, "str")?,
126 TypeCtor::Slice => {
127 let t = self.parameters.as_single();
128 write!(f, "[{}]", t.display(f.db))?;
129 }
130 TypeCtor::Array => {
131 let t = self.parameters.as_single();
132 write!(f, "[{}; _]", t.display(f.db))?;
133 }
134 TypeCtor::RawPtr(m) => {
135 let t = self.parameters.as_single();
136 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
137 }
138 TypeCtor::Ref(m) => {
139 let t = self.parameters.as_single();
140 let ty_display = if f.omit_verbose_types() {
141 t.display_truncated(f.db, f.max_size)
142 } else {
143 t.display(f.db)
144 };
145 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
146 }
147 TypeCtor::Never => write!(f, "!")?,
148 TypeCtor::Tuple { .. } => {
149 let ts = &self.parameters;
150 if ts.len() == 1 {
151 write!(f, "({},)", ts[0].display(f.db))?;
152 } else {
153 write!(f, "(")?;
154 f.write_joined(&*ts.0, ", ")?;
155 write!(f, ")")?;
156 }
157 }
158 TypeCtor::FnPtr { .. } => {
159 let sig = FnSig::from_fn_ptr_substs(&self.parameters);
160 write!(f, "fn(")?;
161 f.write_joined(sig.params(), ", ")?;
162 write!(f, ") -> {}", sig.ret().display(f.db))?;
163 }
164 TypeCtor::FnDef(def) => {
165 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
166 let name = match def {
167 CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
168 CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
169 CallableDef::EnumVariantId(e) => {
170 let enum_data = f.db.enum_data(e.parent);
171 enum_data.variants[e.local_id].name.clone()
172 }
173 };
174 match def {
175 CallableDef::FunctionId(_) => write!(f, "fn {}", name)?,
176 CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
177 write!(f, "{}", name)?
178 }
179 }
180 if self.parameters.len() > 0 {
181 let generics = generics(f.db, def.into());
182 let (parent_params, self_param, type_params, _impl_trait_params) =
183 generics.provenance_split();
184 let total_len = parent_params + self_param + type_params;
185 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
186 if total_len > 0 {
187 write!(f, "<")?;
188 f.write_joined(&self.parameters.0[..total_len], ", ")?;
189 write!(f, ">")?;
190 }
191 }
192 write!(f, "(")?;
193 f.write_joined(sig.params(), ", ")?;
194 write!(f, ") -> {}", sig.ret().display(f.db))?;
195 }
196 TypeCtor::Adt(def_id) => {
197 let name = match def_id {
198 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
199 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
200 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
201 };
202 write!(f, "{}", name)?;
203 if self.parameters.len() > 0 {
204 write!(f, "<")?;
205
206 let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
207 let parameters_to_write = if f.omit_verbose_types() {
208 match self
209 .ctor
210 .as_generic_def()
211 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
212 .filter(|defaults| !defaults.is_empty())
213 {
214 Option::None => self.parameters.0.as_ref(),
215 Option::Some(default_parameters) => {
216 for (i, parameter) in self.parameters.iter().enumerate() {
217 match (parameter, default_parameters.get(i)) {
218 (&Ty::Unknown, _) | (_, None) => {
219 non_default_parameters.push(parameter.clone())
220 }
221 (_, Some(default_parameter))
222 if parameter != default_parameter =>
223 {
224 non_default_parameters.push(parameter.clone())
225 }
226 _ => (),
227 }
228 }
229 &non_default_parameters
230 }
231 }
232 } else {
233 self.parameters.0.as_ref()
234 };
235
236 f.write_joined(parameters_to_write, ", ")?;
237 write!(f, ">")?;
238 }
239 }
240 TypeCtor::AssociatedType(type_alias) => {
241 let trait_ = match type_alias.lookup(f.db).container {
242 AssocContainerId::TraitId(it) => it,
243 _ => panic!("not an associated type"),
244 };
245 let trait_name = f.db.trait_data(trait_).name.clone();
246 let name = f.db.type_alias_data(type_alias).name.clone();
247 write!(f, "{}::{}", trait_name, name)?;
248 if self.parameters.len() > 0 {
249 write!(f, "<")?;
250 f.write_joined(&*self.parameters.0, ", ")?;
251 write!(f, ">")?;
252 }
253 }
254 TypeCtor::Closure { .. } => {
255 let sig = self.parameters[0]
256 .callable_sig(f.db)
257 .expect("first closure parameter should contain signature");
258 let return_type_hint = sig.ret().display(f.db);
259 if sig.params().is_empty() {
260 write!(f, "|| -> {}", return_type_hint)?;
261 } else if f.omit_verbose_types() {
262 write!(f, "|{}| -> {}", TYPE_HINT_TRUNCATION, return_type_hint)?;
263 } else {
264 write!(f, "|")?;
265 f.write_joined(sig.params(), ", ")?;
266 write!(f, "| -> {}", return_type_hint)?;
267 };
268 }
269 }
270 Ok(())
271 }
272}
273
274impl HirDisplay for ProjectionTy {
275 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
276 if f.should_truncate() {
277 return write!(f, "{}", TYPE_HINT_TRUNCATION);
278 }
279
280 let trait_name = f.db.trait_data(self.trait_(f.db)).name.clone();
281 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_name,)?;
282 if self.parameters.len() > 1 {
283 write!(f, "<")?;
284 f.write_joined(&self.parameters[1..], ", ")?;
285 write!(f, ">")?;
286 }
287 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
288 Ok(())
289 }
290}
291
292impl HirDisplay for Ty {
293 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
294 if f.should_truncate() {
295 return write!(f, "{}", TYPE_HINT_TRUNCATION);
296 }
297
298 match self {
299 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
300 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
301 Ty::Placeholder(id) => {
302 let generics = generics(f.db, id.parent);
303 let param_data = &generics.params.types[id.local_id];
304 match param_data.provenance {
305 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
306 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
307 }
308 TypeParamProvenance::ArgumentImplTrait => {
309 write!(f, "impl ")?;
310 let bounds = f.db.generic_predicates_for_param(*id);
311 let substs = Substs::type_params_for_generics(&generics);
312 write_bounds_like_dyn_trait(
313 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
314 f,
315 )?;
316 }
317 }
318 }
319 Ty::Bound(idx) => write!(f, "?{}", idx)?,
320 Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
321 match self {
322 Ty::Dyn(_) => write!(f, "dyn ")?,
323 Ty::Opaque(_) => write!(f, "impl ")?,
324 _ => unreachable!(),
325 };
326 write_bounds_like_dyn_trait(&predicates, f)?;
327 }
328 Ty::Unknown => write!(f, "{{unknown}}")?,
329 Ty::Infer(..) => write!(f, "_")?,
330 }
331 Ok(())
332 }
333}
334
335fn write_bounds_like_dyn_trait(
336 predicates: &[GenericPredicate],
337 f: &mut HirFormatter<impl HirDatabase>,
338) -> fmt::Result {
339 // Note: This code is written to produce nice results (i.e.
340 // corresponding to surface Rust) for types that can occur in
341 // actual Rust. It will have weird results if the predicates
342 // aren't as expected (i.e. self types = $0, projection
343 // predicates for a certain trait come after the Implemented
344 // predicate for that trait).
345 let mut first = true;
346 let mut angle_open = false;
347 for p in predicates.iter() {
348 match p {
349 GenericPredicate::Implemented(trait_ref) => {
350 if angle_open {
351 write!(f, ">")?;
352 }
353 if !first {
354 write!(f, " + ")?;
355 }
356 // We assume that the self type is $0 (i.e. the
357 // existential) here, which is the only thing that's
358 // possible in actual Rust, and hence don't print it
359 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?;
360 if trait_ref.substs.len() > 1 {
361 write!(f, "<")?;
362 f.write_joined(&trait_ref.substs[1..], ", ")?;
363 // there might be assoc type bindings, so we leave the angle brackets open
364 angle_open = true;
365 }
366 }
367 GenericPredicate::Projection(projection_pred) => {
368 // in types in actual Rust, these will always come
369 // after the corresponding Implemented predicate
370 if angle_open {
371 write!(f, ", ")?;
372 } else {
373 write!(f, "<")?;
374 angle_open = true;
375 }
376 let name =
377 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
378 write!(f, "{} = ", name)?;
379 projection_pred.ty.hir_fmt(f)?;
380 }
381 GenericPredicate::Error => {
382 if angle_open {
383 // impl Trait<X, {error}>
384 write!(f, ", ")?;
385 } else if !first {
386 // impl Trait + {error}
387 write!(f, " + ")?;
388 }
389 p.hir_fmt(f)?;
390 }
391 }
392 first = false;
393 }
394 if angle_open {
395 write!(f, ">")?;
396 }
397 Ok(())
398}
399
400impl TraitRef {
401 fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
402 if f.should_truncate() {
403 return write!(f, "{}", TYPE_HINT_TRUNCATION);
404 }
405
406 self.substs[0].hir_fmt(f)?;
407 if use_as {
408 write!(f, " as ")?;
409 } else {
410 write!(f, ": ")?;
411 }
412 write!(f, "{}", f.db.trait_data(self.trait_).name.clone())?;
413 if self.substs.len() > 1 {
414 write!(f, "<")?;
415 f.write_joined(&self.substs[1..], ", ")?;
416 write!(f, ">")?;
417 }
418 Ok(())
419 }
420}
421
422impl HirDisplay for TraitRef {
423 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
424 self.hir_fmt_ext(f, false)
425 }
426}
427
428impl HirDisplay for &GenericPredicate {
429 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
430 HirDisplay::hir_fmt(*self, f)
431 }
432}
433
434impl HirDisplay for GenericPredicate {
435 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
436 if f.should_truncate() {
437 return write!(f, "{}", TYPE_HINT_TRUNCATION);
438 }
439
440 match self {
441 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
442 GenericPredicate::Projection(projection_pred) => {
443 write!(f, "<")?;
444 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
445 write!(
446 f,
447 ">::{} = {}",
448 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
449 projection_pred.ty.display(f.db)
450 )?;
451 }
452 GenericPredicate::Error => write!(f, "{{error}}")?,
453 }
454 Ok(())
455 }
456}
457
458impl HirDisplay for Obligation {
459 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
460 match self {
461 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
462 Obligation::Projection(proj) => write!(
463 f,
464 "Normalize({} => {})",
465 proj.projection_ty.display(f.db),
466 proj.ty.display(f.db)
467 ),
468 }
469 }
470}
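
The formatting code moved into display.rs aims to render types roughly as they are written in source. The ordinary Rust signatures below exercise the interesting cases handled above; the comments describe the kind of output one would expect, as an illustration rather than output captured from the tool.

use std::fmt::Debug;

// argument-position `impl Trait` with an associated-type binding:
// expected to render along the lines of `impl Iterator<Item = u32> + Clone`
fn takes_impl_trait(iter: impl Iterator<Item = u32> + Clone) -> u32 {
    iter.clone().sum()
}

// a trait object behind a reference: expected to render as `&dyn Debug`
fn takes_dyn(value: &dyn Debug) {
    println!("{:?}", value);
}

fn main() {
    let total = takes_impl_trait(1..=3);
    takes_dyn(&total);
}
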
diff --git a/crates/ra_hir_ty/src/infer/expr.rs b/crates/ra_hir_ty/src/infer/expr.rs
index 3c9c02d03..39d8bc0ca 100644
--- a/crates/ra_hir_ty/src/infer/expr.rs
+++ b/crates/ra_hir_ty/src/infer/expr.rs
@@ -165,12 +165,14 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
165 Expr::Match { expr, arms } => { 165 Expr::Match { expr, arms } => {
166 let input_ty = self.infer_expr(*expr, &Expectation::none()); 166 let input_ty = self.infer_expr(*expr, &Expectation::none());
167 167
168 let mut result_ty = self.table.new_maybe_never_type_var(); 168 let mut result_ty = if arms.len() == 0 {
169 Ty::simple(TypeCtor::Never)
170 } else {
171 self.table.new_type_var()
172 };
169 173
170 for arm in arms { 174 for arm in arms {
171 for &pat in &arm.pats { 175 let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
172 let _pat_ty = self.infer_pat(pat, &input_ty, BindingMode::default());
173 }
174 if let Some(guard_expr) = arm.guard { 176 if let Some(guard_expr) = arm.guard {
175 self.infer_expr( 177 self.infer_expr(
176 guard_expr, 178 guard_expr,
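
The inference change above gives an arm-less match the never type `!` directly instead of a fresh maybe-never variable. In surface Rust, that is what lets the following compile: the empty match is `!`, which coerces to the function's return type.

use std::convert::Infallible;

// `Infallible` has no values, so this match has no arms; its type is `!`,
// which coerces to `u32` (or to any other expected type).
fn from_empty_match(x: Infallible) -> u32 {
    match x {}
}

fn main() {
    // the function can never be called with a value, but it must still type-check;
    // taking it as a function pointer shows the signature is accepted
    let _check: fn(Infallible) -> u32 = from_empty_match;
    println!("empty match type-checks");
}
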
diff --git a/crates/ra_hir_ty/src/infer/pat.rs b/crates/ra_hir_ty/src/infer/pat.rs
index e7283f24c..a5dfdf6c4 100644
--- a/crates/ra_hir_ty/src/infer/pat.rs
+++ b/crates/ra_hir_ty/src/infer/pat.rs
@@ -82,6 +82,7 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
82 82
83 let is_non_ref_pat = match &body[pat] { 83 let is_non_ref_pat = match &body[pat] {
84 Pat::Tuple(..) 84 Pat::Tuple(..)
85 | Pat::Or(..)
85 | Pat::TupleStruct { .. } 86 | Pat::TupleStruct { .. }
86 | Pat::Record { .. } 87 | Pat::Record { .. }
87 | Pat::Range { .. } 88 | Pat::Range { .. }
@@ -126,6 +127,17 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
126 127
127 Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys)) 128 Ty::apply(TypeCtor::Tuple { cardinality: args.len() as u16 }, Substs(inner_tys))
128 } 129 }
130 Pat::Or(ref pats) => {
131 if let Some((first_pat, rest)) = pats.split_first() {
132 let ty = self.infer_pat(*first_pat, expected, default_bm);
133 for pat in rest {
134 self.infer_pat(*pat, expected, default_bm);
135 }
136 ty
137 } else {
138 Ty::Unknown
139 }
140 }
129 Pat::Ref { pat, mutability } => { 141 Pat::Ref { pat, mutability } => {
130 let expectation = match expected.as_reference() { 142 let expectation = match expected.as_reference() {
131 Some((inner_ty, exp_mut)) => { 143 Some((inner_ty, exp_mut)) => {
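
The Pat::Or inference above types the first alternative against the expectation and then checks the remaining alternatives against the same expectation. In surface Rust, that corresponds to every alternative of an or-pattern matching the same scrutinee type:

fn describe(value: Option<i32>) -> &'static str {
    match value {
        // both alternatives are checked against `Option<i32>`
        Some(0) | None => "zero or absent",
        Some(_) => "some other number",
    }
}

fn main() {
    assert_eq!(describe(Some(0)), "zero or absent");
    assert_eq!(describe(None), "zero or absent");
    assert_eq!(describe(Some(7)), "some other number");
}
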
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
index fe05642ae..1dc842f40 100644
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ b/crates/ra_hir_ty/src/infer/unify.rs
@@ -249,6 +249,8 @@ impl InferenceTable {
249 match (ty1, ty2) { 249 match (ty1, ty2) {
250 (Ty::Unknown, _) | (_, Ty::Unknown) => true, 250 (Ty::Unknown, _) | (_, Ty::Unknown) => true,
251 251
252 (Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
253
252 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2))) 254 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
253 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2))) 255 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
254 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2))) 256 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index c5fe18c85..571579cc4 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -41,13 +41,12 @@ mod marks;
41 41
42use std::ops::Deref; 42use std::ops::Deref;
43use std::sync::Arc; 43use std::sync::Arc;
44use std::{fmt, iter, mem}; 44use std::{iter, mem};
45 45
46use hir_def::{ 46use hir_def::{
47 expr::ExprId, generics::TypeParamProvenance, type_ref::Mutability, AdtId, AssocContainerId, 47 expr::ExprId, type_ref::Mutability, AdtId, AssocContainerId, DefWithBodyId, GenericDefId,
48 DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, TypeParamId, 48 HasModule, Lookup, TraitId, TypeAliasId, TypeParamId,
49}; 49};
50use hir_expand::name::Name;
51use ra_db::{impl_intern_key, salsa, CrateId}; 50use ra_db::{impl_intern_key, salsa, CrateId};
52 51
53use crate::{ 52use crate::{
@@ -55,7 +54,7 @@ use crate::{
55 primitive::{FloatTy, IntTy, Uncertain}, 54 primitive::{FloatTy, IntTy, Uncertain},
56 utils::{generics, make_mut_slice, Generics}, 55 utils::{generics, make_mut_slice, Generics},
57}; 56};
58use display::{HirDisplay, HirFormatter}; 57use display::HirDisplay;
59 58
60pub use autoderef::autoderef; 59pub use autoderef::autoderef;
61pub use infer::{do_infer_query, InferTy, InferenceResult}; 60pub use infer::{do_infer_query, InferTy, InferenceResult};
@@ -291,7 +290,7 @@ pub enum Ty {
291 /// {}` when we're type-checking the body of that function. In this 290 /// {}` when we're type-checking the body of that function. In this
292 /// situation, we know this stands for *some* type, but don't know the exact 291 /// situation, we know this stands for *some* type, but don't know the exact
293 /// type. 292 /// type.
294 Param(TypeParamId), 293 Placeholder(TypeParamId),
295 294
296 /// A bound type variable. This is used in various places: when representing 295 /// A bound type variable. This is used in various places: when representing
297 /// some polymorphic type like the type of function `fn f<T>`, the type 296 /// some polymorphic type like the type of function `fn f<T>`, the type
@@ -365,7 +364,7 @@ impl Substs {
365 364
366 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). 365 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
367 pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs { 366 pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs {
368 Substs(generic_params.iter().map(|(id, _)| Ty::Param(id)).collect()) 367 Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect())
369 } 368 }
370 369
371 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). 370 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
@@ -813,7 +812,7 @@ impl TypeWalk for Ty {
813 p.walk(f); 812 p.walk(f);
814 } 813 }
815 } 814 }
816 Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} 815 Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
817 } 816 }
818 f(self); 817 f(self);
819 } 818 }
@@ -831,374 +830,8 @@ impl TypeWalk for Ty {
831 p.walk_mut_binders(f, binders + 1); 830 p.walk_mut_binders(f, binders + 1);
832 } 831 }
833 } 832 }
834 Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} 833 Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
835 } 834 }
836 f(self, binders); 835 f(self, binders);
837 } 836 }
838} 837}
839
840const TYPE_HINT_TRUNCATION: &str = "…";
841
842impl HirDisplay for &Ty {
843 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
844 HirDisplay::hir_fmt(*self, f)
845 }
846}
847
848impl HirDisplay for ApplicationTy {
849 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
850 if f.should_truncate() {
851 return write!(f, "{}", TYPE_HINT_TRUNCATION);
852 }
853
854 match self.ctor {
855 TypeCtor::Bool => write!(f, "bool")?,
856 TypeCtor::Char => write!(f, "char")?,
857 TypeCtor::Int(t) => write!(f, "{}", t)?,
858 TypeCtor::Float(t) => write!(f, "{}", t)?,
859 TypeCtor::Str => write!(f, "str")?,
860 TypeCtor::Slice => {
861 let t = self.parameters.as_single();
862 write!(f, "[{}]", t.display(f.db))?;
863 }
864 TypeCtor::Array => {
865 let t = self.parameters.as_single();
866 write!(f, "[{}; _]", t.display(f.db))?;
867 }
868 TypeCtor::RawPtr(m) => {
869 let t = self.parameters.as_single();
870 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
871 }
872 TypeCtor::Ref(m) => {
873 let t = self.parameters.as_single();
874 let ty_display = if f.omit_verbose_types() {
875 t.display_truncated(f.db, f.max_size)
876 } else {
877 t.display(f.db)
878 };
879 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
880 }
881 TypeCtor::Never => write!(f, "!")?,
882 TypeCtor::Tuple { .. } => {
883 let ts = &self.parameters;
884 if ts.len() == 1 {
885 write!(f, "({},)", ts[0].display(f.db))?;
886 } else {
887 write!(f, "(")?;
888 f.write_joined(&*ts.0, ", ")?;
889 write!(f, ")")?;
890 }
891 }
892 TypeCtor::FnPtr { .. } => {
893 let sig = FnSig::from_fn_ptr_substs(&self.parameters);
894 write!(f, "fn(")?;
895 f.write_joined(sig.params(), ", ")?;
896 write!(f, ") -> {}", sig.ret().display(f.db))?;
897 }
898 TypeCtor::FnDef(def) => {
899 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
900 let name = match def {
901 CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
902 CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
903 CallableDef::EnumVariantId(e) => {
904 let enum_data = f.db.enum_data(e.parent);
905 enum_data.variants[e.local_id].name.clone()
906 }
907 };
908 match def {
909 CallableDef::FunctionId(_) => write!(f, "fn {}", name)?,
910 CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
911 write!(f, "{}", name)?
912 }
913 }
914 if self.parameters.len() > 0 {
915 let generics = generics(f.db, def.into());
916 let (parent_params, self_param, type_params, _impl_trait_params) =
917 generics.provenance_split();
918 let total_len = parent_params + self_param + type_params;
919 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
920 if total_len > 0 {
921 write!(f, "<")?;
922 f.write_joined(&self.parameters.0[..total_len], ", ")?;
923 write!(f, ">")?;
924 }
925 }
926 write!(f, "(")?;
927 f.write_joined(sig.params(), ", ")?;
928 write!(f, ") -> {}", sig.ret().display(f.db))?;
929 }
930 TypeCtor::Adt(def_id) => {
931 let name = match def_id {
932 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
933 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
934 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
935 };
936 write!(f, "{}", name)?;
937 if self.parameters.len() > 0 {
938 write!(f, "<")?;
939
940 let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
941 let parameters_to_write = if f.omit_verbose_types() {
942 match self
943 .ctor
944 .as_generic_def()
945 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
946 .filter(|defaults| !defaults.is_empty())
947 {
948 Option::None => self.parameters.0.as_ref(),
949 Option::Some(default_parameters) => {
950 for (i, parameter) in self.parameters.iter().enumerate() {
951 match (parameter, default_parameters.get(i)) {
952 (&Ty::Unknown, _) | (_, None) => {
953 non_default_parameters.push(parameter.clone())
954 }
955 (_, Some(default_parameter))
956 if parameter != default_parameter =>
957 {
958 non_default_parameters.push(parameter.clone())
959 }
960 _ => (),
961 }
962 }
963 &non_default_parameters
964 }
965 }
966 } else {
967 self.parameters.0.as_ref()
968 };
969
970 f.write_joined(parameters_to_write, ", ")?;
971 write!(f, ">")?;
972 }
973 }
974 TypeCtor::AssociatedType(type_alias) => {
975 let trait_ = match type_alias.lookup(f.db).container {
976 AssocContainerId::TraitId(it) => it,
977 _ => panic!("not an associated type"),
978 };
979 let trait_name = f.db.trait_data(trait_).name.clone();
980 let name = f.db.type_alias_data(type_alias).name.clone();
981 write!(f, "{}::{}", trait_name, name)?;
982 if self.parameters.len() > 0 {
983 write!(f, "<")?;
984 f.write_joined(&*self.parameters.0, ", ")?;
985 write!(f, ">")?;
986 }
987 }
988 TypeCtor::Closure { .. } => {
989 let sig = self.parameters[0]
990 .callable_sig(f.db)
991 .expect("first closure parameter should contain signature");
992 let return_type_hint = sig.ret().display(f.db);
993 if sig.params().is_empty() {
994 write!(f, "|| -> {}", return_type_hint)?;
995 } else if f.omit_verbose_types() {
996 write!(f, "|{}| -> {}", TYPE_HINT_TRUNCATION, return_type_hint)?;
997 } else {
998 write!(f, "|")?;
999 f.write_joined(sig.params(), ", ")?;
1000 write!(f, "| -> {}", return_type_hint)?;
1001 };
1002 }
1003 }
1004 Ok(())
1005 }
1006}
1007
1008impl HirDisplay for ProjectionTy {
1009 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1010 if f.should_truncate() {
1011 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1012 }
1013
1014 let trait_name = f.db.trait_data(self.trait_(f.db)).name.clone();
1015 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_name,)?;
1016 if self.parameters.len() > 1 {
1017 write!(f, "<")?;
1018 f.write_joined(&self.parameters[1..], ", ")?;
1019 write!(f, ">")?;
1020 }
1021 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
1022 Ok(())
1023 }
1024}
1025
1026impl HirDisplay for Ty {
1027 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1028 if f.should_truncate() {
1029 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1030 }
1031
1032 match self {
1033 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
1034 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
1035 Ty::Param(id) => {
1036 let generics = generics(f.db, id.parent);
1037 let param_data = &generics.params.types[id.local_id];
1038 match param_data.provenance {
1039 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
1040 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
1041 }
1042 TypeParamProvenance::ArgumentImplTrait => {
1043 write!(f, "impl ")?;
1044 let bounds = f.db.generic_predicates_for_param(*id);
1045 let substs = Substs::type_params_for_generics(&generics);
1046 write_bounds_like_dyn_trait(
1047 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
1048 f,
1049 )?;
1050 }
1051 }
1052 }
1053 Ty::Bound(idx) => write!(f, "?{}", idx)?,
1054 Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
1055 match self {
1056 Ty::Dyn(_) => write!(f, "dyn ")?,
1057 Ty::Opaque(_) => write!(f, "impl ")?,
1058 _ => unreachable!(),
1059 };
1060 write_bounds_like_dyn_trait(&predicates, f)?;
1061 }
1062 Ty::Unknown => write!(f, "{{unknown}}")?,
1063 Ty::Infer(..) => write!(f, "_")?,
1064 }
1065 Ok(())
1066 }
1067}
1068
1069fn write_bounds_like_dyn_trait(
1070 predicates: &[GenericPredicate],
1071 f: &mut HirFormatter<impl HirDatabase>,
1072) -> fmt::Result {
1073 // Note: This code is written to produce nice results (i.e.
1074 // corresponding to surface Rust) for types that can occur in
1075 // actual Rust. It will have weird results if the predicates
1076 // aren't as expected (i.e. self types = $0, projection
1077 // predicates for a certain trait come after the Implemented
1078 // predicate for that trait).
1079 let mut first = true;
1080 let mut angle_open = false;
1081 for p in predicates.iter() {
1082 match p {
1083 GenericPredicate::Implemented(trait_ref) => {
1084 if angle_open {
1085 write!(f, ">")?;
1086 }
1087 if !first {
1088 write!(f, " + ")?;
1089 }
1090 // We assume that the self type is $0 (i.e. the
1091 // existential) here, which is the only thing that's
1092 // possible in actual Rust, and hence don't print it
1093 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?;
1094 if trait_ref.substs.len() > 1 {
1095 write!(f, "<")?;
1096 f.write_joined(&trait_ref.substs[1..], ", ")?;
1097 // there might be assoc type bindings, so we leave the angle brackets open
1098 angle_open = true;
1099 }
1100 }
1101 GenericPredicate::Projection(projection_pred) => {
1102 // in types in actual Rust, these will always come
1103 // after the corresponding Implemented predicate
1104 if angle_open {
1105 write!(f, ", ")?;
1106 } else {
1107 write!(f, "<")?;
1108 angle_open = true;
1109 }
1110 let name =
1111 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
1112 write!(f, "{} = ", name)?;
1113 projection_pred.ty.hir_fmt(f)?;
1114 }
1115 GenericPredicate::Error => {
1116 if angle_open {
1117 // impl Trait<X, {error}>
1118 write!(f, ", ")?;
1119 } else if !first {
1120 // impl Trait + {error}
1121 write!(f, " + ")?;
1122 }
1123 p.hir_fmt(f)?;
1124 }
1125 }
1126 first = false;
1127 }
1128 if angle_open {
1129 write!(f, ">")?;
1130 }
1131 Ok(())
1132}
1133
1134impl TraitRef {
1135 fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
1136 if f.should_truncate() {
1137 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1138 }
1139
1140 self.substs[0].hir_fmt(f)?;
1141 if use_as {
1142 write!(f, " as ")?;
1143 } else {
1144 write!(f, ": ")?;
1145 }
1146 write!(f, "{}", f.db.trait_data(self.trait_).name.clone())?;
1147 if self.substs.len() > 1 {
1148 write!(f, "<")?;
1149 f.write_joined(&self.substs[1..], ", ")?;
1150 write!(f, ">")?;
1151 }
1152 Ok(())
1153 }
1154}
1155
1156impl HirDisplay for TraitRef {
1157 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1158 self.hir_fmt_ext(f, false)
1159 }
1160}
1161
1162impl HirDisplay for &GenericPredicate {
1163 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1164 HirDisplay::hir_fmt(*self, f)
1165 }
1166}
1167
1168impl HirDisplay for GenericPredicate {
1169 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1170 if f.should_truncate() {
1171 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1172 }
1173
1174 match self {
1175 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
1176 GenericPredicate::Projection(projection_pred) => {
1177 write!(f, "<")?;
1178 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
1179 write!(
1180 f,
1181 ">::{} = {}",
1182 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
1183 projection_pred.ty.display(f.db)
1184 )?;
1185 }
1186 GenericPredicate::Error => write!(f, "{{error}}")?,
1187 }
1188 Ok(())
1189 }
1190}
1191
1192impl HirDisplay for Obligation {
1193 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1194 match self {
1195 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
1196 Obligation::Projection(proj) => write!(
1197 f,
1198 "Normalize({} => {})",
1199 proj.projection_ty.display(f.db),
1200 proj.ty.display(f.db)
1201 ),
1202 }
1203 }
1204}
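To make the angle-bracket bookkeeping in `write_bounds_like_dyn_trait` concrete, here is a sketch of the predicate order it expects and the string it renders; the example type and the `$0` notation for the bound self type are illustrative assumptions, not part of this change.

    // For a return type written as `impl Iterator<Item = u32> + Clone`, the
    // predicates arrive roughly as:
    //   Implemented(Iterator<$0>)                  writes "Iterator" (substs.len() == 1, no "<")
    //   Projection(<$0 as Iterator>::Item = u32)   opens "<" and writes "Item = u32"
    //   Implemented(Clone)                         closes ">", then writes " + Clone"
    // giving "Iterator<Item = u32> + Clone"; the caller prefixes "impl " or "dyn ".
    fn example() -> impl Iterator<Item = u32> + Clone {
        0..10u32
    }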
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs
index c68c5852b..6a2aded02 100644
--- a/crates/ra_hir_ty/src/lower.rs
+++ b/crates/ra_hir_ty/src/lower.rs
@@ -14,9 +14,9 @@ use hir_def::{
14 path::{GenericArg, Path, PathSegment, PathSegments}, 14 path::{GenericArg, Path, PathSegment, PathSegments},
15 resolver::{HasResolver, Resolver, TypeNs}, 15 resolver::{HasResolver, Resolver, TypeNs},
16 type_ref::{TypeBound, TypeRef}, 16 type_ref::{TypeBound, TypeRef},
17 AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, 17 AdtId, AssocContainerId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule,
18 LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, 18 ImplId, LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId,
19 VariantId, 19 UnionId, VariantId,
20}; 20};
21use ra_arena::map::ArenaMap; 21use ra_arena::map::ArenaMap;
22use ra_db::CrateId; 22use ra_db::CrateId;
@@ -152,7 +152,7 @@ impl Ty {
152 data.provenance == TypeParamProvenance::ArgumentImplTrait 152 data.provenance == TypeParamProvenance::ArgumentImplTrait
153 }) 153 })
154 .nth(idx as usize) 154 .nth(idx as usize)
155 .map_or(Ty::Unknown, |(id, _)| Ty::Param(id)); 155 .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id));
156 param 156 param
157 } else { 157 } else {
158 Ty::Unknown 158 Ty::Unknown
@@ -270,7 +270,7 @@ impl Ty {
270 let generics = 270 let generics =
271 generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope")); 271 generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope"));
272 match ctx.type_param_mode { 272 match ctx.type_param_mode {
273 TypeParamLoweringMode::Placeholder => Ty::Param(param_id), 273 TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
274 TypeParamLoweringMode::Variable => { 274 TypeParamLoweringMode::Variable => {
275 let idx = generics.param_idx(param_id).expect("matching generics"); 275 let idx = generics.param_idx(param_id).expect("matching generics");
276 Ty::Bound(idx) 276 Ty::Bound(idx)
@@ -339,7 +339,7 @@ impl Ty {
339 None => return Ty::Unknown, // this can't actually happen 339 None => return Ty::Unknown, // this can't actually happen
340 }; 340 };
341 let param_id = match self_ty { 341 let param_id = match self_ty {
342 Ty::Param(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id, 342 Ty::Placeholder(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id,
343 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => { 343 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => {
344 let generics = generics(ctx.db, def); 344 let generics = generics(ctx.db, def);
345 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) { 345 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) {
@@ -544,7 +544,7 @@ impl GenericPredicate {
544 let generics = generics(ctx.db, generic_def); 544 let generics = generics(ctx.db, generic_def);
545 let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; 545 let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
546 match ctx.type_param_mode { 546 match ctx.type_param_mode {
547 TypeParamLoweringMode::Placeholder => Ty::Param(param_id), 547 TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
548 TypeParamLoweringMode::Variable => { 548 TypeParamLoweringMode::Variable => {
549 let idx = generics.param_idx(param_id).expect("matching generics"); 549 let idx = generics.param_idx(param_id).expect("matching generics");
550 Ty::Bound(idx) 550 Ty::Bound(idx)
@@ -672,11 +672,35 @@ impl TraitEnvironment {
672 pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { 672 pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
673 let ctx = TyLoweringContext::new(db, &resolver) 673 let ctx = TyLoweringContext::new(db, &resolver)
674 .with_type_param_mode(TypeParamLoweringMode::Placeholder); 674 .with_type_param_mode(TypeParamLoweringMode::Placeholder);
675 let predicates = resolver 675 let mut predicates = resolver
676 .where_predicates_in_scope() 676 .where_predicates_in_scope()
677 .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred)) 677 .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred))
678 .collect::<Vec<_>>(); 678 .collect::<Vec<_>>();
679 679
680 if let Some(def) = resolver.generic_def() {
681 let container: Option<AssocContainerId> = match def {
682 // FIXME: is there a function for this?
683 GenericDefId::FunctionId(f) => Some(f.lookup(db).container),
684 GenericDefId::AdtId(_) => None,
685 GenericDefId::TraitId(_) => None,
686 GenericDefId::TypeAliasId(t) => Some(t.lookup(db).container),
687 GenericDefId::ImplId(_) => None,
688 GenericDefId::EnumVariantId(_) => None,
689 GenericDefId::ConstId(c) => Some(c.lookup(db).container),
690 };
691 if let Some(AssocContainerId::TraitId(trait_id)) = container {
692 // add `Self: Trait<T1, T2, ...>` to the environment in trait
693 // function default implementations (and hypothetical code
694 // inside consts or type aliases)
695 test_utils::tested_by!(trait_self_implements_self);
696 let substs = Substs::type_params(db, trait_id);
697 let trait_ref = TraitRef { trait_: trait_id, substs };
698 let pred = GenericPredicate::Implemented(trait_ref);
699
700 predicates.push(pred);
701 }
702 }
703
680 Arc::new(TraitEnvironment { predicates }) 704 Arc::new(TraitEnvironment { predicates })
681 } 705 }
682} 706}
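The predicate pushed above is what lets `self` calls resolve inside trait default method bodies; a minimal free-standing illustration in plain Rust (assumed example, not taken from the commit):

    trait Greet {
        fn name(&self) -> String;

        // Inside this default body the trait environment now contains the
        // `Self: Greet` predicate pushed above, so `self.name()` resolves and
        // `greeting` infers to `String`.
        fn greet(&self) -> String {
            let greeting = self.name();
            format!("hello, {}", greeting)
        }
    }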
diff --git a/crates/ra_hir_ty/src/marks.rs b/crates/ra_hir_ty/src/marks.rs
index 0f754eb9c..de5cb1d6b 100644
--- a/crates/ra_hir_ty/src/marks.rs
+++ b/crates/ra_hir_ty/src/marks.rs
@@ -4,6 +4,8 @@ test_utils::marks!(
4 type_var_cycles_resolve_completely 4 type_var_cycles_resolve_completely
5 type_var_cycles_resolve_as_possible 5 type_var_cycles_resolve_as_possible
6 type_var_resolves_to_int_var 6 type_var_resolves_to_int_var
7 impl_self_type_match_without_receiver
7 match_ergonomics_ref 8 match_ergonomics_ref
8 coerce_merge_fail_fallback 9 coerce_merge_fail_fallback
10 trait_self_implements_self
9); 11);
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs
index 5283bff28..4f8c52433 100644
--- a/crates/ra_hir_ty/src/method_resolution.rs
+++ b/crates/ra_hir_ty/src/method_resolution.rs
@@ -425,6 +425,15 @@ fn iterate_inherent_methods<T>(
425 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { 425 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
426 continue; 426 continue;
427 } 427 }
428 // we have to check whether the self type unifies with the type
429 // that the impl is for. If we have a receiver type, this
430 // already happens in `is_valid_candidate` above; if not, we
431 // check it here
432 if receiver_ty.is_none() && inherent_impl_substs(db, impl_block, self_ty).is_none()
433 {
434 test_utils::tested_by!(impl_self_type_match_without_receiver);
435 continue;
436 }
428 if let Some(result) = callback(&self_ty.value, item) { 437 if let Some(result) = callback(&self_ty.value, item) {
429 return Some(result); 438 return Some(result);
430 } 439 }
diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/ra_hir_ty/src/tests/coercion.rs
index fc5ef36a5..42330b269 100644
--- a/crates/ra_hir_ty/src/tests/coercion.rs
+++ b/crates/ra_hir_ty/src/tests/coercion.rs
@@ -526,3 +526,25 @@ fn test() {
526 "### 526 "###
527 ); 527 );
528} 528}
529
530#[test]
531fn coerce_placeholder_ref() {
532 // placeholders should unify, even behind references
533 assert_snapshot!(
534 infer_with_mismatches(r#"
535struct S<T> { t: T }
536impl<TT> S<TT> {
537 fn get(&self) -> &TT {
538 &self.t
539 }
540}
541"#, true),
542 @r###"
543 [51; 55) 'self': &S<TT>
544 [64; 87) '{ ... }': &TT
545 [74; 81) '&self.t': &TT
546 [75; 79) 'self': &S<TT>
547 [75; 81) 'self.t': TT
548 "###
549 );
550}
diff --git a/crates/ra_hir_ty/src/tests/method_resolution.rs b/crates/ra_hir_ty/src/tests/method_resolution.rs
index 1722563aa..1f767d324 100644
--- a/crates/ra_hir_ty/src/tests/method_resolution.rs
+++ b/crates/ra_hir_ty/src/tests/method_resolution.rs
@@ -964,6 +964,38 @@ fn test() { S2.into()<|>; }
964} 964}
965 965
966#[test] 966#[test]
967fn method_resolution_overloaded_method() {
968 test_utils::covers!(impl_self_type_match_without_receiver);
969 let t = type_at(
970 r#"
971//- main.rs
972struct Wrapper<T>(T);
973struct Foo<T>(T);
974struct Bar<T>(T);
975
976impl<T> Wrapper<Foo<T>> {
977 pub fn new(foo_: T) -> Self {
978 Wrapper(Foo(foo_))
979 }
980}
981
982impl<T> Wrapper<Bar<T>> {
983 pub fn new(bar_: T) -> Self {
984 Wrapper(Bar(bar_))
985 }
986}
987
988fn main() {
989 let a = Wrapper::<Foo<f32>>::new(1.0);
990 let b = Wrapper::<Bar<f32>>::new(1.0);
991 (a, b)<|>;
992}
993"#,
994 );
995 assert_eq!(t, "(Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)")
996}
997
998#[test]
967fn method_resolution_encountering_fn_type() { 999fn method_resolution_encountering_fn_type() {
968 type_at( 1000 type_at(
969 r#" 1001 r#"
diff --git a/crates/ra_hir_ty/src/tests/never_type.rs b/crates/ra_hir_ty/src/tests/never_type.rs
index c202f545a..a77209480 100644
--- a/crates/ra_hir_ty/src/tests/never_type.rs
+++ b/crates/ra_hir_ty/src/tests/never_type.rs
@@ -101,6 +101,7 @@ fn test() {
101 ); 101 );
102 assert_eq!(t, "Option<i32>"); 102 assert_eq!(t, "Option<i32>");
103} 103}
104
104#[test] 105#[test]
105fn never_type_can_be_reinferred3() { 106fn never_type_can_be_reinferred3() {
106 let t = type_at( 107 let t = type_at(
@@ -138,6 +139,22 @@ fn test(a: Void) {
138} 139}
139 140
140#[test] 141#[test]
142fn match_unknown_arm() {
143 let t = type_at(
144 r#"
145//- /main.rs
146fn test(a: Option) {
147 let t = match 0 {
148 _ => unknown,
149 };
150 t<|>;
151}
152"#,
153 );
154 assert_eq!(t, "{unknown}");
155}
156
157#[test]
141fn if_never() { 158fn if_never() {
142 let t = type_at( 159 let t = type_at(
143 r#" 160 r#"
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 17611ddbf..aa2018944 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -300,6 +300,54 @@ fn test() {
300} 300}
301 301
302#[test] 302#[test]
303fn trait_default_method_self_bound_implements_trait() {
304 test_utils::covers!(trait_self_implements_self);
305 assert_snapshot!(
306 infer(r#"
307trait Trait {
308 fn foo(&self) -> i64;
309 fn bar(&self) -> {
310 let x = self.foo();
311 }
312}
313"#),
314 @r###"
315 [27; 31) 'self': &Self
316 [53; 57) 'self': &Self
317 [62; 97) '{ ... }': ()
318 [76; 77) 'x': i64
319 [80; 84) 'self': &Self
320 [80; 90) 'self.foo()': i64
321 "###
322 );
323}
324
325#[test]
326fn trait_default_method_self_bound_implements_super_trait() {
327 test_utils::covers!(trait_self_implements_self);
328 assert_snapshot!(
329 infer(r#"
330trait SuperTrait {
331 fn foo(&self) -> i64;
332}
333trait Trait: SuperTrait {
334 fn bar(&self) -> {
335 let x = self.foo();
336 }
337}
338"#),
339 @r###"
340 [32; 36) 'self': &Self
341 [86; 90) 'self': &Self
342 [95; 130) '{ ... }': ()
343 [109; 110) 'x': i64
344 [113; 117) 'self': &Self
345 [113; 123) 'self.foo()': i64
346 "###
347 );
348}
349
350#[test]
303fn infer_project_associated_type() { 351fn infer_project_associated_type() {
304 // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234 352 // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234
305 assert_snapshot!( 353 assert_snapshot!(
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 88af61e87..ff8e75b48 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -60,6 +60,9 @@ impl TraitSolver {
60 context.0.db.check_canceled(); 60 context.0.db.check_canceled();
61 let remaining = fuel.get(); 61 let remaining = fuel.get();
62 fuel.set(remaining - 1); 62 fuel.set(remaining - 1);
63 if remaining == 0 {
64 log::debug!("fuel exhausted");
65 }
63 remaining > 0 66 remaining > 0
64 }) 67 })
65 } 68 }
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs
index 4974c565b..882160fa8 100644
--- a/crates/ra_hir_ty/src/traits/chalk.rs
+++ b/crates/ra_hir_ty/src/traits/chalk.rs
@@ -142,7 +142,7 @@ impl ToChalk for Ty {
142 let substitution = proj_ty.parameters.to_chalk(db); 142 let substitution = proj_ty.parameters.to_chalk(db);
143 chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern() 143 chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern()
144 } 144 }
145 Ty::Param(id) => { 145 Ty::Placeholder(id) => {
146 let interned_id = db.intern_type_param_id(id); 146 let interned_id = db.intern_type_param_id(id);
147 PlaceholderIndex { 147 PlaceholderIndex {
148 ui: UniverseIndex::ROOT, 148 ui: UniverseIndex::ROOT,
@@ -184,7 +184,7 @@ impl ToChalk for Ty {
184 let interned_id = crate::db::GlobalTypeParamId::from_intern_id( 184 let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
185 crate::salsa::InternId::from(idx.idx), 185 crate::salsa::InternId::from(idx.idx),
186 ); 186 );
187 Ty::Param(db.lookup_intern_type_param_id(interned_id)) 187 Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
188 } 188 }
189 chalk_ir::TyData::Alias(proj) => { 189 chalk_ir::TyData::Alias(proj) => {
190 let associated_ty = from_chalk(db, proj.associated_ty_id); 190 let associated_ty = from_chalk(db, proj.associated_ty_id);
diff --git a/crates/ra_ide/src/goto_type_definition.rs b/crates/ra_ide/src/goto_type_definition.rs
index 11ad6d137..69940fc36 100644
--- a/crates/ra_ide/src/goto_type_definition.rs
+++ b/crates/ra_ide/src/goto_type_definition.rs
@@ -16,24 +16,16 @@ pub(crate) fn goto_type_definition(
16 let token = pick_best(file.token_at_offset(position.offset))?; 16 let token = pick_best(file.token_at_offset(position.offset))?;
17 let token = descend_into_macros(db, position.file_id, token); 17 let token = descend_into_macros(db, position.file_id, token);
18 18
19 let node = token.value.ancestors().find_map(|token| { 19 let node = token
20 token 20 .value
21 .ancestors() 21 .ancestors()
22 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some()) 22 .find(|n| ast::Expr::cast(n.clone()).is_some() || ast::Pat::cast(n.clone()).is_some())?;
23 })?;
24 23
25 let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None); 24 let analyzer = hir::SourceAnalyzer::new(db, token.with_value(&node), None);
26 25
27 let ty: hir::Type = if let Some(ty) = 26 let ty: hir::Type = ast::Expr::cast(node.clone())
28 ast::Expr::cast(node.clone()).and_then(|e| analyzer.type_of(db, &e)) 27 .and_then(|e| analyzer.type_of(db, &e))
29 { 28 .or_else(|| ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p)))?;
30 ty
31 } else if let Some(ty) = ast::Pat::cast(node.clone()).and_then(|p| analyzer.type_of_pat(db, &p))
32 {
33 ty
34 } else {
35 return None;
36 };
37 29
38 let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?; 30 let adt_def = ty.autoderef(db).find_map(|ty| ty.as_adt())?;
39 31
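The rewritten lookup is the usual `Option` combinator chain; the same shape in isolation, with hypothetical helpers standing in for `type_of` and `type_of_pat`:

    fn lookup(node: &str) -> Option<String> {
        // Try the expression route first and fall back to the pattern route; the
        // `?` at the real call site replaces the old explicit `return None`.
        as_expr_type(node).or_else(|| as_pat_type(node))
    }

    fn as_expr_type(s: &str) -> Option<String> {
        s.strip_prefix("expr:").map(str::to_owned)
    }

    fn as_pat_type(s: &str) -> Option<String> {
        s.strip_prefix("pat:").map(str::to_owned)
    }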
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index 6b0d3d996..2ae97e65f 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -80,8 +80,7 @@ fn get_inlay_hints(
80 }, 80 },
81 ast::MatchArmList(it) => { 81 ast::MatchArmList(it) => {
82 it.arms() 82 it.arms()
83 .map(|match_arm| match_arm.pats()) 83 .filter_map(|match_arm| match_arm.pat())
84 .flatten()
85 .for_each(|root_pat| get_pat_type_hints(acc, db, &analyzer, root_pat, true, max_inlay_hint_length)); 84 .for_each(|root_pat| get_pat_type_hints(acc, db, &analyzer, root_pat, true, max_inlay_hint_length));
86 }, 85 },
87 ast::CallExpr(it) => { 86 ast::CallExpr(it) => {
@@ -202,6 +201,7 @@ fn get_leaf_pats(root_pat: ast::Pat) -> Vec<ast::Pat> {
202 Some(pat) => pats_to_process.push_back(pat), 201 Some(pat) => pats_to_process.push_back(pat),
203 _ => leaf_pats.push(maybe_leaf_pat), 202 _ => leaf_pats.push(maybe_leaf_pat),
204 }, 203 },
204 ast::Pat::OrPat(ref_pat) => pats_to_process.extend(ref_pat.pats()),
205 ast::Pat::TuplePat(tuple_pat) => pats_to_process.extend(tuple_pat.args()), 205 ast::Pat::TuplePat(tuple_pat) => pats_to_process.extend(tuple_pat.args()),
206 ast::Pat::RecordPat(record_pat) => { 206 ast::Pat::RecordPat(record_pat) => {
207 if let Some(pat_list) = record_pat.record_field_pat_list() { 207 if let Some(pat_list) = record_pat.record_field_pat_list() {
@@ -222,6 +222,7 @@ fn get_leaf_pats(root_pat: ast::Pat) -> Vec<ast::Pat> {
222 ast::Pat::TupleStructPat(tuple_struct_pat) => { 222 ast::Pat::TupleStructPat(tuple_struct_pat) => {
223 pats_to_process.extend(tuple_struct_pat.args()) 223 pats_to_process.extend(tuple_struct_pat.args())
224 } 224 }
225 ast::Pat::ParenPat(inner_pat) => pats_to_process.extend(inner_pat.pat()),
225 ast::Pat::RefPat(ref_pat) => pats_to_process.extend(ref_pat.pat()), 226 ast::Pat::RefPat(ref_pat) => pats_to_process.extend(ref_pat.pat()),
226 _ => (), 227 _ => (),
227 } 228 }
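With `OrPat` and `ParenPat` now traversed, bindings nested inside them become reachable leaf patterns; a sketch of the kind of pattern that benefits (assumed example):

    fn classify(byte: u8) {
        match byte {
            // The leaf-pattern walk now descends through the parentheses and the
            // `|` alternatives, so the binding `n` is found and can receive a
            // `: u8` inlay hint.
            (n @ 0..=9 | n @ 10..=255) => {
                let _ = n;
            }
        }
    }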
diff --git a/crates/ra_ide/src/join_lines.rs b/crates/ra_ide/src/join_lines.rs
index 7deeb3494..01fb32b3d 100644
--- a/crates/ra_ide/src/join_lines.rs
+++ b/crates/ra_ide/src/join_lines.rs
@@ -60,36 +60,15 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
60 return; 60 return;
61 } 61 }
62 62
63 // Special case that turns something like:
64 //
65 // ```
66 // my_function({<|>
67 // <some-expr>
68 // })
69 // ```
70 //
71 // into `my_function(<some-expr>)`
72 if join_single_expr_block(edit, token).is_some() {
73 return;
74 }
75 // ditto for
76 //
77 // ```
78 // use foo::{<|>
79 // bar
80 // };
81 // ```
82 if join_single_use_tree(edit, token).is_some() {
83 return;
84 }
85
86 // The node is between two other nodes 63 // The node is between two other nodes
87 let prev = token.prev_sibling_or_token().unwrap(); 64 let prev = token.prev_sibling_or_token().unwrap();
88 let next = token.next_sibling_or_token().unwrap(); 65 let next = token.next_sibling_or_token().unwrap();
89 if is_trailing_comma(prev.kind(), next.kind()) { 66 if is_trailing_comma(prev.kind(), next.kind()) {
90 // Removes: trailing comma, newline (incl. surrounding whitespace) 67 // Removes: trailing comma, newline (incl. surrounding whitespace)
91 edit.delete(TextRange::from_to(prev.text_range().start(), token.text_range().end())); 68 edit.delete(TextRange::from_to(prev.text_range().start(), token.text_range().end()));
92 } else if prev.kind() == T![,] && next.kind() == T!['}'] { 69 return;
70 }
71 if prev.kind() == T![,] && next.kind() == T!['}'] {
93 // Removes: comma, newline (incl. surrounding whitespace) 72 // Removes: comma, newline (incl. surrounding whitespace)
94 let space = if let Some(left) = prev.prev_sibling_or_token() { 73 let space = if let Some(left) = prev.prev_sibling_or_token() {
95 compute_ws(left.kind(), next.kind()) 74 compute_ws(left.kind(), next.kind())
@@ -100,7 +79,10 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
100 TextRange::from_to(prev.text_range().start(), token.text_range().end()), 79 TextRange::from_to(prev.text_range().start(), token.text_range().end()),
101 space.to_string(), 80 space.to_string(),
102 ); 81 );
103 } else if let (Some(_), Some(next)) = ( 82 return;
83 }
84
85 if let (Some(_), Some(next)) = (
104 prev.as_token().cloned().and_then(ast::Comment::cast), 86 prev.as_token().cloned().and_then(ast::Comment::cast),
105 next.as_token().cloned().and_then(ast::Comment::cast), 87 next.as_token().cloned().and_then(ast::Comment::cast),
106 ) { 88 ) {
@@ -109,10 +91,34 @@ fn remove_newline(edit: &mut TextEditBuilder, token: &SyntaxToken, offset: TextU
109 token.text_range().start(), 91 token.text_range().start(),
110 next.syntax().text_range().start() + TextUnit::of_str(next.prefix()), 92 next.syntax().text_range().start() + TextUnit::of_str(next.prefix()),
111 )); 93 ));
112 } else { 94 return;
113 // Remove newline but add a computed amount of whitespace characters 95 }
114 edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string()); 96
97 // Special case that turns something like:
98 //
99 // ```
100 // my_function({<|>
101 // <some-expr>
102 // })
103 // ```
104 //
105 // into `my_function(<some-expr>)`
106 if join_single_expr_block(edit, token).is_some() {
107 return;
115 } 108 }
109 // ditto for
110 //
111 // ```
112 // use foo::{<|>
113 // bar
114 // };
115 // ```
116 if join_single_use_tree(edit, token).is_some() {
117 return;
118 }
119
120 // Remove newline but add a computed amount of whitespace characters
121 edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string());
116} 122}
117 123
118fn has_comma_after(node: &SyntaxNode) -> bool { 124fn has_comma_after(node: &SyntaxNode) -> bool {
@@ -608,4 +614,27 @@ pub fn handle_find_matching_brace() {
608}", 614}",
609 ); 615 );
610 } 616 }
617
618 #[test]
619 fn test_join_lines_commented_block() {
620 check_join_lines(
621 r"
622fn main() {
623 let _ = {
624 // <|>foo
625 // bar
626 92
627 };
628}
629 ",
630 r"
631fn main() {
632 let _ = {
633 // <|>foo bar
634 92
635 };
636}
637 ",
638 )
639 }
611} 640}
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index 689921f3f..9d66c365b 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -71,7 +71,7 @@ pub use crate::{
71 references::{ 71 references::{
72 Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult, SearchScope, 72 Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult, SearchScope,
73 }, 73 },
74 runnables::{Runnable, RunnableKind}, 74 runnables::{Runnable, RunnableKind, TestId},
75 source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, 75 source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
76 syntax_highlighting::HighlightedRange, 76 syntax_highlighting::HighlightedRange,
77}; 77};
diff --git a/crates/ra_ide/src/references.rs b/crates/ra_ide/src/references.rs
index a6320bd2f..de924fad2 100644
--- a/crates/ra_ide/src/references.rs
+++ b/crates/ra_ide/src/references.rs
@@ -13,6 +13,7 @@ mod classify;
13mod rename; 13mod rename;
14mod search_scope; 14mod search_scope;
15 15
16use crate::expand::descend_into_macros_with_analyzer;
16use hir::{InFile, SourceBinder}; 17use hir::{InFile, SourceBinder};
17use once_cell::unsync::Lazy; 18use once_cell::unsync::Lazy;
18use ra_db::{SourceDatabase, SourceDatabaseExt}; 19use ra_db::{SourceDatabase, SourceDatabaseExt};
@@ -192,39 +193,62 @@ fn process_definition(
192 193
193 let parse = Lazy::new(|| SourceFile::parse(&text)); 194 let parse = Lazy::new(|| SourceFile::parse(&text));
194 let mut sb = Lazy::new(|| SourceBinder::new(db)); 195 let mut sb = Lazy::new(|| SourceBinder::new(db));
196 let mut analyzer = None;
195 197
196 for (idx, _) in text.match_indices(pat) { 198 for (idx, _) in text.match_indices(pat) {
197 let offset = TextUnit::from_usize(idx); 199 let offset = TextUnit::from_usize(idx);
198 200
199 if let Some(name_ref) = 201 let (name_ref, range) = if let Some(name_ref) =
200 find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset) 202 find_node_at_offset::<ast::NameRef>(parse.tree().syntax(), offset)
201 { 203 {
202 let range = name_ref.syntax().text_range(); 204 let range = name_ref.syntax().text_range();
203 if let Some(search_range) = search_range { 205 (InFile::new(file_id.into(), name_ref), range)
204 if !range.is_subrange(&search_range) { 206 } else {
205 continue; 207 // Handle macro token cases
206 } 208 let t = match parse.tree().syntax().token_at_offset(offset) {
209 TokenAtOffset::None => continue,
210 TokenAtOffset::Single(t) => t,
211 TokenAtOffset::Between(_, t) => t,
212 };
213 let range = t.text_range();
214 let analyzer = analyzer.get_or_insert_with(|| {
215 sb.analyze(InFile::new(file_id.into(), parse.tree().syntax()), None)
216 });
217 let expanded = descend_into_macros_with_analyzer(
218 db,
219 &analyzer,
220 InFile::new(file_id.into(), t),
221 );
222 if let Some(token) = ast::NameRef::cast(expanded.value.parent()) {
223 (expanded.with_value(token), range)
224 } else {
225 continue;
207 } 226 }
208 // FIXME: reuse sb 227 };
209 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098 228
210 229 if let Some(search_range) = search_range {
211 if let Some(d) = classify_name_ref(&mut sb, InFile::new(file_id.into(), &name_ref)) 230 if !range.is_subrange(&search_range) {
212 { 231 continue;
213 if d == def { 232 }
214 let kind = if is_record_lit_name_ref(&name_ref) 233 }
215 || is_call_expr_name_ref(&name_ref) 234 // FIXME: reuse sb
216 { 235 // See https://github.com/rust-lang/rust/pull/68198#issuecomment-574269098
217 ReferenceKind::StructLiteral 236
218 } else { 237 if let Some(d) = classify_name_ref(&mut sb, name_ref.as_ref()) {
219 ReferenceKind::Other 238 if d == def {
220 }; 239 let kind = if is_record_lit_name_ref(&name_ref.value)
221 240 || is_call_expr_name_ref(&name_ref.value)
222 refs.push(Reference { 241 {
223 file_range: FileRange { file_id, range }, 242 ReferenceKind::StructLiteral
224 kind, 243 } else {
225 access: reference_access(&d.kind, &name_ref), 244 ReferenceKind::Other
226 }); 245 };
227 } 246
247 refs.push(Reference {
248 file_range: FileRange { file_id, range },
249 kind,
250 access: reference_access(&d.kind, &name_ref.value),
251 });
228 } 252 }
229 } 253 }
230 } 254 }
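The token-at-offset fallback matters when an identifier only occurs as a macro argument: there is no `NameRef` node at that offset until the macro is expanded. A small example of the situation it handles (plain Rust, not from this change):

    macro_rules! pass_through {
        ($i:ident) => {
            $i
        };
    }

    fn main() {
        let value = 92;
        // At this call site `value` is only a token inside the macro invocation, so
        // find_node_at_offset::<ast::NameRef> fails; descending into the expansion
        // recovers a NameRef that can be classified against the definition of `value`.
        let _copy = pass_through!(value);
    }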
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index 08e77c01f..c46b78cb6 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -211,6 +211,25 @@ mod tests {
211 } 211 }
212 212
213 #[test] 213 #[test]
214 fn test_rename_for_macro_args() {
215 test_rename(
216 r#"
217 macro_rules! foo {($i:ident) => {$i} }
218 fn main() {
219 let a<|> = "test";
220 foo!(a);
221 }"#,
222 "b",
223 r#"
224 macro_rules! foo {($i:ident) => {$i} }
225 fn main() {
226 let b = "test";
227 foo!(b);
228 }"#,
229 );
230 }
231
232 #[test]
214 fn test_rename_for_param_inside() { 233 fn test_rename_for_param_inside() {
215 test_rename( 234 test_rename(
216 r#" 235 r#"
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs
index b6b0c70f9..be2a67d0a 100644
--- a/crates/ra_ide/src/runnables.rs
+++ b/crates/ra_ide/src/runnables.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::InFile; 3use hir::{InFile, SourceBinder};
4use itertools::Itertools; 4use itertools::Itertools;
5use ra_db::SourceDatabase; 5use ra_db::SourceDatabase;
6use ra_ide_db::RootDatabase; 6use ra_ide_db::RootDatabase;
@@ -10,6 +10,7 @@ use ra_syntax::{
10}; 10};
11 11
12use crate::FileId; 12use crate::FileId;
13use std::fmt::Display;
13 14
14#[derive(Debug)] 15#[derive(Debug)]
15pub struct Runnable { 16pub struct Runnable {
@@ -18,38 +19,84 @@ pub struct Runnable {
18} 19}
19 20
20#[derive(Debug)] 21#[derive(Debug)]
22pub enum TestId {
23 Name(String),
24 Path(String),
25}
26
27impl Display for TestId {
28 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
29 match self {
30 TestId::Name(name) => write!(f, "{}", name),
31 TestId::Path(path) => write!(f, "{}", path),
32 }
33 }
34}
35
36#[derive(Debug)]
21pub enum RunnableKind { 37pub enum RunnableKind {
22 Test { name: String }, 38 Test { test_id: TestId },
23 TestMod { path: String }, 39 TestMod { path: String },
24 Bench { name: String }, 40 Bench { test_id: TestId },
25 Bin, 41 Bin,
26} 42}
27 43
28pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { 44pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
29 let parse = db.parse(file_id); 45 let parse = db.parse(file_id);
30 parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() 46 let mut sb = SourceBinder::new(db);
47 parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect()
31} 48}
32 49
33fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> { 50fn runnable(
51 db: &RootDatabase,
52 source_binder: &mut SourceBinder<RootDatabase>,
53 file_id: FileId,
54 item: SyntaxNode,
55) -> Option<Runnable> {
34 match_ast! { 56 match_ast! {
35 match item { 57 match item {
36 ast::FnDef(it) => { runnable_fn(it) }, 58 ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) },
37 ast::Module(it) => { runnable_mod(db, file_id, it) }, 59 ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) },
38 _ => { None }, 60 _ => { None },
39 } 61 }
40 } 62 }
41} 63}
42 64
43fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { 65fn runnable_fn(
44 let name = fn_def.name()?.text().clone(); 66 db: &RootDatabase,
45 let kind = if name == "main" { 67 source_binder: &mut SourceBinder<RootDatabase>,
68 file_id: FileId,
69 fn_def: ast::FnDef,
70) -> Option<Runnable> {
71 let name_string = fn_def.name()?.text().to_string();
72
73 let kind = if name_string == "main" {
46 RunnableKind::Bin 74 RunnableKind::Bin
47 } else if has_test_related_attribute(&fn_def) {
48 RunnableKind::Test { name: name.to_string() }
49 } else if fn_def.has_atom_attr("bench") {
50 RunnableKind::Bench { name: name.to_string() }
51 } else { 75 } else {
52 return None; 76 let test_id = if let Some(module) = source_binder
77 .to_def(InFile::new(file_id.into(), fn_def.clone()))
78 .map(|def| def.module(db))
79 {
80 let path = module
81 .path_to_root(db)
82 .into_iter()
83 .rev()
84 .filter_map(|it| it.name(db))
85 .map(|name| name.to_string())
86 .chain(std::iter::once(name_string))
87 .join("::");
88 TestId::Path(path)
89 } else {
90 TestId::Name(name_string)
91 };
92
93 if has_test_related_attribute(&fn_def) {
94 RunnableKind::Test { test_id }
95 } else if fn_def.has_atom_attr("bench") {
96 RunnableKind::Bench { test_id }
97 } else {
98 return None;
99 }
53 }; 100 };
54 Some(Runnable { range: fn_def.syntax().text_range(), kind }) 101 Some(Runnable { range: fn_def.syntax().text_range(), kind })
55} 102}
@@ -68,7 +115,12 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
68 .any(|attribute_text| attribute_text.contains("test")) 115 .any(|attribute_text| attribute_text.contains("test"))
69} 116}
70 117
71fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { 118fn runnable_mod(
119 db: &RootDatabase,
120 source_binder: &mut SourceBinder<RootDatabase>,
121 file_id: FileId,
122 module: ast::Module,
123) -> Option<Runnable> {
72 let has_test_function = module 124 let has_test_function = module
73 .item_list()? 125 .item_list()?
74 .items() 126 .items()
@@ -76,13 +128,12 @@ fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Opti
76 ast::ModuleItem::FnDef(it) => Some(it), 128 ast::ModuleItem::FnDef(it) => Some(it),
77 _ => None, 129 _ => None,
78 }) 130 })
79 .any(|f| f.has_atom_attr("test")); 131 .any(|f| has_test_related_attribute(&f));
80 if !has_test_function { 132 if !has_test_function {
81 return None; 133 return None;
82 } 134 }
83 let range = module.syntax().text_range(); 135 let range = module.syntax().text_range();
84 let mut sb = hir::SourceBinder::new(db); 136 let module = source_binder.to_def(InFile::new(file_id.into(), module))?;
85 let module = sb.to_def(InFile::new(file_id.into(), module))?;
86 137
87 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); 138 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
88 Some(Runnable { range, kind: RunnableKind::TestMod { path } }) 139 Some(Runnable { range, kind: RunnableKind::TestMod { path } })
@@ -121,13 +172,17 @@ mod tests {
121 Runnable { 172 Runnable {
122 range: [22; 46), 173 range: [22; 46),
123 kind: Test { 174 kind: Test {
124 name: "test_foo", 175 test_id: Path(
176 "test_foo",
177 ),
125 }, 178 },
126 }, 179 },
127 Runnable { 180 Runnable {
128 range: [47; 81), 181 range: [47; 81),
129 kind: Test { 182 kind: Test {
130 name: "test_foo", 183 test_id: Path(
184 "test_foo",
185 ),
131 }, 186 },
132 }, 187 },
133 ] 188 ]
@@ -160,7 +215,9 @@ mod tests {
160 Runnable { 215 Runnable {
161 range: [28; 57), 216 range: [28; 57),
162 kind: Test { 217 kind: Test {
163 name: "test_foo1", 218 test_id: Path(
219 "test_mod::test_foo1",
220 ),
164 }, 221 },
165 }, 222 },
166 ] 223 ]
@@ -195,7 +252,9 @@ mod tests {
195 Runnable { 252 Runnable {
196 range: [46; 79), 253 range: [46; 79),
197 kind: Test { 254 kind: Test {
198 name: "test_foo1", 255 test_id: Path(
256 "foo::test_mod::test_foo1",
257 ),
199 }, 258 },
200 }, 259 },
201 ] 260 ]
@@ -232,7 +291,9 @@ mod tests {
232 Runnable { 291 Runnable {
233 range: [68; 105), 292 range: [68; 105),
234 kind: Test { 293 kind: Test {
235 name: "test_foo1", 294 test_id: Path(
295 "foo::bar::test_mod::test_foo1",
296 ),
236 }, 297 },
237 }, 298 },
238 ] 299 ]
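Switching from a bare name to `TestId::Path` means nested tests are addressed by their full module path, matching the updated snapshots above. For a layout like this assumed example the runnable carries `foo::test_mod::test_foo1`, which the LSP layer turns into an exact cargo filter (see the cargo_target_spec change below):

    mod foo {
        mod test_mod {
            #[test]
            fn test_foo1() {
                // Produced runnable: Test { test_id: Path("foo::test_mod::test_foo1") }
            }
        }
    }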
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 1cc55e78b..a02dbaf2f 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index 918fd4b97..95f038f00 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 174e13595..20c414ca1 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -365,6 +365,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
365.literal { color: #BFEBBF; } 365.literal { color: #BFEBBF; }
366.literal\\.numeric { color: #6A8759; } 366.literal\\.numeric { color: #6A8759; }
367.macro { color: #94BFF3; } 367.macro { color: #94BFF3; }
368.module { color: #AFD8AF; }
368.variable { color: #DCDCCC; } 369.variable { color: #DCDCCC; }
369.variable\\.mut { color: #DCDCCC; text-decoration: underline; } 370.variable\\.mut { color: #DCDCCC; text-decoration: underline; }
370 371
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml
index 716e88bc1..495fffb5a 100644
--- a/crates/ra_ide_db/Cargo.toml
+++ b/crates/ra_ide_db/Cargo.toml
@@ -22,7 +22,6 @@ fst = { version = "0.3.1", default-features = false }
22rustc-hash = "1.0" 22rustc-hash = "1.0"
23unicase = "2.2.0" 23unicase = "2.2.0"
24superslice = "1.0.0" 24superslice = "1.0.0"
25rand = { version = "0.7.0", features = ["small_rng"] }
26once_cell = "1.2.0" 25once_cell = "1.2.0"
27 26
28ra_syntax = { path = "../ra_syntax" } 27ra_syntax = { path = "../ra_syntax" }
diff --git a/crates/ra_lsp_server/src/cargo_target_spec.rs b/crates/ra_lsp_server/src/cargo_target_spec.rs
index 594caffe2..5fd1e7b6b 100644
--- a/crates/ra_lsp_server/src/cargo_target_spec.rs
+++ b/crates/ra_lsp_server/src/cargo_target_spec.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use ra_ide::{FileId, RunnableKind}; 3use ra_ide::{FileId, RunnableKind, TestId};
4use ra_project_model::{self, ProjectWorkspace, TargetKind}; 4use ra_project_model::{self, ProjectWorkspace, TargetKind};
5 5
6use crate::{world::WorldSnapshot, Result}; 6use crate::{world::WorldSnapshot, Result};
@@ -13,13 +13,16 @@ pub(crate) fn runnable_args(
13 let spec = CargoTargetSpec::for_file(world, file_id)?; 13 let spec = CargoTargetSpec::for_file(world, file_id)?;
14 let mut res = Vec::new(); 14 let mut res = Vec::new();
15 match kind { 15 match kind {
16 RunnableKind::Test { name } => { 16 RunnableKind::Test { test_id } => {
17 res.push("test".to_string()); 17 res.push("test".to_string());
18 if let Some(spec) = spec { 18 if let Some(spec) = spec {
19 spec.push_to(&mut res); 19 spec.push_to(&mut res);
20 } 20 }
21 res.push("--".to_string()); 21 res.push("--".to_string());
22 res.push(name.to_string()); 22 res.push(test_id.to_string());
23 if let TestId::Path(_) = test_id {
24 res.push("--exact".to_string());
25 }
23 res.push("--nocapture".to_string()); 26 res.push("--nocapture".to_string());
24 } 27 }
25 RunnableKind::TestMod { path } => { 28 RunnableKind::TestMod { path } => {
@@ -31,13 +34,16 @@ pub(crate) fn runnable_args(
31 res.push(path.to_string()); 34 res.push(path.to_string());
32 res.push("--nocapture".to_string()); 35 res.push("--nocapture".to_string());
33 } 36 }
34 RunnableKind::Bench { name } => { 37 RunnableKind::Bench { test_id } => {
35 res.push("bench".to_string()); 38 res.push("bench".to_string());
36 if let Some(spec) = spec { 39 if let Some(spec) = spec {
37 spec.push_to(&mut res); 40 spec.push_to(&mut res);
38 } 41 }
39 res.push("--".to_string()); 42 res.push("--".to_string());
40 res.push(name.to_string()); 43 res.push(test_id.to_string());
44 if let TestId::Path(_) = test_id {
45 res.push("--exact".to_string());
46 }
41 res.push("--nocapture".to_string()); 47 res.push("--nocapture".to_string());
42 } 48 }
43 RunnableKind::Bin => { 49 RunnableKind::Bin => {
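Rough shape of the argument vector the `Test` branch now builds for a path-style test id (package and path values assumed):

    let args: Vec<String> =
        ["test", "--package", "foo", "--", "foo::test_mod::test_foo1", "--exact", "--nocapture"]
            .iter()
            .map(|s| s.to_string())
            .collect();
    // `--exact` makes libtest run only the test whose name equals the filter,
    // instead of every test whose name merely contains it as a substring.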
diff --git a/crates/ra_lsp_server/src/config.rs b/crates/ra_lsp_server/src/config.rs
index 2d7948d74..3314269ec 100644
--- a/crates/ra_lsp_server/src/config.rs
+++ b/crates/ra_lsp_server/src/config.rs
@@ -44,6 +44,8 @@ pub struct ServerConfig {
44 /// Fine grained feature flags to disable specific features. 44 /// Fine grained feature flags to disable specific features.
45 pub feature_flags: FxHashMap<String, bool>, 45 pub feature_flags: FxHashMap<String, bool>,
46 46
47 pub rustfmt_args: Vec<String>,
48
47 /// Cargo feature configurations. 49 /// Cargo feature configurations.
48 pub cargo_features: CargoFeatures, 50 pub cargo_features: CargoFeatures,
49} 51}
@@ -63,6 +65,7 @@ impl Default for ServerConfig {
63 with_sysroot: true, 65 with_sysroot: true,
64 feature_flags: FxHashMap::default(), 66 feature_flags: FxHashMap::default(),
65 cargo_features: Default::default(), 67 cargo_features: Default::default(),
68 rustfmt_args: Vec::new(),
66 } 69 }
67 } 70 }
68} 71}
diff --git a/crates/ra_lsp_server/src/lib.rs b/crates/ra_lsp_server/src/lib.rs
index 1208c1343..a3464a5a3 100644
--- a/crates/ra_lsp_server/src/lib.rs
+++ b/crates/ra_lsp_server/src/lib.rs
@@ -31,6 +31,8 @@ mod config;
31mod world; 31mod world;
32mod diagnostics; 32mod diagnostics;
33 33
34use serde::de::DeserializeOwned;
35
34pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; 36pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;
35pub use crate::{ 37pub use crate::{
36 caps::server_capabilities, 38 caps::server_capabilities,
@@ -38,3 +40,9 @@ pub use crate::{
38 main_loop::LspError, 40 main_loop::LspError,
39 main_loop::{main_loop, show_message}, 41 main_loop::{main_loop, show_message},
40}; 42};
43
44pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> {
45 let res = T::deserialize(&json)
46 .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
47 Ok(res)
48}
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs
index 3879eeff2..ed2eaabd4 100644
--- a/crates/ra_lsp_server/src/main.rs
+++ b/crates/ra_lsp_server/src/main.rs
@@ -1,7 +1,7 @@
1//! `ra_lsp_server` binary 1//! `ra_lsp_server` binary
2 2
3use lsp_server::Connection; 3use lsp_server::Connection;
4use ra_lsp_server::{show_message, Result, ServerConfig}; 4use ra_lsp_server::{from_json, show_message, Result, ServerConfig};
5use ra_prof; 5use ra_prof;
6 6
7fn main() -> Result<()> { 7fn main() -> Result<()> {
@@ -15,13 +15,8 @@ fn main() -> Result<()> {
15 15
16fn setup_logging() -> Result<()> { 16fn setup_logging() -> Result<()> {
17 std::env::set_var("RUST_BACKTRACE", "short"); 17 std::env::set_var("RUST_BACKTRACE", "short");
18
19 env_logger::try_init()?; 18 env_logger::try_init()?;
20 19 ra_prof::init();
21 ra_prof::set_filter(match std::env::var("RA_PROFILE") {
22 Ok(spec) => ra_prof::Filter::from_spec(&spec),
23 Err(_) => ra_prof::Filter::disabled(),
24 });
25 Ok(()) 20 Ok(())
26} 21}
27 22
@@ -45,7 +40,8 @@ fn run_server() -> Result<()> {
45 let server_capabilities = serde_json::to_value(ra_lsp_server::server_capabilities()).unwrap(); 40 let server_capabilities = serde_json::to_value(ra_lsp_server::server_capabilities()).unwrap();
46 41
47 let initialize_params = connection.initialize(server_capabilities)?; 42 let initialize_params = connection.initialize(server_capabilities)?;
48 let initialize_params: lsp_types::InitializeParams = serde_json::from_value(initialize_params)?; 43 let initialize_params =
44 from_json::<lsp_types::InitializeParams>("InitializeParams", initialize_params)?;
49 45
50 if let Some(client_info) = initialize_params.client_info { 46 if let Some(client_info) = initialize_params.client_info {
51 log::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default()); 47 log::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
@@ -62,17 +58,13 @@ fn run_server() -> Result<()> {
62 .filter(|workspaces| !workspaces.is_empty()) 58 .filter(|workspaces| !workspaces.is_empty())
63 .unwrap_or_else(|| vec![root]); 59 .unwrap_or_else(|| vec![root]);
64 60
65 let server_config: ServerConfig = initialize_params 61 let server_config = initialize_params
66 .initialization_options 62 .initialization_options
67 .and_then(|v| { 63 .and_then(|v| {
68 serde_json::from_value(v) 64 from_json::<ServerConfig>("config", v)
69 .map_err(|e| { 65 .map_err(|e| {
70 log::error!("failed to deserialize config: {}", e); 66 log::error!("{}", e);
71 show_message( 67 show_message(lsp_types::MessageType::Error, e.to_string(), &connection.sender);
72 lsp_types::MessageType::Error,
73 format!("failed to deserialize config: {}", e),
74 &connection.sender,
75 );
76 }) 68 })
77 .ok() 69 .ok()
78 }) 70 })
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs
index ceff82fda..1e70cea13 100644
--- a/crates/ra_lsp_server/src/main_loop.rs
+++ b/crates/ra_lsp_server/src/main_loop.rs
@@ -178,6 +178,7 @@ pub fn main_loop(
178 command: config.cargo_watch_command, 178 command: config.cargo_watch_command,
179 all_targets: config.cargo_watch_all_targets, 179 all_targets: config.cargo_watch_all_targets,
180 }, 180 },
181 rustfmt_args: config.rustfmt_args,
181 } 182 }
182 }; 183 };
183 184
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 65e8bc856..3893430c0 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -35,6 +35,7 @@ use crate::{
35 TryConvWithToVec, 35 TryConvWithToVec,
36 }, 36 },
37 diagnostics::DiagnosticTask, 37 diagnostics::DiagnosticTask,
38 from_json,
38 req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind}, 39 req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind},
39 world::WorldSnapshot, 40 world::WorldSnapshot,
40 LspError, Result, 41 LspError, Result,
@@ -589,6 +590,7 @@ pub fn handle_formatting(
589 let end_position = TextUnit::of_str(&file).conv_with(&file_line_index); 590 let end_position = TextUnit::of_str(&file).conv_with(&file_line_index);
590 591
591 let mut rustfmt = process::Command::new("rustfmt"); 592 let mut rustfmt = process::Command::new("rustfmt");
593 rustfmt.args(&world.options.rustfmt_args);
592 if let Some(&crate_id) = crate_ids.first() { 594 if let Some(&crate_id) = crate_ids.first() {
593 // Assume all crates are in the same edition 595 // Assume all crates are in the same edition
594 let edition = world.analysis().crate_edition(crate_id)?; 596 let edition = world.analysis().crate_edition(crate_id)?;
@@ -757,7 +759,7 @@ pub fn handle_code_lens(
757 // Gather runnables 759 // Gather runnables
758 for runnable in world.analysis().runnables(file_id)? { 760 for runnable in world.analysis().runnables(file_id)? {
759 let title = match &runnable.kind { 761 let title = match &runnable.kind {
760 RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => "▶️Run Test", 762 RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => "▶️\u{fe0e}Run Test",
761 RunnableKind::Bench { .. } => "Run Bench", 763 RunnableKind::Bench { .. } => "Run Bench",
762 RunnableKind::Bin => "Run", 764 RunnableKind::Bin => "Run",
763 } 765 }
@@ -811,7 +813,7 @@ enum CodeLensResolveData {
811pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> { 813pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> {
812 let _p = profile("handle_code_lens_resolve"); 814 let _p = profile("handle_code_lens_resolve");
813 let data = code_lens.data.unwrap(); 815 let data = code_lens.data.unwrap();
814 let resolve = serde_json::from_value(data)?; 816 let resolve = from_json::<Option<CodeLensResolveData>>("CodeLensResolveData", data)?;
815 match resolve { 817 match resolve {
816 Some(CodeLensResolveData::Impls(lens_params)) => { 818 Some(CodeLensResolveData::Impls(lens_params)) => {
817 let locations: Vec<Location> = 819 let locations: Vec<Location> =
@@ -917,9 +919,9 @@ fn to_lsp_runnable(
917 let args = runnable_args(world, file_id, &runnable.kind)?; 919 let args = runnable_args(world, file_id, &runnable.kind)?;
918 let line_index = world.analysis().file_line_index(file_id)?; 920 let line_index = world.analysis().file_line_index(file_id)?;
919 let label = match &runnable.kind { 921 let label = match &runnable.kind {
920 RunnableKind::Test { name } => format!("test {}", name), 922 RunnableKind::Test { test_id } => format!("test {}", test_id),
921 RunnableKind::TestMod { path } => format!("test-mod {}", path), 923 RunnableKind::TestMod { path } => format!("test-mod {}", path),
922 RunnableKind::Bench { name } => format!("bench {}", name), 924 RunnableKind::Bench { test_id } => format!("bench {}", test_id),
923 RunnableKind::Bin => "run binary".to_string(), 925 RunnableKind::Bin => "run binary".to_string(),
924 }; 926 };
925 Ok(req::Runnable { 927 Ok(req::Runnable {
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs
index 1ee02b47c..d993c5fc4 100644
--- a/crates/ra_lsp_server/src/world.rs
+++ b/crates/ra_lsp_server/src/world.rs
@@ -34,6 +34,7 @@ pub struct Options {
34 pub supports_location_link: bool, 34 pub supports_location_link: bool,
35 pub line_folding_only: bool, 35 pub line_folding_only: bool,
36 pub max_inlay_hint_length: Option<usize>, 36 pub max_inlay_hint_length: Option<usize>,
37 pub rustfmt_args: Vec<String>,
37 pub cargo_watch: CheckOptions, 38 pub cargo_watch: CheckOptions,
38} 39}
39 40
diff --git a/crates/ra_lsp_server/tests/heavy_tests/main.rs b/crates/ra_lsp_server/tests/heavy_tests/main.rs
index dff63a12d..9ca31cbcc 100644
--- a/crates/ra_lsp_server/tests/heavy_tests/main.rs
+++ b/crates/ra_lsp_server/tests/heavy_tests/main.rs
@@ -147,7 +147,7 @@ fn main() {}
147 }, 147 },
148 json!([ 148 json!([
149 { 149 {
150 "args": [ "test", "--package", "foo", "--test", "spam", "--", "test_eggs", "--nocapture" ], 150 "args": [ "test", "--package", "foo", "--test", "spam", "--", "test_eggs", "--exact", "--nocapture" ],
151 "bin": "cargo", 151 "bin": "cargo",
152 "env": { "RUST_BACKTRACE": "short" }, 152 "env": { "RUST_BACKTRACE": "short" },
153 "label": "test test_eggs", 153 "label": "test test_eggs",
diff --git a/crates/ra_parser/src/grammar/expressions/atom.rs b/crates/ra_parser/src/grammar/expressions/atom.rs
index f154077a8..b72d2e9e6 100644
--- a/crates/ra_parser/src/grammar/expressions/atom.rs
+++ b/crates/ra_parser/src/grammar/expressions/atom.rs
@@ -336,7 +336,7 @@ fn for_expr(p: &mut Parser, m: Option<Marker>) -> CompletedMarker {
336fn cond(p: &mut Parser) { 336fn cond(p: &mut Parser) {
337 let m = p.start(); 337 let m = p.start();
338 if p.eat(T![let]) { 338 if p.eat(T![let]) {
339 patterns::pattern_list(p); 339 patterns::pattern_top(p);
340 p.expect(T![=]); 340 p.expect(T![=]);
341 } 341 }
342 expr_no_struct(p); 342 expr_no_struct(p);
@@ -430,7 +430,7 @@ fn match_arm(p: &mut Parser) -> BlockLike {
430 // } 430 // }
431 attributes::outer_attributes(p); 431 attributes::outer_attributes(p);
432 432
433 patterns::pattern_list_r(p, TokenSet::EMPTY); 433 patterns::pattern_top_r(p, TokenSet::EMPTY);
434 if p.at(T![if]) { 434 if p.at(T![if]) {
435 match_guard(p); 435 match_guard(p);
436 } 436 }
diff --git a/crates/ra_parser/src/grammar/params.rs b/crates/ra_parser/src/grammar/params.rs
index 94edc7f35..272661b1d 100644
--- a/crates/ra_parser/src/grammar/params.rs
+++ b/crates/ra_parser/src/grammar/params.rs
@@ -114,9 +114,12 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
114 // test fn_pointer_param_ident_path 114 // test fn_pointer_param_ident_path
115 // type Foo = fn(Bar::Baz); 115 // type Foo = fn(Bar::Baz);
116 // type Qux = fn(baz: Bar::Baz); 116 // type Qux = fn(baz: Bar::Baz);
117
118 // test fn_pointer_unnamed_arg
119 // type Foo = fn(_: bar);
117 Flavor::FnPointer => { 120 Flavor::FnPointer => {
118 if p.at(IDENT) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) { 121 if (p.at(IDENT) || p.at(UNDERSCORE)) && p.nth(1) == T![:] && !p.nth_at(1, T![::]) {
119 patterns::pattern(p); 122 patterns::pattern_single(p);
120 types::ascription(p); 123 types::ascription(p);
121 } else { 124 } else {
122 types::type_(p); 125 types::type_(p);
@@ -127,7 +130,7 @@ fn value_parameter(p: &mut Parser, flavor: Flavor) {
127 // let foo = |bar, baz: Baz, qux: Qux::Quux| (); 130 // let foo = |bar, baz: Baz, qux: Qux::Quux| ();
128 // } 131 // }
129 Flavor::Closure => { 132 Flavor::Closure => {
130 patterns::pattern(p); 133 patterns::pattern_single(p);
131 if p.at(T![:]) && !p.at(T![::]) { 134 if p.at(T![:]) && !p.at(T![::]) {
132 types::ascription(p); 135 types::ascription(p);
133 } 136 }
diff --git a/crates/ra_parser/src/grammar/patterns.rs b/crates/ra_parser/src/grammar/patterns.rs
index 422a4e3dc..3afbaa82b 100644
--- a/crates/ra_parser/src/grammar/patterns.rs
+++ b/crates/ra_parser/src/grammar/patterns.rs
@@ -11,22 +11,47 @@ pub(crate) fn pattern(p: &mut Parser) {
11} 11}
12 12
13/// Parses a pattern list separated by pipes `|` 13/// Parses a pattern list separated by pipes `|`
14pub(super) fn pattern_list(p: &mut Parser) { 14pub(super) fn pattern_top(p: &mut Parser) {
15 pattern_list_r(p, PAT_RECOVERY_SET) 15 pattern_top_r(p, PAT_RECOVERY_SET)
16}
17
18pub(crate) fn pattern_single(p: &mut Parser) {
19 pattern_single_r(p, PAT_RECOVERY_SET);
16} 20}
17 21
18/// Parses a pattern list separated by pipes `|` 22/// Parses a pattern list separated by pipes `|`
19/// using the given `recovery_set` 23/// using the given `recovery_set`
20pub(super) fn pattern_list_r(p: &mut Parser, recovery_set: TokenSet) { 24pub(super) fn pattern_top_r(p: &mut Parser, recovery_set: TokenSet) {
21 p.eat(T![|]); 25 p.eat(T![|]);
22 pattern_r(p, recovery_set); 26 pattern_r(p, recovery_set);
27}
23 28
29 /// Parses a pattern list separated by pipes `|`, with no leading `|`, using the
30/// given `recovery_set`
31// test or_pattern
32// fn main() {
33// match () {
34// (_ | _) => (),
35// &(_ | _) => (),
36// (_ | _,) => (),
37// [_ | _,] => (),
38// }
39// }
40fn pattern_r(p: &mut Parser, recovery_set: TokenSet) {
41 let m = p.start();
42 pattern_single_r(p, recovery_set);
43
44 if !p.at(T![|]) {
45 m.abandon(p);
46 return;
47 }
24 while p.eat(T![|]) { 48 while p.eat(T![|]) {
25 pattern_r(p, recovery_set); 49 pattern_single_r(p, recovery_set);
26 } 50 }
51 m.complete(p, OR_PAT);
27} 52}
28 53
29pub(super) fn pattern_r(p: &mut Parser, recovery_set: TokenSet) { 54fn pattern_single_r(p: &mut Parser, recovery_set: TokenSet) {
30 if let Some(lhs) = atom_pat(p, recovery_set) { 55 if let Some(lhs) = atom_pat(p, recovery_set) {
31 // test range_pat 56 // test range_pat
32 // fn main() { 57 // fn main() {
@@ -258,19 +283,41 @@ fn ref_pat(p: &mut Parser) -> CompletedMarker {
258 let m = p.start(); 283 let m = p.start();
259 p.bump(T![&]); 284 p.bump(T![&]);
260 p.eat(T![mut]); 285 p.eat(T![mut]);
261 pattern(p); 286 pattern_single(p);
262 m.complete(p, REF_PAT) 287 m.complete(p, REF_PAT)
263} 288}
264 289
265// test tuple_pat 290// test tuple_pat
266// fn main() { 291// fn main() {
267// let (a, b, ..) = (); 292// let (a, b, ..) = ();
293// let (a,) = ();
294// let (..) = ();
295// let () = ();
268// } 296// }
269fn tuple_pat(p: &mut Parser) -> CompletedMarker { 297fn tuple_pat(p: &mut Parser) -> CompletedMarker {
270 assert!(p.at(T!['('])); 298 assert!(p.at(T!['(']));
271 let m = p.start(); 299 let m = p.start();
272 tuple_pat_fields(p); 300 p.bump(T!['(']);
273 m.complete(p, TUPLE_PAT) 301 let mut has_comma = false;
302 let mut has_pat = false;
303 let mut has_rest = false;
304 while !p.at(EOF) && !p.at(T![')']) {
305 has_pat = true;
306 if !p.at_ts(PATTERN_FIRST) {
307 p.error("expected a pattern");
308 break;
309 }
310 has_rest |= p.at(T![..]);
311
312 pattern(p);
313 if !p.at(T![')']) {
314 has_comma = true;
315 p.expect(T![,]);
316 }
317 }
318 p.expect(T![')']);
319
320 m.complete(p, if !has_comma && !has_rest && has_pat { PAREN_PAT } else { TUPLE_PAT })
274} 321}
275 322
276// test slice_pat 323// test slice_pat
@@ -315,7 +362,7 @@ fn bind_pat(p: &mut Parser, with_at: bool) -> CompletedMarker {
315 p.eat(T![mut]); 362 p.eat(T![mut]);
316 name(p); 363 name(p);
317 if with_at && p.eat(T![@]) { 364 if with_at && p.eat(T![@]) {
318 pattern(p); 365 pattern_single(p);
319 } 366 }
320 m.complete(p, BIND_PAT) 367 m.complete(p, BIND_PAT)
321} 368}
@@ -330,6 +377,6 @@ fn box_pat(p: &mut Parser) -> CompletedMarker {
330 assert!(p.at(T![box])); 377 assert!(p.at(T![box]));
331 let m = p.start(); 378 let m = p.start();
332 p.bump(T![box]); 379 p.bump(T![box]);
333 pattern(p); 380 pattern_single(p);
334 m.complete(p, BOX_PAT) 381 m.complete(p, BOX_PAT)
335} 382}
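The `has_comma`/`has_rest`/`has_pat` bookkeeping decides between `PAREN_PAT` and `TUPLE_PAT`; a sketch of inputs on each side of that decision, mirroring the inline `or_pattern` and `tuple_pat` tests (assumed example):

    fn main() {
        // One inner pattern, no comma, no `..`: a PAREN_PAT wrapping an OR_PAT.
        match 92 {
            (_ | _) => (),
        }
        // A trailing comma forces TUPLE_PAT even with a single element ...
        let (_,) = (92,);
        // ... and `..` alone (has_rest) also keeps the node a TUPLE_PAT.
        let (..) = (1, 2);
    }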
diff --git a/crates/ra_parser/src/syntax_kind/generated.rs b/crates/ra_parser/src/syntax_kind/generated.rs
index e27b27ffa..1068da0a0 100644
--- a/crates/ra_parser/src/syntax_kind/generated.rs
+++ b/crates/ra_parser/src/syntax_kind/generated.rs
@@ -151,6 +151,8 @@ pub enum SyntaxKind {
151 FOR_TYPE, 151 FOR_TYPE,
152 IMPL_TRAIT_TYPE, 152 IMPL_TRAIT_TYPE,
153 DYN_TRAIT_TYPE, 153 DYN_TRAIT_TYPE,
154 OR_PAT,
155 PAREN_PAT,
154 REF_PAT, 156 REF_PAT,
155 BOX_PAT, 157 BOX_PAT,
156 BIND_PAT, 158 BIND_PAT,
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs
index d38ff397e..c0bfbc2ee 100644
--- a/crates/ra_prof/src/lib.rs
+++ b/crates/ra_prof/src/lib.rs
@@ -26,6 +26,13 @@ pub use crate::memory_usage::{Bytes, MemoryUsage};
26#[global_allocator] 26#[global_allocator]
27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; 27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
28 28
29pub fn init() {
30 set_filter(match std::env::var("RA_PROFILE") {
31 Ok(spec) => Filter::from_spec(&spec),
32 Err(_) => Filter::disabled(),
33 });
34}
35
29/// Set profiling filter. It specifies descriptions allowed to profile. 36/// Set profiling filter. It specifies descriptions allowed to profile.
30/// This is helpful when call stack has too many nested profiling scopes. 37/// This is helpful when call stack has too many nested profiling scopes.
31/// Additionally filter can specify maximum depth of profiling scopes nesting. 38/// Additionally filter can specify maximum depth of profiling scopes nesting.
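
The new `init` helper centralizes what callers previously did by hand: read `RA_PROFILE` and install the resulting filter, falling back to a disabled filter when the variable is unset. A hedged usage sketch follows; the `"*"` spec is only an assumed catch-all, the exact grammar is whatever `Filter::from_spec` accepts.

```rust
fn main() {
    // Opt this process into profiling output; with RA_PROFILE unset,
    // ra_prof::init() installs a disabled filter and profiling is a no-op.
    std::env::set_var("RA_PROFILE", "*");
    ra_prof::init();

    // ... run the work to be profiled here ...
}
```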
diff --git a/crates/ra_project_model/Cargo.toml b/crates/ra_project_model/Cargo.toml
index 69edc3c66..653d5bd14 100644
--- a/crates/ra_project_model/Cargo.toml
+++ b/crates/ra_project_model/Cargo.toml
@@ -19,3 +19,5 @@ ra_cfg = { path = "../ra_cfg" }
19 19
20serde = { version = "1.0.89", features = ["derive"] } 20serde = { version = "1.0.89", features = ["derive"] }
21serde_json = "1.0.39" 21serde_json = "1.0.39"
22
23anyhow = "1.0.26"
diff --git a/crates/ra_project_model/src/cargo_workspace.rs b/crates/ra_project_model/src/cargo_workspace.rs
index 60cb8c1eb..22d226a74 100644
--- a/crates/ra_project_model/src/cargo_workspace.rs
+++ b/crates/ra_project_model/src/cargo_workspace.rs
@@ -2,14 +2,13 @@
2 2
3use std::path::{Path, PathBuf}; 3use std::path::{Path, PathBuf};
4 4
5use anyhow::{Context, Result};
5use cargo_metadata::{CargoOpt, MetadataCommand}; 6use cargo_metadata::{CargoOpt, MetadataCommand};
6use ra_arena::{impl_arena_id, Arena, RawId}; 7use ra_arena::{impl_arena_id, Arena, RawId};
7use ra_db::Edition; 8use ra_db::Edition;
8use rustc_hash::FxHashMap; 9use rustc_hash::FxHashMap;
9use serde::Deserialize; 10use serde::Deserialize;
10 11
11use crate::Result;
12
13/// `CargoWorkspace` represents the logical structure of, well, a Cargo 12/// `CargoWorkspace` represents the logical structure of, well, a Cargo
14/// workspace. It pretty closely mirrors `cargo metadata` output. 13/// workspace. It pretty closely mirrors `cargo metadata` output.
15/// 14///
@@ -171,7 +170,9 @@ impl CargoWorkspace {
171 if let Some(parent) = cargo_toml.parent() { 170 if let Some(parent) = cargo_toml.parent() {
172 meta.current_dir(parent); 171 meta.current_dir(parent);
173 } 172 }
174 let meta = meta.exec().map_err(|e| format!("cargo metadata failed: {}", e))?; 173 let meta = meta.exec().with_context(|| {
174 format!("Failed to run `cargo metadata --manifest-path {}`", cargo_toml.display())
175 })?;
175 let mut pkg_by_id = FxHashMap::default(); 176 let mut pkg_by_id = FxHashMap::default();
176 let mut packages = Arena::default(); 177 let mut packages = Arena::default();
177 let mut targets = Arena::default(); 178 let mut targets = Arena::default();
@@ -181,7 +182,9 @@ impl CargoWorkspace {
181 for meta_pkg in meta.packages { 182 for meta_pkg in meta.packages {
182 let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg; 183 let cargo_metadata::Package { id, edition, name, manifest_path, .. } = meta_pkg;
183 let is_member = ws_members.contains(&id); 184 let is_member = ws_members.contains(&id);
184 let edition = edition.parse::<Edition>()?; 185 let edition = edition
186 .parse::<Edition>()
187 .with_context(|| format!("Failed to parse edition {}", edition))?;
185 let pkg = packages.alloc(PackageData { 188 let pkg = packages.alloc(PackageData {
186 name, 189 name,
187 manifest: manifest_path, 190 manifest: manifest_path,
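
With the switch from the crate-local `Result` alias to `anyhow`, failures from `cargo metadata` and edition parsing now carry a lazily built, human-readable context. A self-contained sketch of the same `with_context` shape, using a hypothetical manifest-reading helper rather than project code:

```rust
use std::{fs, path::Path};

use anyhow::{Context, Result};

// Attach context to a fallible step; the closure only runs on the error path.
fn read_manifest(manifest: &Path) -> Result<String> {
    fs::read_to_string(manifest)
        .with_context(|| format!("Failed to read manifest {}", manifest.display()))
}

fn main() -> Result<()> {
    let text = read_manifest(Path::new("Cargo.toml"))?;
    println!("{} bytes", text.len());
    Ok(())
}
```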
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index bc1d15406..250255813 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -12,6 +12,7 @@ use std::{
12 process::Command, 12 process::Command,
13}; 13};
14 14
15use anyhow::{bail, Context, Result};
15use ra_cfg::CfgOptions; 16use ra_cfg::CfgOptions;
16use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId}; 17use ra_db::{CrateGraph, CrateId, CrateName, Edition, Env, FileId};
17use rustc_hash::FxHashMap; 18use rustc_hash::FxHashMap;
@@ -23,8 +24,6 @@ pub use crate::{
23 sysroot::Sysroot, 24 sysroot::Sysroot,
24}; 25};
25 26
26pub type Result<T> = ::std::result::Result<T, Box<dyn Error + Send + Sync>>;
27
28#[derive(Clone, PartialEq, Eq, Hash, Debug)] 27#[derive(Clone, PartialEq, Eq, Hash, Debug)]
29pub struct CargoTomlNotFoundError(pub PathBuf); 28pub struct CargoTomlNotFoundError(pub PathBuf);
30 29
@@ -81,15 +80,36 @@ impl ProjectWorkspace {
81 ) -> Result<ProjectWorkspace> { 80 ) -> Result<ProjectWorkspace> {
82 match find_rust_project_json(path) { 81 match find_rust_project_json(path) {
83 Some(json_path) => { 82 Some(json_path) => {
84 let file = File::open(json_path)?; 83 let file = File::open(&json_path)
84 .with_context(|| format!("Failed to open json file {}", json_path.display()))?;
85 let reader = BufReader::new(file); 85 let reader = BufReader::new(file);
86 Ok(ProjectWorkspace::Json { project: from_reader(reader)? }) 86 Ok(ProjectWorkspace::Json {
87 project: from_reader(reader).with_context(|| {
88 format!("Failed to deserialize json file {}", json_path.display())
89 })?,
90 })
87 } 91 }
88 None => { 92 None => {
89 let cargo_toml = find_cargo_toml(path)?; 93 let cargo_toml = find_cargo_toml(path).with_context(|| {
90 let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features)?; 94 format!("Failed to find Cargo.toml for path {}", path.display())
91 let sysroot = 95 })?;
92 if with_sysroot { Sysroot::discover(&cargo_toml)? } else { Sysroot::default() }; 96 let cargo = CargoWorkspace::from_cargo_metadata(&cargo_toml, cargo_features)
97 .with_context(|| {
98 format!(
99 "Failed to read Cargo metadata from Cargo.toml file {}",
100 cargo_toml.display()
101 )
102 })?;
103 let sysroot = if with_sysroot {
104 Sysroot::discover(&cargo_toml).with_context(|| {
105 format!(
106 "Failed to find sysroot for Cargo.toml file {}",
107 cargo_toml.display()
108 )
109 })?
110 } else {
111 Sysroot::default()
112 };
93 Ok(ProjectWorkspace::Cargo { cargo, sysroot }) 113 Ok(ProjectWorkspace::Cargo { cargo, sysroot })
94 } 114 }
95 } 115 }
@@ -398,16 +418,27 @@ pub fn get_rustc_cfg_options() -> CfgOptions {
398 // Some nightly-only cfgs, which are required for stdlib 418 // Some nightly-only cfgs, which are required for stdlib
399 { 419 {
400 cfg_options.insert_atom("target_thread_local".into()); 420 cfg_options.insert_atom("target_thread_local".into());
401 for &target_has_atomic in ["16", "32", "64", "8", "cas", "ptr"].iter() { 421 for &target_has_atomic in ["8", "16", "32", "64", "cas", "ptr"].iter() {
402 cfg_options.insert_key_value("target_has_atomic".into(), target_has_atomic.into()) 422 cfg_options.insert_key_value("target_has_atomic".into(), target_has_atomic.into());
423 cfg_options
424 .insert_key_value("target_has_atomic_load_store".into(), target_has_atomic.into());
403 } 425 }
404 } 426 }
405 427
406 match (|| -> Result<_> { 428 match (|| -> Result<String> {
407 // `cfg(test)` and `cfg(debug_assertion)` are handled outside, so we suppress them here. 429 // `cfg(test)` and `cfg(debug_assertion)` are handled outside, so we suppress them here.
408 let output = Command::new("rustc").args(&["--print", "cfg", "-O"]).output()?; 430 let output = Command::new("rustc")
431 .args(&["--print", "cfg", "-O"])
432 .output()
433 .context("Failed to get output from rustc --print cfg -O")?;
409 if !output.status.success() { 434 if !output.status.success() {
410 Err("failed to get rustc cfgs")?; 435 bail!(
436 "rustc --print cfg -O exited with exit code ({})",
437 output
438 .status
439 .code()
440 .map_or(String::from("no exit code"), |code| format!("{}", code))
441 );
411 } 442 }
412 Ok(String::from_utf8(output.stdout)?) 443 Ok(String::from_utf8(output.stdout)?)
413 })() { 444 })() {
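
The `match (|| -> Result<String> { ... })()` construct above is an immediately-invoked closure: it gives `?` a local scope, so every failure inside the block funnels into one `match` arm instead of aborting the surrounding function. A minimal standalone illustration; the environment variable name is made up for the example.

```rust
use anyhow::{bail, Result};

fn main() {
    // Every `?` below bails out of the closure, not out of `main`.
    match (|| -> Result<String> {
        let value = std::env::var("EXAMPLE_VAR")?; // hypothetical variable
        if value.is_empty() {
            bail!("EXAMPLE_VAR is set but empty");
        }
        Ok(value)
    })() {
        Ok(value) => println!("got: {}", value),
        Err(e) => eprintln!("continuing without it: {:#}", e),
    }
}
```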
diff --git a/crates/ra_project_model/src/sysroot.rs b/crates/ra_project_model/src/sysroot.rs
index 34d066b1e..7b9cc899c 100644
--- a/crates/ra_project_model/src/sysroot.rs
+++ b/crates/ra_project_model/src/sysroot.rs
@@ -1,5 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use anyhow::{anyhow, bail, Context, Result};
3use std::{ 4use std::{
4 env, 5 env,
5 path::{Path, PathBuf}, 6 path::{Path, PathBuf},
@@ -8,8 +9,6 @@ use std::{
8 9
9use ra_arena::{impl_arena_id, Arena, RawId}; 10use ra_arena::{impl_arena_id, Arena, RawId};
10 11
11use crate::Result;
12
13#[derive(Default, Debug, Clone)] 12#[derive(Default, Debug, Clone)]
14pub struct Sysroot { 13pub struct Sysroot {
15 crates: Arena<SysrootCrate, SysrootCrateData>, 14 crates: Arena<SysrootCrate, SysrootCrateData>,
@@ -51,7 +50,7 @@ impl Sysroot {
51 let src = try_find_src_path(cargo_toml)?; 50 let src = try_find_src_path(cargo_toml)?;
52 51
53 if !src.exists() { 52 if !src.exists() {
54 Err(format!( 53 Err(anyhow!(
55 "can't load standard library from sysroot\n\ 54 "can't load standard library from sysroot\n\
56 {}\n\ 55 {}\n\
57 (discovered via `rustc --print sysroot`)\n\ 56 (discovered via `rustc --print sysroot`)\n\
@@ -99,9 +98,15 @@ fn try_find_src_path(cargo_toml: &Path) -> Result<PathBuf> {
99 let rustc_output = Command::new("rustc") 98 let rustc_output = Command::new("rustc")
100 .current_dir(cargo_toml.parent().unwrap()) 99 .current_dir(cargo_toml.parent().unwrap())
101 .args(&["--print", "sysroot"]) 100 .args(&["--print", "sysroot"])
102 .output()?; 101 .output()
102 .context("rustc --print sysroot failed")?;
103 if !rustc_output.status.success() { 103 if !rustc_output.status.success() {
104 Err("failed to locate sysroot")?; 104 match rustc_output.status.code() {
105 Some(code) => {
106 bail!("failed to locate sysroot: rustc --print sysroot exited with code {}", code)
107 }
108 None => bail!("failed to locate sysroot: rustc --print sysroot terminated by signal"),
109 };
105 } 110 }
106 let stdout = String::from_utf8(rustc_output.stdout)?; 111 let stdout = String::from_utf8(rustc_output.stdout)?;
107 let sysroot_path = Path::new(stdout.trim()); 112 let sysroot_path = Path::new(stdout.trim());
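
`try_find_src_path` now reports why locating the sysroot failed: a context message when spawning `rustc` itself fails, and the exit code (or a note about a signal) when it runs but reports failure. The same shape, condensed into a standalone sketch rather than the project function:

```rust
use std::process::Command;

use anyhow::{bail, Context, Result};

fn rustc_sysroot() -> Result<String> {
    let output = Command::new("rustc")
        .args(&["--print", "sysroot"])
        .output()
        .context("rustc --print sysroot failed")?;
    if !output.status.success() {
        match output.status.code() {
            Some(code) => bail!("rustc --print sysroot exited with code {}", code),
            None => bail!("rustc --print sysroot was terminated by a signal"),
        }
    }
    Ok(String::from_utf8(output.stdout)?)
}
```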
diff --git a/crates/ra_syntax/src/ast.rs b/crates/ra_syntax/src/ast.rs
index 89cb9a9f3..9cc7930f7 100644
--- a/crates/ra_syntax/src/ast.rs
+++ b/crates/ra_syntax/src/ast.rs
@@ -18,7 +18,8 @@ use crate::{
18pub use self::{ 18pub use self::{
19 expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp, RangeOp}, 19 expr_extensions::{ArrayExprKind, BinOp, ElseBranch, LiteralKind, PrefixOp, RangeOp},
20 extensions::{ 20 extensions::{
21 FieldKind, PathSegmentKind, SelfParamKind, StructKind, TypeBoundKind, VisibilityKind, 21 AttrKind, FieldKind, PathSegmentKind, SelfParamKind, SlicePatComponents, StructKind,
22 TypeBoundKind, VisibilityKind,
22 }, 23 },
23 generated::*, 24 generated::*,
24 tokens::*, 25 tokens::*,
@@ -216,10 +217,7 @@ fn test_doc_comment_multi_line_block_strips_suffix() {
216#[test] 217#[test]
217fn test_comments_preserve_trailing_whitespace() { 218fn test_comments_preserve_trailing_whitespace() {
218 let file = SourceFile::parse( 219 let file = SourceFile::parse(
219 r#" 220 "\n/// Representation of a Realm. \n/// In the specification these are called Realm Records.\nstruct Realm {}",
220/// Representation of a Realm.
221/// In the specification these are called Realm Records.
222struct Realm {}"#,
223 ) 221 )
224 .ok() 222 .ok()
225 .unwrap(); 223 .unwrap();
diff --git a/crates/ra_syntax/src/ast/extensions.rs b/crates/ra_syntax/src/ast/extensions.rs
index cb0aee422..44de4af89 100644
--- a/crates/ra_syntax/src/ast/extensions.rs
+++ b/crates/ra_syntax/src/ast/extensions.rs
@@ -1,6 +1,8 @@
1//! Various extension methods to ast Nodes, which are hard to code-generate. 1//! Various extension methods to ast Nodes, which are hard to code-generate.
2//! Extensions for various expressions live in a sibling `expr_extensions` module. 2//! Extensions for various expressions live in a sibling `expr_extensions` module.
3 3
4use itertools::Itertools;
5
4use crate::{ 6use crate::{
5 ast::{self, child_opt, children, AstNode, AttrInput, SyntaxNode}, 7 ast::{self, child_opt, children, AstNode, AttrInput, SyntaxNode},
6 SmolStr, SyntaxElement, 8 SmolStr, SyntaxElement,
@@ -35,6 +37,12 @@ fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
35 node.green().children().next().and_then(|it| it.into_token()).unwrap().text() 37 node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
36} 38}
37 39
40#[derive(Debug, Clone, PartialEq, Eq)]
41pub enum AttrKind {
42 Inner,
43 Outer,
44}
45
38impl ast::Attr { 46impl ast::Attr {
39 pub fn as_simple_atom(&self) -> Option<SmolStr> { 47 pub fn as_simple_atom(&self) -> Option<SmolStr> {
40 match self.input() { 48 match self.input() {
@@ -69,6 +77,18 @@ impl ast::Attr {
69 _ => None, 77 _ => None,
70 } 78 }
71 } 79 }
80
81 pub fn kind(&self) -> AttrKind {
82 let first_token = self.syntax().first_token();
83 let first_token_kind = first_token.as_ref().map(SyntaxToken::kind);
84 let second_token_kind =
85 first_token.and_then(|token| token.next_token()).as_ref().map(SyntaxToken::kind);
86
87 match (first_token_kind, second_token_kind) {
88 (Some(SyntaxKind::POUND), Some(SyntaxKind::EXCL)) => AttrKind::Inner,
89 _ => AttrKind::Outer,
90 }
91 }
72} 92}
73 93
74#[derive(Debug, Clone, PartialEq, Eq)] 94#[derive(Debug, Clone, PartialEq, Eq)]
@@ -293,6 +313,40 @@ impl ast::BindPat {
293 } 313 }
294} 314}
295 315
316pub struct SlicePatComponents {
317 pub prefix: Vec<ast::Pat>,
318 pub slice: Option<ast::Pat>,
319 pub suffix: Vec<ast::Pat>,
320}
321
322impl ast::SlicePat {
323 pub fn components(&self) -> SlicePatComponents {
324 let mut args = self.args().peekable();
325 let prefix = args
326 .peeking_take_while(|p| match p {
327 ast::Pat::DotDotPat(_) => false,
328 ast::Pat::BindPat(bp) => match bp.pat() {
329 Some(ast::Pat::DotDotPat(_)) => false,
330 _ => true,
331 },
332 ast::Pat::RefPat(rp) => match rp.pat() {
333 Some(ast::Pat::DotDotPat(_)) => false,
334 Some(ast::Pat::BindPat(bp)) => match bp.pat() {
335 Some(ast::Pat::DotDotPat(_)) => false,
336 _ => true,
337 },
338 _ => true,
339 },
340 _ => true,
341 })
342 .collect();
343 let slice = args.next();
344 let suffix = args.collect();
345
346 SlicePatComponents { prefix, slice, suffix }
347 }
348}
349
296impl ast::PointerType { 350impl ast::PointerType {
297 pub fn is_mut(&self) -> bool { 351 pub fn is_mut(&self) -> bool {
298 self.syntax().children_with_tokens().any(|n| n.kind() == T![mut]) 352 self.syntax().children_with_tokens().any(|n| n.kind() == T![mut])
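
`SlicePat::components()` splits the children of a slice pattern around the rest pattern (`..`), also recognizing it when bound to a name or behind a reference, into prefix, slice, and suffix. A hedged consumer sketch, assuming the parse-and-cast style used in this crate's tests; `slice_shape` is a hypothetical helper.

```rust
use ra_syntax::{ast::{self, AstNode}, SourceFile};

// Return (prefix len, has rest pattern, suffix len) for the first slice pattern.
fn slice_shape(text: &str) -> Option<(usize, bool, usize)> {
    let file = SourceFile::parse(text).ok().unwrap();
    let slice = file.syntax().descendants().find_map(ast::SlicePat::cast)?;
    let c = slice.components();
    Some((c.prefix.len(), c.slice.is_some(), c.suffix.len()))
}

// Under these assumptions,
// slice_shape("fn f(xs: &[i32]) { if let [a, b, .., z] = xs {} }")
// is expected to yield Some((2, true, 1)).
```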
diff --git a/crates/ra_syntax/src/ast/generated.rs b/crates/ra_syntax/src/ast/generated.rs
index 435135f92..8eb240801 100644
--- a/crates/ra_syntax/src/ast/generated.rs
+++ b/crates/ra_syntax/src/ast/generated.rs
@@ -1759,8 +1759,8 @@ impl AstNode for MatchArm {
1759} 1759}
1760impl ast::AttrsOwner for MatchArm {} 1760impl ast::AttrsOwner for MatchArm {}
1761impl MatchArm { 1761impl MatchArm {
1762 pub fn pats(&self) -> AstChildren<Pat> { 1762 pub fn pat(&self) -> Option<Pat> {
1763 AstChildren::new(&self.syntax) 1763 AstChildren::new(&self.syntax).next()
1764 } 1764 }
1765 pub fn guard(&self) -> Option<MatchGuard> { 1765 pub fn guard(&self) -> Option<MatchGuard> {
1766 AstChildren::new(&self.syntax).next() 1766 AstChildren::new(&self.syntax).next()
@@ -1887,6 +1887,60 @@ impl RecordField {
1887 } 1887 }
1888} 1888}
1889#[derive(Debug, Clone, PartialEq, Eq, Hash)] 1889#[derive(Debug, Clone, PartialEq, Eq, Hash)]
1890pub struct OrPat {
1891 pub(crate) syntax: SyntaxNode,
1892}
1893impl AstNode for OrPat {
1894 fn can_cast(kind: SyntaxKind) -> bool {
1895 match kind {
1896 OR_PAT => true,
1897 _ => false,
1898 }
1899 }
1900 fn cast(syntax: SyntaxNode) -> Option<Self> {
1901 if Self::can_cast(syntax.kind()) {
1902 Some(Self { syntax })
1903 } else {
1904 None
1905 }
1906 }
1907 fn syntax(&self) -> &SyntaxNode {
1908 &self.syntax
1909 }
1910}
1911impl OrPat {
1912 pub fn pats(&self) -> AstChildren<Pat> {
1913 AstChildren::new(&self.syntax)
1914 }
1915}
1916#[derive(Debug, Clone, PartialEq, Eq, Hash)]
1917pub struct ParenPat {
1918 pub(crate) syntax: SyntaxNode,
1919}
1920impl AstNode for ParenPat {
1921 fn can_cast(kind: SyntaxKind) -> bool {
1922 match kind {
1923 PAREN_PAT => true,
1924 _ => false,
1925 }
1926 }
1927 fn cast(syntax: SyntaxNode) -> Option<Self> {
1928 if Self::can_cast(syntax.kind()) {
1929 Some(Self { syntax })
1930 } else {
1931 None
1932 }
1933 }
1934 fn syntax(&self) -> &SyntaxNode {
1935 &self.syntax
1936 }
1937}
1938impl ParenPat {
1939 pub fn pat(&self) -> Option<Pat> {
1940 AstChildren::new(&self.syntax).next()
1941 }
1942}
1943#[derive(Debug, Clone, PartialEq, Eq, Hash)]
1890pub struct RefPat { 1944pub struct RefPat {
1891 pub(crate) syntax: SyntaxNode, 1945 pub(crate) syntax: SyntaxNode,
1892} 1946}
@@ -2063,7 +2117,11 @@ impl AstNode for SlicePat {
2063 &self.syntax 2117 &self.syntax
2064 } 2118 }
2065} 2119}
2066impl SlicePat {} 2120impl SlicePat {
2121 pub fn args(&self) -> AstChildren<Pat> {
2122 AstChildren::new(&self.syntax)
2123 }
2124}
2067#[derive(Debug, Clone, PartialEq, Eq, Hash)] 2125#[derive(Debug, Clone, PartialEq, Eq, Hash)]
2068pub struct RangePat { 2126pub struct RangePat {
2069 pub(crate) syntax: SyntaxNode, 2127 pub(crate) syntax: SyntaxNode,
@@ -3900,6 +3958,8 @@ impl AstNode for Expr {
3900} 3958}
3901#[derive(Debug, Clone, PartialEq, Eq, Hash)] 3959#[derive(Debug, Clone, PartialEq, Eq, Hash)]
3902pub enum Pat { 3960pub enum Pat {
3961 OrPat(OrPat),
3962 ParenPat(ParenPat),
3903 RefPat(RefPat), 3963 RefPat(RefPat),
3904 BoxPat(BoxPat), 3964 BoxPat(BoxPat),
3905 BindPat(BindPat), 3965 BindPat(BindPat),
@@ -3913,6 +3973,16 @@ pub enum Pat {
3913 RangePat(RangePat), 3973 RangePat(RangePat),
3914 LiteralPat(LiteralPat), 3974 LiteralPat(LiteralPat),
3915} 3975}
3976impl From<OrPat> for Pat {
3977 fn from(node: OrPat) -> Pat {
3978 Pat::OrPat(node)
3979 }
3980}
3981impl From<ParenPat> for Pat {
3982 fn from(node: ParenPat) -> Pat {
3983 Pat::ParenPat(node)
3984 }
3985}
3916impl From<RefPat> for Pat { 3986impl From<RefPat> for Pat {
3917 fn from(node: RefPat) -> Pat { 3987 fn from(node: RefPat) -> Pat {
3918 Pat::RefPat(node) 3988 Pat::RefPat(node)
@@ -3976,15 +4046,16 @@ impl From<LiteralPat> for Pat {
3976impl AstNode for Pat { 4046impl AstNode for Pat {
3977 fn can_cast(kind: SyntaxKind) -> bool { 4047 fn can_cast(kind: SyntaxKind) -> bool {
3978 match kind { 4048 match kind {
3979 REF_PAT | BOX_PAT | BIND_PAT | PLACEHOLDER_PAT | DOT_DOT_PAT | PATH_PAT 4049 OR_PAT | PAREN_PAT | REF_PAT | BOX_PAT | BIND_PAT | PLACEHOLDER_PAT | DOT_DOT_PAT
3980 | RECORD_PAT | TUPLE_STRUCT_PAT | TUPLE_PAT | SLICE_PAT | RANGE_PAT | LITERAL_PAT => { 4050 | PATH_PAT | RECORD_PAT | TUPLE_STRUCT_PAT | TUPLE_PAT | SLICE_PAT | RANGE_PAT
3981 true 4051 | LITERAL_PAT => true,
3982 }
3983 _ => false, 4052 _ => false,
3984 } 4053 }
3985 } 4054 }
3986 fn cast(syntax: SyntaxNode) -> Option<Self> { 4055 fn cast(syntax: SyntaxNode) -> Option<Self> {
3987 let res = match syntax.kind() { 4056 let res = match syntax.kind() {
4057 OR_PAT => Pat::OrPat(OrPat { syntax }),
4058 PAREN_PAT => Pat::ParenPat(ParenPat { syntax }),
3988 REF_PAT => Pat::RefPat(RefPat { syntax }), 4059 REF_PAT => Pat::RefPat(RefPat { syntax }),
3989 BOX_PAT => Pat::BoxPat(BoxPat { syntax }), 4060 BOX_PAT => Pat::BoxPat(BoxPat { syntax }),
3990 BIND_PAT => Pat::BindPat(BindPat { syntax }), 4061 BIND_PAT => Pat::BindPat(BindPat { syntax }),
@@ -4003,6 +4074,8 @@ impl AstNode for Pat {
4003 } 4074 }
4004 fn syntax(&self) -> &SyntaxNode { 4075 fn syntax(&self) -> &SyntaxNode {
4005 match self { 4076 match self {
4077 Pat::OrPat(it) => &it.syntax,
4078 Pat::ParenPat(it) => &it.syntax,
4006 Pat::RefPat(it) => &it.syntax, 4079 Pat::RefPat(it) => &it.syntax,
4007 Pat::BoxPat(it) => &it.syntax, 4080 Pat::BoxPat(it) => &it.syntax,
4008 Pat::BindPat(it) => &it.syntax, 4081 Pat::BindPat(it) => &it.syntax,
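
Downstream code that matches on `ast::Pat` now has two extra variants to handle. A small hypothetical example of a recursive walk over the new shapes:

```rust
use ra_syntax::ast;

// Count the alternatives a pattern offers, looking through parentheses.
// `_ | (_ | _)` has three leaves; any non-or, non-paren pattern counts as one.
fn leaf_count(pat: &ast::Pat) -> usize {
    match pat {
        ast::Pat::OrPat(it) => it.pats().map(|p| leaf_count(&p)).sum(),
        ast::Pat::ParenPat(it) => it.pat().as_ref().map_or(1, leaf_count),
        _ => 1,
    }
}
```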
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0030_cond.txt b/crates/ra_syntax/test_data/parser/inline/ok/0030_cond.txt
index 4028ca243..6fd49c7bc 100644
--- a/crates/ra_syntax/test_data/parser/inline/ok/0030_cond.txt
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0030_cond.txt
@@ -63,27 +63,28 @@ SOURCE_FILE@[0; 197)
63 CONDITION@[56; 84) 63 CONDITION@[56; 84)
64 LET_KW@[56; 59) "let" 64 LET_KW@[56; 59) "let"
65 WHITESPACE@[59; 60) " " 65 WHITESPACE@[59; 60) " "
66 TUPLE_STRUCT_PAT@[60; 67) 66 OR_PAT@[60; 77)
67 PATH@[60; 64) 67 TUPLE_STRUCT_PAT@[60; 67)
68 PATH_SEGMENT@[60; 64) 68 PATH@[60; 64)
69 NAME_REF@[60; 64) 69 PATH_SEGMENT@[60; 64)
70 IDENT@[60; 64) "Some" 70 NAME_REF@[60; 64)
71 L_PAREN@[64; 65) "(" 71 IDENT@[60; 64) "Some"
72 PLACEHOLDER_PAT@[65; 66) 72 L_PAREN@[64; 65) "("
73 UNDERSCORE@[65; 66) "_" 73 PLACEHOLDER_PAT@[65; 66)
74 R_PAREN@[66; 67) ")" 74 UNDERSCORE@[65; 66) "_"
75 WHITESPACE@[67; 68) " " 75 R_PAREN@[66; 67) ")"
76 PIPE@[68; 69) "|" 76 WHITESPACE@[67; 68) " "
77 WHITESPACE@[69; 70) " " 77 PIPE@[68; 69) "|"
78 TUPLE_STRUCT_PAT@[70; 77) 78 WHITESPACE@[69; 70) " "
79 PATH@[70; 74) 79 TUPLE_STRUCT_PAT@[70; 77)
80 PATH_SEGMENT@[70; 74) 80 PATH@[70; 74)
81 NAME_REF@[70; 74) 81 PATH_SEGMENT@[70; 74)
82 IDENT@[70; 74) "Some" 82 NAME_REF@[70; 74)
83 L_PAREN@[74; 75) "(" 83 IDENT@[70; 74) "Some"
84 PLACEHOLDER_PAT@[75; 76) 84 L_PAREN@[74; 75) "("
85 UNDERSCORE@[75; 76) "_" 85 PLACEHOLDER_PAT@[75; 76)
86 R_PAREN@[76; 77) ")" 86 UNDERSCORE@[75; 76) "_"
87 R_PAREN@[76; 77) ")"
87 WHITESPACE@[77; 78) " " 88 WHITESPACE@[77; 78) " "
88 EQ@[78; 79) "=" 89 EQ@[78; 79) "="
89 WHITESPACE@[79; 80) " " 90 WHITESPACE@[79; 80) " "
@@ -137,27 +138,28 @@ SOURCE_FILE@[0; 197)
137 CONDITION@[129; 157) 138 CONDITION@[129; 157)
138 LET_KW@[129; 132) "let" 139 LET_KW@[129; 132) "let"
139 WHITESPACE@[132; 133) " " 140 WHITESPACE@[132; 133) " "
140 TUPLE_STRUCT_PAT@[133; 140) 141 OR_PAT@[133; 150)
141 PATH@[133; 137) 142 TUPLE_STRUCT_PAT@[133; 140)
142 PATH_SEGMENT@[133; 137) 143 PATH@[133; 137)
143 NAME_REF@[133; 137) 144 PATH_SEGMENT@[133; 137)
144 IDENT@[133; 137) "Some" 145 NAME_REF@[133; 137)
145 L_PAREN@[137; 138) "(" 146 IDENT@[133; 137) "Some"
146 PLACEHOLDER_PAT@[138; 139) 147 L_PAREN@[137; 138) "("
147 UNDERSCORE@[138; 139) "_" 148 PLACEHOLDER_PAT@[138; 139)
148 R_PAREN@[139; 140) ")" 149 UNDERSCORE@[138; 139) "_"
149 WHITESPACE@[140; 141) " " 150 R_PAREN@[139; 140) ")"
150 PIPE@[141; 142) "|" 151 WHITESPACE@[140; 141) " "
151 WHITESPACE@[142; 143) " " 152 PIPE@[141; 142) "|"
152 TUPLE_STRUCT_PAT@[143; 150) 153 WHITESPACE@[142; 143) " "
153 PATH@[143; 147) 154 TUPLE_STRUCT_PAT@[143; 150)
154 PATH_SEGMENT@[143; 147) 155 PATH@[143; 147)
155 NAME_REF@[143; 147) 156 PATH_SEGMENT@[143; 147)
156 IDENT@[143; 147) "Some" 157 NAME_REF@[143; 147)
157 L_PAREN@[147; 148) "(" 158 IDENT@[143; 147) "Some"
158 PLACEHOLDER_PAT@[148; 149) 159 L_PAREN@[147; 148) "("
159 UNDERSCORE@[148; 149) "_" 160 PLACEHOLDER_PAT@[148; 149)
160 R_PAREN@[149; 150) ")" 161 UNDERSCORE@[148; 149) "_"
162 R_PAREN@[149; 150) ")"
161 WHITESPACE@[150; 151) " " 163 WHITESPACE@[150; 151) " "
162 EQ@[151; 152) "=" 164 EQ@[151; 152) "="
163 WHITESPACE@[152; 153) " " 165 WHITESPACE@[152; 153) " "
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.txt b/crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.txt
index 87272917b..2f07af4e1 100644
--- a/crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.txt
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0066_match_arm.txt
@@ -74,15 +74,16 @@ SOURCE_FILE@[0; 167)
74 COMMA@[83; 84) "," 74 COMMA@[83; 84) ","
75 WHITESPACE@[84; 93) "\n " 75 WHITESPACE@[84; 93) "\n "
76 MATCH_ARM@[93; 109) 76 MATCH_ARM@[93; 109)
77 BIND_PAT@[93; 94) 77 OR_PAT@[93; 98)
78 NAME@[93; 94) 78 BIND_PAT@[93; 94)
79 IDENT@[93; 94) "X" 79 NAME@[93; 94)
80 WHITESPACE@[94; 95) " " 80 IDENT@[93; 94) "X"
81 PIPE@[95; 96) "|" 81 WHITESPACE@[94; 95) " "
82 WHITESPACE@[96; 97) " " 82 PIPE@[95; 96) "|"
83 BIND_PAT@[97; 98) 83 WHITESPACE@[96; 97) " "
84 NAME@[97; 98) 84 BIND_PAT@[97; 98)
85 IDENT@[97; 98) "Y" 85 NAME@[97; 98)
86 IDENT@[97; 98) "Y"
86 WHITESPACE@[98; 99) " " 87 WHITESPACE@[98; 99) " "
87 MATCH_GUARD@[99; 103) 88 MATCH_GUARD@[99; 103)
88 IF_KW@[99; 101) "if" 89 IF_KW@[99; 101) "if"
@@ -103,15 +104,16 @@ SOURCE_FILE@[0; 167)
103 MATCH_ARM@[119; 137) 104 MATCH_ARM@[119; 137)
104 PIPE@[119; 120) "|" 105 PIPE@[119; 120) "|"
105 WHITESPACE@[120; 121) " " 106 WHITESPACE@[120; 121) " "
106 BIND_PAT@[121; 122) 107 OR_PAT@[121; 126)
107 NAME@[121; 122) 108 BIND_PAT@[121; 122)
108 IDENT@[121; 122) "X" 109 NAME@[121; 122)
109 WHITESPACE@[122; 123) " " 110 IDENT@[121; 122) "X"
110 PIPE@[123; 124) "|" 111 WHITESPACE@[122; 123) " "
111 WHITESPACE@[124; 125) " " 112 PIPE@[123; 124) "|"
112 BIND_PAT@[125; 126) 113 WHITESPACE@[124; 125) " "
113 NAME@[125; 126) 114 BIND_PAT@[125; 126)
114 IDENT@[125; 126) "Y" 115 NAME@[125; 126)
116 IDENT@[125; 126) "Y"
115 WHITESPACE@[126; 127) " " 117 WHITESPACE@[126; 127) " "
116 MATCH_GUARD@[127; 131) 118 MATCH_GUARD@[127; 131)
117 IF_KW@[127; 129) "if" 119 IF_KW@[127; 129) "if"
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs b/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs
index f785acd36..ba719879d 100644
--- a/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.rs
@@ -1,3 +1,6 @@
1fn main() { 1fn main() {
2 let (a, b, ..) = (); 2 let (a, b, ..) = ();
3 let (a,) = ();
4 let (..) = ();
5 let () = ();
3} 6}
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.txt b/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.txt
index 674dec493..4680c267e 100644
--- a/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.txt
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0111_tuple_pat.txt
@@ -1,5 +1,5 @@
1SOURCE_FILE@[0; 39) 1SOURCE_FILE@[0; 94)
2 FN_DEF@[0; 38) 2 FN_DEF@[0; 93)
3 FN_KW@[0; 2) "fn" 3 FN_KW@[0; 2) "fn"
4 WHITESPACE@[2; 3) " " 4 WHITESPACE@[2; 3) " "
5 NAME@[3; 7) 5 NAME@[3; 7)
@@ -8,8 +8,8 @@ SOURCE_FILE@[0; 39)
8 L_PAREN@[7; 8) "(" 8 L_PAREN@[7; 8) "("
9 R_PAREN@[8; 9) ")" 9 R_PAREN@[8; 9) ")"
10 WHITESPACE@[9; 10) " " 10 WHITESPACE@[9; 10) " "
11 BLOCK_EXPR@[10; 38) 11 BLOCK_EXPR@[10; 93)
12 BLOCK@[10; 38) 12 BLOCK@[10; 93)
13 L_CURLY@[10; 11) "{" 13 L_CURLY@[10; 11) "{"
14 WHITESPACE@[11; 16) "\n " 14 WHITESPACE@[11; 16) "\n "
15 LET_STMT@[16; 36) 15 LET_STMT@[16; 36)
@@ -37,6 +37,54 @@ SOURCE_FILE@[0; 39)
37 L_PAREN@[33; 34) "(" 37 L_PAREN@[33; 34) "("
38 R_PAREN@[34; 35) ")" 38 R_PAREN@[34; 35) ")"
39 SEMI@[35; 36) ";" 39 SEMI@[35; 36) ";"
40 WHITESPACE@[36; 37) "\n" 40 WHITESPACE@[36; 41) "\n "
41 R_CURLY@[37; 38) "}" 41 LET_STMT@[41; 55)
42 WHITESPACE@[38; 39) "\n" 42 LET_KW@[41; 44) "let"
43 WHITESPACE@[44; 45) " "
44 TUPLE_PAT@[45; 49)
45 L_PAREN@[45; 46) "("
46 BIND_PAT@[46; 47)
47 NAME@[46; 47)
48 IDENT@[46; 47) "a"
49 COMMA@[47; 48) ","
50 R_PAREN@[48; 49) ")"
51 WHITESPACE@[49; 50) " "
52 EQ@[50; 51) "="
53 WHITESPACE@[51; 52) " "
54 TUPLE_EXPR@[52; 54)
55 L_PAREN@[52; 53) "("
56 R_PAREN@[53; 54) ")"
57 SEMI@[54; 55) ";"
58 WHITESPACE@[55; 60) "\n "
59 LET_STMT@[60; 74)
60 LET_KW@[60; 63) "let"
61 WHITESPACE@[63; 64) " "
62 TUPLE_PAT@[64; 68)
63 L_PAREN@[64; 65) "("
64 DOT_DOT_PAT@[65; 67)
65 DOTDOT@[65; 67) ".."
66 R_PAREN@[67; 68) ")"
67 WHITESPACE@[68; 69) " "
68 EQ@[69; 70) "="
69 WHITESPACE@[70; 71) " "
70 TUPLE_EXPR@[71; 73)
71 L_PAREN@[71; 72) "("
72 R_PAREN@[72; 73) ")"
73 SEMI@[73; 74) ";"
74 WHITESPACE@[74; 79) "\n "
75 LET_STMT@[79; 91)
76 LET_KW@[79; 82) "let"
77 WHITESPACE@[82; 83) " "
78 TUPLE_PAT@[83; 85)
79 L_PAREN@[83; 84) "("
80 R_PAREN@[84; 85) ")"
81 WHITESPACE@[85; 86) " "
82 EQ@[86; 87) "="
83 WHITESPACE@[87; 88) " "
84 TUPLE_EXPR@[88; 90)
85 L_PAREN@[88; 89) "("
86 R_PAREN@[89; 90) ")"
87 SEMI@[90; 91) ";"
88 WHITESPACE@[91; 92) "\n"
89 R_CURLY@[92; 93) "}"
90 WHITESPACE@[93; 94) "\n"
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rs b/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rs
new file mode 100644
index 000000000..a26316605
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.rs
@@ -0,0 +1,8 @@
1fn main() {
2 match () {
3 (_ | _) => (),
4 &(_ | _) => (),
5 (_ | _,) => (),
6 [_ | _,] => (),
7 }
8}
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.txt b/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.txt
new file mode 100644
index 000000000..3a196d3c0
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0156_or_pattern.txt
@@ -0,0 +1,112 @@
1SOURCE_FILE@[0; 130)
2 FN_DEF@[0; 129)
3 FN_KW@[0; 2) "fn"
4 WHITESPACE@[2; 3) " "
5 NAME@[3; 7)
6 IDENT@[3; 7) "main"
7 PARAM_LIST@[7; 9)
8 L_PAREN@[7; 8) "("
9 R_PAREN@[8; 9) ")"
10 WHITESPACE@[9; 10) " "
11 BLOCK_EXPR@[10; 129)
12 BLOCK@[10; 129)
13 L_CURLY@[10; 11) "{"
14 WHITESPACE@[11; 16) "\n "
15 MATCH_EXPR@[16; 127)
16 MATCH_KW@[16; 21) "match"
17 WHITESPACE@[21; 22) " "
18 TUPLE_EXPR@[22; 24)
19 L_PAREN@[22; 23) "("
20 R_PAREN@[23; 24) ")"
21 WHITESPACE@[24; 25) " "
22 MATCH_ARM_LIST@[25; 127)
23 L_CURLY@[25; 26) "{"
24 WHITESPACE@[26; 35) "\n "
25 MATCH_ARM@[35; 48)
26 PAREN_PAT@[35; 42)
27 L_PAREN@[35; 36) "("
28 OR_PAT@[36; 41)
29 PLACEHOLDER_PAT@[36; 37)
30 UNDERSCORE@[36; 37) "_"
31 WHITESPACE@[37; 38) " "
32 PIPE@[38; 39) "|"
33 WHITESPACE@[39; 40) " "
34 PLACEHOLDER_PAT@[40; 41)
35 UNDERSCORE@[40; 41) "_"
36 R_PAREN@[41; 42) ")"
37 WHITESPACE@[42; 43) " "
38 FAT_ARROW@[43; 45) "=>"
39 WHITESPACE@[45; 46) " "
40 TUPLE_EXPR@[46; 48)
41 L_PAREN@[46; 47) "("
42 R_PAREN@[47; 48) ")"
43 COMMA@[48; 49) ","
44 WHITESPACE@[49; 58) "\n "
45 MATCH_ARM@[58; 72)
46 REF_PAT@[58; 66)
47 AMP@[58; 59) "&"
48 PAREN_PAT@[59; 66)
49 L_PAREN@[59; 60) "("
50 OR_PAT@[60; 65)
51 PLACEHOLDER_PAT@[60; 61)
52 UNDERSCORE@[60; 61) "_"
53 WHITESPACE@[61; 62) " "
54 PIPE@[62; 63) "|"
55 WHITESPACE@[63; 64) " "
56 PLACEHOLDER_PAT@[64; 65)
57 UNDERSCORE@[64; 65) "_"
58 R_PAREN@[65; 66) ")"
59 WHITESPACE@[66; 67) " "
60 FAT_ARROW@[67; 69) "=>"
61 WHITESPACE@[69; 70) " "
62 TUPLE_EXPR@[70; 72)
63 L_PAREN@[70; 71) "("
64 R_PAREN@[71; 72) ")"
65 COMMA@[72; 73) ","
66 WHITESPACE@[73; 82) "\n "
67 MATCH_ARM@[82; 96)
68 TUPLE_PAT@[82; 90)
69 L_PAREN@[82; 83) "("
70 OR_PAT@[83; 88)
71 PLACEHOLDER_PAT@[83; 84)
72 UNDERSCORE@[83; 84) "_"
73 WHITESPACE@[84; 85) " "
74 PIPE@[85; 86) "|"
75 WHITESPACE@[86; 87) " "
76 PLACEHOLDER_PAT@[87; 88)
77 UNDERSCORE@[87; 88) "_"
78 COMMA@[88; 89) ","
79 R_PAREN@[89; 90) ")"
80 WHITESPACE@[90; 91) " "
81 FAT_ARROW@[91; 93) "=>"
82 WHITESPACE@[93; 94) " "
83 TUPLE_EXPR@[94; 96)
84 L_PAREN@[94; 95) "("
85 R_PAREN@[95; 96) ")"
86 COMMA@[96; 97) ","
87 WHITESPACE@[97; 106) "\n "
88 MATCH_ARM@[106; 120)
89 SLICE_PAT@[106; 114)
90 L_BRACK@[106; 107) "["
91 OR_PAT@[107; 112)
92 PLACEHOLDER_PAT@[107; 108)
93 UNDERSCORE@[107; 108) "_"
94 WHITESPACE@[108; 109) " "
95 PIPE@[109; 110) "|"
96 WHITESPACE@[110; 111) " "
97 PLACEHOLDER_PAT@[111; 112)
98 UNDERSCORE@[111; 112) "_"
99 COMMA@[112; 113) ","
100 R_BRACK@[113; 114) "]"
101 WHITESPACE@[114; 115) " "
102 FAT_ARROW@[115; 117) "=>"
103 WHITESPACE@[117; 118) " "
104 TUPLE_EXPR@[118; 120)
105 L_PAREN@[118; 119) "("
106 R_PAREN@[119; 120) ")"
107 COMMA@[120; 121) ","
108 WHITESPACE@[121; 126) "\n "
109 R_CURLY@[126; 127) "}"
110 WHITESPACE@[127; 128) "\n"
111 R_CURLY@[128; 129) "}"
112 WHITESPACE@[129; 130) "\n"
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
new file mode 100644
index 000000000..1ebbe5b03
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.rs
@@ -0,0 +1 @@
type Foo = fn(_: bar);
diff --git a/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt
new file mode 100644
index 000000000..52d8f21a4
--- /dev/null
+++ b/crates/ra_syntax/test_data/parser/inline/ok/0157_fn_pointer_unnamed_arg.txt
@@ -0,0 +1,26 @@
1SOURCE_FILE@[0; 23)
2 TYPE_ALIAS_DEF@[0; 22)
3 TYPE_KW@[0; 4) "type"
4 WHITESPACE@[4; 5) " "
5 NAME@[5; 8)
6 IDENT@[5; 8) "Foo"
7 WHITESPACE@[8; 9) " "
8 EQ@[9; 10) "="
9 WHITESPACE@[10; 11) " "
10 FN_POINTER_TYPE@[11; 21)
11 FN_KW@[11; 13) "fn"
12 PARAM_LIST@[13; 21)
13 L_PAREN@[13; 14) "("
14 PARAM@[14; 20)
15 PLACEHOLDER_PAT@[14; 15)
16 UNDERSCORE@[14; 15) "_"
17 COLON@[15; 16) ":"
18 WHITESPACE@[16; 17) " "
19 PATH_TYPE@[17; 20)
20 PATH@[17; 20)
21 PATH_SEGMENT@[17; 20)
22 NAME_REF@[17; 20)
23 IDENT@[17; 20) "bar"
24 R_PAREN@[20; 21) ")"
25 SEMI@[21; 22) ";"
26 WHITESPACE@[22; 23) "\n"
diff --git a/crates/ra_text_edit/src/lib.rs b/crates/ra_text_edit/src/lib.rs
index 5f1b12222..37f23d043 100644
--- a/crates/ra_text_edit/src/lib.rs
+++ b/crates/ra_text_edit/src/lib.rs
@@ -29,8 +29,8 @@ impl AtomTextEdit {
29 } 29 }
30 30
31 pub fn apply(&self, mut text: String) -> String { 31 pub fn apply(&self, mut text: String) -> String {
32 let start = u32::from(self.delete.start()) as usize; 32 let start = self.delete.start().to_usize();
33 let end = u32::from(self.delete.end()) as usize; 33 let end = self.delete.end().to_usize();
34 text.replace_range(start..end, &self.insert); 34 text.replace_range(start..end, &self.insert);
35 text 35 text
36 } 36 }
diff --git a/crates/ra_text_edit/src/text_edit.rs b/crates/ra_text_edit/src/text_edit.rs
index 413c7d782..3291ada42 100644
--- a/crates/ra_text_edit/src/text_edit.rs
+++ b/crates/ra_text_edit/src/text_edit.rs
@@ -66,13 +66,13 @@ impl TextEdit {
66 let mut total_len = text.len(); 66 let mut total_len = text.len();
67 for atom in self.atoms.iter() { 67 for atom in self.atoms.iter() {
68 total_len += atom.insert.len(); 68 total_len += atom.insert.len();
69 total_len -= u32::from(atom.delete.end() - atom.delete.start()) as usize; 69 total_len -= (atom.delete.end() - atom.delete.start()).to_usize();
70 } 70 }
71 let mut buf = String::with_capacity(total_len); 71 let mut buf = String::with_capacity(total_len);
72 let mut prev = 0; 72 let mut prev = 0;
73 for atom in self.atoms.iter() { 73 for atom in self.atoms.iter() {
74 let start = u32::from(atom.delete.start()) as usize; 74 let start = atom.delete.start().to_usize();
75 let end = u32::from(atom.delete.end()) as usize; 75 let end = atom.delete.end().to_usize();
76 if start > prev { 76 if start > prev {
77 buf.push_str(&text[prev..start]); 77 buf.push_str(&text[prev..start]);
78 } 78 }
diff --git a/docs/dev/README.md b/docs/dev/README.md
index 732e4bdd3..991deaf90 100644
--- a/docs/dev/README.md
+++ b/docs/dev/README.md
@@ -74,7 +74,7 @@ relevant test and execute it (VS Code includes an action for running a single
74test). 74test).
75 75
76However, launching a VS Code instance with locally build language server is 76However, launching a VS Code instance with locally build language server is
 77possible. There's "Run Extension (Dev Server)" launch configuration for this. 77possible. There's a **"Run Extension (Dev Server)"** launch configuration for this.
78 78
79In general, I use one of the following workflows for fixing bugs and 79In general, I use one of the following workflows for fixing bugs and
80implementing features. 80implementing features.
@@ -88,7 +88,14 @@ Code to sanity check that the thing works as I expect.
88 88
89If the problem concerns only the VS Code extension, I use **Run Extension** 89If the problem concerns only the VS Code extension, I use **Run Extension**
90launch configuration from `launch.json`. Notably, this uses the usual 90launch configuration from `launch.json`. Notably, this uses the usual
91`ra_lsp_server` binary from `PATH`. After I am done with the fix, I use `cargo 91`ra_lsp_server` binary from `PATH`. For this it is important to have the following
 92in the `settings.json` file:
93```json
94{
95 "rust-analyzer.raLspServerPath": "ra_lsp_server"
96}
97```
98After I am done with the fix, I use `cargo
92xtask install --client-code` to try the new extension for real. 99xtask install --client-code` to try the new extension for real.
93 100
94If I need to fix something in the `ra_lsp_server` crate, I feel sad because it's 101If I need to fix something in the `ra_lsp_server` crate, I feel sad because it's
diff --git a/docs/dev/debugging.md b/docs/dev/debugging.md
index 1ccf4dca2..e6b082156 100644
--- a/docs/dev/debugging.md
+++ b/docs/dev/debugging.md
@@ -1,44 +1,66 @@
 1# Debugging vs Code plugin and the Language Server 1# Debugging the VSCode plugin and the language server
2 2
3**NOTE:** the information here is mostly obsolete 3## Prerequisites
4 4
5Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb). 5- Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb).
6- Open the root folder in VSCode. Here you can access the preconfigured debug setups.
6 7
7Checkout rust rust-analyzer and open it in vscode. 8 <img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
9
10- Install all TypeScript dependencies
11 ```bash
12 cd editors/code
13 npm install
14 ```
15
16## Common knowledge
17
18* All debug configurations open a new `[Extension Development Host]` VSCode instance
19where **only** the `rust-analyzer` extension being debugged is enabled.
20* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
8 21
9```
10$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
11$ cd rust-analyzer
12$ code .
13```
14 22
15- To attach to the `lsp server` in linux you'll have to run: 23## Debug TypeScript VSCode extension
16 24
17 `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope` 25- `Run Extension` - runs the extension with the globally installed `ra_lsp_server` binary.
 26- `Run Extension (Dev Server)` - runs the extension with the locally built LSP server (`target/debug/ra_lsp_server`).
27
28TypeScript debugging is configured to watch your source edits and recompile.
 29To apply changes to an already running debug process, press <kbd>Ctrl+Shift+P</kbd> and run the following command in your `[Extension Development Host]`:
30
31```
32> Developer: Reload Window
33```
18 34
19 This enables ptrace on non forked processes 35## Debug Rust LSP server
20 36
21- Ensure the dependencies for the extension are installed, run the `npm: install - editors/code` task in vscode. 37- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
22 38
23- Launch the `Debug Extension`, this will build the extension and the `lsp server`. 39 ```
40 echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
41 ```
24 42
25- A new instance of vscode with `[Extension Development Host]` in the title.
26 43
27 Don't worry about disabling `rls` all other extensions will be disabled but this one. 44- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
45 ```toml
46 [profile.dev]
47 debug = 2
48 ```
28 49
29- In the new vscode instance open a rust project, and navigate to a rust file 50- Select `Run Extension (Dev Server)` to run your locally built `target/debug/ra_lsp_server`.
30 51
 31- In the original vscode start an additional debug session (the three periods in the launch) and select `Debug Lsp Server`. 52- In the original VSCode window, once again select the `Attach To Server` debug configuration.
32 53
33- A list of running processes should appear select the `ra_lsp_server` from this repo. 54- A list of running processes should appear. Select the `ra_lsp_server` from this repo.
34 55
35- Navigate to `crates/ra_lsp_server/src/main_loop.rs` and add a breakpoint to the `on_task` function. 56- Navigate to `crates/ra_lsp_server/src/main_loop.rs` and add a breakpoint to the `on_task` function.
36 57
37- Go back to the `[Extension Development Host]` instance and hover over a rust variable and your breakpoint should hit. 58- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
38 59
39## Demo 60## Demo
40 61
41![demonstration of debugging](https://user-images.githubusercontent.com/1711539/51384036-254fab80-1b2c-11e9-824d-95f9a6e9cf4f.gif) 62- [Debugging TypeScript VScode extension](https://www.youtube.com/watch?v=T-hvpK6s4wM).
63- [Debugging Rust LSP server](https://www.youtube.com/watch?v=EaNb5rg4E0M).
42 64
43## Troubleshooting 65## Troubleshooting
44 66
diff --git a/docs/user/README.md b/docs/user/README.md
deleted file mode 100644
index 3da30a193..000000000
--- a/docs/user/README.md
+++ /dev/null
@@ -1,250 +0,0 @@
1The main interface to rust-analyzer is the
2[LSP](https://microsoft.github.io/language-server-protocol/) implementation. To
3install lsp server, clone the repository and then run `cargo xtask install
4--server` (which is shorthand for `cargo install --path
5./crates/ra_lsp_server`). This will produce a binary named `ra_lsp_server` which
6you should be able to use it with any LSP-compatible editor. We use custom
7extensions to LSP, so special client-side support is required to take full
8advantage of rust-analyzer. This repository contains support code for VS Code.
9
10```
11$ git clone [email protected]:rust-analyzer/rust-analyzer && cd rust-analyzer
12$ cargo xtask install --server
13```
14Rust Analyzer needs sources of rust standard library to work, so
15you might also need to execute
16
17```
18$ rustup component add rust-src
19```
20
21See [./features.md](./features.md) document for a list of features that are available.
22
23## VS Code
24
25Prerequisites:
26
27In order to build the VS Code plugin, you need to have node.js and npm with
28a minimum version of 10 installed. Please refer to
29[node.js and npm documentation](https://nodejs.org) for installation instructions.
30
31You will also need the most recent version of VS Code: we don't try to
32maintain compatibility with older versions yet.
33
34### Installation from prebuilt binaries
35
36We ship prebuilt binaries for Linux, Mac and Windows via
37[GitHub releases](https://github.com/rust-analyzer/rust-analyzer/releases).
38In order to use them you need to install the client VSCode extension.
39
40Publishing to VSCode marketplace is currently WIP. Thus, you need to clone the repository and install **only** the client extension via
41```
42$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
43$ cd rust-analyzer
44$ cargo xtask install --client-code
45```
46Then open VSCode (or reload the window if it was already running), open some Rust project and you should
47see an info message pop-up.
48
49
50<img height="140px" src="https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png" alt="Download now message"/>
51
52
53Click `Download now`, wait until the progress is 100% and you are ready to go.
54
55For updates you need to remove installed binary
56```
57rm -rf ${HOME}/.config/Code/User/globalStorage/matklad.rust-analyzer
58```
59
60`"Donwload latest language server"` command for VSCode and automatic updates detection is currently WIP.
61
62
63### Installation from sources
64
65The experimental VS Code plugin can be built and installed by executing the
66following commands:
67
68```
69$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
70$ cd rust-analyzer
71$ cargo xtask install
72```
73
74The automatic installation is expected to *just work* for common cases, if it
75doesn't, report bugs!
76
77**Note** [#1831](https://github.com/rust-analyzer/rust-analyzer/issues/1831): If you are using the popular
78[Vim emulation plugin](https://github.com/VSCodeVim/Vim), you will likely
79need to turn off the `rust-analyzer.enableEnhancedTyping` setting.
80(// TODO: This configuration is no longer available, enhanced typing shoud be disabled via removing Enter key binding, [see this issue](https://github.com/rust-analyzer/rust-analyzer/issues/3051))
81
82If you have an unusual setup (for example, `code` is not in the `PATH`), you
83should adapt these manual installation instructions:
84
85```
86$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
87$ cd rust-analyzer
88$ cargo install --path ./crates/ra_lsp_server/ --force --locked
89$ cd ./editors/code
90$ npm install
91$ npm run package
92$ code --install-extension ./rust-analyzer-0.1.0.vsix
93```
94
95It's better to remove existing Rust plugins to avoid interference.
96
97Beyond basic LSP features, there are some extension commands which you can
98invoke via <kbd>Ctrl+Shift+P</kbd> or bind to a shortcut. See [./features.md](./features.md)
99for details.
100
101For updates, pull the latest changes from the master branch, run `cargo xtask install` again, and **restart** VS Code instance.
102See [microsoft/vscode#72308](https://github.com/microsoft/vscode/issues/72308) for why a full restart is needed.
103
104### VS Code Remote
105
106You can also use `rust-analyzer` with the Visual Studio Code Remote extensions
107(Remote SSH, Remote WSL, Remote Containers). In this case, however, you have to
108manually install the `.vsix` package:
109
1101. Build the extension on the remote host using the instructions above (ignore the
111 error if `code` cannot be found in your PATH: VSCode doesn't need to be installed
112 on the remote host).
1132. In Visual Studio Code open a connection to the remote host.
1143. Open the Extensions View (`View > Extensions`, keyboard shortcut: `Ctrl+Shift+X`).
1154. From the top-right kebab menu (`···`) select `Install from VSIX...`
1165. Inside the `rust-analyzer` directory find the `editors/code` subdirectory and choose
117 the `rust-analyzer-0.1.0.vsix` file.
1186. Restart Visual Studio Code and re-establish the connection to the remote host.
119
120In case of errors please make sure that `~/.cargo/bin` is in your `PATH` on the remote
121host.
122
123### Settings
124
125* `rust-analyzer.highlightingOn`: enables experimental syntax highlighting.
126 Colors can be configured via `editor.tokenColorCustomizations`.
127 As an example, [Pale Fire](https://github.com/matklad/pale-fire/) color scheme tweaks rust colors.
128* `rust-analyzer.enableEnhancedTyping`: by default, rust-analyzer intercepts the
129 `Enter` key to make it easier to continue comments. Note that it may conflict with VIM emulation plugin.
130* `rust-analyzer.raLspServerPath`: path to `ra_lsp_server` executable
131* `rust-analyzer.enableCargoWatchOnStartup`: prompt to install & enable `cargo
132 watch` for live error highlighting (note, this **does not** use rust-analyzer)
133* `rust-analyzer.excludeGlobs`: a list of glob-patterns for exclusion (see globset [docs](https://docs.rs/globset) for syntax).
134 Note: glob patterns are applied to all Cargo packages and a rooted at a package root.
135 This is not very intuitive and a limitation of a current implementation.
136* `rust-analyzer.useClientWatching`: use client provided file watching instead
137 of notify watching.
138* `rust-analyzer.cargo-watch.command`: `cargo-watch` command. (e.g: `clippy` will run as `cargo watch -x clippy` )
139* `rust-analyzer.cargo-watch.arguments`: cargo-watch check arguments.
140 (e.g: `--features="shumway,pdf"` will run as `cargo watch -x "check --features="shumway,pdf""` )
141* `rust-analyzer.cargo-watch.ignore`: list of patterns for cargo-watch to ignore (will be passed as `--ignore`)
142* `rust-analyzer.trace.server`: enables internal logging
143* `rust-analyzer.trace.cargo-watch`: enables cargo-watch logging
144* `RUST_SRC_PATH`: environment variable that overwrites the sysroot
145* `rust-analyzer.featureFlags` -- a JSON object to tweak fine-grained behavior:
146 ```jsonc
147 {
148 // Show diagnostics produced by rust-analyzer itself.
149 "lsp.diagnostics": true,
150 // Automatically insert `()` and `<>` when completing functions and types.
151 "completion.insertion.add-call-parenthesis": true,
152 // Enable completions like `.if`, `.match`, etc.
153 "completion.enable-postfix": true,
154 // Show notification when workspace is fully loaded
155 "notifications.workspace-loaded": true,
156 // Show error when no Cargo.toml was found
157 "notifications.cargo-toml-not-found": true,
158 }
159 ```
160
161
162## Emacs
163
164* install recent version of `emacs-lsp` package by following the instructions [here][emacs-lsp]
165* set `lsp-rust-server` to `'rust-analyzer`
166* run `lsp` in a Rust buffer
167* (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys
168
169[emacs-lsp]: https://github.com/emacs-lsp/lsp-mode
170
171
172## Vim and NeoVim (coc-rust-analyzer)
173
174* Install coc.nvim by following the instructions at [coc.nvim][] (nodejs required)
175* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implements _most_ of the features supported in the VSCode extension:
176 - same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
177 - same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
178 - highlighting and inlay_hints are not implemented yet
179
180[coc.nvim]: https://github.com/neoclide/coc.nvim
181[coc-rust-analyzer]: https://github.com/fannheyward/coc-rust-analyzer
182
183## Vim and NeoVim (LanguageClient-neovim)
184
185* Install LanguageClient-neovim by following the instructions [here][lang-client-neovim]
186 - The github project wiki has extra tips on configuration
187
188* Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
189
190```vim
191let g:LanguageClient_serverCommands = {
192\ 'rust': ['ra_lsp_server'],
193\ }
194```
195
196[lang-client-neovim]: https://github.com/autozimu/LanguageClient-neovim
197
198## NeoVim (nvim-lsp)
199
200NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration
201of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
202Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`.
203
204
205## Sublime Text 3
206
207Prequisites:
208
209`LSP` package.
210
211Installation:
212
213* Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
214* Type `LSP Settings` to open the LSP preferences editor
215* Add the following LSP client definition to your settings:
216
217```json
218"rust-analyzer": {
219 "command": ["ra_lsp_server"],
220 "languageId": "rust",
221 "scopes": ["source.rust"],
222 "syntaxes": [
223 "Packages/Rust/Rust.sublime-syntax",
224 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
225 ],
226 "initializationOptions": {
227 "featureFlags": {
228 }
229 },
230}
231```
232
233* You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
234
235### Setting up the `PATH` variable
236
237On Unix systems, `rustup` adds `~/.cargo/bin` to `PATH` by modifying the shell's
238startup file. Depending on your configuration, your Desktop Environment might not
239actually load it. If you find that `rust-analyzer` only runs when starting the
240editor from the terminal, you will have to set up your `PATH` variable manually.
241
242There are a couple of ways to do that:
243
244- for Code, set `rust-analyzer.raLspServerPath` to `~/.cargo/bin` (the `~` is
245 automatically resolved by the extension)
246- copy the binary to a location that is already in `PATH`, e.g. `/usr/local/bin`
247- on Linux, use PAM to configure the `PATH` variable, by e.g. putting
248 `PATH DEFAULT=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:@{HOME}/.cargo/bin:@{HOME}/.local/bin`
249 in your `~/.pam_environment` file; note that this might interfere with other
250 defaults set by the system administrator via `/etc/environment`.
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
new file mode 100644
index 000000000..553687e78
--- /dev/null
+++ b/docs/user/readme.adoc
@@ -0,0 +1,154 @@
1= User Manual
2:toc: preamble
3:sectanchors:
4:page-layout: post
5
6
7// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository
8
 9At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
10This manual focuses on a specific usage of the library -- the implementation of
11https://microsoft.github.io/language-server-protocol/[Language Server Protocol].
 12LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
13
14To improve this document, send a pull request against
15https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file].
16
17== Installation
18
19In theory, one should be able to just install the server binary and have it automatically work with any editor.
20We are not there yet, so some editor specific setup is required.
21
22=== VS Code
23
 24This is the best supported editor at the moment.
 25The rust-analyzer plugin for VS Code is maintained
26https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree].
27
28You can install the latest release of the plugin from
29https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace].
30By default, the plugin will download the matching version of the server as well.
31
32// FIXME: update the image (its text has changed)
33image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[]
34
35The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`.
36
37Note that we only support the latest version of VS Code.
38
39==== Updates
40
 41The extension will be updated automatically as new versions become available. It will ask your permission to download the matching language server binary if needed.
42
43==== Building From Source
44
45Alternatively, both the server and the plugin can be installed from source:
46
47[source]
48----
49$ git clone https://github.com/rust-analyzer/rust-analyzer.git && cd rust-analyzer
50$ cargo xtask install
51----
52
53You'll need Cargo, nodejs and npm for this.
 54To make VS Code use the freshly built server, add this to the settings:
55
56[source,json]
57----
58{ "rust-analyzer.raLspServerPath": "ra_lsp_server" }
59----
60
 61Note that installing via `xtask install` does not work for VS Code Remote; instead, you'll need to install the `.vsix` manually.
62
63=== Language Server Binary
64
 65Other editors generally require the `ra_lsp_server` binary to be in `$PATH`.
 66You can download a pre-built binary from the
 67https://github.com/rust-analyzer/rust-analyzer/releases[releases]
68page, or you can install it from source using the following command:
69
70[source,bash]
71----
72$ cargo xtask install --server
73----
74
75=== Emacs
76
77Emacs support is maintained https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[upstream].
78
 791. Install a recent version of the `emacs-lsp` package by following the instructions https://github.com/emacs-lsp/lsp-mode[here].
802. Set `lsp-rust-server` to `'rust-analyzer`.
813. Run `lsp` in a Rust buffer.
824. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
83
84=== Vim
85
 86There are several LSP client implementations for Vim:
87
88==== coc-rust-analyzer
89
901. Install coc.nvim by following the instructions at
91 https://github.com/neoclide/coc.nvim[coc.nvim]
92 (nodejs required)
932. Run `:CocInstall coc-rust-analyzer` to install
94 https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer];
95 this extension implements _most_ of the features supported in the VS Code extension:
96 * the same configuration options as the VS Code extension, e.g. `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup`, etc.
97 * the same commands, e.g. `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch`, etc.
98 * highlighting and inlay hints are not yet implemented
99
100==== LanguageClient-neovim
101
1021. Install LanguageClient-neovim by following the instructions
103 https://github.com/autozimu/LanguageClient-neovim[here]
104 * The GitHub project wiki has extra tips on configuration.
105
1062. Configure by adding this to your Vim/Neovim config file (replacing the existing Rust-specific line if it exists):
107+
108[source,vim]
109----
110let g:LanguageClient_serverCommands = {
111\ 'rust': ['ra_lsp_server'],
112\ }
113----
114
115==== nvim-lsp
116
117Neovim 0.5 (not yet released) has built-in language server support.
118For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lsp#rust_analyzer[neovim/nvim-lsp].
119Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`, for example:
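
For illustration, a minimal `init.vim` sketch might look like the following. This assumes the `neovim/nvim-lsp` plugin is already installed and `ra_lsp_server` is on `$PATH`; the `omnifunc` line is an optional extra taken from nvim-lsp's own documentation, not something required by rust-analyzer.

[source,vim]
----
" Minimal sketch: register rust-analyzer through nvim-lsp.
" Assumes neovim/nvim-lsp is installed and ra_lsp_server is on $PATH.
lua require'nvim_lsp'.rust_analyzer.setup({})

" Optional: use the built-in LSP omni-completion in Rust buffers.
autocmd Filetype rust setlocal omnifunc=v:lua.vim.lsp.omnifunc
----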
120
121=== Sublime Text 3
122
123Prerequisites:
124
125`LSP` package.
126
127Installation:
128
1291. Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
1302. Type `LSP Settings` to open the LSP preferences editor
1313. Add the following LSP client definition to your settings:
132+
133[source,json]
134----
135"rust-analyzer": {
136 "command": ["ra_lsp_server"],
137 "languageId": "rust",
138 "scopes": ["source.rust"],
139 "syntaxes": [
140 "Packages/Rust/Rust.sublime-syntax",
141 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
142 ],
143 "initializationOptions": {
144 "featureFlags": {
145 }
146 },
147}
148----
149
1504. You can now invoke the command palette, type `LSP enable`, choose whether to enable the rust-analyzer LSP locally or globally, and then select rust-analyzer.
151
152== Usage
153
154See https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/features.md[features.md].
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 5c056463e..c74078735 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -1,6 +1,6 @@
1{ 1{
2 "name": "rust-analyzer", 2 "name": "rust-analyzer",
3 "version": "0.1.0", 3 "version": "0.2.0-dev",
4 "lockfileVersion": 1, 4 "lockfileVersion": 1,
5 "requires": true, 5 "requires": true,
6 "dependencies": { 6 "dependencies": {
@@ -107,9 +107,9 @@
107 "dev": true 107 "dev": true
108 }, 108 },
109 "@types/vscode": { 109 "@types/vscode": {
110 "version": "1.41.0", 110 "version": "1.42.0",
111 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.41.0.tgz", 111 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.42.0.tgz",
112 "integrity": "sha512-7SfeY5u9jgiELwxyLB3z7l6l/GbN9CqpCQGkcRlB7tKRFBxzbz2PoBfGrLxI1vRfUCIq5+hg5vtDHExwq5j3+A==", 112 "integrity": "sha512-ds6TceMsh77Fs0Mq0Vap6Y72JbGWB8Bay4DrnJlf5d9ui2RSe1wis13oQm+XhguOeH1HUfLGzaDAoupTUtgabw==",
113 "dev": true 113 "dev": true
114 }, 114 },
115 "acorn": { 115 "acorn": {
@@ -662,9 +662,9 @@
662 } 662 }
663 }, 663 },
664 "readable-stream": { 664 "readable-stream": {
665 "version": "3.4.0", 665 "version": "3.6.0",
666 "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", 666 "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
667 "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", 667 "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
668 "dev": true, 668 "dev": true,
669 "requires": { 669 "requires": {
670 "inherits": "^2.0.3", 670 "inherits": "^2.0.3",
@@ -860,9 +860,9 @@
860 "dev": true 860 "dev": true
861 }, 861 },
862 "vsce": { 862 "vsce": {
863 "version": "1.71.0", 863 "version": "1.73.0",
864 "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.71.0.tgz", 864 "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.73.0.tgz",
865 "integrity": "sha512-7k+LPC4oJYPyyxs0a5nh4A8CleQ6+2EMPiAiX/bDyN+PmwJFm2FFPqLRxdIsIWfFnkW4ZMQBf10+W62dCRd9kQ==", 865 "integrity": "sha512-6W37Ebbkj3uF3WhT+SCfRtsneRQEFcGvf/XYz+b6OAgDCj4gPurWyDVrqw/HLsbP1WflGIyUfVZ8t5M7kQp6Uw==",
866 "dev": true, 866 "dev": true,
867 "requires": { 867 "requires": {
868 "azure-devops-node-api": "^7.2.0", 868 "azure-devops-node-api": "^7.2.0",
diff --git a/editors/code/package.json b/editors/code/package.json
index f687eb8d4..46acbfe76 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -5,7 +5,8 @@
5 "preview": true, 5 "preview": true,
6 "private": true, 6 "private": true,
7 "icon": "icon.png", 7 "icon": "icon.png",
8 "version": "0.1.0", 8 "//": "The real version is in release.yaml, this one just needs to be bigger",
9 "version": "0.2.20200211-dev",
9 "publisher": "matklad", 10 "publisher": "matklad",
10 "repository": { 11 "repository": {
11 "url": "https://github.com/rust-analyzer/rust-analyzer.git", 12 "url": "https://github.com/rust-analyzer/rust-analyzer.git",
@@ -15,7 +16,7 @@
15 "Other" 16 "Other"
16 ], 17 ],
17 "engines": { 18 "engines": {
18 "vscode": "^1.41.0" 19 "vscode": "^1.42.0"
19 }, 20 },
20 "scripts": { 21 "scripts": {
21 "vscode:prepublish": "tsc && rollup -c", 22 "vscode:prepublish": "tsc && rollup -c",
@@ -35,13 +36,13 @@
35 "@types/node": "^12.12.25", 36 "@types/node": "^12.12.25",
36 "@types/node-fetch": "^2.5.4", 37 "@types/node-fetch": "^2.5.4",
37 "@types/throttle-debounce": "^2.1.0", 38 "@types/throttle-debounce": "^2.1.0",
38 "@types/vscode": "^1.41.0", 39 "@types/vscode": "^1.42.0",
39 "rollup": "^1.31.0", 40 "rollup": "^1.31.0",
40 "tslib": "^1.10.0", 41 "tslib": "^1.10.0",
41 "tslint": "^5.20.1", 42 "tslint": "^5.20.1",
42 "typescript": "^3.7.5", 43 "typescript": "^3.7.5",
43 "typescript-formatter": "^7.2.2", 44 "typescript-formatter": "^7.2.2",
44 "vsce": "^1.71.0" 45 "vsce": "^1.73.0"
45 }, 46 },
46 "activationEvents": [ 47 "activationEvents": [
47 "onLanguage:rust", 48 "onLanguage:rust",
@@ -181,9 +182,20 @@
181 }, 182 },
182 "rust-analyzer.excludeGlobs": { 183 "rust-analyzer.excludeGlobs": {
183 "type": "array", 184 "type": "array",
185 "items": {
186 "type": "string"
187 },
184 "default": [], 188 "default": [],
185 "description": "Paths to exclude from analysis" 189 "description": "Paths to exclude from analysis"
186 }, 190 },
191 "rust-analyzer.rustfmtArgs": {
192 "type": "array",
193 "items": {
194 "type": "string"
195 },
196 "default": [],
197 "description": "Additional arguments to rustfmt"
198 },
187 "rust-analyzer.useClientWatching": { 199 "rust-analyzer.useClientWatching": {
188 "type": "boolean", 200 "type": "boolean",
189 "default": true, 201 "default": true,
@@ -196,6 +208,9 @@
196 }, 208 },
197 "rust-analyzer.cargo-watch.arguments": { 209 "rust-analyzer.cargo-watch.arguments": {
198 "type": "array", 210 "type": "array",
211 "items": {
212 "type": "string"
213 },
199 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )", 214 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )",
200 "default": [] 215 "default": []
201 }, 216 },
@@ -227,10 +242,12 @@
227 }, 242 },
228 "rust-analyzer.lruCapacity": { 243 "rust-analyzer.lruCapacity": {
229 "type": [ 244 "type": [
230 "number", 245 "null",
231 "null" 246 "integer"
232 ], 247 ],
233 "default": null, 248 "default": null,
249 "minimum": 0,
250 "exclusiveMinimum": true,
234 "description": "Number of syntax trees rust-analyzer keeps in memory" 251 "description": "Number of syntax trees rust-analyzer keeps in memory"
235 }, 252 },
236 "rust-analyzer.displayInlayHints": { 253 "rust-analyzer.displayInlayHints": {
@@ -239,8 +256,13 @@
239 "description": "Display additional type and parameter information in the editor" 256 "description": "Display additional type and parameter information in the editor"
240 }, 257 },
241 "rust-analyzer.maxInlayHintLength": { 258 "rust-analyzer.maxInlayHintLength": {
242 "type": "number", 259 "type": [
260 "null",
261 "integer"
262 ],
243 "default": 20, 263 "default": 20,
264 "minimum": 0,
265 "exclusiveMinimum": true,
244 "description": "Maximum length for inlay hints" 266 "description": "Maximum length for inlay hints"
245 }, 267 },
246 "rust-analyzer.cargoFeatures.noDefaultFeatures": { 268 "rust-analyzer.cargoFeatures.noDefaultFeatures": {
@@ -255,6 +277,9 @@
255 }, 277 },
256 "rust-analyzer.cargoFeatures.features": { 278 "rust-analyzer.cargoFeatures.features": {
257 "type": "array", 279 "type": "array",
280 "items": {
281 "type": "string"
282 },
258 "default": [], 283 "default": [],
259 "description": "List of features to activate" 284 "description": "List of features to activate"
260 } 285 }
diff --git a/editors/code/rollup.config.js b/editors/code/rollup.config.js
index f8d320f46..337385a24 100644
--- a/editors/code/rollup.config.js
+++ b/editors/code/rollup.config.js
@@ -18,6 +18,7 @@ export default {
18 external: [...nodeBuiltins, 'vscode'], 18 external: [...nodeBuiltins, 'vscode'],
19 output: { 19 output: {
20 file: './out/main.js', 20 file: './out/main.js',
21 format: 'cjs' 21 format: 'cjs',
22 exports: 'named'
22 } 23 }
23}; 24};
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 2e3d4aba2..11894973c 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -1,44 +1,48 @@
1import * as lc from 'vscode-languageclient'; 1import * as lc from 'vscode-languageclient';
2import * as vscode from 'vscode';
2 3
3import { window, workspace } from 'vscode';
4import { Config } from './config'; 4import { Config } from './config';
5import { ensureLanguageServerBinary } from './installation/language_server'; 5import { ensureServerBinary } from './installation/server';
6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
6 7
7export async function createClient(config: Config): Promise<null | lc.LanguageClient> { 8export async function createClient(config: Config): Promise<null | lc.LanguageClient> {
8 // '.' Is the fallback if no folder is open 9 // '.' Is the fallback if no folder is open
9 // TODO?: Workspace folders support Uri's (eg: file://test.txt). 10 // TODO?: Workspace folders support Uri's (eg: file://test.txt).
10 // It might be a good idea to test if the uri points to a file. 11 // It might be a good idea to test if the uri points to a file.
11 const workspaceFolderPath = workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.'; 12 const workspaceFolderPath = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.';
12 13
13 const raLspServerPath = await ensureLanguageServerBinary(config.langServerSource); 14 const serverPath = await ensureServerBinary(config.serverSource);
14 if (!raLspServerPath) return null; 15 if (!serverPath) return null;
15 16
16 const run: lc.Executable = { 17 const run: lc.Executable = {
17 command: raLspServerPath, 18 command: serverPath,
18 options: { cwd: workspaceFolderPath }, 19 options: { cwd: workspaceFolderPath },
19 }; 20 };
20 const serverOptions: lc.ServerOptions = { 21 const serverOptions: lc.ServerOptions = {
21 run, 22 run,
22 debug: run, 23 debug: run,
23 }; 24 };
24 const traceOutputChannel = window.createOutputChannel( 25 const traceOutputChannel = vscode.window.createOutputChannel(
25 'Rust Analyzer Language Server Trace', 26 'Rust Analyzer Language Server Trace',
26 ); 27 );
28 const cargoWatchOpts = config.cargoWatchOptions;
29
27 const clientOptions: lc.LanguageClientOptions = { 30 const clientOptions: lc.LanguageClientOptions = {
28 documentSelector: [{ scheme: 'file', language: 'rust' }], 31 documentSelector: [{ scheme: 'file', language: 'rust' }],
29 initializationOptions: { 32 initializationOptions: {
30 publishDecorations: true, 33 publishDecorations: true,
31 lruCapacity: config.lruCapacity, 34 lruCapacity: config.lruCapacity,
32 maxInlayHintLength: config.maxInlayHintLength, 35 maxInlayHintLength: config.maxInlayHintLength,
33 cargoWatchEnable: config.cargoWatchOptions.enable, 36 cargoWatchEnable: cargoWatchOpts.enable,
34 cargoWatchArgs: config.cargoWatchOptions.arguments, 37 cargoWatchArgs: cargoWatchOpts.arguments,
35 cargoWatchCommand: config.cargoWatchOptions.command, 38 cargoWatchCommand: cargoWatchOpts.command,
36 cargoWatchAllTargets: config.cargoWatchOptions.allTargets, 39 cargoWatchAllTargets: cargoWatchOpts.allTargets,
37 excludeGlobs: config.excludeGlobs, 40 excludeGlobs: config.excludeGlobs,
38 useClientWatching: config.useClientWatching, 41 useClientWatching: config.useClientWatching,
39 featureFlags: config.featureFlags, 42 featureFlags: config.featureFlags,
40 withSysroot: config.withSysroot, 43 withSysroot: config.withSysroot,
41 cargoFeatures: config.cargoFeatures, 44 cargoFeatures: config.cargoFeatures,
45 rustfmtArgs: config.rustfmtArgs,
42 }, 46 },
43 traceOutputChannel, 47 traceOutputChannel,
44 }; 48 };
@@ -78,6 +82,10 @@ export async function createClient(config: Config): Promise<null | lc.LanguageCl
78 } 82 }
79 }, 83 },
80 }; 84 };
81 res.registerProposedFeatures(); 85
86 // To turn on all proposed features use: res.registerProposedFeatures();
87 // Here we want to just enable CallHierarchyFeature since it is available on stable.
88 // Note that while the CallHierarchyFeature is stable the LSP protocol is not.
89 res.registerFeature(new CallHierarchyFeature(res));
82 return res; 90 return res;
83} 91}
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index d5f3da2ed..c3fa788c7 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -16,45 +16,61 @@ export interface CargoFeatures {
16 allFeatures: boolean; 16 allFeatures: boolean;
17 features: string[]; 17 features: string[];
18} 18}
19
20export class Config { 19export class Config {
21 langServerSource!: null | BinarySource; 20 private static readonly rootSection = "rust-analyzer";
21 private static readonly requiresReloadOpts = [
22 "cargoFeatures",
23 "cargo-watch",
24 ]
25 .map(opt => `${Config.rootSection}.${opt}`);
26
27 private static readonly extensionVersion: string = (() => {
28 const packageJsonVersion = vscode
29 .extensions
30 .getExtension("matklad.rust-analyzer")!
31 .packageJSON
32 .version as string; // n.n.YYYYMMDD
33
34 const realVersionRegexp = /^\d+\.\d+\.(\d{4})(\d{2})(\d{2})/;
35 const [, yyyy, mm, dd] = packageJsonVersion.match(realVersionRegexp)!;
36
37 return `${yyyy}-${mm}-${dd}`;
38 })();
39
40 private cfg!: vscode.WorkspaceConfiguration;
41
42 constructor(private readonly ctx: vscode.ExtensionContext) {
43 vscode.workspace.onDidChangeConfiguration(this.onConfigChange, this, ctx.subscriptions);
44 this.refreshConfig();
45 }
22 46
23 highlightingOn = true; 47 private refreshConfig() {
24 rainbowHighlightingOn = false; 48 this.cfg = vscode.workspace.getConfiguration(Config.rootSection);
25 enableEnhancedTyping = true; 49 console.log("Using configuration:", this.cfg);
26 lruCapacity: null | number = null; 50 }
27 displayInlayHints = true;
28 maxInlayHintLength: null | number = null;
29 excludeGlobs: string[] = [];
30 useClientWatching = true;
31 featureFlags: Record<string, boolean> = {};
32 // for internal use
33 withSysroot: null | boolean = null;
34 cargoWatchOptions: CargoWatchOptions = {
35 enable: true,
36 arguments: [],
37 command: '',
38 allTargets: true,
39 };
40 cargoFeatures: CargoFeatures = {
41 noDefaultFeatures: false,
42 allFeatures: true,
43 features: [],
44 };
45 51
46 private prevEnhancedTyping: null | boolean = null; 52 private async onConfigChange(event: vscode.ConfigurationChangeEvent) {
47 private prevCargoFeatures: null | CargoFeatures = null; 53 this.refreshConfig();
48 private prevCargoWatchOptions: null | CargoWatchOptions = null;
49 54
50 constructor(ctx: vscode.ExtensionContext) { 55 const requiresReloadOpt = Config.requiresReloadOpts.find(
51 vscode.workspace.onDidChangeConfiguration(_ => this.refresh(ctx), null, ctx.subscriptions); 56 opt => event.affectsConfiguration(opt)
52 this.refresh(ctx); 57 );
58
59 if (!requiresReloadOpt) return;
60
61 const userResponse = await vscode.window.showInformationMessage(
62 `Changing "${requiresReloadOpt}" requires a reload`,
63 "Reload now"
64 );
65
66 if (userResponse === "Reload now") {
67 vscode.commands.executeCommand("workbench.action.reloadWindow");
68 }
53 } 69 }
54 70
55 private static expandPathResolving(path: string) { 71 private static replaceTildeWithHomeDir(path: string) {
56 if (path.startsWith('~/')) { 72 if (path.startsWith("~/")) {
57 return path.replace('~', os.homedir()); 73 return os.homedir() + path.slice("~".length);
58 } 74 }
59 return path; 75 return path;
60 } 76 }
@@ -64,9 +80,21 @@ export class Config {
64 * `platform` on GitHub releases. (It is also stored under the same name when 80 * `platform` on GitHub releases. (It is also stored under the same name when
65 * downloaded by the extension). 81 * downloaded by the extension).
66 */ 82 */
67 private static prebuiltLangServerFileName(platform: NodeJS.Platform): null | string { 83 get prebuiltServerFileName(): null | string {
68 switch (platform) { 84 // See possible `arch` values here:
69 case "linux": return "ra_lsp_server-linux"; 85 // https://nodejs.org/api/process.html#process_process_arch
86
87 switch (process.platform) {
88
89 case "linux": {
90 switch (process.arch) {
91 case "arm":
92 case "arm64": return null;
93
94 default: return "ra_lsp_server-linux";
95 }
96 }
97
70 case "darwin": return "ra_lsp_server-mac"; 98 case "darwin": return "ra_lsp_server-mac";
71 case "win32": return "ra_lsp_server-windows.exe"; 99 case "win32": return "ra_lsp_server-windows.exe";
72 100
@@ -82,27 +110,26 @@ export class Config {
82 } 110 }
83 } 111 }
84 112
85 private static langServerBinarySource( 113 get serverSource(): null | BinarySource {
86 ctx: vscode.ExtensionContext, 114 const serverPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("raLspServerPath");
87 config: vscode.WorkspaceConfiguration
88 ): null | BinarySource {
89 const langServerPath = RA_LSP_DEBUG ?? config.get<null | string>("raLspServerPath");
90 115
91 if (langServerPath) { 116 if (serverPath) {
92 return { 117 return {
93 type: BinarySource.Type.ExplicitPath, 118 type: BinarySource.Type.ExplicitPath,
94 path: Config.expandPathResolving(langServerPath) 119 path: Config.replaceTildeWithHomeDir(serverPath)
95 }; 120 };
96 } 121 }
97 122
98 const prebuiltBinaryName = Config.prebuiltLangServerFileName(process.platform); 123 const prebuiltBinaryName = this.prebuiltServerFileName;
99 124
100 if (!prebuiltBinaryName) return null; 125 if (!prebuiltBinaryName) return null;
101 126
102 return { 127 return {
103 type: BinarySource.Type.GithubRelease, 128 type: BinarySource.Type.GithubRelease,
104 dir: ctx.globalStoragePath, 129 dir: this.ctx.globalStoragePath,
105 file: prebuiltBinaryName, 130 file: prebuiltBinaryName,
131 storage: this.ctx.globalState,
132 version: Config.extensionVersion,
106 repo: { 133 repo: {
107 name: "rust-analyzer", 134 name: "rust-analyzer",
108 owner: "rust-analyzer", 135 owner: "rust-analyzer",
@@ -110,158 +137,36 @@ export class Config {
110 }; 137 };
111 } 138 }
112 139
140 // We don't do runtime config validation here for simplicity. More on stackoverflow:
141 // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
142
143 get highlightingOn() { return this.cfg.get("highlightingOn") as boolean; }
144 get rainbowHighlightingOn() { return this.cfg.get("rainbowHighlightingOn") as boolean; }
145 get lruCapacity() { return this.cfg.get("lruCapacity") as null | number; }
146 get displayInlayHints() { return this.cfg.get("displayInlayHints") as boolean; }
147 get maxInlayHintLength() { return this.cfg.get("maxInlayHintLength") as number; }
148 get excludeGlobs() { return this.cfg.get("excludeGlobs") as string[]; }
149 get useClientWatching() { return this.cfg.get("useClientWatching") as boolean; }
150 get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; }
151 get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; }
152
153 get cargoWatchOptions(): CargoWatchOptions {
154 return {
155 enable: this.cfg.get("cargo-watch.enable") as boolean,
156 arguments: this.cfg.get("cargo-watch.arguments") as string[],
157 allTargets: this.cfg.get("cargo-watch.allTargets") as boolean,
158 command: this.cfg.get("cargo-watch.command") as string,
159 };
160 }
113 161
114 // FIXME: revisit the logic for `if (.has(...)) config.get(...)` set default 162 get cargoFeatures(): CargoFeatures {
115 // values only in one place (i.e. remove default values from non-readonly members declarations) 163 return {
116 private refresh(ctx: vscode.ExtensionContext) { 164 noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean,
117 const config = vscode.workspace.getConfiguration('rust-analyzer'); 165 allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean,
118 166 features: this.cfg.get("cargoFeatures.features") as string[],
119 let requireReloadMessage = null; 167 };
120
121 if (config.has('highlightingOn')) {
122 this.highlightingOn = config.get('highlightingOn') as boolean;
123 }
124
125 if (config.has('rainbowHighlightingOn')) {
126 this.rainbowHighlightingOn = config.get(
127 'rainbowHighlightingOn',
128 ) as boolean;
129 }
130
131 if (config.has('enableEnhancedTyping')) {
132 this.enableEnhancedTyping = config.get(
133 'enableEnhancedTyping',
134 ) as boolean;
135
136 if (this.prevEnhancedTyping === null) {
137 this.prevEnhancedTyping = this.enableEnhancedTyping;
138 }
139 } else if (this.prevEnhancedTyping === null) {
140 this.prevEnhancedTyping = this.enableEnhancedTyping;
141 }
142
143 if (this.prevEnhancedTyping !== this.enableEnhancedTyping) {
144 requireReloadMessage =
145 'Changing enhanced typing setting requires a reload';
146 this.prevEnhancedTyping = this.enableEnhancedTyping;
147 }
148
149 this.langServerSource = Config.langServerBinarySource(ctx, config);
150
151 if (config.has('cargo-watch.enable')) {
152 this.cargoWatchOptions.enable = config.get<boolean>(
153 'cargo-watch.enable',
154 true,
155 );
156 }
157
158 if (config.has('cargo-watch.arguments')) {
159 this.cargoWatchOptions.arguments = config.get<string[]>(
160 'cargo-watch.arguments',
161 [],
162 );
163 }
164
165 if (config.has('cargo-watch.command')) {
166 this.cargoWatchOptions.command = config.get<string>(
167 'cargo-watch.command',
168 '',
169 );
170 }
171
172 if (config.has('cargo-watch.allTargets')) {
173 this.cargoWatchOptions.allTargets = config.get<boolean>(
174 'cargo-watch.allTargets',
175 true,
176 );
177 }
178
179 if (config.has('lruCapacity')) {
180 this.lruCapacity = config.get('lruCapacity') as number;
181 }
182
183 if (config.has('displayInlayHints')) {
184 this.displayInlayHints = config.get('displayInlayHints') as boolean;
185 }
186 if (config.has('maxInlayHintLength')) {
187 this.maxInlayHintLength = config.get(
188 'maxInlayHintLength',
189 ) as number;
190 }
191 if (config.has('excludeGlobs')) {
192 this.excludeGlobs = config.get('excludeGlobs') || [];
193 }
194 if (config.has('useClientWatching')) {
195 this.useClientWatching = config.get('useClientWatching') || true;
196 }
197 if (config.has('featureFlags')) {
198 this.featureFlags = config.get('featureFlags') || {};
199 }
200 if (config.has('withSysroot')) {
201 this.withSysroot = config.get('withSysroot') || false;
202 }
203
204 if (config.has('cargoFeatures.noDefaultFeatures')) {
205 this.cargoFeatures.noDefaultFeatures = config.get(
206 'cargoFeatures.noDefaultFeatures',
207 false,
208 );
209 }
210 if (config.has('cargoFeatures.allFeatures')) {
211 this.cargoFeatures.allFeatures = config.get(
212 'cargoFeatures.allFeatures',
213 true,
214 );
215 }
216 if (config.has('cargoFeatures.features')) {
217 this.cargoFeatures.features = config.get(
218 'cargoFeatures.features',
219 [],
220 );
221 }
222
223 if (
224 this.prevCargoFeatures !== null &&
225 (this.cargoFeatures.allFeatures !==
226 this.prevCargoFeatures.allFeatures ||
227 this.cargoFeatures.noDefaultFeatures !==
228 this.prevCargoFeatures.noDefaultFeatures ||
229 this.cargoFeatures.features.length !==
230 this.prevCargoFeatures.features.length ||
231 this.cargoFeatures.features.some(
232 (v, i) => v !== this.prevCargoFeatures!.features[i],
233 ))
234 ) {
235 requireReloadMessage = 'Changing cargo features requires a reload';
236 }
237 this.prevCargoFeatures = { ...this.cargoFeatures };
238
239 if (this.prevCargoWatchOptions !== null) {
240 const changed =
241 this.cargoWatchOptions.enable !== this.prevCargoWatchOptions.enable ||
242 this.cargoWatchOptions.command !== this.prevCargoWatchOptions.command ||
243 this.cargoWatchOptions.allTargets !== this.prevCargoWatchOptions.allTargets ||
244 this.cargoWatchOptions.arguments.length !== this.prevCargoWatchOptions.arguments.length ||
245 this.cargoWatchOptions.arguments.some(
246 (v, i) => v !== this.prevCargoWatchOptions!.arguments[i],
247 );
248 if (changed) {
249 requireReloadMessage = 'Changing cargo-watch options requires a reload';
250 }
251 }
252 this.prevCargoWatchOptions = { ...this.cargoWatchOptions };
253
254 if (requireReloadMessage !== null) {
255 const reloadAction = 'Reload now';
256 vscode.window
257 .showInformationMessage(requireReloadMessage, reloadAction)
258 .then(selectedAction => {
259 if (selectedAction === reloadAction) {
260 vscode.commands.executeCommand(
261 'workbench.action.reloadWindow',
262 );
263 }
264 });
265 }
266 } 168 }
169
170 // for internal use
171 get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; }
267} 172}
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 70042a479..ff6245f78 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -60,6 +60,10 @@ export class Ctx {
60 this.pushCleanup(d); 60 this.pushCleanup(d);
61 } 61 }
62 62
63 get globalState(): vscode.Memento {
64 return this.extCtx.globalState;
65 }
66
63 get subscriptions(): Disposable[] { 67 get subscriptions(): Disposable[] {
64 return this.extCtx.subscriptions; 68 return this.extCtx.subscriptions;
65 } 69 }
@@ -87,15 +91,11 @@ export async function sendRequestWithRetry<R>(
87 for (const delay of [2, 4, 6, 8, 10, null]) { 91 for (const delay of [2, 4, 6, 8, 10, null]) {
88 try { 92 try {
89 return await (token ? client.sendRequest(method, param, token) : client.sendRequest(method, param)); 93 return await (token ? client.sendRequest(method, param, token) : client.sendRequest(method, param));
90 } catch (e) { 94 } catch (err) {
91 if ( 95 if (delay === null || err.code !== lc.ErrorCodes.ContentModified) {
92 e.code === lc.ErrorCodes.ContentModified && 96 throw err;
93 delay !== null
94 ) {
95 await sleep(10 * (1 << delay));
96 continue;
97 } 97 }
98 throw e; 98 await sleep(10 * (1 << delay));
99 } 99 }
100 } 100 }
101 throw 'unreachable'; 101 throw 'unreachable';
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts
index 1c019a51b..3896878cd 100644
--- a/editors/code/src/inlay_hints.ts
+++ b/editors/code/src/inlay_hints.ts
@@ -13,7 +13,7 @@ export function activateInlayHints(ctx: Ctx) {
13 13
14 vscode.workspace.onDidChangeTextDocument( 14 vscode.workspace.onDidChangeTextDocument(
15 async event => { 15 async event => {
16 if (event.contentChanges.length !== 0) return; 16 if (event.contentChanges.length === 0) return;
17 if (event.document.languageId !== 'rust') return; 17 if (event.document.languageId !== 'rust') return;
18 await hintsUpdater.refresh(); 18 await hintsUpdater.refresh();
19 }, 19 },
@@ -27,7 +27,9 @@ export function activateInlayHints(ctx: Ctx) {
27 ctx.subscriptions 27 ctx.subscriptions
28 ); 28 );
29 29
30 ctx.onDidRestart(_ => hintsUpdater.setEnabled(ctx.config.displayInlayHints)); 30 // We pass async function though it will not be awaited when called,
31 // thus Promise rejections won't be handled, but this should never throw in fact...
32 ctx.onDidRestart(async _ => hintsUpdater.setEnabled(ctx.config.displayInlayHints));
31} 33}
32 34
33interface InlayHintsParams { 35interface InlayHintsParams {
@@ -36,7 +38,7 @@ interface InlayHintsParams {
36 38
37interface InlayHint { 39interface InlayHint {
38 range: vscode.Range; 40 range: vscode.Range;
39 kind: string; 41 kind: "TypeHint" | "ParameterHint";
40 label: string; 42 label: string;
41} 43}
42 44
@@ -53,7 +55,7 @@ const parameterHintDecorationType = vscode.window.createTextEditorDecorationType
53}); 55});
54 56
55class HintsUpdater { 57class HintsUpdater {
56 private pending: Map<string, vscode.CancellationTokenSource> = new Map(); 58 private pending = new Map<string, vscode.CancellationTokenSource>();
57 private ctx: Ctx; 59 private ctx: Ctx;
58 private enabled: boolean; 60 private enabled: boolean;
59 61
@@ -62,30 +64,36 @@ class HintsUpdater {
62 this.enabled = ctx.config.displayInlayHints; 64 this.enabled = ctx.config.displayInlayHints;
63 } 65 }
64 66
65 async setEnabled(enabled: boolean) { 67 async setEnabled(enabled: boolean): Promise<void> {
66 if (this.enabled == enabled) return; 68 if (this.enabled == enabled) return;
67 this.enabled = enabled; 69 this.enabled = enabled;
68 70
69 if (this.enabled) { 71 if (this.enabled) {
70 await this.refresh(); 72 return await this.refresh();
71 } else {
72 this.allEditors.forEach(it => {
73 this.setTypeDecorations(it, []);
74 this.setParameterDecorations(it, []);
75 });
76 } 73 }
74 this.allEditors.forEach(it => {
75 this.setTypeDecorations(it, []);
76 this.setParameterDecorations(it, []);
77 });
77 } 78 }
78 79
79 async refresh() { 80 async refresh() {
80 if (!this.enabled) return; 81 if (!this.enabled) return;
81 const promises = this.allEditors.map(it => this.refreshEditor(it)); 82 await Promise.all(this.allEditors.map(it => this.refreshEditor(it)));
82 await Promise.all(promises); 83 }
84
85 private get allEditors(): vscode.TextEditor[] {
86 return vscode.window.visibleTextEditors.filter(
87 editor => editor.document.languageId === 'rust',
88 );
83 } 89 }
84 90
85 private async refreshEditor(editor: vscode.TextEditor): Promise<void> { 91 private async refreshEditor(editor: vscode.TextEditor): Promise<void> {
86 const newHints = await this.queryHints(editor.document.uri.toString()); 92 const newHints = await this.queryHints(editor.document.uri.toString());
87 if (newHints == null) return; 93 if (newHints == null) return;
88 const newTypeDecorations = newHints.filter(hint => hint.kind === 'TypeHint') 94
95 const newTypeDecorations = newHints
96 .filter(hint => hint.kind === 'TypeHint')
89 .map(hint => ({ 97 .map(hint => ({
90 range: hint.range, 98 range: hint.range,
91 renderOptions: { 99 renderOptions: {
@@ -96,7 +104,8 @@ class HintsUpdater {
96 })); 104 }));
97 this.setTypeDecorations(editor, newTypeDecorations); 105 this.setTypeDecorations(editor, newTypeDecorations);
98 106
99 const newParameterDecorations = newHints.filter(hint => hint.kind === 'ParameterHint') 107 const newParameterDecorations = newHints
108 .filter(hint => hint.kind === 'ParameterHint')
100 .map(hint => ({ 109 .map(hint => ({
101 range: hint.range, 110 range: hint.range,
102 renderOptions: { 111 renderOptions: {
@@ -108,12 +117,6 @@ class HintsUpdater {
108 this.setParameterDecorations(editor, newParameterDecorations); 117 this.setParameterDecorations(editor, newParameterDecorations);
109 } 118 }
110 119
111 private get allEditors(): vscode.TextEditor[] {
112 return vscode.window.visibleTextEditors.filter(
113 editor => editor.document.languageId === 'rust',
114 );
115 }
116
117 private setTypeDecorations( 120 private setTypeDecorations(
118 editor: vscode.TextEditor, 121 editor: vscode.TextEditor,
119 decorations: vscode.DecorationOptions[], 122 decorations: vscode.DecorationOptions[],
@@ -137,12 +140,14 @@ class HintsUpdater {
137 private async queryHints(documentUri: string): Promise<InlayHint[] | null> { 140 private async queryHints(documentUri: string): Promise<InlayHint[] | null> {
138 const client = this.ctx.client; 141 const client = this.ctx.client;
139 if (!client) return null; 142 if (!client) return null;
143
140 const request: InlayHintsParams = { 144 const request: InlayHintsParams = {
141 textDocument: { uri: documentUri }, 145 textDocument: { uri: documentUri },
142 }; 146 };
143 const tokenSource = new vscode.CancellationTokenSource(); 147 const tokenSource = new vscode.CancellationTokenSource();
144 const prev = this.pending.get(documentUri); 148 const prevHintsRequest = this.pending.get(documentUri);
145 if (prev) prev.cancel(); 149 prevHintsRequest?.cancel();
150
146 this.pending.set(documentUri, tokenSource); 151 this.pending.set(documentUri, tokenSource);
147 try { 152 try {
148 return await sendRequestWithRetry<InlayHint[] | null>( 153 return await sendRequestWithRetry<InlayHint[] | null>(
diff --git a/editors/code/src/installation/download_artifact.ts b/editors/code/src/installation/download_artifact.ts
new file mode 100644
index 000000000..de655f8f4
--- /dev/null
+++ b/editors/code/src/installation/download_artifact.ts
@@ -0,0 +1,58 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { promises as fs } from "fs";
4import { strict as assert } from "assert";
5
6import { ArtifactReleaseInfo } from "./interfaces";
7import { downloadFile } from "./download_file";
8import { throttle } from "throttle-debounce";
9
10/**
11 * Downloads artifact from given `downloadUrl`.
12 * Creates `installationDir` if it is not yet created and put the artifact under
13 * `artifactFileName`.
14 * Displays info about the download progress in an info message printing the name
15 * of the artifact as `displayName`.
16 */
17export async function downloadArtifact(
18 {downloadUrl, releaseName}: ArtifactReleaseInfo,
19 artifactFileName: string,
20 installationDir: string,
21 displayName: string,
22) {
23 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
24 err?.code,
25 "EEXIST",
26 `Couldn't create directory "${installationDir}" to download `+
27 `${artifactFileName} artifact: ${err.message}`
28 ));
29
30 const installationPath = path.join(installationDir, artifactFileName);
31
32 console.time(`Downloading ${artifactFileName}`);
33 await vscode.window.withProgress(
34 {
35 location: vscode.ProgressLocation.Notification,
36 cancellable: false, // FIXME: add support for canceling download?
37 title: `Downloading ${displayName} (${releaseName})`
38 },
39 async (progress, _cancellationToken) => {
40 let lastPrecentage = 0;
41 const filePermissions = 0o755; // (rwx, r_x, r_x)
42 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
43 200,
44 /* noTrailing: */ true,
45 (readBytes, totalBytes) => {
46 const newPercentage = (readBytes / totalBytes) * 100;
47 progress.report({
48 message: newPercentage.toFixed(0) + "%",
49 increment: newPercentage - lastPrecentage
50 });
51
52 lastPrecentage = newPercentage;
53 })
54 );
55 }
56 );
57 console.timeEnd(`Downloading ${artifactFileName}`);
58}
diff --git a/editors/code/src/installation/download_file.ts b/editors/code/src/installation/download_file.ts
index b51602ef9..d154f4816 100644
--- a/editors/code/src/installation/download_file.ts
+++ b/editors/code/src/installation/download_file.ts
@@ -1,9 +1,13 @@
1import fetch from "node-fetch"; 1import fetch from "node-fetch";
2import * as fs from "fs"; 2import * as fs from "fs";
3import * as stream from "stream";
4import * as util from "util";
3import { strict as assert } from "assert"; 5import { strict as assert } from "assert";
4 6
7const pipeline = util.promisify(stream.pipeline);
8
5/** 9/**
6 * Downloads file from `url` and stores it at `destFilePath`. 10 * Downloads file from `url` and stores it at `destFilePath` with `destFilePermissions`.
7 * `onProgress` callback is called on recieveing each chunk of bytes 11 * `onProgress` callback is called on recieveing each chunk of bytes
8 * to track the progress of downloading, it gets the already read and total 12 * to track the progress of downloading, it gets the already read and total
9 * amount of bytes to read as its parameters. 13 * amount of bytes to read as its parameters.
@@ -11,24 +15,37 @@ import { strict as assert } from "assert";
11export async function downloadFile( 15export async function downloadFile(
12 url: string, 16 url: string,
13 destFilePath: fs.PathLike, 17 destFilePath: fs.PathLike,
18 destFilePermissions: number,
14 onProgress: (readBytes: number, totalBytes: number) => void 19 onProgress: (readBytes: number, totalBytes: number) => void
15): Promise<void> { 20): Promise<void> {
16 const response = await fetch(url); 21 const res = await fetch(url);
22
23 if (!res.ok) {
24 console.log("Error", res.status, "while downloading file from", url);
25 console.dir({ body: await res.text(), headers: res.headers }, { depth: 3 });
17 26
18 const totalBytes = Number(response.headers.get('content-length')); 27 throw new Error(`Got response ${res.status} when trying to download a file.`);
28 }
29
30 const totalBytes = Number(res.headers.get('content-length'));
19 assert(!Number.isNaN(totalBytes), "Sanity check of content-length protocol"); 31 assert(!Number.isNaN(totalBytes), "Sanity check of content-length protocol");
20 32
33 console.log("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath);
34
21 let readBytes = 0; 35 let readBytes = 0;
36 res.body.on("data", (chunk: Buffer) => {
37 readBytes += chunk.length;
38 onProgress(readBytes, totalBytes);
39 });
22 40
23 console.log("Downloading file of", totalBytes, "bytes size from", url, "to", destFilePath); 41 const destFileStream = fs.createWriteStream(destFilePath, { mode: destFilePermissions });
42
43 await pipeline(res.body, destFileStream);
44 return new Promise<void>(resolve => {
45 destFileStream.on("close", resolve);
46 destFileStream.destroy();
24 47
25 return new Promise<void>((resolve, reject) => response.body 48 // Details on workaround: https://github.com/rust-analyzer/rust-analyzer/pull/3092#discussion_r378191131
26 .on("data", (chunk: Buffer) => { 49 // Issue at nodejs repo: https://github.com/nodejs/node/issues/31776
27 readBytes += chunk.length; 50 });
28 onProgress(readBytes, totalBytes);
29 })
30 .on("end", resolve)
31 .on("error", reject)
32 .pipe(fs.createWriteStream(destFilePath))
33 );
34} 51}
diff --git a/editors/code/src/installation/fetch_latest_artifact_metadata.ts b/editors/code/src/installation/fetch_artifact_release_info.ts
index 7e3700603..7d497057a 100644
--- a/editors/code/src/installation/fetch_latest_artifact_metadata.ts
+++ b/editors/code/src/installation/fetch_artifact_release_info.ts
@@ -1,26 +1,32 @@
1import fetch from "node-fetch"; 1import fetch from "node-fetch";
2import { GithubRepo, ArtifactMetadata } from "./interfaces"; 2import { GithubRepo, ArtifactReleaseInfo } from "./interfaces";
3 3
4const GITHUB_API_ENDPOINT_URL = "https://api.github.com"; 4const GITHUB_API_ENDPOINT_URL = "https://api.github.com";
5 5
6
6/** 7/**
7 * Fetches the latest release from GitHub `repo` and returns metadata about 8 * Fetches the release with `releaseTag` (or just latest release when not specified)
8 * `artifactFileName` shipped with this release or `null` if no such artifact was published. 9 * from GitHub `repo` and returns metadata about `artifactFileName` shipped with
10 * this release or `null` if no such artifact was published.
9 */ 11 */
10export async function fetchLatestArtifactMetadata( 12export async function fetchArtifactReleaseInfo(
11 repo: GithubRepo, artifactFileName: string 13 repo: GithubRepo, artifactFileName: string, releaseTag?: string
12): Promise<null | ArtifactMetadata> { 14): Promise<null | ArtifactReleaseInfo> {
13 15
14 const repoOwner = encodeURIComponent(repo.owner); 16 const repoOwner = encodeURIComponent(repo.owner);
15 const repoName = encodeURIComponent(repo.name); 17 const repoName = encodeURIComponent(repo.name);
16 18
17 const apiEndpointPath = `/repos/${repoOwner}/${repoName}/releases/latest`; 19 const apiEndpointPath = releaseTag
20 ? `/repos/${repoOwner}/${repoName}/releases/tags/${releaseTag}`
21 : `/repos/${repoOwner}/${repoName}/releases/latest`;
22
18 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath; 23 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath;
19 24
20 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`) 25 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`)
21 26
22 console.log("Issuing request for released artifacts metadata to", requestUrl); 27 console.log("Issuing request for released artifacts metadata to", requestUrl);
23 28
29 // FIXME: handle non-ok response
24 const response: GithubRelease = await fetch(requestUrl, { 30 const response: GithubRelease = await fetch(requestUrl, {
25 headers: { Accept: "application/vnd.github.v3+json" } 31 headers: { Accept: "application/vnd.github.v3+json" }
26 }) 32 })
diff --git a/editors/code/src/installation/interfaces.ts b/editors/code/src/installation/interfaces.ts
index 8039d0b90..e40839e4b 100644
--- a/editors/code/src/installation/interfaces.ts
+++ b/editors/code/src/installation/interfaces.ts
@@ -1,3 +1,5 @@
1import * as vscode from "vscode";
2
1export interface GithubRepo { 3export interface GithubRepo {
2 name: string; 4 name: string;
3 owner: string; 5 owner: string;
@@ -6,7 +8,7 @@ export interface GithubRepo {
6/** 8/**
7 * Metadata about particular artifact retrieved from GitHub releases. 9 * Metadata about particular artifact retrieved from GitHub releases.
8 */ 10 */
9export interface ArtifactMetadata { 11export interface ArtifactReleaseInfo {
10 releaseName: string; 12 releaseName: string;
11 downloadUrl: string; 13 downloadUrl: string;
12} 14}
@@ -50,6 +52,17 @@ export namespace BinarySource {
50 * and in local `.dir`. 52 * and in local `.dir`.
51 */ 53 */
52 file: string; 54 file: string;
55
56 /**
57 * Tag of github release that denotes a version required by this extension.
58 */
59 version: string;
60
61 /**
62 * Object that provides `get()/update()` operations to store metadata
63 * about the actual binary, e.g. its actual version.
64 */
65 storage: vscode.Memento;
53 } 66 }
54 67
55} 68}
diff --git a/editors/code/src/installation/language_server.ts b/editors/code/src/installation/language_server.ts
deleted file mode 100644
index 1ce67b8b2..000000000
--- a/editors/code/src/installation/language_server.ts
+++ /dev/null
@@ -1,141 +0,0 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as fs } from "fs";
5import { promises as dns } from "dns";
6import { spawnSync } from "child_process";
7import { throttle } from "throttle-debounce";
8
9import { BinarySource } from "./interfaces";
10import { fetchLatestArtifactMetadata } from "./fetch_latest_artifact_metadata";
11import { downloadFile } from "./download_file";
12
13export async function downloadLatestLanguageServer(
14 {file: artifactFileName, dir: installationDir, repo}: BinarySource.GithubRelease
15) {
16 const { releaseName, downloadUrl } = (await fetchLatestArtifactMetadata(
17 repo, artifactFileName
18 ))!;
19
20 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
21 err?.code,
22 "EEXIST",
23 `Couldn't create directory "${installationDir}" to download `+
24 `language server binary: ${err.message}`
25 ));
26
27 const installationPath = path.join(installationDir, artifactFileName);
28
29 console.time("Downloading ra_lsp_server");
30 await vscode.window.withProgress(
31 {
32 location: vscode.ProgressLocation.Notification,
33 cancellable: false, // FIXME: add support for canceling download?
34 title: `Downloading language server (${releaseName})`
35 },
36 async (progress, _cancellationToken) => {
37 let lastPrecentage = 0;
38 await downloadFile(downloadUrl, installationPath, throttle(
39 200,
40 /* noTrailing: */ true,
41 (readBytes, totalBytes) => {
42 const newPercentage = (readBytes / totalBytes) * 100;
43 progress.report({
44 message: newPercentage.toFixed(0) + "%",
45 increment: newPercentage - lastPrecentage
46 });
47
48 lastPrecentage = newPercentage;
49 })
50 );
51 }
52 );
53 console.timeEnd("Downloading ra_lsp_server");
54
55 await fs.chmod(installationPath, 0o755); // Set (rwx, r_x, r_x) permissions
56}
57export async function ensureLanguageServerBinary(
58 langServerSource: null | BinarySource
59): Promise<null | string> {
60
61 if (!langServerSource) {
62 vscode.window.showErrorMessage(
63 "Unfortunately we don't ship binaries for your platform yet. " +
64 "You need to manually clone rust-analyzer repository and " +
65 "run `cargo xtask install --server` to build the language server from sources. " +
66 "If you feel that your platform should be supported, please create an issue " +
67 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
68 "will consider it."
69 );
70 return null;
71 }
72
73 switch (langServerSource.type) {
74 case BinarySource.Type.ExplicitPath: {
75 if (isBinaryAvailable(langServerSource.path)) {
76 return langServerSource.path;
77 }
78
79 vscode.window.showErrorMessage(
80 `Unable to run ${langServerSource.path} binary. ` +
81 `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` +
82 "value to `null` or remove it from the settings to use it by default."
83 );
84 return null;
85 }
86 case BinarySource.Type.GithubRelease: {
87 const prebuiltBinaryPath = path.join(langServerSource.dir, langServerSource.file);
88
89 if (isBinaryAvailable(prebuiltBinaryPath)) {
90 return prebuiltBinaryPath;
91 }
92
93 const userResponse = await vscode.window.showInformationMessage(
94 "Language server binary for rust-analyzer was not found. " +
95 "Do you want to download it now?",
96 "Download now", "Cancel"
97 );
98 if (userResponse !== "Download now") return null;
99
100 try {
101 await downloadLatestLanguageServer(langServerSource);
102 } catch (err) {
103 await vscode.window.showErrorMessage(
104 `Failed to download language server from ${langServerSource.repo.name} ` +
105 `GitHub repository: ${err.message}`
106 );
107
108 await dns.resolve('www.google.com').catch(err => {
109 console.error("DNS resolution failed, there might be an issue with Internet availability");
110 console.error(err);
111 });
112
113 return null;
114 }
115
116 if (!isBinaryAvailable(prebuiltBinaryPath)) assert(false,
117 `Downloaded language server binary is not functional.` +
118 `Downloaded from: ${JSON.stringify(langServerSource)}`
119 );
120
121
122 vscode.window.showInformationMessage(
123 "Rust analyzer language server was successfully installed 🦀"
124 );
125
126 return prebuiltBinaryPath;
127 }
128 }
129
130 function isBinaryAvailable(binaryPath: string) {
131 const res = spawnSync(binaryPath, ["--version"]);
132
133 // ACHTUNG! `res` type declaration is inherently wrong, see
134 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
135
136 console.log("Checked binary availablity via --version", res);
137 console.log(binaryPath, "--version output:", res.output?.map(String));
138
139 return res.status === 0;
140 }
141}
diff --git a/editors/code/src/installation/server.ts b/editors/code/src/installation/server.ts
new file mode 100644
index 000000000..80cb719e3
--- /dev/null
+++ b/editors/code/src/installation/server.ts
@@ -0,0 +1,124 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as dns } from "dns";
5import { spawnSync } from "child_process";
6
7import { BinarySource } from "./interfaces";
8import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
9import { downloadArtifact } from "./download_artifact";
10
11export async function ensureServerBinary(source: null | BinarySource): Promise<null | string> {
12 if (!source) {
13 vscode.window.showErrorMessage(
14 "Unfortunately we don't ship binaries for your platform yet. " +
15 "You need to manually clone rust-analyzer repository and " +
16 "run `cargo xtask install --server` to build the language server from sources. " +
17 "If you feel that your platform should be supported, please create an issue " +
18 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
19 "will consider it."
20 );
21 return null;
22 }
23
24 switch (source.type) {
25 case BinarySource.Type.ExplicitPath: {
26 if (isBinaryAvailable(source.path)) {
27 return source.path;
28 }
29
30 vscode.window.showErrorMessage(
31 `Unable to run ${source.path} binary. ` +
32 `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` +
33 "value to `null` or remove it from the settings to use it by default."
34 );
35 return null;
36 }
37 case BinarySource.Type.GithubRelease: {
38 const prebuiltBinaryPath = path.join(source.dir, source.file);
39
40 const installedVersion: null | string = getServerVersion(source.storage);
41 const requiredVersion: string = source.version;
42
43 console.log("Installed version:", installedVersion, "required:", requiredVersion);
44
45 if (isBinaryAvailable(prebuiltBinaryPath) && installedVersion == requiredVersion) {
46 // FIXME: check for new releases and notify the user to update if possible
47 return prebuiltBinaryPath;
48 }
49
50 const userResponse = await vscode.window.showInformationMessage(
51 `Language server version ${source.version} for rust-analyzer is not installed. ` +
52 "Do you want to download it now?",
53 "Download now", "Cancel"
54 );
55 if (userResponse !== "Download now") return null;
56
57 if (!await downloadServer(source)) return null;
58
59 return prebuiltBinaryPath;
60 }
61 }
62}
63
64async function downloadServer(source: BinarySource.GithubRelease): Promise<boolean> {
65 try {
66 const releaseInfo = (await fetchArtifactReleaseInfo(source.repo, source.file, source.version))!;
67
68 await downloadArtifact(releaseInfo, source.file, source.dir, "language server");
69 await setServerVersion(source.storage, releaseInfo.releaseName);
70 } catch (err) {
71 vscode.window.showErrorMessage(
72 `Failed to download language server from ${source.repo.name} ` +
73 `GitHub repository: ${err.message}`
74 );
75
76 console.error(err);
77
78 dns.resolve('example.com').then(
79 addrs => console.log("DNS resolution for example.com was successful", addrs),
80 err => {
81 console.error(
82 "DNS resolution for example.com failed, " +
83 "there might be an issue with Internet availability"
84 );
85 console.error(err);
86 }
87 );
88 return false;
89 }
90
91 if (!isBinaryAvailable(path.join(source.dir, source.file))) assert(false,
92 `Downloaded language server binary is not functional.` +
93 `Downloaded from: ${JSON.stringify(source, null, 4)}`
94 );
95
96 vscode.window.showInformationMessage(
97 "Rust analyzer language server was successfully installed 🦀"
98 );
99
100 return true;
101}
102
103function isBinaryAvailable(binaryPath: string): boolean {
104 const res = spawnSync(binaryPath, ["--version"]);
105
106 // ACHTUNG! `res` type declaration is inherently wrong, see
107 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
108
109 console.log("Checked binary availablity via --version", res);
110 console.log(binaryPath, "--version output:", res.output?.map(String));
111
112 return res.status === 0;
113}
114
115function getServerVersion(storage: vscode.Memento): null | string {
116 const version = storage.get<null | string>("server-version", null);
117 console.log("Get server-version:", version);
118 return version;
119}
120
121async function setServerVersion(storage: vscode.Memento, version: string): Promise<void> {
122 console.log("Set server-version:", version);
123 await storage.update("server-version", version.toString());
124}
diff --git a/editors/code/src/status_display.ts b/editors/code/src/status_display.ts
index 51dbf388b..993e79d70 100644
--- a/editors/code/src/status_display.ts
+++ b/editors/code/src/status_display.ts
@@ -66,9 +66,9 @@ class StatusDisplay implements Disposable {
66 66
67 refreshLabel() { 67 refreshLabel() {
68 if (this.packageName) { 68 if (this.packageName) {
69 this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`; 69 this.statusBarItem.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`;
70 } else { 70 } else {
71 this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command}`; 71 this.statusBarItem.text = `${spinnerFrames[this.i]} cargo ${this.command}`;
72 } 72 }
73 } 73 }
74 74
diff --git a/xtask/src/ast_src.rs b/xtask/src/ast_src.rs
index 67d1f41bc..2d9ae904b 100644
--- a/xtask/src/ast_src.rs
+++ b/xtask/src/ast_src.rs
@@ -120,6 +120,8 @@ pub(crate) const KINDS_SRC: KindsSrc = KindsSrc {
120 "FOR_TYPE", 120 "FOR_TYPE",
121 "IMPL_TRAIT_TYPE", 121 "IMPL_TRAIT_TYPE",
122 "DYN_TRAIT_TYPE", 122 "DYN_TRAIT_TYPE",
123 "OR_PAT",
124 "PAREN_PAT",
123 "REF_PAT", 125 "REF_PAT",
124 "BOX_PAT", 126 "BOX_PAT",
125 "BIND_PAT", 127 "BIND_PAT",
@@ -412,26 +414,28 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
412 struct MatchExpr { Expr, MatchArmList } 414 struct MatchExpr { Expr, MatchArmList }
413 struct MatchArmList: AttrsOwner { arms: [MatchArm] } 415 struct MatchArmList: AttrsOwner { arms: [MatchArm] }
414 struct MatchArm: AttrsOwner { 416 struct MatchArm: AttrsOwner {
415 pats: [Pat], 417 pat: Pat,
416 guard: MatchGuard, 418 guard: MatchGuard,
417 Expr, 419 Expr,
418 } 420 }
419 struct MatchGuard { Expr } 421 struct MatchGuard { Expr }
420 422
421 struct RecordLit { Path, RecordFieldList } 423 struct RecordLit { Path, RecordFieldList }
422 struct RecordFieldList { 424 struct RecordFieldList {
423 fields: [RecordField], 425 fields: [RecordField],
424 spread: Expr, 426 spread: Expr,
425 } 427 }
426 struct RecordField { NameRef, Expr } 428 struct RecordField { NameRef, Expr }
427 429
430 struct OrPat { pats: [Pat] }
431 struct ParenPat { Pat }
428 struct RefPat { Pat } 432 struct RefPat { Pat }
429 struct BoxPat { Pat } 433 struct BoxPat { Pat }
430 struct BindPat: NameOwner { Pat } 434 struct BindPat: NameOwner { Pat }
431 struct PlaceholderPat { } 435 struct PlaceholderPat { }
432 struct DotDotPat { } 436 struct DotDotPat { }
433 struct PathPat { Path } 437 struct PathPat { Path }
434 struct SlicePat {} 438 struct SlicePat { args: [Pat] }
435 struct RangePat {} 439 struct RangePat {}
436 struct LiteralPat { Literal } 440 struct LiteralPat { Literal }
437 441
@@ -601,6 +605,8 @@ pub(crate) const AST_SRC: AstSrc = AstSrc {
601 } 605 }
602 606
603 enum Pat { 607 enum Pat {
608 OrPat,
609 ParenPat,
604 RefPat, 610 RefPat,
605 BoxPat, 611 BoxPat,
606 BindPat, 612 BindPat,
diff --git a/xtask/src/cmd.rs b/xtask/src/cmd.rs
deleted file mode 100644
index 2027f4893..000000000
--- a/xtask/src/cmd.rs
+++ /dev/null
@@ -1,53 +0,0 @@
1use std::process::{Command, Output, Stdio};
2
3use anyhow::{Context, Result};
4
5use crate::project_root;
6
7pub struct Cmd<'a> {
8 pub unix: &'a str,
9 pub windows: &'a str,
10 pub work_dir: &'a str,
11}
12
13impl Cmd<'_> {
14 pub fn run(self) -> Result<()> {
15 if cfg!(windows) {
16 run(self.windows, self.work_dir)
17 } else {
18 run(self.unix, self.work_dir)
19 }
20 }
21 pub fn run_with_output(self) -> Result<Output> {
22 if cfg!(windows) {
23 run_with_output(self.windows, self.work_dir)
24 } else {
25 run_with_output(self.unix, self.work_dir)
26 }
27 }
28}
29
30pub fn run(cmdline: &str, dir: &str) -> Result<()> {
31 do_run(cmdline, dir, &mut |c| {
32 c.stdout(Stdio::inherit());
33 })
34 .map(|_| ())
35}
36
37pub fn run_with_output(cmdline: &str, dir: &str) -> Result<Output> {
38 do_run(cmdline, dir, &mut |_| {})
39}
40
41fn do_run(cmdline: &str, dir: &str, f: &mut dyn FnMut(&mut Command)) -> Result<Output> {
42 eprintln!("\nwill run: {}", cmdline);
43 let proj_dir = project_root().join(dir);
44 let mut args = cmdline.split_whitespace();
45 let exec = args.next().unwrap();
46 let mut cmd = Command::new(exec);
47 f(cmd.args(args).current_dir(proj_dir).stderr(Stdio::inherit()));
48 let output = cmd.output().with_context(|| format!("running `{}`", cmdline))?;
49 if !output.status.success() {
50 anyhow::bail!("`{}` exited with {}", cmdline, output.status);
51 }
52 Ok(output)
53}
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 8c65b51e3..00bbabce4 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -2,9 +2,9 @@
2 2
3use std::{env, path::PathBuf, str}; 3use std::{env, path::PathBuf, str};
4 4
5use anyhow::{Context, Result}; 5use anyhow::{bail, format_err, Context, Result};
6 6
7use crate::cmd::{run, run_with_output, Cmd}; 7use crate::not_bash::{ls, pushd, rm, run};
8 8
9// Latest stable, feel free to send a PR if this lags behind. 9// Latest stable, feel free to send a PR if this lags behind.
10const REQUIRED_RUST_VERSION: u32 = 41; 10const REQUIRED_RUST_VERSION: u32 = 41;
@@ -24,6 +24,7 @@ pub struct ServerOpt {
24 24
25impl InstallCmd { 25impl InstallCmd {
26 pub fn run(self) -> Result<()> { 26 pub fn run(self) -> Result<()> {
27 let both = self.server.is_some() && self.client.is_some();
27 if cfg!(target_os = "macos") { 28 if cfg!(target_os = "macos") {
28 fix_path_for_mac().context("Fix path for mac")? 29 fix_path_for_mac().context("Fix path for mac")?
29 } 30 }
@@ -33,6 +34,16 @@ impl InstallCmd {
33 if let Some(client) = self.client { 34 if let Some(client) = self.client {
34 install_client(client).context("install client")?; 35 install_client(client).context("install client")?;
35 } 36 }
37 if both {
38 eprintln!(
39 "
40 Installation complete.
41
42 Add `\"rust-analyzer.raLspServerPath\": \"ra_lsp_server\",` to VS Code settings,
43 otherwise it will use the latest release from GitHub.
44"
45 )
46 }
36 Ok(()) 47 Ok(())
37 } 48 }
38} 49}
@@ -44,7 +55,7 @@ fn fix_path_for_mac() -> Result<()> {
44 const ROOT_DIR: &str = ""; 55 const ROOT_DIR: &str = "";
45 let home_dir = match env::var("HOME") { 56 let home_dir = match env::var("HOME") {
46 Ok(home) => home, 57 Ok(home) => home,
47 Err(e) => anyhow::bail!("Failed getting HOME from environment with error: {}.", e), 58 Err(e) => bail!("Failed getting HOME from environment with error: {}.", e),
48 }; 59 };
49 60
50 [ROOT_DIR, &home_dir] 61 [ROOT_DIR, &home_dir]
@@ -58,7 +69,7 @@ fn fix_path_for_mac() -> Result<()> {
58 if !vscode_path.is_empty() { 69 if !vscode_path.is_empty() {
59 let vars = match env::var_os("PATH") { 70 let vars = match env::var_os("PATH") {
60 Some(path) => path, 71 Some(path) => path,
61 None => anyhow::bail!("Could not get PATH variable from env."), 72 None => bail!("Could not get PATH variable from env."),
62 }; 73 };
63 74
64 let mut paths = env::split_paths(&vars).collect::<Vec<_>>(); 75 let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
@@ -71,91 +82,63 @@ fn fix_path_for_mac() -> Result<()> {
71} 82}
72 83
73fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { 84fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
74 let npm_version = Cmd { 85 let _dir = pushd("./editors/code");
75 unix: r"npm --version",
76 windows: r"cmd.exe /c npm --version",
77 work_dir: "./editors/code",
78 }
79 .run();
80 86
81 if npm_version.is_err() { 87 let find_code = |f: fn(&str) -> bool| -> Result<&'static str> {
82 eprintln!("\nERROR: `npm --version` failed, `npm` is required to build the VS Code plugin") 88 ["code", "code-insiders", "codium", "code-oss"]
83 } 89 .iter()
90 .copied()
91 .find(|bin| f(bin))
92 .ok_or_else(|| {
93 format_err!("Can't execute `code --version`. Perhaps it is not in $PATH?")
94 })
95 };
84 96
85 Cmd { unix: r"npm install", windows: r"cmd.exe /c npm install", work_dir: "./editors/code" } 97 let installed_extensions;
86 .run()?; 98 if cfg!(unix) {
87 Cmd { 99 run!("npm --version").context("`npm` is required to build the VS Code plugin")?;
88 unix: r"npm run package --scripts-prepend-node-path", 100 run!("npm install")?;
89 windows: r"cmd.exe /c npm run package",
90 work_dir: "./editors/code",
91 }
92 .run()?;
93 101
94 let code_binary = ["code", "code-insiders", "codium", "code-oss"].iter().find(|bin| { 102 let vsix_pkg = {
95 Cmd { 103 rm("*.vsix")?;
96 unix: &format!("{} --version", bin), 104 run!("npm run package --scripts-prepend-node-path")?;
97 windows: &format!("cmd.exe /c {}.cmd --version", bin), 105 ls("*.vsix")?.pop().unwrap()
98 work_dir: "./editors/code", 106 };
99 }
100 .run()
101 .is_ok()
102 });
103 107
104 let code_binary = match code_binary { 108 let code = find_code(|bin| run!("{} --version", bin).is_ok())?;
105 Some(it) => it, 109 run!("{} --install-extension {} --force", code, vsix_pkg.display())?;
106 None => anyhow::bail!("Can't execute `code --version`. Perhaps it is not in $PATH?"), 110 installed_extensions = run!("{} --list-extensions", code; echo = false)?;
107 }; 111 } else {
112 run!("cmd.exe /c npm --version")
113 .context("`npm` is required to build the VS Code plugin")?;
114 run!("cmd.exe /c npm install")?;
115
116 let vsix_pkg = {
117 rm("*.vsix")?;
118 run!("cmd.exe /c npm run package")?;
119 ls("*.vsix")?.pop().unwrap()
120 };
108 121
109 Cmd { 122 let code = find_code(|bin| run!("cmd.exe /c {}.cmd --version", bin).is_ok())?;
110 unix: &format!(r"{} --install-extension ./rust-analyzer-0.1.0.vsix --force", code_binary), 123 run!(r"cmd.exe /c {}.cmd --install-extension {} --force", code, vsix_pkg.display())?;
111 windows: &format!( 124 installed_extensions = run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)?;
112 r"cmd.exe /c {}.cmd --install-extension ./rust-analyzer-0.1.0.vsix --force",
113 code_binary
114 ),
115 work_dir: "./editors/code",
116 } 125 }
117 .run()?;
118
119 let installed_extensions = {
120 let output = Cmd {
121 unix: &format!(r"{} --list-extensions", code_binary),
122 windows: &format!(r"cmd.exe /c {}.cmd --list-extensions", code_binary),
123 work_dir: ".",
124 }
125 .run_with_output()?;
126 String::from_utf8(output.stdout)?
127 };
128 126
129 if !installed_extensions.contains("rust-analyzer") { 127 if !installed_extensions.contains("rust-analyzer") {
130 anyhow::bail!( 128 bail!(
131 "Could not install the Visual Studio Code extension. \ 129 "Could not install the Visual Studio Code extension. \
132 Please make sure you have at least NodeJS 10.x together with the latest version of VS Code installed and try again." 130 Please make sure you have at least NodeJS 12.x together with the latest version of VS Code installed and try again."
133 ); 131 );
134 } 132 }
135 133
136 if installed_extensions.contains("ra-lsp") {
137 Cmd {
138 unix: &format!(r"{} --uninstall-extension matklad.ra-lsp", code_binary),
139 windows: &format!(
140 r"cmd.exe /c {}.cmd --uninstall-extension matklad.ra-lsp",
141 code_binary
142 ),
143 work_dir: "./editors/code",
144 }
145 .run()?;
146 }
147
148 Ok(()) 134 Ok(())
149} 135}
150 136
151fn install_server(opts: ServerOpt) -> Result<()> { 137fn install_server(opts: ServerOpt) -> Result<()> {
152 let mut old_rust = false; 138 let mut old_rust = false;
153 if let Ok(output) = run_with_output("cargo --version", ".") { 139 if let Ok(stdout) = run!("cargo --version") {
154 if let Ok(stdout) = String::from_utf8(output.stdout) { 140 if !check_version(&stdout, REQUIRED_RUST_VERSION) {
155 println!("{}", stdout); 141 old_rust = true;
156 if !check_version(&stdout, REQUIRED_RUST_VERSION) {
157 old_rust = true;
158 }
159 } 142 }
160 } 143 }
161 144
@@ -166,20 +149,17 @@ fn install_server(opts: ServerOpt) -> Result<()> {
166 ) 149 )
167 } 150 }
168 151
169 let res = if opts.jemalloc { 152 let jemalloc = if opts.jemalloc { "--features jemalloc" } else { "" };
170 run("cargo install --path crates/ra_lsp_server --locked --force --features jemalloc", ".") 153 let res = run!("cargo install --path crates/ra_lsp_server --locked --force {}", jemalloc);
171 } else {
172 run("cargo install --path crates/ra_lsp_server --locked --force", ".")
173 };
174 154
175 if res.is_err() && old_rust { 155 if res.is_err() && old_rust {
176 eprintln!( 156 eprintln!(
177 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n", 157 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
178 REQUIRED_RUST_VERSION, 158 REQUIRED_RUST_VERSION,
179 ) 159 );
180 } 160 }
181 161
182 res 162 res.map(drop)
183} 163}
184 164
185fn check_version(version_output: &str, min_minor_version: u32) -> bool { 165fn check_version(version_output: &str, min_minor_version: u32) -> bool {
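
Aside (not part of the diff): the empty-string `{}` trick in install_server above is harmless because run! re-splits the formatted command on whitespace (see shelx in not_bash.rs below), so a blank feature flag contributes no argument; a standalone check of that assumption:

    fn split_like_shelx(cmd: &str) -> Vec<String> {
        cmd.split_whitespace().map(str::to_string).collect()
    }

    fn main() {
        for features in ["--features jemalloc", ""].iter() {
            let cmd = format!("cargo install --path crates/ra_lsp_server --locked --force {}", features);
            let args = split_like_shelx(&cmd);
            // no empty trailing argument ever reaches Command::args
            assert!(args.iter().all(|a| !a.is_empty()));
        }
    }
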
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index 8fdf43e4a..2bcd76d60 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3mod cmd; 3pub mod not_bash;
4pub mod install; 4pub mod install;
5pub mod pre_commit; 5pub mod pre_commit;
6 6
@@ -9,13 +9,16 @@ mod ast_src;
9 9
10use anyhow::Context; 10use anyhow::Context;
11use std::{ 11use std::{
12 env, fs, 12 env,
13 io::Write, 13 io::Write,
14 path::{Path, PathBuf}, 14 path::{Path, PathBuf},
15 process::{Command, Stdio}, 15 process::{Command, Stdio},
16}; 16};
17 17
18use crate::{cmd::run, codegen::Mode}; 18use crate::{
19 codegen::Mode,
20 not_bash::{fs2, pushd, rm_rf, run},
21};
19 22
20pub use anyhow::Result; 23pub use anyhow::Result;
21 24
@@ -35,9 +38,9 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> {
35 ensure_rustfmt()?; 38 ensure_rustfmt()?;
36 39
37 if mode == Mode::Verify { 40 if mode == Mode::Verify {
38 run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; 41 run!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN)?;
39 } else { 42 } else {
40 run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; 43 run!("rustup run {} -- cargo fmt", TOOLCHAIN)?;
41 } 44 }
42 Ok(()) 45 Ok(())
43} 46}
@@ -67,8 +70,9 @@ fn ensure_rustfmt() -> Result<()> {
67 Ok(status) if status.success() => return Ok(()), 70 Ok(status) if status.success() => return Ok(()),
68 _ => (), 71 _ => (),
69 }; 72 };
70 run(&format!("rustup toolchain install {}", TOOLCHAIN), ".")?; 73 run!("rustup toolchain install {}", TOOLCHAIN)?;
71 run(&format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN), ".") 74 run!("rustup component add rustfmt --toolchain {}", TOOLCHAIN)?;
75 Ok(())
72} 76}
73 77
74pub fn run_clippy() -> Result<()> { 78pub fn run_clippy() -> Result<()> {
@@ -89,34 +93,28 @@ pub fn run_clippy() -> Result<()> {
89 "clippy::nonminimal_bool", 93 "clippy::nonminimal_bool",
90 "clippy::redundant_pattern_matching", 94 "clippy::redundant_pattern_matching",
91 ]; 95 ];
92 run( 96 run!(
93 &format!( 97 "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}",
94 "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}", 98 TOOLCHAIN,
95 TOOLCHAIN, 99 allowed_lints.join(" -A ")
96 allowed_lints.join(" -A ")
97 ),
98 ".",
99 )?; 100 )?;
100 Ok(()) 101 Ok(())
101} 102}
102 103
103fn install_clippy() -> Result<()> { 104fn install_clippy() -> Result<()> {
104 run(&format!("rustup toolchain install {}", TOOLCHAIN), ".")?; 105 run!("rustup toolchain install {}", TOOLCHAIN)?;
105 run(&format!("rustup component add clippy --toolchain {}", TOOLCHAIN), ".") 106 run!("rustup component add clippy --toolchain {}", TOOLCHAIN)?;
107 Ok(())
106} 108}
107 109
108pub fn run_fuzzer() -> Result<()> { 110pub fn run_fuzzer() -> Result<()> {
109 match Command::new("cargo") 111 let _d = pushd("./crates/ra_syntax");
110 .args(&["fuzz", "--help"]) 112 if run!("cargo fuzz --help").is_err() {
111 .stderr(Stdio::null()) 113 run!("cargo install cargo-fuzz")?;
112 .stdout(Stdio::null())
113 .status()
114 {
115 Ok(status) if status.success() => (),
116 _ => run("cargo install cargo-fuzz", ".")?,
117 }; 114 };
118 115
119 run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax") 116 run!("rustup run nightly -- cargo fuzz run parser")?;
117 Ok(())
120} 118}
121 119
122/// Cleans the `./target` dir after the build such that only 120/// Cleans the `./target` dir after the build such that only
@@ -138,7 +136,7 @@ pub fn run_pre_cache() -> Result<()> {
138 } 136 }
139 } 137 }
140 138
141 fs::remove_file("./target/.rustc_info.json")?; 139 fs2::remove_file("./target/.rustc_info.json")?;
142 let to_delete = ["ra_", "heavy_test"]; 140 let to_delete = ["ra_", "heavy_test"];
143 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { 141 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() {
144 for entry in Path::new(dir).read_dir()? { 142 for entry in Path::new(dir).read_dir()? {
@@ -152,7 +150,45 @@ pub fn run_pre_cache() -> Result<()> {
152 Ok(()) 150 Ok(())
153} 151}
154 152
155fn rm_rf(path: &Path) -> Result<()> { 153pub fn run_release(dry_run: bool) -> Result<()> {
156 if path.is_file() { fs::remove_file(path) } else { fs::remove_dir_all(path) } 154 if !dry_run {
157 .with_context(|| format!("failed to remove {:?}", path)) 155 run!("git switch release")?;
156 run!("git fetch upstream")?;
157 run!("git reset --hard upstream/master")?;
158 run!("git push")?;
159 }
160
161 let website_root = project_root().join("../rust-analyzer.github.io");
162 let changelog_dir = website_root.join("./thisweek/_posts");
163
164 let today = run!("date --iso")?;
165 let commit = run!("git rev-parse HEAD")?;
166 let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count();
167
168 let contents = format!(
169 "\
170= Changelog #{}
171:sectanchors:
172:page-layout: post
173
174Commit: commit:{}[] +
175Release: release:{}[]
176
177== New Features
178
179* pr:[] .
180
181== Fixes
182
183== Internal Improvements
184",
185 changelog_n, commit, today
186 );
187
188 let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
189 fs2::write(&path, &contents)?;
190
191 fs2::copy(project_root().join("./docs/user/readme.adoc"), website_root.join("manual.adoc"))?;
192
193 Ok(())
158} 194}
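
Aside (not part of the diff): for concreteness, the changelog post path produced by run_release has this shape; the sample date and post count are invented.

    fn changelog_path(today: &str, changelog_n: usize) -> String {
        format!("{}-changelog-{}.adoc", today, changelog_n)
    }

    fn main() {
        // e.g. `date --iso` printing "2020-02-17" with 16 existing posts
        assert_eq!(changelog_path("2020-02-17", 16), "2020-02-17-changelog-16.adoc");
    }
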
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index c347de9ab..a7dffe2cc 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -14,7 +14,7 @@ use pico_args::Arguments;
14use xtask::{ 14use xtask::{
15 codegen::{self, Mode}, 15 codegen::{self, Mode},
16 install::{ClientOpt, InstallCmd, ServerOpt}, 16 install::{ClientOpt, InstallCmd, ServerOpt},
17 pre_commit, run_clippy, run_fuzzer, run_pre_cache, run_rustfmt, Result, 17 pre_commit, run_clippy, run_fuzzer, run_pre_cache, run_release, run_rustfmt, Result,
18}; 18};
19 19
20fn main() -> Result<()> { 20fn main() -> Result<()> {
@@ -92,6 +92,11 @@ FLAGS:
92 args.finish()?; 92 args.finish()?;
93 run_pre_cache() 93 run_pre_cache()
94 } 94 }
95 "release" => {
96 let dry_run = args.contains("--dry-run");
97 args.finish()?;
98 run_release(dry_run)
99 }
95 _ => { 100 _ => {
96 eprintln!( 101 eprintln!(
97 "\ 102 "\
diff --git a/xtask/src/not_bash.rs b/xtask/src/not_bash.rs
new file mode 100644
index 000000000..3e30e7279
--- /dev/null
+++ b/xtask/src/not_bash.rs
@@ -0,0 +1,165 @@
1//! A bad shell -- small cross platform module for writing glue code
2use std::{
3 cell::RefCell,
4 env,
5 ffi::OsStr,
6 fs,
7 path::{Path, PathBuf},
8 process::{Command, Stdio},
9};
10
11use anyhow::{bail, Context, Result};
12
13pub mod fs2 {
14 use std::{fs, path::Path};
15
16 use anyhow::{Context, Result};
17
18 pub fn read_dir<P: AsRef<Path>>(path: P) -> Result<fs::ReadDir> {
19 let path = path.as_ref();
20 fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display()))
21 }
22
23 pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
24 let path = path.as_ref();
25 fs::write(path, contents).with_context(|| format!("Failed to write {}", path.display()))
26 }
27
28 pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
29 let from = from.as_ref();
30 let to = to.as_ref();
31 fs::copy(from, to)
32 .with_context(|| format!("Failed to copy {} to {}", from.display(), to.display()))
33 }
34
35 pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {
36 let path = path.as_ref();
37 fs::remove_file(path).with_context(|| format!("Failed to remove file {}", path.display()))
38 }
39
40 pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
41 let path = path.as_ref();
42 fs::remove_dir_all(path).with_context(|| format!("Failed to remove dir {}", path.display()))
43 }
44}
45
46macro_rules! _run {
47 ($($expr:expr),*) => {
48 run!($($expr),*; echo = true)
49 };
50 ($($expr:expr),* ; echo = $echo:expr) => {
51 $crate::not_bash::run_process(format!($($expr),*), $echo)
52 };
53}
54pub(crate) use _run as run;
55
56pub struct Pushd {
57 _p: (),
58}
59
60pub fn pushd(path: impl Into<PathBuf>) -> Pushd {
61 Env::with(|env| env.pushd(path.into()));
62 Pushd { _p: () }
63}
64
65impl Drop for Pushd {
66 fn drop(&mut self) {
67 Env::with(|env| env.popd())
68 }
69}
70
71pub fn rm(glob: &str) -> Result<()> {
72 let cwd = Env::with(|env| env.cwd());
73 ls(glob)?.into_iter().try_for_each(|it| fs::remove_file(cwd.join(it)))?;
74 Ok(())
75}
76
77pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {
78 let path = path.as_ref();
79 if path.is_file() {
80 fs2::remove_file(path)
81 } else {
82 fs2::remove_dir_all(path)
83 }
84}
85
86pub fn ls(glob: &str) -> Result<Vec<PathBuf>> {
87 let cwd = Env::with(|env| env.cwd());
88 let mut res = Vec::new();
89 for entry in fs::read_dir(&cwd)? {
90 let entry = entry?;
91 if matches(&entry.file_name(), glob) {
92 let path = entry.path();
93 let path = path.strip_prefix(&cwd).unwrap();
94 res.push(path.to_path_buf())
95 }
96 }
97 return Ok(res);
98
99 fn matches(file_name: &OsStr, glob: &str) -> bool {
100 assert!(glob.starts_with('*'));
101 file_name.to_string_lossy().ends_with(&glob[1..])
102 }
103}
104
105#[doc(hidden)]
106pub fn run_process(cmd: String, echo: bool) -> Result<String> {
107 run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd))
108}
109
110fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
111 let cwd = Env::with(|env| env.cwd());
112 let mut args = shelx(cmd);
113 let binary = args.remove(0);
114
115 if echo {
116 println!("> {}", cmd)
117 }
118
119 let output = Command::new(binary)
120 .args(args)
121 .current_dir(cwd)
122 .stdin(Stdio::null())
123 .stderr(Stdio::inherit())
124 .output()?;
125 let stdout = String::from_utf8(output.stdout)?;
126
127 if echo {
128 print!("{}", stdout)
129 }
130
131 if !output.status.success() {
132 bail!("{}", output.status)
133 }
134
135 Ok(stdout.trim().to_string())
136}
137
138// FIXME: some real shell lexing here
139fn shelx(cmd: &str) -> Vec<String> {
140 cmd.split_whitespace().map(|it| it.to_string()).collect()
141}
142
143#[derive(Default)]
144struct Env {
145 pushd_stack: Vec<PathBuf>,
146}
147
148impl Env {
149 fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
150 thread_local! {
151 static ENV: RefCell<Env> = Default::default();
152 }
153 ENV.with(|it| f(&mut *it.borrow_mut()))
154 }
155
156 fn pushd(&mut self, dir: PathBuf) {
157 self.pushd_stack.push(dir)
158 }
159 fn popd(&mut self) {
160 self.pushd_stack.pop().unwrap();
161 }
162 fn cwd(&self) -> PathBuf {
163 self.pushd_stack.last().cloned().unwrap_or_else(|| env::current_dir().unwrap())
164 }
165}
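
Aside (not part of the diff): a usage sketch of the not_bash API from inside the xtask crate; the function name and the `ls -l` command are illustrative only.

    use anyhow::Result;
    use crate::not_bash::{ls, pushd, rm, run};

    fn package_extension() -> Result<()> {
        let _dir = pushd("./editors/code");   // directory is popped when `_dir` drops
        rm("*.vsix")?;                        // globs are simple `*suffix` matches
        run!("npm run package")?;             // echoes the command, returns trimmed stdout
        let vsix = ls("*.vsix")?.pop().unwrap();
        run!("ls -l {}", vsix.display(); echo = false)?;
        Ok(())
    }
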
diff --git a/xtask/src/pre_commit.rs b/xtask/src/pre_commit.rs
index 88e868ca6..056f34acf 100644
--- a/xtask/src/pre_commit.rs
+++ b/xtask/src/pre_commit.rs
@@ -4,18 +4,18 @@ use std::{fs, path::PathBuf};
4 4
5use anyhow::{bail, Result}; 5use anyhow::{bail, Result};
6 6
7use crate::{cmd::run_with_output, project_root, run, run_rustfmt, Mode}; 7use crate::{not_bash::run, project_root, run_rustfmt, Mode};
8 8
9// FIXME: if there are changed `.ts` files, also reformat TypeScript (by 9// FIXME: if there are changed `.ts` files, also reformat TypeScript (by
10// shelling out to `npm fmt`). 10// shelling out to `npm fmt`).
11pub fn run_hook() -> Result<()> { 11pub fn run_hook() -> Result<()> {
12 run_rustfmt(Mode::Overwrite)?; 12 run_rustfmt(Mode::Overwrite)?;
13 13
14 let diff = run_with_output("git diff --diff-filter=MAR --name-only --cached", ".")?; 14 let diff = run!("git diff --diff-filter=MAR --name-only --cached")?;
15 15
16 let root = project_root(); 16 let root = project_root();
17 for line in String::from_utf8(diff.stdout)?.lines() { 17 for line in diff.lines() {
18 run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?; 18 run!("git update-index --add {}", root.join(line).display())?;
19 } 19 }
20 20
21 Ok(()) 21 Ok(())
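
Aside (not part of the diff): because run! now hands back a trimmed String, the hook iterates diff.lines() directly instead of decoding raw stdout bytes; a trivial stand-alone illustration with made-up file names:

    fn main() {
        let diff = "crates/ra_ide/src/lib.rs\nxtask/src/lib.rs";
        for line in diff.lines() {
            println!("git update-index --add {}", line);
        }
    }
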