-rw-r--r--  .github/workflows/release.yaml  2
-rw-r--r--  .vscode/launch.json  26
-rw-r--r--  Cargo.lock  3
-rw-r--r--  Cargo.toml  5
-rw-r--r--  crates/ra_assists/src/handlers/add_missing_impl_members.rs  47
-rw-r--r--  crates/ra_assists/src/lib.rs  2
-rw-r--r--  crates/ra_assists/src/utils.rs  75
-rw-r--r--  crates/ra_cli/Cargo.toml  2
-rw-r--r--  crates/ra_cli/src/analysis_bench.rs  54
-rw-r--r--  crates/ra_cli/src/analysis_stats.rs  83
-rw-r--r--  crates/ra_cli/src/main.rs  56
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs  16
-rw-r--r--  crates/ra_hir_def/src/body.rs  54
-rw-r--r--  crates/ra_hir_def/src/lib.rs  63
-rw-r--r--  crates/ra_hir_def/src/nameres/collector.rs  79
-rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs  2
-rw-r--r--  crates/ra_hir_ty/src/tests/coercion.rs  22
-rw-r--r--  crates/ra_hir_ty/src/traits.rs  3
-rw-r--r--  crates/ra_ide/src/completion.rs  3
-rw-r--r--  crates/ra_ide/src/completion/complete_trait_impl.rs  436
-rw-r--r--  crates/ra_ide/src/completion/completion_context.rs  9
-rw-r--r--  crates/ra_ide/src/lib.rs  14
-rw-r--r--  crates/ra_ide/src/runnables.rs  109
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html  1
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html  1
-rw-r--r--  crates/ra_ide/src/ssr.rs  324
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs  1
-rw-r--r--  crates/ra_ide_db/Cargo.toml  1
-rw-r--r--  crates/ra_lsp_server/src/cargo_target_spec.rs  16
-rw-r--r--  crates/ra_lsp_server/src/config.rs  3
-rw-r--r--  crates/ra_lsp_server/src/main.rs  7
-rw-r--r--  crates/ra_lsp_server/src/main_loop.rs  2
-rw-r--r--  crates/ra_lsp_server/src/main_loop/handlers.rs  10
-rw-r--r--  crates/ra_lsp_server/src/req.rs  13
-rw-r--r--  crates/ra_lsp_server/src/world.rs  1
-rw-r--r--  crates/ra_lsp_server/tests/heavy_tests/main.rs  2
-rw-r--r--  crates/ra_prof/src/lib.rs  7
-rw-r--r--  crates/ra_syntax/src/ast/make.rs  2
-rw-r--r--  docs/dev/debugging.md  64
-rw-r--r--  docs/user/readme.adoc  14
-rw-r--r--  editors/code/package-lock.json  14
-rw-r--r--  editors/code/package.json  41
-rw-r--r--  editors/code/rollup.config.js  3
-rw-r--r--  editors/code/src/client.ts  25
-rw-r--r--  editors/code/src/commands/index.ts  1
-rw-r--r--  editors/code/src/commands/ssr.ts  36
-rw-r--r--  editors/code/src/config.ts  292
-rw-r--r--  editors/code/src/ctx.ts  16
-rw-r--r--  editors/code/src/inlay_hints.ts  51
-rw-r--r--  editors/code/src/installation/download_artifact.ts  58
-rw-r--r--  editors/code/src/installation/fetch_artifact_release_info.ts (renamed from editors/code/src/installation/fetch_latest_artifact_metadata.ts)  20
-rw-r--r--  editors/code/src/installation/interfaces.ts  15
-rw-r--r--  editors/code/src/installation/language_server.ts  148
-rw-r--r--  editors/code/src/installation/server.ts  124
-rw-r--r--  editors/code/src/main.ts  1
-rw-r--r--  editors/code/src/status_display.ts  4
-rw-r--r--  xtask/src/install.rs  2
57 files changed, 1835 insertions, 650 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index ff7a95ee1..eae4fbcb5 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -190,4 +190,4 @@ jobs:
190 - name: Publish Extension 190 - name: Publish Extension
191 working-directory: ./editors/code 191 working-directory: ./editors/code
192 # token from https://dev.azure.com/rust-analyzer/ 192 # token from https://dev.azure.com/rust-analyzer/
193 run: ./node_modules/vsce/out/vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }} 193 run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 55a2f10f2..b1bd98d4a 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -2,39 +2,61 @@
2 // Use IntelliSense to learn about possible attributes. 2 // Use IntelliSense to learn about possible attributes.
3 // Hover to view descriptions of existing attributes. 3 // Hover to view descriptions of existing attributes.
4 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 4 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5
6 // NOTE: --disable-extensions
7 // Disable all installed extensions to increase performance of the debug instance
8 // and prevent potential conflicts with other installed extensions.
9
5 "version": "0.2.0", 10 "version": "0.2.0",
6 "configurations": [ 11 "configurations": [
7 { 12 {
13 // Used for testing the extension with the installed LSP server.
8 "name": "Run Extension", 14 "name": "Run Extension",
9 "type": "extensionHost", 15 "type": "extensionHost",
10 "request": "launch", 16 "request": "launch",
11 "runtimeExecutable": "${execPath}", 17 "runtimeExecutable": "${execPath}",
12 "args": [ 18 "args": [
19 "--disable-extensions",
13 "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 20 "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
14 ], 21 ],
15 "outFiles": [ 22 "outFiles": [
16 "${workspaceFolder}/editors/code/out/**/*.js" 23 "${workspaceFolder}/editors/code/out/**/*.js"
17 ], 24 ],
18 "preLaunchTask": "Build Extension" 25 "preLaunchTask": "Build Extension",
26 "skipFiles": [
27 "<node_internals>/**/*.js"
28 ]
19 }, 29 },
20 { 30 {
31 // Used for testing the extension with a local build of the LSP server (in `target/debug`).
21 "name": "Run Extension (Dev Server)", 32 "name": "Run Extension (Dev Server)",
22 "type": "extensionHost", 33 "type": "extensionHost",
23 "request": "launch", 34 "request": "launch",
24 "runtimeExecutable": "${execPath}", 35 "runtimeExecutable": "${execPath}",
25 "args": [ 36 "args": [
37 "--disable-extensions",
26 "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 38 "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
27 ], 39 ],
28 "outFiles": [ 40 "outFiles": [
29 "${workspaceFolder}/editors/code/out/**/*.js" 41 "${workspaceFolder}/editors/code/out/**/*.js"
30 ], 42 ],
31 "preLaunchTask": "Build Extension", 43 "preLaunchTask": "Build Extension",
44 "skipFiles": [
45 "<node_internals>/**/*.js"
46 ],
32 "env": { 47 "env": {
33 "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server" 48 "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server"
34 } 49 }
35 }, 50 },
36 { 51 {
37 "name": "Debug Lsp Server", 52 // Used to attach LLDB to a running LSP server.
53 // NOTE: Might require root permissions. For this run:
54 //
55 // `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`
56 //
57 // Don't forget to set `debug = 2` in `Cargo.toml` before building the server
58
59 "name": "Attach To Server",
38 "type": "lldb", 60 "type": "lldb",
39 "request": "attach", 61 "request": "attach",
40 "program": "${workspaceFolder}/target/debug/ra_lsp_server", 62 "program": "${workspaceFolder}/target/debug/ra_lsp_server",
diff --git a/Cargo.lock b/Cargo.lock
index f1651edaa..f44e514dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1015,6 +1015,7 @@ name = "ra_cli"
1015version = "0.1.0" 1015version = "0.1.0"
1016dependencies = [ 1016dependencies = [
1017 "env_logger", 1017 "env_logger",
1018 "itertools",
1018 "pico-args", 1019 "pico-args",
1019 "ra_batch", 1020 "ra_batch",
1020 "ra_db", 1021 "ra_db",
@@ -1024,6 +1025,7 @@ dependencies = [
1024 "ra_ide", 1025 "ra_ide",
1025 "ra_prof", 1026 "ra_prof",
1026 "ra_syntax", 1027 "ra_syntax",
1028 "rand 0.7.3",
1027] 1029]
1028 1030
1029[[package]] 1031[[package]]
@@ -1174,7 +1176,6 @@ dependencies = [
1174 "ra_prof", 1176 "ra_prof",
1175 "ra_syntax", 1177 "ra_syntax",
1176 "ra_text_edit", 1178 "ra_text_edit",
1177 "rand 0.7.3",
1178 "rayon", 1179 "rayon",
1179 "rustc-hash", 1180 "rustc-hash",
1180 "superslice", 1181 "superslice",
diff --git a/Cargo.toml b/Cargo.toml
index e5620b1b7..c034e2424 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,3 +31,8 @@ opt-level = 0
31 31
32[patch.'crates-io'] 32[patch.'crates-io']
33# rowan = { path = "../rowan" } 33# rowan = { path = "../rowan" }
34
35[patch.'https://github.com/rust-lang/chalk.git']
36# chalk-solve = { path = "../chalk/chalk-solve" }
37# chalk-rust-ir = { path = "../chalk/chalk-rust-ir" }
38# chalk-ir = { path = "../chalk/chalk-ir" }
diff --git a/crates/ra_assists/src/handlers/add_missing_impl_members.rs b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
index 448697d31..ab21388c8 100644
--- a/crates/ra_assists/src/handlers/add_missing_impl_members.rs
+++ b/crates/ra_assists/src/handlers/add_missing_impl_members.rs
@@ -1,4 +1,4 @@
1use hir::{db::HirDatabase, HasSource, InFile}; 1use hir::{HasSource, InFile};
2use ra_syntax::{ 2use ra_syntax::{
3 ast::{self, edit, make, AstNode, NameOwner}, 3 ast::{self, edit, make, AstNode, NameOwner},
4 SmolStr, 4 SmolStr,
@@ -6,6 +6,7 @@ use ra_syntax::{
6 6
7use crate::{ 7use crate::{
8 ast_transform::{self, AstTransform, QualifyPaths, SubstituteTypeParams}, 8 ast_transform::{self, AstTransform, QualifyPaths, SubstituteTypeParams},
9 utils::{get_missing_impl_items, resolve_target_trait},
9 Assist, AssistCtx, AssistId, 10 Assist, AssistCtx, AssistId,
10}; 11};
11 12
@@ -103,11 +104,9 @@ fn add_missing_impl_members_inner(
103 let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?; 104 let impl_node = ctx.find_node_at_offset::<ast::ImplBlock>()?;
104 let impl_item_list = impl_node.item_list()?; 105 let impl_item_list = impl_node.item_list()?;
105 106
106 let (trait_, trait_def) = { 107 let analyzer = ctx.source_analyzer(impl_node.syntax(), None);
107 let analyzer = ctx.source_analyzer(impl_node.syntax(), None);
108 108
109 resolve_target_trait_def(ctx.db, &analyzer, &impl_node)? 109 let trait_ = resolve_target_trait(ctx.db, &analyzer, &impl_node)?;
110 };
111 110
112 let def_name = |item: &ast::ImplItem| -> Option<SmolStr> { 111 let def_name = |item: &ast::ImplItem| -> Option<SmolStr> {
113 match item { 112 match item {
@@ -118,11 +117,14 @@ fn add_missing_impl_members_inner(
118 .map(|it| it.text().clone()) 117 .map(|it| it.text().clone())
119 }; 118 };
120 119
121 let trait_items = trait_def.item_list()?.impl_items(); 120 let missing_items = get_missing_impl_items(ctx.db, &analyzer, &impl_node)
122 let impl_items = impl_item_list.impl_items().collect::<Vec<_>>(); 121 .iter()
123 122 .map(|i| match i {
124 let missing_items: Vec<_> = trait_items 123 hir::AssocItem::Function(i) => ast::ImplItem::FnDef(i.source(ctx.db).value),
125 .filter(|t| def_name(t).is_some()) 124 hir::AssocItem::TypeAlias(i) => ast::ImplItem::TypeAliasDef(i.source(ctx.db).value),
125 hir::AssocItem::Const(i) => ast::ImplItem::ConstDef(i.source(ctx.db).value),
126 })
127 .filter(|t| def_name(&t).is_some())
126 .filter(|t| match t { 128 .filter(|t| match t {
127 ast::ImplItem::FnDef(def) => match mode { 129 ast::ImplItem::FnDef(def) => match mode {
128 AddMissingImplMembersMode::DefaultMethodsOnly => def.body().is_some(), 130 AddMissingImplMembersMode::DefaultMethodsOnly => def.body().is_some(),
@@ -130,8 +132,8 @@ fn add_missing_impl_members_inner(
130 }, 132 },
131 _ => mode == AddMissingImplMembersMode::NoDefaultMethods, 133 _ => mode == AddMissingImplMembersMode::NoDefaultMethods,
132 }) 134 })
133 .filter(|t| impl_items.iter().all(|i| def_name(i) != def_name(t))) 135 .collect::<Vec<_>>();
134 .collect(); 136
135 if missing_items.is_empty() { 137 if missing_items.is_empty() {
136 return None; 138 return None;
137 } 139 }
@@ -177,27 +179,6 @@ fn add_body(fn_def: ast::FnDef) -> ast::FnDef {
177 } 179 }
178} 180}
179 181
180/// Given an `ast::ImplBlock`, resolves the target trait (the one being
181/// implemented) to a `ast::TraitDef`.
182fn resolve_target_trait_def(
183 db: &impl HirDatabase,
184 analyzer: &hir::SourceAnalyzer,
185 impl_block: &ast::ImplBlock,
186) -> Option<(hir::Trait, ast::TraitDef)> {
187 let ast_path = impl_block
188 .target_trait()
189 .map(|it| it.syntax().clone())
190 .and_then(ast::PathType::cast)?
191 .path()?;
192
193 match analyzer.resolve_path(db, &ast_path) {
194 Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => {
195 Some((def, def.source(db).value))
196 }
197 _ => None,
198 }
199}
200
201#[cfg(test)] 182#[cfg(test)]
202mod tests { 183mod tests {
203 use super::*; 184 use super::*;
diff --git a/crates/ra_assists/src/lib.rs b/crates/ra_assists/src/lib.rs
index 828a8e9e8..cb124eaf0 100644
--- a/crates/ra_assists/src/lib.rs
+++ b/crates/ra_assists/src/lib.rs
@@ -9,7 +9,7 @@ mod assist_ctx;
9mod marks; 9mod marks;
10#[cfg(test)] 10#[cfg(test)]
11mod doc_tests; 11mod doc_tests;
12mod utils; 12pub mod utils;
13pub mod ast_transform; 13pub mod ast_transform;
14 14
15use ra_db::FileRange; 15use ra_db::FileRange;
diff --git a/crates/ra_assists/src/utils.rs b/crates/ra_assists/src/utils.rs
index 0d5722295..6ff44c95c 100644
--- a/crates/ra_assists/src/utils.rs
+++ b/crates/ra_assists/src/utils.rs
@@ -1,10 +1,81 @@
1//! Assorted functions shared by several assists. 1//! Assorted functions shared by several assists.
2 2
3use ra_syntax::{ 3use ra_syntax::{
4 ast::{self, make}, 4 ast::{self, make, NameOwner},
5 T, 5 AstNode, T,
6}; 6};
7 7
8use hir::db::HirDatabase;
9use rustc_hash::FxHashSet;
10
11pub fn get_missing_impl_items(
12 db: &impl HirDatabase,
13 analyzer: &hir::SourceAnalyzer,
14 impl_block: &ast::ImplBlock,
15) -> Vec<hir::AssocItem> {
16 // Names must be unique between constants and functions. However, type aliases
17 // may share the same name as a function or constant.
18 let mut impl_fns_consts = FxHashSet::default();
19 let mut impl_type = FxHashSet::default();
20
21 if let Some(item_list) = impl_block.item_list() {
22 for item in item_list.impl_items() {
23 match item {
24 ast::ImplItem::FnDef(f) => {
25 if let Some(n) = f.name() {
26 impl_fns_consts.insert(n.syntax().to_string());
27 }
28 }
29
30 ast::ImplItem::TypeAliasDef(t) => {
31 if let Some(n) = t.name() {
32 impl_type.insert(n.syntax().to_string());
33 }
34 }
35
36 ast::ImplItem::ConstDef(c) => {
37 if let Some(n) = c.name() {
38 impl_fns_consts.insert(n.syntax().to_string());
39 }
40 }
41 }
42 }
43 }
44
45 resolve_target_trait(db, analyzer, impl_block).map_or(vec![], |target_trait| {
46 target_trait
47 .items(db)
48 .iter()
49 .filter(|i| match i {
50 hir::AssocItem::Function(f) => !impl_fns_consts.contains(&f.name(db).to_string()),
51 hir::AssocItem::TypeAlias(t) => !impl_type.contains(&t.name(db).to_string()),
52 hir::AssocItem::Const(c) => c
53 .name(db)
54 .map(|n| !impl_fns_consts.contains(&n.to_string()))
55 .unwrap_or_default(),
56 })
57 .cloned()
58 .collect()
59 })
60}
61
62pub(crate) fn resolve_target_trait(
63 db: &impl HirDatabase,
64 analyzer: &hir::SourceAnalyzer,
65 impl_block: &ast::ImplBlock,
66) -> Option<hir::Trait> {
67 let ast_path = impl_block
68 .target_trait()
69 .map(|it| it.syntax().clone())
70 .and_then(ast::PathType::cast)?
71 .path()?;
72
73 match analyzer.resolve_path(db, &ast_path) {
74 Some(hir::PathResolution::Def(hir::ModuleDef::Trait(def))) => Some(def),
75 _ => None,
76 }
77}
78
8pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr { 79pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
9 if let Some(expr) = invert_special_case(&expr) { 80 if let Some(expr) = invert_special_case(&expr) {
10 return expr; 81 return expr;
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml
index bcd408421..53d4876f6 100644
--- a/crates/ra_cli/Cargo.toml
+++ b/crates/ra_cli/Cargo.toml
@@ -6,8 +6,10 @@ authors = ["rust-analyzer developers"]
6publish = false 6publish = false
7 7
8[dependencies] 8[dependencies]
9itertools = "0.8.0"
9pico-args = "0.3.0" 10pico-args = "0.3.0"
10env_logger = { version = "0.7.1", default-features = false } 11env_logger = { version = "0.7.1", default-features = false }
12rand = { version = "0.7.0", features = ["small_rng"] }
11 13
12ra_syntax = { path = "../ra_syntax" } 14ra_syntax = { path = "../ra_syntax" }
13ra_ide = { path = "../ra_ide" } 15ra_ide = { path = "../ra_ide" }
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs
index 5485a38ff..4835a68ce 100644
--- a/crates/ra_cli/src/analysis_bench.rs
+++ b/crates/ra_cli/src/analysis_bench.rs
@@ -2,6 +2,7 @@
2 2
3use std::{ 3use std::{
4 path::{Path, PathBuf}, 4 path::{Path, PathBuf},
5 str::FromStr,
5 sync::Arc, 6 sync::Arc,
6 time::Instant, 7 time::Instant,
7}; 8};
@@ -14,12 +15,35 @@ use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol};
14 15
15use crate::Result; 16use crate::Result;
16 17
18pub(crate) struct Position {
19 path: PathBuf,
20 line: u32,
21 column: u32,
22}
23
24impl FromStr for Position {
25 type Err = Box<dyn std::error::Error + Send + Sync>;
26 fn from_str(s: &str) -> Result<Self> {
27 let (path_line, column) = rsplit_at_char(s, ':')?;
28 let (path, line) = rsplit_at_char(path_line, ':')?;
29 Ok(Position { path: path.into(), line: line.parse()?, column: column.parse()? })
30 }
31}
32
33fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
34 let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
35 Ok((&s[..idx], &s[idx + 1..]))
36}
37
17pub(crate) enum Op { 38pub(crate) enum Op {
18 Highlight { path: PathBuf }, 39 Highlight { path: PathBuf },
19 Complete { path: PathBuf, line: u32, column: u32 }, 40 Complete(Position),
41 GotoDef(Position),
20} 42}
21 43
22pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> { 44pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
45 ra_prof::init();
46
23 let start = Instant::now(); 47 let start = Instant::now();
24 eprint!("loading: "); 48 eprint!("loading: ");
25 let (mut host, roots) = ra_batch::load_cargo(path)?; 49 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -29,7 +53,7 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
29 let file_id = { 53 let file_id = {
30 let path = match &op { 54 let path = match &op {
31 Op::Highlight { path } => path, 55 Op::Highlight { path } => path,
32 Op::Complete { path, .. } => path, 56 Op::Complete(pos) | Op::GotoDef(pos) => &pos.path,
33 }; 57 };
34 let path = std::env::current_dir()?.join(path).canonicalize()?; 58 let path = std::env::current_dir()?.join(path).canonicalize()?;
35 roots 59 roots
@@ -49,7 +73,7 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
49 .ok_or_else(|| format!("Can't find {:?}", path))? 73 .ok_or_else(|| format!("Can't find {:?}", path))?
50 }; 74 };
51 75
52 match op { 76 match &op {
53 Op::Highlight { .. } => { 77 Op::Highlight { .. } => {
54 let res = do_work(&mut host, file_id, |analysis| { 78 let res = do_work(&mut host, file_id, |analysis| {
55 analysis.diagnostics(file_id).unwrap(); 79 analysis.diagnostics(file_id).unwrap();
@@ -59,16 +83,30 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
59 println!("\n{}", res); 83 println!("\n{}", res);
60 } 84 }
61 } 85 }
62 Op::Complete { line, column, .. } => { 86 Op::Complete(pos) | Op::GotoDef(pos) => {
87 let is_completion = match op {
88 Op::Complete(..) => true,
89 _ => false,
90 };
91
63 let offset = host 92 let offset = host
64 .analysis() 93 .analysis()
65 .file_line_index(file_id)? 94 .file_line_index(file_id)?
66 .offset(LineCol { line, col_utf16: column }); 95 .offset(LineCol { line: pos.line - 1, col_utf16: pos.column });
67 let file_postion = FilePosition { file_id, offset }; 96 let file_postion = FilePosition { file_id, offset };
68 97
69 let res = do_work(&mut host, file_id, |analysis| analysis.completions(file_postion)); 98 if is_completion {
70 if verbose { 99 let res =
71 println!("\n{:#?}", res); 100 do_work(&mut host, file_id, |analysis| analysis.completions(file_postion));
101 if verbose {
102 println!("\n{:#?}", res);
103 }
104 } else {
105 let res =
106 do_work(&mut host, file_id, |analysis| analysis.goto_definition(file_postion));
107 if verbose {
108 println!("\n{:#?}", res);
109 }
72 } 110 }
73 } 111 }
74 } 112 }
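Both new benchmark ops (`--complete`, `--goto-def`) take a position in `path:line:column` form, parsed right-to-left so paths that contain `:` keep working. A minimal standalone sketch of that parsing, mirroring `Position::from_str` and `rsplit_at_char` from the hunk above (simplified error type, illustrative path):

// Sketch of the `path:line:column` parsing used by the benchmark ops.
use std::path::PathBuf;

fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str), String> {
    // Split on the right-most occurrence so the path part may contain `:`.
    let idx = s.rfind(c).ok_or_else(|| format!("no `{}` in {}", c, s))?;
    Ok((&s[..idx], &s[idx + 1..]))
}

struct Position {
    path: PathBuf,
    line: u32,
    column: u32,
}

fn parse_position(s: &str) -> Result<Position, String> {
    let (path_line, column) = rsplit_at_char(s, ':')?;
    let (path, line) = rsplit_at_char(path_line, ':')?;
    Ok(Position {
        path: path.into(),
        line: line.parse().map_err(|e| format!("{}", e))?,
        column: column.parse().map_err(|e| format!("{}", e))?,
    })
}

fn main() {
    // Illustrative input; any workspace-relative file works.
    let pos = parse_position("crates/ra_ide/src/lib.rs:80:4").unwrap();
    assert_eq!(pos.line, 80);
    assert_eq!(pos.column, 4);
    println!("{:?}:{}:{}", pos.path, pos.line, pos.column);
}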
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index 833235bff..6d2dd34c6 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -2,6 +2,9 @@
2 2
3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; 3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
4 4
5use itertools::Itertools;
6use rand::{seq::SliceRandom, thread_rng};
7
5use hir::{ 8use hir::{
6 db::{DefDatabase, HirDatabase}, 9 db::{DefDatabase, HirDatabase},
7 AssocItem, Crate, HasSource, HirDisplay, ModuleDef, 10 AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
@@ -19,6 +22,7 @@ pub fn run(
19 path: &Path, 22 path: &Path,
20 only: Option<&str>, 23 only: Option<&str>,
21 with_deps: bool, 24 with_deps: bool,
25 randomize: bool,
22) -> Result<()> { 26) -> Result<()> {
23 let db_load_time = Instant::now(); 27 let db_load_time = Instant::now();
24 let (mut host, roots) = ra_batch::load_cargo(path)?; 28 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -41,7 +45,11 @@ pub fn run(
41 }) 45 })
42 .collect::<HashSet<_>>(); 46 .collect::<HashSet<_>>();
43 47
44 for krate in Crate::all(db) { 48 let mut krates = Crate::all(db);
49 if randomize {
50 krates.shuffle(&mut thread_rng());
51 }
52 for krate in krates {
45 let module = krate.root_module(db).expect("crate without root module"); 53 let module = krate.root_module(db).expect("crate without root module");
46 let file_id = module.definition_source(db).file_id; 54 let file_id = module.definition_source(db).file_id;
47 if members.contains(&db.file_source_root(file_id.original_file(db))) { 55 if members.contains(&db.file_source_root(file_id.original_file(db))) {
@@ -50,6 +58,10 @@ pub fn run(
50 } 58 }
51 } 59 }
52 60
61 if randomize {
62 visit_queue.shuffle(&mut thread_rng());
63 }
64
53 println!("Crates in this dir: {}", num_crates); 65 println!("Crates in this dir: {}", num_crates);
54 let mut num_decls = 0; 66 let mut num_decls = 0;
55 let mut funcs = Vec::new(); 67 let mut funcs = Vec::new();
@@ -79,10 +91,14 @@ pub fn run(
79 println!("Total functions: {}", funcs.len()); 91 println!("Total functions: {}", funcs.len());
80 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage()); 92 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage());
81 93
94 if randomize {
95 funcs.shuffle(&mut thread_rng());
96 }
97
82 let inference_time = Instant::now(); 98 let inference_time = Instant::now();
83 let mut bar = match verbosity { 99 let mut bar = match verbosity {
84 Verbosity::Verbose | Verbosity::Normal => ProgressReport::new(funcs.len() as u64), 100 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
85 Verbosity::Quiet => ProgressReport::hidden(), 101 _ => ProgressReport::new(funcs.len() as u64),
86 }; 102 };
87 103
88 bar.tick(); 104 bar.tick();
@@ -92,7 +108,20 @@ pub fn run(
92 let mut num_type_mismatches = 0; 108 let mut num_type_mismatches = 0;
93 for f in funcs { 109 for f in funcs {
94 let name = f.name(db); 110 let name = f.name(db);
95 let mut msg = format!("processing: {}", name); 111 let full_name = f
112 .module(db)
113 .path_to_root(db)
114 .into_iter()
115 .rev()
116 .filter_map(|it| it.name(db))
117 .chain(Some(f.name(db)))
118 .join("::");
119 if let Some(only_name) = only {
120 if name.to_string() != only_name && full_name != only_name {
121 continue;
122 }
123 }
124 let mut msg = format!("processing: {}", full_name);
96 if verbosity.is_verbose() { 125 if verbosity.is_verbose() {
97 let src = f.source(db); 126 let src = f.source(db);
98 let original_file = src.file_id.original_file(db); 127 let original_file = src.file_id.original_file(db);
@@ -100,15 +129,15 @@ pub fn run(
100 let syntax_range = src.value.syntax().text_range(); 129 let syntax_range = src.value.syntax().text_range();
101 write!(msg, " ({:?} {})", path, syntax_range).unwrap(); 130 write!(msg, " ({:?} {})", path, syntax_range).unwrap();
102 } 131 }
103 bar.set_message(&msg); 132 if verbosity.is_spammy() {
104 if let Some(only_name) = only { 133 bar.println(format!("{}", msg));
105 if name.to_string() != only_name {
106 continue;
107 }
108 } 134 }
135 bar.set_message(&msg);
109 let f_id = FunctionId::from(f); 136 let f_id = FunctionId::from(f);
110 let body = db.body(f_id.into()); 137 let body = db.body(f_id.into());
111 let inference_result = db.infer(f_id.into()); 138 let inference_result = db.infer(f_id.into());
139 let (previous_exprs, previous_unknown, previous_partially_unknown) =
140 (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
112 for (expr_id, _) in body.exprs.iter() { 141 for (expr_id, _) in body.exprs.iter() {
113 let ty = &inference_result[expr_id]; 142 let ty = &inference_result[expr_id];
114 num_exprs += 1; 143 num_exprs += 1;
@@ -125,6 +154,33 @@ pub fn run(
125 num_exprs_partially_unknown += 1; 154 num_exprs_partially_unknown += 1;
126 } 155 }
127 } 156 }
157 if only.is_some() && verbosity.is_spammy() {
158 // in super-verbose mode for just one function, we print every single expression
159 let (_, sm) = db.body_with_source_map(f_id.into());
160 let src = sm.expr_syntax(expr_id);
161 if let Some(src) = src {
162 let original_file = src.file_id.original_file(db);
163 let line_index = host.analysis().file_line_index(original_file).unwrap();
164 let text_range = src.value.either(
165 |it| it.syntax_node_ptr().range(),
166 |it| it.syntax_node_ptr().range(),
167 );
168 let (start, end) = (
169 line_index.line_col(text_range.start()),
170 line_index.line_col(text_range.end()),
171 );
172 bar.println(format!(
173 "{}:{}-{}:{}: {}",
174 start.line + 1,
175 start.col_utf16,
176 end.line + 1,
177 end.col_utf16,
178 ty.display(db)
179 ));
180 } else {
181 bar.println(format!("unknown location: {}", ty.display(db)));
182 }
183 }
128 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { 184 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
129 num_type_mismatches += 1; 185 num_type_mismatches += 1;
130 if verbosity.is_verbose() { 186 if verbosity.is_verbose() {
@@ -164,6 +220,15 @@ pub fn run(
164 } 220 }
165 } 221 }
166 } 222 }
223 if verbosity.is_spammy() {
224 bar.println(format!(
225 "In {}: {} exprs, {} unknown, {} partial",
226 full_name,
227 num_exprs - previous_exprs,
228 num_exprs_unknown - previous_unknown,
229 num_exprs_partially_unknown - previous_partially_unknown
230 ));
231 }
167 bar.inc(1); 232 bar.inc(1);
168 } 233 }
169 bar.finish_and_clear(); 234 bar.finish_and_clear();
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 806612c2c..750cbab86 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -16,6 +16,7 @@ type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
16 16
17#[derive(Clone, Copy)] 17#[derive(Clone, Copy)]
18pub enum Verbosity { 18pub enum Verbosity {
19 Spammy,
19 Verbose, 20 Verbose,
20 Normal, 21 Normal,
21 Quiet, 22 Quiet,
@@ -24,7 +25,13 @@ pub enum Verbosity {
24impl Verbosity { 25impl Verbosity {
25 fn is_verbose(self) -> bool { 26 fn is_verbose(self) -> bool {
26 match self { 27 match self {
27 Verbosity::Verbose => true, 28 Verbosity::Verbose | Verbosity::Spammy => true,
29 _ => false,
30 }
31 }
32 fn is_spammy(self) -> bool {
33 match self {
34 Verbosity::Spammy => true,
28 _ => false, 35 _ => false,
29 } 36 }
30 } 37 }
@@ -86,14 +93,18 @@ fn main() -> Result<()> {
86 return Ok(()); 93 return Ok(());
87 } 94 }
88 let verbosity = match ( 95 let verbosity = match (
96 matches.contains(["-vv", "--spammy"]),
89 matches.contains(["-v", "--verbose"]), 97 matches.contains(["-v", "--verbose"]),
90 matches.contains(["-q", "--quiet"]), 98 matches.contains(["-q", "--quiet"]),
91 ) { 99 ) {
92 (false, false) => Verbosity::Normal, 100 (true, _, true) => Err("Invalid flags: -q conflicts with -vv")?,
93 (false, true) => Verbosity::Quiet, 101 (true, _, false) => Verbosity::Spammy,
94 (true, false) => Verbosity::Verbose, 102 (false, false, false) => Verbosity::Normal,
95 (true, true) => Err("Invalid flags: -q conflicts with -v")?, 103 (false, false, true) => Verbosity::Quiet,
104 (false, true, false) => Verbosity::Verbose,
105 (false, true, true) => Err("Invalid flags: -q conflicts with -v")?,
96 }; 106 };
107 let randomize = matches.contains("--randomize");
97 let memory_usage = matches.contains("--memory-usage"); 108 let memory_usage = matches.contains("--memory-usage");
98 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; 109 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?;
99 let with_deps: bool = matches.contains("--with-deps"); 110 let with_deps: bool = matches.contains("--with-deps");
@@ -111,6 +122,7 @@ fn main() -> Result<()> {
111 path.as_ref(), 122 path.as_ref(),
112 only.as_ref().map(String::as_ref), 123 only.as_ref().map(String::as_ref),
113 with_deps, 124 with_deps,
125 randomize,
114 )?; 126 )?;
115 } 127 }
116 "analysis-bench" => { 128 "analysis-bench" => {
@@ -120,25 +132,16 @@ fn main() -> Result<()> {
120 } 132 }
121 let verbose = matches.contains(["-v", "--verbose"]); 133 let verbose = matches.contains(["-v", "--verbose"]);
122 let path: String = matches.opt_value_from_str("--path")?.unwrap_or_default(); 134 let path: String = matches.opt_value_from_str("--path")?.unwrap_or_default();
123 let highlight_path = matches.opt_value_from_str("--highlight")?; 135 let highlight_path: Option<String> = matches.opt_value_from_str("--highlight")?;
124 let complete_path = matches.opt_value_from_str("--complete")?; 136 let complete_path: Option<String> = matches.opt_value_from_str("--complete")?;
125 if highlight_path.is_some() && complete_path.is_some() { 137 let goto_def_path: Option<String> = matches.opt_value_from_str("--goto-def")?;
126 panic!("either --highlight or --complete must be set, not both") 138 let op = match (highlight_path, complete_path, goto_def_path) {
127 } 139 (Some(path), None, None) => analysis_bench::Op::Highlight { path: path.into() },
128 let op = if let Some(path) = highlight_path { 140 (None, Some(position), None) => analysis_bench::Op::Complete(position.parse()?),
129 let path: String = path; 141 (None, None, Some(position)) => analysis_bench::Op::GotoDef(position.parse()?),
130 analysis_bench::Op::Highlight { path: path.into() } 142 _ => panic!(
131 } else if let Some(path_line_col) = complete_path { 143 "exactly one of `--highlight`, `--complete` or `--goto-def` must be set"
132 let path_line_col: String = path_line_col; 144 ),
133 let (path_line, column) = rsplit_at_char(path_line_col.as_str(), ':')?;
134 let (path, line) = rsplit_at_char(path_line, ':')?;
135 analysis_bench::Op::Complete {
136 path: path.into(),
137 line: line.parse()?,
138 column: column.parse()?,
139 }
140 } else {
141 panic!("either --highlight or --complete must be set")
142 }; 145 };
143 matches.finish().or_else(handle_extra_flags)?; 146 matches.finish().or_else(handle_extra_flags)?;
144 analysis_bench::run(verbose, path.as_ref(), op)?; 147 analysis_bench::run(verbose, path.as_ref(), op)?;
@@ -171,8 +174,3 @@ fn read_stdin() -> Result<String> {
171 std::io::stdin().read_to_string(&mut buff)?; 174 std::io::stdin().read_to_string(&mut buff)?;
172 Ok(buff) 175 Ok(buff)
173} 176}
174
175fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
176 let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
177 Ok((&s[..idx], &s[idx + 1..]))
178}
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index bb9a35c5d..94d5b4cfd 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -15,11 +15,9 @@ use hir_def::{
15 }, 15 },
16 expr::{ExprId, PatId}, 16 expr::{ExprId, PatId},
17 resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs}, 17 resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
18 DefWithBodyId, TraitId, 18 AsMacroCall, DefWithBodyId, TraitId,
19};
20use hir_expand::{
21 hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
22}; 19};
20use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
23use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; 21use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
24use ra_syntax::{ 22use ra_syntax::{
25 ast::{self, AstNode}, 23 ast::{self, AstNode},
@@ -363,12 +361,10 @@ impl SourceAnalyzer {
363 db: &impl HirDatabase, 361 db: &impl HirDatabase,
364 macro_call: InFile<&ast::MacroCall>, 362 macro_call: InFile<&ast::MacroCall>,
365 ) -> Option<Expansion> { 363 ) -> Option<Expansion> {
366 let def = self.resolve_macro_call(db, macro_call)?.id; 364 let macro_call_id = macro_call.as_call_id(db, |path| {
367 let ast_id = AstId::new( 365 self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into())
368 macro_call.file_id, 366 })?;
369 db.ast_id_map(macro_call.file_id).ast_id(macro_call.value), 367 Some(Expansion { macro_call_id })
370 );
371 Some(Expansion { macro_call_id: def.as_call_id(db, MacroCallKind::FnLike(ast_id)) })
372 } 368 }
373} 369}
374 370
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index 142c52d35..010d35e55 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -7,9 +7,7 @@ use std::{mem, ops::Index, sync::Arc};
7 7
8use drop_bomb::DropBomb; 8use drop_bomb::DropBomb;
9use either::Either; 9use either::Either;
10use hir_expand::{ 10use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId};
11 ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroCallKind, MacroDefId,
12};
13use ra_arena::{map::ArenaMap, Arena}; 11use ra_arena::{map::ArenaMap, Arena};
14use ra_prof::profile; 12use ra_prof::profile;
15use ra_syntax::{ast, AstNode, AstPtr}; 13use ra_syntax::{ast, AstNode, AstPtr};
@@ -23,7 +21,7 @@ use crate::{
23 nameres::CrateDefMap, 21 nameres::CrateDefMap,
24 path::{ModPath, Path}, 22 path::{ModPath, Path},
25 src::HasSource, 23 src::HasSource,
26 DefWithBodyId, HasModule, Lookup, ModuleId, 24 AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
27}; 25};
28 26
29pub(crate) struct Expander { 27pub(crate) struct Expander {
@@ -51,30 +49,26 @@ impl Expander {
51 db: &DB, 49 db: &DB,
52 macro_call: ast::MacroCall, 50 macro_call: ast::MacroCall,
53 ) -> Option<(Mark, T)> { 51 ) -> Option<(Mark, T)> {
54 let ast_id = AstId::new( 52 let macro_call = InFile::new(self.current_file_id, &macro_call);
55 self.current_file_id, 53
56 db.ast_id_map(self.current_file_id).ast_id(&macro_call), 54 if let Some(call_id) =
57 ); 55 macro_call.as_call_id(db, |path| self.resolve_path_as_macro(db, &path))
58 56 {
59 if let Some(path) = macro_call.path().and_then(|path| self.parse_mod_path(path)) { 57 let file_id = call_id.as_file();
60 if let Some(def) = self.resolve_path_as_macro(db, &path) { 58 if let Some(node) = db.parse_or_expand(file_id) {
61 let call_id = def.as_call_id(db, MacroCallKind::FnLike(ast_id)); 59 if let Some(expr) = T::cast(node) {
62 let file_id = call_id.as_file(); 60 log::debug!("macro expansion {:#?}", expr.syntax());
63 if let Some(node) = db.parse_or_expand(file_id) { 61
64 if let Some(expr) = T::cast(node) { 62 let mark = Mark {
65 log::debug!("macro expansion {:#?}", expr.syntax()); 63 file_id: self.current_file_id,
66 64 ast_id_map: mem::take(&mut self.ast_id_map),
67 let mark = Mark { 65 bomb: DropBomb::new("expansion mark dropped"),
68 file_id: self.current_file_id, 66 };
69 ast_id_map: mem::take(&mut self.ast_id_map), 67 self.hygiene = Hygiene::new(db, file_id);
70 bomb: DropBomb::new("expansion mark dropped"), 68 self.current_file_id = file_id;
71 }; 69 self.ast_id_map = db.ast_id_map(file_id);
72 self.hygiene = Hygiene::new(db, file_id); 70
73 self.current_file_id = file_id; 71 return Some((mark, expr));
74 self.ast_id_map = db.ast_id_map(file_id);
75
76 return Some((mark, expr));
77 }
78 } 72 }
79 } 73 }
80 } 74 }
@@ -99,10 +93,6 @@ impl Expander {
99 Path::from_src(path, &self.hygiene) 93 Path::from_src(path, &self.hygiene)
100 } 94 }
101 95
102 fn parse_mod_path(&mut self, path: ast::Path) -> Option<ModPath> {
103 ModPath::from_src(path, &self.hygiene)
104 }
105
106 fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> { 96 fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> {
107 self.crate_def_map 97 self.crate_def_map
108 .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other) 98 .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other)
diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs
index feb3a300d..aa0b558b8 100644
--- a/crates/ra_hir_def/src/lib.rs
+++ b/crates/ra_hir_def/src/lib.rs
@@ -46,7 +46,10 @@ mod marks;
46 46
47use std::hash::Hash; 47use std::hash::Hash;
48 48
49use hir_expand::{ast_id_map::FileAstId, AstId, HirFileId, InFile, MacroDefId}; 49use hir_expand::{
50 ast_id_map::FileAstId, db::AstDatabase, hygiene::Hygiene, AstId, HirFileId, InFile,
51 MacroCallId, MacroCallKind, MacroDefId,
52};
50use ra_arena::{impl_arena_id, RawId}; 53use ra_arena::{impl_arena_id, RawId};
51use ra_db::{impl_intern_key, salsa, CrateId}; 54use ra_db::{impl_intern_key, salsa, CrateId};
52use ra_syntax::{ast, AstNode}; 55use ra_syntax::{ast, AstNode};
@@ -413,3 +416,61 @@ impl HasModule for StaticLoc {
413 self.container.module(db) 416 self.container.module(db)
414 } 417 }
415} 418}
419
420/// A helper trait for converting to MacroCallId
421pub trait AsMacroCall {
422 fn as_call_id(
423 &self,
424 db: &(impl db::DefDatabase + AstDatabase),
425 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
426 ) -> Option<MacroCallId>;
427}
428
429impl AsMacroCall for InFile<&ast::MacroCall> {
430 fn as_call_id(
431 &self,
432 db: &(impl db::DefDatabase + AstDatabase),
433 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
434 ) -> Option<MacroCallId> {
435 let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
436 let h = Hygiene::new(db, self.file_id);
437 let path = path::ModPath::from_src(self.value.path()?, &h)?;
438
439 AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, resolver)
440 }
441}
442
443/// Helper wrapper for `AstId` with `ModPath`
444#[derive(Clone, Debug, Eq, PartialEq)]
445struct AstIdWithPath<T: ast::AstNode> {
446 pub ast_id: AstId<T>,
447 pub path: path::ModPath,
448}
449
450impl<T: ast::AstNode> AstIdWithPath<T> {
451 pub fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
452 AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
453 }
454}
455
456impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
457 fn as_call_id(
458 &self,
459 db: &impl AstDatabase,
460 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
461 ) -> Option<MacroCallId> {
462 let def = resolver(self.path.clone())?;
463 Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id.clone())))
464 }
465}
466
467impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
468 fn as_call_id(
469 &self,
470 db: &impl AstDatabase,
471 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
472 ) -> Option<MacroCallId> {
473 let def = resolver(self.path.clone())?;
474 Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id.clone())))
475 }
476}
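The `AsMacroCall` trait above factors the `AstId`/path bookkeeping out of each call site and lets the caller inject name resolution as a closure (legacy scope in body lowering, module scope in the def collector, the resolver in `SourceAnalyzer`). A toy sketch of that shape, with placeholder types rather than the real hir_def ones, just to show the closure-injection pattern:

// Placeholder types; only the call-site shape mirrors the new trait.
#[derive(Clone, Debug, PartialEq)]
struct ModPath(String);

#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroDefId(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroCallId(u32);

trait AsMacroCall {
    fn as_call_id(&self, resolver: impl Fn(ModPath) -> Option<MacroDefId>) -> Option<MacroCallId>;
}

struct AstIdWithPath {
    ast_id: u32,
    path: ModPath,
}

impl AsMacroCall for AstIdWithPath {
    fn as_call_id(&self, resolver: impl Fn(ModPath) -> Option<MacroDefId>) -> Option<MacroCallId> {
        // Resolution strategy belongs to the caller; id construction stays here.
        let def = resolver(self.path.clone())?;
        Some(MacroCallId(def.0 * 1000 + self.ast_id))
    }
}

fn main() {
    let call = AstIdWithPath { ast_id: 7, path: ModPath("vec".to_string()) };
    // Each call site supplies its own resolver (legacy scope, module scope, ...).
    let id = call.as_call_id(|p| if p.0 == "vec" { Some(MacroDefId(1)) } else { None });
    assert_eq!(id, Some(MacroCallId(1007)));
}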
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index 6352c71ef..51c65a5d7 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -7,7 +7,7 @@ use hir_expand::{
7 builtin_derive::find_builtin_derive, 7 builtin_derive::find_builtin_derive,
8 builtin_macro::find_builtin_macro, 8 builtin_macro::find_builtin_macro,
9 name::{name, AsName, Name}, 9 name::{name, AsName, Name},
10 HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, 10 HirFileId, MacroCallId, MacroDefId, MacroDefKind,
11}; 11};
12use ra_cfg::CfgOptions; 12use ra_cfg::CfgOptions;
13use ra_db::{CrateId, FileId}; 13use ra_db::{CrateId, FileId};
@@ -25,8 +25,9 @@ use crate::{
25 path::{ImportAlias, ModPath, PathKind}, 25 path::{ImportAlias, ModPath, PathKind},
26 per_ns::PerNs, 26 per_ns::PerNs,
27 visibility::Visibility, 27 visibility::Visibility,
28 AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, 28 AdtId, AsMacroCall, AstId, AstIdWithPath, ConstLoc, ContainerId, EnumLoc, EnumVariantId,
29 LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, 29 FunctionLoc, ImplLoc, Intern, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc,
30 TraitLoc, TypeAliasLoc, UnionLoc,
30}; 31};
31 32
32pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { 33pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
@@ -99,11 +100,16 @@ struct ImportDirective {
99#[derive(Clone, Debug, Eq, PartialEq)] 100#[derive(Clone, Debug, Eq, PartialEq)]
100struct MacroDirective { 101struct MacroDirective {
101 module_id: LocalModuleId, 102 module_id: LocalModuleId,
102 ast_id: AstId<ast::MacroCall>, 103 ast_id: AstIdWithPath<ast::MacroCall>,
103 path: ModPath,
104 legacy: Option<MacroCallId>, 104 legacy: Option<MacroCallId>,
105} 105}
106 106
107#[derive(Clone, Debug, Eq, PartialEq)]
108struct DeriveDirective {
109 module_id: LocalModuleId,
110 ast_id: AstIdWithPath<ast::ModuleItem>,
111}
112
107/// Walks the tree of module recursively 113/// Walks the tree of module recursively
108struct DefCollector<'a, DB> { 114struct DefCollector<'a, DB> {
109 db: &'a DB, 115 db: &'a DB,
@@ -112,7 +118,7 @@ struct DefCollector<'a, DB> {
112 unresolved_imports: Vec<ImportDirective>, 118 unresolved_imports: Vec<ImportDirective>,
113 resolved_imports: Vec<ImportDirective>, 119 resolved_imports: Vec<ImportDirective>,
114 unexpanded_macros: Vec<MacroDirective>, 120 unexpanded_macros: Vec<MacroDirective>,
115 unexpanded_attribute_macros: Vec<(LocalModuleId, AstId<ast::ModuleItem>, ModPath)>, 121 unexpanded_attribute_macros: Vec<DeriveDirective>,
116 mod_dirs: FxHashMap<LocalModuleId, ModDir>, 122 mod_dirs: FxHashMap<LocalModuleId, ModDir>,
117 cfg_options: &'a CfgOptions, 123 cfg_options: &'a CfgOptions,
118} 124}
@@ -146,7 +152,7 @@ where
146 ReachedFixedPoint::Yes => break, 152 ReachedFixedPoint::Yes => break,
147 ReachedFixedPoint::No => i += 1, 153 ReachedFixedPoint::No => i += 1,
148 } 154 }
149 if i == 1000 { 155 if i == 10000 {
150 log::error!("name resolution is stuck"); 156 log::error!("name resolution is stuck");
151 break; 157 break;
152 } 158 }
@@ -515,16 +521,16 @@ where
515 return false; 521 return false;
516 } 522 }
517 523
518 let resolved_res = self.def_map.resolve_path_fp_with_macro( 524 if let Some(call_id) = directive.ast_id.as_call_id(self.db, |path| {
519 self.db, 525 let resolved_res = self.def_map.resolve_path_fp_with_macro(
520 ResolveMode::Other, 526 self.db,
521 directive.module_id, 527 ResolveMode::Other,
522 &directive.path, 528 directive.module_id,
523 BuiltinShadowMode::Module, 529 &path,
524 ); 530 BuiltinShadowMode::Module,
525 531 );
526 if let Some(def) = resolved_res.resolved_def.take_macros() { 532 resolved_res.resolved_def.take_macros()
527 let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(directive.ast_id)); 533 }) {
528 resolved.push((directive.module_id, call_id)); 534 resolved.push((directive.module_id, call_id));
529 res = ReachedFixedPoint::No; 535 res = ReachedFixedPoint::No;
530 return false; 536 return false;
@@ -532,12 +538,11 @@ where
532 538
533 true 539 true
534 }); 540 });
535 attribute_macros.retain(|(module_id, ast_id, path)| { 541 attribute_macros.retain(|directive| {
536 let resolved_res = self.resolve_attribute_macro(path); 542 if let Some(call_id) =
537 543 directive.ast_id.as_call_id(self.db, |path| self.resolve_attribute_macro(&path))
538 if let Some(def) = resolved_res { 544 {
539 let call_id = def.as_call_id(self.db, MacroCallKind::Attr(*ast_id)); 545 resolved.push((directive.module_id, call_id));
540 resolved.push((*module_id, call_id));
541 res = ReachedFixedPoint::No; 546 res = ReachedFixedPoint::No;
542 return false; 547 return false;
543 } 548 }
@@ -833,20 +838,22 @@ where
833 }; 838 };
834 let path = ModPath::from_tt_ident(ident); 839 let path = ModPath::from_tt_ident(ident);
835 840
836 let ast_id = AstId::new(self.file_id, def.kind.ast_id()); 841 let ast_id = AstIdWithPath::new(self.file_id, def.kind.ast_id(), path);
837 self.def_collector.unexpanded_attribute_macros.push((self.module_id, ast_id, path)); 842 self.def_collector
843 .unexpanded_attribute_macros
844 .push(DeriveDirective { module_id: self.module_id, ast_id });
838 } 845 }
839 } 846 }
840 } 847 }
841 848
842 fn collect_macro(&mut self, mac: &raw::MacroData) { 849 fn collect_macro(&mut self, mac: &raw::MacroData) {
843 let ast_id = AstId::new(self.file_id, mac.ast_id); 850 let mut ast_id = AstIdWithPath::new(self.file_id, mac.ast_id, mac.path.clone());
844 851
845 // Case 0: builtin macros 852 // Case 0: builtin macros
846 if mac.builtin { 853 if mac.builtin {
847 if let Some(name) = &mac.name { 854 if let Some(name) = &mac.name {
848 let krate = self.def_collector.def_map.krate; 855 let krate = self.def_collector.def_map.krate;
849 if let Some(macro_id) = find_builtin_macro(name, krate, ast_id) { 856 if let Some(macro_id) = find_builtin_macro(name, krate, ast_id.ast_id) {
850 self.def_collector.define_macro( 857 self.def_collector.define_macro(
851 self.module_id, 858 self.module_id,
852 name.clone(), 859 name.clone(),
@@ -862,7 +869,7 @@ where
862 if is_macro_rules(&mac.path) { 869 if is_macro_rules(&mac.path) {
863 if let Some(name) = &mac.name { 870 if let Some(name) = &mac.name {
864 let macro_id = MacroDefId { 871 let macro_id = MacroDefId {
865 ast_id: Some(ast_id), 872 ast_id: Some(ast_id.ast_id),
866 krate: Some(self.def_collector.def_map.krate), 873 krate: Some(self.def_collector.def_map.krate),
867 kind: MacroDefKind::Declarative, 874 kind: MacroDefKind::Declarative,
868 }; 875 };
@@ -872,15 +879,13 @@ where
872 } 879 }
873 880
874 // Case 2: try to resolve in legacy scope and expand macro_rules 881 // Case 2: try to resolve in legacy scope and expand macro_rules
875 if let Some(macro_def) = mac.path.as_ident().and_then(|name| { 882 if let Some(macro_call_id) = ast_id.as_call_id(self.def_collector.db, |path| {
876 self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) 883 path.as_ident().and_then(|name| {
884 self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
885 })
877 }) { 886 }) {
878 let macro_call_id =
879 macro_def.as_call_id(self.def_collector.db, MacroCallKind::FnLike(ast_id));
880
881 self.def_collector.unexpanded_macros.push(MacroDirective { 887 self.def_collector.unexpanded_macros.push(MacroDirective {
882 module_id: self.module_id, 888 module_id: self.module_id,
883 path: mac.path.clone(),
884 ast_id, 889 ast_id,
885 legacy: Some(macro_call_id), 890 legacy: Some(macro_call_id),
886 }); 891 });
@@ -890,14 +895,12 @@ where
890 895
891 // Case 3: resolve in module scope, expand during name resolution. 896 // Case 3: resolve in module scope, expand during name resolution.
892 // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only. 897 // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only.
893 let mut path = mac.path.clone(); 898 if ast_id.path.is_ident() {
894 if path.is_ident() { 899 ast_id.path.kind = PathKind::Super(0);
895 path.kind = PathKind::Super(0);
896 } 900 }
897 901
898 self.def_collector.unexpanded_macros.push(MacroDirective { 902 self.def_collector.unexpanded_macros.push(MacroDirective {
899 module_id: self.module_id, 903 module_id: self.module_id,
900 path,
901 ast_id, 904 ast_id,
902 legacy: None, 905 legacy: None,
903 }); 906 });
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
index fe05642ae..1dc842f40 100644
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ b/crates/ra_hir_ty/src/infer/unify.rs
@@ -249,6 +249,8 @@ impl InferenceTable {
249 match (ty1, ty2) { 249 match (ty1, ty2) {
250 (Ty::Unknown, _) | (_, Ty::Unknown) => true, 250 (Ty::Unknown, _) | (_, Ty::Unknown) => true,
251 251
252 (Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
253
252 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2))) 254 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
253 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2))) 255 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
254 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2))) 256 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/ra_hir_ty/src/tests/coercion.rs
index fc5ef36a5..42330b269 100644
--- a/crates/ra_hir_ty/src/tests/coercion.rs
+++ b/crates/ra_hir_ty/src/tests/coercion.rs
@@ -526,3 +526,25 @@ fn test() {
526 "### 526 "###
527 ); 527 );
528} 528}
529
530#[test]
531fn coerce_placeholder_ref() {
532 // placeholders should unify, even behind references
533 assert_snapshot!(
534 infer_with_mismatches(r#"
535struct S<T> { t: T }
536impl<TT> S<TT> {
537 fn get(&self) -> &TT {
538 &self.t
539 }
540}
541"#, true),
542 @r###"
543 [51; 55) 'self': &S<TT>
544 [64; 87) '{ ... }': &TT
545 [74; 81) '&self.t': &TT
546 [75; 79) 'self': &S<TT>
547 [75; 81) 'self.t': TT
548 "###
549 );
550}
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 88af61e87..ff8e75b48 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -60,6 +60,9 @@ impl TraitSolver {
60 context.0.db.check_canceled(); 60 context.0.db.check_canceled();
61 let remaining = fuel.get(); 61 let remaining = fuel.get();
62 fuel.set(remaining - 1); 62 fuel.set(remaining - 1);
63 if remaining == 0 {
64 log::debug!("fuel exhausted");
65 }
63 remaining > 0 66 remaining > 0
64 }) 67 })
65 } 68 }
diff --git a/crates/ra_ide/src/completion.rs b/crates/ra_ide/src/completion.rs
index fedc02e14..4bdc6ba23 100644
--- a/crates/ra_ide/src/completion.rs
+++ b/crates/ra_ide/src/completion.rs
@@ -15,6 +15,7 @@ mod complete_path;
15mod complete_scope; 15mod complete_scope;
16mod complete_postfix; 16mod complete_postfix;
17mod complete_macro_in_item_position; 17mod complete_macro_in_item_position;
18mod complete_trait_impl;
18 19
19use ra_db::SourceDatabase; 20use ra_db::SourceDatabase;
20use ra_ide_db::RootDatabase; 21use ra_ide_db::RootDatabase;
@@ -74,5 +75,7 @@ pub(crate) fn completions(db: &RootDatabase, position: FilePosition) -> Option<C
74 complete_pattern::complete_pattern(&mut acc, &ctx); 75 complete_pattern::complete_pattern(&mut acc, &ctx);
75 complete_postfix::complete_postfix(&mut acc, &ctx); 76 complete_postfix::complete_postfix(&mut acc, &ctx);
76 complete_macro_in_item_position::complete_macro_in_item_position(&mut acc, &ctx); 77 complete_macro_in_item_position::complete_macro_in_item_position(&mut acc, &ctx);
78 complete_trait_impl::complete_trait_impl(&mut acc, &ctx);
79
77 Some(acc) 80 Some(acc)
78} 81}
diff --git a/crates/ra_ide/src/completion/complete_trait_impl.rs b/crates/ra_ide/src/completion/complete_trait_impl.rs
new file mode 100644
index 000000000..6ff10c017
--- /dev/null
+++ b/crates/ra_ide/src/completion/complete_trait_impl.rs
@@ -0,0 +1,436 @@
1//! Completion for associated items in a trait implementation.
2//!
3//! This module adds the completion items related to implementing associated
4//! items within a `impl Trait for Struct` block. The current context node
5//! must be within either a `FN_DEF`, `TYPE_ALIAS_DEF`, or `CONST_DEF` node
6//! and an direct child of an `IMPL_BLOCK`.
7//!
8//! # Examples
9//!
10//! Considering the following trait `impl`:
11//!
12//! ```ignore
13//! trait SomeTrait {
14//! fn foo();
15//! }
16//!
17//! impl SomeTrait for () {
18//! fn f<|>
19//! }
20//! ```
21//!
22//! may result in the completion of the following method:
23//!
24//! ```ignore
25//! # trait SomeTrait {
26//! # fn foo();
27//! # }
28//!
29//! impl SomeTrait for () {
30//! fn foo() {}<|>
31//! }
32//! ```
33
34use hir::{self, Docs, HasSource};
35use ra_assists::utils::get_missing_impl_items;
36use ra_syntax::{
37 ast::{self, edit},
38 AstNode, SyntaxKind, SyntaxNode, TextRange,
39};
40use ra_text_edit::TextEdit;
41
42use crate::{
43 completion::{
44 CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
45 },
46 display::FunctionSignature,
47};
48
49pub(crate) fn complete_trait_impl(acc: &mut Completions, ctx: &CompletionContext) {
50 let trigger = ctx.token.ancestors().find(|p| match p.kind() {
51 SyntaxKind::FN_DEF
52 | SyntaxKind::TYPE_ALIAS_DEF
53 | SyntaxKind::CONST_DEF
54 | SyntaxKind::BLOCK_EXPR => true,
55 _ => false,
56 });
57
58 let impl_block = trigger
59 .as_ref()
60 .and_then(|node| node.parent())
61 .and_then(|node| node.parent())
62 .and_then(|node| ast::ImplBlock::cast(node));
63
64 if let (Some(trigger), Some(impl_block)) = (trigger, impl_block) {
65 match trigger.kind() {
66 SyntaxKind::FN_DEF => {
67 for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
68 .iter()
69 .filter_map(|item| match item {
70 hir::AssocItem::Function(fn_item) => Some(fn_item),
71 _ => None,
72 })
73 {
74 add_function_impl(&trigger, acc, ctx, &missing_fn);
75 }
76 }
77
78 SyntaxKind::TYPE_ALIAS_DEF => {
79 for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
80 .iter()
81 .filter_map(|item| match item {
82 hir::AssocItem::TypeAlias(type_item) => Some(type_item),
83 _ => None,
84 })
85 {
86 add_type_alias_impl(&trigger, acc, ctx, &missing_fn);
87 }
88 }
89
90 SyntaxKind::CONST_DEF => {
91 for missing_fn in get_missing_impl_items(ctx.db, &ctx.analyzer, &impl_block)
92 .iter()
93 .filter_map(|item| match item {
94 hir::AssocItem::Const(const_item) => Some(const_item),
95 _ => None,
96 })
97 {
98 add_const_impl(&trigger, acc, ctx, &missing_fn);
99 }
100 }
101
102 _ => {}
103 }
104 }
105}
106
107fn add_function_impl(
108 fn_def_node: &SyntaxNode,
109 acc: &mut Completions,
110 ctx: &CompletionContext,
111 func: &hir::Function,
112) {
113 let display = FunctionSignature::from_hir(ctx.db, func.clone());
114
115 let fn_name = func.name(ctx.db).to_string();
116
117 let label = if func.params(ctx.db).len() > 0 {
118 format!("fn {}(..)", fn_name)
119 } else {
120 format!("fn {}()", fn_name)
121 };
122
123 let builder = CompletionItem::new(CompletionKind::Magic, ctx.source_range(), label.clone())
124 .lookup_by(fn_name)
125 .set_documentation(func.docs(ctx.db));
126
127 let completion_kind = if func.has_self_param(ctx.db) {
128 CompletionItemKind::Method
129 } else {
130 CompletionItemKind::Function
131 };
132
133 let snippet = format!("{} {{}}", display);
134
135 let range = TextRange::from_to(fn_def_node.text_range().start(), ctx.source_range().end());
136
137 builder.text_edit(TextEdit::replace(range, snippet)).kind(completion_kind).add_to(acc);
138}
139
140fn add_type_alias_impl(
141 type_def_node: &SyntaxNode,
142 acc: &mut Completions,
143 ctx: &CompletionContext,
144 type_alias: &hir::TypeAlias,
145) {
146 let alias_name = type_alias.name(ctx.db).to_string();
147
148 let snippet = format!("type {} = ", alias_name);
149
150 let range = TextRange::from_to(type_def_node.text_range().start(), ctx.source_range().end());
151
152 CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone())
153 .text_edit(TextEdit::replace(range, snippet))
154 .lookup_by(alias_name)
155 .kind(CompletionItemKind::TypeAlias)
156 .set_documentation(type_alias.docs(ctx.db))
157 .add_to(acc);
158}
159
160fn add_const_impl(
161 const_def_node: &SyntaxNode,
162 acc: &mut Completions,
163 ctx: &CompletionContext,
164 const_: &hir::Const,
165) {
166 let const_name = const_.name(ctx.db).map(|n| n.to_string());
167
168 if let Some(const_name) = const_name {
169 let snippet = make_const_compl_syntax(&const_.source(ctx.db).value);
170
171 let range =
172 TextRange::from_to(const_def_node.text_range().start(), ctx.source_range().end());
173
174 CompletionItem::new(CompletionKind::Magic, ctx.source_range(), snippet.clone())
175 .text_edit(TextEdit::replace(range, snippet))
176 .lookup_by(const_name)
177 .kind(CompletionItemKind::Const)
178 .set_documentation(const_.docs(ctx.db))
179 .add_to(acc);
180 }
181}
182
183fn make_const_compl_syntax(const_: &ast::ConstDef) -> String {
184 let const_ = edit::strip_attrs_and_docs(const_);
185
186 let const_start = const_.syntax().text_range().start();
187 let const_end = const_.syntax().text_range().end();
188
189 let start =
190 const_.syntax().first_child_or_token().map_or(const_start, |f| f.text_range().start());
191
192 let end = const_
193 .syntax()
194 .children_with_tokens()
195 .find(|s| s.kind() == SyntaxKind::SEMI || s.kind() == SyntaxKind::EQ)
196 .map_or(const_end, |f| f.text_range().start());
197
198 let len = end - start;
199 let range = TextRange::from_to(0.into(), len);
200
201 let syntax = const_.syntax().text().slice(range).to_string();
202
203 format!("{} = ", syntax.trim_end())
204}
205
206#[cfg(test)]
207mod tests {
208 use crate::completion::{do_completion, CompletionItem, CompletionKind};
209 use insta::assert_debug_snapshot;
210
211 fn complete(code: &str) -> Vec<CompletionItem> {
212 do_completion(code, CompletionKind::Magic)
213 }
214
215 #[test]
216 fn single_function() {
217 let completions = complete(
218 r"
219 trait Test {
220 fn foo();
221 }
222
223 struct T1;
224
225 impl Test for T1 {
226 fn f<|>
227 }
228 ",
229 );
230 assert_debug_snapshot!(completions, @r###"
231 [
232 CompletionItem {
233 label: "fn foo()",
234 source_range: [141; 142),
235 delete: [138; 142),
236 insert: "fn foo() {}",
237 kind: Function,
238 lookup: "foo",
239 },
240 ]
241 "###);
242 }
243
244 #[test]
245 fn hide_implemented_fn() {
246 let completions = complete(
247 r"
248 trait Test {
249 fn foo();
250 fn foo_bar();
251 }
252
253 struct T1;
254
255 impl Test for T1 {
256 fn foo() {}
257
258 fn f<|>
259 }
260 ",
261 );
262 assert_debug_snapshot!(completions, @r###"
263 [
264 CompletionItem {
265 label: "fn foo_bar()",
266 source_range: [200; 201),
267 delete: [197; 201),
268 insert: "fn foo_bar() {}",
269 kind: Function,
270 lookup: "foo_bar",
271 },
272 ]
273 "###);
274 }
275
276 #[test]
277 fn completes_only_on_top_level() {
278 let completions = complete(
279 r"
280 trait Test {
281 fn foo();
282
283 fn foo_bar();
284 }
285
286 struct T1;
287
288 impl Test for T1 {
289 fn foo() {
290 <|>
291 }
292 }
293 ",
294 );
295 assert_debug_snapshot!(completions, @r###"[]"###);
296 }
297
298 #[test]
299 fn generic_fn() {
300 let completions = complete(
301 r"
302 trait Test {
303 fn foo<T>();
304 }
305
306 struct T1;
307
308 impl Test for T1 {
309 fn f<|>
310 }
311 ",
312 );
313 assert_debug_snapshot!(completions, @r###"
314 [
315 CompletionItem {
316 label: "fn foo()",
317 source_range: [144; 145),
318 delete: [141; 145),
319 insert: "fn foo<T>() {}",
320 kind: Function,
321 lookup: "foo",
322 },
323 ]
324 "###);
325 }
326
327 #[test]
 328 fn generic_constraint_fn() {
329 let completions = complete(
330 r"
331 trait Test {
332 fn foo<T>() where T: Into<String>;
333 }
334
335 struct T1;
336
337 impl Test for T1 {
338 fn f<|>
339 }
340 ",
341 );
342 assert_debug_snapshot!(completions, @r###"
343 [
344 CompletionItem {
345 label: "fn foo()",
346 source_range: [166; 167),
347 delete: [163; 167),
348 insert: "fn foo<T>()\nwhere T: Into<String> {}",
349 kind: Function,
350 lookup: "foo",
351 },
352 ]
353 "###);
354 }
355
356 #[test]
357 fn associated_type() {
358 let completions = complete(
359 r"
360 trait Test {
361 type SomeType;
362 }
363
364 impl Test for () {
365 type S<|>
366 }
367 ",
368 );
369 assert_debug_snapshot!(completions, @r###"
370 [
371 CompletionItem {
372 label: "type SomeType = ",
373 source_range: [124; 125),
374 delete: [119; 125),
375 insert: "type SomeType = ",
376 kind: TypeAlias,
377 lookup: "SomeType",
378 },
379 ]
380 "###);
381 }
382
383 #[test]
384 fn associated_const() {
385 let completions = complete(
386 r"
387 trait Test {
388 const SOME_CONST: u16;
389 }
390
391 impl Test for () {
392 const S<|>
393 }
394 ",
395 );
396 assert_debug_snapshot!(completions, @r###"
397 [
398 CompletionItem {
399 label: "const SOME_CONST: u16 = ",
400 source_range: [133; 134),
401 delete: [127; 134),
402 insert: "const SOME_CONST: u16 = ",
403 kind: Const,
404 lookup: "SOME_CONST",
405 },
406 ]
407 "###);
408 }
409
410 #[test]
411 fn associated_const_with_default() {
412 let completions = complete(
413 r"
414 trait Test {
415 const SOME_CONST: u16 = 42;
416 }
417
418 impl Test for () {
419 const S<|>
420 }
421 ",
422 );
423 assert_debug_snapshot!(completions, @r###"
424 [
425 CompletionItem {
426 label: "const SOME_CONST: u16 = ",
427 source_range: [138; 139),
428 delete: [132; 139),
429 insert: "const SOME_CONST: u16 = ",
430 kind: Const,
431 lookup: "SOME_CONST",
432 },
433 ]
434 "###);
435 }
436}
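
The const completion above builds its insert text by cutting the trait's declaration at the first `=` or `;`, leaving only `const NAME: TYPE = `. A minimal string-level sketch of that trimming (a hypothetical helper, not the syntax-tree-based `make_const_compl_syntax` itself):

```rust
/// Sketch: derive a completion snippet from a trait's const declaration by
/// dropping the default value/terminator, keeping `const NAME: TYPE = `.
/// String-based stand-in for the syntax-tree logic above.
fn const_completion_snippet(decl: &str) -> String {
    let cut = decl.find(|c: char| c == '=' || c == ';').unwrap_or(decl.len());
    format!("{} = ", decl[..cut].trim_end())
}

fn main() {
    assert_eq!(const_completion_snippet("const SOME_CONST: u16 = 42;"), "const SOME_CONST: u16 = ");
    assert_eq!(const_completion_snippet("const SOME_CONST: u16;"), "const SOME_CONST: u16 = ");
}
```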
diff --git a/crates/ra_ide/src/completion/completion_context.rs b/crates/ra_ide/src/completion/completion_context.rs
index 5a0407fd7..8678a3234 100644
--- a/crates/ra_ide/src/completion/completion_context.rs
+++ b/crates/ra_ide/src/completion/completion_context.rs
@@ -25,6 +25,7 @@ pub(crate) struct CompletionContext<'a> {
25 pub(super) use_item_syntax: Option<ast::UseItem>, 25 pub(super) use_item_syntax: Option<ast::UseItem>,
26 pub(super) record_lit_syntax: Option<ast::RecordLit>, 26 pub(super) record_lit_syntax: Option<ast::RecordLit>,
27 pub(super) record_lit_pat: Option<ast::RecordPat>, 27 pub(super) record_lit_pat: Option<ast::RecordPat>,
28 pub(super) impl_block: Option<ast::ImplBlock>,
28 pub(super) is_param: bool, 29 pub(super) is_param: bool,
29 /// If a name-binding or reference to a const in a pattern. 30 /// If a name-binding or reference to a const in a pattern.
30 /// Irrefutable patterns (like let) are excluded. 31 /// Irrefutable patterns (like let) are excluded.
@@ -72,6 +73,7 @@ impl<'a> CompletionContext<'a> {
72 use_item_syntax: None, 73 use_item_syntax: None,
73 record_lit_syntax: None, 74 record_lit_syntax: None,
74 record_lit_pat: None, 75 record_lit_pat: None,
76 impl_block: None,
75 is_param: false, 77 is_param: false,
76 is_pat_binding: false, 78 is_pat_binding: false,
77 is_trivial_path: false, 79 is_trivial_path: false,
@@ -148,6 +150,13 @@ impl<'a> CompletionContext<'a> {
148 self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset); 150 self.record_lit_syntax = find_node_at_offset(original_file.syntax(), self.offset);
149 } 151 }
150 152
153 self.impl_block = self
154 .token
155 .parent()
156 .ancestors()
157 .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
158 .find_map(ast::ImplBlock::cast);
159
151 let top_node = name_ref 160 let top_node = name_ref
152 .syntax() 161 .syntax()
153 .ancestors() 162 .ancestors()
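
The completion context now records the innermost `impl` block by walking ancestors upward while stopping at module or file boundaries, so an impl in an outer scope is not picked up. A minimal sketch of that take_while/find pattern over a plain list of ancestor kinds (a stand-in enum, not the real rowan syntax tree):

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
enum Kind { ImplBlock, Module, SourceFile, Other }

/// Sketch: find the nearest enclosing impl block, but never cross a module
/// or file boundary (mirrors the take_while + find_map chain above).
fn enclosing_impl(ancestors: &[Kind]) -> Option<Kind> {
    ancestors
        .iter()
        .copied()
        .take_while(|&k| k != Kind::SourceFile && k != Kind::Module)
        .find(|&k| k == Kind::ImplBlock)
}

fn main() {
    // Ancestors are listed innermost to outermost, as seen from the completion token.
    assert_eq!(enclosing_impl(&[Kind::Other, Kind::ImplBlock, Kind::SourceFile]), Some(Kind::ImplBlock));
    // An impl outside the enclosing module is ignored.
    assert_eq!(enclosing_impl(&[Kind::Other, Kind::Module, Kind::ImplBlock]), None);
}
```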
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index 689921f3f..f86f98be7 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -37,6 +37,7 @@ mod display;
37mod inlay_hints; 37mod inlay_hints;
38mod expand; 38mod expand;
39mod expand_macro; 39mod expand_macro;
40mod ssr;
40 41
41#[cfg(test)] 42#[cfg(test)]
42mod marks; 43mod marks;
@@ -71,8 +72,9 @@ pub use crate::{
71 references::{ 72 references::{
72 Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult, SearchScope, 73 Declaration, Reference, ReferenceAccess, ReferenceKind, ReferenceSearchResult, SearchScope,
73 }, 74 },
74 runnables::{Runnable, RunnableKind}, 75 runnables::{Runnable, RunnableKind, TestId},
75 source_change::{FileSystemEdit, SourceChange, SourceFileEdit}, 76 source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
77 ssr::SsrError,
76 syntax_highlighting::HighlightedRange, 78 syntax_highlighting::HighlightedRange,
77}; 79};
78 80
@@ -464,6 +466,16 @@ impl Analysis {
464 self.with_db(|db| references::rename(db, position, new_name)) 466 self.with_db(|db| references::rename(db, position, new_name))
465 } 467 }
466 468
469 pub fn structural_search_replace(
470 &self,
471 query: &str,
472 ) -> Cancelable<Result<SourceChange, SsrError>> {
473 self.with_db(|db| {
474 let edits = ssr::parse_search_replace(query, db)?;
475 Ok(SourceChange::source_file_edits("ssr", edits))
476 })
477 }
478
467 /// Performs an operation on that may be Canceled. 479 /// Performs an operation on that may be Canceled.
468 fn with_db<F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, T>( 480 fn with_db<F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe, T>(
469 &self, 481 &self,
diff --git a/crates/ra_ide/src/runnables.rs b/crates/ra_ide/src/runnables.rs
index b6b0c70f9..be2a67d0a 100644
--- a/crates/ra_ide/src/runnables.rs
+++ b/crates/ra_ide/src/runnables.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use hir::InFile; 3use hir::{InFile, SourceBinder};
4use itertools::Itertools; 4use itertools::Itertools;
5use ra_db::SourceDatabase; 5use ra_db::SourceDatabase;
6use ra_ide_db::RootDatabase; 6use ra_ide_db::RootDatabase;
@@ -10,6 +10,7 @@ use ra_syntax::{
10}; 10};
11 11
12use crate::FileId; 12use crate::FileId;
13use std::fmt::Display;
13 14
14#[derive(Debug)] 15#[derive(Debug)]
15pub struct Runnable { 16pub struct Runnable {
@@ -18,38 +19,84 @@ pub struct Runnable {
18} 19}
19 20
20#[derive(Debug)] 21#[derive(Debug)]
22pub enum TestId {
23 Name(String),
24 Path(String),
25}
26
27impl Display for TestId {
28 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
29 match self {
30 TestId::Name(name) => write!(f, "{}", name),
31 TestId::Path(path) => write!(f, "{}", path),
32 }
33 }
34}
35
36#[derive(Debug)]
21pub enum RunnableKind { 37pub enum RunnableKind {
22 Test { name: String }, 38 Test { test_id: TestId },
23 TestMod { path: String }, 39 TestMod { path: String },
24 Bench { name: String }, 40 Bench { test_id: TestId },
25 Bin, 41 Bin,
26} 42}
27 43
28pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { 44pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
29 let parse = db.parse(file_id); 45 let parse = db.parse(file_id);
30 parse.tree().syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() 46 let mut sb = SourceBinder::new(db);
47 parse.tree().syntax().descendants().filter_map(|i| runnable(db, &mut sb, file_id, i)).collect()
31} 48}
32 49
33fn runnable(db: &RootDatabase, file_id: FileId, item: SyntaxNode) -> Option<Runnable> { 50fn runnable(
51 db: &RootDatabase,
52 source_binder: &mut SourceBinder<RootDatabase>,
53 file_id: FileId,
54 item: SyntaxNode,
55) -> Option<Runnable> {
34 match_ast! { 56 match_ast! {
35 match item { 57 match item {
36 ast::FnDef(it) => { runnable_fn(it) }, 58 ast::FnDef(it) => { runnable_fn(db, source_binder, file_id, it) },
37 ast::Module(it) => { runnable_mod(db, file_id, it) }, 59 ast::Module(it) => { runnable_mod(db, source_binder, file_id, it) },
38 _ => { None }, 60 _ => { None },
39 } 61 }
40 } 62 }
41} 63}
42 64
43fn runnable_fn(fn_def: ast::FnDef) -> Option<Runnable> { 65fn runnable_fn(
44 let name = fn_def.name()?.text().clone(); 66 db: &RootDatabase,
45 let kind = if name == "main" { 67 source_binder: &mut SourceBinder<RootDatabase>,
68 file_id: FileId,
69 fn_def: ast::FnDef,
70) -> Option<Runnable> {
71 let name_string = fn_def.name()?.text().to_string();
72
73 let kind = if name_string == "main" {
46 RunnableKind::Bin 74 RunnableKind::Bin
47 } else if has_test_related_attribute(&fn_def) {
48 RunnableKind::Test { name: name.to_string() }
49 } else if fn_def.has_atom_attr("bench") {
50 RunnableKind::Bench { name: name.to_string() }
51 } else { 75 } else {
52 return None; 76 let test_id = if let Some(module) = source_binder
77 .to_def(InFile::new(file_id.into(), fn_def.clone()))
78 .map(|def| def.module(db))
79 {
80 let path = module
81 .path_to_root(db)
82 .into_iter()
83 .rev()
84 .filter_map(|it| it.name(db))
85 .map(|name| name.to_string())
86 .chain(std::iter::once(name_string))
87 .join("::");
88 TestId::Path(path)
89 } else {
90 TestId::Name(name_string)
91 };
92
93 if has_test_related_attribute(&fn_def) {
94 RunnableKind::Test { test_id }
95 } else if fn_def.has_atom_attr("bench") {
96 RunnableKind::Bench { test_id }
97 } else {
98 return None;
99 }
53 }; 100 };
54 Some(Runnable { range: fn_def.syntax().text_range(), kind }) 101 Some(Runnable { range: fn_def.syntax().text_range(), kind })
55} 102}
@@ -68,7 +115,12 @@ fn has_test_related_attribute(fn_def: &ast::FnDef) -> bool {
68 .any(|attribute_text| attribute_text.contains("test")) 115 .any(|attribute_text| attribute_text.contains("test"))
69} 116}
70 117
71fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Option<Runnable> { 118fn runnable_mod(
119 db: &RootDatabase,
120 source_binder: &mut SourceBinder<RootDatabase>,
121 file_id: FileId,
122 module: ast::Module,
123) -> Option<Runnable> {
72 let has_test_function = module 124 let has_test_function = module
73 .item_list()? 125 .item_list()?
74 .items() 126 .items()
@@ -76,13 +128,12 @@ fn runnable_mod(db: &RootDatabase, file_id: FileId, module: ast::Module) -> Opti
76 ast::ModuleItem::FnDef(it) => Some(it), 128 ast::ModuleItem::FnDef(it) => Some(it),
77 _ => None, 129 _ => None,
78 }) 130 })
79 .any(|f| f.has_atom_attr("test")); 131 .any(|f| has_test_related_attribute(&f));
80 if !has_test_function { 132 if !has_test_function {
81 return None; 133 return None;
82 } 134 }
83 let range = module.syntax().text_range(); 135 let range = module.syntax().text_range();
84 let mut sb = hir::SourceBinder::new(db); 136 let module = source_binder.to_def(InFile::new(file_id.into(), module))?;
85 let module = sb.to_def(InFile::new(file_id.into(), module))?;
86 137
87 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::"); 138 let path = module.path_to_root(db).into_iter().rev().filter_map(|it| it.name(db)).join("::");
88 Some(Runnable { range, kind: RunnableKind::TestMod { path } }) 139 Some(Runnable { range, kind: RunnableKind::TestMod { path } })
@@ -121,13 +172,17 @@ mod tests {
121 Runnable { 172 Runnable {
122 range: [22; 46), 173 range: [22; 46),
123 kind: Test { 174 kind: Test {
124 name: "test_foo", 175 test_id: Path(
176 "test_foo",
177 ),
125 }, 178 },
126 }, 179 },
127 Runnable { 180 Runnable {
128 range: [47; 81), 181 range: [47; 81),
129 kind: Test { 182 kind: Test {
130 name: "test_foo", 183 test_id: Path(
184 "test_foo",
185 ),
131 }, 186 },
132 }, 187 },
133 ] 188 ]
@@ -160,7 +215,9 @@ mod tests {
160 Runnable { 215 Runnable {
161 range: [28; 57), 216 range: [28; 57),
162 kind: Test { 217 kind: Test {
163 name: "test_foo1", 218 test_id: Path(
219 "test_mod::test_foo1",
220 ),
164 }, 221 },
165 }, 222 },
166 ] 223 ]
@@ -195,7 +252,9 @@ mod tests {
195 Runnable { 252 Runnable {
196 range: [46; 79), 253 range: [46; 79),
197 kind: Test { 254 kind: Test {
198 name: "test_foo1", 255 test_id: Path(
256 "foo::test_mod::test_foo1",
257 ),
199 }, 258 },
200 }, 259 },
201 ] 260 ]
@@ -232,7 +291,9 @@ mod tests {
232 Runnable { 291 Runnable {
233 range: [68; 105), 292 range: [68; 105),
234 kind: Test { 293 kind: Test {
235 name: "test_foo1", 294 test_id: Path(
295 "foo::bar::test_mod::test_foo1",
296 ),
236 }, 297 },
237 }, 298 },
238 ] 299 ]
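
Runnables now carry a fully qualified `TestId::Path` built from the enclosing module chain plus the function name. A minimal sketch of the joining step, assuming the module names are already available as plain strings (the real code resolves them through `SourceBinder` and `path_to_root`):

```rust
/// Sketch: build a fully qualified test id such as `foo::test_mod::test_foo1`
/// from the enclosing module names plus the test function name.
fn qualified_test_id(module_path: &[&str], fn_name: &str) -> String {
    module_path
        .iter()
        .copied()
        .chain(std::iter::once(fn_name))
        .collect::<Vec<_>>()
        .join("::")
}

fn main() {
    assert_eq!(qualified_test_id(&["foo", "test_mod"], "test_foo1"), "foo::test_mod::test_foo1");
    // A test at the crate root keeps just its name.
    assert_eq!(qualified_test_id(&[], "test_foo"), "test_foo");
}
```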
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 1cc55e78b..a02dbaf2f 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index 918fd4b97..95f038f00 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/ssr.rs b/crates/ra_ide/src/ssr.rs
new file mode 100644
index 000000000..14eb0b8b2
--- /dev/null
+++ b/crates/ra_ide/src/ssr.rs
@@ -0,0 +1,324 @@
1//! structural search replace
2
3use crate::source_change::SourceFileEdit;
4use ra_ide_db::RootDatabase;
5use ra_syntax::ast::make::expr_from_text;
6use ra_syntax::AstNode;
7use ra_syntax::SyntaxElement;
8use ra_syntax::SyntaxNode;
9use ra_text_edit::{TextEdit, TextEditBuilder};
10use rustc_hash::FxHashMap;
11use std::collections::HashMap;
12use std::str::FromStr;
13
14pub use ra_db::{SourceDatabase, SourceDatabaseExt};
15use ra_ide_db::symbol_index::SymbolsDatabase;
16
17#[derive(Debug, PartialEq)]
18pub struct SsrError(String);
19
20impl std::fmt::Display for SsrError {
21 fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
22 write!(f, "Parse error: {}", self.0)
23 }
24}
25
26impl std::error::Error for SsrError {}
27
28pub fn parse_search_replace(
29 query: &str,
30 db: &RootDatabase,
31) -> Result<Vec<SourceFileEdit>, SsrError> {
32 let mut edits = vec![];
33 let query: SsrQuery = query.parse()?;
34 for &root in db.local_roots().iter() {
35 let sr = db.source_root(root);
36 for file_id in sr.walk() {
37 dbg!(db.file_relative_path(file_id));
38 let matches = find(&query.pattern, db.parse(file_id).tree().syntax());
39 if !matches.matches.is_empty() {
40 edits.push(SourceFileEdit { file_id, edit: replace(&matches, &query.template) });
41 }
42 }
43 }
44 Ok(edits)
45}
46
47#[derive(Debug)]
48struct SsrQuery {
49 pattern: SsrPattern,
50 template: SsrTemplate,
51}
52
53#[derive(Debug)]
54struct SsrPattern {
55 pattern: SyntaxNode,
56 vars: Vec<Var>,
57}
58
59/// represents an `$var` in an SSR query
60#[derive(Debug, Clone, PartialEq, Eq, Hash)]
61struct Var(String);
62
63#[derive(Debug)]
64struct SsrTemplate {
65 template: SyntaxNode,
66 placeholders: FxHashMap<SyntaxNode, Var>,
67}
68
69type Binding = HashMap<Var, SyntaxNode>;
70
71#[derive(Debug)]
72struct Match {
73 place: SyntaxNode,
74 binding: Binding,
75}
76
77#[derive(Debug)]
78struct SsrMatches {
79 matches: Vec<Match>,
80}
81
82impl FromStr for SsrQuery {
83 type Err = SsrError;
84
85 fn from_str(query: &str) -> Result<SsrQuery, SsrError> {
86 let mut it = query.split("==>>");
87 let pattern = it.next().expect("at least empty string").trim();
88 let mut template =
 89 it.next().ok_or(SsrError("Cannot find delimiter `==>>`".into()))?.trim().to_string();
90 if it.next().is_some() {
91 return Err(SsrError("More than one delimiter found".into()));
92 }
93 let mut vars = vec![];
94 let mut it = pattern.split('$');
95 let mut pattern = it.next().expect("something").to_string();
96
97 for part in it.map(split_by_var) {
98 let (var, var_type, remainder) = part?;
99 is_expr(var_type)?;
100 let new_var = create_name(var, &mut vars)?;
101 pattern.push_str(new_var);
102 pattern.push_str(remainder);
103 template = replace_in_template(template, var, new_var);
104 }
105
106 let template = expr_from_text(&template).syntax().clone();
107 let mut placeholders = FxHashMap::default();
108
109 traverse(&template, &mut |n| {
110 if let Some(v) = vars.iter().find(|v| v.0.as_str() == n.text()) {
111 placeholders.insert(n.clone(), v.clone());
112 false
113 } else {
114 true
115 }
116 });
117
118 let pattern = SsrPattern { pattern: expr_from_text(&pattern).syntax().clone(), vars };
119 let template = SsrTemplate { template, placeholders };
120 Ok(SsrQuery { pattern, template })
121 }
122}
123
124fn traverse(node: &SyntaxNode, go: &mut impl FnMut(&SyntaxNode) -> bool) {
125 if !go(node) {
126 return;
127 }
128 for ref child in node.children() {
129 traverse(child, go);
130 }
131}
132
133fn split_by_var(s: &str) -> Result<(&str, &str, &str), SsrError> {
134 let end_of_name = s.find(":").ok_or(SsrError("Use $<name>:expr".into()))?;
135 let name = &s[0..end_of_name];
136 is_name(name)?;
137 let type_begin = end_of_name + 1;
138 let type_length = s[type_begin..].find(|c| !char::is_ascii_alphanumeric(&c)).unwrap_or(s.len());
139 let type_name = &s[type_begin..type_begin + type_length];
140 Ok((name, type_name, &s[type_begin + type_length..]))
141}
142
143fn is_name(s: &str) -> Result<(), SsrError> {
144 if s.chars().all(|c| c.is_ascii_alphanumeric() || c == '_') {
145 Ok(())
146 } else {
147 Err(SsrError("Name can contain only alphanumerics and _".into()))
148 }
149}
150
151fn is_expr(s: &str) -> Result<(), SsrError> {
152 if s == "expr" {
153 Ok(())
154 } else {
155 Err(SsrError("Only $<name>:expr is supported".into()))
156 }
157}
158
159fn replace_in_template(template: String, var: &str, new_var: &str) -> String {
160 let name = format!("${}", var);
161 template.replace(&name, new_var)
162}
163
164fn create_name<'a>(name: &str, vars: &'a mut Vec<Var>) -> Result<&'a str, SsrError> {
165 let sanitized_name = format!("__search_pattern_{}", name);
166 if vars.iter().any(|a| a.0 == sanitized_name) {
167 return Err(SsrError(format!("Name `{}` repeats more than once", name)));
168 }
169 vars.push(Var(sanitized_name));
170 Ok(&vars.last().unwrap().0)
171}
172
173fn find(pattern: &SsrPattern, code: &SyntaxNode) -> SsrMatches {
174 fn check(
175 pattern: &SyntaxElement,
176 code: &SyntaxElement,
177 placeholders: &[Var],
178 match_: &mut Match,
179 ) -> bool {
180 match (pattern, code) {
181 (SyntaxElement::Token(ref pattern), SyntaxElement::Token(ref code)) => {
182 pattern.text() == code.text()
183 }
184 (SyntaxElement::Node(ref pattern), SyntaxElement::Node(ref code)) => {
185 if placeholders.iter().find(|&n| n.0.as_str() == pattern.text()).is_some() {
186 match_.binding.insert(Var(pattern.text().to_string()), code.clone());
187 true
188 } else {
189 pattern.green().children().count() == code.green().children().count()
190 && pattern
191 .children_with_tokens()
192 .zip(code.children_with_tokens())
193 .all(|(a, b)| check(&a, &b, placeholders, match_))
194 }
195 }
196 _ => false,
197 }
198 }
199 let kind = pattern.pattern.kind();
200 let matches = code
201 .descendants_with_tokens()
202 .filter(|n| n.kind() == kind)
203 .filter_map(|code| {
204 let mut match_ =
205 Match { place: code.as_node().unwrap().clone(), binding: HashMap::new() };
206 if check(
207 &SyntaxElement::from(pattern.pattern.clone()),
208 &code,
209 &pattern.vars,
210 &mut match_,
211 ) {
212 Some(match_)
213 } else {
214 None
215 }
216 })
217 .collect();
218 SsrMatches { matches }
219}
220
221fn replace(matches: &SsrMatches, template: &SsrTemplate) -> TextEdit {
222 let mut builder = TextEditBuilder::default();
223 for match_ in &matches.matches {
224 builder.replace(match_.place.text_range(), render_replace(&match_.binding, template));
225 }
226 builder.finish()
227}
228
229fn render_replace(binding: &Binding, template: &SsrTemplate) -> String {
230 let mut builder = TextEditBuilder::default();
231 for element in template.template.descendants() {
232 if let Some(var) = template.placeholders.get(&element) {
233 builder.replace(element.text_range(), binding[var].to_string())
234 }
235 }
236 builder.finish().apply(&template.template.text().to_string())
237}
238
239#[cfg(test)]
240mod tests {
241 use super::*;
242 use ra_syntax::SourceFile;
243
244 fn parse_error_text(query: &str) -> String {
245 format!("{}", query.parse::<SsrQuery>().unwrap_err())
246 }
247
248 #[test]
249 fn parser_happy_case() {
250 let result: SsrQuery = "foo($a:expr, $b:expr) ==>> bar($b, $a)".parse().unwrap();
251 assert_eq!(&result.pattern.pattern.text(), "foo(__search_pattern_a, __search_pattern_b)");
252 assert_eq!(result.pattern.vars.len(), 2);
253 assert_eq!(result.pattern.vars[0].0, "__search_pattern_a");
254 assert_eq!(result.pattern.vars[1].0, "__search_pattern_b");
255 assert_eq!(&result.template.template.text(), "bar(__search_pattern_b, __search_pattern_a)");
256 dbg!(result.template.placeholders);
257 }
258
259 #[test]
260 fn parser_empty_query() {
261 assert_eq!(parse_error_text(""), "Parse error: Cannot find delemiter `==>>`");
262 }
263
264 #[test]
265 fn parser_no_delimiter() {
266 assert_eq!(parse_error_text("foo()"), "Parse error: Cannot find delemiter `==>>`");
267 }
268
269 #[test]
270 fn parser_two_delimiters() {
271 assert_eq!(
272 parse_error_text("foo() ==>> a ==>> b "),
273 "Parse error: More than one delimiter found"
274 );
275 }
276
277 #[test]
278 fn parser_no_pattern_type() {
279 assert_eq!(parse_error_text("foo($a) ==>>"), "Parse error: Use $<name>:expr");
280 }
281
282 #[test]
283 fn parser_invalid_name() {
284 assert_eq!(
285 parse_error_text("foo($a+:expr) ==>>"),
286 "Parse error: Name can contain only alphanumerics and _"
287 );
288 }
289
290 #[test]
291 fn parser_invalid_type() {
292 assert_eq!(
293 parse_error_text("foo($a:ident) ==>>"),
294 "Parse error: Only $<name>:expr is supported"
295 );
296 }
297
298 #[test]
299 fn parser_repeated_name() {
300 assert_eq!(
301 parse_error_text("foo($a:expr, $a:expr) ==>>"),
302 "Parse error: Name `a` repeats more than once"
303 );
304 }
305
306 #[test]
307 fn parse_match_replace() {
308 let query: SsrQuery = "foo($x:expr) ==>> bar($x)".parse().unwrap();
309 let input = "fn main() { foo(1+2); }";
310
311 let code = SourceFile::parse(input).tree();
312 let matches = find(&query.pattern, code.syntax());
313 assert_eq!(matches.matches.len(), 1);
314 assert_eq!(matches.matches[0].place.text(), "foo(1+2)");
315 assert_eq!(matches.matches[0].binding.len(), 1);
316 assert_eq!(
317 matches.matches[0].binding[&Var("__search_pattern_x".to_string())].text(),
318 "1+2"
319 );
320
321 let edit = replace(&matches, &query.template);
322 assert_eq!(edit.apply(input), "fn main() { bar(1+2); }");
323 }
324}
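
An SSR query is a search pattern and a replacement template separated by `==>>`, with `$name:expr` placeholders. A minimal sketch of just the delimiter handling from the `FromStr` implementation above (standalone strings, no placeholder rewriting):

```rust
/// Sketch: split an SSR query into pattern and template, rejecting a missing
/// or repeated `==>>` delimiter, as the FromStr implementation above does.
fn split_query(query: &str) -> Result<(&str, &str), String> {
    let mut parts = query.split("==>>");
    let pattern = parts.next().expect("split yields at least one part").trim();
    let template = parts.next().ok_or_else(|| "Cannot find delimiter `==>>`".to_string())?.trim();
    if parts.next().is_some() {
        return Err("More than one delimiter found".to_string());
    }
    Ok((pattern, template))
}

fn main() {
    assert_eq!(split_query("foo($a:expr) ==>> bar($a)").unwrap(), ("foo($a:expr)", "bar($a)"));
    assert!(split_query("foo()").is_err());
    assert!(split_query("foo() ==>> a ==>> b").is_err());
}
```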
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 174e13595..20c414ca1 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -365,6 +365,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
365.literal { color: #BFEBBF; } 365.literal { color: #BFEBBF; }
366.literal\\.numeric { color: #6A8759; } 366.literal\\.numeric { color: #6A8759; }
367.macro { color: #94BFF3; } 367.macro { color: #94BFF3; }
368.module { color: #AFD8AF; }
368.variable { color: #DCDCCC; } 369.variable { color: #DCDCCC; }
369.variable\\.mut { color: #DCDCCC; text-decoration: underline; } 370.variable\\.mut { color: #DCDCCC; text-decoration: underline; }
370 371
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml
index 716e88bc1..495fffb5a 100644
--- a/crates/ra_ide_db/Cargo.toml
+++ b/crates/ra_ide_db/Cargo.toml
@@ -22,7 +22,6 @@ fst = { version = "0.3.1", default-features = false }
22rustc-hash = "1.0" 22rustc-hash = "1.0"
23unicase = "2.2.0" 23unicase = "2.2.0"
24superslice = "1.0.0" 24superslice = "1.0.0"
25rand = { version = "0.7.0", features = ["small_rng"] }
26once_cell = "1.2.0" 25once_cell = "1.2.0"
27 26
28ra_syntax = { path = "../ra_syntax" } 27ra_syntax = { path = "../ra_syntax" }
diff --git a/crates/ra_lsp_server/src/cargo_target_spec.rs b/crates/ra_lsp_server/src/cargo_target_spec.rs
index 594caffe2..5fd1e7b6b 100644
--- a/crates/ra_lsp_server/src/cargo_target_spec.rs
+++ b/crates/ra_lsp_server/src/cargo_target_spec.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3use ra_ide::{FileId, RunnableKind}; 3use ra_ide::{FileId, RunnableKind, TestId};
4use ra_project_model::{self, ProjectWorkspace, TargetKind}; 4use ra_project_model::{self, ProjectWorkspace, TargetKind};
5 5
6use crate::{world::WorldSnapshot, Result}; 6use crate::{world::WorldSnapshot, Result};
@@ -13,13 +13,16 @@ pub(crate) fn runnable_args(
13 let spec = CargoTargetSpec::for_file(world, file_id)?; 13 let spec = CargoTargetSpec::for_file(world, file_id)?;
14 let mut res = Vec::new(); 14 let mut res = Vec::new();
15 match kind { 15 match kind {
16 RunnableKind::Test { name } => { 16 RunnableKind::Test { test_id } => {
17 res.push("test".to_string()); 17 res.push("test".to_string());
18 if let Some(spec) = spec { 18 if let Some(spec) = spec {
19 spec.push_to(&mut res); 19 spec.push_to(&mut res);
20 } 20 }
21 res.push("--".to_string()); 21 res.push("--".to_string());
22 res.push(name.to_string()); 22 res.push(test_id.to_string());
23 if let TestId::Path(_) = test_id {
24 res.push("--exact".to_string());
25 }
23 res.push("--nocapture".to_string()); 26 res.push("--nocapture".to_string());
24 } 27 }
25 RunnableKind::TestMod { path } => { 28 RunnableKind::TestMod { path } => {
@@ -31,13 +34,16 @@ pub(crate) fn runnable_args(
31 res.push(path.to_string()); 34 res.push(path.to_string());
32 res.push("--nocapture".to_string()); 35 res.push("--nocapture".to_string());
33 } 36 }
34 RunnableKind::Bench { name } => { 37 RunnableKind::Bench { test_id } => {
35 res.push("bench".to_string()); 38 res.push("bench".to_string());
36 if let Some(spec) = spec { 39 if let Some(spec) = spec {
37 spec.push_to(&mut res); 40 spec.push_to(&mut res);
38 } 41 }
39 res.push("--".to_string()); 42 res.push("--".to_string());
40 res.push(name.to_string()); 43 res.push(test_id.to_string());
44 if let TestId::Path(_) = test_id {
45 res.push("--exact".to_string());
46 }
41 res.push("--nocapture".to_string()); 47 res.push("--nocapture".to_string());
42 } 48 }
43 RunnableKind::Bin => { 49 RunnableKind::Bin => {
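
With path-qualified test ids, the runner also passes `--exact` so `cargo test` does not match other tests whose names merely contain the path as a substring. A minimal sketch of the argument assembly, using a plain string list instead of the real `CargoTargetSpec`:

```rust
/// Sketch: assemble the test-runner arguments; `--exact` is only added for a
/// fully qualified test id, mirroring the `TestId::Path` check above.
fn test_args(test_id: &str, is_path: bool) -> Vec<String> {
    let mut args = vec!["test".to_string(), "--".to_string(), test_id.to_string()];
    if is_path {
        args.push("--exact".to_string());
    }
    args.push("--nocapture".to_string());
    args
}

fn main() {
    assert_eq!(
        test_args("foo::test_mod::test_foo1", true),
        ["test", "--", "foo::test_mod::test_foo1", "--exact", "--nocapture"]
    );
}
```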
diff --git a/crates/ra_lsp_server/src/config.rs b/crates/ra_lsp_server/src/config.rs
index 2d7948d74..3314269ec 100644
--- a/crates/ra_lsp_server/src/config.rs
+++ b/crates/ra_lsp_server/src/config.rs
@@ -44,6 +44,8 @@ pub struct ServerConfig {
44 /// Fine grained feature flags to disable specific features. 44 /// Fine grained feature flags to disable specific features.
45 pub feature_flags: FxHashMap<String, bool>, 45 pub feature_flags: FxHashMap<String, bool>,
46 46
47 pub rustfmt_args: Vec<String>,
48
47 /// Cargo feature configurations. 49 /// Cargo feature configurations.
48 pub cargo_features: CargoFeatures, 50 pub cargo_features: CargoFeatures,
49} 51}
@@ -63,6 +65,7 @@ impl Default for ServerConfig {
63 with_sysroot: true, 65 with_sysroot: true,
64 feature_flags: FxHashMap::default(), 66 feature_flags: FxHashMap::default(),
65 cargo_features: Default::default(), 67 cargo_features: Default::default(),
68 rustfmt_args: Vec::new(),
66 } 69 }
67 } 70 }
68} 71}
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs
index c8a017c5c..ed2eaabd4 100644
--- a/crates/ra_lsp_server/src/main.rs
+++ b/crates/ra_lsp_server/src/main.rs
@@ -15,13 +15,8 @@ fn main() -> Result<()> {
15 15
16fn setup_logging() -> Result<()> { 16fn setup_logging() -> Result<()> {
17 std::env::set_var("RUST_BACKTRACE", "short"); 17 std::env::set_var("RUST_BACKTRACE", "short");
18
19 env_logger::try_init()?; 18 env_logger::try_init()?;
20 19 ra_prof::init();
21 ra_prof::set_filter(match std::env::var("RA_PROFILE") {
22 Ok(spec) => ra_prof::Filter::from_spec(&spec),
23 Err(_) => ra_prof::Filter::disabled(),
24 });
25 Ok(()) 20 Ok(())
26} 21}
27 22
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs
index ceff82fda..7ae2e1e6f 100644
--- a/crates/ra_lsp_server/src/main_loop.rs
+++ b/crates/ra_lsp_server/src/main_loop.rs
@@ -178,6 +178,7 @@ pub fn main_loop(
178 command: config.cargo_watch_command, 178 command: config.cargo_watch_command,
179 all_targets: config.cargo_watch_all_targets, 179 all_targets: config.cargo_watch_all_targets,
180 }, 180 },
181 rustfmt_args: config.rustfmt_args,
181 } 182 }
182 }; 183 };
183 184
@@ -526,6 +527,7 @@ fn on_request(
526 .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)? 527 .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
527 .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)? 528 .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
528 .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)? 529 .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
530 .on::<req::Ssr>(handlers::handle_ssr)?
529 .finish(); 531 .finish();
530 Ok(()) 532 Ok(())
531} 533}
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 2e598fdcd..ae51141cb 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -590,6 +590,7 @@ pub fn handle_formatting(
590 let end_position = TextUnit::of_str(&file).conv_with(&file_line_index); 590 let end_position = TextUnit::of_str(&file).conv_with(&file_line_index);
591 591
592 let mut rustfmt = process::Command::new("rustfmt"); 592 let mut rustfmt = process::Command::new("rustfmt");
593 rustfmt.args(&world.options.rustfmt_args);
593 if let Some(&crate_id) = crate_ids.first() { 594 if let Some(&crate_id) = crate_ids.first() {
594 // Assume all crates are in the same edition 595 // Assume all crates are in the same edition
595 let edition = world.analysis().crate_edition(crate_id)?; 596 let edition = world.analysis().crate_edition(crate_id)?;
@@ -881,6 +882,11 @@ pub fn handle_document_highlight(
881 )) 882 ))
882} 883}
883 884
885pub fn handle_ssr(world: WorldSnapshot, params: req::SsrParams) -> Result<req::SourceChange> {
886 let _p = profile("handle_ssr");
887 world.analysis().structural_search_replace(&params.arg)??.try_conv_with(&world)
888}
889
884pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> { 890pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> {
885 let _p = profile("publish_diagnostics"); 891 let _p = profile("publish_diagnostics");
886 let line_index = world.analysis().file_line_index(file_id)?; 892 let line_index = world.analysis().file_line_index(file_id)?;
@@ -918,9 +924,9 @@ fn to_lsp_runnable(
918 let args = runnable_args(world, file_id, &runnable.kind)?; 924 let args = runnable_args(world, file_id, &runnable.kind)?;
919 let line_index = world.analysis().file_line_index(file_id)?; 925 let line_index = world.analysis().file_line_index(file_id)?;
920 let label = match &runnable.kind { 926 let label = match &runnable.kind {
921 RunnableKind::Test { name } => format!("test {}", name), 927 RunnableKind::Test { test_id } => format!("test {}", test_id),
922 RunnableKind::TestMod { path } => format!("test-mod {}", path), 928 RunnableKind::TestMod { path } => format!("test-mod {}", path),
923 RunnableKind::Bench { name } => format!("bench {}", name), 929 RunnableKind::Bench { test_id } => format!("bench {}", test_id),
924 RunnableKind::Bin => "run binary".to_string(), 930 RunnableKind::Bin => "run binary".to_string(),
925 }; 931 };
926 Ok(req::Runnable { 932 Ok(req::Runnable {
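
`handle_formatting` now prepends the user-configured `rustfmt_args` to the `rustfmt` invocation. A minimal sketch of that pattern with `std::process::Command`; the `--config hard_tabs=true` argument is only an illustrative value, not a rust-analyzer default:

```rust
use std::process::{Command, Stdio};

/// Sketch: build a rustfmt command with extra user-supplied arguments first,
/// then the edition flag, with stdin/stdout piped as the handler does.
fn rustfmt_command(extra_args: &[String], edition: &str) -> Command {
    let mut cmd = Command::new("rustfmt");
    cmd.args(extra_args);
    cmd.args(&["--edition", edition]);
    cmd.stdin(Stdio::piped()).stdout(Stdio::piped());
    cmd
}

fn main() {
    let cmd = rustfmt_command(&["--config".to_string(), "hard_tabs=true".to_string()], "2018");
    println!("{:?}", cmd);
}
```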
diff --git a/crates/ra_lsp_server/src/req.rs b/crates/ra_lsp_server/src/req.rs
index dc327f53d..7ff7f60b3 100644
--- a/crates/ra_lsp_server/src/req.rs
+++ b/crates/ra_lsp_server/src/req.rs
@@ -206,3 +206,16 @@ pub struct InlayHint {
206 pub kind: InlayKind, 206 pub kind: InlayKind,
207 pub label: String, 207 pub label: String,
208} 208}
209
210pub enum Ssr {}
211
212impl Request for Ssr {
213 type Params = SsrParams;
214 type Result = SourceChange;
215 const METHOD: &'static str = "rust-analyzer/ssr";
216}
217
218#[derive(Debug, Deserialize, Serialize)]
219pub struct SsrParams {
220 pub arg: String,
221}
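
The new `rust-analyzer/ssr` request carries a single string argument. A minimal sketch of the params round-tripping through JSON, assuming the `serde`/`serde_json` crates with the derive feature (which the server already uses):

```rust
use serde::{Deserialize, Serialize};

/// Sketch: the wire shape of the SSR request parameters.
#[derive(Debug, Serialize, Deserialize)]
struct SsrParams {
    arg: String,
}

fn main() {
    let params = SsrParams { arg: "foo($a:expr) ==>> bar($a)".to_string() };
    let json = serde_json::to_string(&params).unwrap();
    assert_eq!(json, r#"{"arg":"foo($a:expr) ==>> bar($a)"}"#);
    let back: SsrParams = serde_json::from_str(&json).unwrap();
    assert_eq!(back.arg, params.arg);
}
```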
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs
index 1ee02b47c..d993c5fc4 100644
--- a/crates/ra_lsp_server/src/world.rs
+++ b/crates/ra_lsp_server/src/world.rs
@@ -34,6 +34,7 @@ pub struct Options {
34 pub supports_location_link: bool, 34 pub supports_location_link: bool,
35 pub line_folding_only: bool, 35 pub line_folding_only: bool,
36 pub max_inlay_hint_length: Option<usize>, 36 pub max_inlay_hint_length: Option<usize>,
37 pub rustfmt_args: Vec<String>,
37 pub cargo_watch: CheckOptions, 38 pub cargo_watch: CheckOptions,
38} 39}
39 40
diff --git a/crates/ra_lsp_server/tests/heavy_tests/main.rs b/crates/ra_lsp_server/tests/heavy_tests/main.rs
index dff63a12d..9ca31cbcc 100644
--- a/crates/ra_lsp_server/tests/heavy_tests/main.rs
+++ b/crates/ra_lsp_server/tests/heavy_tests/main.rs
@@ -147,7 +147,7 @@ fn main() {}
147 }, 147 },
148 json!([ 148 json!([
149 { 149 {
150 "args": [ "test", "--package", "foo", "--test", "spam", "--", "test_eggs", "--nocapture" ], 150 "args": [ "test", "--package", "foo", "--test", "spam", "--", "test_eggs", "--exact", "--nocapture" ],
151 "bin": "cargo", 151 "bin": "cargo",
152 "env": { "RUST_BACKTRACE": "short" }, 152 "env": { "RUST_BACKTRACE": "short" },
153 "label": "test test_eggs", 153 "label": "test test_eggs",
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs
index d38ff397e..c0bfbc2ee 100644
--- a/crates/ra_prof/src/lib.rs
+++ b/crates/ra_prof/src/lib.rs
@@ -26,6 +26,13 @@ pub use crate::memory_usage::{Bytes, MemoryUsage};
26#[global_allocator] 26#[global_allocator]
27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; 27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
28 28
29pub fn init() {
30 set_filter(match std::env::var("RA_PROFILE") {
31 Ok(spec) => Filter::from_spec(&spec),
32 Err(_) => Filter::disabled(),
33 });
34}
35
29/// Set profiling filter. It specifies descriptions allowed to profile. 36/// Set profiling filter. It specifies descriptions allowed to profile.
30/// This is helpful when call stack has too many nested profiling scopes. 37/// This is helpful when call stack has too many nested profiling scopes.
31/// Additionally filter can specify maximum depth of profiling scopes nesting. 38/// Additionally filter can specify maximum depth of profiling scopes nesting.
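
`ra_prof::init` centralizes reading the `RA_PROFILE` environment variable so the server only needs one call at startup. A minimal sketch of the same read-with-fallback pattern; the returned string is a stand-in, not the real `Filter` type:

```rust
/// Sketch: pick a profiling spec from RA_PROFILE, falling back to a disabled
/// profile when the variable is not set.
fn profile_spec() -> String {
    std::env::var("RA_PROFILE").unwrap_or_else(|_| "disabled".to_string())
}

fn main() {
    // With RA_PROFILE unset this prints "disabled"; exporting a filter spec
    // before launching the server enables profiling output.
    println!("profiling spec: {}", profile_spec());
}
```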
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index 862eb1172..89d1403e7 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -84,7 +84,7 @@ pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
84 let token = token(op); 84 let token = token(op);
85 expr_from_text(&format!("{}{}", token, expr.syntax())) 85 expr_from_text(&format!("{}{}", token, expr.syntax()))
86} 86}
87fn expr_from_text(text: &str) -> ast::Expr { 87pub fn expr_from_text(text: &str) -> ast::Expr {
88 ast_from_text(&format!("const C: () = {};", text)) 88 ast_from_text(&format!("const C: () = {};", text))
89} 89}
90 90
diff --git a/docs/dev/debugging.md b/docs/dev/debugging.md
index 1ccf4dca2..e6b082156 100644
--- a/docs/dev/debugging.md
+++ b/docs/dev/debugging.md
@@ -1,44 +1,66 @@
1# Debugging vs Code plugin and the Language Server 1# Debugging VSCode plugin and the language server
2 2
3**NOTE:** the information here is mostly obsolete 3## Prerequisites
4 4
5Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb). 5- Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb).
6- Open the root folder in VSCode. Here you can access the preconfigured debug setups.
6 7
7Checkout rust rust-analyzer and open it in vscode. 8 <img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
9
10- Install all TypeScript dependencies
11 ```bash
12 cd editors/code
13 npm install
14 ```
15
16## Common knowledge
17
18* All debug configurations open a new `[Extension Development Host]` VSCode instance
19where **only** the `rust-analyzer` extension being debugged is enabled.
20* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
8 21
9```
10$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
11$ cd rust-analyzer
12$ code .
13```
14 22
15- To attach to the `lsp server` in linux you'll have to run: 23## Debug TypeScript VSCode extension
16 24
17 `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope` 25- `Run Extension` - runs the extension with the globally installed `ra_lsp_server` binary.
26- `Run Extension (Dev Server)` - runs extension with the locally built LSP server (`target/debug/ra_lsp_server`).
27
28TypeScript debugging is configured to watch your source edits and recompile.
29To apply changes to an already running debug process press <kbd>Ctrl+Shift+P</kbd> and run the following command in your `[Extension Development Host]`
30
31```
32> Developer: Reload Window
33```
18 34
19 This enables ptrace on non forked processes 35## Debug Rust LSP server
20 36
21- Ensure the dependencies for the extension are installed, run the `npm: install - editors/code` task in vscode. 37- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
22 38
23- Launch the `Debug Extension`, this will build the extension and the `lsp server`. 39 ```
40 echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
41 ```
24 42
25- A new instance of vscode with `[Extension Development Host]` in the title.
26 43
27 Don't worry about disabling `rls` all other extensions will be disabled but this one. 44- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
45 ```toml
46 [profile.dev]
47 debug = 2
48 ```
28 49
29- In the new vscode instance open a rust project, and navigate to a rust file 50- Select `Run Extension (Dev Server)` to run your locally built `target/debug/ra_lsp_server`.
30 51
31- In the original vscode start an additional debug session (the three periods in the launch) and select `Debug Lsp Server`. 52- In the original VSCode window once again select the `Attach To Server` debug configuration.
32 53
33- A list of running processes should appear select the `ra_lsp_server` from this repo. 54- A list of running processes should appear. Select the `ra_lsp_server` from this repo.
34 55
35- Navigate to `crates/ra_lsp_server/src/main_loop.rs` and add a breakpoint to the `on_task` function. 56- Navigate to `crates/ra_lsp_server/src/main_loop.rs` and add a breakpoint to the `on_task` function.
36 57
37- Go back to the `[Extension Development Host]` instance and hover over a rust variable and your breakpoint should hit. 58- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
38 59
39## Demo 60## Demo
40 61
41![demonstration of debugging](https://user-images.githubusercontent.com/1711539/51384036-254fab80-1b2c-11e9-824d-95f9a6e9cf4f.gif) 62- [Debugging TypeScript VScode extension](https://www.youtube.com/watch?v=T-hvpK6s4wM).
63- [Debugging Rust LSP server](https://www.youtube.com/watch?v=EaNb5rg4E0M).
42 64
43## Troubleshooting 65## Troubleshooting
44 66
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
index 867aae975..57a8cbe31 100644
--- a/docs/user/readme.adoc
+++ b/docs/user/readme.adoc
@@ -19,6 +19,13 @@ https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc
19In theory, one should be able to just install the server binary and have it automatically work with any editor. 19In theory, one should be able to just install the server binary and have it automatically work with any editor.
20We are not there yet, so some editor specific setup is required. 20We are not there yet, so some editor specific setup is required.
21 21
22Additionally, rust-analyzer needs sources of the standard library.
 23This command adds them:
24
25```bash
26$ rustup component add rust-src
27```
28
22=== VS Code 29=== VS Code
23 30
 24This is the best supported editor at the moment. 31This is the best supported editor at the moment.
@@ -27,8 +34,9 @@ https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree]
27 34
28You can install the latest release of the plugin from 35You can install the latest release of the plugin from
29https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace]. 36https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace].
30By default, the plugin will download the latest version of the server as well. 37By default, the plugin will download the matching version of the server as well.
31 38
39// FIXME: update the image (its text has changed)
32image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[] 40image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[]
33 41
34The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`. 42The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`.
@@ -37,9 +45,7 @@ Note that we only support the latest version of VS Code.
37 45
38==== Updates 46==== Updates
39 47
40The extension will be updated automatically as new versions become available. 48The extension will be updated automatically as new versions become available. It will ask your permission to download the matching language server version binary if needed.
41The server update functionality is in progress.
42For the time being, the workaround is to remove the binary from `globalStorage` and to restart the extension.
43 49
44==== Building From Source 50==== Building From Source
45 51
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index c74078735..22aa63c9d 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -1,6 +1,6 @@
1{ 1{
2 "name": "rust-analyzer", 2 "name": "rust-analyzer",
3 "version": "0.2.0-dev", 3 "version": "0.2.20200211-dev",
4 "lockfileVersion": 1, 4 "lockfileVersion": 1,
5 "requires": true, 5 "requires": true,
6 "dependencies": { 6 "dependencies": {
@@ -77,9 +77,9 @@
77 "dev": true 77 "dev": true
78 }, 78 },
79 "@types/node": { 79 "@types/node": {
80 "version": "12.12.25", 80 "version": "12.12.27",
81 "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.25.tgz", 81 "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.27.tgz",
82 "integrity": "sha512-nf1LMGZvgFX186geVZR1xMZKKblJiRfiASTHw85zED2kI1yDKHDwTKMdkaCbTlXoRKlGKaDfYywt+V0As30q3w==", 82 "integrity": "sha512-odQFl/+B9idbdS0e8IxDl2ia/LP8KZLXhV3BUeI98TrZp0uoIzQPhGd+5EtzHmT0SMOIaPd7jfz6pOHLWTtl7A==",
83 "dev": true 83 "dev": true
84 }, 84 },
85 "@types/node-fetch": { 85 "@types/node-fetch": {
@@ -682,9 +682,9 @@
682 } 682 }
683 }, 683 },
684 "rollup": { 684 "rollup": {
685 "version": "1.31.0", 685 "version": "1.31.1",
686 "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.31.0.tgz", 686 "resolved": "https://registry.npmjs.org/rollup/-/rollup-1.31.1.tgz",
687 "integrity": "sha512-9C6ovSyNeEwvuRuUUmsTpJcXac1AwSL1a3x+O5lpmQKZqi5mmrjauLeqIjvREC+yNRR8fPdzByojDng+af3nVw==", 687 "integrity": "sha512-2JREN1YdrS/kpPzEd33ZjtuNbOuBC3ePfuZBdKEybvqcEcszW1ckyVqzcEiEe0nE8sqHK+pbJg+PsAgRJ8+1dg==",
688 "dev": true, 688 "dev": true,
689 "requires": { 689 "requires": {
690 "@types/estree": "*", 690 "@types/estree": "*",
diff --git a/editors/code/package.json b/editors/code/package.json
index db1fe5189..8f24a13f5 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -6,7 +6,7 @@
6 "private": true, 6 "private": true,
7 "icon": "icon.png", 7 "icon": "icon.png",
8 "//": "The real version is in release.yaml, this one just needs to be bigger", 8 "//": "The real version is in release.yaml, this one just needs to be bigger",
9 "version": "0.2.0-dev", 9 "version": "0.2.20200211-dev",
10 "publisher": "matklad", 10 "publisher": "matklad",
11 "repository": { 11 "repository": {
12 "url": "https://github.com/rust-analyzer/rust-analyzer.git", 12 "url": "https://github.com/rust-analyzer/rust-analyzer.git",
@@ -33,11 +33,11 @@
33 "devDependencies": { 33 "devDependencies": {
34 "@rollup/plugin-commonjs": "^11.0.2", 34 "@rollup/plugin-commonjs": "^11.0.2",
35 "@rollup/plugin-node-resolve": "^7.1.1", 35 "@rollup/plugin-node-resolve": "^7.1.1",
36 "@types/node": "^12.12.25", 36 "@types/node": "^12.12.27",
37 "@types/node-fetch": "^2.5.4", 37 "@types/node-fetch": "^2.5.4",
38 "@types/throttle-debounce": "^2.1.0", 38 "@types/throttle-debounce": "^2.1.0",
39 "@types/vscode": "^1.42.0", 39 "@types/vscode": "^1.42.0",
40 "rollup": "^1.31.0", 40 "rollup": "^1.31.1",
41 "tslib": "^1.10.0", 41 "tslib": "^1.10.0",
42 "tslint": "^5.20.1", 42 "tslint": "^5.20.1",
43 "typescript": "^3.7.5", 43 "typescript": "^3.7.5",
@@ -124,6 +124,11 @@
124 "command": "rust-analyzer.onEnter", 124 "command": "rust-analyzer.onEnter",
125 "title": "Enhanced enter key", 125 "title": "Enhanced enter key",
126 "category": "Rust Analyzer" 126 "category": "Rust Analyzer"
127 },
128 {
129 "command": "rust-analyzer.ssr",
130 "title": "Structural Search Replace",
131 "category": "Rust Analyzer"
127 } 132 }
128 ], 133 ],
129 "keybindings": [ 134 "keybindings": [
@@ -182,9 +187,20 @@
182 }, 187 },
183 "rust-analyzer.excludeGlobs": { 188 "rust-analyzer.excludeGlobs": {
184 "type": "array", 189 "type": "array",
190 "items": {
191 "type": "string"
192 },
185 "default": [], 193 "default": [],
186 "description": "Paths to exclude from analysis" 194 "description": "Paths to exclude from analysis"
187 }, 195 },
196 "rust-analyzer.rustfmtArgs": {
197 "type": "array",
198 "items": {
199 "type": "string"
200 },
201 "default": [],
202 "description": "Additional arguments to rustfmt"
203 },
188 "rust-analyzer.useClientWatching": { 204 "rust-analyzer.useClientWatching": {
189 "type": "boolean", 205 "type": "boolean",
190 "default": true, 206 "default": true,
@@ -197,6 +213,9 @@
197 }, 213 },
198 "rust-analyzer.cargo-watch.arguments": { 214 "rust-analyzer.cargo-watch.arguments": {
199 "type": "array", 215 "type": "array",
216 "items": {
217 "type": "string"
218 },
200 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )", 219 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )",
201 "default": [] 220 "default": []
202 }, 221 },
@@ -228,10 +247,12 @@
228 }, 247 },
229 "rust-analyzer.lruCapacity": { 248 "rust-analyzer.lruCapacity": {
230 "type": [ 249 "type": [
231 "number", 250 "null",
232 "null" 251 "integer"
233 ], 252 ],
234 "default": null, 253 "default": null,
254 "minimum": 0,
255 "exclusiveMinimum": true,
235 "description": "Number of syntax trees rust-analyzer keeps in memory" 256 "description": "Number of syntax trees rust-analyzer keeps in memory"
236 }, 257 },
237 "rust-analyzer.displayInlayHints": { 258 "rust-analyzer.displayInlayHints": {
@@ -240,8 +261,13 @@
240 "description": "Display additional type and parameter information in the editor" 261 "description": "Display additional type and parameter information in the editor"
241 }, 262 },
242 "rust-analyzer.maxInlayHintLength": { 263 "rust-analyzer.maxInlayHintLength": {
243 "type": "number", 264 "type": [
265 "null",
266 "integer"
267 ],
244 "default": 20, 268 "default": 20,
269 "minimum": 0,
270 "exclusiveMinimum": true,
245 "description": "Maximum length for inlay hints" 271 "description": "Maximum length for inlay hints"
246 }, 272 },
247 "rust-analyzer.cargoFeatures.noDefaultFeatures": { 273 "rust-analyzer.cargoFeatures.noDefaultFeatures": {
@@ -256,6 +282,9 @@
256 }, 282 },
257 "rust-analyzer.cargoFeatures.features": { 283 "rust-analyzer.cargoFeatures.features": {
258 "type": "array", 284 "type": "array",
285 "items": {
286 "type": "string"
287 },
259 "default": [], 288 "default": [],
260 "description": "List of features to activate" 289 "description": "List of features to activate"
261 } 290 }
diff --git a/editors/code/rollup.config.js b/editors/code/rollup.config.js
index f8d320f46..337385a24 100644
--- a/editors/code/rollup.config.js
+++ b/editors/code/rollup.config.js
@@ -18,6 +18,7 @@ export default {
18 external: [...nodeBuiltins, 'vscode'], 18 external: [...nodeBuiltins, 'vscode'],
19 output: { 19 output: {
20 file: './out/main.js', 20 file: './out/main.js',
21 format: 'cjs' 21 format: 'cjs',
22 exports: 'named'
22 } 23 }
23}; 24};
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index d2759969b..11894973c 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -1,45 +1,48 @@
1import * as lc from 'vscode-languageclient'; 1import * as lc from 'vscode-languageclient';
2import * as vscode from 'vscode';
2 3
3import { window, workspace } from 'vscode';
4import { Config } from './config'; 4import { Config } from './config';
5import { ensureLanguageServerBinary } from './installation/language_server'; 5import { ensureServerBinary } from './installation/server';
6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed'; 6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
7 7
8export async function createClient(config: Config): Promise<null | lc.LanguageClient> { 8export async function createClient(config: Config): Promise<null | lc.LanguageClient> {
9 // '.' Is the fallback if no folder is open 9 // '.' Is the fallback if no folder is open
10 // TODO?: Workspace folders support Uri's (eg: file://test.txt). 10 // TODO?: Workspace folders support Uri's (eg: file://test.txt).
11 // It might be a good idea to test if the uri points to a file. 11 // It might be a good idea to test if the uri points to a file.
12 const workspaceFolderPath = workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.'; 12 const workspaceFolderPath = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.';
13 13
14 const raLspServerPath = await ensureLanguageServerBinary(config.langServerSource); 14 const serverPath = await ensureServerBinary(config.serverSource);
15 if (!raLspServerPath) return null; 15 if (!serverPath) return null;
16 16
17 const run: lc.Executable = { 17 const run: lc.Executable = {
18 command: raLspServerPath, 18 command: serverPath,
19 options: { cwd: workspaceFolderPath }, 19 options: { cwd: workspaceFolderPath },
20 }; 20 };
21 const serverOptions: lc.ServerOptions = { 21 const serverOptions: lc.ServerOptions = {
22 run, 22 run,
23 debug: run, 23 debug: run,
24 }; 24 };
25 const traceOutputChannel = window.createOutputChannel( 25 const traceOutputChannel = vscode.window.createOutputChannel(
26 'Rust Analyzer Language Server Trace', 26 'Rust Analyzer Language Server Trace',
27 ); 27 );
28 const cargoWatchOpts = config.cargoWatchOptions;
29
28 const clientOptions: lc.LanguageClientOptions = { 30 const clientOptions: lc.LanguageClientOptions = {
29 documentSelector: [{ scheme: 'file', language: 'rust' }], 31 documentSelector: [{ scheme: 'file', language: 'rust' }],
30 initializationOptions: { 32 initializationOptions: {
31 publishDecorations: true, 33 publishDecorations: true,
32 lruCapacity: config.lruCapacity, 34 lruCapacity: config.lruCapacity,
33 maxInlayHintLength: config.maxInlayHintLength, 35 maxInlayHintLength: config.maxInlayHintLength,
34 cargoWatchEnable: config.cargoWatchOptions.enable, 36 cargoWatchEnable: cargoWatchOpts.enable,
35 cargoWatchArgs: config.cargoWatchOptions.arguments, 37 cargoWatchArgs: cargoWatchOpts.arguments,
36 cargoWatchCommand: config.cargoWatchOptions.command, 38 cargoWatchCommand: cargoWatchOpts.command,
37 cargoWatchAllTargets: config.cargoWatchOptions.allTargets, 39 cargoWatchAllTargets: cargoWatchOpts.allTargets,
38 excludeGlobs: config.excludeGlobs, 40 excludeGlobs: config.excludeGlobs,
39 useClientWatching: config.useClientWatching, 41 useClientWatching: config.useClientWatching,
40 featureFlags: config.featureFlags, 42 featureFlags: config.featureFlags,
41 withSysroot: config.withSysroot, 43 withSysroot: config.withSysroot,
42 cargoFeatures: config.cargoFeatures, 44 cargoFeatures: config.cargoFeatures,
45 rustfmtArgs: config.rustfmtArgs,
43 }, 46 },
44 traceOutputChannel, 47 traceOutputChannel,
45 }; 48 };
diff --git a/editors/code/src/commands/index.ts b/editors/code/src/commands/index.ts
index aee969432..b5ebec117 100644
--- a/editors/code/src/commands/index.ts
+++ b/editors/code/src/commands/index.ts
@@ -12,6 +12,7 @@ export * from './parent_module';
12export * from './syntax_tree'; 12export * from './syntax_tree';
13export * from './expand_macro'; 13export * from './expand_macro';
14export * from './runnables'; 14export * from './runnables';
15export * from './ssr';
15 16
16export function collectGarbage(ctx: Ctx): Cmd { 17export function collectGarbage(ctx: Ctx): Cmd {
17 return async () => { 18 return async () => {
diff --git a/editors/code/src/commands/ssr.ts b/editors/code/src/commands/ssr.ts
new file mode 100644
index 000000000..6287bf47b
--- /dev/null
+++ b/editors/code/src/commands/ssr.ts
@@ -0,0 +1,36 @@
1import { Ctx, Cmd } from '../ctx';
2import { applySourceChange, SourceChange } from '../source_change';
3import * as vscode from 'vscode';
4
5export function ssr(ctx: Ctx): Cmd {
6 return async () => {
7 const client = ctx.client;
8 if (!client) return;
9
10 const options: vscode.InputBoxOptions = {
11 placeHolder: "foo($a:expr, $b:expr) ==>> bar($a, foo($b))",
12 prompt: "Enter request",
13 validateInput: (x: string) => {
14 if (x.includes('==>>')) {
15 return null;
16 }
17 return "Enter request: pattern ==>> template"
18 }
19 }
20 const request = await vscode.window.showInputBox(options);
21
22 if (!request) return;
23
24 const ssrRequest: SsrRequest = { arg: request };
25 const change = await client.sendRequest<SourceChange>(
26 'rust-analyzer/ssr',
27 ssrRequest,
28 );
29
30 await applySourceChange(ctx, change);
31 };
32}
33
34interface SsrRequest {
35 arg: string;
36}
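The new command only checks that the input contains the "==>>" separator; parsing the pattern and applying the rewrite happen server-side behind the rust-analyzer/ssr request, whose result is applied through applySourceChange. As an illustration, assuming foo and bar are functions in the edited crate, the placeholder request foo($a:expr, $b:expr) ==>> bar($a, foo($b)) would turn a call foo(x + 1, compute()) into bar(x + 1, foo(compute())), with $a and $b bound to the two argument expressions.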
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 418845436..c3fa788c7 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -16,45 +16,61 @@ export interface CargoFeatures {
16 allFeatures: boolean; 16 allFeatures: boolean;
17 features: string[]; 17 features: string[];
18} 18}
19
20export class Config { 19export class Config {
21 langServerSource!: null | BinarySource; 20 private static readonly rootSection = "rust-analyzer";
21 private static readonly requiresReloadOpts = [
22 "cargoFeatures",
23 "cargo-watch",
24 ]
25 .map(opt => `${Config.rootSection}.${opt}`);
26
27 private static readonly extensionVersion: string = (() => {
28 const packageJsonVersion = vscode
29 .extensions
30 .getExtension("matklad.rust-analyzer")!
31 .packageJSON
32 .version as string; // n.n.YYYYMMDD
33
34 const realVersionRegexp = /^\d+\.\d+\.(\d{4})(\d{2})(\d{2})/;
35 const [, yyyy, mm, dd] = packageJsonVersion.match(realVersionRegexp)!;
36
37 return `${yyyy}-${mm}-${dd}`;
38 })();
39
40 private cfg!: vscode.WorkspaceConfiguration;
41
42 constructor(private readonly ctx: vscode.ExtensionContext) {
43 vscode.workspace.onDidChangeConfiguration(this.onConfigChange, this, ctx.subscriptions);
44 this.refreshConfig();
45 }
22 46
23 highlightingOn = true; 47 private refreshConfig() {
24 rainbowHighlightingOn = false; 48 this.cfg = vscode.workspace.getConfiguration(Config.rootSection);
25 enableEnhancedTyping = true; 49 console.log("Using configuration:", this.cfg);
26 lruCapacity: null | number = null; 50 }
27 displayInlayHints = true;
28 maxInlayHintLength: null | number = null;
29 excludeGlobs: string[] = [];
30 useClientWatching = true;
31 featureFlags: Record<string, boolean> = {};
32 // for internal use
33 withSysroot: null | boolean = null;
34 cargoWatchOptions: CargoWatchOptions = {
35 enable: true,
36 arguments: [],
37 command: '',
38 allTargets: true,
39 };
40 cargoFeatures: CargoFeatures = {
41 noDefaultFeatures: false,
42 allFeatures: true,
43 features: [],
44 };
45 51
46 private prevEnhancedTyping: null | boolean = null; 52 private async onConfigChange(event: vscode.ConfigurationChangeEvent) {
47 private prevCargoFeatures: null | CargoFeatures = null; 53 this.refreshConfig();
48 private prevCargoWatchOptions: null | CargoWatchOptions = null;
49 54
50 constructor(ctx: vscode.ExtensionContext) { 55 const requiresReloadOpt = Config.requiresReloadOpts.find(
51 vscode.workspace.onDidChangeConfiguration(_ => this.refresh(ctx), null, ctx.subscriptions); 56 opt => event.affectsConfiguration(opt)
52 this.refresh(ctx); 57 );
58
59 if (!requiresReloadOpt) return;
60
61 const userResponse = await vscode.window.showInformationMessage(
62 `Changing "${requiresReloadOpt}" requires a reload`,
63 "Reload now"
64 );
65
66 if (userResponse === "Reload now") {
67 vscode.commands.executeCommand("workbench.action.reloadWindow");
68 }
53 } 69 }
54 70
55 private static expandPathResolving(path: string) { 71 private static replaceTildeWithHomeDir(path: string) {
56 if (path.startsWith('~/')) { 72 if (path.startsWith("~/")) {
57 return path.replace('~', os.homedir()); 73 return os.homedir() + path.slice("~".length);
58 } 74 }
59 return path; 75 return path;
60 } 76 }
@@ -64,17 +80,14 @@ export class Config {
64 * `platform` on GitHub releases. (It is also stored under the same name when 80 * `platform` on GitHub releases. (It is also stored under the same name when
65 * downloaded by the extension). 81 * downloaded by the extension).
66 */ 82 */
67 private static prebuiltLangServerFileName( 83 get prebuiltServerFileName(): null | string {
68 platform: NodeJS.Platform,
69 arch: string
70 ): null | string {
71 // See possible `arch` values here: 84 // See possible `arch` values here:
72 // https://nodejs.org/api/process.html#process_process_arch 85 // https://nodejs.org/api/process.html#process_process_arch
73 86
74 switch (platform) { 87 switch (process.platform) {
75 88
76 case "linux": { 89 case "linux": {
77 switch (arch) { 90 switch (process.arch) {
78 case "arm": 91 case "arm":
79 case "arm64": return null; 92 case "arm64": return null;
80 93
@@ -97,29 +110,26 @@ export class Config {
97 } 110 }
98 } 111 }
99 112
100 private static langServerBinarySource( 113 get serverSource(): null | BinarySource {
101 ctx: vscode.ExtensionContext, 114 const serverPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("raLspServerPath");
102 config: vscode.WorkspaceConfiguration
103 ): null | BinarySource {
104 const langServerPath = RA_LSP_DEBUG ?? config.get<null | string>("raLspServerPath");
105 115
106 if (langServerPath) { 116 if (serverPath) {
107 return { 117 return {
108 type: BinarySource.Type.ExplicitPath, 118 type: BinarySource.Type.ExplicitPath,
109 path: Config.expandPathResolving(langServerPath) 119 path: Config.replaceTildeWithHomeDir(serverPath)
110 }; 120 };
111 } 121 }
112 122
113 const prebuiltBinaryName = Config.prebuiltLangServerFileName( 123 const prebuiltBinaryName = this.prebuiltServerFileName;
114 process.platform, process.arch
115 );
116 124
117 if (!prebuiltBinaryName) return null; 125 if (!prebuiltBinaryName) return null;
118 126
119 return { 127 return {
120 type: BinarySource.Type.GithubRelease, 128 type: BinarySource.Type.GithubRelease,
121 dir: ctx.globalStoragePath, 129 dir: this.ctx.globalStoragePath,
122 file: prebuiltBinaryName, 130 file: prebuiltBinaryName,
131 storage: this.ctx.globalState,
132 version: Config.extensionVersion,
123 repo: { 133 repo: {
124 name: "rust-analyzer", 134 name: "rust-analyzer",
125 owner: "rust-analyzer", 135 owner: "rust-analyzer",
@@ -127,158 +137,36 @@ export class Config {
127 }; 137 };
128 } 138 }
129 139
140 // We don't do runtime config validation here for simplicity. More on stackoverflow:
141 // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
142
143 get highlightingOn() { return this.cfg.get("highlightingOn") as boolean; }
144 get rainbowHighlightingOn() { return this.cfg.get("rainbowHighlightingOn") as boolean; }
145 get lruCapacity() { return this.cfg.get("lruCapacity") as null | number; }
146 get displayInlayHints() { return this.cfg.get("displayInlayHints") as boolean; }
147 get maxInlayHintLength() { return this.cfg.get("maxInlayHintLength") as number; }
148 get excludeGlobs() { return this.cfg.get("excludeGlobs") as string[]; }
149 get useClientWatching() { return this.cfg.get("useClientWatching") as boolean; }
150 get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; }
151 get rustfmtArgs() { return this.cfg.get("rustfmtArgs") as string[]; }
152
153 get cargoWatchOptions(): CargoWatchOptions {
154 return {
155 enable: this.cfg.get("cargo-watch.enable") as boolean,
156 arguments: this.cfg.get("cargo-watch.arguments") as string[],
157 allTargets: this.cfg.get("cargo-watch.allTargets") as boolean,
158 command: this.cfg.get("cargo-watch.command") as string,
159 };
160 }
130 161
131 // FIXME: revisit the logic for `if (.has(...)) config.get(...)` set default 162 get cargoFeatures(): CargoFeatures {
132 // values only in one place (i.e. remove default values from non-readonly members declarations) 163 return {
133 private refresh(ctx: vscode.ExtensionContext) { 164 noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean,
134 const config = vscode.workspace.getConfiguration('rust-analyzer'); 165 allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean,
135 166 features: this.cfg.get("cargoFeatures.features") as string[],
136 let requireReloadMessage = null; 167 };
137
138 if (config.has('highlightingOn')) {
139 this.highlightingOn = config.get('highlightingOn') as boolean;
140 }
141
142 if (config.has('rainbowHighlightingOn')) {
143 this.rainbowHighlightingOn = config.get(
144 'rainbowHighlightingOn',
145 ) as boolean;
146 }
147
148 if (config.has('enableEnhancedTyping')) {
149 this.enableEnhancedTyping = config.get(
150 'enableEnhancedTyping',
151 ) as boolean;
152
153 if (this.prevEnhancedTyping === null) {
154 this.prevEnhancedTyping = this.enableEnhancedTyping;
155 }
156 } else if (this.prevEnhancedTyping === null) {
157 this.prevEnhancedTyping = this.enableEnhancedTyping;
158 }
159
160 if (this.prevEnhancedTyping !== this.enableEnhancedTyping) {
161 requireReloadMessage =
162 'Changing enhanced typing setting requires a reload';
163 this.prevEnhancedTyping = this.enableEnhancedTyping;
164 }
165
166 this.langServerSource = Config.langServerBinarySource(ctx, config);
167
168 if (config.has('cargo-watch.enable')) {
169 this.cargoWatchOptions.enable = config.get<boolean>(
170 'cargo-watch.enable',
171 true,
172 );
173 }
174
175 if (config.has('cargo-watch.arguments')) {
176 this.cargoWatchOptions.arguments = config.get<string[]>(
177 'cargo-watch.arguments',
178 [],
179 );
180 }
181
182 if (config.has('cargo-watch.command')) {
183 this.cargoWatchOptions.command = config.get<string>(
184 'cargo-watch.command',
185 '',
186 );
187 }
188
189 if (config.has('cargo-watch.allTargets')) {
190 this.cargoWatchOptions.allTargets = config.get<boolean>(
191 'cargo-watch.allTargets',
192 true,
193 );
194 }
195
196 if (config.has('lruCapacity')) {
197 this.lruCapacity = config.get('lruCapacity') as number;
198 }
199
200 if (config.has('displayInlayHints')) {
201 this.displayInlayHints = config.get('displayInlayHints') as boolean;
202 }
203 if (config.has('maxInlayHintLength')) {
204 this.maxInlayHintLength = config.get(
205 'maxInlayHintLength',
206 ) as number;
207 }
208 if (config.has('excludeGlobs')) {
209 this.excludeGlobs = config.get('excludeGlobs') || [];
210 }
211 if (config.has('useClientWatching')) {
212 this.useClientWatching = config.get('useClientWatching') || true;
213 }
214 if (config.has('featureFlags')) {
215 this.featureFlags = config.get('featureFlags') || {};
216 }
217 if (config.has('withSysroot')) {
218 this.withSysroot = config.get('withSysroot') || false;
219 }
220
221 if (config.has('cargoFeatures.noDefaultFeatures')) {
222 this.cargoFeatures.noDefaultFeatures = config.get(
223 'cargoFeatures.noDefaultFeatures',
224 false,
225 );
226 }
227 if (config.has('cargoFeatures.allFeatures')) {
228 this.cargoFeatures.allFeatures = config.get(
229 'cargoFeatures.allFeatures',
230 true,
231 );
232 }
233 if (config.has('cargoFeatures.features')) {
234 this.cargoFeatures.features = config.get(
235 'cargoFeatures.features',
236 [],
237 );
238 }
239
240 if (
241 this.prevCargoFeatures !== null &&
242 (this.cargoFeatures.allFeatures !==
243 this.prevCargoFeatures.allFeatures ||
244 this.cargoFeatures.noDefaultFeatures !==
245 this.prevCargoFeatures.noDefaultFeatures ||
246 this.cargoFeatures.features.length !==
247 this.prevCargoFeatures.features.length ||
248 this.cargoFeatures.features.some(
249 (v, i) => v !== this.prevCargoFeatures!.features[i],
250 ))
251 ) {
252 requireReloadMessage = 'Changing cargo features requires a reload';
253 }
254 this.prevCargoFeatures = { ...this.cargoFeatures };
255
256 if (this.prevCargoWatchOptions !== null) {
257 const changed =
258 this.cargoWatchOptions.enable !== this.prevCargoWatchOptions.enable ||
259 this.cargoWatchOptions.command !== this.prevCargoWatchOptions.command ||
260 this.cargoWatchOptions.allTargets !== this.prevCargoWatchOptions.allTargets ||
261 this.cargoWatchOptions.arguments.length !== this.prevCargoWatchOptions.arguments.length ||
262 this.cargoWatchOptions.arguments.some(
263 (v, i) => v !== this.prevCargoWatchOptions!.arguments[i],
264 );
265 if (changed) {
266 requireReloadMessage = 'Changing cargo-watch options requires a reload';
267 }
268 }
269 this.prevCargoWatchOptions = { ...this.cargoWatchOptions };
270
271 if (requireReloadMessage !== null) {
272 const reloadAction = 'Reload now';
273 vscode.window
274 .showInformationMessage(requireReloadMessage, reloadAction)
275 .then(selectedAction => {
276 if (selectedAction === reloadAction) {
277 vscode.commands.executeCommand(
278 'workbench.action.reloadWindow',
279 );
280 }
281 });
282 }
283 } 168 }
169
170 // for internal use
171 get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; }
284} 172}
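The rewritten Config above swaps the old copy-on-refresh fields for getters that read the live WorkspaceConfiguration on every access, and only prompts for a window reload when one of the options in requiresReloadOpts changes. A minimal sketch of that pattern, using a hypothetical "myExt" section with a single "cacheSize" option (both placeholders, not settings of this extension):

import * as vscode from "vscode";

class LazyConfig {
    private static readonly rootSection = "myExt";              // placeholder section name
    private static readonly requiresReloadOpts = ["cacheSize"]  // options whose change needs a reload
        .map(opt => `${LazyConfig.rootSection}.${opt}`);

    private cfg!: vscode.WorkspaceConfiguration;

    constructor(ctx: vscode.ExtensionContext) {
        vscode.workspace.onDidChangeConfiguration(this.onChange, this, ctx.subscriptions);
        this.refresh();
    }

    private refresh() {
        this.cfg = vscode.workspace.getConfiguration(LazyConfig.rootSection);
    }

    private async onChange(event: vscode.ConfigurationChangeEvent) {
        this.refresh();
        const changed = LazyConfig.requiresReloadOpts.find(opt => event.affectsConfiguration(opt));
        if (!changed) return;
        const answer = await vscode.window.showInformationMessage(
            `Changing "${changed}" requires a reload`, "Reload now");
        if (answer === "Reload now") {
            vscode.commands.executeCommand("workbench.action.reloadWindow");
        }
    }

    // Values are read lazily, so edits to settings.json are picked up without re-caching anything.
    get cacheSize() { return this.cfg.get("cacheSize") as number; }
}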
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 70042a479..ff6245f78 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -60,6 +60,10 @@ export class Ctx {
60 this.pushCleanup(d); 60 this.pushCleanup(d);
61 } 61 }
62 62
63 get globalState(): vscode.Memento {
64 return this.extCtx.globalState;
65 }
66
63 get subscriptions(): Disposable[] { 67 get subscriptions(): Disposable[] {
64 return this.extCtx.subscriptions; 68 return this.extCtx.subscriptions;
65 } 69 }
@@ -87,15 +91,11 @@ export async function sendRequestWithRetry<R>(
87 for (const delay of [2, 4, 6, 8, 10, null]) { 91 for (const delay of [2, 4, 6, 8, 10, null]) {
88 try { 92 try {
89 return await (token ? client.sendRequest(method, param, token) : client.sendRequest(method, param)); 93 return await (token ? client.sendRequest(method, param, token) : client.sendRequest(method, param));
90 } catch (e) { 94 } catch (err) {
91 if ( 95 if (delay === null || err.code !== lc.ErrorCodes.ContentModified) {
92 e.code === lc.ErrorCodes.ContentModified && 96 throw err;
93 delay !== null
94 ) {
95 await sleep(10 * (1 << delay));
96 continue;
97 } 97 }
98 throw e; 98 await sleep(10 * (1 << delay));
99 } 99 }
100 } 100 }
101 throw 'unreachable'; 101 throw 'unreachable';
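The rewritten catch block above keeps the same behaviour as before: retry while the server answers with ContentModified, sleep exponentially longer between attempts, and rethrow on the last attempt or on any other error. A self-contained sketch of that backoff loop, with the language-client specifics factored out into an isRetriable predicate (a made-up helper, not part of the extension):

// Sleep helper equivalent to the one the extension already uses.
const sleep = (ms: number) => new Promise<void>(resolve => setTimeout(resolve, ms));

// Retries `send` with pauses of 40, 160, 640, 2560 and 10240 ms between attempts;
// the trailing `null` marks the last attempt, after which any error is rethrown.
async function withRetry<R>(
    send: () => Promise<R>,
    isRetriable: (err: unknown) => boolean,
): Promise<R> {
    for (const delay of [2, 4, 6, 8, 10, null]) {
        try {
            return await send();
        } catch (err) {
            if (delay === null || !isRetriable(err)) throw err;
            await sleep(10 * (1 << delay));
        }
    }
    throw new Error("unreachable");
}

In sendRequestWithRetry itself the predicate corresponds to err.code === lc.ErrorCodes.ContentModified.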
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts
index 1c019a51b..3896878cd 100644
--- a/editors/code/src/inlay_hints.ts
+++ b/editors/code/src/inlay_hints.ts
@@ -13,7 +13,7 @@ export function activateInlayHints(ctx: Ctx) {
13 13
14 vscode.workspace.onDidChangeTextDocument( 14 vscode.workspace.onDidChangeTextDocument(
15 async event => { 15 async event => {
16 if (event.contentChanges.length !== 0) return; 16 if (event.contentChanges.length === 0) return;
17 if (event.document.languageId !== 'rust') return; 17 if (event.document.languageId !== 'rust') return;
18 await hintsUpdater.refresh(); 18 await hintsUpdater.refresh();
19 }, 19 },
@@ -27,7 +27,9 @@ export function activateInlayHints(ctx: Ctx) {
27 ctx.subscriptions 27 ctx.subscriptions
28 ); 28 );
29 29
30 ctx.onDidRestart(_ => hintsUpdater.setEnabled(ctx.config.displayInlayHints)); 30 // We pass async function though it will not be awaited when called,
31 // thus Promise rejections won't be handled, but this should never throw in fact...
32 ctx.onDidRestart(async _ => hintsUpdater.setEnabled(ctx.config.displayInlayHints));
31} 33}
32 34
33interface InlayHintsParams { 35interface InlayHintsParams {
@@ -36,7 +38,7 @@ interface InlayHintsParams {
36 38
37interface InlayHint { 39interface InlayHint {
38 range: vscode.Range; 40 range: vscode.Range;
39 kind: string; 41 kind: "TypeHint" | "ParameterHint";
40 label: string; 42 label: string;
41} 43}
42 44
@@ -53,7 +55,7 @@ const parameterHintDecorationType = vscode.window.createTextEditorDecorationType
53}); 55});
54 56
55class HintsUpdater { 57class HintsUpdater {
56 private pending: Map<string, vscode.CancellationTokenSource> = new Map(); 58 private pending = new Map<string, vscode.CancellationTokenSource>();
57 private ctx: Ctx; 59 private ctx: Ctx;
58 private enabled: boolean; 60 private enabled: boolean;
59 61
@@ -62,30 +64,36 @@ class HintsUpdater {
62 this.enabled = ctx.config.displayInlayHints; 64 this.enabled = ctx.config.displayInlayHints;
63 } 65 }
64 66
65 async setEnabled(enabled: boolean) { 67 async setEnabled(enabled: boolean): Promise<void> {
66 if (this.enabled == enabled) return; 68 if (this.enabled == enabled) return;
67 this.enabled = enabled; 69 this.enabled = enabled;
68 70
69 if (this.enabled) { 71 if (this.enabled) {
70 await this.refresh(); 72 return await this.refresh();
71 } else {
72 this.allEditors.forEach(it => {
73 this.setTypeDecorations(it, []);
74 this.setParameterDecorations(it, []);
75 });
76 } 73 }
74 this.allEditors.forEach(it => {
75 this.setTypeDecorations(it, []);
76 this.setParameterDecorations(it, []);
77 });
77 } 78 }
78 79
79 async refresh() { 80 async refresh() {
80 if (!this.enabled) return; 81 if (!this.enabled) return;
81 const promises = this.allEditors.map(it => this.refreshEditor(it)); 82 await Promise.all(this.allEditors.map(it => this.refreshEditor(it)));
82 await Promise.all(promises); 83 }
84
85 private get allEditors(): vscode.TextEditor[] {
86 return vscode.window.visibleTextEditors.filter(
87 editor => editor.document.languageId === 'rust',
88 );
83 } 89 }
84 90
85 private async refreshEditor(editor: vscode.TextEditor): Promise<void> { 91 private async refreshEditor(editor: vscode.TextEditor): Promise<void> {
86 const newHints = await this.queryHints(editor.document.uri.toString()); 92 const newHints = await this.queryHints(editor.document.uri.toString());
87 if (newHints == null) return; 93 if (newHints == null) return;
88 const newTypeDecorations = newHints.filter(hint => hint.kind === 'TypeHint') 94
95 const newTypeDecorations = newHints
96 .filter(hint => hint.kind === 'TypeHint')
89 .map(hint => ({ 97 .map(hint => ({
90 range: hint.range, 98 range: hint.range,
91 renderOptions: { 99 renderOptions: {
@@ -96,7 +104,8 @@ class HintsUpdater {
96 })); 104 }));
97 this.setTypeDecorations(editor, newTypeDecorations); 105 this.setTypeDecorations(editor, newTypeDecorations);
98 106
99 const newParameterDecorations = newHints.filter(hint => hint.kind === 'ParameterHint') 107 const newParameterDecorations = newHints
108 .filter(hint => hint.kind === 'ParameterHint')
100 .map(hint => ({ 109 .map(hint => ({
101 range: hint.range, 110 range: hint.range,
102 renderOptions: { 111 renderOptions: {
@@ -108,12 +117,6 @@ class HintsUpdater {
108 this.setParameterDecorations(editor, newParameterDecorations); 117 this.setParameterDecorations(editor, newParameterDecorations);
109 } 118 }
110 119
111 private get allEditors(): vscode.TextEditor[] {
112 return vscode.window.visibleTextEditors.filter(
113 editor => editor.document.languageId === 'rust',
114 );
115 }
116
117 private setTypeDecorations( 120 private setTypeDecorations(
118 editor: vscode.TextEditor, 121 editor: vscode.TextEditor,
119 decorations: vscode.DecorationOptions[], 122 decorations: vscode.DecorationOptions[],
@@ -137,12 +140,14 @@ class HintsUpdater {
137 private async queryHints(documentUri: string): Promise<InlayHint[] | null> { 140 private async queryHints(documentUri: string): Promise<InlayHint[] | null> {
138 const client = this.ctx.client; 141 const client = this.ctx.client;
139 if (!client) return null; 142 if (!client) return null;
143
140 const request: InlayHintsParams = { 144 const request: InlayHintsParams = {
141 textDocument: { uri: documentUri }, 145 textDocument: { uri: documentUri },
142 }; 146 };
143 const tokenSource = new vscode.CancellationTokenSource(); 147 const tokenSource = new vscode.CancellationTokenSource();
144 const prev = this.pending.get(documentUri); 148 const prevHintsRequest = this.pending.get(documentUri);
145 if (prev) prev.cancel(); 149 prevHintsRequest?.cancel();
150
146 this.pending.set(documentUri, tokenSource); 151 this.pending.set(documentUri, tokenSource);
147 try { 152 try {
148 return await sendRequestWithRetry<InlayHint[] | null>( 153 return await sendRequestWithRetry<InlayHint[] | null>(
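queryHints above keeps one CancellationTokenSource per document URI and cancels the previous in-flight request before issuing a new one, so a burst of edits never piles up stale hint requests. A stripped-down sketch of that pattern in isolation (the fetch callback stands in for the actual LSP request):

import * as vscode from "vscode";

const pending = new Map<string, vscode.CancellationTokenSource>();

async function queryLatest<T>(
    key: string,
    fetch: (token: vscode.CancellationToken) => Promise<T>,
): Promise<T> {
    // Cancel whatever request is still running for this key, then take over the slot.
    pending.get(key)?.cancel();
    const source = new vscode.CancellationTokenSource();
    pending.set(key, source);
    try {
        return await fetch(source.token);
    } finally {
        // Clean up only if a newer request has not already replaced this entry.
        if (pending.get(key) === source) pending.delete(key);
        source.dispose();
    }
}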
diff --git a/editors/code/src/installation/download_artifact.ts b/editors/code/src/installation/download_artifact.ts
new file mode 100644
index 000000000..de655f8f4
--- /dev/null
+++ b/editors/code/src/installation/download_artifact.ts
@@ -0,0 +1,58 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { promises as fs } from "fs";
4import { strict as assert } from "assert";
5
6import { ArtifactReleaseInfo } from "./interfaces";
7import { downloadFile } from "./download_file";
8import { throttle } from "throttle-debounce";
9
10/**
11 * Downloads artifact from given `downloadUrl`.
12 * Creates `installationDir` if it is not yet created and puts the artifact under
12 * Creates `installationDir` if it is not yet created and puts the artifact under
13 * `artifactFileName`.
14 * Displays info about the download progress in an info message printing the name
15 * of the artifact as `displayName`.
16 */
17export async function downloadArtifact(
18 {downloadUrl, releaseName}: ArtifactReleaseInfo,
19 artifactFileName: string,
20 installationDir: string,
21 displayName: string,
22) {
23 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
24 err?.code,
25 "EEXIST",
26 `Couldn't create directory "${installationDir}" to download `+
27 `${artifactFileName} artifact: ${err.message}`
28 ));
29
30 const installationPath = path.join(installationDir, artifactFileName);
31
32 console.time(`Downloading ${artifactFileName}`);
33 await vscode.window.withProgress(
34 {
35 location: vscode.ProgressLocation.Notification,
36 cancellable: false, // FIXME: add support for canceling download?
37 title: `Downloading ${displayName} (${releaseName})`
38 },
39 async (progress, _cancellationToken) => {
40 let lastPrecentage = 0;
41 const filePermissions = 0o755; // (rwx, r_x, r_x)
42 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
43 200,
44 /* noTrailing: */ true,
45 (readBytes, totalBytes) => {
46 const newPercentage = (readBytes / totalBytes) * 100;
47 progress.report({
48 message: newPercentage.toFixed(0) + "%",
49 increment: newPercentage - lastPrecentage
50 });
51
52 lastPrecentage = newPercentage;
53 })
54 );
55 }
56 );
57 console.timeEnd(`Downloading ${artifactFileName}`);
58}
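The progress callback in downloadArtifact is throttled so the notification repaints at most once every 200 ms, and each report passes the percentage delta as `increment` rather than the absolute value. A stripped-down sketch of just that reporting plumbing (the download parameter stands in for downloadFile):

import * as vscode from "vscode";
import { throttle } from "throttle-debounce";

async function withDownloadProgress(
    title: string,
    download: (onProgress: (readBytes: number, totalBytes: number) => void) => Promise<void>,
): Promise<void> {
    await vscode.window.withProgress(
        { location: vscode.ProgressLocation.Notification, title },
        async progress => {
            let lastPercentage = 0;
            await download(throttle(200, /* noTrailing: */ true, (readBytes: number, totalBytes: number) => {
                const percentage = (readBytes / totalBytes) * 100;
                // `increment` must be the delta since the previous report.
                progress.report({
                    message: percentage.toFixed(0) + "%",
                    increment: percentage - lastPercentage,
                });
                lastPercentage = percentage;
            }));
        },
    );
}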
diff --git a/editors/code/src/installation/fetch_latest_artifact_metadata.ts b/editors/code/src/installation/fetch_artifact_release_info.ts
index 7e3700603..7d497057a 100644
--- a/editors/code/src/installation/fetch_latest_artifact_metadata.ts
+++ b/editors/code/src/installation/fetch_artifact_release_info.ts
@@ -1,26 +1,32 @@
1import fetch from "node-fetch"; 1import fetch from "node-fetch";
2import { GithubRepo, ArtifactMetadata } from "./interfaces"; 2import { GithubRepo, ArtifactReleaseInfo } from "./interfaces";
3 3
4const GITHUB_API_ENDPOINT_URL = "https://api.github.com"; 4const GITHUB_API_ENDPOINT_URL = "https://api.github.com";
5 5
6
6/** 7/**
7 * Fetches the latest release from GitHub `repo` and returns metadata about 8 * Fetches the release with `releaseTag` (or just latest release when not specified)
8 * `artifactFileName` shipped with this release or `null` if no such artifact was published. 9 * from GitHub `repo` and returns metadata about `artifactFileName` shipped with
10 * this release or `null` if no such artifact was published.
9 */ 11 */
10export async function fetchLatestArtifactMetadata( 12export async function fetchArtifactReleaseInfo(
11 repo: GithubRepo, artifactFileName: string 13 repo: GithubRepo, artifactFileName: string, releaseTag?: string
12): Promise<null | ArtifactMetadata> { 14): Promise<null | ArtifactReleaseInfo> {
13 15
14 const repoOwner = encodeURIComponent(repo.owner); 16 const repoOwner = encodeURIComponent(repo.owner);
15 const repoName = encodeURIComponent(repo.name); 17 const repoName = encodeURIComponent(repo.name);
16 18
17 const apiEndpointPath = `/repos/${repoOwner}/${repoName}/releases/latest`; 19 const apiEndpointPath = releaseTag
20 ? `/repos/${repoOwner}/${repoName}/releases/tags/${releaseTag}`
21 : `/repos/${repoOwner}/${repoName}/releases/latest`;
22
18 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath; 23 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath;
19 24
20 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`) 25 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`)
21 26
22 console.log("Issuing request for released artifacts metadata to", requestUrl); 27 console.log("Issuing request for released artifacts metadata to", requestUrl);
23 28
29 // FIXME: handle non-ok response
24 const response: GithubRelease = await fetch(requestUrl, { 30 const response: GithubRelease = await fetch(requestUrl, {
25 headers: { Accept: "application/vnd.github.v3+json" } 31 headers: { Accept: "application/vnd.github.v3+json" }
26 }) 32 })
diff --git a/editors/code/src/installation/interfaces.ts b/editors/code/src/installation/interfaces.ts
index 8039d0b90..e40839e4b 100644
--- a/editors/code/src/installation/interfaces.ts
+++ b/editors/code/src/installation/interfaces.ts
@@ -1,3 +1,5 @@
1import * as vscode from "vscode";
2
1export interface GithubRepo { 3export interface GithubRepo {
2 name: string; 4 name: string;
3 owner: string; 5 owner: string;
@@ -6,7 +8,7 @@ export interface GithubRepo {
6/** 8/**
7 * Metadata about particular artifact retrieved from GitHub releases. 9 * Metadata about particular artifact retrieved from GitHub releases.
8 */ 10 */
9export interface ArtifactMetadata { 11export interface ArtifactReleaseInfo {
10 releaseName: string; 12 releaseName: string;
11 downloadUrl: string; 13 downloadUrl: string;
12} 14}
@@ -50,6 +52,17 @@ export namespace BinarySource {
50 * and in local `.dir`. 52 * and in local `.dir`.
51 */ 53 */
52 file: string; 54 file: string;
55
56 /**
57 * Tag of github release that denotes a version required by this extension.
58 */
59 version: string;
60
61 /**
62 * Object that provides `get()/update()` operations to store metadata
63 * about the actual binary, e.g. its actual version.
64 */
65 storage: vscode.Memento;
53 } 66 }
54 67
55} 68}
diff --git a/editors/code/src/installation/language_server.ts b/editors/code/src/installation/language_server.ts
deleted file mode 100644
index 4797c3f01..000000000
--- a/editors/code/src/installation/language_server.ts
+++ /dev/null
@@ -1,148 +0,0 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as fs } from "fs";
5import { promises as dns } from "dns";
6import { spawnSync } from "child_process";
7import { throttle } from "throttle-debounce";
8
9import { BinarySource } from "./interfaces";
10import { fetchLatestArtifactMetadata } from "./fetch_latest_artifact_metadata";
11import { downloadFile } from "./download_file";
12
13export async function downloadLatestLanguageServer(
14 {file: artifactFileName, dir: installationDir, repo}: BinarySource.GithubRelease
15) {
16 const { releaseName, downloadUrl } = (await fetchLatestArtifactMetadata(
17 repo, artifactFileName
18 ))!;
19
20 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
21 err?.code,
22 "EEXIST",
23 `Couldn't create directory "${installationDir}" to download `+
24 `language server binary: ${err.message}`
25 ));
26
27 const installationPath = path.join(installationDir, artifactFileName);
28
29 console.time("Downloading ra_lsp_server");
30 await vscode.window.withProgress(
31 {
32 location: vscode.ProgressLocation.Notification,
33 cancellable: false, // FIXME: add support for canceling download?
34 title: `Downloading language server (${releaseName})`
35 },
36 async (progress, _cancellationToken) => {
37 let lastPrecentage = 0;
38 const filePermissions = 0o755; // (rwx, r_x, r_x)
39 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
40 200,
41 /* noTrailing: */ true,
42 (readBytes, totalBytes) => {
43 const newPercentage = (readBytes / totalBytes) * 100;
44 progress.report({
45 message: newPercentage.toFixed(0) + "%",
46 increment: newPercentage - lastPrecentage
47 });
48
49 lastPrecentage = newPercentage;
50 })
51 );
52 }
53 );
54 console.timeEnd("Downloading ra_lsp_server");
55}
56export async function ensureLanguageServerBinary(
57 langServerSource: null | BinarySource
58): Promise<null | string> {
59
60 if (!langServerSource) {
61 vscode.window.showErrorMessage(
62 "Unfortunately we don't ship binaries for your platform yet. " +
63 "You need to manually clone rust-analyzer repository and " +
64 "run `cargo xtask install --server` to build the language server from sources. " +
65 "If you feel that your platform should be supported, please create an issue " +
66 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
67 "will consider it."
68 );
69 return null;
70 }
71
72 switch (langServerSource.type) {
73 case BinarySource.Type.ExplicitPath: {
74 if (isBinaryAvailable(langServerSource.path)) {
75 return langServerSource.path;
76 }
77
78 vscode.window.showErrorMessage(
79 `Unable to run ${langServerSource.path} binary. ` +
80 `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` +
81 "value to `null` or remove it from the settings to use it by default."
82 );
83 return null;
84 }
85 case BinarySource.Type.GithubRelease: {
86 const prebuiltBinaryPath = path.join(langServerSource.dir, langServerSource.file);
87
88 if (isBinaryAvailable(prebuiltBinaryPath)) {
89 return prebuiltBinaryPath;
90 }
91
92 const userResponse = await vscode.window.showInformationMessage(
93 "Language server binary for rust-analyzer was not found. " +
94 "Do you want to download it now?",
95 "Download now", "Cancel"
96 );
97 if (userResponse !== "Download now") return null;
98
99 try {
100 await downloadLatestLanguageServer(langServerSource);
101 } catch (err) {
102 vscode.window.showErrorMessage(
103 `Failed to download language server from ${langServerSource.repo.name} ` +
104 `GitHub repository: ${err.message}`
105 );
106
107 console.error(err);
108
109 dns.resolve('example.com').then(
110 addrs => console.log("DNS resolution for example.com was successful", addrs),
111 err => {
112 console.error(
113 "DNS resolution for example.com failed, " +
114 "there might be an issue with Internet availability"
115 );
116 console.error(err);
117 }
118 );
119
120 return null;
121 }
122
123 if (!isBinaryAvailable(prebuiltBinaryPath)) assert(false,
124 `Downloaded language server binary is not functional.` +
125 `Downloaded from: ${JSON.stringify(langServerSource)}`
126 );
127
128
129 vscode.window.showInformationMessage(
130 "Rust analyzer language server was successfully installed 🦀"
131 );
132
133 return prebuiltBinaryPath;
134 }
135 }
136
137 function isBinaryAvailable(binaryPath: string) {
138 const res = spawnSync(binaryPath, ["--version"]);
139
140 // ACHTUNG! `res` type declaration is inherently wrong, see
141 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
142
143 console.log("Checked binary availablity via --version", res);
144 console.log(binaryPath, "--version output:", res.output?.map(String));
145
146 return res.status === 0;
147 }
148}
diff --git a/editors/code/src/installation/server.ts b/editors/code/src/installation/server.ts
new file mode 100644
index 000000000..80cb719e3
--- /dev/null
+++ b/editors/code/src/installation/server.ts
@@ -0,0 +1,124 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as dns } from "dns";
5import { spawnSync } from "child_process";
6
7import { BinarySource } from "./interfaces";
8import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
9import { downloadArtifact } from "./download_artifact";
10
11export async function ensureServerBinary(source: null | BinarySource): Promise<null | string> {
12 if (!source) {
13 vscode.window.showErrorMessage(
14 "Unfortunately we don't ship binaries for your platform yet. " +
15 "You need to manually clone rust-analyzer repository and " +
16 "run `cargo xtask install --server` to build the language server from sources. " +
17 "If you feel that your platform should be supported, please create an issue " +
18 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
19 "will consider it."
20 );
21 return null;
22 }
23
24 switch (source.type) {
25 case BinarySource.Type.ExplicitPath: {
26 if (isBinaryAvailable(source.path)) {
27 return source.path;
28 }
29
30 vscode.window.showErrorMessage(
31 `Unable to run ${source.path} binary. ` +
32 `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` +
33 "value to `null` or remove it from the settings to use it by default."
34 );
35 return null;
36 }
37 case BinarySource.Type.GithubRelease: {
38 const prebuiltBinaryPath = path.join(source.dir, source.file);
39
40 const installedVersion: null | string = getServerVersion(source.storage);
41 const requiredVersion: string = source.version;
42
43 console.log("Installed version:", installedVersion, "required:", requiredVersion);
44
45 if (isBinaryAvailable(prebuiltBinaryPath) && installedVersion == requiredVersion) {
46 // FIXME: check for new releases and notify the user to update if possible
47 return prebuiltBinaryPath;
48 }
49
50 const userResponse = await vscode.window.showInformationMessage(
51 `Language server version ${source.version} for rust-analyzer is not installed. ` +
52 "Do you want to download it now?",
53 "Download now", "Cancel"
54 );
55 if (userResponse !== "Download now") return null;
56
57 if (!await downloadServer(source)) return null;
58
59 return prebuiltBinaryPath;
60 }
61 }
62}
63
64async function downloadServer(source: BinarySource.GithubRelease): Promise<boolean> {
65 try {
66 const releaseInfo = (await fetchArtifactReleaseInfo(source.repo, source.file, source.version))!;
67
68 await downloadArtifact(releaseInfo, source.file, source.dir, "language server");
69 await setServerVersion(source.storage, releaseInfo.releaseName);
70 } catch (err) {
71 vscode.window.showErrorMessage(
72 `Failed to download language server from ${source.repo.name} ` +
73 `GitHub repository: ${err.message}`
74 );
75
76 console.error(err);
77
78 dns.resolve('example.com').then(
79 addrs => console.log("DNS resolution for example.com was successful", addrs),
80 err => {
81 console.error(
82 "DNS resolution for example.com failed, " +
83 "there might be an issue with Internet availability"
84 );
85 console.error(err);
86 }
87 );
88 return false;
89 }
90
91 if (!isBinaryAvailable(path.join(source.dir, source.file))) assert(false,
92 `Downloaded language server binary is not functional.` +
93 `Downloaded from: ${JSON.stringify(source, null, 4)}`
94 );
95
96 vscode.window.showInformationMessage(
97 "Rust analyzer language server was successfully installed 🦀"
98 );
99
100 return true;
101}
102
103function isBinaryAvailable(binaryPath: string): boolean {
104 const res = spawnSync(binaryPath, ["--version"]);
105
106 // ACHTUNG! `res` type declaration is inherently wrong, see
107 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
108
109 console.log("Checked binary availablity via --version", res);
110 console.log(binaryPath, "--version output:", res.output?.map(String));
111
112 return res.status === 0;
113}
114
115function getServerVersion(storage: vscode.Memento): null | string {
116 const version = storage.get<null | string>("server-version", null);
117 console.log("Get server-version:", version);
118 return version;
119}
120
121async function setServerVersion(storage: vscode.Memento, version: string): Promise<void> {
122 console.log("Set server-version:", version);
123 await storage.update("server-version", version.toString());
124}
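Version bookkeeping in the new server.ts boils down to a get/update pair on the extension's global Memento under the "server-version" key. A minimal sketch of the decision it drives (the binary-availability probe via --version is left out here):

import * as vscode from "vscode";

const VERSION_KEY = "server-version";

// True when the recorded version differs from the one this build of the
// extension expects, i.e. when the prebuilt binary should be (re-)downloaded.
function needsDownload(storage: vscode.Memento, requiredVersion: string): boolean {
    const installed = storage.get<null | string>(VERSION_KEY, null);
    return installed !== requiredVersion;
}

async function recordInstalled(storage: vscode.Memento, version: string): Promise<void> {
    await storage.update(VERSION_KEY, version);
}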
diff --git a/editors/code/src/main.ts b/editors/code/src/main.ts
index 5efce41f4..5a99e96f0 100644
--- a/editors/code/src/main.ts
+++ b/editors/code/src/main.ts
@@ -22,6 +22,7 @@ export async function activate(context: vscode.ExtensionContext) {
22 ctx.registerCommand('run', commands.run); 22 ctx.registerCommand('run', commands.run);
23 ctx.registerCommand('reload', commands.reload); 23 ctx.registerCommand('reload', commands.reload);
24 ctx.registerCommand('onEnter', commands.onEnter); 24 ctx.registerCommand('onEnter', commands.onEnter);
25 ctx.registerCommand('ssr', commands.ssr)
25 26
26 // Internal commands which are invoked by the server. 27 // Internal commands which are invoked by the server.
27 ctx.registerCommand('runSingle', commands.runSingle); 28 ctx.registerCommand('runSingle', commands.runSingle);
diff --git a/editors/code/src/status_display.ts b/editors/code/src/status_display.ts
index 51dbf388b..993e79d70 100644
--- a/editors/code/src/status_display.ts
+++ b/editors/code/src/status_display.ts
@@ -66,9 +66,9 @@ class StatusDisplay implements Disposable {
66 66
67 refreshLabel() { 67 refreshLabel() {
68 if (this.packageName) { 68 if (this.packageName) {
69 this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`; 69 this.statusBarItem.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`;
70 } else { 70 } else {
71 this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command}`; 71 this.statusBarItem.text = `${spinnerFrames[this.i]} cargo ${this.command}`;
72 } 72 }
73 } 73 }
74 74
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 540a66130..00bbabce4 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -127,7 +127,7 @@ fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
127 if !installed_extensions.contains("rust-analyzer") { 127 if !installed_extensions.contains("rust-analyzer") {
128 bail!( 128 bail!(
129 "Could not install the Visual Studio Code extension. \ 129 "Could not install the Visual Studio Code extension. \
130 Please make sure you have at least NodeJS 10.x together with the latest version of VS Code installed and try again." 130 Please make sure you have at least NodeJS 12.x together with the latest version of VS Code installed and try again."
131 ); 131 );
132 } 132 }
133 133