-rw-r--r--  .github/workflows/release.yaml                                  2
-rw-r--r--  .vscode/launch.json                                            26
-rw-r--r--  Cargo.lock                                                      3
-rw-r--r--  Cargo.toml                                                      5
-rw-r--r--  crates/ra_cli/Cargo.toml                                        2
-rw-r--r--  crates/ra_cli/src/analysis_bench.rs                            54
-rw-r--r--  crates/ra_cli/src/analysis_stats.rs                            83
-rw-r--r--  crates/ra_cli/src/main.rs                                      56
-rw-r--r--  crates/ra_hir/src/source_analyzer.rs                           16
-rw-r--r--  crates/ra_hir_def/src/body.rs                                  54
-rw-r--r--  crates/ra_hir_def/src/lib.rs                                   63
-rw-r--r--  crates/ra_hir_def/src/nameres/collector.rs                     77
-rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs                             2
-rw-r--r--  crates/ra_hir_ty/src/tests/coercion.rs                         22
-rw-r--r--  crates/ra_hir_ty/src/traits.rs                                  3
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html                   1
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html           1
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs                        1
-rw-r--r--  crates/ra_ide_db/Cargo.toml                                     1
-rw-r--r--  crates/ra_lsp_server/src/main.rs                                7
-rw-r--r--  crates/ra_prof/src/lib.rs                                       7
-rw-r--r--  docs/dev/debugging.md                                          64
-rw-r--r--  docs/user/readme.adoc                                           7
-rw-r--r--  editors/code/package.json                                      14
-rw-r--r--  editors/code/rollup.config.js                                   3
-rw-r--r--  editors/code/src/client.ts                                      8
-rw-r--r--  editors/code/src/config.ts                                     30
-rw-r--r--  editors/code/src/ctx.ts                                        16
-rw-r--r--  editors/code/src/inlay_hints.ts                                51
-rw-r--r--  editors/code/src/installation/download_artifact.ts             58
-rw-r--r--  editors/code/src/installation/fetch_artifact_release_info.ts (renamed from editors/code/src/installation/fetch_latest_artifact_metadata.ts)  20
-rw-r--r--  editors/code/src/installation/interfaces.ts                    15
-rw-r--r--  editors/code/src/installation/language_server.ts              148
-rw-r--r--  editors/code/src/installation/server.ts                       124
34 files changed, 675 insertions(+), 369 deletions(-)
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index ff7a95ee1..eae4fbcb5 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -190,4 +190,4 @@ jobs:
190 - name: Publish Extension 190 - name: Publish Extension
191 working-directory: ./editors/code 191 working-directory: ./editors/code
192 # token from https://dev.azure.com/rust-analyzer/ 192 # token from https://dev.azure.com/rust-analyzer/
193 run: ./node_modules/vsce/out/vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }} 193 run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 55a2f10f2..b1bd98d4a 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -2,39 +2,61 @@
2 // Use IntelliSense to learn about possible attributes. 2 // Use IntelliSense to learn about possible attributes.
3 // Hover to view descriptions of existing attributes. 3 // Hover to view descriptions of existing attributes.
4 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 4 // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
5
6 // NOTE: --disable-extensions
7 // Disable all installed extensions to increase performance of the debug instance
8 // and prevent potential conflicts with other installed extensions.
9
5 "version": "0.2.0", 10 "version": "0.2.0",
6 "configurations": [ 11 "configurations": [
7 { 12 {
13 // Used for testing the extension with the installed LSP server.
8 "name": "Run Extension", 14 "name": "Run Extension",
9 "type": "extensionHost", 15 "type": "extensionHost",
10 "request": "launch", 16 "request": "launch",
11 "runtimeExecutable": "${execPath}", 17 "runtimeExecutable": "${execPath}",
12 "args": [ 18 "args": [
19 "--disable-extensions",
13 "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 20 "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
14 ], 21 ],
15 "outFiles": [ 22 "outFiles": [
16 "${workspaceFolder}/editors/code/out/**/*.js" 23 "${workspaceFolder}/editors/code/out/**/*.js"
17 ], 24 ],
18 "preLaunchTask": "Build Extension" 25 "preLaunchTask": "Build Extension",
26 "skipFiles": [
27 "<node_internals>/**/*.js"
28 ]
19 }, 29 },
20 { 30 {
31 // Used for testing the extension with a local build of the LSP server (in `target/debug`).
21 "name": "Run Extension (Dev Server)", 32 "name": "Run Extension (Dev Server)",
22 "type": "extensionHost", 33 "type": "extensionHost",
23 "request": "launch", 34 "request": "launch",
24 "runtimeExecutable": "${execPath}", 35 "runtimeExecutable": "${execPath}",
25 "args": [ 36 "args": [
37 "--disable-extensions",
26 "--extensionDevelopmentPath=${workspaceFolder}/editors/code" 38 "--extensionDevelopmentPath=${workspaceFolder}/editors/code"
27 ], 39 ],
28 "outFiles": [ 40 "outFiles": [
29 "${workspaceFolder}/editors/code/out/**/*.js" 41 "${workspaceFolder}/editors/code/out/**/*.js"
30 ], 42 ],
31 "preLaunchTask": "Build Extension", 43 "preLaunchTask": "Build Extension",
44 "skipFiles": [
45 "<node_internals>/**/*.js"
46 ],
32 "env": { 47 "env": {
33 "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server" 48 "__RA_LSP_SERVER_DEBUG": "${workspaceFolder}/target/debug/ra_lsp_server"
34 } 49 }
35 }, 50 },
36 { 51 {
37 "name": "Debug Lsp Server", 52 // Used to attach LLDB to a running LSP server.
53 // NOTE: Might require root permissions. For this run:
54 //
55 // `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope`
56 //
57 // Don't forget to set `debug = 2` in `Cargo.toml` before building the server
58
59 "name": "Attach To Server",
38 "type": "lldb", 60 "type": "lldb",
39 "request": "attach", 61 "request": "attach",
40 "program": "${workspaceFolder}/target/debug/ra_lsp_server", 62 "program": "${workspaceFolder}/target/debug/ra_lsp_server",
diff --git a/Cargo.lock b/Cargo.lock
index f1651edaa..f44e514dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1015,6 +1015,7 @@ name = "ra_cli"
1015version = "0.1.0" 1015version = "0.1.0"
1016dependencies = [ 1016dependencies = [
1017 "env_logger", 1017 "env_logger",
1018 "itertools",
1018 "pico-args", 1019 "pico-args",
1019 "ra_batch", 1020 "ra_batch",
1020 "ra_db", 1021 "ra_db",
@@ -1024,6 +1025,7 @@ dependencies = [
1024 "ra_ide", 1025 "ra_ide",
1025 "ra_prof", 1026 "ra_prof",
1026 "ra_syntax", 1027 "ra_syntax",
1028 "rand 0.7.3",
1027] 1029]
1028 1030
1029[[package]] 1031[[package]]
@@ -1174,7 +1176,6 @@ dependencies = [
1174 "ra_prof", 1176 "ra_prof",
1175 "ra_syntax", 1177 "ra_syntax",
1176 "ra_text_edit", 1178 "ra_text_edit",
1177 "rand 0.7.3",
1178 "rayon", 1179 "rayon",
1179 "rustc-hash", 1180 "rustc-hash",
1180 "superslice", 1181 "superslice",
diff --git a/Cargo.toml b/Cargo.toml
index e5620b1b7..c034e2424 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,3 +31,8 @@ opt-level = 0
31 31
32[patch.'crates-io'] 32[patch.'crates-io']
33# rowan = { path = "../rowan" } 33# rowan = { path = "../rowan" }
34
35[patch.'https://github.com/rust-lang/chalk.git']
36# chalk-solve = { path = "../chalk/chalk-solve" }
37# chalk-rust-ir = { path = "../chalk/chalk-rust-ir" }
38# chalk-ir = { path = "../chalk/chalk-ir" }
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml
index bcd408421..53d4876f6 100644
--- a/crates/ra_cli/Cargo.toml
+++ b/crates/ra_cli/Cargo.toml
@@ -6,8 +6,10 @@ authors = ["rust-analyzer developers"]
6publish = false 6publish = false
7 7
8[dependencies] 8[dependencies]
9itertools = "0.8.0"
9pico-args = "0.3.0" 10pico-args = "0.3.0"
10env_logger = { version = "0.7.1", default-features = false } 11env_logger = { version = "0.7.1", default-features = false }
12rand = { version = "0.7.0", features = ["small_rng"] }
11 13
12ra_syntax = { path = "../ra_syntax" } 14ra_syntax = { path = "../ra_syntax" }
13ra_ide = { path = "../ra_ide" } 15ra_ide = { path = "../ra_ide" }
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs
index 5485a38ff..4835a68ce 100644
--- a/crates/ra_cli/src/analysis_bench.rs
+++ b/crates/ra_cli/src/analysis_bench.rs
@@ -2,6 +2,7 @@
2 2
3use std::{ 3use std::{
4 path::{Path, PathBuf}, 4 path::{Path, PathBuf},
5 str::FromStr,
5 sync::Arc, 6 sync::Arc,
6 time::Instant, 7 time::Instant,
7}; 8};
@@ -14,12 +15,35 @@ use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol};
14 15
15use crate::Result; 16use crate::Result;
16 17
18pub(crate) struct Position {
19 path: PathBuf,
20 line: u32,
21 column: u32,
22}
23
24impl FromStr for Position {
25 type Err = Box<dyn std::error::Error + Send + Sync>;
26 fn from_str(s: &str) -> Result<Self> {
27 let (path_line, column) = rsplit_at_char(s, ':')?;
28 let (path, line) = rsplit_at_char(path_line, ':')?;
29 Ok(Position { path: path.into(), line: line.parse()?, column: column.parse()? })
30 }
31}
32
33fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
34 let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
35 Ok((&s[..idx], &s[idx + 1..]))
36}
37
17pub(crate) enum Op { 38pub(crate) enum Op {
18 Highlight { path: PathBuf }, 39 Highlight { path: PathBuf },
19 Complete { path: PathBuf, line: u32, column: u32 }, 40 Complete(Position),
41 GotoDef(Position),
20} 42}
21 43
22pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> { 44pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
45 ra_prof::init();
46
23 let start = Instant::now(); 47 let start = Instant::now();
24 eprint!("loading: "); 48 eprint!("loading: ");
25 let (mut host, roots) = ra_batch::load_cargo(path)?; 49 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -29,7 +53,7 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
29 let file_id = { 53 let file_id = {
30 let path = match &op { 54 let path = match &op {
31 Op::Highlight { path } => path, 55 Op::Highlight { path } => path,
32 Op::Complete { path, .. } => path, 56 Op::Complete(pos) | Op::GotoDef(pos) => &pos.path,
33 }; 57 };
34 let path = std::env::current_dir()?.join(path).canonicalize()?; 58 let path = std::env::current_dir()?.join(path).canonicalize()?;
35 roots 59 roots
@@ -49,7 +73,7 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
49 .ok_or_else(|| format!("Can't find {:?}", path))? 73 .ok_or_else(|| format!("Can't find {:?}", path))?
50 }; 74 };
51 75
52 match op { 76 match &op {
53 Op::Highlight { .. } => { 77 Op::Highlight { .. } => {
54 let res = do_work(&mut host, file_id, |analysis| { 78 let res = do_work(&mut host, file_id, |analysis| {
55 analysis.diagnostics(file_id).unwrap(); 79 analysis.diagnostics(file_id).unwrap();
@@ -59,16 +83,30 @@ pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
59 println!("\n{}", res); 83 println!("\n{}", res);
60 } 84 }
61 } 85 }
62 Op::Complete { line, column, .. } => { 86 Op::Complete(pos) | Op::GotoDef(pos) => {
87 let is_completion = match op {
88 Op::Complete(..) => true,
89 _ => false,
90 };
91
63 let offset = host 92 let offset = host
64 .analysis() 93 .analysis()
65 .file_line_index(file_id)? 94 .file_line_index(file_id)?
66 .offset(LineCol { line, col_utf16: column }); 95 .offset(LineCol { line: pos.line - 1, col_utf16: pos.column });
67 let file_postion = FilePosition { file_id, offset }; 96 let file_postion = FilePosition { file_id, offset };
68 97
69 let res = do_work(&mut host, file_id, |analysis| analysis.completions(file_postion)); 98 if is_completion {
70 if verbose { 99 let res =
71 println!("\n{:#?}", res); 100 do_work(&mut host, file_id, |analysis| analysis.completions(file_postion));
101 if verbose {
102 println!("\n{:#?}", res);
103 }
104 } else {
105 let res =
106 do_work(&mut host, file_id, |analysis| analysis.goto_definition(file_postion));
107 if verbose {
108 println!("\n{:#?}", res);
109 }
72 } 110 }
73 } 111 }
74 } 112 }
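
The analysis-bench change above replaces the ad-hoc `path:line:column` handling with a `Position` type whose `FromStr` splits the input from the right, peeling off the column and line before treating the remainder as the path. Below is a minimal standalone sketch of that parsing, outside the rust-analyzer crates; the example input path is made up, and this sketch passes the separator through to `rfind` instead of hard-coding `':'`.

```rust
use std::path::PathBuf;
use std::str::FromStr;

// Standalone sketch of the `path:line:column` parsing added above.
// Splitting from the right keeps colons inside the path intact.
#[derive(Debug)]
struct Position {
    path: PathBuf,
    line: u32,
    column: u32,
}

impl FromStr for Position {
    type Err = Box<dyn std::error::Error + Send + Sync>;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (rest, column) = rsplit_at_char(s, ':')?;
        let (path, line) = rsplit_at_char(rest, ':')?;
        Ok(Position { path: path.into(), line: line.parse()?, column: column.parse()? })
    }
}

fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str), String> {
    let idx = s.rfind(c).ok_or_else(|| format!("no `{}` in {}", c, s))?;
    Ok((&s[..idx], &s[idx + 1..]))
}

fn main() {
    let pos: Position = "crates/ra_ide/src/lib.rs:42:7".parse().unwrap();
    assert_eq!(pos.line, 42);
    assert_eq!(pos.column, 7);
    println!("{:?}", pos);
}
```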
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index 833235bff..6d2dd34c6 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -2,6 +2,9 @@
2 2
3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; 3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
4 4
5use itertools::Itertools;
6use rand::{seq::SliceRandom, thread_rng};
7
5use hir::{ 8use hir::{
6 db::{DefDatabase, HirDatabase}, 9 db::{DefDatabase, HirDatabase},
7 AssocItem, Crate, HasSource, HirDisplay, ModuleDef, 10 AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
@@ -19,6 +22,7 @@ pub fn run(
19 path: &Path, 22 path: &Path,
20 only: Option<&str>, 23 only: Option<&str>,
21 with_deps: bool, 24 with_deps: bool,
25 randomize: bool,
22) -> Result<()> { 26) -> Result<()> {
23 let db_load_time = Instant::now(); 27 let db_load_time = Instant::now();
24 let (mut host, roots) = ra_batch::load_cargo(path)?; 28 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -41,7 +45,11 @@ pub fn run(
41 }) 45 })
42 .collect::<HashSet<_>>(); 46 .collect::<HashSet<_>>();
43 47
44 for krate in Crate::all(db) { 48 let mut krates = Crate::all(db);
49 if randomize {
50 krates.shuffle(&mut thread_rng());
51 }
52 for krate in krates {
45 let module = krate.root_module(db).expect("crate without root module"); 53 let module = krate.root_module(db).expect("crate without root module");
46 let file_id = module.definition_source(db).file_id; 54 let file_id = module.definition_source(db).file_id;
47 if members.contains(&db.file_source_root(file_id.original_file(db))) { 55 if members.contains(&db.file_source_root(file_id.original_file(db))) {
@@ -50,6 +58,10 @@ pub fn run(
50 } 58 }
51 } 59 }
52 60
61 if randomize {
62 visit_queue.shuffle(&mut thread_rng());
63 }
64
53 println!("Crates in this dir: {}", num_crates); 65 println!("Crates in this dir: {}", num_crates);
54 let mut num_decls = 0; 66 let mut num_decls = 0;
55 let mut funcs = Vec::new(); 67 let mut funcs = Vec::new();
@@ -79,10 +91,14 @@ pub fn run(
79 println!("Total functions: {}", funcs.len()); 91 println!("Total functions: {}", funcs.len());
80 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage()); 92 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage());
81 93
94 if randomize {
95 funcs.shuffle(&mut thread_rng());
96 }
97
82 let inference_time = Instant::now(); 98 let inference_time = Instant::now();
83 let mut bar = match verbosity { 99 let mut bar = match verbosity {
84 Verbosity::Verbose | Verbosity::Normal => ProgressReport::new(funcs.len() as u64), 100 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
85 Verbosity::Quiet => ProgressReport::hidden(), 101 _ => ProgressReport::new(funcs.len() as u64),
86 }; 102 };
87 103
88 bar.tick(); 104 bar.tick();
@@ -92,7 +108,20 @@ pub fn run(
92 let mut num_type_mismatches = 0; 108 let mut num_type_mismatches = 0;
93 for f in funcs { 109 for f in funcs {
94 let name = f.name(db); 110 let name = f.name(db);
95 let mut msg = format!("processing: {}", name); 111 let full_name = f
112 .module(db)
113 .path_to_root(db)
114 .into_iter()
115 .rev()
116 .filter_map(|it| it.name(db))
117 .chain(Some(f.name(db)))
118 .join("::");
119 if let Some(only_name) = only {
120 if name.to_string() != only_name && full_name != only_name {
121 continue;
122 }
123 }
124 let mut msg = format!("processing: {}", full_name);
96 if verbosity.is_verbose() { 125 if verbosity.is_verbose() {
97 let src = f.source(db); 126 let src = f.source(db);
98 let original_file = src.file_id.original_file(db); 127 let original_file = src.file_id.original_file(db);
@@ -100,15 +129,15 @@ pub fn run(
100 let syntax_range = src.value.syntax().text_range(); 129 let syntax_range = src.value.syntax().text_range();
101 write!(msg, " ({:?} {})", path, syntax_range).unwrap(); 130 write!(msg, " ({:?} {})", path, syntax_range).unwrap();
102 } 131 }
103 bar.set_message(&msg); 132 if verbosity.is_spammy() {
104 if let Some(only_name) = only { 133 bar.println(format!("{}", msg));
105 if name.to_string() != only_name {
106 continue;
107 }
108 } 134 }
135 bar.set_message(&msg);
109 let f_id = FunctionId::from(f); 136 let f_id = FunctionId::from(f);
110 let body = db.body(f_id.into()); 137 let body = db.body(f_id.into());
111 let inference_result = db.infer(f_id.into()); 138 let inference_result = db.infer(f_id.into());
139 let (previous_exprs, previous_unknown, previous_partially_unknown) =
140 (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
112 for (expr_id, _) in body.exprs.iter() { 141 for (expr_id, _) in body.exprs.iter() {
113 let ty = &inference_result[expr_id]; 142 let ty = &inference_result[expr_id];
114 num_exprs += 1; 143 num_exprs += 1;
@@ -125,6 +154,33 @@ pub fn run(
125 num_exprs_partially_unknown += 1; 154 num_exprs_partially_unknown += 1;
126 } 155 }
127 } 156 }
157 if only.is_some() && verbosity.is_spammy() {
158 // in super-verbose mode for just one function, we print every single expression
159 let (_, sm) = db.body_with_source_map(f_id.into());
160 let src = sm.expr_syntax(expr_id);
161 if let Some(src) = src {
162 let original_file = src.file_id.original_file(db);
163 let line_index = host.analysis().file_line_index(original_file).unwrap();
164 let text_range = src.value.either(
165 |it| it.syntax_node_ptr().range(),
166 |it| it.syntax_node_ptr().range(),
167 );
168 let (start, end) = (
169 line_index.line_col(text_range.start()),
170 line_index.line_col(text_range.end()),
171 );
172 bar.println(format!(
173 "{}:{}-{}:{}: {}",
174 start.line + 1,
175 start.col_utf16,
176 end.line + 1,
177 end.col_utf16,
178 ty.display(db)
179 ));
180 } else {
181 bar.println(format!("unknown location: {}", ty.display(db)));
182 }
183 }
128 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { 184 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
129 num_type_mismatches += 1; 185 num_type_mismatches += 1;
130 if verbosity.is_verbose() { 186 if verbosity.is_verbose() {
@@ -164,6 +220,15 @@ pub fn run(
164 } 220 }
165 } 221 }
166 } 222 }
223 if verbosity.is_spammy() {
224 bar.println(format!(
225 "In {}: {} exprs, {} unknown, {} partial",
226 full_name,
227 num_exprs - previous_exprs,
228 num_exprs_unknown - previous_unknown,
229 num_exprs_partially_unknown - previous_partially_unknown
230 ));
231 }
167 bar.inc(1); 232 bar.inc(1);
168 } 233 }
169 bar.finish_and_clear(); 234 bar.finish_and_clear();
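
The `--randomize` flag wired through above shuffles the crate list, the module visit queue, and the collected functions so that inference order no longer depends on crate-graph order. A minimal sketch of that shuffling, assuming a `rand = "0.7"` dependency with default features; the string items stand in for the crates and function IDs that analysis-stats actually shuffles.

```rust
// Minimal sketch of the `--randomize` shuffling used above (rand 0.7, `SliceRandom`).
use rand::seq::SliceRandom;
use rand::thread_rng;

fn main() {
    let mut funcs: Vec<&str> = vec!["foo", "bar::baz", "qux"];
    let randomize = true;
    if randomize {
        // In-place shuffle; iteration order differs on every run.
        funcs.shuffle(&mut thread_rng());
    }
    for f in &funcs {
        println!("processing: {}", f);
    }
}
```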
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 806612c2c..750cbab86 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -16,6 +16,7 @@ type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
16 16
17#[derive(Clone, Copy)] 17#[derive(Clone, Copy)]
18pub enum Verbosity { 18pub enum Verbosity {
19 Spammy,
19 Verbose, 20 Verbose,
20 Normal, 21 Normal,
21 Quiet, 22 Quiet,
@@ -24,7 +25,13 @@ pub enum Verbosity {
24impl Verbosity { 25impl Verbosity {
25 fn is_verbose(self) -> bool { 26 fn is_verbose(self) -> bool {
26 match self { 27 match self {
27 Verbosity::Verbose => true, 28 Verbosity::Verbose | Verbosity::Spammy => true,
29 _ => false,
30 }
31 }
32 fn is_spammy(self) -> bool {
33 match self {
34 Verbosity::Spammy => true,
28 _ => false, 35 _ => false,
29 } 36 }
30 } 37 }
@@ -86,14 +93,18 @@ fn main() -> Result<()> {
86 return Ok(()); 93 return Ok(());
87 } 94 }
88 let verbosity = match ( 95 let verbosity = match (
96 matches.contains(["-vv", "--spammy"]),
89 matches.contains(["-v", "--verbose"]), 97 matches.contains(["-v", "--verbose"]),
90 matches.contains(["-q", "--quiet"]), 98 matches.contains(["-q", "--quiet"]),
91 ) { 99 ) {
92 (false, false) => Verbosity::Normal, 100 (true, _, true) => Err("Invalid flags: -q conflicts with -vv")?,
93 (false, true) => Verbosity::Quiet, 101 (true, _, false) => Verbosity::Spammy,
94 (true, false) => Verbosity::Verbose, 102 (false, false, false) => Verbosity::Normal,
95 (true, true) => Err("Invalid flags: -q conflicts with -v")?, 103 (false, false, true) => Verbosity::Quiet,
104 (false, true, false) => Verbosity::Verbose,
105 (false, true, true) => Err("Invalid flags: -q conflicts with -v")?,
96 }; 106 };
107 let randomize = matches.contains("--randomize");
97 let memory_usage = matches.contains("--memory-usage"); 108 let memory_usage = matches.contains("--memory-usage");
98 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; 109 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?;
99 let with_deps: bool = matches.contains("--with-deps"); 110 let with_deps: bool = matches.contains("--with-deps");
@@ -111,6 +122,7 @@ fn main() -> Result<()> {
111 path.as_ref(), 122 path.as_ref(),
112 only.as_ref().map(String::as_ref), 123 only.as_ref().map(String::as_ref),
113 with_deps, 124 with_deps,
125 randomize,
114 )?; 126 )?;
115 } 127 }
116 "analysis-bench" => { 128 "analysis-bench" => {
@@ -120,25 +132,16 @@ fn main() -> Result<()> {
120 } 132 }
121 let verbose = matches.contains(["-v", "--verbose"]); 133 let verbose = matches.contains(["-v", "--verbose"]);
122 let path: String = matches.opt_value_from_str("--path")?.unwrap_or_default(); 134 let path: String = matches.opt_value_from_str("--path")?.unwrap_or_default();
123 let highlight_path = matches.opt_value_from_str("--highlight")?; 135 let highlight_path: Option<String> = matches.opt_value_from_str("--highlight")?;
124 let complete_path = matches.opt_value_from_str("--complete")?; 136 let complete_path: Option<String> = matches.opt_value_from_str("--complete")?;
125 if highlight_path.is_some() && complete_path.is_some() { 137 let goto_def_path: Option<String> = matches.opt_value_from_str("--goto-def")?;
126 panic!("either --highlight or --complete must be set, not both") 138 let op = match (highlight_path, complete_path, goto_def_path) {
127 } 139 (Some(path), None, None) => analysis_bench::Op::Highlight { path: path.into() },
128 let op = if let Some(path) = highlight_path { 140 (None, Some(position), None) => analysis_bench::Op::Complete(position.parse()?),
129 let path: String = path; 141 (None, None, Some(position)) => analysis_bench::Op::GotoDef(position.parse()?),
130 analysis_bench::Op::Highlight { path: path.into() } 142 _ => panic!(
131 } else if let Some(path_line_col) = complete_path { 143 "exactly one of `--highlight`, `--complete` or `--goto-def` must be set"
132 let path_line_col: String = path_line_col; 144 ),
133 let (path_line, column) = rsplit_at_char(path_line_col.as_str(), ':')?;
134 let (path, line) = rsplit_at_char(path_line, ':')?;
135 analysis_bench::Op::Complete {
136 path: path.into(),
137 line: line.parse()?,
138 column: column.parse()?,
139 }
140 } else {
141 panic!("either --highlight or --complete must be set")
142 }; 145 };
143 matches.finish().or_else(handle_extra_flags)?; 146 matches.finish().or_else(handle_extra_flags)?;
144 analysis_bench::run(verbose, path.as_ref(), op)?; 147 analysis_bench::run(verbose, path.as_ref(), op)?;
@@ -171,8 +174,3 @@ fn read_stdin() -> Result<String> {
171 std::io::stdin().read_to_string(&mut buff)?; 174 std::io::stdin().read_to_string(&mut buff)?;
172 Ok(buff) 175 Ok(buff)
173} 176}
174
175fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> {
176 let idx = s.rfind(':').ok_or_else(|| format!("no `{}` in {}", c, s))?;
177 Ok((&s[..idx], &s[idx + 1..]))
178}
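
The CLI change above extends verbosity handling to a three-flag tuple so the new `-vv`/`--spammy` level can coexist with the existing `-v` and `-q` checks. A small sketch of the same mapping, with plain booleans standing in for the `matches.contains(...)` results from pico-args.

```rust
#[derive(Clone, Copy, Debug)]
enum Verbosity {
    Spammy,
    Verbose,
    Normal,
    Quiet,
}

// Sketch of the flag-to-verbosity mapping above; `spammy`, `verbose`, `quiet`
// stand in for the parsed command-line flags.
fn verbosity(spammy: bool, verbose: bool, quiet: bool) -> Result<Verbosity, String> {
    Ok(match (spammy, verbose, quiet) {
        (true, _, true) => return Err("Invalid flags: -q conflicts with -vv".into()),
        (true, _, false) => Verbosity::Spammy,
        (false, false, false) => Verbosity::Normal,
        (false, false, true) => Verbosity::Quiet,
        (false, true, false) => Verbosity::Verbose,
        (false, true, true) => return Err("Invalid flags: -q conflicts with -v".into()),
    })
}

fn main() {
    println!("{:?}", verbosity(false, true, false)); // Ok(Verbose)
    println!("{:?}", verbosity(true, false, true));  // Err(conflict)
}
```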
diff --git a/crates/ra_hir/src/source_analyzer.rs b/crates/ra_hir/src/source_analyzer.rs
index bb9a35c5d..94d5b4cfd 100644
--- a/crates/ra_hir/src/source_analyzer.rs
+++ b/crates/ra_hir/src/source_analyzer.rs
@@ -15,11 +15,9 @@ use hir_def::{
15 }, 15 },
16 expr::{ExprId, PatId}, 16 expr::{ExprId, PatId},
17 resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs}, 17 resolver::{self, resolver_for_scope, Resolver, TypeNs, ValueNs},
18 DefWithBodyId, TraitId, 18 AsMacroCall, DefWithBodyId, TraitId,
19};
20use hir_expand::{
21 hygiene::Hygiene, name::AsName, AstId, HirFileId, InFile, MacroCallId, MacroCallKind,
22}; 19};
20use hir_expand::{hygiene::Hygiene, name::AsName, HirFileId, InFile, MacroCallId};
23use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment}; 21use hir_ty::{InEnvironment, InferenceResult, TraitEnvironment};
24use ra_syntax::{ 22use ra_syntax::{
25 ast::{self, AstNode}, 23 ast::{self, AstNode},
@@ -363,12 +361,10 @@ impl SourceAnalyzer {
363 db: &impl HirDatabase, 361 db: &impl HirDatabase,
364 macro_call: InFile<&ast::MacroCall>, 362 macro_call: InFile<&ast::MacroCall>,
365 ) -> Option<Expansion> { 363 ) -> Option<Expansion> {
366 let def = self.resolve_macro_call(db, macro_call)?.id; 364 let macro_call_id = macro_call.as_call_id(db, |path| {
367 let ast_id = AstId::new( 365 self.resolver.resolve_path_as_macro(db, &path).map(|it| it.into())
368 macro_call.file_id, 366 })?;
369 db.ast_id_map(macro_call.file_id).ast_id(macro_call.value), 367 Some(Expansion { macro_call_id })
370 );
371 Some(Expansion { macro_call_id: def.as_call_id(db, MacroCallKind::FnLike(ast_id)) })
372 } 368 }
373} 369}
374 370
diff --git a/crates/ra_hir_def/src/body.rs b/crates/ra_hir_def/src/body.rs
index 142c52d35..010d35e55 100644
--- a/crates/ra_hir_def/src/body.rs
+++ b/crates/ra_hir_def/src/body.rs
@@ -7,9 +7,7 @@ use std::{mem, ops::Index, sync::Arc};
7 7
8use drop_bomb::DropBomb; 8use drop_bomb::DropBomb;
9use either::Either; 9use either::Either;
10use hir_expand::{ 10use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroDefId};
11 ast_id_map::AstIdMap, hygiene::Hygiene, AstId, HirFileId, InFile, MacroCallKind, MacroDefId,
12};
13use ra_arena::{map::ArenaMap, Arena}; 11use ra_arena::{map::ArenaMap, Arena};
14use ra_prof::profile; 12use ra_prof::profile;
15use ra_syntax::{ast, AstNode, AstPtr}; 13use ra_syntax::{ast, AstNode, AstPtr};
@@ -23,7 +21,7 @@ use crate::{
23 nameres::CrateDefMap, 21 nameres::CrateDefMap,
24 path::{ModPath, Path}, 22 path::{ModPath, Path},
25 src::HasSource, 23 src::HasSource,
26 DefWithBodyId, HasModule, Lookup, ModuleId, 24 AsMacroCall, DefWithBodyId, HasModule, Lookup, ModuleId,
27}; 25};
28 26
29pub(crate) struct Expander { 27pub(crate) struct Expander {
@@ -51,30 +49,26 @@ impl Expander {
51 db: &DB, 49 db: &DB,
52 macro_call: ast::MacroCall, 50 macro_call: ast::MacroCall,
53 ) -> Option<(Mark, T)> { 51 ) -> Option<(Mark, T)> {
54 let ast_id = AstId::new( 52 let macro_call = InFile::new(self.current_file_id, &macro_call);
55 self.current_file_id, 53
56 db.ast_id_map(self.current_file_id).ast_id(&macro_call), 54 if let Some(call_id) =
57 ); 55 macro_call.as_call_id(db, |path| self.resolve_path_as_macro(db, &path))
58 56 {
59 if let Some(path) = macro_call.path().and_then(|path| self.parse_mod_path(path)) { 57 let file_id = call_id.as_file();
60 if let Some(def) = self.resolve_path_as_macro(db, &path) { 58 if let Some(node) = db.parse_or_expand(file_id) {
61 let call_id = def.as_call_id(db, MacroCallKind::FnLike(ast_id)); 59 if let Some(expr) = T::cast(node) {
62 let file_id = call_id.as_file(); 60 log::debug!("macro expansion {:#?}", expr.syntax());
63 if let Some(node) = db.parse_or_expand(file_id) { 61
64 if let Some(expr) = T::cast(node) { 62 let mark = Mark {
65 log::debug!("macro expansion {:#?}", expr.syntax()); 63 file_id: self.current_file_id,
66 64 ast_id_map: mem::take(&mut self.ast_id_map),
67 let mark = Mark { 65 bomb: DropBomb::new("expansion mark dropped"),
68 file_id: self.current_file_id, 66 };
69 ast_id_map: mem::take(&mut self.ast_id_map), 67 self.hygiene = Hygiene::new(db, file_id);
70 bomb: DropBomb::new("expansion mark dropped"), 68 self.current_file_id = file_id;
71 }; 69 self.ast_id_map = db.ast_id_map(file_id);
72 self.hygiene = Hygiene::new(db, file_id); 70
73 self.current_file_id = file_id; 71 return Some((mark, expr));
74 self.ast_id_map = db.ast_id_map(file_id);
75
76 return Some((mark, expr));
77 }
78 } 72 }
79 } 73 }
80 } 74 }
@@ -99,10 +93,6 @@ impl Expander {
99 Path::from_src(path, &self.hygiene) 93 Path::from_src(path, &self.hygiene)
100 } 94 }
101 95
102 fn parse_mod_path(&mut self, path: ast::Path) -> Option<ModPath> {
103 ModPath::from_src(path, &self.hygiene)
104 }
105
106 fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> { 96 fn resolve_path_as_macro(&self, db: &impl DefDatabase, path: &ModPath) -> Option<MacroDefId> {
107 self.crate_def_map 97 self.crate_def_map
108 .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other) 98 .resolve_path(db, self.module.local_id, path, BuiltinShadowMode::Other)
diff --git a/crates/ra_hir_def/src/lib.rs b/crates/ra_hir_def/src/lib.rs
index feb3a300d..aa0b558b8 100644
--- a/crates/ra_hir_def/src/lib.rs
+++ b/crates/ra_hir_def/src/lib.rs
@@ -46,7 +46,10 @@ mod marks;
46 46
47use std::hash::Hash; 47use std::hash::Hash;
48 48
49use hir_expand::{ast_id_map::FileAstId, AstId, HirFileId, InFile, MacroDefId}; 49use hir_expand::{
50 ast_id_map::FileAstId, db::AstDatabase, hygiene::Hygiene, AstId, HirFileId, InFile,
51 MacroCallId, MacroCallKind, MacroDefId,
52};
50use ra_arena::{impl_arena_id, RawId}; 53use ra_arena::{impl_arena_id, RawId};
51use ra_db::{impl_intern_key, salsa, CrateId}; 54use ra_db::{impl_intern_key, salsa, CrateId};
52use ra_syntax::{ast, AstNode}; 55use ra_syntax::{ast, AstNode};
@@ -413,3 +416,61 @@ impl HasModule for StaticLoc {
413 self.container.module(db) 416 self.container.module(db)
414 } 417 }
415} 418}
419
420/// A helper trait for converting to MacroCallId
421pub trait AsMacroCall {
422 fn as_call_id(
423 &self,
424 db: &(impl db::DefDatabase + AstDatabase),
425 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
426 ) -> Option<MacroCallId>;
427}
428
429impl AsMacroCall for InFile<&ast::MacroCall> {
430 fn as_call_id(
431 &self,
432 db: &(impl db::DefDatabase + AstDatabase),
433 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
434 ) -> Option<MacroCallId> {
435 let ast_id = AstId::new(self.file_id, db.ast_id_map(self.file_id).ast_id(self.value));
436 let h = Hygiene::new(db, self.file_id);
437 let path = path::ModPath::from_src(self.value.path()?, &h)?;
438
439 AstIdWithPath::new(ast_id.file_id, ast_id.value, path).as_call_id(db, resolver)
440 }
441}
442
443/// Helper wrapper for `AstId` with `ModPath`
444#[derive(Clone, Debug, Eq, PartialEq)]
445struct AstIdWithPath<T: ast::AstNode> {
446 pub ast_id: AstId<T>,
447 pub path: path::ModPath,
448}
449
450impl<T: ast::AstNode> AstIdWithPath<T> {
451 pub fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWithPath<T> {
452 AstIdWithPath { ast_id: AstId::new(file_id, ast_id), path }
453 }
454}
455
456impl AsMacroCall for AstIdWithPath<ast::MacroCall> {
457 fn as_call_id(
458 &self,
459 db: &impl AstDatabase,
460 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
461 ) -> Option<MacroCallId> {
462 let def = resolver(self.path.clone())?;
463 Some(def.as_call_id(db, MacroCallKind::FnLike(self.ast_id.clone())))
464 }
465}
466
467impl AsMacroCall for AstIdWithPath<ast::ModuleItem> {
468 fn as_call_id(
469 &self,
470 db: &impl AstDatabase,
471 resolver: impl Fn(path::ModPath) -> Option<MacroDefId>,
472 ) -> Option<MacroCallId> {
473 let def = resolver(self.path.clone())?;
474 Some(def.as_call_id(db, MacroCallKind::Attr(self.ast_id.clone())))
475 }
476}
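
The new `AsMacroCall` trait above factors macro-call creation into "resolve the path with a caller-supplied closure, then turn the resolved definition into a call id". The sketch below shows only that shape with made-up stand-in types (`ModPath`, `MacroDefId`, `MacroCallId`, and `AstIdWithPath` here are simplified placeholders, not the real `ra_hir_def`/`hir_expand` types), and the id arithmetic replaces the database interning the real code performs.

```rust
#[derive(Clone, Debug, PartialEq)]
struct ModPath(String);

#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroDefId(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct MacroCallId(u32);

// Helper trait: the caller supplies a resolver closure from path to macro
// definition, and the helper turns a successful resolution into a call id.
trait AsMacroCall {
    fn as_call_id(&self, resolver: impl Fn(ModPath) -> Option<MacroDefId>) -> Option<MacroCallId>;
}

struct AstIdWithPath {
    ast_id: u32,
    path: ModPath,
}

impl AsMacroCall for AstIdWithPath {
    fn as_call_id(&self, resolver: impl Fn(ModPath) -> Option<MacroDefId>) -> Option<MacroCallId> {
        let def = resolver(self.path.clone())?;
        // The real code interns the definition plus call kind in the database;
        // combining the ids keeps this sketch self-contained.
        Some(MacroCallId(def.0 * 1000 + self.ast_id))
    }
}

fn main() {
    let call = AstIdWithPath { ast_id: 7, path: ModPath("vec".into()) };
    let resolved =
        call.as_call_id(|path| if path.0 == "vec" { Some(MacroDefId(1)) } else { None });
    assert_eq!(resolved, Some(MacroCallId(1007)));
}
```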
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index b1f3f525d..51c65a5d7 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -7,7 +7,7 @@ use hir_expand::{
7 builtin_derive::find_builtin_derive, 7 builtin_derive::find_builtin_derive,
8 builtin_macro::find_builtin_macro, 8 builtin_macro::find_builtin_macro,
9 name::{name, AsName, Name}, 9 name::{name, AsName, Name},
10 HirFileId, MacroCallId, MacroCallKind, MacroDefId, MacroDefKind, 10 HirFileId, MacroCallId, MacroDefId, MacroDefKind,
11}; 11};
12use ra_cfg::CfgOptions; 12use ra_cfg::CfgOptions;
13use ra_db::{CrateId, FileId}; 13use ra_db::{CrateId, FileId};
@@ -25,8 +25,9 @@ use crate::{
25 path::{ImportAlias, ModPath, PathKind}, 25 path::{ImportAlias, ModPath, PathKind},
26 per_ns::PerNs, 26 per_ns::PerNs,
27 visibility::Visibility, 27 visibility::Visibility,
28 AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, 28 AdtId, AsMacroCall, AstId, AstIdWithPath, ConstLoc, ContainerId, EnumLoc, EnumVariantId,
29 LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, 29 FunctionLoc, ImplLoc, Intern, LocalModuleId, ModuleDefId, ModuleId, StaticLoc, StructLoc,
30 TraitLoc, TypeAliasLoc, UnionLoc,
30}; 31};
31 32
32pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap { 33pub(super) fn collect_defs(db: &impl DefDatabase, mut def_map: CrateDefMap) -> CrateDefMap {
@@ -99,11 +100,16 @@ struct ImportDirective {
99#[derive(Clone, Debug, Eq, PartialEq)] 100#[derive(Clone, Debug, Eq, PartialEq)]
100struct MacroDirective { 101struct MacroDirective {
101 module_id: LocalModuleId, 102 module_id: LocalModuleId,
102 ast_id: AstId<ast::MacroCall>, 103 ast_id: AstIdWithPath<ast::MacroCall>,
103 path: ModPath,
104 legacy: Option<MacroCallId>, 104 legacy: Option<MacroCallId>,
105} 105}
106 106
107#[derive(Clone, Debug, Eq, PartialEq)]
108struct DeriveDirective {
109 module_id: LocalModuleId,
110 ast_id: AstIdWithPath<ast::ModuleItem>,
111}
112
107/// Walks the tree of module recursively 113/// Walks the tree of module recursively
108struct DefCollector<'a, DB> { 114struct DefCollector<'a, DB> {
109 db: &'a DB, 115 db: &'a DB,
@@ -112,7 +118,7 @@ struct DefCollector<'a, DB> {
112 unresolved_imports: Vec<ImportDirective>, 118 unresolved_imports: Vec<ImportDirective>,
113 resolved_imports: Vec<ImportDirective>, 119 resolved_imports: Vec<ImportDirective>,
114 unexpanded_macros: Vec<MacroDirective>, 120 unexpanded_macros: Vec<MacroDirective>,
115 unexpanded_attribute_macros: Vec<(LocalModuleId, AstId<ast::ModuleItem>, ModPath)>, 121 unexpanded_attribute_macros: Vec<DeriveDirective>,
116 mod_dirs: FxHashMap<LocalModuleId, ModDir>, 122 mod_dirs: FxHashMap<LocalModuleId, ModDir>,
117 cfg_options: &'a CfgOptions, 123 cfg_options: &'a CfgOptions,
118} 124}
@@ -515,16 +521,16 @@ where
515 return false; 521 return false;
516 } 522 }
517 523
518 let resolved_res = self.def_map.resolve_path_fp_with_macro( 524 if let Some(call_id) = directive.ast_id.as_call_id(self.db, |path| {
519 self.db, 525 let resolved_res = self.def_map.resolve_path_fp_with_macro(
520 ResolveMode::Other, 526 self.db,
521 directive.module_id, 527 ResolveMode::Other,
522 &directive.path, 528 directive.module_id,
523 BuiltinShadowMode::Module, 529 &path,
524 ); 530 BuiltinShadowMode::Module,
525 531 );
526 if let Some(def) = resolved_res.resolved_def.take_macros() { 532 resolved_res.resolved_def.take_macros()
527 let call_id = def.as_call_id(self.db, MacroCallKind::FnLike(directive.ast_id)); 533 }) {
528 resolved.push((directive.module_id, call_id)); 534 resolved.push((directive.module_id, call_id));
529 res = ReachedFixedPoint::No; 535 res = ReachedFixedPoint::No;
530 return false; 536 return false;
@@ -532,12 +538,11 @@ where
532 538
533 true 539 true
534 }); 540 });
535 attribute_macros.retain(|(module_id, ast_id, path)| { 541 attribute_macros.retain(|directive| {
536 let resolved_res = self.resolve_attribute_macro(path); 542 if let Some(call_id) =
537 543 directive.ast_id.as_call_id(self.db, |path| self.resolve_attribute_macro(&path))
538 if let Some(def) = resolved_res { 544 {
539 let call_id = def.as_call_id(self.db, MacroCallKind::Attr(*ast_id)); 545 resolved.push((directive.module_id, call_id));
540 resolved.push((*module_id, call_id));
541 res = ReachedFixedPoint::No; 546 res = ReachedFixedPoint::No;
542 return false; 547 return false;
543 } 548 }
@@ -833,20 +838,22 @@ where
833 }; 838 };
834 let path = ModPath::from_tt_ident(ident); 839 let path = ModPath::from_tt_ident(ident);
835 840
836 let ast_id = AstId::new(self.file_id, def.kind.ast_id()); 841 let ast_id = AstIdWithPath::new(self.file_id, def.kind.ast_id(), path);
837 self.def_collector.unexpanded_attribute_macros.push((self.module_id, ast_id, path)); 842 self.def_collector
843 .unexpanded_attribute_macros
844 .push(DeriveDirective { module_id: self.module_id, ast_id });
838 } 845 }
839 } 846 }
840 } 847 }
841 848
842 fn collect_macro(&mut self, mac: &raw::MacroData) { 849 fn collect_macro(&mut self, mac: &raw::MacroData) {
843 let ast_id = AstId::new(self.file_id, mac.ast_id); 850 let mut ast_id = AstIdWithPath::new(self.file_id, mac.ast_id, mac.path.clone());
844 851
845 // Case 0: builtin macros 852 // Case 0: builtin macros
846 if mac.builtin { 853 if mac.builtin {
847 if let Some(name) = &mac.name { 854 if let Some(name) = &mac.name {
848 let krate = self.def_collector.def_map.krate; 855 let krate = self.def_collector.def_map.krate;
849 if let Some(macro_id) = find_builtin_macro(name, krate, ast_id) { 856 if let Some(macro_id) = find_builtin_macro(name, krate, ast_id.ast_id) {
850 self.def_collector.define_macro( 857 self.def_collector.define_macro(
851 self.module_id, 858 self.module_id,
852 name.clone(), 859 name.clone(),
@@ -862,7 +869,7 @@ where
862 if is_macro_rules(&mac.path) { 869 if is_macro_rules(&mac.path) {
863 if let Some(name) = &mac.name { 870 if let Some(name) = &mac.name {
864 let macro_id = MacroDefId { 871 let macro_id = MacroDefId {
865 ast_id: Some(ast_id), 872 ast_id: Some(ast_id.ast_id),
866 krate: Some(self.def_collector.def_map.krate), 873 krate: Some(self.def_collector.def_map.krate),
867 kind: MacroDefKind::Declarative, 874 kind: MacroDefKind::Declarative,
868 }; 875 };
@@ -872,15 +879,13 @@ where
872 } 879 }
873 880
874 // Case 2: try to resolve in legacy scope and expand macro_rules 881 // Case 2: try to resolve in legacy scope and expand macro_rules
875 if let Some(macro_def) = mac.path.as_ident().and_then(|name| { 882 if let Some(macro_call_id) = ast_id.as_call_id(self.def_collector.db, |path| {
876 self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name) 883 path.as_ident().and_then(|name| {
884 self.def_collector.def_map[self.module_id].scope.get_legacy_macro(&name)
885 })
877 }) { 886 }) {
878 let macro_call_id =
879 macro_def.as_call_id(self.def_collector.db, MacroCallKind::FnLike(ast_id));
880
881 self.def_collector.unexpanded_macros.push(MacroDirective { 887 self.def_collector.unexpanded_macros.push(MacroDirective {
882 module_id: self.module_id, 888 module_id: self.module_id,
883 path: mac.path.clone(),
884 ast_id, 889 ast_id,
885 legacy: Some(macro_call_id), 890 legacy: Some(macro_call_id),
886 }); 891 });
@@ -890,14 +895,12 @@ where
890 895
891 // Case 3: resolve in module scope, expand during name resolution. 896 // Case 3: resolve in module scope, expand during name resolution.
892 // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only. 897 // We rewrite simple path `macro_name` to `self::macro_name` to force resolve in module scope only.
893 let mut path = mac.path.clone(); 898 if ast_id.path.is_ident() {
894 if path.is_ident() { 899 ast_id.path.kind = PathKind::Super(0);
895 path.kind = PathKind::Super(0);
896 } 900 }
897 901
898 self.def_collector.unexpanded_macros.push(MacroDirective { 902 self.def_collector.unexpanded_macros.push(MacroDirective {
899 module_id: self.module_id, 903 module_id: self.module_id,
900 path,
901 ast_id, 904 ast_id,
902 legacy: None, 905 legacy: None,
903 }); 906 });
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
index fe05642ae..1dc842f40 100644
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ b/crates/ra_hir_ty/src/infer/unify.rs
@@ -249,6 +249,8 @@ impl InferenceTable {
249 match (ty1, ty2) { 249 match (ty1, ty2) {
250 (Ty::Unknown, _) | (_, Ty::Unknown) => true, 250 (Ty::Unknown, _) | (_, Ty::Unknown) => true,
251 251
252 (Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
253
252 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2))) 254 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
253 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2))) 255 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
254 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2))) 256 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/ra_hir_ty/src/tests/coercion.rs
index fc5ef36a5..42330b269 100644
--- a/crates/ra_hir_ty/src/tests/coercion.rs
+++ b/crates/ra_hir_ty/src/tests/coercion.rs
@@ -526,3 +526,25 @@ fn test() {
526 "### 526 "###
527 ); 527 );
528} 528}
529
530#[test]
531fn coerce_placeholder_ref() {
532 // placeholders should unify, even behind references
533 assert_snapshot!(
534 infer_with_mismatches(r#"
535struct S<T> { t: T }
536impl<TT> S<TT> {
537 fn get(&self) -> &TT {
538 &self.t
539 }
540}
541"#, true),
542 @r###"
543 [51; 55) 'self': &S<TT>
544 [64; 87) '{ ... }': &TT
545 [74; 81) '&self.t': &TT
546 [75; 79) 'self': &S<TT>
547 [75; 81) 'self.t': TT
548 "###
549 );
550}
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 88af61e87..ff8e75b48 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -60,6 +60,9 @@ impl TraitSolver {
60 context.0.db.check_canceled(); 60 context.0.db.check_canceled();
61 let remaining = fuel.get(); 61 let remaining = fuel.get();
62 fuel.set(remaining - 1); 62 fuel.set(remaining - 1);
63 if remaining == 0 {
64 log::debug!("fuel exhausted");
65 }
63 remaining > 0 66 remaining > 0
64 }) 67 })
65 } 68 }
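
The hunk above only adds a debug log when the solver's fuel counter hits zero. For context, a minimal sketch of that fuel-based cancellation pattern: a `Cell` counter decremented inside a should-continue callback. The fuel amount and the `eprintln!` are stand-ins for the real fuel setting and `log::debug!` call.

```rust
use std::cell::Cell;

fn main() {
    let fuel = Cell::new(5u32);
    let should_continue = || {
        let remaining = fuel.get();
        // Saturating decrement so the sketch cannot underflow once fuel is spent.
        fuel.set(remaining.saturating_sub(1));
        if remaining == 0 {
            eprintln!("fuel exhausted");
        }
        remaining > 0
    };

    let mut steps = 0;
    while should_continue() {
        steps += 1;
    }
    assert_eq!(steps, 5);
}
```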
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 1cc55e78b..a02dbaf2f 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index 918fd4b97..95f038f00 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 174e13595..20c414ca1 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -365,6 +365,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
365.literal { color: #BFEBBF; } 365.literal { color: #BFEBBF; }
366.literal\\.numeric { color: #6A8759; } 366.literal\\.numeric { color: #6A8759; }
367.macro { color: #94BFF3; } 367.macro { color: #94BFF3; }
368.module { color: #AFD8AF; }
368.variable { color: #DCDCCC; } 369.variable { color: #DCDCCC; }
369.variable\\.mut { color: #DCDCCC; text-decoration: underline; } 370.variable\\.mut { color: #DCDCCC; text-decoration: underline; }
370 371
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml
index 716e88bc1..495fffb5a 100644
--- a/crates/ra_ide_db/Cargo.toml
+++ b/crates/ra_ide_db/Cargo.toml
@@ -22,7 +22,6 @@ fst = { version = "0.3.1", default-features = false }
22rustc-hash = "1.0" 22rustc-hash = "1.0"
23unicase = "2.2.0" 23unicase = "2.2.0"
24superslice = "1.0.0" 24superslice = "1.0.0"
25rand = { version = "0.7.0", features = ["small_rng"] }
26once_cell = "1.2.0" 25once_cell = "1.2.0"
27 26
28ra_syntax = { path = "../ra_syntax" } 27ra_syntax = { path = "../ra_syntax" }
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs
index c8a017c5c..ed2eaabd4 100644
--- a/crates/ra_lsp_server/src/main.rs
+++ b/crates/ra_lsp_server/src/main.rs
@@ -15,13 +15,8 @@ fn main() -> Result<()> {
15 15
16fn setup_logging() -> Result<()> { 16fn setup_logging() -> Result<()> {
17 std::env::set_var("RUST_BACKTRACE", "short"); 17 std::env::set_var("RUST_BACKTRACE", "short");
18
19 env_logger::try_init()?; 18 env_logger::try_init()?;
20 19 ra_prof::init();
21 ra_prof::set_filter(match std::env::var("RA_PROFILE") {
22 Ok(spec) => ra_prof::Filter::from_spec(&spec),
23 Err(_) => ra_prof::Filter::disabled(),
24 });
25 Ok(()) 20 Ok(())
26} 21}
27 22
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs
index d38ff397e..c0bfbc2ee 100644
--- a/crates/ra_prof/src/lib.rs
+++ b/crates/ra_prof/src/lib.rs
@@ -26,6 +26,13 @@ pub use crate::memory_usage::{Bytes, MemoryUsage};
26#[global_allocator] 26#[global_allocator]
27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; 27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
28 28
29pub fn init() {
30 set_filter(match std::env::var("RA_PROFILE") {
31 Ok(spec) => Filter::from_spec(&spec),
32 Err(_) => Filter::disabled(),
33 });
34}
35
29/// Set profiling filter. It specifies descriptions allowed to profile. 36/// Set profiling filter. It specifies descriptions allowed to profile.
30/// This is helpful when call stack has too many nested profiling scopes. 37/// This is helpful when call stack has too many nested profiling scopes.
31/// Additionally filter can specify maximum depth of profiling scopes nesting. 38/// Additionally filter can specify maximum depth of profiling scopes nesting.
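
The new `ra_prof::init` above simply moves the `RA_PROFILE` handling that `ra_lsp_server` used to do inline (see the `main.rs` hunk earlier). Below is a sketch of that env-driven setup with a stand-in `Filter` type; the real `ra_prof::Filter` parses a richer spec (allowed scope names, nesting depth, duration thresholds) than this placeholder stores.

```rust
// Stand-in for `ra_prof::Filter`; only records whether a spec was supplied.
#[derive(Debug)]
struct Filter {
    spec: Option<String>,
}

impl Filter {
    fn from_spec(spec: &str) -> Filter {
        Filter { spec: Some(spec.to_string()) }
    }
    fn disabled() -> Filter {
        Filter { spec: None }
    }
}

fn init() -> Filter {
    // Profiling is off unless RA_PROFILE is set, mirroring the code above.
    match std::env::var("RA_PROFILE") {
        Ok(spec) => Filter::from_spec(&spec),
        Err(_) => Filter::disabled(),
    }
}

fn main() {
    // Run with e.g. RA_PROFILE=* (see the ra_prof docs for the full spec syntax).
    println!("{:?}", init());
}
```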
diff --git a/docs/dev/debugging.md b/docs/dev/debugging.md
index 1ccf4dca2..e6b082156 100644
--- a/docs/dev/debugging.md
+++ b/docs/dev/debugging.md
@@ -1,44 +1,66 @@
1# Debugging vs Code plugin and the Language Server 1# Debugging VSCode plugin and the language server
2 2
3**NOTE:** the information here is mostly obsolete 3## Prerequisites
4 4
5Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb). 5- Install [LLDB](https://lldb.llvm.org/) and the [LLDB Extension](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb).
6- Open the root folder in VSCode. Here you can access the preconfigured debug setups.
6 7
7Checkout rust rust-analyzer and open it in vscode. 8 <img height=150px src="https://user-images.githubusercontent.com/36276403/74611090-92ec5380-5101-11ea-8a41-598f51f3f3e3.png" alt="Debug options view">
9
10- Install all TypeScript dependencies
11 ```bash
12 cd editors/code
13 npm install
14 ```
15
16## Common knowledge
17
18* All debug configurations open a new `[Extension Development Host]` VSCode instance
19where **only** the `rust-analyzer` extension being debugged is enabled.
20* To activate the extension you need to open any Rust project folder in `[Extension Development Host]`.
8 21
9```
10$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
11$ cd rust-analyzer
12$ code .
13```
14 22
15- To attach to the `lsp server` in linux you'll have to run: 23## Debug TypeScript VSCode extension
16 24
17 `echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope` 25- `Run Extension` - runs the extension with the globally installed `ra_lsp_server` binary.
26- `Run Extension (Dev Server)` - runs extension with the locally built LSP server (`target/debug/ra_lsp_server`).
27
28TypeScript debugging is configured to watch your source edits and recompile.
29To apply changes to an already running debug process press <kbd>Ctrl+Shift+P</kbd> and run the following command in your `[Extension Development Host]`
30
31```
32> Developer: Reload Window
33```
18 34
19 This enables ptrace on non forked processes 35## Debug Rust LSP server
20 36
21- Ensure the dependencies for the extension are installed, run the `npm: install - editors/code` task in vscode. 37- When attaching a debugger to an already running `rust-analyzer` server on Linux you might need to enable `ptrace` for unrelated processes by running:
22 38
23- Launch the `Debug Extension`, this will build the extension and the `lsp server`. 39 ```
40 echo 0 | sudo tee /proc/sys/kernel/yama/ptrace_scope
41 ```
24 42
25- A new instance of vscode with `[Extension Development Host]` in the title.
26 43
27 Don't worry about disabling `rls` all other extensions will be disabled but this one. 44- By default, the LSP server is built without debug information. To enable it, you'll need to change `Cargo.toml`:
45 ```toml
46 [profile.dev]
47 debug = 2
48 ```
28 49
29- In the new vscode instance open a rust project, and navigate to a rust file 50- Select `Run Extension (Dev Server)` to run your locally built `target/debug/ra_lsp_server`.
30 51
31- In the original vscode start an additional debug session (the three periods in the launch) and select `Debug Lsp Server`. 52- In the original VSCode window once again select the `Attach To Server` debug configuration.
32 53
33- A list of running processes should appear select the `ra_lsp_server` from this repo. 54- A list of running processes should appear. Select the `ra_lsp_server` from this repo.
34 55
35- Navigate to `crates/ra_lsp_server/src/main_loop.rs` and add a breakpoint to the `on_task` function. 56- Navigate to `crates/ra_lsp_server/src/main_loop.rs` and add a breakpoint to the `on_task` function.
36 57
37- Go back to the `[Extension Development Host]` instance and hover over a rust variable and your breakpoint should hit. 58- Go back to the `[Extension Development Host]` instance and hover over a Rust variable and your breakpoint should hit.
38 59
39## Demo 60## Demo
40 61
41![demonstration of debugging](https://user-images.githubusercontent.com/1711539/51384036-254fab80-1b2c-11e9-824d-95f9a6e9cf4f.gif) 62- [Debugging TypeScript VScode extension](https://www.youtube.com/watch?v=T-hvpK6s4wM).
63- [Debugging Rust LSP server](https://www.youtube.com/watch?v=EaNb5rg4E0M).
42 64
43## Troubleshooting 65## Troubleshooting
44 66
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
index 867aae975..553687e78 100644
--- a/docs/user/readme.adoc
+++ b/docs/user/readme.adoc
@@ -27,8 +27,9 @@ https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree]
27 27
28You can install the latest release of the plugin from 28You can install the latest release of the plugin from
29https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace]. 29https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace].
30By default, the plugin will download the latest version of the server as well. 30By default, the plugin will download the matching version of the server as well.
31 31
32// FIXME: update the image (its text has changed)
32image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[] 33image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[]
33 34
34The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`. 35The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`.
@@ -37,9 +38,7 @@ Note that we only support the latest version of VS Code.
37 38
38==== Updates 39==== Updates
39 40
40The extension will be updated automatically as new versions become available. 41The extension will be updated automatically as new versions become available. It will ask your permission to download the matching language server version binary if needed.
41The server update functionality is in progress.
42For the time being, the workaround is to remove the binary from `globalStorage` and to restart the extension.
43 42
44==== Building From Source 43==== Building From Source
45 44
diff --git a/editors/code/package.json b/editors/code/package.json
index a607c2148..ed1cae2ab 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -6,7 +6,7 @@
6 "private": true, 6 "private": true,
7 "icon": "icon.png", 7 "icon": "icon.png",
8 "//": "The real version is in release.yaml, this one just needs to be bigger", 8 "//": "The real version is in release.yaml, this one just needs to be bigger",
9 "version": "0.2.0-dev", 9 "version": "0.2.20200211-dev",
10 "publisher": "matklad", 10 "publisher": "matklad",
11 "repository": { 11 "repository": {
12 "url": "https://github.com/rust-analyzer/rust-analyzer.git", 12 "url": "https://github.com/rust-analyzer/rust-analyzer.git",
@@ -233,11 +233,10 @@
233 "description": "Trace requests to the ra_lsp_server" 233 "description": "Trace requests to the ra_lsp_server"
234 }, 234 },
235 "rust-analyzer.lruCapacity": { 235 "rust-analyzer.lruCapacity": {
236 "type": [ 236 "type": [ "null", "integer" ],
237 "number",
238 "null"
239 ],
240 "default": null, 237 "default": null,
238 "minimum": 0,
239 "exclusiveMinimum": true,
241 "description": "Number of syntax trees rust-analyzer keeps in memory" 240 "description": "Number of syntax trees rust-analyzer keeps in memory"
242 }, 241 },
243 "rust-analyzer.displayInlayHints": { 242 "rust-analyzer.displayInlayHints": {
@@ -246,9 +245,10 @@
246 "description": "Display additional type and parameter information in the editor" 245 "description": "Display additional type and parameter information in the editor"
247 }, 246 },
248 "rust-analyzer.maxInlayHintLength": { 247 "rust-analyzer.maxInlayHintLength": {
249 "type": "number", 248 "type": [ "null", "integer" ],
250 "default": 20, 249 "default": 20,
251 "exclusiveMinimum": 0, 250 "minimum": 0,
251 "exclusiveMinimum": true,
252 "description": "Maximum length for inlay hints" 252 "description": "Maximum length for inlay hints"
253 }, 253 },
254 "rust-analyzer.cargoFeatures.noDefaultFeatures": { 254 "rust-analyzer.cargoFeatures.noDefaultFeatures": {
diff --git a/editors/code/rollup.config.js b/editors/code/rollup.config.js
index f8d320f46..337385a24 100644
--- a/editors/code/rollup.config.js
+++ b/editors/code/rollup.config.js
@@ -18,6 +18,7 @@ export default {
18 external: [...nodeBuiltins, 'vscode'], 18 external: [...nodeBuiltins, 'vscode'],
19 output: { 19 output: {
20 file: './out/main.js', 20 file: './out/main.js',
21 format: 'cjs' 21 format: 'cjs',
22 exports: 'named'
22 } 23 }
23}; 24};
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index dcf9d0c06..efef820ab 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -2,7 +2,7 @@ import * as lc from 'vscode-languageclient';
2import * as vscode from 'vscode'; 2import * as vscode from 'vscode';
3 3
4import { Config } from './config'; 4import { Config } from './config';
5import { ensureLanguageServerBinary } from './installation/language_server'; 5import { ensureServerBinary } from './installation/server';
6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed'; 6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
7 7
8export async function createClient(config: Config): Promise<null | lc.LanguageClient> { 8export async function createClient(config: Config): Promise<null | lc.LanguageClient> {
@@ -11,11 +11,11 @@ export async function createClient(config: Config): Promise<null | lc.LanguageCl
11 // It might be a good idea to test if the uri points to a file. 11 // It might be a good idea to test if the uri points to a file.
12 const workspaceFolderPath = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.'; 12 const workspaceFolderPath = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.';
13 13
14 const langServerPath = await ensureLanguageServerBinary(config.langServerBinarySource); 14 const serverPath = await ensureServerBinary(config.serverSource);
15 if (!langServerPath) return null; 15 if (!serverPath) return null;
16 16
17 const run: lc.Executable = { 17 const run: lc.Executable = {
18 command: langServerPath, 18 command: serverPath,
19 options: { cwd: workspaceFolderPath }, 19 options: { cwd: workspaceFolderPath },
20 }; 20 };
21 const serverOptions: lc.ServerOptions = { 21 const serverOptions: lc.ServerOptions = {
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 8cd89e119..53e2a414b 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -24,6 +24,19 @@ export class Config {
24 ] 24 ]
25 .map(opt => `${Config.rootSection}.${opt}`); 25 .map(opt => `${Config.rootSection}.${opt}`);
26 26
27 private static readonly extensionVersion: string = (() => {
28 const packageJsonVersion = vscode
29 .extensions
30 .getExtension("matklad.rust-analyzer")!
31 .packageJSON
32 .version as string; // n.n.YYYYMMDD
33
34 const realVersionRegexp = /^\d+\.\d+\.(\d{4})(\d{2})(\d{2})/;
35 const [, yyyy, mm, dd] = packageJsonVersion.match(realVersionRegexp)!;
36
37 return `${yyyy}-${mm}-${dd}`;
38 })();
39
27 private cfg!: vscode.WorkspaceConfiguration; 40 private cfg!: vscode.WorkspaceConfiguration;
28 41
29 constructor(private readonly ctx: vscode.ExtensionContext) { 42 constructor(private readonly ctx: vscode.ExtensionContext) {
@@ -31,7 +44,6 @@ export class Config {
31 this.refreshConfig(); 44 this.refreshConfig();
32 } 45 }
33 46
34
35 private refreshConfig() { 47 private refreshConfig() {
36 this.cfg = vscode.workspace.getConfiguration(Config.rootSection); 48 this.cfg = vscode.workspace.getConfiguration(Config.rootSection);
37 console.log("Using configuration:", this.cfg); 49 console.log("Using configuration:", this.cfg);
@@ -68,7 +80,7 @@ export class Config {
68 * `platform` on GitHub releases. (It is also stored under the same name when 80 * `platform` on GitHub releases. (It is also stored under the same name when
69 * downloaded by the extension). 81 * downloaded by the extension).
70 */ 82 */
71 get prebuiltLangServerFileName(): null | string { 83 get prebuiltServerFileName(): null | string {
72 // See possible `arch` values here: 84 // See possible `arch` values here:
73 // https://nodejs.org/api/process.html#process_process_arch 85 // https://nodejs.org/api/process.html#process_process_arch
74 86
@@ -98,17 +110,17 @@ export class Config {
98 } 110 }
99 } 111 }
100 112
101 get langServerBinarySource(): null | BinarySource { 113 get serverSource(): null | BinarySource {
102 const langServerPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("raLspServerPath"); 114 const serverPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("raLspServerPath");
103 115
104 if (langServerPath) { 116 if (serverPath) {
105 return { 117 return {
106 type: BinarySource.Type.ExplicitPath, 118 type: BinarySource.Type.ExplicitPath,
107 path: Config.replaceTildeWithHomeDir(langServerPath) 119 path: Config.replaceTildeWithHomeDir(serverPath)
108 }; 120 };
109 } 121 }
110 122
111 const prebuiltBinaryName = this.prebuiltLangServerFileName; 123 const prebuiltBinaryName = this.prebuiltServerFileName;
112 124
113 if (!prebuiltBinaryName) return null; 125 if (!prebuiltBinaryName) return null;
114 126
@@ -116,6 +128,8 @@ export class Config {
116 type: BinarySource.Type.GithubRelease, 128 type: BinarySource.Type.GithubRelease,
117 dir: this.ctx.globalStoragePath, 129 dir: this.ctx.globalStoragePath,
118 file: prebuiltBinaryName, 130 file: prebuiltBinaryName,
131 storage: this.ctx.globalState,
132 version: Config.extensionVersion,
119 repo: { 133 repo: {
120 name: "rust-analyzer", 134 name: "rust-analyzer",
121 owner: "rust-analyzer", 135 owner: "rust-analyzer",
@@ -153,5 +167,5 @@ export class Config {
153 } 167 }
154 168
155 // for internal use 169 // for internal use
156 get withSysroot() { return this.cfg.get("withSysroot", false); } 170 get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; }
157} 171}
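
The `extensionVersion` initializer above derives the GitHub release tag from the marketplace version, mapping `n.n.YYYYMMDD` to `YYYY-MM-DD`. A standalone sketch of that transformation, with a null-returning guard added for illustration where the original relies on non-null assertions:

// "0.1.20200211" -> "2020-02-11"; returns null for an unexpected version shape.
export function releaseTagFromExtensionVersion(packageJsonVersion: string): string | null {
    const match = packageJsonVersion.match(/^\d+\.\d+\.(\d{4})(\d{2})(\d{2})/);
    if (!match) return null;
    const [, yyyy, mm, dd] = match;
    return `${yyyy}-${mm}-${dd}`;
}
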
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 70042a479..ff6245f78 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -60,6 +60,10 @@ export class Ctx {
60 this.pushCleanup(d); 60 this.pushCleanup(d);
61 } 61 }
62 62
63 get globalState(): vscode.Memento {
64 return this.extCtx.globalState;
65 }
66
63 get subscriptions(): Disposable[] { 67 get subscriptions(): Disposable[] {
64 return this.extCtx.subscriptions; 68 return this.extCtx.subscriptions;
65 } 69 }
@@ -87,15 +91,11 @@ export async function sendRequestWithRetry<R>(
87 for (const delay of [2, 4, 6, 8, 10, null]) { 91 for (const delay of [2, 4, 6, 8, 10, null]) {
88 try { 92 try {
89 return await (token ? client.sendRequest(method, param, token) : client.sendRequest(method, param)); 93 return await (token ? client.sendRequest(method, param, token) : client.sendRequest(method, param));
90 } catch (e) { 94 } catch (err) {
91 if ( 95 if (delay === null || err.code !== lc.ErrorCodes.ContentModified) {
92 e.code === lc.ErrorCodes.ContentModified && 96 throw err;
93 delay !== null
94 ) {
95 await sleep(10 * (1 << delay));
96 continue;
97 } 97 }
98 throw e; 98 await sleep(10 * (1 << delay));
99 } 99 }
100 } 100 }
101 throw 'unreachable'; 101 throw 'unreachable';
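
The rewritten loop above retries only `ContentModified` errors (the document changed while the request was in flight), sleeping `10 * 2^delay` milliseconds between attempts, with the trailing `null` meaning "last attempt, rethrow". The same pattern extracted into a generic helper, as a sketch with illustrative names:

async function retryWhile<T>(
    op: () => Promise<T>,
    isRetriable: (err: unknown) => boolean,
): Promise<T> {
    // Backoff: 40ms, 160ms, 640ms, 2560ms, 10240ms, then give up.
    for (const delay of [2, 4, 6, 8, 10, null]) {
        try {
            return await op();
        } catch (err) {
            if (delay === null || !isRetriable(err)) throw err;
            await new Promise(resolve => setTimeout(resolve, 10 * (1 << delay)));
        }
    }
    throw new Error("unreachable");
}
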
diff --git a/editors/code/src/inlay_hints.ts b/editors/code/src/inlay_hints.ts
index 1c019a51b..3896878cd 100644
--- a/editors/code/src/inlay_hints.ts
+++ b/editors/code/src/inlay_hints.ts
@@ -13,7 +13,7 @@ export function activateInlayHints(ctx: Ctx) {
13 13
14 vscode.workspace.onDidChangeTextDocument( 14 vscode.workspace.onDidChangeTextDocument(
15 async event => { 15 async event => {
16 if (event.contentChanges.length !== 0) return; 16 if (event.contentChanges.length === 0) return;
17 if (event.document.languageId !== 'rust') return; 17 if (event.document.languageId !== 'rust') return;
18 await hintsUpdater.refresh(); 18 await hintsUpdater.refresh();
19 }, 19 },
@@ -27,7 +27,9 @@ export function activateInlayHints(ctx: Ctx) {
27 ctx.subscriptions 27 ctx.subscriptions
28 ); 28 );
29 29
 30 ctx.onDidRestart(_ => hintsUpdater.setEnabled(ctx.config.displayInlayHints)); 30 // We pass an async function even though it will not be awaited when called,
 31 // so Promise rejections won't be handled; in practice this should never throw.
32 ctx.onDidRestart(async _ => hintsUpdater.setEnabled(ctx.config.displayInlayHints));
31} 33}
32 34
33interface InlayHintsParams { 35interface InlayHintsParams {
@@ -36,7 +38,7 @@ interface InlayHintsParams {
36 38
37interface InlayHint { 39interface InlayHint {
38 range: vscode.Range; 40 range: vscode.Range;
39 kind: string; 41 kind: "TypeHint" | "ParameterHint";
40 label: string; 42 label: string;
41} 43}
42 44
@@ -53,7 +55,7 @@ const parameterHintDecorationType = vscode.window.createTextEditorDecorationType
53}); 55});
54 56
55class HintsUpdater { 57class HintsUpdater {
56 private pending: Map<string, vscode.CancellationTokenSource> = new Map(); 58 private pending = new Map<string, vscode.CancellationTokenSource>();
57 private ctx: Ctx; 59 private ctx: Ctx;
58 private enabled: boolean; 60 private enabled: boolean;
59 61
@@ -62,30 +64,36 @@ class HintsUpdater {
62 this.enabled = ctx.config.displayInlayHints; 64 this.enabled = ctx.config.displayInlayHints;
63 } 65 }
64 66
65 async setEnabled(enabled: boolean) { 67 async setEnabled(enabled: boolean): Promise<void> {
66 if (this.enabled == enabled) return; 68 if (this.enabled == enabled) return;
67 this.enabled = enabled; 69 this.enabled = enabled;
68 70
69 if (this.enabled) { 71 if (this.enabled) {
70 await this.refresh(); 72 return await this.refresh();
71 } else {
72 this.allEditors.forEach(it => {
73 this.setTypeDecorations(it, []);
74 this.setParameterDecorations(it, []);
75 });
76 } 73 }
74 this.allEditors.forEach(it => {
75 this.setTypeDecorations(it, []);
76 this.setParameterDecorations(it, []);
77 });
77 } 78 }
78 79
79 async refresh() { 80 async refresh() {
80 if (!this.enabled) return; 81 if (!this.enabled) return;
81 const promises = this.allEditors.map(it => this.refreshEditor(it)); 82 await Promise.all(this.allEditors.map(it => this.refreshEditor(it)));
82 await Promise.all(promises); 83 }
84
85 private get allEditors(): vscode.TextEditor[] {
86 return vscode.window.visibleTextEditors.filter(
87 editor => editor.document.languageId === 'rust',
88 );
83 } 89 }
84 90
85 private async refreshEditor(editor: vscode.TextEditor): Promise<void> { 91 private async refreshEditor(editor: vscode.TextEditor): Promise<void> {
86 const newHints = await this.queryHints(editor.document.uri.toString()); 92 const newHints = await this.queryHints(editor.document.uri.toString());
87 if (newHints == null) return; 93 if (newHints == null) return;
88 const newTypeDecorations = newHints.filter(hint => hint.kind === 'TypeHint') 94
95 const newTypeDecorations = newHints
96 .filter(hint => hint.kind === 'TypeHint')
89 .map(hint => ({ 97 .map(hint => ({
90 range: hint.range, 98 range: hint.range,
91 renderOptions: { 99 renderOptions: {
@@ -96,7 +104,8 @@ class HintsUpdater {
96 })); 104 }));
97 this.setTypeDecorations(editor, newTypeDecorations); 105 this.setTypeDecorations(editor, newTypeDecorations);
98 106
99 const newParameterDecorations = newHints.filter(hint => hint.kind === 'ParameterHint') 107 const newParameterDecorations = newHints
108 .filter(hint => hint.kind === 'ParameterHint')
100 .map(hint => ({ 109 .map(hint => ({
101 range: hint.range, 110 range: hint.range,
102 renderOptions: { 111 renderOptions: {
@@ -108,12 +117,6 @@ class HintsUpdater {
108 this.setParameterDecorations(editor, newParameterDecorations); 117 this.setParameterDecorations(editor, newParameterDecorations);
109 } 118 }
110 119
111 private get allEditors(): vscode.TextEditor[] {
112 return vscode.window.visibleTextEditors.filter(
113 editor => editor.document.languageId === 'rust',
114 );
115 }
116
117 private setTypeDecorations( 120 private setTypeDecorations(
118 editor: vscode.TextEditor, 121 editor: vscode.TextEditor,
119 decorations: vscode.DecorationOptions[], 122 decorations: vscode.DecorationOptions[],
@@ -137,12 +140,14 @@ class HintsUpdater {
137 private async queryHints(documentUri: string): Promise<InlayHint[] | null> { 140 private async queryHints(documentUri: string): Promise<InlayHint[] | null> {
138 const client = this.ctx.client; 141 const client = this.ctx.client;
139 if (!client) return null; 142 if (!client) return null;
143
140 const request: InlayHintsParams = { 144 const request: InlayHintsParams = {
141 textDocument: { uri: documentUri }, 145 textDocument: { uri: documentUri },
142 }; 146 };
143 const tokenSource = new vscode.CancellationTokenSource(); 147 const tokenSource = new vscode.CancellationTokenSource();
144 const prev = this.pending.get(documentUri); 148 const prevHintsRequest = this.pending.get(documentUri);
145 if (prev) prev.cancel(); 149 prevHintsRequest?.cancel();
150
146 this.pending.set(documentUri, tokenSource); 151 this.pending.set(documentUri, tokenSource);
147 try { 152 try {
148 return await sendRequestWithRetry<InlayHint[] | null>( 153 return await sendRequestWithRetry<InlayHint[] | null>(
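
The `pending` map gives each document a "latest request wins" policy: a new inlay-hints query cancels any still-running query for the same URI via its `CancellationTokenSource`. A distilled sketch of that pattern on its own; the class and method names are illustrative:

import * as vscode from "vscode";

class LatestRequestGate {
    private pending = new Map<string, vscode.CancellationTokenSource>();

    // Runs `request`, cancelling any in-flight request previously started for the same key.
    async run<T>(
        key: string,
        request: (token: vscode.CancellationToken) => Promise<T>,
    ): Promise<T> {
        this.pending.get(key)?.cancel();
        const source = new vscode.CancellationTokenSource();
        this.pending.set(key, source);
        try {
            return await request(source.token);
        } finally {
            if (this.pending.get(key) === source) this.pending.delete(key);
        }
    }
}
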
diff --git a/editors/code/src/installation/download_artifact.ts b/editors/code/src/installation/download_artifact.ts
new file mode 100644
index 000000000..de655f8f4
--- /dev/null
+++ b/editors/code/src/installation/download_artifact.ts
@@ -0,0 +1,58 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { promises as fs } from "fs";
4import { strict as assert } from "assert";
5
6import { ArtifactReleaseInfo } from "./interfaces";
7import { downloadFile } from "./download_file";
8import { throttle } from "throttle-debounce";
9
10/**
11 * Downloads artifact from given `downloadUrl`.
12 * Creates `installationDir` if it is not yet created and put the artifact under
13 * `artifactFileName`.
14 * Displays info about the download progress in an info message printing the name
15 * of the artifact as `displayName`.
16 */
17export async function downloadArtifact(
18 {downloadUrl, releaseName}: ArtifactReleaseInfo,
19 artifactFileName: string,
20 installationDir: string,
21 displayName: string,
22) {
23 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
24 err?.code,
25 "EEXIST",
26 `Couldn't create directory "${installationDir}" to download `+
27 `${artifactFileName} artifact: ${err.message}`
28 ));
29
30 const installationPath = path.join(installationDir, artifactFileName);
31
32 console.time(`Downloading ${artifactFileName}`);
33 await vscode.window.withProgress(
34 {
35 location: vscode.ProgressLocation.Notification,
36 cancellable: false, // FIXME: add support for canceling download?
37 title: `Downloading ${displayName} (${releaseName})`
38 },
39 async (progress, _cancellationToken) => {
40 let lastPrecentage = 0;
41 const filePermissions = 0o755; // (rwx, r_x, r_x)
42 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
43 200,
44 /* noTrailing: */ true,
45 (readBytes, totalBytes) => {
46 const newPercentage = (readBytes / totalBytes) * 100;
47 progress.report({
48 message: newPercentage.toFixed(0) + "%",
49 increment: newPercentage - lastPrecentage
50 });
51
52 lastPrecentage = newPercentage;
53 })
54 );
55 }
56 );
57 console.timeEnd(`Downloading ${artifactFileName}`);
58}
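
Two details of the progress reporting above are easy to miss: `withProgress` expects `increment` to be a delta (the reported increments should sum to 100), and the callback is throttled to at most one report per 200ms so the notification does not flicker. A reduced sketch of just the percentage bookkeeping, without the throttling or file I/O; names are illustrative:

import * as vscode from "vscode";

// Builds a (readBytes, totalBytes) callback that reports percentage deltas to `progress`.
function makeProgressReporter(
    progress: vscode.Progress<{ message?: string; increment?: number }>,
) {
    let lastPercentage = 0;
    return (readBytes: number, totalBytes: number) => {
        const newPercentage = (readBytes / totalBytes) * 100;
        progress.report({
            message: newPercentage.toFixed(0) + "%",
            increment: newPercentage - lastPercentage, // delta since the previous report
        });
        lastPercentage = newPercentage;
    };
}
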
diff --git a/editors/code/src/installation/fetch_latest_artifact_metadata.ts b/editors/code/src/installation/fetch_artifact_release_info.ts
index 7e3700603..7d497057a 100644
--- a/editors/code/src/installation/fetch_latest_artifact_metadata.ts
+++ b/editors/code/src/installation/fetch_artifact_release_info.ts
@@ -1,26 +1,32 @@
1import fetch from "node-fetch"; 1import fetch from "node-fetch";
2import { GithubRepo, ArtifactMetadata } from "./interfaces"; 2import { GithubRepo, ArtifactReleaseInfo } from "./interfaces";
3 3
4const GITHUB_API_ENDPOINT_URL = "https://api.github.com"; 4const GITHUB_API_ENDPOINT_URL = "https://api.github.com";
5 5
6
6/** 7/**
 7 * Fetches the latest release from GitHub `repo` and returns metadata about 8 * Fetches the release with `releaseTag` (or just the latest release when not specified)
8 * `artifactFileName` shipped with this release or `null` if no such artifact was published. 9 * from GitHub `repo` and returns metadata about `artifactFileName` shipped with
10 * this release or `null` if no such artifact was published.
9 */ 11 */
10export async function fetchLatestArtifactMetadata( 12export async function fetchArtifactReleaseInfo(
11 repo: GithubRepo, artifactFileName: string 13 repo: GithubRepo, artifactFileName: string, releaseTag?: string
12): Promise<null | ArtifactMetadata> { 14): Promise<null | ArtifactReleaseInfo> {
13 15
14 const repoOwner = encodeURIComponent(repo.owner); 16 const repoOwner = encodeURIComponent(repo.owner);
15 const repoName = encodeURIComponent(repo.name); 17 const repoName = encodeURIComponent(repo.name);
16 18
17 const apiEndpointPath = `/repos/${repoOwner}/${repoName}/releases/latest`; 19 const apiEndpointPath = releaseTag
20 ? `/repos/${repoOwner}/${repoName}/releases/tags/${releaseTag}`
21 : `/repos/${repoOwner}/${repoName}/releases/latest`;
22
18 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath; 23 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath;
19 24
20 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`) 25 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`)
21 26
22 console.log("Issuing request for released artifacts metadata to", requestUrl); 27 console.log("Issuing request for released artifacts metadata to", requestUrl);
23 28
29 // FIXME: handle non-ok response
24 const response: GithubRelease = await fetch(requestUrl, { 30 const response: GithubRelease = await fetch(requestUrl, {
25 headers: { Accept: "application/vnd.github.v3+json" } 31 headers: { Accept: "application/vnd.github.v3+json" }
26 }) 32 })
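
Functionally, the change boils down to which GitHub Releases endpoint gets hit: `/releases/tags/<tag>` when a specific `releaseTag` is passed, `/releases/latest` otherwise. That URL construction in isolation, as a sketch (the example tag is illustrative):

const GITHUB_API_ENDPOINT_URL = "https://api.github.com";

function releaseEndpoint(owner: string, name: string, releaseTag?: string): string {
    const repoOwner = encodeURIComponent(owner);
    const repoName = encodeURIComponent(name);
    return releaseTag
        ? `${GITHUB_API_ENDPOINT_URL}/repos/${repoOwner}/${repoName}/releases/tags/${encodeURIComponent(releaseTag)}`
        : `${GITHUB_API_ENDPOINT_URL}/repos/${repoOwner}/${repoName}/releases/latest`;
}

// releaseEndpoint("rust-analyzer", "rust-analyzer", "2020-02-11")
//   -> ".../repos/rust-analyzer/rust-analyzer/releases/tags/2020-02-11"
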
diff --git a/editors/code/src/installation/interfaces.ts b/editors/code/src/installation/interfaces.ts
index 8039d0b90..e40839e4b 100644
--- a/editors/code/src/installation/interfaces.ts
+++ b/editors/code/src/installation/interfaces.ts
@@ -1,3 +1,5 @@
1import * as vscode from "vscode";
2
1export interface GithubRepo { 3export interface GithubRepo {
2 name: string; 4 name: string;
3 owner: string; 5 owner: string;
@@ -6,7 +8,7 @@ export interface GithubRepo {
6/** 8/**
7 * Metadata about particular artifact retrieved from GitHub releases. 9 * Metadata about particular artifact retrieved from GitHub releases.
8 */ 10 */
9export interface ArtifactMetadata { 11export interface ArtifactReleaseInfo {
10 releaseName: string; 12 releaseName: string;
11 downloadUrl: string; 13 downloadUrl: string;
12} 14}
@@ -50,6 +52,17 @@ export namespace BinarySource {
50 * and in local `.dir`. 52 * and in local `.dir`.
51 */ 53 */
52 file: string; 54 file: string;
55
56 /**
57 * Tag of github release that denotes a version required by this extension.
58 */
59 version: string;
60
61 /**
62 * Object that provides `get()/update()` operations to store metadata
63 * about the actual binary, e.g. its actual version.
64 */
65 storage: vscode.Memento;
53 } 66 }
54 67
55} 68}
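
`BinarySource` is effectively a tagged union: an explicit user-configured path, or a GitHub-release artifact plus the bookkeeping (`version`, `storage`) needed to keep it current. A simplified stand-in for that shape, with an exhaustiveness check added for illustration; these are not the real definitions:

// Illustrative stand-ins, not the real BinarySource variants.
type Source =
    | { kind: "explicit-path"; path: string }
    | { kind: "github-release"; file: string; dir: string; version: string };

function describeSource(source: Source): string {
    switch (source.kind) {
        case "explicit-path":
            return `user-provided binary at ${source.path}`;
        case "github-release":
            return `release asset ${source.file} (${source.version}) cached in ${source.dir}`;
        default: {
            const unreachable: never = source; // fails to compile if a variant goes unhandled
            return unreachable;
        }
    }
}
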
diff --git a/editors/code/src/installation/language_server.ts b/editors/code/src/installation/language_server.ts
deleted file mode 100644
index 4797c3f01..000000000
--- a/editors/code/src/installation/language_server.ts
+++ /dev/null
@@ -1,148 +0,0 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as fs } from "fs";
5import { promises as dns } from "dns";
6import { spawnSync } from "child_process";
7import { throttle } from "throttle-debounce";
8
9import { BinarySource } from "./interfaces";
10import { fetchLatestArtifactMetadata } from "./fetch_latest_artifact_metadata";
11import { downloadFile } from "./download_file";
12
13export async function downloadLatestLanguageServer(
14 {file: artifactFileName, dir: installationDir, repo}: BinarySource.GithubRelease
15) {
16 const { releaseName, downloadUrl } = (await fetchLatestArtifactMetadata(
17 repo, artifactFileName
18 ))!;
19
20 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
21 err?.code,
22 "EEXIST",
23 `Couldn't create directory "${installationDir}" to download `+
24 `language server binary: ${err.message}`
25 ));
26
27 const installationPath = path.join(installationDir, artifactFileName);
28
29 console.time("Downloading ra_lsp_server");
30 await vscode.window.withProgress(
31 {
32 location: vscode.ProgressLocation.Notification,
33 cancellable: false, // FIXME: add support for canceling download?
34 title: `Downloading language server (${releaseName})`
35 },
36 async (progress, _cancellationToken) => {
37 let lastPrecentage = 0;
38 const filePermissions = 0o755; // (rwx, r_x, r_x)
39 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
40 200,
41 /* noTrailing: */ true,
42 (readBytes, totalBytes) => {
43 const newPercentage = (readBytes / totalBytes) * 100;
44 progress.report({
45 message: newPercentage.toFixed(0) + "%",
46 increment: newPercentage - lastPrecentage
47 });
48
49 lastPrecentage = newPercentage;
50 })
51 );
52 }
53 );
54 console.timeEnd("Downloading ra_lsp_server");
55}
56export async function ensureLanguageServerBinary(
57 langServerSource: null | BinarySource
58): Promise<null | string> {
59
60 if (!langServerSource) {
61 vscode.window.showErrorMessage(
62 "Unfortunately we don't ship binaries for your platform yet. " +
63 "You need to manually clone rust-analyzer repository and " +
64 "run `cargo xtask install --server` to build the language server from sources. " +
65 "If you feel that your platform should be supported, please create an issue " +
66 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
67 "will consider it."
68 );
69 return null;
70 }
71
72 switch (langServerSource.type) {
73 case BinarySource.Type.ExplicitPath: {
74 if (isBinaryAvailable(langServerSource.path)) {
75 return langServerSource.path;
76 }
77
78 vscode.window.showErrorMessage(
79 `Unable to run ${langServerSource.path} binary. ` +
80 `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` +
81 "value to `null` or remove it from the settings to use it by default."
82 );
83 return null;
84 }
85 case BinarySource.Type.GithubRelease: {
86 const prebuiltBinaryPath = path.join(langServerSource.dir, langServerSource.file);
87
88 if (isBinaryAvailable(prebuiltBinaryPath)) {
89 return prebuiltBinaryPath;
90 }
91
92 const userResponse = await vscode.window.showInformationMessage(
93 "Language server binary for rust-analyzer was not found. " +
94 "Do you want to download it now?",
95 "Download now", "Cancel"
96 );
97 if (userResponse !== "Download now") return null;
98
99 try {
100 await downloadLatestLanguageServer(langServerSource);
101 } catch (err) {
102 vscode.window.showErrorMessage(
103 `Failed to download language server from ${langServerSource.repo.name} ` +
104 `GitHub repository: ${err.message}`
105 );
106
107 console.error(err);
108
109 dns.resolve('example.com').then(
110 addrs => console.log("DNS resolution for example.com was successful", addrs),
111 err => {
112 console.error(
113 "DNS resolution for example.com failed, " +
114 "there might be an issue with Internet availability"
115 );
116 console.error(err);
117 }
118 );
119
120 return null;
121 }
122
123 if (!isBinaryAvailable(prebuiltBinaryPath)) assert(false,
124 `Downloaded language server binary is not functional.` +
125 `Downloaded from: ${JSON.stringify(langServerSource)}`
126 );
127
128
129 vscode.window.showInformationMessage(
130 "Rust analyzer language server was successfully installed 🦀"
131 );
132
133 return prebuiltBinaryPath;
134 }
135 }
136
137 function isBinaryAvailable(binaryPath: string) {
138 const res = spawnSync(binaryPath, ["--version"]);
139
140 // ACHTUNG! `res` type declaration is inherently wrong, see
141 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
142
143 console.log("Checked binary availablity via --version", res);
144 console.log(binaryPath, "--version output:", res.output?.map(String));
145
146 return res.status === 0;
147 }
148}
diff --git a/editors/code/src/installation/server.ts b/editors/code/src/installation/server.ts
new file mode 100644
index 000000000..80cb719e3
--- /dev/null
+++ b/editors/code/src/installation/server.ts
@@ -0,0 +1,124 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as dns } from "dns";
5import { spawnSync } from "child_process";
6
7import { BinarySource } from "./interfaces";
8import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
9import { downloadArtifact } from "./download_artifact";
10
11export async function ensureServerBinary(source: null | BinarySource): Promise<null | string> {
12 if (!source) {
13 vscode.window.showErrorMessage(
14 "Unfortunately we don't ship binaries for your platform yet. " +
15 "You need to manually clone rust-analyzer repository and " +
16 "run `cargo xtask install --server` to build the language server from sources. " +
17 "If you feel that your platform should be supported, please create an issue " +
18 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
19 "will consider it."
20 );
21 return null;
22 }
23
24 switch (source.type) {
25 case BinarySource.Type.ExplicitPath: {
26 if (isBinaryAvailable(source.path)) {
27 return source.path;
28 }
29
30 vscode.window.showErrorMessage(
31 `Unable to run ${source.path} binary. ` +
 32 `To use the pre-built language server instead, set "rust-analyzer.raLspServerPath" ` +
 33 "to `null` or remove it from the settings."
34 );
35 return null;
36 }
37 case BinarySource.Type.GithubRelease: {
38 const prebuiltBinaryPath = path.join(source.dir, source.file);
39
40 const installedVersion: null | string = getServerVersion(source.storage);
41 const requiredVersion: string = source.version;
42
43 console.log("Installed version:", installedVersion, "required:", requiredVersion);
44
45 if (isBinaryAvailable(prebuiltBinaryPath) && installedVersion == requiredVersion) {
46 // FIXME: check for new releases and notify the user to update if possible
47 return prebuiltBinaryPath;
48 }
49
50 const userResponse = await vscode.window.showInformationMessage(
51 `Language server version ${source.version} for rust-analyzer is not installed. ` +
52 "Do you want to download it now?",
53 "Download now", "Cancel"
54 );
55 if (userResponse !== "Download now") return null;
56
57 if (!await downloadServer(source)) return null;
58
59 return prebuiltBinaryPath;
60 }
61 }
62}
63
64async function downloadServer(source: BinarySource.GithubRelease): Promise<boolean> {
65 try {
66 const releaseInfo = (await fetchArtifactReleaseInfo(source.repo, source.file, source.version))!;
67
68 await downloadArtifact(releaseInfo, source.file, source.dir, "language server");
69 await setServerVersion(source.storage, releaseInfo.releaseName);
70 } catch (err) {
71 vscode.window.showErrorMessage(
72 `Failed to download language server from ${source.repo.name} ` +
73 `GitHub repository: ${err.message}`
74 );
75
76 console.error(err);
77
78 dns.resolve('example.com').then(
79 addrs => console.log("DNS resolution for example.com was successful", addrs),
80 err => {
81 console.error(
82 "DNS resolution for example.com failed, " +
83 "there might be an issue with Internet availability"
84 );
85 console.error(err);
86 }
87 );
88 return false;
89 }
90
91 if (!isBinaryAvailable(path.join(source.dir, source.file))) assert(false,
 92 `Downloaded language server binary is not functional. ` +
93 `Downloaded from: ${JSON.stringify(source, null, 4)}`
94 );
95
96 vscode.window.showInformationMessage(
97 "Rust analyzer language server was successfully installed 🦀"
98 );
99
100 return true;
101}
102
103function isBinaryAvailable(binaryPath: string): boolean {
104 const res = spawnSync(binaryPath, ["--version"]);
105
106 // ACHTUNG! `res` type declaration is inherently wrong, see
107 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
108
 109 console.log("Checked binary availability via --version", res);
110 console.log(binaryPath, "--version output:", res.output?.map(String));
111
112 return res.status === 0;
113}
114
115function getServerVersion(storage: vscode.Memento): null | string {
116 const version = storage.get<null | string>("server-version", null);
117 console.log("Get server-version:", version);
118 return version;
119}
120
121async function setServerVersion(storage: vscode.Memento, version: string): Promise<void> {
122 console.log("Set server-version:", version);
123 await storage.update("server-version", version.toString());
124}
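
Pulling the GithubRelease branch together: the server is (re)downloaded whenever the cached binary fails the `--version` probe or the version recorded in `vscode.Memento` no longer matches the one this extension build requires. That decision as a pure function, for illustration; the names are not from the source:

interface InstallState {
    binaryWorks: boolean;            // did spawning `<binary> --version` succeed?
    installedVersion: string | null; // release tag recorded in the extension's Memento
}

function needsDownload(state: InstallState, requiredVersion: string): boolean {
    return !state.binaryWorks || state.installedVersion !== requiredVersion;
}

// needsDownload({ binaryWorks: true, installedVersion: "2020-02-04" }, "2020-02-11") === true
// needsDownload({ binaryWorks: true, installedVersion: "2020-02-11" }, "2020-02-11") === false
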