-rw-r--r--  .github/workflows/release.yaml | 2
-rw-r--r--  Cargo.lock | 3
-rw-r--r--  Cargo.toml | 5
-rw-r--r--  crates/ra_cli/Cargo.toml | 2
-rw-r--r--  crates/ra_cli/src/analysis_bench.rs | 2
-rw-r--r--  crates/ra_cli/src/analysis_stats.rs | 83
-rw-r--r--  crates/ra_cli/src/main.rs | 22
-rw-r--r--  crates/ra_hir_def/src/nameres/collector.rs | 2
-rw-r--r--  crates/ra_hir_def/src/resolver.rs | 6
-rw-r--r--  crates/ra_hir_ty/src/display.rs | 373
-rw-r--r--  crates/ra_hir_ty/src/infer/unify.rs | 2
-rw-r--r--  crates/ra_hir_ty/src/lib.rs | 383
-rw-r--r--  crates/ra_hir_ty/src/lower.rs | 40
-rw-r--r--  crates/ra_hir_ty/src/marks.rs | 2
-rw-r--r--  crates/ra_hir_ty/src/method_resolution.rs | 9
-rw-r--r--  crates/ra_hir_ty/src/tests/coercion.rs | 22
-rw-r--r--  crates/ra_hir_ty/src/tests/method_resolution.rs | 32
-rw-r--r--  crates/ra_hir_ty/src/tests/traits.rs | 48
-rw-r--r--  crates/ra_hir_ty/src/traits.rs | 3
-rw-r--r--  crates/ra_hir_ty/src/traits/chalk.rs | 4
-rw-r--r--  crates/ra_ide/src/snapshots/highlighting.html | 1
-rw-r--r--  crates/ra_ide/src/snapshots/rainbow_highlighting.html | 1
-rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs | 1
-rw-r--r--  crates/ra_ide_db/Cargo.toml | 1
-rw-r--r--  crates/ra_lsp_server/src/main.rs | 7
-rw-r--r--  crates/ra_prof/src/lib.rs | 7
-rw-r--r--  crates/ra_project_model/src/lib.rs | 6
-rw-r--r--  docs/user/README.md | 280
-rw-r--r--  docs/user/readme.adoc | 154
-rw-r--r--  editors/code/package-lock.json | 20
-rw-r--r--  editors/code/package.json | 29
-rw-r--r--  editors/code/rollup.config.js | 3
-rw-r--r--  editors/code/src/client.ts | 31
-rw-r--r--  editors/code/src/config.ts | 294
-rw-r--r--  editors/code/src/ctx.ts | 4
-rw-r--r--  editors/code/src/installation/download_artifact.ts | 58
-rw-r--r--  editors/code/src/installation/fetch_artifact_release_info.ts (renamed from editors/code/src/installation/fetch_latest_artifact_metadata.ts) | 20
-rw-r--r--  editors/code/src/installation/interfaces.ts | 15
-rw-r--r--  editors/code/src/installation/language_server.ts | 148
-rw-r--r--  editors/code/src/installation/server.ts | 124
-rw-r--r--  editors/code/src/status_display.ts | 4
-rw-r--r--  xtask/src/cmd.rs | 56
-rw-r--r--  xtask/src/install.rs | 118
-rw-r--r--  xtask/src/lib.rs | 77
-rw-r--r--  xtask/src/main.rs | 3
-rw-r--r--  xtask/src/not_bash.rs | 165
-rw-r--r--  xtask/src/pre_commit.rs | 6
47 files changed, 1415 insertions, 1263 deletions
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
index ff7a95ee1..eae4fbcb5 100644
--- a/.github/workflows/release.yaml
+++ b/.github/workflows/release.yaml
@@ -190,4 +190,4 @@ jobs:
190 - name: Publish Extension 190 - name: Publish Extension
191 working-directory: ./editors/code 191 working-directory: ./editors/code
192 # token from https://dev.azure.com/rust-analyzer/ 192 # token from https://dev.azure.com/rust-analyzer/
193 run: ./node_modules/vsce/out/vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }} 193 run: npx vsce publish 0.1.$(date +%Y%m%d) --pat ${{ secrets.MARKETPLACE_TOKEN }}
diff --git a/Cargo.lock b/Cargo.lock
index f1651edaa..f44e514dd 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1015,6 +1015,7 @@ name = "ra_cli"
1015version = "0.1.0" 1015version = "0.1.0"
1016dependencies = [ 1016dependencies = [
1017 "env_logger", 1017 "env_logger",
1018 "itertools",
1018 "pico-args", 1019 "pico-args",
1019 "ra_batch", 1020 "ra_batch",
1020 "ra_db", 1021 "ra_db",
@@ -1024,6 +1025,7 @@ dependencies = [
1024 "ra_ide", 1025 "ra_ide",
1025 "ra_prof", 1026 "ra_prof",
1026 "ra_syntax", 1027 "ra_syntax",
1028 "rand 0.7.3",
1027] 1029]
1028 1030
1029[[package]] 1031[[package]]
@@ -1174,7 +1176,6 @@ dependencies = [
1174 "ra_prof", 1176 "ra_prof",
1175 "ra_syntax", 1177 "ra_syntax",
1176 "ra_text_edit", 1178 "ra_text_edit",
1177 "rand 0.7.3",
1178 "rayon", 1179 "rayon",
1179 "rustc-hash", 1180 "rustc-hash",
1180 "superslice", 1181 "superslice",
diff --git a/Cargo.toml b/Cargo.toml
index e5620b1b7..c034e2424 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,3 +31,8 @@ opt-level = 0
31 31
32[patch.'crates-io'] 32[patch.'crates-io']
33# rowan = { path = "../rowan" } 33# rowan = { path = "../rowan" }
34
35[patch.'https://github.com/rust-lang/chalk.git']
36# chalk-solve = { path = "../chalk/chalk-solve" }
37# chalk-rust-ir = { path = "../chalk/chalk-rust-ir" }
38# chalk-ir = { path = "../chalk/chalk-ir" }
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml
index bcd408421..53d4876f6 100644
--- a/crates/ra_cli/Cargo.toml
+++ b/crates/ra_cli/Cargo.toml
@@ -6,8 +6,10 @@ authors = ["rust-analyzer developers"]
6publish = false 6publish = false
7 7
8[dependencies] 8[dependencies]
9itertools = "0.8.0"
9pico-args = "0.3.0" 10pico-args = "0.3.0"
10env_logger = { version = "0.7.1", default-features = false } 11env_logger = { version = "0.7.1", default-features = false }
12rand = { version = "0.7.0", features = ["small_rng"] }
11 13
12ra_syntax = { path = "../ra_syntax" } 14ra_syntax = { path = "../ra_syntax" }
13ra_ide = { path = "../ra_ide" } 15ra_ide = { path = "../ra_ide" }
diff --git a/crates/ra_cli/src/analysis_bench.rs b/crates/ra_cli/src/analysis_bench.rs
index 5485a38ff..764df6b9e 100644
--- a/crates/ra_cli/src/analysis_bench.rs
+++ b/crates/ra_cli/src/analysis_bench.rs
@@ -20,6 +20,8 @@ pub(crate) enum Op {
20} 20}
21 21
22pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> { 22pub(crate) fn run(verbose: bool, path: &Path, op: Op) -> Result<()> {
23 ra_prof::init();
24
23 let start = Instant::now(); 25 let start = Instant::now();
24 eprint!("loading: "); 26 eprint!("loading: ");
25 let (mut host, roots) = ra_batch::load_cargo(path)?; 27 let (mut host, roots) = ra_batch::load_cargo(path)?;
diff --git a/crates/ra_cli/src/analysis_stats.rs b/crates/ra_cli/src/analysis_stats.rs
index 833235bff..6d2dd34c6 100644
--- a/crates/ra_cli/src/analysis_stats.rs
+++ b/crates/ra_cli/src/analysis_stats.rs
@@ -2,6 +2,9 @@
2 2
3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; 3use std::{collections::HashSet, fmt::Write, path::Path, time::Instant};
4 4
5use itertools::Itertools;
6use rand::{seq::SliceRandom, thread_rng};
7
5use hir::{ 8use hir::{
6 db::{DefDatabase, HirDatabase}, 9 db::{DefDatabase, HirDatabase},
7 AssocItem, Crate, HasSource, HirDisplay, ModuleDef, 10 AssocItem, Crate, HasSource, HirDisplay, ModuleDef,
@@ -19,6 +22,7 @@ pub fn run(
19 path: &Path, 22 path: &Path,
20 only: Option<&str>, 23 only: Option<&str>,
21 with_deps: bool, 24 with_deps: bool,
25 randomize: bool,
22) -> Result<()> { 26) -> Result<()> {
23 let db_load_time = Instant::now(); 27 let db_load_time = Instant::now();
24 let (mut host, roots) = ra_batch::load_cargo(path)?; 28 let (mut host, roots) = ra_batch::load_cargo(path)?;
@@ -41,7 +45,11 @@ pub fn run(
41 }) 45 })
42 .collect::<HashSet<_>>(); 46 .collect::<HashSet<_>>();
43 47
44 for krate in Crate::all(db) { 48 let mut krates = Crate::all(db);
49 if randomize {
50 krates.shuffle(&mut thread_rng());
51 }
52 for krate in krates {
45 let module = krate.root_module(db).expect("crate without root module"); 53 let module = krate.root_module(db).expect("crate without root module");
46 let file_id = module.definition_source(db).file_id; 54 let file_id = module.definition_source(db).file_id;
47 if members.contains(&db.file_source_root(file_id.original_file(db))) { 55 if members.contains(&db.file_source_root(file_id.original_file(db))) {
@@ -50,6 +58,10 @@ pub fn run(
50 } 58 }
51 } 59 }
52 60
61 if randomize {
62 visit_queue.shuffle(&mut thread_rng());
63 }
64
53 println!("Crates in this dir: {}", num_crates); 65 println!("Crates in this dir: {}", num_crates);
54 let mut num_decls = 0; 66 let mut num_decls = 0;
55 let mut funcs = Vec::new(); 67 let mut funcs = Vec::new();
@@ -79,10 +91,14 @@ pub fn run(
79 println!("Total functions: {}", funcs.len()); 91 println!("Total functions: {}", funcs.len());
80 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage()); 92 println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage());
81 93
94 if randomize {
95 funcs.shuffle(&mut thread_rng());
96 }
97
82 let inference_time = Instant::now(); 98 let inference_time = Instant::now();
83 let mut bar = match verbosity { 99 let mut bar = match verbosity {
84 Verbosity::Verbose | Verbosity::Normal => ProgressReport::new(funcs.len() as u64), 100 Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(),
85 Verbosity::Quiet => ProgressReport::hidden(), 101 _ => ProgressReport::new(funcs.len() as u64),
86 }; 102 };
87 103
88 bar.tick(); 104 bar.tick();
@@ -92,7 +108,20 @@ pub fn run(
92 let mut num_type_mismatches = 0; 108 let mut num_type_mismatches = 0;
93 for f in funcs { 109 for f in funcs {
94 let name = f.name(db); 110 let name = f.name(db);
95 let mut msg = format!("processing: {}", name); 111 let full_name = f
112 .module(db)
113 .path_to_root(db)
114 .into_iter()
115 .rev()
116 .filter_map(|it| it.name(db))
117 .chain(Some(f.name(db)))
118 .join("::");
119 if let Some(only_name) = only {
120 if name.to_string() != only_name && full_name != only_name {
121 continue;
122 }
123 }
124 let mut msg = format!("processing: {}", full_name);
96 if verbosity.is_verbose() { 125 if verbosity.is_verbose() {
97 let src = f.source(db); 126 let src = f.source(db);
98 let original_file = src.file_id.original_file(db); 127 let original_file = src.file_id.original_file(db);
@@ -100,15 +129,15 @@ pub fn run(
100 let syntax_range = src.value.syntax().text_range(); 129 let syntax_range = src.value.syntax().text_range();
101 write!(msg, " ({:?} {})", path, syntax_range).unwrap(); 130 write!(msg, " ({:?} {})", path, syntax_range).unwrap();
102 } 131 }
103 bar.set_message(&msg); 132 if verbosity.is_spammy() {
104 if let Some(only_name) = only { 133 bar.println(format!("{}", msg));
105 if name.to_string() != only_name {
106 continue;
107 }
108 } 134 }
135 bar.set_message(&msg);
109 let f_id = FunctionId::from(f); 136 let f_id = FunctionId::from(f);
110 let body = db.body(f_id.into()); 137 let body = db.body(f_id.into());
111 let inference_result = db.infer(f_id.into()); 138 let inference_result = db.infer(f_id.into());
139 let (previous_exprs, previous_unknown, previous_partially_unknown) =
140 (num_exprs, num_exprs_unknown, num_exprs_partially_unknown);
112 for (expr_id, _) in body.exprs.iter() { 141 for (expr_id, _) in body.exprs.iter() {
113 let ty = &inference_result[expr_id]; 142 let ty = &inference_result[expr_id];
114 num_exprs += 1; 143 num_exprs += 1;
@@ -125,6 +154,33 @@ pub fn run(
125 num_exprs_partially_unknown += 1; 154 num_exprs_partially_unknown += 1;
126 } 155 }
127 } 156 }
157 if only.is_some() && verbosity.is_spammy() {
158 // in super-verbose mode for just one function, we print every single expression
159 let (_, sm) = db.body_with_source_map(f_id.into());
160 let src = sm.expr_syntax(expr_id);
161 if let Some(src) = src {
162 let original_file = src.file_id.original_file(db);
163 let line_index = host.analysis().file_line_index(original_file).unwrap();
164 let text_range = src.value.either(
165 |it| it.syntax_node_ptr().range(),
166 |it| it.syntax_node_ptr().range(),
167 );
168 let (start, end) = (
169 line_index.line_col(text_range.start()),
170 line_index.line_col(text_range.end()),
171 );
172 bar.println(format!(
173 "{}:{}-{}:{}: {}",
174 start.line + 1,
175 start.col_utf16,
176 end.line + 1,
177 end.col_utf16,
178 ty.display(db)
179 ));
180 } else {
181 bar.println(format!("unknown location: {}", ty.display(db)));
182 }
183 }
128 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { 184 if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) {
129 num_type_mismatches += 1; 185 num_type_mismatches += 1;
130 if verbosity.is_verbose() { 186 if verbosity.is_verbose() {
@@ -164,6 +220,15 @@ pub fn run(
164 } 220 }
165 } 221 }
166 } 222 }
223 if verbosity.is_spammy() {
224 bar.println(format!(
225 "In {}: {} exprs, {} unknown, {} partial",
226 full_name,
227 num_exprs - previous_exprs,
228 num_exprs_unknown - previous_unknown,
229 num_exprs_partially_unknown - previous_partially_unknown
230 ));
231 }
167 bar.inc(1); 232 bar.inc(1);
168 } 233 }
169 bar.finish_and_clear(); 234 bar.finish_and_clear();
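Note: the new --randomize flag above shuffles the crate list, the module visit queue, and the collected functions before inference runs. A minimal, self-contained sketch of that shuffling pattern, assuming rand 0.7's SliceRandom trait as added to ra_cli's Cargo.toml (the data here is purely illustrative):

    use rand::{seq::SliceRandom, thread_rng};

    fn main() {
        // Shuffle a work list in place, as analysis_stats now does for crates,
        // the visit queue, and the function list when --randomize is passed.
        let mut funcs = vec!["crate_a::foo", "crate_b::bar", "crate_c::baz"];
        funcs.shuffle(&mut thread_rng());
        for name in funcs {
            println!("processing: {}", name);
        }
    }
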
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 806612c2c..6a0e447b9 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -16,6 +16,7 @@ type Result<T> = std::result::Result<T, Box<dyn Error + Send + Sync>>;
16 16
17#[derive(Clone, Copy)] 17#[derive(Clone, Copy)]
18pub enum Verbosity { 18pub enum Verbosity {
19 Spammy,
19 Verbose, 20 Verbose,
20 Normal, 21 Normal,
21 Quiet, 22 Quiet,
@@ -24,7 +25,13 @@ pub enum Verbosity {
24impl Verbosity { 25impl Verbosity {
25 fn is_verbose(self) -> bool { 26 fn is_verbose(self) -> bool {
26 match self { 27 match self {
27 Verbosity::Verbose => true, 28 Verbosity::Verbose | Verbosity::Spammy => true,
29 _ => false,
30 }
31 }
32 fn is_spammy(self) -> bool {
33 match self {
34 Verbosity::Spammy => true,
28 _ => false, 35 _ => false,
29 } 36 }
30 } 37 }
@@ -86,14 +93,18 @@ fn main() -> Result<()> {
86 return Ok(()); 93 return Ok(());
87 } 94 }
88 let verbosity = match ( 95 let verbosity = match (
96 matches.contains(["-vv", "--spammy"]),
89 matches.contains(["-v", "--verbose"]), 97 matches.contains(["-v", "--verbose"]),
90 matches.contains(["-q", "--quiet"]), 98 matches.contains(["-q", "--quiet"]),
91 ) { 99 ) {
92 (false, false) => Verbosity::Normal, 100 (true, _, true) => Err("Invalid flags: -q conflicts with -vv")?,
93 (false, true) => Verbosity::Quiet, 101 (true, _, false) => Verbosity::Spammy,
94 (true, false) => Verbosity::Verbose, 102 (false, false, false) => Verbosity::Normal,
95 (true, true) => Err("Invalid flags: -q conflicts with -v")?, 103 (false, false, true) => Verbosity::Quiet,
104 (false, true, false) => Verbosity::Verbose,
105 (false, true, true) => Err("Invalid flags: -q conflicts with -v")?,
96 }; 106 };
107 let randomize = matches.contains("--randomize");
97 let memory_usage = matches.contains("--memory-usage"); 108 let memory_usage = matches.contains("--memory-usage");
98 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; 109 let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?;
99 let with_deps: bool = matches.contains("--with-deps"); 110 let with_deps: bool = matches.contains("--with-deps");
@@ -111,6 +122,7 @@ fn main() -> Result<()> {
111 path.as_ref(), 122 path.as_ref(),
112 only.as_ref().map(String::as_ref), 123 only.as_ref().map(String::as_ref),
113 with_deps, 124 with_deps,
125 randomize,
114 )?; 126 )?;
115 } 127 }
116 "analysis-bench" => { 128 "analysis-bench" => {
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index 6352c71ef..b1f3f525d 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -146,7 +146,7 @@ where
146 ReachedFixedPoint::Yes => break, 146 ReachedFixedPoint::Yes => break,
147 ReachedFixedPoint::No => i += 1, 147 ReachedFixedPoint::No => i += 1,
148 } 148 }
149 if i == 1000 { 149 if i == 10000 {
150 log::error!("name resolution is stuck"); 150 log::error!("name resolution is stuck");
151 break; 151 break;
152 } 152 }
diff --git a/crates/ra_hir_def/src/resolver.rs b/crates/ra_hir_def/src/resolver.rs
index 05cf4646a..e2b228e80 100644
--- a/crates/ra_hir_def/src/resolver.rs
+++ b/crates/ra_hir_def/src/resolver.rs
@@ -542,11 +542,7 @@ impl Resolver {
542 542
543 fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver { 543 fn push_generic_params_scope(self, db: &impl DefDatabase, def: GenericDefId) -> Resolver {
544 let params = db.generic_params(def); 544 let params = db.generic_params(def);
545 if params.types.is_empty() { 545 self.push_scope(Scope::GenericParams { def, params })
546 self
547 } else {
548 self.push_scope(Scope::GenericParams { def, params })
549 }
550 } 546 }
551 547
552 fn push_impl_block_scope(self, impl_block: ImplId) -> Resolver { 548 fn push_impl_block_scope(self, impl_block: ImplId) -> Resolver {
diff --git a/crates/ra_hir_ty/src/display.rs b/crates/ra_hir_ty/src/display.rs
index d1ff85f0f..14e089cf4 100644
--- a/crates/ra_hir_ty/src/display.rs
+++ b/crates/ra_hir_ty/src/display.rs
@@ -2,7 +2,12 @@
2 2
3use std::fmt; 3use std::fmt;
4 4
5use crate::db::HirDatabase; 5use crate::{
6 db::HirDatabase, utils::generics, ApplicationTy, CallableDef, FnSig, GenericPredicate,
7 Obligation, ProjectionTy, Substs, TraitRef, Ty, TypeCtor,
8};
9use hir_def::{generics::TypeParamProvenance, AdtId, AssocContainerId, Lookup};
10use hir_expand::name::Name;
6 11
7pub struct HirFormatter<'a, 'b, DB> { 12pub struct HirFormatter<'a, 'b, DB> {
8 pub db: &'a DB, 13 pub db: &'a DB,
@@ -97,3 +102,369 @@ where
97 }) 102 })
98 } 103 }
99} 104}
105
106const TYPE_HINT_TRUNCATION: &str = "…";
107
108impl HirDisplay for &Ty {
109 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
110 HirDisplay::hir_fmt(*self, f)
111 }
112}
113
114impl HirDisplay for ApplicationTy {
115 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
116 if f.should_truncate() {
117 return write!(f, "{}", TYPE_HINT_TRUNCATION);
118 }
119
120 match self.ctor {
121 TypeCtor::Bool => write!(f, "bool")?,
122 TypeCtor::Char => write!(f, "char")?,
123 TypeCtor::Int(t) => write!(f, "{}", t)?,
124 TypeCtor::Float(t) => write!(f, "{}", t)?,
125 TypeCtor::Str => write!(f, "str")?,
126 TypeCtor::Slice => {
127 let t = self.parameters.as_single();
128 write!(f, "[{}]", t.display(f.db))?;
129 }
130 TypeCtor::Array => {
131 let t = self.parameters.as_single();
132 write!(f, "[{}; _]", t.display(f.db))?;
133 }
134 TypeCtor::RawPtr(m) => {
135 let t = self.parameters.as_single();
136 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
137 }
138 TypeCtor::Ref(m) => {
139 let t = self.parameters.as_single();
140 let ty_display = if f.omit_verbose_types() {
141 t.display_truncated(f.db, f.max_size)
142 } else {
143 t.display(f.db)
144 };
145 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
146 }
147 TypeCtor::Never => write!(f, "!")?,
148 TypeCtor::Tuple { .. } => {
149 let ts = &self.parameters;
150 if ts.len() == 1 {
151 write!(f, "({},)", ts[0].display(f.db))?;
152 } else {
153 write!(f, "(")?;
154 f.write_joined(&*ts.0, ", ")?;
155 write!(f, ")")?;
156 }
157 }
158 TypeCtor::FnPtr { .. } => {
159 let sig = FnSig::from_fn_ptr_substs(&self.parameters);
160 write!(f, "fn(")?;
161 f.write_joined(sig.params(), ", ")?;
162 write!(f, ") -> {}", sig.ret().display(f.db))?;
163 }
164 TypeCtor::FnDef(def) => {
165 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
166 let name = match def {
167 CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
168 CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
169 CallableDef::EnumVariantId(e) => {
170 let enum_data = f.db.enum_data(e.parent);
171 enum_data.variants[e.local_id].name.clone()
172 }
173 };
174 match def {
175 CallableDef::FunctionId(_) => write!(f, "fn {}", name)?,
176 CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
177 write!(f, "{}", name)?
178 }
179 }
180 if self.parameters.len() > 0 {
181 let generics = generics(f.db, def.into());
182 let (parent_params, self_param, type_params, _impl_trait_params) =
183 generics.provenance_split();
184 let total_len = parent_params + self_param + type_params;
185 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
186 if total_len > 0 {
187 write!(f, "<")?;
188 f.write_joined(&self.parameters.0[..total_len], ", ")?;
189 write!(f, ">")?;
190 }
191 }
192 write!(f, "(")?;
193 f.write_joined(sig.params(), ", ")?;
194 write!(f, ") -> {}", sig.ret().display(f.db))?;
195 }
196 TypeCtor::Adt(def_id) => {
197 let name = match def_id {
198 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
199 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
200 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
201 };
202 write!(f, "{}", name)?;
203 if self.parameters.len() > 0 {
204 write!(f, "<")?;
205
206 let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
207 let parameters_to_write = if f.omit_verbose_types() {
208 match self
209 .ctor
210 .as_generic_def()
211 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
212 .filter(|defaults| !defaults.is_empty())
213 {
214 Option::None => self.parameters.0.as_ref(),
215 Option::Some(default_parameters) => {
216 for (i, parameter) in self.parameters.iter().enumerate() {
217 match (parameter, default_parameters.get(i)) {
218 (&Ty::Unknown, _) | (_, None) => {
219 non_default_parameters.push(parameter.clone())
220 }
221 (_, Some(default_parameter))
222 if parameter != default_parameter =>
223 {
224 non_default_parameters.push(parameter.clone())
225 }
226 _ => (),
227 }
228 }
229 &non_default_parameters
230 }
231 }
232 } else {
233 self.parameters.0.as_ref()
234 };
235
236 f.write_joined(parameters_to_write, ", ")?;
237 write!(f, ">")?;
238 }
239 }
240 TypeCtor::AssociatedType(type_alias) => {
241 let trait_ = match type_alias.lookup(f.db).container {
242 AssocContainerId::TraitId(it) => it,
243 _ => panic!("not an associated type"),
244 };
245 let trait_name = f.db.trait_data(trait_).name.clone();
246 let name = f.db.type_alias_data(type_alias).name.clone();
247 write!(f, "{}::{}", trait_name, name)?;
248 if self.parameters.len() > 0 {
249 write!(f, "<")?;
250 f.write_joined(&*self.parameters.0, ", ")?;
251 write!(f, ">")?;
252 }
253 }
254 TypeCtor::Closure { .. } => {
255 let sig = self.parameters[0]
256 .callable_sig(f.db)
257 .expect("first closure parameter should contain signature");
258 let return_type_hint = sig.ret().display(f.db);
259 if sig.params().is_empty() {
260 write!(f, "|| -> {}", return_type_hint)?;
261 } else if f.omit_verbose_types() {
262 write!(f, "|{}| -> {}", TYPE_HINT_TRUNCATION, return_type_hint)?;
263 } else {
264 write!(f, "|")?;
265 f.write_joined(sig.params(), ", ")?;
266 write!(f, "| -> {}", return_type_hint)?;
267 };
268 }
269 }
270 Ok(())
271 }
272}
273
274impl HirDisplay for ProjectionTy {
275 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
276 if f.should_truncate() {
277 return write!(f, "{}", TYPE_HINT_TRUNCATION);
278 }
279
280 let trait_name = f.db.trait_data(self.trait_(f.db)).name.clone();
281 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_name,)?;
282 if self.parameters.len() > 1 {
283 write!(f, "<")?;
284 f.write_joined(&self.parameters[1..], ", ")?;
285 write!(f, ">")?;
286 }
287 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
288 Ok(())
289 }
290}
291
292impl HirDisplay for Ty {
293 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
294 if f.should_truncate() {
295 return write!(f, "{}", TYPE_HINT_TRUNCATION);
296 }
297
298 match self {
299 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
300 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
301 Ty::Placeholder(id) => {
302 let generics = generics(f.db, id.parent);
303 let param_data = &generics.params.types[id.local_id];
304 match param_data.provenance {
305 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
306 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
307 }
308 TypeParamProvenance::ArgumentImplTrait => {
309 write!(f, "impl ")?;
310 let bounds = f.db.generic_predicates_for_param(*id);
311 let substs = Substs::type_params_for_generics(&generics);
312 write_bounds_like_dyn_trait(
313 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
314 f,
315 )?;
316 }
317 }
318 }
319 Ty::Bound(idx) => write!(f, "?{}", idx)?,
320 Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
321 match self {
322 Ty::Dyn(_) => write!(f, "dyn ")?,
323 Ty::Opaque(_) => write!(f, "impl ")?,
324 _ => unreachable!(),
325 };
326 write_bounds_like_dyn_trait(&predicates, f)?;
327 }
328 Ty::Unknown => write!(f, "{{unknown}}")?,
329 Ty::Infer(..) => write!(f, "_")?,
330 }
331 Ok(())
332 }
333}
334
335fn write_bounds_like_dyn_trait(
336 predicates: &[GenericPredicate],
337 f: &mut HirFormatter<impl HirDatabase>,
338) -> fmt::Result {
339 // Note: This code is written to produce nice results (i.e.
340 // corresponding to surface Rust) for types that can occur in
341 // actual Rust. It will have weird results if the predicates
342 // aren't as expected (i.e. self types = $0, projection
343 // predicates for a certain trait come after the Implemented
344 // predicate for that trait).
345 let mut first = true;
346 let mut angle_open = false;
347 for p in predicates.iter() {
348 match p {
349 GenericPredicate::Implemented(trait_ref) => {
350 if angle_open {
351 write!(f, ">")?;
352 }
353 if !first {
354 write!(f, " + ")?;
355 }
356 // We assume that the self type is $0 (i.e. the
357 // existential) here, which is the only thing that's
358 // possible in actual Rust, and hence don't print it
359 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?;
360 if trait_ref.substs.len() > 1 {
361 write!(f, "<")?;
362 f.write_joined(&trait_ref.substs[1..], ", ")?;
363 // there might be assoc type bindings, so we leave the angle brackets open
364 angle_open = true;
365 }
366 }
367 GenericPredicate::Projection(projection_pred) => {
368 // in types in actual Rust, these will always come
369 // after the corresponding Implemented predicate
370 if angle_open {
371 write!(f, ", ")?;
372 } else {
373 write!(f, "<")?;
374 angle_open = true;
375 }
376 let name =
377 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
378 write!(f, "{} = ", name)?;
379 projection_pred.ty.hir_fmt(f)?;
380 }
381 GenericPredicate::Error => {
382 if angle_open {
383 // impl Trait<X, {error}>
384 write!(f, ", ")?;
385 } else if !first {
386 // impl Trait + {error}
387 write!(f, " + ")?;
388 }
389 p.hir_fmt(f)?;
390 }
391 }
392 first = false;
393 }
394 if angle_open {
395 write!(f, ">")?;
396 }
397 Ok(())
398}
399
400impl TraitRef {
401 fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
402 if f.should_truncate() {
403 return write!(f, "{}", TYPE_HINT_TRUNCATION);
404 }
405
406 self.substs[0].hir_fmt(f)?;
407 if use_as {
408 write!(f, " as ")?;
409 } else {
410 write!(f, ": ")?;
411 }
412 write!(f, "{}", f.db.trait_data(self.trait_).name.clone())?;
413 if self.substs.len() > 1 {
414 write!(f, "<")?;
415 f.write_joined(&self.substs[1..], ", ")?;
416 write!(f, ">")?;
417 }
418 Ok(())
419 }
420}
421
422impl HirDisplay for TraitRef {
423 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
424 self.hir_fmt_ext(f, false)
425 }
426}
427
428impl HirDisplay for &GenericPredicate {
429 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
430 HirDisplay::hir_fmt(*self, f)
431 }
432}
433
434impl HirDisplay for GenericPredicate {
435 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
436 if f.should_truncate() {
437 return write!(f, "{}", TYPE_HINT_TRUNCATION);
438 }
439
440 match self {
441 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
442 GenericPredicate::Projection(projection_pred) => {
443 write!(f, "<")?;
444 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
445 write!(
446 f,
447 ">::{} = {}",
448 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
449 projection_pred.ty.display(f.db)
450 )?;
451 }
452 GenericPredicate::Error => write!(f, "{{error}}")?,
453 }
454 Ok(())
455 }
456}
457
458impl HirDisplay for Obligation {
459 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
460 match self {
461 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
462 Obligation::Projection(proj) => write!(
463 f,
464 "Normalize({} => {})",
465 proj.projection_ty.display(f.db),
466 proj.ty.display(f.db)
467 ),
468 }
469 }
470}
diff --git a/crates/ra_hir_ty/src/infer/unify.rs b/crates/ra_hir_ty/src/infer/unify.rs
index fe05642ae..1dc842f40 100644
--- a/crates/ra_hir_ty/src/infer/unify.rs
+++ b/crates/ra_hir_ty/src/infer/unify.rs
@@ -249,6 +249,8 @@ impl InferenceTable {
249 match (ty1, ty2) { 249 match (ty1, ty2) {
250 (Ty::Unknown, _) | (_, Ty::Unknown) => true, 250 (Ty::Unknown, _) | (_, Ty::Unknown) => true,
251 251
252 (Ty::Placeholder(p1), Ty::Placeholder(p2)) if *p1 == *p2 => true,
253
252 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2))) 254 (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
253 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2))) 255 | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
254 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2))) 256 | (Ty::Infer(InferTy::FloatVar(tv1)), Ty::Infer(InferTy::FloatVar(tv2)))
diff --git a/crates/ra_hir_ty/src/lib.rs b/crates/ra_hir_ty/src/lib.rs
index c5fe18c85..571579cc4 100644
--- a/crates/ra_hir_ty/src/lib.rs
+++ b/crates/ra_hir_ty/src/lib.rs
@@ -41,13 +41,12 @@ mod marks;
41 41
42use std::ops::Deref; 42use std::ops::Deref;
43use std::sync::Arc; 43use std::sync::Arc;
44use std::{fmt, iter, mem}; 44use std::{iter, mem};
45 45
46use hir_def::{ 46use hir_def::{
47 expr::ExprId, generics::TypeParamProvenance, type_ref::Mutability, AdtId, AssocContainerId, 47 expr::ExprId, type_ref::Mutability, AdtId, AssocContainerId, DefWithBodyId, GenericDefId,
48 DefWithBodyId, GenericDefId, HasModule, Lookup, TraitId, TypeAliasId, TypeParamId, 48 HasModule, Lookup, TraitId, TypeAliasId, TypeParamId,
49}; 49};
50use hir_expand::name::Name;
51use ra_db::{impl_intern_key, salsa, CrateId}; 50use ra_db::{impl_intern_key, salsa, CrateId};
52 51
53use crate::{ 52use crate::{
@@ -55,7 +54,7 @@ use crate::{
55 primitive::{FloatTy, IntTy, Uncertain}, 54 primitive::{FloatTy, IntTy, Uncertain},
56 utils::{generics, make_mut_slice, Generics}, 55 utils::{generics, make_mut_slice, Generics},
57}; 56};
58use display::{HirDisplay, HirFormatter}; 57use display::HirDisplay;
59 58
60pub use autoderef::autoderef; 59pub use autoderef::autoderef;
61pub use infer::{do_infer_query, InferTy, InferenceResult}; 60pub use infer::{do_infer_query, InferTy, InferenceResult};
@@ -291,7 +290,7 @@ pub enum Ty {
291 /// {}` when we're type-checking the body of that function. In this 290 /// {}` when we're type-checking the body of that function. In this
292 /// situation, we know this stands for *some* type, but don't know the exact 291 /// situation, we know this stands for *some* type, but don't know the exact
293 /// type. 292 /// type.
294 Param(TypeParamId), 293 Placeholder(TypeParamId),
295 294
296 /// A bound type variable. This is used in various places: when representing 295 /// A bound type variable. This is used in various places: when representing
297 /// some polymorphic type like the type of function `fn f<T>`, the type 296 /// some polymorphic type like the type of function `fn f<T>`, the type
@@ -365,7 +364,7 @@ impl Substs {
365 364
366 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). 365 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
367 pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs { 366 pub(crate) fn type_params_for_generics(generic_params: &Generics) -> Substs {
368 Substs(generic_params.iter().map(|(id, _)| Ty::Param(id)).collect()) 367 Substs(generic_params.iter().map(|(id, _)| Ty::Placeholder(id)).collect())
369 } 368 }
370 369
371 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`). 370 /// Return Substs that replace each parameter by itself (i.e. `Ty::Param`).
@@ -813,7 +812,7 @@ impl TypeWalk for Ty {
813 p.walk(f); 812 p.walk(f);
814 } 813 }
815 } 814 }
816 Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} 815 Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
817 } 816 }
818 f(self); 817 f(self);
819 } 818 }
@@ -831,374 +830,8 @@ impl TypeWalk for Ty {
831 p.walk_mut_binders(f, binders + 1); 830 p.walk_mut_binders(f, binders + 1);
832 } 831 }
833 } 832 }
834 Ty::Param { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {} 833 Ty::Placeholder { .. } | Ty::Bound(_) | Ty::Infer(_) | Ty::Unknown => {}
835 } 834 }
836 f(self, binders); 835 f(self, binders);
837 } 836 }
838} 837}
839
840const TYPE_HINT_TRUNCATION: &str = "…";
841
842impl HirDisplay for &Ty {
843 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
844 HirDisplay::hir_fmt(*self, f)
845 }
846}
847
848impl HirDisplay for ApplicationTy {
849 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
850 if f.should_truncate() {
851 return write!(f, "{}", TYPE_HINT_TRUNCATION);
852 }
853
854 match self.ctor {
855 TypeCtor::Bool => write!(f, "bool")?,
856 TypeCtor::Char => write!(f, "char")?,
857 TypeCtor::Int(t) => write!(f, "{}", t)?,
858 TypeCtor::Float(t) => write!(f, "{}", t)?,
859 TypeCtor::Str => write!(f, "str")?,
860 TypeCtor::Slice => {
861 let t = self.parameters.as_single();
862 write!(f, "[{}]", t.display(f.db))?;
863 }
864 TypeCtor::Array => {
865 let t = self.parameters.as_single();
866 write!(f, "[{}; _]", t.display(f.db))?;
867 }
868 TypeCtor::RawPtr(m) => {
869 let t = self.parameters.as_single();
870 write!(f, "*{}{}", m.as_keyword_for_ptr(), t.display(f.db))?;
871 }
872 TypeCtor::Ref(m) => {
873 let t = self.parameters.as_single();
874 let ty_display = if f.omit_verbose_types() {
875 t.display_truncated(f.db, f.max_size)
876 } else {
877 t.display(f.db)
878 };
879 write!(f, "&{}{}", m.as_keyword_for_ref(), ty_display)?;
880 }
881 TypeCtor::Never => write!(f, "!")?,
882 TypeCtor::Tuple { .. } => {
883 let ts = &self.parameters;
884 if ts.len() == 1 {
885 write!(f, "({},)", ts[0].display(f.db))?;
886 } else {
887 write!(f, "(")?;
888 f.write_joined(&*ts.0, ", ")?;
889 write!(f, ")")?;
890 }
891 }
892 TypeCtor::FnPtr { .. } => {
893 let sig = FnSig::from_fn_ptr_substs(&self.parameters);
894 write!(f, "fn(")?;
895 f.write_joined(sig.params(), ", ")?;
896 write!(f, ") -> {}", sig.ret().display(f.db))?;
897 }
898 TypeCtor::FnDef(def) => {
899 let sig = f.db.callable_item_signature(def).subst(&self.parameters);
900 let name = match def {
901 CallableDef::FunctionId(ff) => f.db.function_data(ff).name.clone(),
902 CallableDef::StructId(s) => f.db.struct_data(s).name.clone(),
903 CallableDef::EnumVariantId(e) => {
904 let enum_data = f.db.enum_data(e.parent);
905 enum_data.variants[e.local_id].name.clone()
906 }
907 };
908 match def {
909 CallableDef::FunctionId(_) => write!(f, "fn {}", name)?,
910 CallableDef::StructId(_) | CallableDef::EnumVariantId(_) => {
911 write!(f, "{}", name)?
912 }
913 }
914 if self.parameters.len() > 0 {
915 let generics = generics(f.db, def.into());
916 let (parent_params, self_param, type_params, _impl_trait_params) =
917 generics.provenance_split();
918 let total_len = parent_params + self_param + type_params;
919 // We print all params except implicit impl Trait params. Still a bit weird; should we leave out parent and self?
920 if total_len > 0 {
921 write!(f, "<")?;
922 f.write_joined(&self.parameters.0[..total_len], ", ")?;
923 write!(f, ">")?;
924 }
925 }
926 write!(f, "(")?;
927 f.write_joined(sig.params(), ", ")?;
928 write!(f, ") -> {}", sig.ret().display(f.db))?;
929 }
930 TypeCtor::Adt(def_id) => {
931 let name = match def_id {
932 AdtId::StructId(it) => f.db.struct_data(it).name.clone(),
933 AdtId::UnionId(it) => f.db.union_data(it).name.clone(),
934 AdtId::EnumId(it) => f.db.enum_data(it).name.clone(),
935 };
936 write!(f, "{}", name)?;
937 if self.parameters.len() > 0 {
938 write!(f, "<")?;
939
940 let mut non_default_parameters = Vec::with_capacity(self.parameters.len());
941 let parameters_to_write = if f.omit_verbose_types() {
942 match self
943 .ctor
944 .as_generic_def()
945 .map(|generic_def_id| f.db.generic_defaults(generic_def_id))
946 .filter(|defaults| !defaults.is_empty())
947 {
948 Option::None => self.parameters.0.as_ref(),
949 Option::Some(default_parameters) => {
950 for (i, parameter) in self.parameters.iter().enumerate() {
951 match (parameter, default_parameters.get(i)) {
952 (&Ty::Unknown, _) | (_, None) => {
953 non_default_parameters.push(parameter.clone())
954 }
955 (_, Some(default_parameter))
956 if parameter != default_parameter =>
957 {
958 non_default_parameters.push(parameter.clone())
959 }
960 _ => (),
961 }
962 }
963 &non_default_parameters
964 }
965 }
966 } else {
967 self.parameters.0.as_ref()
968 };
969
970 f.write_joined(parameters_to_write, ", ")?;
971 write!(f, ">")?;
972 }
973 }
974 TypeCtor::AssociatedType(type_alias) => {
975 let trait_ = match type_alias.lookup(f.db).container {
976 AssocContainerId::TraitId(it) => it,
977 _ => panic!("not an associated type"),
978 };
979 let trait_name = f.db.trait_data(trait_).name.clone();
980 let name = f.db.type_alias_data(type_alias).name.clone();
981 write!(f, "{}::{}", trait_name, name)?;
982 if self.parameters.len() > 0 {
983 write!(f, "<")?;
984 f.write_joined(&*self.parameters.0, ", ")?;
985 write!(f, ">")?;
986 }
987 }
988 TypeCtor::Closure { .. } => {
989 let sig = self.parameters[0]
990 .callable_sig(f.db)
991 .expect("first closure parameter should contain signature");
992 let return_type_hint = sig.ret().display(f.db);
993 if sig.params().is_empty() {
994 write!(f, "|| -> {}", return_type_hint)?;
995 } else if f.omit_verbose_types() {
996 write!(f, "|{}| -> {}", TYPE_HINT_TRUNCATION, return_type_hint)?;
997 } else {
998 write!(f, "|")?;
999 f.write_joined(sig.params(), ", ")?;
1000 write!(f, "| -> {}", return_type_hint)?;
1001 };
1002 }
1003 }
1004 Ok(())
1005 }
1006}
1007
1008impl HirDisplay for ProjectionTy {
1009 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1010 if f.should_truncate() {
1011 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1012 }
1013
1014 let trait_name = f.db.trait_data(self.trait_(f.db)).name.clone();
1015 write!(f, "<{} as {}", self.parameters[0].display(f.db), trait_name,)?;
1016 if self.parameters.len() > 1 {
1017 write!(f, "<")?;
1018 f.write_joined(&self.parameters[1..], ", ")?;
1019 write!(f, ">")?;
1020 }
1021 write!(f, ">::{}", f.db.type_alias_data(self.associated_ty).name)?;
1022 Ok(())
1023 }
1024}
1025
1026impl HirDisplay for Ty {
1027 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1028 if f.should_truncate() {
1029 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1030 }
1031
1032 match self {
1033 Ty::Apply(a_ty) => a_ty.hir_fmt(f)?,
1034 Ty::Projection(p_ty) => p_ty.hir_fmt(f)?,
1035 Ty::Param(id) => {
1036 let generics = generics(f.db, id.parent);
1037 let param_data = &generics.params.types[id.local_id];
1038 match param_data.provenance {
1039 TypeParamProvenance::TypeParamList | TypeParamProvenance::TraitSelf => {
1040 write!(f, "{}", param_data.name.clone().unwrap_or_else(Name::missing))?
1041 }
1042 TypeParamProvenance::ArgumentImplTrait => {
1043 write!(f, "impl ")?;
1044 let bounds = f.db.generic_predicates_for_param(*id);
1045 let substs = Substs::type_params_for_generics(&generics);
1046 write_bounds_like_dyn_trait(
1047 &bounds.iter().map(|b| b.clone().subst(&substs)).collect::<Vec<_>>(),
1048 f,
1049 )?;
1050 }
1051 }
1052 }
1053 Ty::Bound(idx) => write!(f, "?{}", idx)?,
1054 Ty::Dyn(predicates) | Ty::Opaque(predicates) => {
1055 match self {
1056 Ty::Dyn(_) => write!(f, "dyn ")?,
1057 Ty::Opaque(_) => write!(f, "impl ")?,
1058 _ => unreachable!(),
1059 };
1060 write_bounds_like_dyn_trait(&predicates, f)?;
1061 }
1062 Ty::Unknown => write!(f, "{{unknown}}")?,
1063 Ty::Infer(..) => write!(f, "_")?,
1064 }
1065 Ok(())
1066 }
1067}
1068
1069fn write_bounds_like_dyn_trait(
1070 predicates: &[GenericPredicate],
1071 f: &mut HirFormatter<impl HirDatabase>,
1072) -> fmt::Result {
1073 // Note: This code is written to produce nice results (i.e.
1074 // corresponding to surface Rust) for types that can occur in
1075 // actual Rust. It will have weird results if the predicates
1076 // aren't as expected (i.e. self types = $0, projection
1077 // predicates for a certain trait come after the Implemented
1078 // predicate for that trait).
1079 let mut first = true;
1080 let mut angle_open = false;
1081 for p in predicates.iter() {
1082 match p {
1083 GenericPredicate::Implemented(trait_ref) => {
1084 if angle_open {
1085 write!(f, ">")?;
1086 }
1087 if !first {
1088 write!(f, " + ")?;
1089 }
1090 // We assume that the self type is $0 (i.e. the
1091 // existential) here, which is the only thing that's
1092 // possible in actual Rust, and hence don't print it
1093 write!(f, "{}", f.db.trait_data(trait_ref.trait_).name.clone())?;
1094 if trait_ref.substs.len() > 1 {
1095 write!(f, "<")?;
1096 f.write_joined(&trait_ref.substs[1..], ", ")?;
1097 // there might be assoc type bindings, so we leave the angle brackets open
1098 angle_open = true;
1099 }
1100 }
1101 GenericPredicate::Projection(projection_pred) => {
1102 // in types in actual Rust, these will always come
1103 // after the corresponding Implemented predicate
1104 if angle_open {
1105 write!(f, ", ")?;
1106 } else {
1107 write!(f, "<")?;
1108 angle_open = true;
1109 }
1110 let name =
1111 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name.clone();
1112 write!(f, "{} = ", name)?;
1113 projection_pred.ty.hir_fmt(f)?;
1114 }
1115 GenericPredicate::Error => {
1116 if angle_open {
1117 // impl Trait<X, {error}>
1118 write!(f, ", ")?;
1119 } else if !first {
1120 // impl Trait + {error}
1121 write!(f, " + ")?;
1122 }
1123 p.hir_fmt(f)?;
1124 }
1125 }
1126 first = false;
1127 }
1128 if angle_open {
1129 write!(f, ">")?;
1130 }
1131 Ok(())
1132}
1133
1134impl TraitRef {
1135 fn hir_fmt_ext(&self, f: &mut HirFormatter<impl HirDatabase>, use_as: bool) -> fmt::Result {
1136 if f.should_truncate() {
1137 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1138 }
1139
1140 self.substs[0].hir_fmt(f)?;
1141 if use_as {
1142 write!(f, " as ")?;
1143 } else {
1144 write!(f, ": ")?;
1145 }
1146 write!(f, "{}", f.db.trait_data(self.trait_).name.clone())?;
1147 if self.substs.len() > 1 {
1148 write!(f, "<")?;
1149 f.write_joined(&self.substs[1..], ", ")?;
1150 write!(f, ">")?;
1151 }
1152 Ok(())
1153 }
1154}
1155
1156impl HirDisplay for TraitRef {
1157 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1158 self.hir_fmt_ext(f, false)
1159 }
1160}
1161
1162impl HirDisplay for &GenericPredicate {
1163 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1164 HirDisplay::hir_fmt(*self, f)
1165 }
1166}
1167
1168impl HirDisplay for GenericPredicate {
1169 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1170 if f.should_truncate() {
1171 return write!(f, "{}", TYPE_HINT_TRUNCATION);
1172 }
1173
1174 match self {
1175 GenericPredicate::Implemented(trait_ref) => trait_ref.hir_fmt(f)?,
1176 GenericPredicate::Projection(projection_pred) => {
1177 write!(f, "<")?;
1178 projection_pred.projection_ty.trait_ref(f.db).hir_fmt_ext(f, true)?;
1179 write!(
1180 f,
1181 ">::{} = {}",
1182 f.db.type_alias_data(projection_pred.projection_ty.associated_ty).name,
1183 projection_pred.ty.display(f.db)
1184 )?;
1185 }
1186 GenericPredicate::Error => write!(f, "{{error}}")?,
1187 }
1188 Ok(())
1189 }
1190}
1191
1192impl HirDisplay for Obligation {
1193 fn hir_fmt(&self, f: &mut HirFormatter<impl HirDatabase>) -> fmt::Result {
1194 match self {
1195 Obligation::Trait(tr) => write!(f, "Implements({})", tr.display(f.db)),
1196 Obligation::Projection(proj) => write!(
1197 f,
1198 "Normalize({} => {})",
1199 proj.projection_ty.display(f.db),
1200 proj.ty.display(f.db)
1201 ),
1202 }
1203 }
1204}
diff --git a/crates/ra_hir_ty/src/lower.rs b/crates/ra_hir_ty/src/lower.rs
index c68c5852b..6a2aded02 100644
--- a/crates/ra_hir_ty/src/lower.rs
+++ b/crates/ra_hir_ty/src/lower.rs
@@ -14,9 +14,9 @@ use hir_def::{
14 path::{GenericArg, Path, PathSegment, PathSegments}, 14 path::{GenericArg, Path, PathSegment, PathSegments},
15 resolver::{HasResolver, Resolver, TypeNs}, 15 resolver::{HasResolver, Resolver, TypeNs},
16 type_ref::{TypeBound, TypeRef}, 16 type_ref::{TypeBound, TypeRef},
17 AdtId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, 17 AdtId, AssocContainerId, ConstId, EnumId, EnumVariantId, FunctionId, GenericDefId, HasModule,
18 LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId, UnionId, 18 ImplId, LocalStructFieldId, Lookup, StaticId, StructId, TraitId, TypeAliasId, TypeParamId,
19 VariantId, 19 UnionId, VariantId,
20}; 20};
21use ra_arena::map::ArenaMap; 21use ra_arena::map::ArenaMap;
22use ra_db::CrateId; 22use ra_db::CrateId;
@@ -152,7 +152,7 @@ impl Ty {
152 data.provenance == TypeParamProvenance::ArgumentImplTrait 152 data.provenance == TypeParamProvenance::ArgumentImplTrait
153 }) 153 })
154 .nth(idx as usize) 154 .nth(idx as usize)
155 .map_or(Ty::Unknown, |(id, _)| Ty::Param(id)); 155 .map_or(Ty::Unknown, |(id, _)| Ty::Placeholder(id));
156 param 156 param
157 } else { 157 } else {
158 Ty::Unknown 158 Ty::Unknown
@@ -270,7 +270,7 @@ impl Ty {
270 let generics = 270 let generics =
271 generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope")); 271 generics(ctx.db, ctx.resolver.generic_def().expect("generics in scope"));
272 match ctx.type_param_mode { 272 match ctx.type_param_mode {
273 TypeParamLoweringMode::Placeholder => Ty::Param(param_id), 273 TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
274 TypeParamLoweringMode::Variable => { 274 TypeParamLoweringMode::Variable => {
275 let idx = generics.param_idx(param_id).expect("matching generics"); 275 let idx = generics.param_idx(param_id).expect("matching generics");
276 Ty::Bound(idx) 276 Ty::Bound(idx)
@@ -339,7 +339,7 @@ impl Ty {
339 None => return Ty::Unknown, // this can't actually happen 339 None => return Ty::Unknown, // this can't actually happen
340 }; 340 };
341 let param_id = match self_ty { 341 let param_id = match self_ty {
342 Ty::Param(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id, 342 Ty::Placeholder(id) if ctx.type_param_mode == TypeParamLoweringMode::Placeholder => id,
343 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => { 343 Ty::Bound(idx) if ctx.type_param_mode == TypeParamLoweringMode::Variable => {
344 let generics = generics(ctx.db, def); 344 let generics = generics(ctx.db, def);
345 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) { 345 let param_id = if let Some((id, _)) = generics.iter().nth(idx as usize) {
@@ -544,7 +544,7 @@ impl GenericPredicate {
544 let generics = generics(ctx.db, generic_def); 544 let generics = generics(ctx.db, generic_def);
545 let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id }; 545 let param_id = hir_def::TypeParamId { parent: generic_def, local_id: *param_id };
546 match ctx.type_param_mode { 546 match ctx.type_param_mode {
547 TypeParamLoweringMode::Placeholder => Ty::Param(param_id), 547 TypeParamLoweringMode::Placeholder => Ty::Placeholder(param_id),
548 TypeParamLoweringMode::Variable => { 548 TypeParamLoweringMode::Variable => {
549 let idx = generics.param_idx(param_id).expect("matching generics"); 549 let idx = generics.param_idx(param_id).expect("matching generics");
550 Ty::Bound(idx) 550 Ty::Bound(idx)
@@ -672,11 +672,35 @@ impl TraitEnvironment {
672 pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> { 672 pub fn lower(db: &impl HirDatabase, resolver: &Resolver) -> Arc<TraitEnvironment> {
673 let ctx = TyLoweringContext::new(db, &resolver) 673 let ctx = TyLoweringContext::new(db, &resolver)
674 .with_type_param_mode(TypeParamLoweringMode::Placeholder); 674 .with_type_param_mode(TypeParamLoweringMode::Placeholder);
675 let predicates = resolver 675 let mut predicates = resolver
676 .where_predicates_in_scope() 676 .where_predicates_in_scope()
677 .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred)) 677 .flat_map(|pred| GenericPredicate::from_where_predicate(&ctx, pred))
678 .collect::<Vec<_>>(); 678 .collect::<Vec<_>>();
679 679
680 if let Some(def) = resolver.generic_def() {
681 let container: Option<AssocContainerId> = match def {
682 // FIXME: is there a function for this?
683 GenericDefId::FunctionId(f) => Some(f.lookup(db).container),
684 GenericDefId::AdtId(_) => None,
685 GenericDefId::TraitId(_) => None,
686 GenericDefId::TypeAliasId(t) => Some(t.lookup(db).container),
687 GenericDefId::ImplId(_) => None,
688 GenericDefId::EnumVariantId(_) => None,
689 GenericDefId::ConstId(c) => Some(c.lookup(db).container),
690 };
691 if let Some(AssocContainerId::TraitId(trait_id)) = container {
692 // add `Self: Trait<T1, T2, ...>` to the environment in trait
693 // function default implementations (and hypothetical code
694 // inside consts or type aliases)
695 test_utils::tested_by!(trait_self_implements_self);
696 let substs = Substs::type_params(db, trait_id);
697 let trait_ref = TraitRef { trait_: trait_id, substs };
698 let pred = GenericPredicate::Implemented(trait_ref);
699
700 predicates.push(pred);
701 }
702 }
703
680 Arc::new(TraitEnvironment { predicates }) 704 Arc::new(TraitEnvironment { predicates })
681 } 705 }
682} 706}
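Note: the block added to TraitEnvironment::lower above injects a `Self: Trait<...>` predicate when the item being lowered lives inside a trait, which is what lets method calls through `self` resolve in default method bodies (exercised by the trait_self_implements_self tests further down). In surface Rust, the behaviour being modelled is simply:

    // Inside a default method body, `Self: Trait` holds, so calling another
    // trait method through `self` type-checks; the new predicate records the
    // same fact in the inference environment.
    trait Trait {
        fn foo(&self) -> i64;
        fn bar(&self) -> i64 {
            self.foo() + 1
        }
    }

    struct S;

    impl Trait for S {
        fn foo(&self) -> i64 {
            41
        }
    }

    fn main() {
        assert_eq!(S.bar(), 42);
    }

Without the extra predicate, the environment inside `bar` carries no record that `Self` implements `Trait`, so calls like `self.foo()` could not be resolved during inference.
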
diff --git a/crates/ra_hir_ty/src/marks.rs b/crates/ra_hir_ty/src/marks.rs
index 0f754eb9c..de5cb1d6b 100644
--- a/crates/ra_hir_ty/src/marks.rs
+++ b/crates/ra_hir_ty/src/marks.rs
@@ -4,6 +4,8 @@ test_utils::marks!(
4 type_var_cycles_resolve_completely 4 type_var_cycles_resolve_completely
5 type_var_cycles_resolve_as_possible 5 type_var_cycles_resolve_as_possible
6 type_var_resolves_to_int_var 6 type_var_resolves_to_int_var
7 impl_self_type_match_without_receiver
7 match_ergonomics_ref 8 match_ergonomics_ref
8 coerce_merge_fail_fallback 9 coerce_merge_fail_fallback
10 trait_self_implements_self
9); 11);
diff --git a/crates/ra_hir_ty/src/method_resolution.rs b/crates/ra_hir_ty/src/method_resolution.rs
index 5283bff28..4f8c52433 100644
--- a/crates/ra_hir_ty/src/method_resolution.rs
+++ b/crates/ra_hir_ty/src/method_resolution.rs
@@ -425,6 +425,15 @@ fn iterate_inherent_methods<T>(
425 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) { 425 if !is_valid_candidate(db, name, receiver_ty, item, self_ty) {
426 continue; 426 continue;
427 } 427 }
428 // we have to check whether the self type unifies with the type
429 // that the impl is for. If we have a receiver type, this
430 // already happens in `is_valid_candidate` above; if not, we
431 // check it here
432 if receiver_ty.is_none() && inherent_impl_substs(db, impl_block, self_ty).is_none()
433 {
434 test_utils::tested_by!(impl_self_type_match_without_receiver);
435 continue;
436 }
428 if let Some(result) = callback(&self_ty.value, item) { 437 if let Some(result) = callback(&self_ty.value, item) {
429 return Some(result); 438 return Some(result);
430 } 439 }
diff --git a/crates/ra_hir_ty/src/tests/coercion.rs b/crates/ra_hir_ty/src/tests/coercion.rs
index fc5ef36a5..42330b269 100644
--- a/crates/ra_hir_ty/src/tests/coercion.rs
+++ b/crates/ra_hir_ty/src/tests/coercion.rs
@@ -526,3 +526,25 @@ fn test() {
526 "### 526 "###
527 ); 527 );
528} 528}
529
530#[test]
531fn coerce_placeholder_ref() {
532 // placeholders should unify, even behind references
533 assert_snapshot!(
534 infer_with_mismatches(r#"
535struct S<T> { t: T }
536impl<TT> S<TT> {
537 fn get(&self) -> &TT {
538 &self.t
539 }
540}
541"#, true),
542 @r###"
543 [51; 55) 'self': &S<TT>
544 [64; 87) '{ ... }': &TT
545 [74; 81) '&self.t': &TT
546 [75; 79) 'self': &S<TT>
547 [75; 81) 'self.t': TT
548 "###
549 );
550}
diff --git a/crates/ra_hir_ty/src/tests/method_resolution.rs b/crates/ra_hir_ty/src/tests/method_resolution.rs
index 1722563aa..1f767d324 100644
--- a/crates/ra_hir_ty/src/tests/method_resolution.rs
+++ b/crates/ra_hir_ty/src/tests/method_resolution.rs
@@ -964,6 +964,38 @@ fn test() { S2.into()<|>; }
964} 964}
965 965
966#[test] 966#[test]
967fn method_resolution_overloaded_method() {
968 test_utils::covers!(impl_self_type_match_without_receiver);
969 let t = type_at(
970 r#"
971//- main.rs
972struct Wrapper<T>(T);
973struct Foo<T>(T);
974struct Bar<T>(T);
975
976impl<T> Wrapper<Foo<T>> {
977 pub fn new(foo_: T) -> Self {
978 Wrapper(Foo(foo_))
979 }
980}
981
982impl<T> Wrapper<Bar<T>> {
983 pub fn new(bar_: T) -> Self {
984 Wrapper(Bar(bar_))
985 }
986}
987
988fn main() {
989 let a = Wrapper::<Foo<f32>>::new(1.0);
990 let b = Wrapper::<Bar<f32>>::new(1.0);
991 (a, b)<|>;
992}
993"#,
994 );
995 assert_eq!(t, "(Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)")
996}
997
998#[test]
967fn method_resolution_encountering_fn_type() { 999fn method_resolution_encountering_fn_type() {
968 type_at( 1000 type_at(
969 r#" 1001 r#"
diff --git a/crates/ra_hir_ty/src/tests/traits.rs b/crates/ra_hir_ty/src/tests/traits.rs
index 17611ddbf..aa2018944 100644
--- a/crates/ra_hir_ty/src/tests/traits.rs
+++ b/crates/ra_hir_ty/src/tests/traits.rs
@@ -300,6 +300,54 @@ fn test() {
300} 300}
301 301
302#[test] 302#[test]
303fn trait_default_method_self_bound_implements_trait() {
304 test_utils::covers!(trait_self_implements_self);
305 assert_snapshot!(
306 infer(r#"
307trait Trait {
308 fn foo(&self) -> i64;
309 fn bar(&self) -> {
310 let x = self.foo();
311 }
312}
313"#),
314 @r###"
315 [27; 31) 'self': &Self
316 [53; 57) 'self': &Self
317 [62; 97) '{ ... }': ()
318 [76; 77) 'x': i64
319 [80; 84) 'self': &Self
320 [80; 90) 'self.foo()': i64
321 "###
322 );
323}
324
325#[test]
326fn trait_default_method_self_bound_implements_super_trait() {
327 test_utils::covers!(trait_self_implements_self);
328 assert_snapshot!(
329 infer(r#"
330trait SuperTrait {
331 fn foo(&self) -> i64;
332}
333trait Trait: SuperTrait {
334 fn bar(&self) -> {
335 let x = self.foo();
336 }
337}
338"#),
339 @r###"
340 [32; 36) 'self': &Self
341 [86; 90) 'self': &Self
342 [95; 130) '{ ... }': ()
343 [109; 110) 'x': i64
344 [113; 117) 'self': &Self
345 [113; 123) 'self.foo()': i64
346 "###
347 );
348}
349
350#[test]
303fn infer_project_associated_type() { 351fn infer_project_associated_type() {
304 // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234 352 // y, z, a don't yet work because of https://github.com/rust-lang/chalk/issues/234
305 assert_snapshot!( 353 assert_snapshot!(
diff --git a/crates/ra_hir_ty/src/traits.rs b/crates/ra_hir_ty/src/traits.rs
index 88af61e87..ff8e75b48 100644
--- a/crates/ra_hir_ty/src/traits.rs
+++ b/crates/ra_hir_ty/src/traits.rs
@@ -60,6 +60,9 @@ impl TraitSolver {
60 context.0.db.check_canceled(); 60 context.0.db.check_canceled();
61 let remaining = fuel.get(); 61 let remaining = fuel.get();
62 fuel.set(remaining - 1); 62 fuel.set(remaining - 1);
63 if remaining == 0 {
64 log::debug!("fuel exhausted");
65 }
63 remaining > 0 66 remaining > 0
64 }) 67 })
65 } 68 }
diff --git a/crates/ra_hir_ty/src/traits/chalk.rs b/crates/ra_hir_ty/src/traits/chalk.rs
index 4974c565b..882160fa8 100644
--- a/crates/ra_hir_ty/src/traits/chalk.rs
+++ b/crates/ra_hir_ty/src/traits/chalk.rs
@@ -142,7 +142,7 @@ impl ToChalk for Ty {
142 let substitution = proj_ty.parameters.to_chalk(db); 142 let substitution = proj_ty.parameters.to_chalk(db);
143 chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern() 143 chalk_ir::AliasTy { associated_ty_id, substitution }.cast().intern()
144 } 144 }
145 Ty::Param(id) => { 145 Ty::Placeholder(id) => {
146 let interned_id = db.intern_type_param_id(id); 146 let interned_id = db.intern_type_param_id(id);
147 PlaceholderIndex { 147 PlaceholderIndex {
148 ui: UniverseIndex::ROOT, 148 ui: UniverseIndex::ROOT,
@@ -184,7 +184,7 @@ impl ToChalk for Ty {
184 let interned_id = crate::db::GlobalTypeParamId::from_intern_id( 184 let interned_id = crate::db::GlobalTypeParamId::from_intern_id(
185 crate::salsa::InternId::from(idx.idx), 185 crate::salsa::InternId::from(idx.idx),
186 ); 186 );
187 Ty::Param(db.lookup_intern_type_param_id(interned_id)) 187 Ty::Placeholder(db.lookup_intern_type_param_id(interned_id))
188 } 188 }
189 chalk_ir::TyData::Alias(proj) => { 189 chalk_ir::TyData::Alias(proj) => {
190 let associated_ty = from_chalk(db, proj.associated_ty_id); 190 let associated_ty = from_chalk(db, proj.associated_ty_id);
diff --git a/crates/ra_ide/src/snapshots/highlighting.html b/crates/ra_ide/src/snapshots/highlighting.html
index 1cc55e78b..a02dbaf2f 100644
--- a/crates/ra_ide/src/snapshots/highlighting.html
+++ b/crates/ra_ide/src/snapshots/highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/snapshots/rainbow_highlighting.html b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
index 918fd4b97..95f038f00 100644
--- a/crates/ra_ide/src/snapshots/rainbow_highlighting.html
+++ b/crates/ra_ide/src/snapshots/rainbow_highlighting.html
@@ -16,6 +16,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
16.literal { color: #BFEBBF; } 16.literal { color: #BFEBBF; }
17.literal\.numeric { color: #6A8759; } 17.literal\.numeric { color: #6A8759; }
18.macro { color: #94BFF3; } 18.macro { color: #94BFF3; }
19.module { color: #AFD8AF; }
19.variable { color: #DCDCCC; } 20.variable { color: #DCDCCC; }
20.variable\.mut { color: #DCDCCC; text-decoration: underline; } 21.variable\.mut { color: #DCDCCC; text-decoration: underline; }
21 22
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 174e13595..20c414ca1 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -365,6 +365,7 @@ pre { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padd
365.literal { color: #BFEBBF; } 365.literal { color: #BFEBBF; }
366.literal\\.numeric { color: #6A8759; } 366.literal\\.numeric { color: #6A8759; }
367.macro { color: #94BFF3; } 367.macro { color: #94BFF3; }
368.module { color: #AFD8AF; }
368.variable { color: #DCDCCC; } 369.variable { color: #DCDCCC; }
369.variable\\.mut { color: #DCDCCC; text-decoration: underline; } 370.variable\\.mut { color: #DCDCCC; text-decoration: underline; }
370 371
diff --git a/crates/ra_ide_db/Cargo.toml b/crates/ra_ide_db/Cargo.toml
index 716e88bc1..495fffb5a 100644
--- a/crates/ra_ide_db/Cargo.toml
+++ b/crates/ra_ide_db/Cargo.toml
@@ -22,7 +22,6 @@ fst = { version = "0.3.1", default-features = false }
22rustc-hash = "1.0" 22rustc-hash = "1.0"
23unicase = "2.2.0" 23unicase = "2.2.0"
24superslice = "1.0.0" 24superslice = "1.0.0"
25rand = { version = "0.7.0", features = ["small_rng"] }
26once_cell = "1.2.0" 25once_cell = "1.2.0"
27 26
28ra_syntax = { path = "../ra_syntax" } 27ra_syntax = { path = "../ra_syntax" }
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs
index c8a017c5c..ed2eaabd4 100644
--- a/crates/ra_lsp_server/src/main.rs
+++ b/crates/ra_lsp_server/src/main.rs
@@ -15,13 +15,8 @@ fn main() -> Result<()> {
15 15
16fn setup_logging() -> Result<()> { 16fn setup_logging() -> Result<()> {
17 std::env::set_var("RUST_BACKTRACE", "short"); 17 std::env::set_var("RUST_BACKTRACE", "short");
18
19 env_logger::try_init()?; 18 env_logger::try_init()?;
20 19 ra_prof::init();
21 ra_prof::set_filter(match std::env::var("RA_PROFILE") {
22 Ok(spec) => ra_prof::Filter::from_spec(&spec),
23 Err(_) => ra_prof::Filter::disabled(),
24 });
25 Ok(()) 20 Ok(())
26} 21}
27 22
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs
index d38ff397e..c0bfbc2ee 100644
--- a/crates/ra_prof/src/lib.rs
+++ b/crates/ra_prof/src/lib.rs
@@ -26,6 +26,13 @@ pub use crate::memory_usage::{Bytes, MemoryUsage};
26#[global_allocator] 26#[global_allocator]
27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; 27static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
28 28
29pub fn init() {
30 set_filter(match std::env::var("RA_PROFILE") {
31 Ok(spec) => Filter::from_spec(&spec),
32 Err(_) => Filter::disabled(),
33 });
34}
35
29/// Set profiling filter. It specifies descriptions allowed to profile. 36/// Set profiling filter. It specifies descriptions allowed to profile.
30/// This is helpful when call stack has too many nested profiling scopes. 37/// This is helpful when call stack has too many nested profiling scopes.
31/// Additionally filter can specify maximum depth of profiling scopes nesting. 38/// Additionally filter can specify maximum depth of profiling scopes nesting.
diff --git a/crates/ra_project_model/src/lib.rs b/crates/ra_project_model/src/lib.rs
index fef405b7f..250255813 100644
--- a/crates/ra_project_model/src/lib.rs
+++ b/crates/ra_project_model/src/lib.rs
@@ -418,8 +418,10 @@ pub fn get_rustc_cfg_options() -> CfgOptions {
418 // Some nightly-only cfgs, which are required for stdlib 418 // Some nightly-only cfgs, which are required for stdlib
419 { 419 {
420 cfg_options.insert_atom("target_thread_local".into()); 420 cfg_options.insert_atom("target_thread_local".into());
421 for &target_has_atomic in ["16", "32", "64", "8", "cas", "ptr"].iter() { 421 for &target_has_atomic in ["8", "16", "32", "64", "cas", "ptr"].iter() {
422 cfg_options.insert_key_value("target_has_atomic".into(), target_has_atomic.into()) 422 cfg_options.insert_key_value("target_has_atomic".into(), target_has_atomic.into());
423 cfg_options
424 .insert_key_value("target_has_atomic_load_store".into(), target_has_atomic.into());
423 } 425 }
424 } 426 }
425 427
diff --git a/docs/user/README.md b/docs/user/README.md
deleted file mode 100644
index 14ca6fd64..000000000
--- a/docs/user/README.md
+++ /dev/null
@@ -1,280 +0,0 @@
1[github-releases]: https://github.com/rust-analyzer/rust-analyzer/releases
2
3The main interface to rust-analyzer is the
4[LSP](https://microsoft.github.io/language-server-protocol/) implementation. To
5install lsp server, you have three options:
6
7* **Preferred and default:** install the plugin/extension for your IDE and it will ask your permission to automatically download the latest lsp server for you from [GitHub releases][github-releases]. (See docs to find out whether this is implemented for your editor below).
8* Manually download prebuilt binaries from [GitHub releases][github-releases]
9 * `ra_lsp_server-linux` for Linux
10 * `ra_lsp_server-mac` for Mac
11 * `ra_lsp_server-windows.exe` for Windows
12* Clone the repository and build from sources
13```bash
14$ git clone [email protected]:rust-analyzer/rust-analyzer && cd rust-analyzer
15$ cargo xtask install --server # or cargo install --path ./crates/ra_lsp_server
16```
17
18This way you will get a binary named `ra_lsp_server` (with os suffix for prebuilt binaries)
19which you should be able to use with any LSP-compatible editor.
20
21We make use of custom extensions to LSP, so special client-side support is required to take full
22advantage of rust-analyzer. This repository contains support code for VS Code.
23
24Rust Analyzer needs sources of rust standard library to work, so
25you might also need to execute
26
27```
28$ rustup component add rust-src
29```
30
31See [./features.md](./features.md) document for a list of features that are available.
32
33## VS Code
34
35### Prerequisites
36
37You will need the most recent version of VS Code: we don't try to
38maintain compatibility with older versions yet.
39
40### Installation from prebuilt binaries
41
42We ship prebuilt binaries for Linux, Mac and Windows via
43[GitHub releases][github-releases].
44In order to use them you need to install the client VSCode extension.
45
46Publishing to VS Code marketplace is currently WIP. Thus, you need to manually download
47`rust-analyzer-0.1.0.vsix` file from latest [GitHub release][github-releases].
48
49After you downloaded the `.vsix` file you can install it from the terminal
50
51```
52$ code --install-extension rust-analyzer-0.1.0.vsix
53```
54
55Or open VS Code, press <kbd>Ctrl+Shift+P</kbd>, and search for the following command:
56
57<img width="500px" alt="Install from VSIX command" src="https://user-images.githubusercontent.com/36276403/74108225-c0c11d80-4b80-11ea-9b2a-0a43f09e29af.png">
58
59Press <kbd>Enter</kbd> and go to `rust-analyzer-0.1.0.vsix` file through the file explorer.
60
61Then open some Rust project and you should
62see an info message pop-up.
63
64<img height="140px" src="https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png" alt="Download now message"/>
65
66
67Click `Download now`, wait until the progress is 100% and you are ready to go.
68
69For updates you need to remove installed binary
70```
71rm -rf ${HOME}/.config/Code/User/globalStorage/matklad.rust-analyzer
72```
73
74`"Download latest language server"` command for VSCode and automatic updates detection is currently WIP.
75
76
77### Installation from sources
78
79In order to build the VS Code plugin from sources, you need to have node.js and npm with
80a minimum version of 12 installed. Please refer to
81[node.js and npm documentation](https://nodejs.org) for installation instructions.
82
83The experimental VS Code plugin can be built and installed by executing the
84following commands:
85
86```
87$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
88$ cd rust-analyzer
89$ cargo xtask install
90```
91
92After that you need to amend your `settings.json` file to explicitly specify the
93path to `ra_lsp_server` that you've just built.
94```json
95{
96 "rust-analyzer.raLspServerPath": "ra_lsp_server"
97}
98```
99This should work on all platforms, otherwise if installed `ra_lsp_server` is not available through your `$PATH` then see how to configure it [here](#setting-up-the-PATH-variable).
100
101
102The automatic installation is expected to *just work* for common cases, if it
103doesn't, report bugs!
104
105**Note** [#1831](https://github.com/rust-analyzer/rust-analyzer/issues/1831): If you are using the popular
106[Vim emulation plugin](https://github.com/VSCodeVim/Vim), you will likely
107need to turn off the `rust-analyzer.enableEnhancedTyping` setting.
108(// TODO: This configuration is no longer available, enhanced typing shoud be disabled via removing Enter key binding, [see this issue](https://github.com/rust-analyzer/rust-analyzer/issues/3051))
109
110If you have an unusual setup (for example, `code` is not in the `PATH`), you
111should adapt these manual installation instructions:
112
113```
114$ git clone https://github.com/rust-analyzer/rust-analyzer.git --depth 1
115$ cd rust-analyzer
116$ cargo install --path ./crates/ra_lsp_server/ --force --locked
117$ cd ./editors/code
118$ npm install
119$ npm run package
120$ code --install-extension ./rust-analyzer-0.1.0.vsix
121```
122
123It's better to remove existing Rust plugins to avoid interference.
124
125Beyond basic LSP features, there are some extension commands which you can
126invoke via <kbd>Ctrl+Shift+P</kbd> or bind to a shortcut. See [./features.md](./features.md)
127for details.
128
129For updates, pull the latest changes from the master branch, run `cargo xtask install` again, and **restart** VS Code instance.
130See [microsoft/vscode#72308](https://github.com/microsoft/vscode/issues/72308) for why a full restart is needed.
131
132### VS Code Remote
133
134You can also use `rust-analyzer` with the Visual Studio Code Remote extensions
135(Remote SSH, Remote WSL, Remote Containers). In this case, however, you have to
136manually install the `.vsix` package:
137
1381. Build the extension on the remote host using the instructions above (ignore the
139 error if `code` cannot be found in your PATH: VSCode doesn't need to be installed
140 on the remote host).
1412. In Visual Studio Code open a connection to the remote host.
1423. Open the Extensions View (`View > Extensions`, keyboard shortcut: `Ctrl+Shift+X`).
1434. From the top-right kebab menu (`···`) select `Install from VSIX...`
1445. Inside the `rust-analyzer` directory find the `editors/code` subdirectory and choose
145 the `rust-analyzer-0.1.0.vsix` file.
1466. Restart Visual Studio Code and re-establish the connection to the remote host.
147
148In case of errors please make sure that `~/.cargo/bin` is in your `PATH` on the remote
149host.
150
151### Settings
152
153* `rust-analyzer.highlightingOn`: enables experimental syntax highlighting.
154 Colors can be configured via `editor.tokenColorCustomizations`.
155 As an example, [Pale Fire](https://github.com/matklad/pale-fire/) color scheme tweaks rust colors.
156* `rust-analyzer.enableEnhancedTyping`: by default, rust-analyzer intercepts the
157 `Enter` key to make it easier to continue comments. Note that it may conflict with VIM emulation plugin.
158* `rust-analyzer.raLspServerPath`: path to `ra_lsp_server` executable, when absent or `null` defaults to prebuilt binary path
159* `rust-analyzer.enableCargoWatchOnStartup`: prompt to install & enable `cargo
160 watch` for live error highlighting (note, this **does not** use rust-analyzer)
161* `rust-analyzer.excludeGlobs`: a list of glob-patterns for exclusion (see globset [docs](https://docs.rs/globset) for syntax).
162 Note: glob patterns are applied to all Cargo packages and a rooted at a package root.
163 This is not very intuitive and a limitation of a current implementation.
164* `rust-analyzer.useClientWatching`: use client provided file watching instead
165 of notify watching.
166* `rust-analyzer.cargo-watch.command`: `cargo-watch` command. (e.g: `clippy` will run as `cargo watch -x clippy` )
167* `rust-analyzer.cargo-watch.arguments`: cargo-watch check arguments.
168 (e.g: `--features="shumway,pdf"` will run as `cargo watch -x "check --features="shumway,pdf""` )
169* `rust-analyzer.cargo-watch.ignore`: list of patterns for cargo-watch to ignore (will be passed as `--ignore`)
170* `rust-analyzer.trace.server`: enables internal logging
171* `rust-analyzer.trace.cargo-watch`: enables cargo-watch logging
172* `RUST_SRC_PATH`: environment variable that overwrites the sysroot
173* `rust-analyzer.featureFlags` -- a JSON object to tweak fine-grained behavior:
174 ```jsonc
175 {
176 // Show diagnostics produced by rust-analyzer itself.
177 "lsp.diagnostics": true,
178 // Automatically insert `()` and `<>` when completing functions and types.
179 "completion.insertion.add-call-parenthesis": true,
180 // Enable completions like `.if`, `.match`, etc.
181 "completion.enable-postfix": true,
182 // Show notification when workspace is fully loaded
183 "notifications.workspace-loaded": true,
184 // Show error when no Cargo.toml was found
185 "notifications.cargo-toml-not-found": true,
186 }
187 ```
188
189
190## Emacs
191
192* install recent version of `emacs-lsp` package by following the instructions [here][emacs-lsp]
193* set `lsp-rust-server` to `'rust-analyzer`
194* run `lsp` in a Rust buffer
195* (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys
196
197[emacs-lsp]: https://github.com/emacs-lsp/lsp-mode
198
199
200## Vim and NeoVim (coc-rust-analyzer)
201
202* Install coc.nvim by following the instructions at [coc.nvim][] (nodejs required)
203* Run `:CocInstall coc-rust-analyzer` to install [coc-rust-analyzer], this extension implements _most_ of the features supported in the VSCode extension:
204 - same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
205 - same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
206 - highlighting and inlay_hints are not implemented yet
207
208[coc.nvim]: https://github.com/neoclide/coc.nvim
209[coc-rust-analyzer]: https://github.com/fannheyward/coc-rust-analyzer
210
211## Vim and NeoVim (LanguageClient-neovim)
212
213* Install LanguageClient-neovim by following the instructions [here][lang-client-neovim]
214 - The github project wiki has extra tips on configuration
215
216* Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
217
218```vim
219let g:LanguageClient_serverCommands = {
220\ 'rust': ['ra_lsp_server'],
221\ }
222```
223
224[lang-client-neovim]: https://github.com/autozimu/LanguageClient-neovim
225
226## NeoVim (nvim-lsp)
227
228NeoVim 0.5 (not yet released) has built in language server support. For a quick start configuration
229of rust-analyzer, use [neovim/nvim-lsp](https://github.com/neovim/nvim-lsp#rust_analyzer).
230Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`.
231
232
233## Sublime Text 3
234
235Prequisites:
236
237`LSP` package.
238
239Installation:
240
241* Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
242* Type `LSP Settings` to open the LSP preferences editor
243* Add the following LSP client definition to your settings:
244
245```json
246"rust-analyzer": {
247 "command": ["ra_lsp_server"],
248 "languageId": "rust",
249 "scopes": ["source.rust"],
250 "syntaxes": [
251 "Packages/Rust/Rust.sublime-syntax",
252 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
253 ],
254 "initializationOptions": {
255 "featureFlags": {
256 }
257 },
258}
259```
260
261* You can now invoke the command palette and type LSP enable to locally/globally enable the rust-analyzer LSP (type LSP enable, then choose either locally or globally, then select rust-analyzer)
262
263
264<!-- Update links to this header when changing it! -->
265### Setting up the `PATH` variable
266
267On Unix systems, `rustup` adds `~/.cargo/bin` to `PATH` by modifying the shell's
268startup file. Depending on your configuration, your Desktop Environment might not
269actually load it. If you find that `rust-analyzer` only runs when starting the
270editor from the terminal, you will have to set up your `PATH` variable manually.
271
272There are a couple of ways to do that:
273
274- for Code, set `rust-analyzer.raLspServerPath` to `~/.cargo/bin` (the `~` is
275 automatically resolved by the extension)
276- copy the binary to a location that is already in `PATH`, e.g. `/usr/local/bin`
277- on Linux, use PAM to configure the `PATH` variable, by e.g. putting
278 `PATH DEFAULT=/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:@{HOME}/.cargo/bin:@{HOME}/.local/bin`
279 in your `~/.pam_environment` file; note that this might interfere with other
280 defaults set by the system administrator via `/etc/environment`.
diff --git a/docs/user/readme.adoc b/docs/user/readme.adoc
new file mode 100644
index 000000000..553687e78
--- /dev/null
+++ b/docs/user/readme.adoc
@@ -0,0 +1,154 @@
1= User Manual
2:toc: preamble
3:sectanchors:
4:page-layout: post
5
6
7// Master copy of this document lives in the https://github.com/rust-analyzer/rust-analyzer repository
8
9At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
10This manual focuses on a specific usage of the library -- the implementation of
11https://microsoft.github.io/language-server-protocol/[Language Server Protocol].
12LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
13
14To improve this document, send a pull request against
15https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/readme.adoc[this file].
16
17== Installation
18
19In theory, one should be able to just install the server binary and have it automatically work with any editor.
20We are not there yet, so some editor-specific setup is required.
21
22=== VS Code
23
24This is the best supported editor at the moment.
25The rust-analyzer plugin for VS Code is maintained
26https://github.com/rust-analyzer/rust-analyzer/tree/master/editors/code[in tree].
27
28You can install the latest release of the plugin from
29https://marketplace.visualstudio.com/items?itemName=matklad.rust-analyzer[the marketplace].
30By default, the plugin will download the matching version of the server as well.
31
32// FIXME: update the image (its text has changed)
33image::https://user-images.githubusercontent.com/36276403/74103174-a40df100-4b52-11ea-81f4-372c70797924.png[]
34
35The server binary is stored in `~/.config/Code/User/globalStorage/matklad.rust-analyzer`.
36
37Note that we only support the latest version of VS Code.
38
39==== Updates
40
41The extension will be updated automatically as new versions become available. It will ask your permission to download the matching language server binary if needed.
42
43==== Building From Source
44
45Alternatively, both the server and the plugin can be installed from source:
46
47[source]
48----
49$ git clone https://github.com/rust-analyzer/rust-analyzer.git && cd rust-analyzer
50$ cargo xtask install
51----
52
53You'll need Cargo, nodejs and npm for this.
54To make VS Code use the freshly built server, add this to the settings:
55
56[source,json]
57----
58{ "rust-analyzer.raLspServerPath": "ra_lsp_server" }
59----
60
61Note that installing via `xtask install` does not work for VS Code Remote; instead you'll need to install the `.vsix` manually.
62
63=== Language Server Binary
64
65Other editors generally require the `ra_lsp_server` binary to be in `$PATH`.
66You can download a pre-built binary from the
67https://github.com/rust-analyzer/rust-analyzer/releases[releases]
68page, or you can install it from source using the following command:
69
70[source,bash]
71----
72$ cargo xtask install --server
73----
74
75=== Emacs
76
77Emacs support is maintained https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[upstream].
78
791. Install a recent version of the `emacs-lsp` package by following the instructions https://github.com/emacs-lsp/lsp-mode[here].
802. Set `lsp-rust-server` to `'rust-analyzer`.
813. Run `lsp` in a Rust buffer.
824. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
83
84=== Vim
85
86There are several LSP client implementations for Vim:
87
88==== coc-rust-analyzer
89
901. Install coc.nvim by following the instructions at
91 https://github.com/neoclide/coc.nvim[coc.nvim]
92 (nodejs required)
932. Run `:CocInstall coc-rust-analyzer` to install
94 https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
95 this extension implements _most_ of the features supported in the VSCode extension:
96 * same configurations as VSCode extension, `rust-analyzer.raLspServerPath`, `rust-analyzer.enableCargoWatchOnStartup` etc.
97 * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.startCargoWatch` etc.
98 * highlighting and inlay_hints are not implemented yet
99
100==== LanguageClient-neovim
101
1021. Install LanguageClient-neovim by following the instructions
103 https://github.com/autozimu/LanguageClient-neovim[here]
104 * The github project wiki has extra tips on configuration
105
1062. Configure by adding this to your vim/neovim config file (replacing the existing rust specific line if it exists):
107+
108[source,vim]
109----
110let g:LanguageClient_serverCommands = {
111\ 'rust': ['ra_lsp_server'],
112\ }
113----
114
115==== nvim-lsp
116
117NeoVim 0.5 (not yet released) has built-in language server support.
118For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lsp#rust_analyzer[neovim/nvim-lsp].
119Once `neovim/nvim-lsp` is installed, use `lua require'nvim_lsp'.rust_analyzer.setup({})` in your `init.vim`.
120
121=== Sublime Text 3
122
123Prerequisites:
124
125`LSP` package.
126
127Installation:
128
1291. Invoke the command palette with <kbd>Ctrl+Shift+P</kbd>
1302. Type `LSP Settings` to open the LSP preferences editor
1313. Add the following LSP client definition to your settings:
132+
133[source,json]
134----
135"rust-analyzer": {
136 "command": ["ra_lsp_server"],
137 "languageId": "rust",
138 "scopes": ["source.rust"],
139 "syntaxes": [
140 "Packages/Rust/Rust.sublime-syntax",
141 "Packages/Rust Enhanced/RustEnhanced.sublime-syntax"
142 ],
143 "initializationOptions": {
144 "featureFlags": {
145 }
146 },
147}
148----
149
1504. You can now enable the rust-analyzer LSP locally or globally: invoke the command palette, type `LSP enable`, choose either locally or globally, then select rust-analyzer
151
152== Usage
153
154See https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/user/features.md[features.md].
diff --git a/editors/code/package-lock.json b/editors/code/package-lock.json
index 5c056463e..c74078735 100644
--- a/editors/code/package-lock.json
+++ b/editors/code/package-lock.json
@@ -1,6 +1,6 @@
1{ 1{
2 "name": "rust-analyzer", 2 "name": "rust-analyzer",
3 "version": "0.1.0", 3 "version": "0.2.0-dev",
4 "lockfileVersion": 1, 4 "lockfileVersion": 1,
5 "requires": true, 5 "requires": true,
6 "dependencies": { 6 "dependencies": {
@@ -107,9 +107,9 @@
107 "dev": true 107 "dev": true
108 }, 108 },
109 "@types/vscode": { 109 "@types/vscode": {
110 "version": "1.41.0", 110 "version": "1.42.0",
111 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.41.0.tgz", 111 "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.42.0.tgz",
112 "integrity": "sha512-7SfeY5u9jgiELwxyLB3z7l6l/GbN9CqpCQGkcRlB7tKRFBxzbz2PoBfGrLxI1vRfUCIq5+hg5vtDHExwq5j3+A==", 112 "integrity": "sha512-ds6TceMsh77Fs0Mq0Vap6Y72JbGWB8Bay4DrnJlf5d9ui2RSe1wis13oQm+XhguOeH1HUfLGzaDAoupTUtgabw==",
113 "dev": true 113 "dev": true
114 }, 114 },
115 "acorn": { 115 "acorn": {
@@ -662,9 +662,9 @@
662 } 662 }
663 }, 663 },
664 "readable-stream": { 664 "readable-stream": {
665 "version": "3.4.0", 665 "version": "3.6.0",
666 "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.4.0.tgz", 666 "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
667 "integrity": "sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==", 667 "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
668 "dev": true, 668 "dev": true,
669 "requires": { 669 "requires": {
670 "inherits": "^2.0.3", 670 "inherits": "^2.0.3",
@@ -860,9 +860,9 @@
860 "dev": true 860 "dev": true
861 }, 861 },
862 "vsce": { 862 "vsce": {
863 "version": "1.71.0", 863 "version": "1.73.0",
864 "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.71.0.tgz", 864 "resolved": "https://registry.npmjs.org/vsce/-/vsce-1.73.0.tgz",
865 "integrity": "sha512-7k+LPC4oJYPyyxs0a5nh4A8CleQ6+2EMPiAiX/bDyN+PmwJFm2FFPqLRxdIsIWfFnkW4ZMQBf10+W62dCRd9kQ==", 865 "integrity": "sha512-6W37Ebbkj3uF3WhT+SCfRtsneRQEFcGvf/XYz+b6OAgDCj4gPurWyDVrqw/HLsbP1WflGIyUfVZ8t5M7kQp6Uw==",
866 "dev": true, 866 "dev": true,
867 "requires": { 867 "requires": {
868 "azure-devops-node-api": "^7.2.0", 868 "azure-devops-node-api": "^7.2.0",
diff --git a/editors/code/package.json b/editors/code/package.json
index f687eb8d4..ed1cae2ab 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -5,7 +5,8 @@
5 "preview": true, 5 "preview": true,
6 "private": true, 6 "private": true,
7 "icon": "icon.png", 7 "icon": "icon.png",
8 "version": "0.1.0", 8 "//": "The real version is in release.yaml, this one just needs to be bigger",
9 "version": "0.2.20200211-dev",
9 "publisher": "matklad", 10 "publisher": "matklad",
10 "repository": { 11 "repository": {
11 "url": "https://github.com/rust-analyzer/rust-analyzer.git", 12 "url": "https://github.com/rust-analyzer/rust-analyzer.git",
@@ -15,7 +16,7 @@
15 "Other" 16 "Other"
16 ], 17 ],
17 "engines": { 18 "engines": {
18 "vscode": "^1.41.0" 19 "vscode": "^1.42.0"
19 }, 20 },
20 "scripts": { 21 "scripts": {
21 "vscode:prepublish": "tsc && rollup -c", 22 "vscode:prepublish": "tsc && rollup -c",
@@ -35,13 +36,13 @@
35 "@types/node": "^12.12.25", 36 "@types/node": "^12.12.25",
36 "@types/node-fetch": "^2.5.4", 37 "@types/node-fetch": "^2.5.4",
37 "@types/throttle-debounce": "^2.1.0", 38 "@types/throttle-debounce": "^2.1.0",
38 "@types/vscode": "^1.41.0", 39 "@types/vscode": "^1.42.0",
39 "rollup": "^1.31.0", 40 "rollup": "^1.31.0",
40 "tslib": "^1.10.0", 41 "tslib": "^1.10.0",
41 "tslint": "^5.20.1", 42 "tslint": "^5.20.1",
42 "typescript": "^3.7.5", 43 "typescript": "^3.7.5",
43 "typescript-formatter": "^7.2.2", 44 "typescript-formatter": "^7.2.2",
44 "vsce": "^1.71.0" 45 "vsce": "^1.73.0"
45 }, 46 },
46 "activationEvents": [ 47 "activationEvents": [
47 "onLanguage:rust", 48 "onLanguage:rust",
@@ -181,6 +182,9 @@
181 }, 182 },
182 "rust-analyzer.excludeGlobs": { 183 "rust-analyzer.excludeGlobs": {
183 "type": "array", 184 "type": "array",
185 "items": {
186 "type": "string"
187 },
184 "default": [], 188 "default": [],
185 "description": "Paths to exclude from analysis" 189 "description": "Paths to exclude from analysis"
186 }, 190 },
@@ -196,6 +200,9 @@
196 }, 200 },
197 "rust-analyzer.cargo-watch.arguments": { 201 "rust-analyzer.cargo-watch.arguments": {
198 "type": "array", 202 "type": "array",
203 "items": {
204 "type": "string"
205 },
199 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )", 206 "description": "`cargo-watch` arguments. (e.g: `--features=\"shumway,pdf\"` will run as `cargo watch -x \"check --features=\"shumway,pdf\"\"` )",
200 "default": [] 207 "default": []
201 }, 208 },
@@ -226,11 +233,10 @@
226 "description": "Trace requests to the ra_lsp_server" 233 "description": "Trace requests to the ra_lsp_server"
227 }, 234 },
228 "rust-analyzer.lruCapacity": { 235 "rust-analyzer.lruCapacity": {
229 "type": [ 236 "type": [ "null", "integer" ],
230 "number",
231 "null"
232 ],
233 "default": null, 237 "default": null,
238 "minimum": 0,
239 "exclusiveMinimum": true,
234 "description": "Number of syntax trees rust-analyzer keeps in memory" 240 "description": "Number of syntax trees rust-analyzer keeps in memory"
235 }, 241 },
236 "rust-analyzer.displayInlayHints": { 242 "rust-analyzer.displayInlayHints": {
@@ -239,8 +245,10 @@
239 "description": "Display additional type and parameter information in the editor" 245 "description": "Display additional type and parameter information in the editor"
240 }, 246 },
241 "rust-analyzer.maxInlayHintLength": { 247 "rust-analyzer.maxInlayHintLength": {
242 "type": "number", 248 "type": [ "null", "integer" ],
243 "default": 20, 249 "default": 20,
250 "minimum": 0,
251 "exclusiveMinimum": true,
244 "description": "Maximum length for inlay hints" 252 "description": "Maximum length for inlay hints"
245 }, 253 },
246 "rust-analyzer.cargoFeatures.noDefaultFeatures": { 254 "rust-analyzer.cargoFeatures.noDefaultFeatures": {
@@ -255,6 +263,9 @@
255 }, 263 },
256 "rust-analyzer.cargoFeatures.features": { 264 "rust-analyzer.cargoFeatures.features": {
257 "type": "array", 265 "type": "array",
266 "items": {
267 "type": "string"
268 },
258 "default": [], 269 "default": [],
259 "description": "List of features to activate" 270 "description": "List of features to activate"
260 } 271 }
diff --git a/editors/code/rollup.config.js b/editors/code/rollup.config.js
index f8d320f46..337385a24 100644
--- a/editors/code/rollup.config.js
+++ b/editors/code/rollup.config.js
@@ -18,6 +18,7 @@ export default {
18 external: [...nodeBuiltins, 'vscode'], 18 external: [...nodeBuiltins, 'vscode'],
19 output: { 19 output: {
20 file: './out/main.js', 20 file: './out/main.js',
21 format: 'cjs' 21 format: 'cjs',
22 exports: 'named'
22 } 23 }
23}; 24};
diff --git a/editors/code/src/client.ts b/editors/code/src/client.ts
index 2e3d4aba2..efef820ab 100644
--- a/editors/code/src/client.ts
+++ b/editors/code/src/client.ts
@@ -1,39 +1,42 @@
1import * as lc from 'vscode-languageclient'; 1import * as lc from 'vscode-languageclient';
2import * as vscode from 'vscode';
2 3
3import { window, workspace } from 'vscode';
4import { Config } from './config'; 4import { Config } from './config';
5import { ensureLanguageServerBinary } from './installation/language_server'; 5import { ensureServerBinary } from './installation/server';
6import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';
6 7
7export async function createClient(config: Config): Promise<null | lc.LanguageClient> { 8export async function createClient(config: Config): Promise<null | lc.LanguageClient> {
8 // '.' Is the fallback if no folder is open 9 // '.' Is the fallback if no folder is open
9 // TODO?: Workspace folders support Uri's (eg: file://test.txt). 10 // TODO?: Workspace folders support Uri's (eg: file://test.txt).
10 // It might be a good idea to test if the uri points to a file. 11 // It might be a good idea to test if the uri points to a file.
11 const workspaceFolderPath = workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.'; 12 const workspaceFolderPath = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath ?? '.';
12 13
13 const raLspServerPath = await ensureLanguageServerBinary(config.langServerSource); 14 const serverPath = await ensureServerBinary(config.serverSource);
14 if (!raLspServerPath) return null; 15 if (!serverPath) return null;
15 16
16 const run: lc.Executable = { 17 const run: lc.Executable = {
17 command: raLspServerPath, 18 command: serverPath,
18 options: { cwd: workspaceFolderPath }, 19 options: { cwd: workspaceFolderPath },
19 }; 20 };
20 const serverOptions: lc.ServerOptions = { 21 const serverOptions: lc.ServerOptions = {
21 run, 22 run,
22 debug: run, 23 debug: run,
23 }; 24 };
24 const traceOutputChannel = window.createOutputChannel( 25 const traceOutputChannel = vscode.window.createOutputChannel(
25 'Rust Analyzer Language Server Trace', 26 'Rust Analyzer Language Server Trace',
26 ); 27 );
28 const cargoWatchOpts = config.cargoWatchOptions;
29
27 const clientOptions: lc.LanguageClientOptions = { 30 const clientOptions: lc.LanguageClientOptions = {
28 documentSelector: [{ scheme: 'file', language: 'rust' }], 31 documentSelector: [{ scheme: 'file', language: 'rust' }],
29 initializationOptions: { 32 initializationOptions: {
30 publishDecorations: true, 33 publishDecorations: true,
31 lruCapacity: config.lruCapacity, 34 lruCapacity: config.lruCapacity,
32 maxInlayHintLength: config.maxInlayHintLength, 35 maxInlayHintLength: config.maxInlayHintLength,
33 cargoWatchEnable: config.cargoWatchOptions.enable, 36 cargoWatchEnable: cargoWatchOpts.enable,
34 cargoWatchArgs: config.cargoWatchOptions.arguments, 37 cargoWatchArgs: cargoWatchOpts.arguments,
35 cargoWatchCommand: config.cargoWatchOptions.command, 38 cargoWatchCommand: cargoWatchOpts.command,
36 cargoWatchAllTargets: config.cargoWatchOptions.allTargets, 39 cargoWatchAllTargets: cargoWatchOpts.allTargets,
37 excludeGlobs: config.excludeGlobs, 40 excludeGlobs: config.excludeGlobs,
38 useClientWatching: config.useClientWatching, 41 useClientWatching: config.useClientWatching,
39 featureFlags: config.featureFlags, 42 featureFlags: config.featureFlags,
@@ -78,6 +81,10 @@ export async function createClient(config: Config): Promise<null | lc.LanguageCl
78 } 81 }
79 }, 82 },
80 }; 83 };
81 res.registerProposedFeatures(); 84
85 // To turn on all proposed features use: res.registerProposedFeatures();
86 // Here we want to just enable CallHierarchyFeature since it is available on stable.
87 // Note that while the CallHierarchyFeature is stable the LSP protocol is not.
88 res.registerFeature(new CallHierarchyFeature(res));
82 return res; 89 return res;
83} 90}
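The client now registers only the call-hierarchy feature instead of calling `registerProposedFeatures()`. As a rough sketch of that pattern, assuming the `vscode-languageclient` API already imported in this diff (the helper name below is illustrative, not part of the extension):

```ts
import * as lc from 'vscode-languageclient';
import { CallHierarchyFeature } from 'vscode-languageclient/lib/callHierarchy.proposed';

// Illustrative helper: opt into a single proposed LSP feature rather than all of them.
// Call hierarchy is stable in VS Code itself but still "proposed" at the LSP level,
// so it is not part of the default feature set.
export function enableCallHierarchyOnly(client: lc.LanguageClient) {
    client.registerFeature(new CallHierarchyFeature(client));
}
```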
diff --git a/editors/code/src/config.ts b/editors/code/src/config.ts
index 418845436..70cb0a612 100644
--- a/editors/code/src/config.ts
+++ b/editors/code/src/config.ts
@@ -16,45 +16,62 @@ export interface CargoFeatures {
16 allFeatures: boolean; 16 allFeatures: boolean;
17 features: string[]; 17 features: string[];
18} 18}
19
20export class Config { 19export class Config {
21 langServerSource!: null | BinarySource; 20 private static readonly rootSection = "rust-analyzer";
22 21 private static readonly requiresReloadOpts = [
23 highlightingOn = true; 22 "cargoFeatures",
24 rainbowHighlightingOn = false; 23 "cargo-watch",
25 enableEnhancedTyping = true; 24 ]
26 lruCapacity: null | number = null; 25 .map(opt => `${Config.rootSection}.${opt}`);
27 displayInlayHints = true; 26
28 maxInlayHintLength: null | number = null; 27 private static readonly extensionVersion: string = (() => {
29 excludeGlobs: string[] = []; 28 const packageJsonVersion = vscode
30 useClientWatching = true; 29 .extensions
31 featureFlags: Record<string, boolean> = {}; 30 .getExtension("matklad.rust-analyzer")!
32 // for internal use 31 .packageJSON
33 withSysroot: null | boolean = null; 32 .version as string; // n.n.YYYYMMDD
34 cargoWatchOptions: CargoWatchOptions = { 33
35 enable: true, 34 const realVersionRegexp = /^\d+\.\d+\.(\d{4})(\d{2})(\d{2})/;
36 arguments: [], 35 const [, yyyy, mm, dd] = packageJsonVersion.match(realVersionRegexp)!;
37 command: '', 36
38 allTargets: true, 37 return `${yyyy}-${mm}-${dd}`;
39 }; 38 })();
40 cargoFeatures: CargoFeatures = { 39
41 noDefaultFeatures: false, 40 private cfg!: vscode.WorkspaceConfiguration;
42 allFeatures: true, 41
43 features: [], 42 constructor(private readonly ctx: vscode.ExtensionContext) {
44 }; 43 vscode.workspace.onDidChangeConfiguration(this.onConfigChange, this, ctx.subscriptions);
45 44 this.refreshConfig();
46 private prevEnhancedTyping: null | boolean = null; 45 }
47 private prevCargoFeatures: null | CargoFeatures = null; 46
48 private prevCargoWatchOptions: null | CargoWatchOptions = null; 47
49 48 private refreshConfig() {
50 constructor(ctx: vscode.ExtensionContext) { 49 this.cfg = vscode.workspace.getConfiguration(Config.rootSection);
51 vscode.workspace.onDidChangeConfiguration(_ => this.refresh(ctx), null, ctx.subscriptions); 50 console.log("Using configuration:", this.cfg);
52 this.refresh(ctx);
53 } 51 }
54 52
55 private static expandPathResolving(path: string) { 53 private async onConfigChange(event: vscode.ConfigurationChangeEvent) {
56 if (path.startsWith('~/')) { 54 this.refreshConfig();
57 return path.replace('~', os.homedir()); 55
56 const requiresReloadOpt = Config.requiresReloadOpts.find(
57 opt => event.affectsConfiguration(opt)
58 );
59
60 if (!requiresReloadOpt) return;
61
62 const userResponse = await vscode.window.showInformationMessage(
63 `Changing "${requiresReloadOpt}" requires a reload`,
64 "Reload now"
65 );
66
67 if (userResponse === "Reload now") {
68 vscode.commands.executeCommand("workbench.action.reloadWindow");
69 }
70 }
71
72 private static replaceTildeWithHomeDir(path: string) {
73 if (path.startsWith("~/")) {
74 return os.homedir() + path.slice("~".length);
58 } 75 }
59 return path; 76 return path;
60 } 77 }
@@ -64,17 +81,14 @@ export class Config {
64 * `platform` on GitHub releases. (It is also stored under the same name when 81 * `platform` on GitHub releases. (It is also stored under the same name when
65 * downloaded by the extension). 82 * downloaded by the extension).
66 */ 83 */
67 private static prebuiltLangServerFileName( 84 get prebuiltServerFileName(): null | string {
68 platform: NodeJS.Platform,
69 arch: string
70 ): null | string {
71 // See possible `arch` values here: 85 // See possible `arch` values here:
72 // https://nodejs.org/api/process.html#process_process_arch 86 // https://nodejs.org/api/process.html#process_process_arch
73 87
74 switch (platform) { 88 switch (process.platform) {
75 89
76 case "linux": { 90 case "linux": {
77 switch (arch) { 91 switch (process.arch) {
78 case "arm": 92 case "arm":
79 case "arm64": return null; 93 case "arm64": return null;
80 94
@@ -97,29 +111,26 @@ export class Config {
97 } 111 }
98 } 112 }
99 113
100 private static langServerBinarySource( 114 get serverSource(): null | BinarySource {
101 ctx: vscode.ExtensionContext, 115 const serverPath = RA_LSP_DEBUG ?? this.cfg.get<null | string>("raLspServerPath");
102 config: vscode.WorkspaceConfiguration
103 ): null | BinarySource {
104 const langServerPath = RA_LSP_DEBUG ?? config.get<null | string>("raLspServerPath");
105 116
106 if (langServerPath) { 117 if (serverPath) {
107 return { 118 return {
108 type: BinarySource.Type.ExplicitPath, 119 type: BinarySource.Type.ExplicitPath,
109 path: Config.expandPathResolving(langServerPath) 120 path: Config.replaceTildeWithHomeDir(serverPath)
110 }; 121 };
111 } 122 }
112 123
113 const prebuiltBinaryName = Config.prebuiltLangServerFileName( 124 const prebuiltBinaryName = this.prebuiltServerFileName;
114 process.platform, process.arch
115 );
116 125
117 if (!prebuiltBinaryName) return null; 126 if (!prebuiltBinaryName) return null;
118 127
119 return { 128 return {
120 type: BinarySource.Type.GithubRelease, 129 type: BinarySource.Type.GithubRelease,
121 dir: ctx.globalStoragePath, 130 dir: this.ctx.globalStoragePath,
122 file: prebuiltBinaryName, 131 file: prebuiltBinaryName,
132 storage: this.ctx.globalState,
133 version: Config.extensionVersion,
123 repo: { 134 repo: {
124 name: "rust-analyzer", 135 name: "rust-analyzer",
125 owner: "rust-analyzer", 136 owner: "rust-analyzer",
@@ -127,158 +138,35 @@ export class Config {
127 }; 138 };
128 } 139 }
129 140
141 // We don't do runtime config validation here for simplicity. More on stackoverflow:
142 // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
130 143
131 // FIXME: revisit the logic for `if (.has(...)) config.get(...)` set default 144 get highlightingOn() { return this.cfg.get("highlightingOn") as boolean; }
132 // values only in one place (i.e. remove default values from non-readonly members declarations) 145 get rainbowHighlightingOn() { return this.cfg.get("rainbowHighlightingOn") as boolean; }
133 private refresh(ctx: vscode.ExtensionContext) { 146 get lruCapacity() { return this.cfg.get("lruCapacity") as null | number; }
134 const config = vscode.workspace.getConfiguration('rust-analyzer'); 147 get displayInlayHints() { return this.cfg.get("displayInlayHints") as boolean; }
135 148 get maxInlayHintLength() { return this.cfg.get("maxInlayHintLength") as number; }
136 let requireReloadMessage = null; 149 get excludeGlobs() { return this.cfg.get("excludeGlobs") as string[]; }
137 150 get useClientWatching() { return this.cfg.get("useClientWatching") as boolean; }
138 if (config.has('highlightingOn')) { 151 get featureFlags() { return this.cfg.get("featureFlags") as Record<string, boolean>; }
139 this.highlightingOn = config.get('highlightingOn') as boolean;
140 }
141
142 if (config.has('rainbowHighlightingOn')) {
143 this.rainbowHighlightingOn = config.get(
144 'rainbowHighlightingOn',
145 ) as boolean;
146 }
147
148 if (config.has('enableEnhancedTyping')) {
149 this.enableEnhancedTyping = config.get(
150 'enableEnhancedTyping',
151 ) as boolean;
152
153 if (this.prevEnhancedTyping === null) {
154 this.prevEnhancedTyping = this.enableEnhancedTyping;
155 }
156 } else if (this.prevEnhancedTyping === null) {
157 this.prevEnhancedTyping = this.enableEnhancedTyping;
158 }
159
160 if (this.prevEnhancedTyping !== this.enableEnhancedTyping) {
161 requireReloadMessage =
162 'Changing enhanced typing setting requires a reload';
163 this.prevEnhancedTyping = this.enableEnhancedTyping;
164 }
165
166 this.langServerSource = Config.langServerBinarySource(ctx, config);
167
168 if (config.has('cargo-watch.enable')) {
169 this.cargoWatchOptions.enable = config.get<boolean>(
170 'cargo-watch.enable',
171 true,
172 );
173 }
174
175 if (config.has('cargo-watch.arguments')) {
176 this.cargoWatchOptions.arguments = config.get<string[]>(
177 'cargo-watch.arguments',
178 [],
179 );
180 }
181
182 if (config.has('cargo-watch.command')) {
183 this.cargoWatchOptions.command = config.get<string>(
184 'cargo-watch.command',
185 '',
186 );
187 }
188
189 if (config.has('cargo-watch.allTargets')) {
190 this.cargoWatchOptions.allTargets = config.get<boolean>(
191 'cargo-watch.allTargets',
192 true,
193 );
194 }
195
196 if (config.has('lruCapacity')) {
197 this.lruCapacity = config.get('lruCapacity') as number;
198 }
199
200 if (config.has('displayInlayHints')) {
201 this.displayInlayHints = config.get('displayInlayHints') as boolean;
202 }
203 if (config.has('maxInlayHintLength')) {
204 this.maxInlayHintLength = config.get(
205 'maxInlayHintLength',
206 ) as number;
207 }
208 if (config.has('excludeGlobs')) {
209 this.excludeGlobs = config.get('excludeGlobs') || [];
210 }
211 if (config.has('useClientWatching')) {
212 this.useClientWatching = config.get('useClientWatching') || true;
213 }
214 if (config.has('featureFlags')) {
215 this.featureFlags = config.get('featureFlags') || {};
216 }
217 if (config.has('withSysroot')) {
218 this.withSysroot = config.get('withSysroot') || false;
219 }
220 152
221 if (config.has('cargoFeatures.noDefaultFeatures')) { 153 get cargoWatchOptions(): CargoWatchOptions {
222 this.cargoFeatures.noDefaultFeatures = config.get( 154 return {
223 'cargoFeatures.noDefaultFeatures', 155 enable: this.cfg.get("cargo-watch.enable") as boolean,
224 false, 156 arguments: this.cfg.get("cargo-watch.arguments") as string[],
225 ); 157 allTargets: this.cfg.get("cargo-watch.allTargets") as boolean,
226 } 158 command: this.cfg.get("cargo-watch.command") as string,
227 if (config.has('cargoFeatures.allFeatures')) { 159 };
228 this.cargoFeatures.allFeatures = config.get( 160 }
229 'cargoFeatures.allFeatures',
230 true,
231 );
232 }
233 if (config.has('cargoFeatures.features')) {
234 this.cargoFeatures.features = config.get(
235 'cargoFeatures.features',
236 [],
237 );
238 }
239 161
240 if ( 162 get cargoFeatures(): CargoFeatures {
241 this.prevCargoFeatures !== null && 163 return {
242 (this.cargoFeatures.allFeatures !== 164 noDefaultFeatures: this.cfg.get("cargoFeatures.noDefaultFeatures") as boolean,
243 this.prevCargoFeatures.allFeatures || 165 allFeatures: this.cfg.get("cargoFeatures.allFeatures") as boolean,
244 this.cargoFeatures.noDefaultFeatures !== 166 features: this.cfg.get("cargoFeatures.features") as string[],
245 this.prevCargoFeatures.noDefaultFeatures || 167 };
246 this.cargoFeatures.features.length !==
247 this.prevCargoFeatures.features.length ||
248 this.cargoFeatures.features.some(
249 (v, i) => v !== this.prevCargoFeatures!.features[i],
250 ))
251 ) {
252 requireReloadMessage = 'Changing cargo features requires a reload';
253 }
254 this.prevCargoFeatures = { ...this.cargoFeatures };
255
256 if (this.prevCargoWatchOptions !== null) {
257 const changed =
258 this.cargoWatchOptions.enable !== this.prevCargoWatchOptions.enable ||
259 this.cargoWatchOptions.command !== this.prevCargoWatchOptions.command ||
260 this.cargoWatchOptions.allTargets !== this.prevCargoWatchOptions.allTargets ||
261 this.cargoWatchOptions.arguments.length !== this.prevCargoWatchOptions.arguments.length ||
262 this.cargoWatchOptions.arguments.some(
263 (v, i) => v !== this.prevCargoWatchOptions!.arguments[i],
264 );
265 if (changed) {
266 requireReloadMessage = 'Changing cargo-watch options requires a reload';
267 }
268 }
269 this.prevCargoWatchOptions = { ...this.cargoWatchOptions };
270
271 if (requireReloadMessage !== null) {
272 const reloadAction = 'Reload now';
273 vscode.window
274 .showInformationMessage(requireReloadMessage, reloadAction)
275 .then(selectedAction => {
276 if (selectedAction === reloadAction) {
277 vscode.commands.executeCommand(
278 'workbench.action.reloadWindow',
279 );
280 }
281 });
282 }
283 } 168 }
169
170 // for internal use
171 get withSysroot() { return this.cfg.get("withSysroot", true) as boolean; }
284} 172}
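For reference, here is a minimal sketch of the lazy-getter shape the rewritten `Config` takes, plus the date parsing applied to the `n.n.YYYYMMDD` package version; the class name and standalone snippet are illustrative, not the extension's exact code:

```ts
import * as vscode from "vscode";

// Sketch: read settings from the live WorkspaceConfiguration on demand instead of
// caching every value at construction time.
class ConfigSketch {
    private cfg = vscode.workspace.getConfiguration("rust-analyzer");

    refresh() {
        this.cfg = vscode.workspace.getConfiguration("rust-analyzer");
    }

    get lruCapacity(): null | number {
        return this.cfg.get("lruCapacity") as null | number;
    }
}

// Sketch of the version parsing shown above: "0.2.20200211" becomes "2020-02-11".
const packageJsonVersion = "0.2.20200211"; // example value, matching package.json in this diff
const [, yyyy, mm, dd] = packageJsonVersion.match(/^\d+\.\d+\.(\d{4})(\d{2})(\d{2})/)!;
const extensionVersion = `${yyyy}-${mm}-${dd}`; // "2020-02-11"
```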
diff --git a/editors/code/src/ctx.ts b/editors/code/src/ctx.ts
index 70042a479..9fcf2ec38 100644
--- a/editors/code/src/ctx.ts
+++ b/editors/code/src/ctx.ts
@@ -60,6 +60,10 @@ export class Ctx {
60 this.pushCleanup(d); 60 this.pushCleanup(d);
61 } 61 }
62 62
63 get globalState(): vscode.Memento {
64 return this.extCtx.globalState;
65 }
66
63 get subscriptions(): Disposable[] { 67 get subscriptions(): Disposable[] {
64 return this.extCtx.subscriptions; 68 return this.extCtx.subscriptions;
65 } 69 }
diff --git a/editors/code/src/installation/download_artifact.ts b/editors/code/src/installation/download_artifact.ts
new file mode 100644
index 000000000..de655f8f4
--- /dev/null
+++ b/editors/code/src/installation/download_artifact.ts
@@ -0,0 +1,58 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { promises as fs } from "fs";
4import { strict as assert } from "assert";
5
6import { ArtifactReleaseInfo } from "./interfaces";
7import { downloadFile } from "./download_file";
8import { throttle } from "throttle-debounce";
9
10/**
11 * Downloads the artifact from the given `downloadUrl`.
12 * Creates `installationDir` if it does not yet exist and puts the artifact there under
13 * `artifactFileName`.
14 * Reports download progress in an info message, using `displayName` as the
15 * human-readable name of the artifact.
16 */
17export async function downloadArtifact(
18 {downloadUrl, releaseName}: ArtifactReleaseInfo,
19 artifactFileName: string,
20 installationDir: string,
21 displayName: string,
22) {
23 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
24 err?.code,
25 "EEXIST",
26 `Couldn't create directory "${installationDir}" to download `+
27 `${artifactFileName} artifact: ${err.message}`
28 ));
29
30 const installationPath = path.join(installationDir, artifactFileName);
31
32 console.time(`Downloading ${artifactFileName}`);
33 await vscode.window.withProgress(
34 {
35 location: vscode.ProgressLocation.Notification,
36 cancellable: false, // FIXME: add support for canceling download?
37 title: `Downloading ${displayName} (${releaseName})`
38 },
39 async (progress, _cancellationToken) => {
40 let lastPercentage = 0;
41 const filePermissions = 0o755; // (rwx, r_x, r_x)
42 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
43 200,
44 /* noTrailing: */ true,
45 (readBytes, totalBytes) => {
46 const newPercentage = (readBytes / totalBytes) * 100;
47 progress.report({
48 message: newPercentage.toFixed(0) + "%",
49 increment: newPercentage - lastPercentage
50 });
51
52 lastPercentage = newPercentage;
53 })
54 );
55 }
56 );
57 console.timeEnd(`Downloading ${artifactFileName}`);
58}
diff --git a/editors/code/src/installation/fetch_latest_artifact_metadata.ts b/editors/code/src/installation/fetch_artifact_release_info.ts
index 7e3700603..7d497057a 100644
--- a/editors/code/src/installation/fetch_latest_artifact_metadata.ts
+++ b/editors/code/src/installation/fetch_artifact_release_info.ts
@@ -1,26 +1,32 @@
1import fetch from "node-fetch"; 1import fetch from "node-fetch";
2import { GithubRepo, ArtifactMetadata } from "./interfaces"; 2import { GithubRepo, ArtifactReleaseInfo } from "./interfaces";
3 3
4const GITHUB_API_ENDPOINT_URL = "https://api.github.com"; 4const GITHUB_API_ENDPOINT_URL = "https://api.github.com";
5 5
6
6/** 7/**
7 * Fetches the latest release from GitHub `repo` and returns metadata about 8 * Fetches the release with `releaseTag` (or just latest release when not specified)
8 * `artifactFileName` shipped with this release or `null` if no such artifact was published. 9 * from GitHub `repo` and returns metadata about `artifactFileName` shipped with
10 * this release or `null` if no such artifact was published.
9 */ 11 */
10export async function fetchLatestArtifactMetadata( 12export async function fetchArtifactReleaseInfo(
11 repo: GithubRepo, artifactFileName: string 13 repo: GithubRepo, artifactFileName: string, releaseTag?: string
12): Promise<null | ArtifactMetadata> { 14): Promise<null | ArtifactReleaseInfo> {
13 15
14 const repoOwner = encodeURIComponent(repo.owner); 16 const repoOwner = encodeURIComponent(repo.owner);
15 const repoName = encodeURIComponent(repo.name); 17 const repoName = encodeURIComponent(repo.name);
16 18
17 const apiEndpointPath = `/repos/${repoOwner}/${repoName}/releases/latest`; 19 const apiEndpointPath = releaseTag
20 ? `/repos/${repoOwner}/${repoName}/releases/tags/${releaseTag}`
21 : `/repos/${repoOwner}/${repoName}/releases/latest`;
22
18 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath; 23 const requestUrl = GITHUB_API_ENDPOINT_URL + apiEndpointPath;
19 24
20 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`) 25 // We skip runtime type checks for simplicity (here we cast from `any` to `GithubRelease`)
21 26
22 console.log("Issuing request for released artifacts metadata to", requestUrl); 27 console.log("Issuing request for released artifacts metadata to", requestUrl);
23 28
29 // FIXME: handle non-ok response
24 const response: GithubRelease = await fetch(requestUrl, { 30 const response: GithubRelease = await fetch(requestUrl, {
25 headers: { Accept: "application/vnd.github.v3+json" } 31 headers: { Accept: "application/vnd.github.v3+json" }
26 }) 32 })
diff --git a/editors/code/src/installation/interfaces.ts b/editors/code/src/installation/interfaces.ts
index 8039d0b90..e40839e4b 100644
--- a/editors/code/src/installation/interfaces.ts
+++ b/editors/code/src/installation/interfaces.ts
@@ -1,3 +1,5 @@
1import * as vscode from "vscode";
2
1export interface GithubRepo { 3export interface GithubRepo {
2 name: string; 4 name: string;
3 owner: string; 5 owner: string;
@@ -6,7 +8,7 @@ export interface GithubRepo {
6/** 8/**
7 * Metadata about particular artifact retrieved from GitHub releases. 9 * Metadata about particular artifact retrieved from GitHub releases.
8 */ 10 */
9export interface ArtifactMetadata { 11export interface ArtifactReleaseInfo {
10 releaseName: string; 12 releaseName: string;
11 downloadUrl: string; 13 downloadUrl: string;
12} 14}
@@ -50,6 +52,17 @@ export namespace BinarySource {
50 * and in local `.dir`. 52 * and in local `.dir`.
51 */ 53 */
52 file: string; 54 file: string;
55
56 /**
57 * Tag of github release that denotes a version required by this extension.
58 */
59 version: string;
60
61 /**
62 * Object that provides `get()/update()` operations to store metadata
63 * about the actual binary, e.g. its actual version.
64 */
65 storage: vscode.Memento;
53 } 66 }
54 67
55} 68}
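The new `version` and `storage` fields pair the release tag required by the extension with a `vscode.Memento` that remembers which binary is actually installed. A small sketch of that read/write pattern; the key name is an assumption for illustration, not taken from this diff:

```ts
import * as vscode from "vscode";

const SERVER_VERSION_KEY = "server-version"; // assumed key, for illustration only

function getStoredServerVersion(storage: vscode.Memento): null | string {
    return storage.get<null | string>(SERVER_VERSION_KEY, null);
}

async function setStoredServerVersion(storage: vscode.Memento, version: string): Promise<void> {
    await storage.update(SERVER_VERSION_KEY, version);
}
```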
diff --git a/editors/code/src/installation/language_server.ts b/editors/code/src/installation/language_server.ts
deleted file mode 100644
index 4797c3f01..000000000
--- a/editors/code/src/installation/language_server.ts
+++ /dev/null
@@ -1,148 +0,0 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as fs } from "fs";
5import { promises as dns } from "dns";
6import { spawnSync } from "child_process";
7import { throttle } from "throttle-debounce";
8
9import { BinarySource } from "./interfaces";
10import { fetchLatestArtifactMetadata } from "./fetch_latest_artifact_metadata";
11import { downloadFile } from "./download_file";
12
13export async function downloadLatestLanguageServer(
14 {file: artifactFileName, dir: installationDir, repo}: BinarySource.GithubRelease
15) {
16 const { releaseName, downloadUrl } = (await fetchLatestArtifactMetadata(
17 repo, artifactFileName
18 ))!;
19
20 await fs.mkdir(installationDir).catch(err => assert.strictEqual(
21 err?.code,
22 "EEXIST",
23 `Couldn't create directory "${installationDir}" to download `+
24 `language server binary: ${err.message}`
25 ));
26
27 const installationPath = path.join(installationDir, artifactFileName);
28
29 console.time("Downloading ra_lsp_server");
30 await vscode.window.withProgress(
31 {
32 location: vscode.ProgressLocation.Notification,
33 cancellable: false, // FIXME: add support for canceling download?
34 title: `Downloading language server (${releaseName})`
35 },
36 async (progress, _cancellationToken) => {
37 let lastPrecentage = 0;
38 const filePermissions = 0o755; // (rwx, r_x, r_x)
39 await downloadFile(downloadUrl, installationPath, filePermissions, throttle(
40 200,
41 /* noTrailing: */ true,
42 (readBytes, totalBytes) => {
43 const newPercentage = (readBytes / totalBytes) * 100;
44 progress.report({
45 message: newPercentage.toFixed(0) + "%",
46 increment: newPercentage - lastPrecentage
47 });
48
49 lastPrecentage = newPercentage;
50 })
51 );
52 }
53 );
54 console.timeEnd("Downloading ra_lsp_server");
55}
56export async function ensureLanguageServerBinary(
57 langServerSource: null | BinarySource
58): Promise<null | string> {
59
60 if (!langServerSource) {
61 vscode.window.showErrorMessage(
62 "Unfortunately we don't ship binaries for your platform yet. " +
63 "You need to manually clone rust-analyzer repository and " +
64 "run `cargo xtask install --server` to build the language server from sources. " +
65 "If you feel that your platform should be supported, please create an issue " +
66 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
67 "will consider it."
68 );
69 return null;
70 }
71
72 switch (langServerSource.type) {
73 case BinarySource.Type.ExplicitPath: {
74 if (isBinaryAvailable(langServerSource.path)) {
75 return langServerSource.path;
76 }
77
78 vscode.window.showErrorMessage(
79 `Unable to run ${langServerSource.path} binary. ` +
80 `To use the pre-built language server, set "rust-analyzer.raLspServerPath" ` +
81 "value to `null` or remove it from the settings to use it by default."
82 );
83 return null;
84 }
85 case BinarySource.Type.GithubRelease: {
86 const prebuiltBinaryPath = path.join(langServerSource.dir, langServerSource.file);
87
88 if (isBinaryAvailable(prebuiltBinaryPath)) {
89 return prebuiltBinaryPath;
90 }
91
92 const userResponse = await vscode.window.showInformationMessage(
93 "Language server binary for rust-analyzer was not found. " +
94 "Do you want to download it now?",
95 "Download now", "Cancel"
96 );
97 if (userResponse !== "Download now") return null;
98
99 try {
100 await downloadLatestLanguageServer(langServerSource);
101 } catch (err) {
102 vscode.window.showErrorMessage(
103 `Failed to download language server from ${langServerSource.repo.name} ` +
104 `GitHub repository: ${err.message}`
105 );
106
107 console.error(err);
108
109 dns.resolve('example.com').then(
110 addrs => console.log("DNS resolution for example.com was successful", addrs),
111 err => {
112 console.error(
113 "DNS resolution for example.com failed, " +
114 "there might be an issue with Internet availability"
115 );
116 console.error(err);
117 }
118 );
119
120 return null;
121 }
122
123 if (!isBinaryAvailable(prebuiltBinaryPath)) assert(false,
124 `Downloaded language server binary is not functional.` +
125 `Downloaded from: ${JSON.stringify(langServerSource)}`
126 );
127
128
129 vscode.window.showInformationMessage(
130 "Rust analyzer language server was successfully installed 🦀"
131 );
132
133 return prebuiltBinaryPath;
134 }
135 }
136
137 function isBinaryAvailable(binaryPath: string) {
138 const res = spawnSync(binaryPath, ["--version"]);
139
140 // ACHTUNG! `res` type declaration is inherently wrong, see
141 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
142
143 console.log("Checked binary availablity via --version", res);
144 console.log(binaryPath, "--version output:", res.output?.map(String));
145
146 return res.status === 0;
147 }
148}
diff --git a/editors/code/src/installation/server.ts b/editors/code/src/installation/server.ts
new file mode 100644
index 000000000..80cb719e3
--- /dev/null
+++ b/editors/code/src/installation/server.ts
@@ -0,0 +1,124 @@
1import * as vscode from "vscode";
2import * as path from "path";
3import { strict as assert } from "assert";
4import { promises as dns } from "dns";
5import { spawnSync } from "child_process";
6
7import { BinarySource } from "./interfaces";
8import { fetchArtifactReleaseInfo } from "./fetch_artifact_release_info";
9import { downloadArtifact } from "./download_artifact";
10
11export async function ensureServerBinary(source: null | BinarySource): Promise<null | string> {
12 if (!source) {
13 vscode.window.showErrorMessage(
14 "Unfortunately we don't ship binaries for your platform yet. " +
15 "You need to manually clone rust-analyzer repository and " +
16 "run `cargo xtask install --server` to build the language server from sources. " +
17 "If you feel that your platform should be supported, please create an issue " +
18 "about that [here](https://github.com/rust-analyzer/rust-analyzer/issues) and we " +
19 "will consider it."
20 );
21 return null;
22 }
23
24 switch (source.type) {
25 case BinarySource.Type.ExplicitPath: {
26 if (isBinaryAvailable(source.path)) {
27 return source.path;
28 }
29
30 vscode.window.showErrorMessage(
31 `Unable to run ${source.path} binary. ` +
32 `To use the pre-built language server, set the "rust-analyzer.raLspServerPath" ` +
33 "value to `null` or remove it from the settings; the pre-built server is then used by default."
34 );
35 return null;
36 }
37 case BinarySource.Type.GithubRelease: {
38 const prebuiltBinaryPath = path.join(source.dir, source.file);
39
40 const installedVersion: null | string = getServerVersion(source.storage);
41 const requiredVersion: string = source.version;
42
43 console.log("Installed version:", installedVersion, "required:", requiredVersion);
44
45 if (isBinaryAvailable(prebuiltBinaryPath) && installedVersion == requiredVersion) {
46 // FIXME: check for new releases and notify the user to update if possible
47 return prebuiltBinaryPath;
48 }
49
50 const userResponse = await vscode.window.showInformationMessage(
51 `Language server version ${source.version} for rust-analyzer is not installed. ` +
52 "Do you want to download it now?",
53 "Download now", "Cancel"
54 );
55 if (userResponse !== "Download now") return null;
56
57 if (!await downloadServer(source)) return null;
58
59 return prebuiltBinaryPath;
60 }
61 }
62}
63
64async function downloadServer(source: BinarySource.GithubRelease): Promise<boolean> {
65 try {
66 const releaseInfo = (await fetchArtifactReleaseInfo(source.repo, source.file, source.version))!;
67
68 await downloadArtifact(releaseInfo, source.file, source.dir, "language server");
69 await setServerVersion(source.storage, releaseInfo.releaseName);
70 } catch (err) {
71 vscode.window.showErrorMessage(
72 `Failed to download language server from ${source.repo.name} ` +
73 `GitHub repository: ${err.message}`
74 );
75
76 console.error(err);
77
78 dns.resolve('example.com').then(
79 addrs => console.log("DNS resolution for example.com was successful", addrs),
80 err => {
81 console.error(
82 "DNS resolution for example.com failed, " +
83 "there might be an issue with Internet availability"
84 );
85 console.error(err);
86 }
87 );
88 return false;
89 }
90
91 if (!isBinaryAvailable(path.join(source.dir, source.file))) assert(false,
92 `Downloaded language server binary is not functional. ` +
93 `Downloaded from: ${JSON.stringify(source, null, 4)}`
94 );
95
96 vscode.window.showInformationMessage(
97 "Rust analyzer language server was successfully installed 🦀"
98 );
99
100 return true;
101}
102
103function isBinaryAvailable(binaryPath: string): boolean {
104 const res = spawnSync(binaryPath, ["--version"]);
105
106 // ACHTUNG! `res` type declaration is inherently wrong, see
107 // https://github.com/DefinitelyTyped/DefinitelyTyped/issues/42221
108
109 console.log("Checked binary availability via --version", res);
110 console.log(binaryPath, "--version output:", res.output?.map(String));
111
112 return res.status === 0;
113}
114
115function getServerVersion(storage: vscode.Memento): null | string {
116 const version = storage.get<null | string>("server-version", null);
117 console.log("Get server-version:", version);
118 return version;
119}
120
121async function setServerVersion(storage: vscode.Memento, version: string): Promise<void> {
122 console.log("Set server-version:", version);
123 await storage.update("server-version", version.toString());
124}
diff --git a/editors/code/src/status_display.ts b/editors/code/src/status_display.ts
index 51dbf388b..993e79d70 100644
--- a/editors/code/src/status_display.ts
+++ b/editors/code/src/status_display.ts
@@ -66,9 +66,9 @@ class StatusDisplay implements Disposable {
66 66
67 refreshLabel() { 67 refreshLabel() {
68 if (this.packageName) { 68 if (this.packageName) {
69 this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`; 69 this.statusBarItem.text = `${spinnerFrames[this.i]} cargo ${this.command} [${this.packageName}]`;
70 } else { 70 } else {
71 this.statusBarItem!.text = `${spinnerFrames[this.i]} cargo ${this.command}`; 71 this.statusBarItem.text = `${spinnerFrames[this.i]} cargo ${this.command}`;
72 } 72 }
73 } 73 }
74 74
diff --git a/xtask/src/cmd.rs b/xtask/src/cmd.rs
deleted file mode 100644
index 37497fb74..000000000
--- a/xtask/src/cmd.rs
+++ /dev/null
@@ -1,56 +0,0 @@
1use std::process::{Command, Output, Stdio};
2
3use anyhow::{Context, Result};
4
5use crate::project_root;
6
7pub struct Cmd<'a> {
8 pub unix: &'a str,
9 pub windows: &'a str,
10 pub work_dir: &'a str,
11}
12
13impl Cmd<'_> {
14 pub fn run(self) -> Result<()> {
15 if cfg!(windows) {
16 run(self.windows, self.work_dir)
17 } else {
18 run(self.unix, self.work_dir)
19 }
20 }
21 pub fn run_with_output(self) -> Result<String> {
22 if cfg!(windows) {
23 run_with_output(self.windows, self.work_dir)
24 } else {
25 run_with_output(self.unix, self.work_dir)
26 }
27 }
28}
29
30pub fn run(cmdline: &str, dir: &str) -> Result<()> {
31 do_run(cmdline, dir, &mut |c| {
32 c.stdout(Stdio::inherit());
33 })
34 .map(|_| ())
35}
36
37pub fn run_with_output(cmdline: &str, dir: &str) -> Result<String> {
38 let output = do_run(cmdline, dir, &mut |_| {})?;
39 let stdout = String::from_utf8(output.stdout)?;
40 let stdout = stdout.trim().to_string();
41 Ok(stdout)
42}
43
44fn do_run(cmdline: &str, dir: &str, f: &mut dyn FnMut(&mut Command)) -> Result<Output> {
45 eprintln!("\nwill run: {}", cmdline);
46 let proj_dir = project_root().join(dir);
47 let mut args = cmdline.split_whitespace();
48 let exec = args.next().unwrap();
49 let mut cmd = Command::new(exec);
50 f(cmd.args(args).current_dir(proj_dir).stderr(Stdio::inherit()));
51 let output = cmd.output().with_context(|| format!("running `{}`", cmdline))?;
52 if !output.status.success() {
53 anyhow::bail!("`{}` exited with {}", cmdline, output.status);
54 }
55 Ok(output)
56}
diff --git a/xtask/src/install.rs b/xtask/src/install.rs
index 99e1eddb1..00bbabce4 100644
--- a/xtask/src/install.rs
+++ b/xtask/src/install.rs
@@ -2,9 +2,9 @@
2 2
3use std::{env, path::PathBuf, str}; 3use std::{env, path::PathBuf, str};
4 4
5use anyhow::{Context, Result}; 5use anyhow::{bail, format_err, Context, Result};
6 6
7use crate::cmd::{run, run_with_output, Cmd}; 7use crate::not_bash::{ls, pushd, rm, run};
8 8
9// Latest stable, feel free to send a PR if this lags behind. 9// Latest stable, feel free to send a PR if this lags behind.
10const REQUIRED_RUST_VERSION: u32 = 41; 10const REQUIRED_RUST_VERSION: u32 = 41;
@@ -55,7 +55,7 @@ fn fix_path_for_mac() -> Result<()> {
55 const ROOT_DIR: &str = ""; 55 const ROOT_DIR: &str = "";
56 let home_dir = match env::var("HOME") { 56 let home_dir = match env::var("HOME") {
57 Ok(home) => home, 57 Ok(home) => home,
58 Err(e) => anyhow::bail!("Failed getting HOME from environment with error: {}.", e), 58 Err(e) => bail!("Failed getting HOME from environment with error: {}.", e),
59 }; 59 };
60 60
61 [ROOT_DIR, &home_dir] 61 [ROOT_DIR, &home_dir]
@@ -69,7 +69,7 @@ fn fix_path_for_mac() -> Result<()> {
69 if !vscode_path.is_empty() { 69 if !vscode_path.is_empty() {
70 let vars = match env::var_os("PATH") { 70 let vars = match env::var_os("PATH") {
71 Some(path) => path, 71 Some(path) => path,
72 None => anyhow::bail!("Could not get PATH variable from env."), 72 None => bail!("Could not get PATH variable from env."),
73 }; 73 };
74 74
75 let mut paths = env::split_paths(&vars).collect::<Vec<_>>(); 75 let mut paths = env::split_paths(&vars).collect::<Vec<_>>();
@@ -82,84 +82,61 @@ fn fix_path_for_mac() -> Result<()> {
82} 82}
83 83
84fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> { 84fn install_client(ClientOpt::VsCode: ClientOpt) -> Result<()> {
85 let npm_version = Cmd { 85 let _dir = pushd("./editors/code");
86 unix: r"npm --version",
87 windows: r"cmd.exe /c npm --version",
88 work_dir: "./editors/code",
89 }
90 .run();
91
92 if npm_version.is_err() {
93 eprintln!("\nERROR: `npm --version` failed, `npm` is required to build the VS Code plugin")
94 }
95 86
96 Cmd { unix: r"npm install", windows: r"cmd.exe /c npm install", work_dir: "./editors/code" } 87 let find_code = |f: fn(&str) -> bool| -> Result<&'static str> {
97 .run()?; 88 ["code", "code-insiders", "codium", "code-oss"]
98 Cmd { 89 .iter()
99 unix: r"npm run package --scripts-prepend-node-path", 90 .copied()
100 windows: r"cmd.exe /c npm run package", 91 .find(|bin| f(bin))
101 work_dir: "./editors/code", 92 .ok_or_else(|| {
102 } 93 format_err!("Can't execute `code --version`. Perhaps it is not in $PATH?")
103 .run()?; 94 })
95 };
104 96
105 let code_binary = ["code", "code-insiders", "codium", "code-oss"].iter().find(|bin| { 97 let installed_extensions;
106 Cmd { 98 if cfg!(unix) {
107 unix: &format!("{} --version", bin), 99 run!("npm --version").context("`npm` is required to build the VS Code plugin")?;
108 windows: &format!("cmd.exe /c {}.cmd --version", bin), 100 run!("npm install")?;
109 work_dir: "./editors/code",
110 }
111 .run()
112 .is_ok()
113 });
114 101
115 let code_binary = match code_binary { 102 let vsix_pkg = {
116 Some(it) => it, 103 rm("*.vsix")?;
117 None => anyhow::bail!("Can't execute `code --version`. Perhaps it is not in $PATH?"), 104 run!("npm run package --scripts-prepend-node-path")?;
118 }; 105 ls("*.vsix")?.pop().unwrap()
106 };
119 107
120 Cmd { 108 let code = find_code(|bin| run!("{} --version", bin).is_ok())?;
121 unix: &format!(r"{} --install-extension ./rust-analyzer-0.1.0.vsix --force", code_binary), 109 run!("{} --install-extension {} --force", code, vsix_pkg.display())?;
122 windows: &format!( 110 installed_extensions = run!("{} --list-extensions", code; echo = false)?;
123 r"cmd.exe /c {}.cmd --install-extension ./rust-analyzer-0.1.0.vsix --force", 111 } else {
124 code_binary 112 run!("cmd.exe /c npm --version")
125 ), 113 .context("`npm` is required to build the VS Code plugin")?;
126 work_dir: "./editors/code", 114 run!("cmd.exe /c npm install")?;
127 } 115
128 .run()?; 116 let vsix_pkg = {
117 rm("*.vsix")?;
118 run!("cmd.exe /c npm run package")?;
119 ls("*.vsix")?.pop().unwrap()
120 };
129 121
130 let installed_extensions = Cmd { 122 let code = find_code(|bin| run!("cmd.exe /c {}.cmd --version", bin).is_ok())?;
131 unix: &format!(r"{} --list-extensions", code_binary), 123 run!(r"cmd.exe /c {}.cmd --install-extension {} --force", code, vsix_pkg.display())?;
132 windows: &format!(r"cmd.exe /c {}.cmd --list-extensions", code_binary), 124 installed_extensions = run!("cmd.exe /c {}.cmd --list-extensions", code; echo = false)?;
133 work_dir: ".",
134 } 125 }
135 .run_with_output()?;
136 126
137 if !installed_extensions.contains("rust-analyzer") { 127 if !installed_extensions.contains("rust-analyzer") {
138 anyhow::bail!( 128 bail!(
139 "Could not install the Visual Studio Code extension. \ 129 "Could not install the Visual Studio Code extension. \
140 Please make sure you have at least NodeJS 10.x together with the latest version of VS Code installed and try again." 130 Please make sure you have at least NodeJS 12.x together with the latest version of VS Code installed and try again."
141 ); 131 );
142 } 132 }
143 133
144 if installed_extensions.contains("ra-lsp") {
145 Cmd {
146 unix: &format!(r"{} --uninstall-extension matklad.ra-lsp", code_binary),
147 windows: &format!(
148 r"cmd.exe /c {}.cmd --uninstall-extension matklad.ra-lsp",
149 code_binary
150 ),
151 work_dir: "./editors/code",
152 }
153 .run()?;
154 }
155
156 Ok(()) 134 Ok(())
157} 135}
158 136
159fn install_server(opts: ServerOpt) -> Result<()> { 137fn install_server(opts: ServerOpt) -> Result<()> {
160 let mut old_rust = false; 138 let mut old_rust = false;
161 if let Ok(stdout) = run_with_output("cargo --version", ".") { 139 if let Ok(stdout) = run!("cargo --version") {
162 println!("{}", stdout);
163 if !check_version(&stdout, REQUIRED_RUST_VERSION) { 140 if !check_version(&stdout, REQUIRED_RUST_VERSION) {
164 old_rust = true; 141 old_rust = true;
165 } 142 }
@@ -172,20 +149,17 @@ fn install_server(opts: ServerOpt) -> Result<()> {
172 ) 149 )
173 } 150 }
174 151
175 let res = if opts.jemalloc { 152 let jemalloc = if opts.jemalloc { "--features jemalloc" } else { "" };
176 run("cargo install --path crates/ra_lsp_server --locked --force --features jemalloc", ".") 153 let res = run!("cargo install --path crates/ra_lsp_server --locked --force {}", jemalloc);
177 } else {
178 run("cargo install --path crates/ra_lsp_server --locked --force", ".")
179 };
180 154
181 if res.is_err() && old_rust { 155 if res.is_err() && old_rust {
182 eprintln!( 156 eprintln!(
183 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n", 157 "\nWARNING: at least rust 1.{}.0 is required to compile rust-analyzer\n",
184 REQUIRED_RUST_VERSION, 158 REQUIRED_RUST_VERSION,
185 ) 159 );
186 } 160 }
187 161
188 res 162 res.map(drop)
189} 163}
190 164
191fn check_version(version_output: &str, min_minor_version: u32) -> bool { 165fn check_version(version_output: &str, min_minor_version: u32) -> bool {
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index 1bb1882b0..2bcd76d60 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -1,6 +1,6 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3mod cmd; 3pub mod not_bash;
4pub mod install; 4pub mod install;
5pub mod pre_commit; 5pub mod pre_commit;
6 6
@@ -9,15 +9,15 @@ mod ast_src;
9 9
10use anyhow::Context; 10use anyhow::Context;
11use std::{ 11use std::{
12 env, fs, 12 env,
13 io::Write, 13 io::Write,
14 path::{Path, PathBuf}, 14 path::{Path, PathBuf},
15 process::{Command, Stdio}, 15 process::{Command, Stdio},
16}; 16};
17 17
18use crate::{ 18use crate::{
19 cmd::{run, run_with_output},
20 codegen::Mode, 19 codegen::Mode,
20 not_bash::{fs2, pushd, rm_rf, run},
21}; 21};
22 22
23pub use anyhow::Result; 23pub use anyhow::Result;
@@ -38,9 +38,9 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> {
38 ensure_rustfmt()?; 38 ensure_rustfmt()?;
39 39
40 if mode == Mode::Verify { 40 if mode == Mode::Verify {
41 run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; 41 run!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN)?;
42 } else { 42 } else {
43 run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; 43 run!("rustup run {} -- cargo fmt", TOOLCHAIN)?;
44 } 44 }
45 Ok(()) 45 Ok(())
46} 46}
@@ -70,8 +70,9 @@ fn ensure_rustfmt() -> Result<()> {
70 Ok(status) if status.success() => return Ok(()), 70 Ok(status) if status.success() => return Ok(()),
71 _ => (), 71 _ => (),
72 }; 72 };
73 run(&format!("rustup toolchain install {}", TOOLCHAIN), ".")?; 73 run!("rustup toolchain install {}", TOOLCHAIN)?;
74 run(&format!("rustup component add rustfmt --toolchain {}", TOOLCHAIN), ".") 74 run!("rustup component add rustfmt --toolchain {}", TOOLCHAIN)?;
75 Ok(())
75} 76}
76 77
77pub fn run_clippy() -> Result<()> { 78pub fn run_clippy() -> Result<()> {
@@ -92,34 +93,28 @@ pub fn run_clippy() -> Result<()> {
92 "clippy::nonminimal_bool", 93 "clippy::nonminimal_bool",
93 "clippy::redundant_pattern_matching", 94 "clippy::redundant_pattern_matching",
94 ]; 95 ];
95 run( 96 run!(
96 &format!( 97 "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}",
97 "rustup run {} -- cargo clippy --all-features --all-targets -- -A {}", 98 TOOLCHAIN,
98 TOOLCHAIN, 99 allowed_lints.join(" -A ")
99 allowed_lints.join(" -A ")
100 ),
101 ".",
102 )?; 100 )?;
103 Ok(()) 101 Ok(())
104} 102}
105 103
106fn install_clippy() -> Result<()> { 104fn install_clippy() -> Result<()> {
107 run(&format!("rustup toolchain install {}", TOOLCHAIN), ".")?; 105 run!("rustup toolchain install {}", TOOLCHAIN)?;
108 run(&format!("rustup component add clippy --toolchain {}", TOOLCHAIN), ".") 106 run!("rustup component add clippy --toolchain {}", TOOLCHAIN)?;
107 Ok(())
109} 108}
110 109
111pub fn run_fuzzer() -> Result<()> { 110pub fn run_fuzzer() -> Result<()> {
112 match Command::new("cargo") 111 let _d = pushd("./crates/ra_syntax");
113 .args(&["fuzz", "--help"]) 112 if run!("cargo fuzz --help").is_err() {
114 .stderr(Stdio::null()) 113 run!("cargo install cargo-fuzz")?;
115 .stdout(Stdio::null())
116 .status()
117 {
118 Ok(status) if status.success() => (),
119 _ => run("cargo install cargo-fuzz", ".")?,
120 }; 114 };
121 115
122 run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax") 116 run!("rustup run nightly -- cargo fuzz run parser")?;
117 Ok(())
123} 118}
124 119
125/// Cleans the `./target` dir after the build such that only 120/// Cleans the `./target` dir after the build such that only
@@ -141,7 +136,7 @@ pub fn run_pre_cache() -> Result<()> {
141 } 136 }
142 } 137 }
143 138
144 fs::remove_file("./target/.rustc_info.json")?; 139 fs2::remove_file("./target/.rustc_info.json")?;
145 let to_delete = ["ra_", "heavy_test"]; 140 let to_delete = ["ra_", "heavy_test"];
146 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() { 141 for &dir in ["./target/debug/deps", "target/debug/.fingerprint"].iter() {
147 for entry in Path::new(dir).read_dir()? { 142 for entry in Path::new(dir).read_dir()? {
@@ -155,22 +150,20 @@ pub fn run_pre_cache() -> Result<()> {
155 Ok(()) 150 Ok(())
156} 151}
157 152
158fn rm_rf(path: &Path) -> Result<()> { 153pub fn run_release(dry_run: bool) -> Result<()> {
159 if path.is_file() { fs::remove_file(path) } else { fs::remove_dir_all(path) } 154 if !dry_run {
160 .with_context(|| format!("failed to remove {:?}", path)) 155 run!("git switch release")?;
161} 156 run!("git fetch upstream")?;
162 157 run!("git reset --hard upstream/master")?;
163pub fn run_release() -> Result<()> { 158 run!("git push")?;
164 run("git switch release", ".")?; 159 }
165 run("git fetch upstream", ".")?;
166 run("git reset --hard upstream/master", ".")?;
167 run("git push", ".")?;
168 160
169 let changelog_dir = project_root().join("../rust-analyzer.github.io/thisweek/_posts"); 161 let website_root = project_root().join("../rust-analyzer.github.io");
162 let changelog_dir = website_root.join("./thisweek/_posts");
170 163
171 let today = run_with_output("date --iso", ".")?; 164 let today = run!("date --iso")?;
172 let commit = run_with_output("git rev-parse HEAD", ".")?; 165 let commit = run!("git rev-parse HEAD")?;
173 let changelog_n = fs::read_dir(changelog_dir.as_path())?.count(); 166 let changelog_n = fs2::read_dir(changelog_dir.as_path())?.count();
174 167
175 let contents = format!( 168 let contents = format!(
176 "\ 169 "\
@@ -193,7 +186,9 @@ Release: release:{}[]
193 ); 186 );
194 187
195 let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n)); 188 let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
196 fs::write(&path, &contents)?; 189 fs2::write(&path, &contents)?;
190
191 fs2::copy(project_root().join("./docs/user/readme.adoc"), website_root.join("manual.adoc"))?;
197 192
198 Ok(()) 193 Ok(())
199} 194}
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index 7ca727bde..a7dffe2cc 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -93,8 +93,9 @@ FLAGS:
93 run_pre_cache() 93 run_pre_cache()
94 } 94 }
95 "release" => { 95 "release" => {
96 let dry_run = args.contains("--dry-run");
96 args.finish()?; 97 args.finish()?;
97 run_release() 98 run_release(dry_run)
98 } 99 }
99 _ => { 100 _ => {
100 eprintln!( 101 eprintln!(
diff --git a/xtask/src/not_bash.rs b/xtask/src/not_bash.rs
new file mode 100644
index 000000000..3e30e7279
--- /dev/null
+++ b/xtask/src/not_bash.rs
@@ -0,0 +1,165 @@
1//! A bad shell -- a small cross-platform module for writing glue code
2use std::{
3 cell::RefCell,
4 env,
5 ffi::OsStr,
6 fs,
7 path::{Path, PathBuf},
8 process::{Command, Stdio},
9};
10
11use anyhow::{bail, Context, Result};
12
13pub mod fs2 {
14 use std::{fs, path::Path};
15
16 use anyhow::{Context, Result};
17
18 pub fn read_dir<P: AsRef<Path>>(path: P) -> Result<fs::ReadDir> {
19 let path = path.as_ref();
20 fs::read_dir(path).with_context(|| format!("Failed to read {}", path.display()))
21 }
22
23 pub fn write<P: AsRef<Path>, C: AsRef<[u8]>>(path: P, contents: C) -> Result<()> {
24 let path = path.as_ref();
25 fs::write(path, contents).with_context(|| format!("Failed to write {}", path.display()))
26 }
27
28 pub fn copy<P: AsRef<Path>, Q: AsRef<Path>>(from: P, to: Q) -> Result<u64> {
29 let from = from.as_ref();
30 let to = to.as_ref();
31 fs::copy(from, to)
32 .with_context(|| format!("Failed to copy {} to {}", from.display(), to.display()))
33 }
34
35 pub fn remove_file<P: AsRef<Path>>(path: P) -> Result<()> {
36 let path = path.as_ref();
37 fs::remove_file(path).with_context(|| format!("Failed to remove file {}", path.display()))
38 }
39
40 pub fn remove_dir_all<P: AsRef<Path>>(path: P) -> Result<()> {
41 let path = path.as_ref();
42 fs::remove_dir_all(path).with_context(|| format!("Failed to remove dir {}", path.display()))
43 }
44}
45
46macro_rules! _run {
47 ($($expr:expr),*) => {
48 run!($($expr),*; echo = true)
49 };
50 ($($expr:expr),* ; echo = $echo:expr) => {
51 $crate::not_bash::run_process(format!($($expr),*), $echo)
52 };
53}
54pub(crate) use _run as run;
55
56pub struct Pushd {
57 _p: (),
58}
59
60pub fn pushd(path: impl Into<PathBuf>) -> Pushd {
61 Env::with(|env| env.pushd(path.into()));
62 Pushd { _p: () }
63}
64
65impl Drop for Pushd {
66 fn drop(&mut self) {
67 Env::with(|env| env.popd())
68 }
69}
70
71pub fn rm(glob: &str) -> Result<()> {
72 let cwd = Env::with(|env| env.cwd());
73 ls(glob)?.into_iter().try_for_each(|it| fs::remove_file(cwd.join(it)))?;
74 Ok(())
75}
76
77pub fn rm_rf(path: impl AsRef<Path>) -> Result<()> {
78 let path = path.as_ref();
79 if path.is_file() {
80 fs2::remove_file(path)
81 } else {
82 fs2::remove_dir_all(path)
83 }
84}
85
86pub fn ls(glob: &str) -> Result<Vec<PathBuf>> {
87 let cwd = Env::with(|env| env.cwd());
88 let mut res = Vec::new();
89 for entry in fs::read_dir(&cwd)? {
90 let entry = entry?;
91 if matches(&entry.file_name(), glob) {
92 let path = entry.path();
93 let path = path.strip_prefix(&cwd).unwrap();
94 res.push(path.to_path_buf())
95 }
96 }
97 return Ok(res);
98
99 fn matches(file_name: &OsStr, glob: &str) -> bool {
100 assert!(glob.starts_with('*'));
101 file_name.to_string_lossy().ends_with(&glob[1..])
102 }
103}
104
105#[doc(hidden)]
106pub fn run_process(cmd: String, echo: bool) -> Result<String> {
107 run_process_inner(&cmd, echo).with_context(|| format!("process `{}` failed", cmd))
108}
109
110fn run_process_inner(cmd: &str, echo: bool) -> Result<String> {
111 let cwd = Env::with(|env| env.cwd());
112 let mut args = shelx(cmd);
113 let binary = args.remove(0);
114
115 if echo {
116 println!("> {}", cmd)
117 }
118
119 let output = Command::new(binary)
120 .args(args)
121 .current_dir(cwd)
122 .stdin(Stdio::null())
123 .stderr(Stdio::inherit())
124 .output()?;
125 let stdout = String::from_utf8(output.stdout)?;
126
127 if echo {
128 print!("{}", stdout)
129 }
130
131 if !output.status.success() {
132 bail!("{}", output.status)
133 }
134
135 Ok(stdout.trim().to_string())
136}
137
138// FIXME: some real shell lexing here
139fn shelx(cmd: &str) -> Vec<String> {
140 cmd.split_whitespace().map(|it| it.to_string()).collect()
141}
142
143#[derive(Default)]
144struct Env {
145 pushd_stack: Vec<PathBuf>,
146}
147
148impl Env {
149 fn with<F: FnOnce(&mut Env) -> T, T>(f: F) -> T {
150 thread_local! {
151 static ENV: RefCell<Env> = Default::default();
152 }
153 ENV.with(|it| f(&mut *it.borrow_mut()))
154 }
155
156 fn pushd(&mut self, dir: PathBuf) {
157 self.pushd_stack.push(dir)
158 }
159 fn popd(&mut self) {
160 self.pushd_stack.pop().unwrap();
161 }
162 fn cwd(&self) -> PathBuf {
163 self.pushd_stack.last().cloned().unwrap_or_else(|| env::current_dir().unwrap())
164 }
165}
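The new not_bash module above replaces the old Cmd-based helpers: run! formats a command line, splits it on whitespace, spawns it in the directory tracked by a thread-local pushd stack, and returns trimmed stdout, while pushd, ls, rm, rm_rf and fs2 cover the remaining shell-style glue. Below is a minimal sketch of how these pieces compose; the function name and the exact commands are illustrative only, and it assumes a Unix-like environment where npm can be spawned directly (mirroring the cfg!(unix) branch of install_client).

    use anyhow::Result;
    use crate::not_bash::{ls, pushd, rm, run};

    // Illustrative only: package the VS Code extension with the not_bash helpers.
    fn package_extension() -> Result<()> {
        // The guard pushes "./editors/code" onto a thread-local directory stack
        // and pops it on drop, so the caller's working directory is untouched.
        let _dir = pushd("./editors/code");

        // run! formats the command, splits it on whitespace, runs it in the
        // pushd-ed directory, and returns the trimmed stdout on success.
        run!("npm --version")?;
        run!("npm install")?;

        // Remove stale packages (the glob must start with '*'), rebuild, and
        // grab the freshly produced .vsix from the current directory.
        rm("*.vsix")?;
        run!("npm run package")?;
        let vsix = ls("*.vsix")?.pop().unwrap();

        // `echo = false` suppresses printing of the command line and its output.
        let extensions = run!("code --list-extensions"; echo = false)?;
        println!("built {}, {} extensions installed", vsix.display(), extensions.lines().count());
        Ok(())
    }

Because the directory stack lives in a thread-local Env rather than in the process-wide working directory, nested pushd guards compose safely, and dropping a guard automatically restores the previous directory for subsequent commands.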
diff --git a/xtask/src/pre_commit.rs b/xtask/src/pre_commit.rs
index 1533f64dc..056f34acf 100644
--- a/xtask/src/pre_commit.rs
+++ b/xtask/src/pre_commit.rs
@@ -4,18 +4,18 @@ use std::{fs, path::PathBuf};
4 4
5use anyhow::{bail, Result}; 5use anyhow::{bail, Result};
6 6
7use crate::{cmd::run_with_output, project_root, run, run_rustfmt, Mode}; 7use crate::{not_bash::run, project_root, run_rustfmt, Mode};
8 8
9// FIXME: if there are changed `.ts` files, also reformat TypeScript (by 9// FIXME: if there are changed `.ts` files, also reformat TypeScript (by
10// shelling out to `npm fmt`). 10// shelling out to `npm fmt`).
11pub fn run_hook() -> Result<()> { 11pub fn run_hook() -> Result<()> {
12 run_rustfmt(Mode::Overwrite)?; 12 run_rustfmt(Mode::Overwrite)?;
13 13
14 let diff = run_with_output("git diff --diff-filter=MAR --name-only --cached", ".")?; 14 let diff = run!("git diff --diff-filter=MAR --name-only --cached")?;
15 15
16 let root = project_root(); 16 let root = project_root();
17 for line in diff.lines() { 17 for line in diff.lines() {
18 run(&format!("git update-index --add {}", root.join(line).to_string_lossy()), ".")?; 18 run!("git update-index --add {}", root.join(line).display())?;
19 } 19 }
20 20
21 Ok(()) 21 Ok(())