Diffstat (limited to 'crates/ra_lsp_server')
25 files changed, 0 insertions, 5833 deletions
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml
deleted file mode 100644
index 151ca3da5..000000000
--- a/crates/ra_lsp_server/Cargo.toml
+++ /dev/null
@@ -1,58 +0,0 @@ | |||
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_lsp_server" | ||
4 | version = "0.1.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | autobins = false | ||
7 | |||
8 | [lib] | ||
9 | doctest = false | ||
10 | |||
11 | [[bin]] | ||
12 | name = "ra_lsp_server" | ||
13 | path = "./src/bin/main.rs" | ||
14 | |||
15 | [dependencies] | ||
16 | anyhow = "1.0" | ||
17 | crossbeam-channel = "0.4" | ||
18 | either = "1.5" | ||
19 | env_logger = { version = "0.7.1", default-features = false } | ||
20 | globset = "0.4.4" | ||
21 | itertools = "0.8.0" | ||
22 | jod-thread = "0.1.0" | ||
23 | log = "0.4.3" | ||
24 | lsp-types = { version = "0.70.0", features = ["proposed"] } | ||
25 | parking_lot = "0.10.0" | ||
26 | pico-args = "0.3.0" | ||
27 | rand = { version = "0.7.0", features = ["small_rng"] } | ||
28 | relative-path = "1.0.0" | ||
29 | rustc-hash = "1.0" | ||
30 | serde = { version = "1.0.83", features = ["derive"] } | ||
31 | serde_json = "1.0.34" | ||
32 | threadpool = "1.7.1" | ||
33 | |||
34 | lsp-server = "0.3.0" | ||
35 | ra_cargo_watch = { path = "../ra_cargo_watch" } | ||
36 | ra_ide = { path = "../ra_ide" } | ||
37 | ra_prof = { path = "../ra_prof" } | ||
38 | ra_project_model = { path = "../ra_project_model" } | ||
39 | ra_syntax = { path = "../ra_syntax" } | ||
40 | ra_text_edit = { path = "../ra_text_edit" } | ||
41 | ra_vfs = "0.5.0" | ||
42 | |||
43 | # This should only be used in CLI | ||
44 | ra_db = { path = "../ra_db" } | ||
45 | hir = { path = "../ra_hir", package = "ra_hir" } | ||
46 | hir_def = { path = "../ra_hir_def", package = "ra_hir_def" } | ||
47 | hir_ty = { path = "../ra_hir_ty", package = "ra_hir_ty" } | ||
48 | |||
49 | |||
50 | [target.'cfg(windows)'.dependencies] | ||
51 | winapi = "0.3" | ||
52 | |||
53 | [dev-dependencies] | ||
54 | tempfile = "3" | ||
55 | test_utils = { path = "../test_utils" } | ||
56 | |||
57 | [features] | ||
58 | jemalloc = [ "ra_prof/jemalloc" ] | ||
diff --git a/crates/ra_lsp_server/build.rs b/crates/ra_lsp_server/build.rs
deleted file mode 100644
index 05f9772c0..000000000
--- a/crates/ra_lsp_server/build.rs
+++ /dev/null
@@ -1,15 +0,0 @@ | |||
1 | //! Embeds the git hash into the `--version` output. | ||
2 | |||
3 | use std::process::Command; | ||
4 | |||
5 | fn main() { | ||
6 | let rev = rev().unwrap_or_else(|| "???????".to_string()); | ||
7 | println!("cargo:rustc-env=REV={}", rev) | ||
8 | } | ||
9 | |||
10 | fn rev() -> Option<String> { | ||
11 | let output = Command::new("git").args(&["rev-parse", "HEAD"]).output().ok()?; | ||
12 | let stdout = String::from_utf8(output.stdout).ok()?; | ||
13 | let short_hash = stdout.get(0..7)?; | ||
14 | Some(short_hash.to_owned()) | ||
15 | } | ||
diff --git a/crates/ra_lsp_server/src/bin/args.rs b/crates/ra_lsp_server/src/bin/args.rs
deleted file mode 100644
index 3890fe13a..000000000
--- a/crates/ra_lsp_server/src/bin/args.rs
+++ /dev/null
@@ -1,242 +0,0 @@ | |||
1 | //! Command-line parsing for rust-analyzer. | ||
2 | //! | ||
3 | //! If run without a subcommand, we start the LSP server loop. With a subcommand, we do | ||
4 | //! one-time batch processing. | ||
5 | |||
6 | use anyhow::{bail, Result}; | ||
7 | use pico_args::Arguments; | ||
8 | use ra_lsp_server::cli::{BenchWhat, Position, Verbosity}; | ||
9 | |||
10 | use std::{fmt::Write, path::PathBuf}; | ||
11 | |||
12 | pub(crate) struct Args { | ||
13 | pub(crate) verbosity: Verbosity, | ||
14 | pub(crate) command: Command, | ||
15 | } | ||
16 | |||
17 | pub(crate) enum Command { | ||
18 | Parse { | ||
19 | no_dump: bool, | ||
20 | }, | ||
21 | Symbols, | ||
22 | Highlight { | ||
23 | rainbow: bool, | ||
24 | }, | ||
25 | Stats { | ||
26 | randomize: bool, | ||
27 | memory_usage: bool, | ||
28 | only: Option<String>, | ||
29 | with_deps: bool, | ||
30 | path: PathBuf, | ||
31 | }, | ||
32 | Bench { | ||
33 | path: PathBuf, | ||
34 | what: BenchWhat, | ||
35 | }, | ||
36 | RunServer, | ||
37 | Version, | ||
38 | } | ||
39 | |||
40 | impl Args { | ||
41 | pub(crate) fn parse() -> Result<Result<Args, HelpPrinted>> { | ||
42 | let mut matches = Arguments::from_env(); | ||
43 | |||
44 | if matches.contains("--version") { | ||
45 | matches.finish().or_else(handle_extra_flags)?; | ||
46 | return Ok(Ok(Args { verbosity: Verbosity::Normal, command: Command::Version })); | ||
47 | } | ||
48 | |||
49 | let verbosity = match ( | ||
50 | matches.contains(["-vv", "--spammy"]), | ||
51 | matches.contains(["-v", "--verbose"]), | ||
52 | matches.contains(["-q", "--quiet"]), | ||
53 | ) { | ||
54 | (true, _, true) => bail!("Invalid flags: -q conflicts with -vv"), | ||
55 | (true, _, false) => Verbosity::Spammy, | ||
56 | (false, false, false) => Verbosity::Normal, | ||
57 | (false, false, true) => Verbosity::Quiet, | ||
58 | (false, true, false) => Verbosity::Verbose, | ||
59 | (false, true, true) => bail!("Invalid flags: -q conflicts with -v"), | ||
60 | }; | ||
61 | |||
62 | let subcommand = match matches.subcommand()? { | ||
63 | Some(it) => it, | ||
64 | None => { | ||
65 | matches.finish().or_else(handle_extra_flags)?; | ||
66 | return Ok(Ok(Args { verbosity, command: Command::RunServer })); | ||
67 | } | ||
68 | }; | ||
69 | let command = match subcommand.as_str() { | ||
70 | "parse" => { | ||
71 | if matches.contains(["-h", "--help"]) { | ||
72 | eprintln!( | ||
73 | "\ | ||
74 | ra-cli-parse | ||
75 | |||
76 | USAGE: | ||
77 | ra_lsp_server parse [FLAGS] | ||
78 | |||
79 | FLAGS: | ||
80 | -h, --help Prints help information | ||
81 | --no-dump" | ||
82 | ); | ||
83 | return Ok(Err(HelpPrinted)); | ||
84 | } | ||
85 | |||
86 | let no_dump = matches.contains("--no-dump"); | ||
87 | matches.finish().or_else(handle_extra_flags)?; | ||
88 | Command::Parse { no_dump } | ||
89 | } | ||
90 | "symbols" => { | ||
91 | if matches.contains(["-h", "--help"]) { | ||
92 | eprintln!( | ||
93 | "\ | ||
94 | ra-cli-symbols | ||
95 | |||
96 | USAGE: | ||
97 | ra_lsp_server symbols [FLAGS] | ||
98 | |||
99 | FLAGS: | ||
100 | -h, --help Prints help information" | ||
101 | ); | ||
102 | return Ok(Err(HelpPrinted)); | ||
103 | } | ||
104 | |||
105 | matches.finish().or_else(handle_extra_flags)?; | ||
106 | |||
107 | Command::Symbols | ||
108 | } | ||
109 | "highlight" => { | ||
110 | if matches.contains(["-h", "--help"]) { | ||
111 | eprintln!( | ||
112 | "\ | ||
113 | ra-cli-highlight | ||
114 | |||
115 | USAGE: | ||
116 | ra_lsp_server highlight [FLAGS] | ||
117 | |||
118 | FLAGS: | ||
119 | -h, --help Prints help information | ||
120 | -r, --rainbow" | ||
121 | ); | ||
122 | return Ok(Err(HelpPrinted)); | ||
123 | } | ||
124 | |||
125 | let rainbow = matches.contains(["-r", "--rainbow"]); | ||
126 | matches.finish().or_else(handle_extra_flags)?; | ||
127 | Command::Highlight { rainbow } | ||
128 | } | ||
129 | "analysis-stats" => { | ||
130 | if matches.contains(["-h", "--help"]) { | ||
131 | eprintln!( | ||
132 | "\ | ||
133 | ra-cli-analysis-stats | ||
134 | |||
135 | USAGE: | ||
136 | ra_lsp_server analysis-stats [FLAGS] [OPTIONS] [PATH] | ||
137 | |||
138 | FLAGS: | ||
139 | -h, --help Prints help information | ||
140 | --memory-usage | ||
141 | -v, --verbose | ||
142 | -q, --quiet | ||
143 | |||
144 | OPTIONS: | ||
145 | -o <ONLY> | ||
146 | |||
147 | ARGS: | ||
148 | <PATH>" | ||
149 | ); | ||
150 | return Ok(Err(HelpPrinted)); | ||
151 | } | ||
152 | |||
153 | let randomize = matches.contains("--randomize"); | ||
154 | let memory_usage = matches.contains("--memory-usage"); | ||
155 | let only: Option<String> = matches.opt_value_from_str(["-o", "--only"])?; | ||
156 | let with_deps: bool = matches.contains("--with-deps"); | ||
157 | let path = { | ||
158 | let mut trailing = matches.free()?; | ||
159 | if trailing.len() != 1 { | ||
160 | bail!("Invalid flags"); | ||
161 | } | ||
162 | trailing.pop().unwrap().into() | ||
163 | }; | ||
164 | |||
165 | Command::Stats { randomize, memory_usage, only, with_deps, path } | ||
166 | } | ||
167 | "analysis-bench" => { | ||
168 | if matches.contains(["-h", "--help"]) { | ||
169 | eprintln!( | ||
170 | "\ | ||
171 | ra_lsp_server-analysis-bench | ||
172 | |||
173 | USAGE: | ||
174 | ra_lsp_server analysis-bench [FLAGS] [OPTIONS] [PATH] | ||
175 | |||
176 | FLAGS: | ||
177 | -h, --help Prints help information | ||
178 | -v, --verbose | ||
179 | |||
180 | OPTIONS: | ||
181 | --complete <PATH:LINE:COLUMN> Compute completions at this location | ||
182 | --highlight <PATH> Highlight this file | ||
183 | |||
184 | ARGS: | ||
185 | <PATH> Project to analyse" | ||
186 | ); | ||
187 | return Ok(Err(HelpPrinted)); | ||
188 | } | ||
189 | |||
190 | let path: PathBuf = matches.opt_value_from_str("--path")?.unwrap_or_default(); | ||
191 | let highlight_path: Option<String> = matches.opt_value_from_str("--highlight")?; | ||
192 | let complete_path: Option<Position> = matches.opt_value_from_str("--complete")?; | ||
193 | let goto_def_path: Option<Position> = matches.opt_value_from_str("--goto-def")?; | ||
194 | let what = match (highlight_path, complete_path, goto_def_path) { | ||
195 | (Some(path), None, None) => BenchWhat::Highlight { path: path.into() }, | ||
196 | (None, Some(position), None) => BenchWhat::Complete(position), | ||
197 | (None, None, Some(position)) => BenchWhat::GotoDef(position), | ||
198 | _ => panic!( | ||
199 | "exactly one of `--highlight`, `--complete` or `--goto-def` must be set" | ||
200 | ), | ||
201 | }; | ||
202 | Command::Bench { path, what } | ||
203 | } | ||
204 | _ => { | ||
205 | eprintln!( | ||
206 | "\ | ||
207 | ra-cli | ||
208 | |||
209 | USAGE: | ||
210 | ra_lsp_server <SUBCOMMAND> | ||
211 | |||
212 | FLAGS: | ||
213 | -h, --help Prints help information | ||
214 | |||
215 | SUBCOMMANDS: | ||
216 | analysis-bench | ||
217 | analysis-stats | ||
218 | highlight | ||
219 | parse | ||
220 | symbols" | ||
221 | ); | ||
222 | return Ok(Err(HelpPrinted)); | ||
223 | } | ||
224 | }; | ||
225 | Ok(Ok(Args { verbosity, command })) | ||
226 | } | ||
227 | } | ||
228 | |||
229 | pub(crate) struct HelpPrinted; | ||
230 | |||
231 | fn handle_extra_flags(e: pico_args::Error) -> Result<()> { | ||
232 | if let pico_args::Error::UnusedArgsLeft(flags) = e { | ||
233 | let mut invalid_flags = String::new(); | ||
234 | for flag in flags { | ||
235 | write!(&mut invalid_flags, "{}, ", flag)?; | ||
236 | } | ||
237 | let (invalid_flags, _) = invalid_flags.split_at(invalid_flags.len() - 2); | ||
238 | bail!("Invalid flags: {}", invalid_flags); | ||
239 | } else { | ||
240 | bail!(e); | ||
241 | } | ||
242 | } | ||
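
The parser above is hand-rolled on top of `pico-args`: flags are probed with `contains`, optional values with `opt_value_from_str`, and leftovers are collected as free arguments. A stripped-down sketch of the same pattern, assuming pico-args 0.3:

    use pico_args::Arguments;

    fn main() -> Result<(), pico_args::Error> {
        let mut args = Arguments::from_env();
        // Probing a flag consumes it if present, in any order.
        let verbose = args.contains(["-v", "--verbose"]);
        let only: Option<String> = args.opt_value_from_str(["-o", "--only"])?;
        // Whatever remains is treated as positional arguments.
        let free = args.free()?;
        println!("verbose={} only={:?} free={:?}", verbose, only, free);
        Ok(())
    }
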
diff --git a/crates/ra_lsp_server/src/bin/main.rs b/crates/ra_lsp_server/src/bin/main.rs
deleted file mode 100644
index e25d54a0d..000000000
--- a/crates/ra_lsp_server/src/bin/main.rs
+++ /dev/null
@@ -1,98 +0,0 @@ | |||
1 | //! Driver for rust-analyzer. | ||
2 | //! | ||
3 | //! Based on cli flags, either spawns an LSP server, or runs a batch analysis | ||
4 | mod args; | ||
5 | |||
6 | use lsp_server::Connection; | ||
7 | use ra_lsp_server::{cli, from_json, show_message, Result, ServerConfig}; | ||
8 | use ra_prof; | ||
9 | |||
10 | use crate::args::HelpPrinted; | ||
11 | |||
12 | fn main() -> Result<()> { | ||
13 | setup_logging()?; | ||
14 | let args = match args::Args::parse()? { | ||
15 | Ok(it) => it, | ||
16 | Err(HelpPrinted) => return Ok(()), | ||
17 | }; | ||
18 | match args.command { | ||
19 | args::Command::Parse { no_dump } => cli::parse(no_dump)?, | ||
20 | args::Command::Symbols => cli::symbols()?, | ||
21 | args::Command::Highlight { rainbow } => cli::highlight(rainbow)?, | ||
22 | args::Command::Stats { randomize, memory_usage, only, with_deps, path } => { | ||
23 | cli::analysis_stats( | ||
24 | args.verbosity, | ||
25 | memory_usage, | ||
26 | path.as_ref(), | ||
27 | only.as_ref().map(String::as_ref), | ||
28 | with_deps, | ||
29 | randomize, | ||
30 | )? | ||
31 | } | ||
32 | |||
33 | args::Command::Bench { path, what } => { | ||
34 | cli::analysis_bench(args.verbosity, path.as_ref(), what)? | ||
35 | } | ||
36 | |||
37 | args::Command::RunServer => run_server()?, | ||
38 | args::Command::Version => println!("rust-analyzer {}", env!("REV")), | ||
39 | } | ||
40 | Ok(()) | ||
41 | } | ||
42 | |||
43 | fn setup_logging() -> Result<()> { | ||
44 | std::env::set_var("RUST_BACKTRACE", "short"); | ||
45 | env_logger::try_init()?; | ||
46 | ra_prof::init(); | ||
47 | Ok(()) | ||
48 | } | ||
49 | |||
50 | fn run_server() -> Result<()> { | ||
51 | log::info!("lifecycle: server started"); | ||
52 | |||
53 | let (connection, io_threads) = Connection::stdio(); | ||
54 | let server_capabilities = serde_json::to_value(ra_lsp_server::server_capabilities()).unwrap(); | ||
55 | |||
56 | let initialize_params = connection.initialize(server_capabilities)?; | ||
57 | let initialize_params = | ||
58 | from_json::<lsp_types::InitializeParams>("InitializeParams", initialize_params)?; | ||
59 | |||
60 | if let Some(client_info) = initialize_params.client_info { | ||
61 | log::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default()); | ||
62 | } | ||
63 | |||
64 | let cwd = std::env::current_dir()?; | ||
65 | let root = initialize_params.root_uri.and_then(|it| it.to_file_path().ok()).unwrap_or(cwd); | ||
66 | |||
67 | let workspace_roots = initialize_params | ||
68 | .workspace_folders | ||
69 | .map(|workspaces| { | ||
70 | workspaces.into_iter().filter_map(|it| it.uri.to_file_path().ok()).collect::<Vec<_>>() | ||
71 | }) | ||
72 | .filter(|workspaces| !workspaces.is_empty()) | ||
73 | .unwrap_or_else(|| vec![root]); | ||
74 | |||
75 | let server_config = initialize_params | ||
76 | .initialization_options | ||
77 | .and_then(|v| { | ||
78 | from_json::<ServerConfig>("config", v) | ||
79 | .map_err(|e| { | ||
80 | log::error!("{}", e); | ||
81 | show_message(lsp_types::MessageType::Error, e.to_string(), &connection.sender); | ||
82 | }) | ||
83 | .ok() | ||
84 | }) | ||
85 | .unwrap_or_default(); | ||
86 | |||
87 | ra_lsp_server::main_loop( | ||
88 | workspace_roots, | ||
89 | initialize_params.capabilities, | ||
90 | server_config, | ||
91 | connection, | ||
92 | )?; | ||
93 | |||
94 | log::info!("shutting down IO..."); | ||
95 | io_threads.join()?; | ||
96 | log::info!("... IO is down"); | ||
97 | Ok(()) | ||
98 | } | ||
diff --git a/crates/ra_lsp_server/src/caps.rs b/crates/ra_lsp_server/src/caps.rs
deleted file mode 100644
index c4711076c..000000000
--- a/crates/ra_lsp_server/src/caps.rs
+++ /dev/null
@@ -1,62 +0,0 @@ | |||
1 | //! Advertises the capabilities of the LSP server. | ||
2 | |||
3 | use lsp_types::{ | ||
4 | CallHierarchyServerCapability, CodeActionProviderCapability, CodeLensOptions, | ||
5 | CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability, | ||
6 | ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions, | ||
7 | SelectionRangeProviderCapability, ServerCapabilities, SignatureHelpOptions, | ||
8 | TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions, | ||
9 | TypeDefinitionProviderCapability, WorkDoneProgressOptions, | ||
10 | }; | ||
11 | |||
12 | pub fn server_capabilities() -> ServerCapabilities { | ||
13 | ServerCapabilities { | ||
14 | text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions { | ||
15 | open_close: Some(true), | ||
16 | change: Some(TextDocumentSyncKind::Full), | ||
17 | will_save: None, | ||
18 | will_save_wait_until: None, | ||
19 | save: Some(SaveOptions::default()), | ||
20 | })), | ||
21 | hover_provider: Some(true), | ||
22 | completion_provider: Some(CompletionOptions { | ||
23 | resolve_provider: None, | ||
24 | trigger_characters: Some(vec![":".to_string(), ".".to_string()]), | ||
25 | work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, | ||
26 | }), | ||
27 | signature_help_provider: Some(SignatureHelpOptions { | ||
28 | trigger_characters: Some(vec!["(".to_string(), ",".to_string()]), | ||
29 | retrigger_characters: None, | ||
30 | work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, | ||
31 | }), | ||
32 | declaration_provider: None, | ||
33 | definition_provider: Some(true), | ||
34 | type_definition_provider: Some(TypeDefinitionProviderCapability::Simple(true)), | ||
35 | implementation_provider: Some(ImplementationProviderCapability::Simple(true)), | ||
36 | references_provider: Some(true), | ||
37 | document_highlight_provider: Some(true), | ||
38 | document_symbol_provider: Some(true), | ||
39 | workspace_symbol_provider: Some(true), | ||
40 | code_action_provider: Some(CodeActionProviderCapability::Simple(true)), | ||
41 | code_lens_provider: Some(CodeLensOptions { resolve_provider: Some(true) }), | ||
42 | document_formatting_provider: Some(true), | ||
43 | document_range_formatting_provider: None, | ||
44 | document_on_type_formatting_provider: Some(DocumentOnTypeFormattingOptions { | ||
45 | first_trigger_character: "=".to_string(), | ||
46 | more_trigger_character: Some(vec![".".to_string(), ">".to_string()]), | ||
47 | }), | ||
48 | selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)), | ||
49 | semantic_highlighting: None, | ||
50 | folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)), | ||
51 | rename_provider: Some(RenameProviderCapability::Options(RenameOptions { | ||
52 | prepare_provider: Some(true), | ||
53 | work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None }, | ||
54 | })), | ||
55 | document_link_provider: None, | ||
56 | color_provider: None, | ||
57 | execute_command_provider: None, | ||
58 | workspace: None, | ||
59 | call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)), | ||
60 | experimental: Default::default(), | ||
61 | } | ||
62 | } | ||
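
Serialized through `lsp-types`, the struct above becomes the JSON capabilities object returned from `initialize`. A small sketch of what a couple of those fields look like on the wire (assumes `serde_json` and the `server_capabilities` function above):

    fn example() {
        let caps = serde_json::to_value(server_capabilities()).unwrap();
        // Option fields set to Some(...) show up as plain JSON values.
        assert_eq!(caps["hoverProvider"], serde_json::json!(true));
        assert_eq!(
            caps["completionProvider"]["triggerCharacters"],
            serde_json::json!([":", "."])
        );
    }
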
diff --git a/crates/ra_lsp_server/src/cargo_target_spec.rs b/crates/ra_lsp_server/src/cargo_target_spec.rs
deleted file mode 100644
index 53751aafb..000000000
--- a/crates/ra_lsp_server/src/cargo_target_spec.rs
+++ /dev/null
@@ -1,118 +0,0 @@ | |||
1 | //! See `CargoTargetSpec` | ||
2 | |||
3 | use ra_ide::{FileId, RunnableKind, TestId}; | ||
4 | use ra_project_model::{self, ProjectWorkspace, TargetKind}; | ||
5 | |||
6 | use crate::{world::WorldSnapshot, Result}; | ||
7 | |||
8 | /// Abstract representation of Cargo target. | ||
9 | /// | ||
10 | /// We use it to cook up the set of cli args we need to pass to Cargo to | ||
11 | /// build/test/run the target. | ||
12 | pub(crate) struct CargoTargetSpec { | ||
13 | pub(crate) package: String, | ||
14 | pub(crate) target: String, | ||
15 | pub(crate) target_kind: TargetKind, | ||
16 | } | ||
17 | |||
18 | impl CargoTargetSpec { | ||
19 | pub(crate) fn runnable_args( | ||
20 | spec: Option<CargoTargetSpec>, | ||
21 | kind: &RunnableKind, | ||
22 | ) -> Result<Vec<String>> { | ||
23 | let mut res = Vec::new(); | ||
24 | match kind { | ||
25 | RunnableKind::Test { test_id } => { | ||
26 | res.push("test".to_string()); | ||
27 | if let Some(spec) = spec { | ||
28 | spec.push_to(&mut res); | ||
29 | } | ||
30 | res.push("--".to_string()); | ||
31 | res.push(test_id.to_string()); | ||
32 | if let TestId::Path(_) = test_id { | ||
33 | res.push("--exact".to_string()); | ||
34 | } | ||
35 | res.push("--nocapture".to_string()); | ||
36 | } | ||
37 | RunnableKind::TestMod { path } => { | ||
38 | res.push("test".to_string()); | ||
39 | if let Some(spec) = spec { | ||
40 | spec.push_to(&mut res); | ||
41 | } | ||
42 | res.push("--".to_string()); | ||
43 | res.push(path.to_string()); | ||
44 | res.push("--nocapture".to_string()); | ||
45 | } | ||
46 | RunnableKind::Bench { test_id } => { | ||
47 | res.push("bench".to_string()); | ||
48 | if let Some(spec) = spec { | ||
49 | spec.push_to(&mut res); | ||
50 | } | ||
51 | res.push("--".to_string()); | ||
52 | res.push(test_id.to_string()); | ||
53 | if let TestId::Path(_) = test_id { | ||
54 | res.push("--exact".to_string()); | ||
55 | } | ||
56 | res.push("--nocapture".to_string()); | ||
57 | } | ||
58 | RunnableKind::Bin => { | ||
59 | res.push("run".to_string()); | ||
60 | if let Some(spec) = spec { | ||
61 | spec.push_to(&mut res); | ||
62 | } | ||
63 | } | ||
64 | } | ||
65 | Ok(res) | ||
66 | } | ||
67 | |||
68 | pub(crate) fn for_file( | ||
69 | world: &WorldSnapshot, | ||
70 | file_id: FileId, | ||
71 | ) -> Result<Option<CargoTargetSpec>> { | ||
72 | let &crate_id = match world.analysis().crate_for(file_id)?.first() { | ||
73 | Some(crate_id) => crate_id, | ||
74 | None => return Ok(None), | ||
75 | }; | ||
76 | let file_id = world.analysis().crate_root(crate_id)?; | ||
77 | let path = world.file_id_to_path(file_id); | ||
78 | let res = world.workspaces.iter().find_map(|ws| match ws { | ||
79 | ProjectWorkspace::Cargo { cargo, .. } => { | ||
80 | let tgt = cargo.target_by_root(&path)?; | ||
81 | Some(CargoTargetSpec { | ||
82 | package: tgt.package(&cargo).name(&cargo).to_string(), | ||
83 | target: tgt.name(&cargo).to_string(), | ||
84 | target_kind: tgt.kind(&cargo), | ||
85 | }) | ||
86 | } | ||
87 | ProjectWorkspace::Json { .. } => None, | ||
88 | }); | ||
89 | Ok(res) | ||
90 | } | ||
91 | |||
92 | pub(crate) fn push_to(self, buf: &mut Vec<String>) { | ||
93 | buf.push("--package".to_string()); | ||
94 | buf.push(self.package); | ||
95 | match self.target_kind { | ||
96 | TargetKind::Bin => { | ||
97 | buf.push("--bin".to_string()); | ||
98 | buf.push(self.target); | ||
99 | } | ||
100 | TargetKind::Test => { | ||
101 | buf.push("--test".to_string()); | ||
102 | buf.push(self.target); | ||
103 | } | ||
104 | TargetKind::Bench => { | ||
105 | buf.push("--bench".to_string()); | ||
106 | buf.push(self.target); | ||
107 | } | ||
108 | TargetKind::Example => { | ||
109 | buf.push("--example".to_string()); | ||
110 | buf.push(self.target); | ||
111 | } | ||
112 | TargetKind::Lib => { | ||
113 | buf.push("--lib".to_string()); | ||
114 | } | ||
115 | TargetKind::Other => (), | ||
116 | } | ||
117 | } | ||
118 | } | ||
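
Putting `runnable_args` and `push_to` together: for a test runnable in a library target, the Cargo arguments come out in the order shown below. A sketch with made-up values (`RunnableKind` and `TestId` are the `ra_ide` types imported at the top of this file; `TestId::Path` is assumed to wrap the fully qualified test path):

    fn example() -> Result<Vec<String>> {
        let spec = CargoTargetSpec {
            package: "ra_lsp_server".to_string(),
            target: "ra_lsp_server".to_string(),
            target_kind: TargetKind::Lib,
        };
        let args = CargoTargetSpec::runnable_args(
            Some(spec),
            &RunnableKind::Test { test_id: TestId::Path("tests::it_works".to_string()) },
        )?;
        // => ["test", "--package", "ra_lsp_server", "--lib", "--",
        //     "tests::it_works", "--exact", "--nocapture"]
        Ok(args)
    }
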
diff --git a/crates/ra_lsp_server/src/cli.rs b/crates/ra_lsp_server/src/cli.rs
deleted file mode 100644
index c9738d101..000000000
--- a/crates/ra_lsp_server/src/cli.rs
+++ /dev/null
@@ -1,75 +0,0 @@ | |||
1 | //! Various batch processing tasks, intended primarily for debugging. | ||
2 | |||
3 | mod load_cargo; | ||
4 | mod analysis_stats; | ||
5 | mod analysis_bench; | ||
6 | mod progress_report; | ||
7 | |||
8 | use std::io::Read; | ||
9 | |||
10 | use anyhow::Result; | ||
11 | use ra_ide::{file_structure, Analysis}; | ||
12 | use ra_prof::profile; | ||
13 | use ra_syntax::{AstNode, SourceFile}; | ||
14 | |||
15 | #[derive(Clone, Copy)] | ||
16 | pub enum Verbosity { | ||
17 | Spammy, | ||
18 | Verbose, | ||
19 | Normal, | ||
20 | Quiet, | ||
21 | } | ||
22 | |||
23 | impl Verbosity { | ||
24 | pub fn is_verbose(self) -> bool { | ||
25 | match self { | ||
26 | Verbosity::Verbose | Verbosity::Spammy => true, | ||
27 | _ => false, | ||
28 | } | ||
29 | } | ||
30 | pub fn is_spammy(self) -> bool { | ||
31 | match self { | ||
32 | Verbosity::Spammy => true, | ||
33 | _ => false, | ||
34 | } | ||
35 | } | ||
36 | } | ||
37 | |||
38 | pub fn parse(no_dump: bool) -> Result<()> { | ||
39 | let _p = profile("parsing"); | ||
40 | let file = file()?; | ||
41 | if !no_dump { | ||
42 | println!("{:#?}", file.syntax()); | ||
43 | } | ||
44 | std::mem::forget(file); | ||
45 | Ok(()) | ||
46 | } | ||
47 | |||
48 | pub fn symbols() -> Result<()> { | ||
49 | let file = file()?; | ||
50 | for s in file_structure(&file) { | ||
51 | println!("{:?}", s); | ||
52 | } | ||
53 | Ok(()) | ||
54 | } | ||
55 | |||
56 | pub fn highlight(rainbow: bool) -> Result<()> { | ||
57 | let (analysis, file_id) = Analysis::from_single_file(read_stdin()?); | ||
58 | let html = analysis.highlight_as_html(file_id, rainbow).unwrap(); | ||
59 | println!("{}", html); | ||
60 | Ok(()) | ||
61 | } | ||
62 | |||
63 | pub use analysis_bench::{analysis_bench, BenchWhat, Position}; | ||
64 | pub use analysis_stats::analysis_stats; | ||
65 | |||
66 | fn file() -> Result<SourceFile> { | ||
67 | let text = read_stdin()?; | ||
68 | Ok(SourceFile::parse(&text).tree()) | ||
69 | } | ||
70 | |||
71 | fn read_stdin() -> Result<String> { | ||
72 | let mut buff = String::new(); | ||
73 | std::io::stdin().read_to_string(&mut buff)?; | ||
74 | Ok(buff) | ||
75 | } | ||
diff --git a/crates/ra_lsp_server/src/cli/analysis_bench.rs b/crates/ra_lsp_server/src/cli/analysis_bench.rs
deleted file mode 100644
index 91855e592..000000000
--- a/crates/ra_lsp_server/src/cli/analysis_bench.rs
+++ /dev/null
@@ -1,158 +0,0 @@ | |||
1 | //! Benchmark operations like highlighting or goto definition. | ||
2 | |||
3 | use std::{ | ||
4 | path::{Path, PathBuf}, | ||
5 | str::FromStr, | ||
6 | sync::Arc, | ||
7 | time::Instant, | ||
8 | }; | ||
9 | |||
10 | use anyhow::{format_err, Result}; | ||
11 | use ra_db::{ | ||
12 | salsa::{Database, Durability}, | ||
13 | FileId, SourceDatabaseExt, | ||
14 | }; | ||
15 | use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FilePosition, LineCol}; | ||
16 | |||
17 | use crate::cli::{load_cargo::load_cargo, Verbosity}; | ||
18 | |||
19 | pub enum BenchWhat { | ||
20 | Highlight { path: PathBuf }, | ||
21 | Complete(Position), | ||
22 | GotoDef(Position), | ||
23 | } | ||
24 | |||
25 | pub struct Position { | ||
26 | pub path: PathBuf, | ||
27 | pub line: u32, | ||
28 | pub column: u32, | ||
29 | } | ||
30 | |||
31 | impl FromStr for Position { | ||
32 | type Err = anyhow::Error; | ||
33 | fn from_str(s: &str) -> Result<Self> { | ||
34 | let (path_line, column) = rsplit_at_char(s, ':')?; | ||
35 | let (path, line) = rsplit_at_char(path_line, ':')?; | ||
36 | Ok(Position { path: path.into(), line: line.parse()?, column: column.parse()? }) | ||
37 | } | ||
38 | } | ||
39 | |||
40 | fn rsplit_at_char(s: &str, c: char) -> Result<(&str, &str)> { | ||
41 | let idx = s.rfind(c).ok_or_else(|| format_err!("no `{}` in {}", c, s))?; | ||
42 | Ok((&s[..idx], &s[idx + 1..])) | ||
43 | } | ||
44 | |||
45 | pub fn analysis_bench(verbosity: Verbosity, path: &Path, what: BenchWhat) -> Result<()> { | ||
46 | ra_prof::init(); | ||
47 | |||
48 | let start = Instant::now(); | ||
49 | eprint!("loading: "); | ||
50 | let (mut host, roots) = load_cargo(path)?; | ||
51 | let db = host.raw_database(); | ||
52 | eprintln!("{:?}\n", start.elapsed()); | ||
53 | |||
54 | let file_id = { | ||
55 | let path = match &what { | ||
56 | BenchWhat::Highlight { path } => path, | ||
57 | BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => &pos.path, | ||
58 | }; | ||
59 | let path = std::env::current_dir()?.join(path).canonicalize()?; | ||
60 | roots | ||
61 | .iter() | ||
62 | .find_map(|(source_root_id, project_root)| { | ||
63 | if project_root.is_member() { | ||
64 | for file_id in db.source_root(*source_root_id).walk() { | ||
65 | let rel_path = db.file_relative_path(file_id); | ||
66 | let abs_path = rel_path.to_path(project_root.path()); | ||
67 | if abs_path == path { | ||
68 | return Some(file_id); | ||
69 | } | ||
70 | } | ||
71 | } | ||
72 | None | ||
73 | }) | ||
74 | .ok_or_else(|| format_err!("Can't find {}", path.display()))? | ||
75 | }; | ||
76 | |||
77 | match &what { | ||
78 | BenchWhat::Highlight { .. } => { | ||
79 | let res = do_work(&mut host, file_id, |analysis| { | ||
80 | analysis.diagnostics(file_id).unwrap(); | ||
81 | analysis.highlight_as_html(file_id, false).unwrap() | ||
82 | }); | ||
83 | if verbosity.is_verbose() { | ||
84 | println!("\n{}", res); | ||
85 | } | ||
86 | } | ||
87 | BenchWhat::Complete(pos) | BenchWhat::GotoDef(pos) => { | ||
88 | let is_completion = match what { | ||
89 | BenchWhat::Complete(..) => true, | ||
90 | _ => false, | ||
91 | }; | ||
92 | |||
93 | let offset = host | ||
94 | .analysis() | ||
95 | .file_line_index(file_id)? | ||
96 | .offset(LineCol { line: pos.line - 1, col_utf16: pos.column }); | ||
97 | let file_position = FilePosition { file_id, offset }; | ||
98 | |||
99 | if is_completion { | ||
100 | let res = | ||
101 | do_work(&mut host, file_id, |analysis| analysis.completions(file_position)); | ||
102 | if verbosity.is_verbose() { | ||
103 | println!("\n{:#?}", res); | ||
104 | } | ||
105 | } else { | ||
106 | let res = | ||
107 | do_work(&mut host, file_id, |analysis| analysis.goto_definition(file_position)); | ||
108 | if verbosity.is_verbose() { | ||
109 | println!("\n{:#?}", res); | ||
110 | } | ||
111 | } | ||
112 | } | ||
113 | } | ||
114 | Ok(()) | ||
115 | } | ||
116 | |||
117 | fn do_work<F: Fn(&Analysis) -> T, T>(host: &mut AnalysisHost, file_id: FileId, work: F) -> T { | ||
118 | { | ||
119 | let start = Instant::now(); | ||
120 | eprint!("from scratch: "); | ||
121 | work(&host.analysis()); | ||
122 | eprintln!("{:?}", start.elapsed()); | ||
123 | } | ||
124 | { | ||
125 | let start = Instant::now(); | ||
126 | eprint!("no change: "); | ||
127 | work(&host.analysis()); | ||
128 | eprintln!("{:?}", start.elapsed()); | ||
129 | } | ||
130 | { | ||
131 | let start = Instant::now(); | ||
132 | eprint!("trivial change: "); | ||
133 | host.raw_database_mut().salsa_runtime_mut().synthetic_write(Durability::LOW); | ||
134 | work(&host.analysis()); | ||
135 | eprintln!("{:?}", start.elapsed()); | ||
136 | } | ||
137 | { | ||
138 | let start = Instant::now(); | ||
139 | eprint!("comment change: "); | ||
140 | { | ||
141 | let mut text = host.analysis().file_text(file_id).unwrap().to_string(); | ||
142 | text.push_str("\n/* Hello world */\n"); | ||
143 | let mut change = AnalysisChange::new(); | ||
144 | change.change_file(file_id, Arc::new(text)); | ||
145 | host.apply_change(change); | ||
146 | } | ||
147 | work(&host.analysis()); | ||
148 | eprintln!("{:?}", start.elapsed()); | ||
149 | } | ||
150 | { | ||
151 | let start = Instant::now(); | ||
152 | eprint!("const change: "); | ||
153 | host.raw_database_mut().salsa_runtime_mut().synthetic_write(Durability::HIGH); | ||
154 | let res = work(&host.analysis()); | ||
155 | eprintln!("{:?}", start.elapsed()); | ||
156 | res | ||
157 | } | ||
158 | } | ||
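
The `--complete` and `--goto-def` values are parsed by the `FromStr` impl above, which splits from the right on the last two colons (so the path itself may contain `:`). A small sketch, assuming the `Position` type from this file is in scope:

    fn example() -> anyhow::Result<()> {
        let pos: Position = "src/main.rs:12:8".parse()?;
        assert_eq!(pos.path, std::path::PathBuf::from("src/main.rs"));
        assert_eq!((pos.line, pos.column), (12, 8));
        Ok(())
    }
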
diff --git a/crates/ra_lsp_server/src/cli/analysis_stats.rs b/crates/ra_lsp_server/src/cli/analysis_stats.rs
deleted file mode 100644
index 99ab6e443..000000000
--- a/crates/ra_lsp_server/src/cli/analysis_stats.rs
+++ /dev/null
@@ -1,260 +0,0 @@ | |||
1 | //! Fully type-check project and print various stats, like the number of type | ||
2 | //! errors. | ||
3 | |||
4 | use std::{collections::HashSet, fmt::Write, path::Path, time::Instant}; | ||
5 | |||
6 | use hir::{ | ||
7 | db::{DefDatabase, HirDatabase}, | ||
8 | AssocItem, Crate, HasSource, HirDisplay, ModuleDef, | ||
9 | }; | ||
10 | use hir_def::FunctionId; | ||
11 | use hir_ty::{Ty, TypeWalk}; | ||
12 | use itertools::Itertools; | ||
13 | use ra_db::SourceDatabaseExt; | ||
14 | use ra_syntax::AstNode; | ||
15 | use rand::{seq::SliceRandom, thread_rng}; | ||
16 | |||
17 | use crate::cli::{load_cargo::load_cargo, progress_report::ProgressReport, Result, Verbosity}; | ||
18 | |||
19 | pub fn analysis_stats( | ||
20 | verbosity: Verbosity, | ||
21 | memory_usage: bool, | ||
22 | path: &Path, | ||
23 | only: Option<&str>, | ||
24 | with_deps: bool, | ||
25 | randomize: bool, | ||
26 | ) -> Result<()> { | ||
27 | let db_load_time = Instant::now(); | ||
28 | let (mut host, roots) = load_cargo(path)?; | ||
29 | let db = host.raw_database(); | ||
30 | println!("Database loaded, {} roots, {:?}", roots.len(), db_load_time.elapsed()); | ||
31 | let analysis_time = Instant::now(); | ||
32 | let mut num_crates = 0; | ||
33 | let mut visited_modules = HashSet::new(); | ||
34 | let mut visit_queue = Vec::new(); | ||
35 | |||
36 | let members = | ||
37 | roots | ||
38 | .into_iter() | ||
39 | .filter_map(|(source_root_id, project_root)| { | ||
40 | if with_deps || project_root.is_member() { | ||
41 | Some(source_root_id) | ||
42 | } else { | ||
43 | None | ||
44 | } | ||
45 | }) | ||
46 | .collect::<HashSet<_>>(); | ||
47 | |||
48 | let mut krates = Crate::all(db); | ||
49 | if randomize { | ||
50 | krates.shuffle(&mut thread_rng()); | ||
51 | } | ||
52 | for krate in krates { | ||
53 | let module = krate.root_module(db).expect("crate without root module"); | ||
54 | let file_id = module.definition_source(db).file_id; | ||
55 | if members.contains(&db.file_source_root(file_id.original_file(db))) { | ||
56 | num_crates += 1; | ||
57 | visit_queue.push(module); | ||
58 | } | ||
59 | } | ||
60 | |||
61 | if randomize { | ||
62 | visit_queue.shuffle(&mut thread_rng()); | ||
63 | } | ||
64 | |||
65 | println!("Crates in this dir: {}", num_crates); | ||
66 | let mut num_decls = 0; | ||
67 | let mut funcs = Vec::new(); | ||
68 | while let Some(module) = visit_queue.pop() { | ||
69 | if visited_modules.insert(module) { | ||
70 | visit_queue.extend(module.children(db)); | ||
71 | |||
72 | for decl in module.declarations(db) { | ||
73 | num_decls += 1; | ||
74 | if let ModuleDef::Function(f) = decl { | ||
75 | funcs.push(f); | ||
76 | } | ||
77 | } | ||
78 | |||
79 | for impl_block in module.impl_blocks(db) { | ||
80 | for item in impl_block.items(db) { | ||
81 | num_decls += 1; | ||
82 | if let AssocItem::Function(f) = item { | ||
83 | funcs.push(f); | ||
84 | } | ||
85 | } | ||
86 | } | ||
87 | } | ||
88 | } | ||
89 | println!("Total modules found: {}", visited_modules.len()); | ||
90 | println!("Total declarations: {}", num_decls); | ||
91 | println!("Total functions: {}", funcs.len()); | ||
92 | println!("Item Collection: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage()); | ||
93 | |||
94 | if randomize { | ||
95 | funcs.shuffle(&mut thread_rng()); | ||
96 | } | ||
97 | |||
98 | let inference_time = Instant::now(); | ||
99 | let mut bar = match verbosity { | ||
100 | Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), | ||
101 | _ => ProgressReport::new(funcs.len() as u64), | ||
102 | }; | ||
103 | |||
104 | bar.tick(); | ||
105 | let mut num_exprs = 0; | ||
106 | let mut num_exprs_unknown = 0; | ||
107 | let mut num_exprs_partially_unknown = 0; | ||
108 | let mut num_type_mismatches = 0; | ||
109 | for f in funcs { | ||
110 | let name = f.name(db); | ||
111 | let full_name = f | ||
112 | .module(db) | ||
113 | .path_to_root(db) | ||
114 | .into_iter() | ||
115 | .rev() | ||
116 | .filter_map(|it| it.name(db)) | ||
117 | .chain(Some(f.name(db))) | ||
118 | .join("::"); | ||
119 | if let Some(only_name) = only { | ||
120 | if name.to_string() != only_name && full_name != only_name { | ||
121 | continue; | ||
122 | } | ||
123 | } | ||
124 | let mut msg = format!("processing: {}", full_name); | ||
125 | if verbosity.is_verbose() { | ||
126 | let src = f.source(db); | ||
127 | let original_file = src.file_id.original_file(db); | ||
128 | let path = db.file_relative_path(original_file); | ||
129 | let syntax_range = src.value.syntax().text_range(); | ||
130 | write!(msg, " ({:?} {})", path, syntax_range).unwrap(); | ||
131 | } | ||
132 | if verbosity.is_spammy() { | ||
133 | bar.println(format!("{}", msg)); | ||
134 | } | ||
135 | bar.set_message(&msg); | ||
136 | let f_id = FunctionId::from(f); | ||
137 | let body = db.body(f_id.into()); | ||
138 | let inference_result = db.infer(f_id.into()); | ||
139 | let (previous_exprs, previous_unknown, previous_partially_unknown) = | ||
140 | (num_exprs, num_exprs_unknown, num_exprs_partially_unknown); | ||
141 | for (expr_id, _) in body.exprs.iter() { | ||
142 | let ty = &inference_result[expr_id]; | ||
143 | num_exprs += 1; | ||
144 | if let Ty::Unknown = ty { | ||
145 | num_exprs_unknown += 1; | ||
146 | } else { | ||
147 | let mut is_partially_unknown = false; | ||
148 | ty.walk(&mut |ty| { | ||
149 | if let Ty::Unknown = ty { | ||
150 | is_partially_unknown = true; | ||
151 | } | ||
152 | }); | ||
153 | if is_partially_unknown { | ||
154 | num_exprs_partially_unknown += 1; | ||
155 | } | ||
156 | } | ||
157 | if only.is_some() && verbosity.is_spammy() { | ||
158 | // in super-verbose mode for just one function, we print every single expression | ||
159 | let (_, sm) = db.body_with_source_map(f_id.into()); | ||
160 | let src = sm.expr_syntax(expr_id); | ||
161 | if let Some(src) = src { | ||
162 | let original_file = src.file_id.original_file(db); | ||
163 | let line_index = host.analysis().file_line_index(original_file).unwrap(); | ||
164 | let text_range = src.value.either( | ||
165 | |it| it.syntax_node_ptr().range(), | ||
166 | |it| it.syntax_node_ptr().range(), | ||
167 | ); | ||
168 | let (start, end) = ( | ||
169 | line_index.line_col(text_range.start()), | ||
170 | line_index.line_col(text_range.end()), | ||
171 | ); | ||
172 | bar.println(format!( | ||
173 | "{}:{}-{}:{}: {}", | ||
174 | start.line + 1, | ||
175 | start.col_utf16, | ||
176 | end.line + 1, | ||
177 | end.col_utf16, | ||
178 | ty.display(db) | ||
179 | )); | ||
180 | } else { | ||
181 | bar.println(format!("unknown location: {}", ty.display(db))); | ||
182 | } | ||
183 | } | ||
184 | if let Some(mismatch) = inference_result.type_mismatch_for_expr(expr_id) { | ||
185 | num_type_mismatches += 1; | ||
186 | if verbosity.is_verbose() { | ||
187 | let (_, sm) = db.body_with_source_map(f_id.into()); | ||
188 | let src = sm.expr_syntax(expr_id); | ||
189 | if let Some(src) = src { | ||
190 | // FIXME: it might be nice to have a function (on Analysis?) that goes from Source<T> -> (LineCol, LineCol) directly | ||
191 | let original_file = src.file_id.original_file(db); | ||
192 | let path = db.file_relative_path(original_file); | ||
193 | let line_index = host.analysis().file_line_index(original_file).unwrap(); | ||
194 | let text_range = src.value.either( | ||
195 | |it| it.syntax_node_ptr().range(), | ||
196 | |it| it.syntax_node_ptr().range(), | ||
197 | ); | ||
198 | let (start, end) = ( | ||
199 | line_index.line_col(text_range.start()), | ||
200 | line_index.line_col(text_range.end()), | ||
201 | ); | ||
202 | bar.println(format!( | ||
203 | "{} {}:{}-{}:{}: Expected {}, got {}", | ||
204 | path, | ||
205 | start.line + 1, | ||
206 | start.col_utf16, | ||
207 | end.line + 1, | ||
208 | end.col_utf16, | ||
209 | mismatch.expected.display(db), | ||
210 | mismatch.actual.display(db) | ||
211 | )); | ||
212 | } else { | ||
213 | bar.println(format!( | ||
214 | "{}: Expected {}, got {}", | ||
215 | name, | ||
216 | mismatch.expected.display(db), | ||
217 | mismatch.actual.display(db) | ||
218 | )); | ||
219 | } | ||
220 | } | ||
221 | } | ||
222 | } | ||
223 | if verbosity.is_spammy() { | ||
224 | bar.println(format!( | ||
225 | "In {}: {} exprs, {} unknown, {} partial", | ||
226 | full_name, | ||
227 | num_exprs - previous_exprs, | ||
228 | num_exprs_unknown - previous_unknown, | ||
229 | num_exprs_partially_unknown - previous_partially_unknown | ||
230 | )); | ||
231 | } | ||
232 | bar.inc(1); | ||
233 | } | ||
234 | bar.finish_and_clear(); | ||
235 | println!("Total expressions: {}", num_exprs); | ||
236 | println!( | ||
237 | "Expressions of unknown type: {} ({}%)", | ||
238 | num_exprs_unknown, | ||
239 | if num_exprs > 0 { num_exprs_unknown * 100 / num_exprs } else { 100 } | ||
240 | ); | ||
241 | println!( | ||
242 | "Expressions of partially unknown type: {} ({}%)", | ||
243 | num_exprs_partially_unknown, | ||
244 | if num_exprs > 0 { num_exprs_partially_unknown * 100 / num_exprs } else { 100 } | ||
245 | ); | ||
246 | println!("Type mismatches: {}", num_type_mismatches); | ||
247 | println!("Inference: {:?}, {}", inference_time.elapsed(), ra_prof::memory_usage()); | ||
248 | println!("Total: {:?}, {}", analysis_time.elapsed(), ra_prof::memory_usage()); | ||
249 | |||
250 | if memory_usage { | ||
251 | for (name, bytes) in host.per_query_memory_usage() { | ||
252 | println!("{:>8} {}", bytes, name) | ||
253 | } | ||
254 | let before = ra_prof::memory_usage(); | ||
255 | drop(host); | ||
256 | println!("leftover: {}", before.allocated - ra_prof::memory_usage().allocated) | ||
257 | } | ||
258 | |||
259 | Ok(()) | ||
260 | } | ||
diff --git a/crates/ra_lsp_server/src/cli/load_cargo.rs b/crates/ra_lsp_server/src/cli/load_cargo.rs
deleted file mode 100644
index 8cd08ecb6..000000000
--- a/crates/ra_lsp_server/src/cli/load_cargo.rs
+++ /dev/null
@@ -1,155 +0,0 @@ | |||
1 | //! Loads a Cargo project into a static instance of analysis, without support | ||
2 | //! for incorporating changes. | ||
3 | |||
4 | use std::path::Path; | ||
5 | |||
6 | use anyhow::Result; | ||
7 | use crossbeam_channel::{unbounded, Receiver}; | ||
8 | use ra_db::{CrateGraph, FileId, SourceRootId}; | ||
9 | use ra_ide::{AnalysisChange, AnalysisHost, FeatureFlags}; | ||
10 | use ra_project_model::{get_rustc_cfg_options, PackageRoot, ProjectWorkspace}; | ||
11 | use ra_vfs::{RootEntry, Vfs, VfsChange, VfsTask, Watch}; | ||
12 | use rustc_hash::{FxHashMap, FxHashSet}; | ||
13 | |||
14 | use crate::vfs_glob::RustPackageFilterBuilder; | ||
15 | |||
16 | fn vfs_file_to_id(f: ra_vfs::VfsFile) -> FileId { | ||
17 | FileId(f.0) | ||
18 | } | ||
19 | fn vfs_root_to_id(r: ra_vfs::VfsRoot) -> SourceRootId { | ||
20 | SourceRootId(r.0) | ||
21 | } | ||
22 | |||
23 | pub(crate) fn load_cargo( | ||
24 | root: &Path, | ||
25 | ) -> Result<(AnalysisHost, FxHashMap<SourceRootId, PackageRoot>)> { | ||
26 | let root = std::env::current_dir()?.join(root); | ||
27 | let ws = ProjectWorkspace::discover(root.as_ref(), &Default::default())?; | ||
28 | let project_roots = ws.to_roots(); | ||
29 | let (sender, receiver) = unbounded(); | ||
30 | let sender = Box::new(move |t| sender.send(t).unwrap()); | ||
31 | let (mut vfs, roots) = Vfs::new( | ||
32 | project_roots | ||
33 | .iter() | ||
34 | .map(|pkg_root| { | ||
35 | RootEntry::new( | ||
36 | pkg_root.path().clone(), | ||
37 | RustPackageFilterBuilder::default() | ||
38 | .set_member(pkg_root.is_member()) | ||
39 | .into_vfs_filter(), | ||
40 | ) | ||
41 | }) | ||
42 | .collect(), | ||
43 | sender, | ||
44 | Watch(false), | ||
45 | ); | ||
46 | |||
47 | // FIXME: cfg options? | ||
48 | let default_cfg_options = { | ||
49 | let mut opts = get_rustc_cfg_options(); | ||
50 | opts.insert_atom("test".into()); | ||
51 | opts.insert_atom("debug_assertion".into()); | ||
52 | opts | ||
53 | }; | ||
54 | |||
55 | let (crate_graph, _crate_names) = | ||
56 | ws.to_crate_graph(&default_cfg_options, &mut |path: &Path| { | ||
57 | let vfs_file = vfs.load(path); | ||
58 | log::debug!("vfs file {:?} -> {:?}", path, vfs_file); | ||
59 | vfs_file.map(vfs_file_to_id) | ||
60 | }); | ||
61 | log::debug!("crate graph: {:?}", crate_graph); | ||
62 | |||
63 | let source_roots = roots | ||
64 | .iter() | ||
65 | .map(|&vfs_root| { | ||
66 | let source_root_id = vfs_root_to_id(vfs_root); | ||
67 | let project_root = project_roots | ||
68 | .iter() | ||
69 | .find(|it| it.path() == &vfs.root2path(vfs_root)) | ||
70 | .unwrap() | ||
71 | .clone(); | ||
72 | (source_root_id, project_root) | ||
73 | }) | ||
74 | .collect::<FxHashMap<_, _>>(); | ||
75 | let host = load(&source_roots, crate_graph, &mut vfs, receiver); | ||
76 | Ok((host, source_roots)) | ||
77 | } | ||
78 | |||
79 | pub(crate) fn load( | ||
80 | source_roots: &FxHashMap<SourceRootId, PackageRoot>, | ||
81 | crate_graph: CrateGraph, | ||
82 | vfs: &mut Vfs, | ||
83 | receiver: Receiver<VfsTask>, | ||
84 | ) -> AnalysisHost { | ||
85 | let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok()); | ||
86 | let mut host = AnalysisHost::new(lru_cap, FeatureFlags::default()); | ||
87 | let mut analysis_change = AnalysisChange::new(); | ||
88 | analysis_change.set_crate_graph(crate_graph); | ||
89 | |||
90 | // wait until Vfs has loaded all roots | ||
91 | let mut roots_loaded = FxHashSet::default(); | ||
92 | for task in receiver { | ||
93 | vfs.handle_task(task); | ||
94 | let mut done = false; | ||
95 | for change in vfs.commit_changes() { | ||
96 | match change { | ||
97 | VfsChange::AddRoot { root, files } => { | ||
98 | let source_root_id = vfs_root_to_id(root); | ||
99 | let is_local = source_roots[&source_root_id].is_member(); | ||
100 | log::debug!( | ||
101 | "loaded source root {:?} with path {:?}", | ||
102 | source_root_id, | ||
103 | vfs.root2path(root) | ||
104 | ); | ||
105 | analysis_change.add_root(source_root_id, is_local); | ||
106 | analysis_change.set_debug_root_path( | ||
107 | source_root_id, | ||
108 | source_roots[&source_root_id].path().display().to_string(), | ||
109 | ); | ||
110 | |||
111 | let mut file_map = FxHashMap::default(); | ||
112 | for (vfs_file, path, text) in files { | ||
113 | let file_id = vfs_file_to_id(vfs_file); | ||
114 | analysis_change.add_file(source_root_id, file_id, path.clone(), text); | ||
115 | file_map.insert(path, file_id); | ||
116 | } | ||
117 | roots_loaded.insert(source_root_id); | ||
118 | if roots_loaded.len() == vfs.n_roots() { | ||
119 | done = true; | ||
120 | } | ||
121 | } | ||
122 | VfsChange::AddFile { root, file, path, text } => { | ||
123 | let source_root_id = vfs_root_to_id(root); | ||
124 | let file_id = vfs_file_to_id(file); | ||
125 | analysis_change.add_file(source_root_id, file_id, path, text); | ||
126 | } | ||
127 | VfsChange::RemoveFile { .. } | VfsChange::ChangeFile { .. } => { | ||
128 | // We just need the first scan, so just ignore these | ||
129 | } | ||
130 | } | ||
131 | } | ||
132 | if done { | ||
133 | break; | ||
134 | } | ||
135 | } | ||
136 | |||
137 | host.apply_change(analysis_change); | ||
138 | host | ||
139 | } | ||
140 | |||
141 | #[cfg(test)] | ||
142 | mod tests { | ||
143 | use super::*; | ||
144 | |||
145 | use hir::Crate; | ||
146 | |||
147 | #[test] | ||
148 | fn test_loading_rust_analyzer() { | ||
149 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); | ||
150 | let (host, _roots) = load_cargo(path).unwrap(); | ||
151 | let n_crates = Crate::all(host.raw_database()).len(); | ||
152 | // RA has quite a few crates, but the exact count doesn't matter | ||
153 | assert!(n_crates > 20); | ||
154 | } | ||
155 | } | ||
diff --git a/crates/ra_lsp_server/src/cli/progress_report.rs b/crates/ra_lsp_server/src/cli/progress_report.rs
deleted file mode 100644
index 31867a1e9..000000000
--- a/crates/ra_lsp_server/src/cli/progress_report.rs
+++ /dev/null
@@ -1,120 +0,0 @@ | |||
1 | //! A simple progress bar | ||
2 | //! | ||
3 | //! A single-threaded, non-optimized progress bar. | ||
4 | use std::io::Write; | ||
5 | |||
6 | /// A Simple ASCII Progress Bar | ||
7 | pub struct ProgressReport { | ||
8 | curr: f32, | ||
9 | text: String, | ||
10 | hidden: bool, | ||
11 | |||
12 | len: u64, | ||
13 | pos: u64, | ||
14 | msg: String, | ||
15 | } | ||
16 | |||
17 | impl ProgressReport { | ||
18 | pub fn new(len: u64) -> ProgressReport { | ||
19 | ProgressReport { | ||
20 | curr: 0.0, | ||
21 | text: String::new(), | ||
22 | hidden: false, | ||
23 | len, | ||
24 | pos: 0, | ||
25 | msg: String::new(), | ||
26 | } | ||
27 | } | ||
28 | |||
29 | pub fn hidden() -> ProgressReport { | ||
30 | ProgressReport { | ||
31 | curr: 0.0, | ||
32 | text: String::new(), | ||
33 | hidden: true, | ||
34 | len: 0, | ||
35 | pos: 0, | ||
36 | msg: String::new(), | ||
37 | } | ||
38 | } | ||
39 | |||
40 | pub fn set_message(&mut self, msg: &str) { | ||
41 | self.msg = msg.to_string(); | ||
42 | self.tick(); | ||
43 | } | ||
44 | |||
45 | pub fn println<I: Into<String>>(&mut self, msg: I) { | ||
46 | self.clear(); | ||
47 | println!("{}", msg.into()); | ||
48 | self.tick(); | ||
49 | } | ||
50 | |||
51 | pub fn inc(&mut self, delta: u64) { | ||
52 | self.pos += delta; | ||
53 | if self.len == 0 { | ||
54 | self.set_value(0.0) | ||
55 | } else { | ||
56 | self.set_value((self.pos as f32) / (self.len as f32)) | ||
57 | } | ||
58 | self.tick(); | ||
59 | } | ||
60 | |||
61 | pub fn finish_and_clear(&mut self) { | ||
62 | self.clear(); | ||
63 | } | ||
64 | |||
65 | pub fn tick(&mut self) { | ||
66 | if self.hidden { | ||
67 | return; | ||
68 | } | ||
69 | let percent = (self.curr * 100.0) as u32; | ||
70 | let text = format!("{}/{} {:>3}% {}", self.pos, self.len, percent, self.msg); | ||
71 | self.update_text(&text); | ||
72 | } | ||
73 | |||
74 | fn update_text(&mut self, text: &str) { | ||
75 | // Get length of common portion | ||
76 | let mut common_prefix_length = 0; | ||
77 | let common_length = usize::min(self.text.len(), text.len()); | ||
78 | |||
79 | while common_prefix_length < common_length | ||
80 | && text.chars().nth(common_prefix_length).unwrap() | ||
81 | == self.text.chars().nth(common_prefix_length).unwrap() | ||
82 | { | ||
83 | common_prefix_length += 1; | ||
84 | } | ||
85 | |||
86 | // Backtrack to the first differing character | ||
87 | let mut output = String::new(); | ||
88 | output += &'\x08'.to_string().repeat(self.text.len() - common_prefix_length); | ||
89 | // Output new suffix | ||
90 | output += &text[common_prefix_length..text.len()]; | ||
91 | |||
92 | // If the new text is shorter than the old one: delete overlapping characters | ||
93 | if let Some(overlap_count) = self.text.len().checked_sub(text.len()) { | ||
94 | if overlap_count > 0 { | ||
95 | output += &" ".repeat(overlap_count); | ||
96 | output += &"\x08".repeat(overlap_count); | ||
97 | } | ||
98 | } | ||
99 | |||
100 | let _ = std::io::stdout().write(output.as_bytes()); | ||
101 | let _ = std::io::stdout().flush(); | ||
102 | self.text = text.to_string(); | ||
103 | } | ||
104 | |||
105 | fn set_value(&mut self, value: f32) { | ||
106 | self.curr = f32::max(0.0, f32::min(1.0, value)); | ||
107 | } | ||
108 | |||
109 | fn clear(&mut self) { | ||
110 | if self.hidden { | ||
111 | return; | ||
112 | } | ||
113 | |||
114 | // Fill all last text to space and return the cursor | ||
115 | let spaces = " ".repeat(self.text.len()); | ||
116 | let backspaces = "\x08".repeat(self.text.len()); | ||
117 | print!("{}{}{}", backspaces, spaces, backspaces); | ||
118 | self.text = String::new(); | ||
119 | } | ||
120 | } | ||
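
The bar is driven manually by its caller (see `analysis_stats` above). A minimal usage sketch:

    fn example() {
        let mut bar = ProgressReport::new(3);
        for i in 0..3 {
            bar.set_message(&format!("step {}", i)); // redraws the "pos/len percent msg" line in place
            bar.inc(1); // advances the position and recomputes the percentage
        }
        bar.finish_and_clear(); // erases the bar once the work is done
    }
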
diff --git a/crates/ra_lsp_server/src/config.rs b/crates/ra_lsp_server/src/config.rs
deleted file mode 100644
index 3314269ec..000000000
--- a/crates/ra_lsp_server/src/config.rs
+++ /dev/null
@@ -1,105 +0,0 @@ | |||
1 | //! Config used by the language server. | ||
2 | //! | ||
3 | //! We currently get this config from the `initialize` LSP request, which is not the | ||
4 | //! best way to do it, but was the simplest thing we could implement. | ||
5 | //! | ||
6 | //! Of particular interest is the `feature_flags` hash map: while other fields | ||
7 | //! configure the server itself, feature flags are passed into analysis, and | ||
8 | //! tweak things like automatic insertion of `()` in completions. | ||
9 | |||
10 | use rustc_hash::FxHashMap; | ||
11 | |||
12 | use ra_project_model::CargoFeatures; | ||
13 | use serde::{Deserialize, Deserializer}; | ||
14 | |||
15 | /// Client provided initialization options | ||
16 | #[derive(Deserialize, Clone, Debug, PartialEq, Eq)] | ||
17 | #[serde(rename_all = "camelCase", default)] | ||
18 | pub struct ServerConfig { | ||
19 | /// Whether the client supports our custom highlighting publishing decorations. | ||
20 | /// This is different to the highlightingOn setting, which is whether the user | ||
21 | /// wants our custom highlighting to be used. | ||
22 | /// | ||
23 | /// Defaults to `false` | ||
24 | #[serde(deserialize_with = "nullable_bool_false")] | ||
25 | pub publish_decorations: bool, | ||
26 | |||
27 | pub exclude_globs: Vec<String>, | ||
28 | #[serde(deserialize_with = "nullable_bool_false")] | ||
29 | pub use_client_watching: bool, | ||
30 | |||
31 | pub lru_capacity: Option<usize>, | ||
32 | |||
33 | pub max_inlay_hint_length: Option<usize>, | ||
34 | |||
35 | pub cargo_watch_enable: bool, | ||
36 | pub cargo_watch_args: Vec<String>, | ||
37 | pub cargo_watch_command: String, | ||
38 | pub cargo_watch_all_targets: bool, | ||
39 | |||
40 | /// For internal usage to make integrated tests faster. | ||
41 | #[serde(deserialize_with = "nullable_bool_true")] | ||
42 | pub with_sysroot: bool, | ||
43 | |||
44 | /// Fine grained feature flags to disable specific features. | ||
45 | pub feature_flags: FxHashMap<String, bool>, | ||
46 | |||
47 | pub rustfmt_args: Vec<String>, | ||
48 | |||
49 | /// Cargo feature configurations. | ||
50 | pub cargo_features: CargoFeatures, | ||
51 | } | ||
52 | |||
53 | impl Default for ServerConfig { | ||
54 | fn default() -> ServerConfig { | ||
55 | ServerConfig { | ||
56 | publish_decorations: false, | ||
57 | exclude_globs: Vec::new(), | ||
58 | use_client_watching: false, | ||
59 | lru_capacity: None, | ||
60 | max_inlay_hint_length: None, | ||
61 | cargo_watch_enable: true, | ||
62 | cargo_watch_args: Vec::new(), | ||
63 | cargo_watch_command: "check".to_string(), | ||
64 | cargo_watch_all_targets: true, | ||
65 | with_sysroot: true, | ||
66 | feature_flags: FxHashMap::default(), | ||
67 | cargo_features: Default::default(), | ||
68 | rustfmt_args: Vec::new(), | ||
69 | } | ||
70 | } | ||
71 | } | ||
72 | |||
73 | /// Deserializes a null value to a bool false by default | ||
74 | fn nullable_bool_false<'de, D>(deserializer: D) -> Result<bool, D::Error> | ||
75 | where | ||
76 | D: Deserializer<'de>, | ||
77 | { | ||
78 | let opt = Option::deserialize(deserializer)?; | ||
79 | Ok(opt.unwrap_or(false)) | ||
80 | } | ||
81 | |||
82 | /// Deserializes a null value to a bool true by default | ||
83 | fn nullable_bool_true<'de, D>(deserializer: D) -> Result<bool, D::Error> | ||
84 | where | ||
85 | D: Deserializer<'de>, | ||
86 | { | ||
87 | let opt = Option::deserialize(deserializer)?; | ||
88 | Ok(opt.unwrap_or(true)) | ||
89 | } | ||
90 | |||
91 | #[cfg(test)] | ||
92 | mod test { | ||
93 | use super::*; | ||
94 | |||
95 | #[test] | ||
96 | fn deserialize_init_options_defaults() { | ||
97 | // check that null == default for both fields | ||
98 | let default = ServerConfig::default(); | ||
99 | assert_eq!(default, serde_json::from_str(r#"{}"#).unwrap()); | ||
100 | assert_eq!( | ||
101 | default, | ||
102 | serde_json::from_str(r#"{"publishDecorations":null, "lruCapacity":null}"#).unwrap() | ||
103 | ); | ||
104 | } | ||
105 | } | ||
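
For reference, a client passes these options as `initializationOptions` in the `initialize` request. A sketch of how such a payload maps onto the struct above (field names follow the camelCase renaming; the feature-flag key is hypothetical and only shows the expected shape):

    fn example() {
        // The "some.flag" key is hypothetical; any string key is accepted by the map.
        let opts = serde_json::json!({
            "publishDecorations": true,
            "lruCapacity": 1024,
            "cargoWatchCommand": "clippy",
            "featureFlags": { "some.flag": false }
        });
        // Missing fields fall back to `ServerConfig::default()` thanks to `#[serde(default)]`.
        let config: ServerConfig = serde_json::from_value(opts).unwrap();
        assert_eq!(config.lru_capacity, Some(1024));
        assert_eq!(config.cargo_watch_command, "clippy");
    }
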
diff --git a/crates/ra_lsp_server/src/conv.rs b/crates/ra_lsp_server/src/conv.rs
deleted file mode 100644
index 90ef74056..000000000
--- a/crates/ra_lsp_server/src/conv.rs
+++ /dev/null
@@ -1,630 +0,0 @@ | |||
1 | //! Convenience module responsible for translating between rust-analyzer's types | ||
2 | //! and LSP types. | ||
3 | |||
4 | use lsp_types::{ | ||
5 | self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation, | ||
6 | Location, LocationLink, MarkupContent, MarkupKind, Position, Range, RenameFile, ResourceOp, | ||
7 | SymbolKind, TextDocumentEdit, TextDocumentIdentifier, TextDocumentItem, | ||
8 | TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit, | ||
9 | }; | ||
10 | use ra_ide::{ | ||
11 | translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition, | ||
12 | FileRange, FileSystemEdit, Fold, FoldKind, InsertTextFormat, LineCol, LineIndex, | ||
13 | NavigationTarget, RangeInfo, ReferenceAccess, Severity, SourceChange, SourceFileEdit, | ||
14 | }; | ||
15 | use ra_syntax::{SyntaxKind, TextRange, TextUnit}; | ||
16 | use ra_text_edit::{AtomTextEdit, TextEdit}; | ||
17 | use ra_vfs::LineEndings; | ||
18 | |||
19 | use crate::{req, world::WorldSnapshot, Result}; | ||
20 | |||
21 | pub trait Conv { | ||
22 | type Output; | ||
23 | fn conv(self) -> Self::Output; | ||
24 | } | ||
25 | |||
26 | pub trait ConvWith<CTX> { | ||
27 | type Output; | ||
28 | fn conv_with(self, ctx: CTX) -> Self::Output; | ||
29 | } | ||
30 | |||
31 | pub trait TryConvWith<CTX> { | ||
32 | type Output; | ||
33 | fn try_conv_with(self, ctx: CTX) -> Result<Self::Output>; | ||
34 | } | ||
35 | |||
36 | impl Conv for SyntaxKind { | ||
37 | type Output = SymbolKind; | ||
38 | |||
39 | fn conv(self) -> <Self as Conv>::Output { | ||
40 | match self { | ||
41 | SyntaxKind::FN_DEF => SymbolKind::Function, | ||
42 | SyntaxKind::STRUCT_DEF => SymbolKind::Struct, | ||
43 | SyntaxKind::ENUM_DEF => SymbolKind::Enum, | ||
44 | SyntaxKind::ENUM_VARIANT => SymbolKind::EnumMember, | ||
45 | SyntaxKind::TRAIT_DEF => SymbolKind::Interface, | ||
46 | SyntaxKind::MACRO_CALL => SymbolKind::Function, | ||
47 | SyntaxKind::MODULE => SymbolKind::Module, | ||
48 | SyntaxKind::TYPE_ALIAS_DEF => SymbolKind::TypeParameter, | ||
49 | SyntaxKind::RECORD_FIELD_DEF => SymbolKind::Field, | ||
50 | SyntaxKind::STATIC_DEF => SymbolKind::Constant, | ||
51 | SyntaxKind::CONST_DEF => SymbolKind::Constant, | ||
52 | SyntaxKind::IMPL_BLOCK => SymbolKind::Object, | ||
53 | _ => SymbolKind::Variable, | ||
54 | } | ||
55 | } | ||
56 | } | ||
57 | |||
58 | impl Conv for ReferenceAccess { | ||
59 | type Output = ::lsp_types::DocumentHighlightKind; | ||
60 | |||
61 | fn conv(self) -> Self::Output { | ||
62 | use lsp_types::DocumentHighlightKind; | ||
63 | match self { | ||
64 | ReferenceAccess::Read => DocumentHighlightKind::Read, | ||
65 | ReferenceAccess::Write => DocumentHighlightKind::Write, | ||
66 | } | ||
67 | } | ||
68 | } | ||
69 | |||
70 | impl Conv for CompletionItemKind { | ||
71 | type Output = ::lsp_types::CompletionItemKind; | ||
72 | |||
73 | fn conv(self) -> <Self as Conv>::Output { | ||
74 | use lsp_types::CompletionItemKind::*; | ||
75 | match self { | ||
76 | CompletionItemKind::Keyword => Keyword, | ||
77 | CompletionItemKind::Snippet => Snippet, | ||
78 | CompletionItemKind::Module => Module, | ||
79 | CompletionItemKind::Function => Function, | ||
80 | CompletionItemKind::Struct => Struct, | ||
81 | CompletionItemKind::Enum => Enum, | ||
82 | CompletionItemKind::EnumVariant => EnumMember, | ||
83 | CompletionItemKind::BuiltinType => Struct, | ||
84 | CompletionItemKind::Binding => Variable, | ||
85 | CompletionItemKind::Field => Field, | ||
86 | CompletionItemKind::Trait => Interface, | ||
87 | CompletionItemKind::TypeAlias => Struct, | ||
88 | CompletionItemKind::Const => Constant, | ||
89 | CompletionItemKind::Static => Value, | ||
90 | CompletionItemKind::Method => Method, | ||
91 | CompletionItemKind::TypeParam => TypeParameter, | ||
92 | CompletionItemKind::Macro => Method, | ||
93 | } | ||
94 | } | ||
95 | } | ||
96 | |||
97 | impl Conv for Severity { | ||
98 | type Output = DiagnosticSeverity; | ||
99 | fn conv(self) -> DiagnosticSeverity { | ||
100 | match self { | ||
101 | Severity::Error => DiagnosticSeverity::Error, | ||
102 | Severity::WeakWarning => DiagnosticSeverity::Hint, | ||
103 | } | ||
104 | } | ||
105 | } | ||
106 | |||
107 | impl ConvWith<(&LineIndex, LineEndings)> for CompletionItem { | ||
108 | type Output = ::lsp_types::CompletionItem; | ||
109 | |||
110 | fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> ::lsp_types::CompletionItem { | ||
111 | let mut additional_text_edits = Vec::new(); | ||
112 | let mut text_edit = None; | ||
113 | // LSP does not allow arbitrary edits in completion, so we have to do a | ||
114 | // non-trivial mapping here. | ||
115 | for atom_edit in self.text_edit().as_atoms() { | ||
116 | if self.source_range().is_subrange(&atom_edit.delete) { | ||
117 | text_edit = Some(if atom_edit.delete == self.source_range() { | ||
118 | atom_edit.conv_with(ctx) | ||
119 | } else { | ||
120 | assert!(self.source_range().end() == atom_edit.delete.end()); | ||
121 | let range1 = | ||
122 | TextRange::from_to(atom_edit.delete.start(), self.source_range().start()); | ||
123 | let range2 = self.source_range(); | ||
124 | let edit1 = AtomTextEdit::replace(range1, String::new()); | ||
125 | let edit2 = AtomTextEdit::replace(range2, atom_edit.insert.clone()); | ||
126 | additional_text_edits.push(edit1.conv_with(ctx)); | ||
127 | edit2.conv_with(ctx) | ||
128 | }) | ||
129 | } else { | ||
130 | assert!(self.source_range().intersection(&atom_edit.delete).is_none()); | ||
131 | additional_text_edits.push(atom_edit.conv_with(ctx)); | ||
132 | } | ||
133 | } | ||
134 | let text_edit = text_edit.unwrap(); | ||
135 | |||
136 | let mut res = lsp_types::CompletionItem { | ||
137 | label: self.label().to_string(), | ||
138 | detail: self.detail().map(|it| it.to_string()), | ||
139 | filter_text: Some(self.lookup().to_string()), | ||
140 | kind: self.kind().map(|it| it.conv()), | ||
141 | text_edit: Some(text_edit), | ||
142 | additional_text_edits: Some(additional_text_edits), | ||
143 | documentation: self.documentation().map(|it| it.conv()), | ||
144 | deprecated: Some(self.deprecated()), | ||
145 | ..Default::default() | ||
146 | }; | ||
147 | |||
148 | if self.deprecated() { | ||
149 | res.tags = Some(vec![lsp_types::CompletionItemTag::Deprecated]) | ||
150 | } | ||
151 | |||
152 | res.insert_text_format = Some(match self.insert_text_format() { | ||
153 | InsertTextFormat::Snippet => lsp_types::InsertTextFormat::Snippet, | ||
154 | InsertTextFormat::PlainText => lsp_types::InsertTextFormat::PlainText, | ||
155 | }); | ||
156 | |||
157 | res | ||
158 | } | ||
159 | } | ||
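The edit-splitting logic above keeps the primary `text_edit` confined to the completion's source range, as LSP requires, by peeling off the part of the edit that precedes it into `additional_text_edits`. A small standalone sketch of that split on plain numeric ranges (the `split_edit` helper is invented for illustration):

```rust
use std::ops::Range;

// Invented helper mirroring the split above: the edit's deleted range starts before the
// completion's source range but ends with it, so it becomes a prefix deletion (pushed to
// `additional_text_edits`) plus a replacement of the source range itself (the primary edit).
fn split_edit(
    source: Range<u32>,
    delete: Range<u32>,
    insert: &str,
) -> ((Range<u32>, String), (Range<u32>, String)) {
    assert!(delete.start <= source.start && delete.end == source.end);
    let prefix_deletion = (delete.start..source.start, String::new());
    let replacement = (source, insert.to_string());
    (prefix_deletion, replacement)
}

fn main() {
    // Delete 10..20 and insert "foo", while the completion's source range is 15..20:
    let (additional, primary) = split_edit(15..20, 10..20, "foo");
    assert_eq!(additional, (10..15, String::new()));
    assert_eq!(primary, (15..20, "foo".to_string()));
}
```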
160 | |||
161 | impl ConvWith<&LineIndex> for Position { | ||
162 | type Output = TextUnit; | ||
163 | |||
164 | fn conv_with(self, line_index: &LineIndex) -> TextUnit { | ||
165 | let line_col = LineCol { line: self.line as u32, col_utf16: self.character as u32 }; | ||
166 | line_index.offset(line_col) | ||
167 | } | ||
168 | } | ||
169 | |||
170 | impl ConvWith<&LineIndex> for TextUnit { | ||
171 | type Output = Position; | ||
172 | |||
173 | fn conv_with(self, line_index: &LineIndex) -> Position { | ||
174 | let line_col = line_index.line_col(self); | ||
175 | Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16)) | ||
176 | } | ||
177 | } | ||
178 | |||
179 | impl ConvWith<&LineIndex> for TextRange { | ||
180 | type Output = Range; | ||
181 | |||
182 | fn conv_with(self, line_index: &LineIndex) -> Range { | ||
183 | Range::new(self.start().conv_with(line_index), self.end().conv_with(line_index)) | ||
184 | } | ||
185 | } | ||
186 | |||
187 | impl ConvWith<&LineIndex> for Range { | ||
188 | type Output = TextRange; | ||
189 | |||
190 | fn conv_with(self, line_index: &LineIndex) -> TextRange { | ||
191 | TextRange::from_to(self.start.conv_with(line_index), self.end.conv_with(line_index)) | ||
192 | } | ||
193 | } | ||
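The four impls above convert between LSP's line/character positions and the analyzer's flat text offsets via a `LineIndex`. A simplified, self-contained sketch of the underlying mapping (assumed here, not the actual ra_ide implementation, and it counts bytes rather than the UTF-16 code units LSP uses for columns):

```rust
// Assumed, simplified offset -> line/column mapping: line start offsets are collected
// once, and a position is found by locating the last line start at or before the offset.
fn line_col(text: &str, offset: usize) -> (u32, u32) {
    let mut line_starts = vec![0usize];
    for (i, b) in text.bytes().enumerate() {
        if b == b'\n' {
            line_starts.push(i + 1);
        }
    }
    let line = line_starts.partition_point(|&start| start <= offset) - 1;
    (line as u32, (offset - line_starts[line]) as u32)
}

fn main() {
    let text = "fn main() {\n    let x = 1;\n}\n";
    assert_eq!(line_col(text, 0), (0, 0));
    assert_eq!(line_col(text, 16), (1, 4)); // the `l` of `let`
}
```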
194 | |||
195 | impl Conv for ra_ide::Documentation { | ||
196 | type Output = lsp_types::Documentation; | ||
197 | fn conv(self) -> Documentation { | ||
198 | Documentation::MarkupContent(MarkupContent { | ||
199 | kind: MarkupKind::Markdown, | ||
200 | value: crate::markdown::format_docs(self.as_str()), | ||
201 | }) | ||
202 | } | ||
203 | } | ||
204 | |||
205 | impl Conv for ra_ide::FunctionSignature { | ||
206 | type Output = lsp_types::SignatureInformation; | ||
207 | fn conv(self) -> Self::Output { | ||
208 | use lsp_types::{ParameterInformation, ParameterLabel, SignatureInformation}; | ||
209 | |||
210 | let label = self.to_string(); | ||
211 | |||
212 | let documentation = self.doc.map(|it| it.conv()); | ||
213 | |||
214 | let parameters: Vec<ParameterInformation> = self | ||
215 | .parameters | ||
216 | .into_iter() | ||
217 | .map(|param| ParameterInformation { | ||
218 | label: ParameterLabel::Simple(param), | ||
219 | documentation: None, | ||
220 | }) | ||
221 | .collect(); | ||
222 | |||
223 | SignatureInformation { label, documentation, parameters: Some(parameters) } | ||
224 | } | ||
225 | } | ||
226 | |||
227 | impl ConvWith<(&LineIndex, LineEndings)> for TextEdit { | ||
228 | type Output = Vec<lsp_types::TextEdit>; | ||
229 | |||
230 | fn conv_with(self, ctx: (&LineIndex, LineEndings)) -> Vec<lsp_types::TextEdit> { | ||
231 | self.as_atoms().iter().map_conv_with(ctx).collect() | ||
232 | } | ||
233 | } | ||
234 | |||
235 | impl ConvWith<(&LineIndex, LineEndings)> for &AtomTextEdit { | ||
236 | type Output = lsp_types::TextEdit; | ||
237 | |||
238 | fn conv_with( | ||
239 | self, | ||
240 | (line_index, line_endings): (&LineIndex, LineEndings), | ||
241 | ) -> lsp_types::TextEdit { | ||
242 | let mut new_text = self.insert.clone(); | ||
243 | if line_endings == LineEndings::Dos { | ||
244 | new_text = new_text.replace('\n', "\r\n"); | ||
245 | } | ||
246 | lsp_types::TextEdit { range: self.delete.conv_with(line_index), new_text } | ||
247 | } | ||
248 | } | ||
249 | |||
250 | pub(crate) struct FoldConvCtx<'a> { | ||
251 | pub(crate) text: &'a str, | ||
252 | pub(crate) line_index: &'a LineIndex, | ||
253 | pub(crate) line_folding_only: bool, | ||
254 | } | ||
255 | |||
256 | impl ConvWith<&FoldConvCtx<'_>> for Fold { | ||
257 | type Output = lsp_types::FoldingRange; | ||
258 | |||
259 | fn conv_with(self, ctx: &FoldConvCtx) -> lsp_types::FoldingRange { | ||
260 | let kind = match self.kind { | ||
261 | FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment), | ||
262 | FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports), | ||
263 | FoldKind::Mods => None, | ||
264 | FoldKind::Block => None, | ||
265 | }; | ||
266 | |||
267 | let range = self.range.conv_with(&ctx.line_index); | ||
268 | |||
269 | if ctx.line_folding_only { | ||
270 | // Clients with line_folding_only == true (such as VSCode) will fold the whole end line | ||
271 | // even if it contains text not in the folding range. To prevent that, we exclude | ||
272 | // range.end.line from the folding region if there is more text after range.end | ||
273 | // on the same line. | ||
274 | let has_more_text_on_end_line = ctx.text | ||
275 | [TextRange::from_to(self.range.end(), TextUnit::of_str(ctx.text))] | ||
276 | .chars() | ||
277 | .take_while(|it| *it != '\n') | ||
278 | .any(|it| !it.is_whitespace()); | ||
279 | |||
280 | let end_line = if has_more_text_on_end_line { | ||
281 | range.end.line.saturating_sub(1) | ||
282 | } else { | ||
283 | range.end.line | ||
284 | }; | ||
285 | |||
286 | lsp_types::FoldingRange { | ||
287 | start_line: range.start.line, | ||
288 | start_character: None, | ||
289 | end_line, | ||
290 | end_character: None, | ||
291 | kind, | ||
292 | } | ||
293 | } else { | ||
294 | lsp_types::FoldingRange { | ||
295 | start_line: range.start.line, | ||
296 | start_character: Some(range.start.character), | ||
297 | end_line: range.end.line, | ||
298 | end_character: Some(range.end.character), | ||
299 | kind, | ||
300 | } | ||
301 | } | ||
302 | } | ||
303 | } | ||
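The `line_folding_only` branch above shrinks a fold by one line whenever non-whitespace text follows the fold's end on the same line, so a line-based client does not hide that text. A tiny standalone sketch of just that adjustment (the `folding_end_line` helper is invented for illustration):

```rust
// Invented helper isolating the fix-up above: if non-whitespace text follows the fold's
// end offset on its last line, stop the fold one line earlier.
fn folding_end_line(end_line: u32, text_after_fold_end: &str) -> u32 {
    let has_more_text_on_end_line = text_after_fold_end
        .chars()
        .take_while(|&c| c != '\n')
        .any(|c| !c.is_whitespace());
    if has_more_text_on_end_line {
        end_line.saturating_sub(1)
    } else {
        end_line
    }
}

fn main() {
    // `}</fold> else {` -- the `else {` remains on the fold's last line:
    assert_eq!(folding_end_line(5, " else {\n    b::do_b();"), 4);
    // The fold ends at the end of its line, so the whole line can be folded:
    assert_eq!(folding_end_line(5, "\nfn main() {}"), 5);
}
```

This matches the expected line pairs in the `conv_fold_line_folding_only_fixup` test below, where the `mod`-fold keeps its last line but the block folds ending in `}</fold> else {` stop one line short.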
304 | |||
305 | impl<T: ConvWith<CTX>, CTX> ConvWith<CTX> for Option<T> { | ||
306 | type Output = Option<T::Output>; | ||
307 | |||
308 | fn conv_with(self, ctx: CTX) -> Self::Output { | ||
309 | self.map(|x| ConvWith::conv_with(x, ctx)) | ||
310 | } | ||
311 | } | ||
312 | |||
313 | impl TryConvWith<&WorldSnapshot> for &Url { | ||
314 | type Output = FileId; | ||
315 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> { | ||
316 | world.uri_to_file_id(self) | ||
317 | } | ||
318 | } | ||
319 | |||
320 | impl TryConvWith<&WorldSnapshot> for FileId { | ||
321 | type Output = Url; | ||
322 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<Url> { | ||
323 | world.file_id_to_uri(self) | ||
324 | } | ||
325 | } | ||
326 | |||
327 | impl TryConvWith<&WorldSnapshot> for &TextDocumentItem { | ||
328 | type Output = FileId; | ||
329 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> { | ||
330 | self.uri.try_conv_with(world) | ||
331 | } | ||
332 | } | ||
333 | |||
334 | impl TryConvWith<&WorldSnapshot> for &VersionedTextDocumentIdentifier { | ||
335 | type Output = FileId; | ||
336 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> { | ||
337 | self.uri.try_conv_with(world) | ||
338 | } | ||
339 | } | ||
340 | |||
341 | impl TryConvWith<&WorldSnapshot> for &TextDocumentIdentifier { | ||
342 | type Output = FileId; | ||
343 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileId> { | ||
344 | world.uri_to_file_id(&self.uri) | ||
345 | } | ||
346 | } | ||
347 | |||
348 | impl TryConvWith<&WorldSnapshot> for &TextDocumentPositionParams { | ||
349 | type Output = FilePosition; | ||
350 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<FilePosition> { | ||
351 | let file_id = self.text_document.try_conv_with(world)?; | ||
352 | let line_index = world.analysis().file_line_index(file_id)?; | ||
353 | let offset = self.position.conv_with(&line_index); | ||
354 | Ok(FilePosition { file_id, offset }) | ||
355 | } | ||
356 | } | ||
357 | |||
358 | impl TryConvWith<&WorldSnapshot> for (&TextDocumentIdentifier, Range) { | ||
359 | type Output = FileRange; | ||
360 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<FileRange> { | ||
361 | let file_id = self.0.try_conv_with(world)?; | ||
362 | let line_index = world.analysis().file_line_index(file_id)?; | ||
363 | let range = self.1.conv_with(&line_index); | ||
364 | Ok(FileRange { file_id, range }) | ||
365 | } | ||
366 | } | ||
367 | |||
368 | impl<T: TryConvWith<CTX>, CTX: Copy> TryConvWith<CTX> for Vec<T> { | ||
369 | type Output = Vec<<T as TryConvWith<CTX>>::Output>; | ||
370 | fn try_conv_with(self, ctx: CTX) -> Result<Self::Output> { | ||
371 | let mut res = Vec::with_capacity(self.len()); | ||
372 | for item in self { | ||
373 | res.push(item.try_conv_with(ctx)?); | ||
374 | } | ||
375 | Ok(res) | ||
376 | } | ||
377 | } | ||
378 | |||
379 | impl TryConvWith<&WorldSnapshot> for SourceChange { | ||
380 | type Output = req::SourceChange; | ||
381 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::SourceChange> { | ||
382 | let cursor_position = match self.cursor_position { | ||
383 | None => None, | ||
384 | Some(pos) => { | ||
385 | let line_index = world.analysis().file_line_index(pos.file_id)?; | ||
386 | let edit = self | ||
387 | .source_file_edits | ||
388 | .iter() | ||
389 | .find(|it| it.file_id == pos.file_id) | ||
390 | .map(|it| &it.edit); | ||
391 | let line_col = match edit { | ||
392 | Some(edit) => translate_offset_with_edit(&*line_index, pos.offset, edit), | ||
393 | None => line_index.line_col(pos.offset), | ||
394 | }; | ||
395 | let position = | ||
396 | Position::new(u64::from(line_col.line), u64::from(line_col.col_utf16)); | ||
397 | Some(TextDocumentPositionParams { | ||
398 | text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?), | ||
399 | position, | ||
400 | }) | ||
401 | } | ||
402 | }; | ||
403 | let mut document_changes: Vec<DocumentChangeOperation> = Vec::new(); | ||
404 | for resource_op in self.file_system_edits.try_conv_with(world)? { | ||
405 | document_changes.push(DocumentChangeOperation::Op(resource_op)); | ||
406 | } | ||
407 | for text_document_edit in self.source_file_edits.try_conv_with(world)? { | ||
408 | document_changes.push(DocumentChangeOperation::Edit(text_document_edit)); | ||
409 | } | ||
410 | let workspace_edit = WorkspaceEdit { | ||
411 | changes: None, | ||
412 | document_changes: Some(DocumentChanges::Operations(document_changes)), | ||
413 | }; | ||
414 | Ok(req::SourceChange { label: self.label, workspace_edit, cursor_position }) | ||
415 | } | ||
416 | } | ||
417 | |||
418 | impl TryConvWith<&WorldSnapshot> for SourceFileEdit { | ||
419 | type Output = TextDocumentEdit; | ||
420 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<TextDocumentEdit> { | ||
421 | let text_document = VersionedTextDocumentIdentifier { | ||
422 | uri: self.file_id.try_conv_with(world)?, | ||
423 | version: None, | ||
424 | }; | ||
425 | let line_index = world.analysis().file_line_index(self.file_id)?; | ||
426 | let line_endings = world.file_line_endings(self.file_id); | ||
427 | let edits = | ||
428 | self.edit.as_atoms().iter().map_conv_with((&line_index, line_endings)).collect(); | ||
429 | Ok(TextDocumentEdit { text_document, edits }) | ||
430 | } | ||
431 | } | ||
432 | |||
433 | impl TryConvWith<&WorldSnapshot> for FileSystemEdit { | ||
434 | type Output = ResourceOp; | ||
435 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<ResourceOp> { | ||
436 | let res = match self { | ||
437 | FileSystemEdit::CreateFile { source_root, path } => { | ||
438 | let uri = world.path_to_uri(source_root, &path)?; | ||
439 | ResourceOp::Create(CreateFile { uri, options: None }) | ||
440 | } | ||
441 | FileSystemEdit::MoveFile { src, dst_source_root, dst_path } => { | ||
442 | let old_uri = world.file_id_to_uri(src)?; | ||
443 | let new_uri = world.path_to_uri(dst_source_root, &dst_path)?; | ||
444 | ResourceOp::Rename(RenameFile { old_uri, new_uri, options: None }) | ||
445 | } | ||
446 | }; | ||
447 | Ok(res) | ||
448 | } | ||
449 | } | ||
450 | |||
451 | impl TryConvWith<&WorldSnapshot> for &NavigationTarget { | ||
452 | type Output = Location; | ||
453 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<Location> { | ||
454 | let line_index = world.analysis().file_line_index(self.file_id())?; | ||
455 | let range = self.range(); | ||
456 | to_location(self.file_id(), range, &world, &line_index) | ||
457 | } | ||
458 | } | ||
459 | |||
460 | impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<NavigationTarget>) { | ||
461 | type Output = LocationLink; | ||
462 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<LocationLink> { | ||
463 | let (src_file_id, target) = self; | ||
464 | |||
465 | let target_uri = target.info.file_id().try_conv_with(world)?; | ||
466 | let src_line_index = world.analysis().file_line_index(src_file_id)?; | ||
467 | let tgt_line_index = world.analysis().file_line_index(target.info.file_id())?; | ||
468 | |||
469 | let target_range = target.info.full_range().conv_with(&tgt_line_index); | ||
470 | |||
471 | let target_selection_range = target | ||
472 | .info | ||
473 | .focus_range() | ||
474 | .map(|it| it.conv_with(&tgt_line_index)) | ||
475 | .unwrap_or(target_range); | ||
476 | |||
477 | let res = LocationLink { | ||
478 | origin_selection_range: Some(target.range.conv_with(&src_line_index)), | ||
479 | target_uri, | ||
480 | target_range, | ||
481 | target_selection_range, | ||
482 | }; | ||
483 | Ok(res) | ||
484 | } | ||
485 | } | ||
486 | |||
487 | impl TryConvWith<&WorldSnapshot> for (FileId, RangeInfo<Vec<NavigationTarget>>) { | ||
488 | type Output = req::GotoDefinitionResponse; | ||
489 | fn try_conv_with(self, world: &WorldSnapshot) -> Result<req::GotoTypeDefinitionResponse> { | ||
490 | let (file_id, RangeInfo { range, info: navs }) = self; | ||
491 | let links = navs | ||
492 | .into_iter() | ||
493 | .map(|nav| (file_id, RangeInfo::new(range, nav))) | ||
494 | .try_conv_with_to_vec(world)?; | ||
495 | if world.options.supports_location_link { | ||
496 | Ok(links.into()) | ||
497 | } else { | ||
498 | let locations: Vec<Location> = links | ||
499 | .into_iter() | ||
500 | .map(|link| Location { uri: link.target_uri, range: link.target_selection_range }) | ||
501 | .collect(); | ||
502 | Ok(locations.into()) | ||
503 | } | ||
504 | } | ||
505 | } | ||
506 | |||
507 | pub fn to_call_hierarchy_item( | ||
508 | file_id: FileId, | ||
509 | range: TextRange, | ||
510 | world: &WorldSnapshot, | ||
511 | line_index: &LineIndex, | ||
512 | nav: NavigationTarget, | ||
513 | ) -> Result<lsp_types::CallHierarchyItem> { | ||
514 | Ok(lsp_types::CallHierarchyItem { | ||
515 | name: nav.name().to_string(), | ||
516 | kind: nav.kind().conv(), | ||
517 | tags: None, | ||
518 | detail: nav.description().map(|it| it.to_string()), | ||
519 | uri: file_id.try_conv_with(&world)?, | ||
520 | range: nav.range().conv_with(&line_index), | ||
521 | selection_range: range.conv_with(&line_index), | ||
522 | }) | ||
523 | } | ||
524 | |||
525 | pub fn to_location( | ||
526 | file_id: FileId, | ||
527 | range: TextRange, | ||
528 | world: &WorldSnapshot, | ||
529 | line_index: &LineIndex, | ||
530 | ) -> Result<Location> { | ||
531 | let url = file_id.try_conv_with(world)?; | ||
532 | let loc = Location::new(url, range.conv_with(line_index)); | ||
533 | Ok(loc) | ||
534 | } | ||
535 | |||
536 | pub trait MapConvWith<CTX>: Sized { | ||
537 | type Output; | ||
538 | |||
539 | fn map_conv_with(self, ctx: CTX) -> ConvWithIter<Self, CTX> { | ||
540 | ConvWithIter { iter: self, ctx } | ||
541 | } | ||
542 | } | ||
543 | |||
544 | impl<CTX, I> MapConvWith<CTX> for I | ||
545 | where | ||
546 | I: Iterator, | ||
547 | I::Item: ConvWith<CTX>, | ||
548 | { | ||
549 | type Output = <I::Item as ConvWith<CTX>>::Output; | ||
550 | } | ||
551 | |||
552 | pub struct ConvWithIter<I, CTX> { | ||
553 | iter: I, | ||
554 | ctx: CTX, | ||
555 | } | ||
556 | |||
557 | impl<I, CTX> Iterator for ConvWithIter<I, CTX> | ||
558 | where | ||
559 | I: Iterator, | ||
560 | I::Item: ConvWith<CTX>, | ||
561 | CTX: Copy, | ||
562 | { | ||
563 | type Item = <I::Item as ConvWith<CTX>>::Output; | ||
564 | |||
565 | fn next(&mut self) -> Option<Self::Item> { | ||
566 | self.iter.next().map(|item| item.conv_with(self.ctx)) | ||
567 | } | ||
568 | } | ||
569 | |||
570 | pub trait TryConvWithToVec<CTX>: Sized { | ||
571 | type Output; | ||
572 | |||
573 | fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>>; | ||
574 | } | ||
575 | |||
576 | impl<I, CTX> TryConvWithToVec<CTX> for I | ||
577 | where | ||
578 | I: Iterator, | ||
579 | I::Item: TryConvWith<CTX>, | ||
580 | CTX: Copy, | ||
581 | { | ||
582 | type Output = <I::Item as TryConvWith<CTX>>::Output; | ||
583 | |||
584 | fn try_conv_with_to_vec(self, ctx: CTX) -> Result<Vec<Self::Output>> { | ||
585 | self.map(|it| it.try_conv_with(ctx)).collect() | ||
586 | } | ||
587 | } | ||
588 | |||
589 | #[cfg(test)] | ||
590 | mod tests { | ||
591 | use super::*; | ||
592 | use test_utils::extract_ranges; | ||
593 | |||
594 | #[test] | ||
595 | fn conv_fold_line_folding_only_fixup() { | ||
596 | let text = r#"<fold>mod a; | ||
597 | mod b; | ||
598 | mod c;</fold> | ||
599 | |||
600 | fn main() <fold>{ | ||
601 | if cond <fold>{ | ||
602 | a::do_a(); | ||
603 | }</fold> else <fold>{ | ||
604 | b::do_b(); | ||
605 | }</fold> | ||
606 | }</fold>"#; | ||
607 | |||
608 | let (ranges, text) = extract_ranges(text, "fold"); | ||
609 | assert_eq!(ranges.len(), 4); | ||
610 | let folds = vec![ | ||
611 | Fold { range: ranges[0], kind: FoldKind::Mods }, | ||
612 | Fold { range: ranges[1], kind: FoldKind::Block }, | ||
613 | Fold { range: ranges[2], kind: FoldKind::Block }, | ||
614 | Fold { range: ranges[3], kind: FoldKind::Block }, | ||
615 | ]; | ||
616 | |||
617 | let line_index = LineIndex::new(&text); | ||
618 | let ctx = FoldConvCtx { text: &text, line_index: &line_index, line_folding_only: true }; | ||
619 | let converted: Vec<_> = folds.into_iter().map_conv_with(&ctx).collect(); | ||
620 | |||
621 | let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)]; | ||
622 | assert_eq!(converted.len(), expected_lines.len()); | ||
623 | for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) { | ||
624 | assert_eq!(folding_range.start_line, *start_line); | ||
625 | assert_eq!(folding_range.start_character, None); | ||
626 | assert_eq!(folding_range.end_line, *end_line); | ||
627 | assert_eq!(folding_range.end_character, None); | ||
628 | } | ||
629 | } | ||
630 | } | ||
diff --git a/crates/ra_lsp_server/src/diagnostics.rs b/crates/ra_lsp_server/src/diagnostics.rs deleted file mode 100644 index e7924f0a3..000000000 --- a/crates/ra_lsp_server/src/diagnostics.rs +++ /dev/null | |||
@@ -1,87 +0,0 @@ | |||
1 | //! Bookkeeping for keeping diagnostics easily in sync with the client. | ||
2 | |||
3 | use std::{collections::HashMap, sync::Arc}; | ||
4 | |||
5 | use lsp_types::{CodeActionOrCommand, Diagnostic, Range}; | ||
6 | use ra_ide::FileId; | ||
7 | |||
8 | pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; | ||
9 | |||
10 | #[derive(Debug, Default, Clone)] | ||
11 | pub struct DiagnosticCollection { | ||
12 | pub native: HashMap<FileId, Vec<Diagnostic>>, | ||
13 | pub check: HashMap<FileId, Vec<Diagnostic>>, | ||
14 | pub check_fixes: CheckFixes, | ||
15 | } | ||
16 | |||
17 | #[derive(Debug, Clone)] | ||
18 | pub struct Fix { | ||
19 | pub range: Range, | ||
20 | pub action: CodeActionOrCommand, | ||
21 | } | ||
22 | |||
23 | #[derive(Debug)] | ||
24 | pub enum DiagnosticTask { | ||
25 | ClearCheck, | ||
26 | AddCheck(FileId, Diagnostic, Vec<CodeActionOrCommand>), | ||
27 | SetNative(FileId, Vec<Diagnostic>), | ||
28 | } | ||
29 | |||
30 | impl DiagnosticCollection { | ||
31 | pub fn clear_check(&mut self) -> Vec<FileId> { | ||
32 | Arc::make_mut(&mut self.check_fixes).clear(); | ||
33 | self.check.drain().map(|(key, _value)| key).collect() | ||
34 | } | ||
35 | |||
36 | pub fn add_check_diagnostic( | ||
37 | &mut self, | ||
38 | file_id: FileId, | ||
39 | diagnostic: Diagnostic, | ||
40 | fixes: Vec<CodeActionOrCommand>, | ||
41 | ) { | ||
42 | let diagnostics = self.check.entry(file_id).or_default(); | ||
43 | for existing_diagnostic in diagnostics.iter() { | ||
44 | if are_diagnostics_equal(&existing_diagnostic, &diagnostic) { | ||
45 | return; | ||
46 | } | ||
47 | } | ||
48 | |||
49 | let check_fixes = Arc::make_mut(&mut self.check_fixes); | ||
50 | check_fixes | ||
51 | .entry(file_id) | ||
52 | .or_default() | ||
53 | .extend(fixes.into_iter().map(|action| Fix { range: diagnostic.range, action })); | ||
54 | diagnostics.push(diagnostic); | ||
55 | } | ||
56 | |||
57 | pub fn set_native_diagnostics(&mut self, file_id: FileId, diagnostics: Vec<Diagnostic>) { | ||
58 | self.native.insert(file_id, diagnostics); | ||
59 | } | ||
60 | |||
61 | pub fn diagnostics_for(&self, file_id: FileId) -> impl Iterator<Item = &Diagnostic> { | ||
62 | let native = self.native.get(&file_id).into_iter().flatten(); | ||
63 | let check = self.check.get(&file_id).into_iter().flatten(); | ||
64 | native.chain(check) | ||
65 | } | ||
66 | |||
67 | pub fn handle_task(&mut self, task: DiagnosticTask) -> Vec<FileId> { | ||
68 | match task { | ||
69 | DiagnosticTask::ClearCheck => self.clear_check(), | ||
70 | DiagnosticTask::AddCheck(file_id, diagnostic, fixes) => { | ||
71 | self.add_check_diagnostic(file_id, diagnostic, fixes); | ||
72 | vec![file_id] | ||
73 | } | ||
74 | DiagnosticTask::SetNative(file_id, diagnostics) => { | ||
75 | self.set_native_diagnostics(file_id, diagnostics); | ||
76 | vec![file_id] | ||
77 | } | ||
78 | } | ||
79 | } | ||
80 | } | ||
81 | |||
82 | fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool { | ||
83 | left.source == right.source | ||
84 | && left.severity == right.severity | ||
85 | && left.range == right.range | ||
86 | && left.message == right.message | ||
87 | } | ||
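`check_fixes` is kept behind an `Arc` so snapshots handed to worker threads can read it cheaply, while `Arc::make_mut` gives the writer copy-on-write semantics. A short self-contained sketch of that pattern (the map contents are made up for illustration):

```rust
use std::{collections::HashMap, sync::Arc};

fn main() {
    // Writer side: mutate through `Arc::make_mut`, which clones the map only when
    // someone else still holds a reference to it.
    let mut check_fixes: Arc<HashMap<u32, Vec<String>>> = Arc::default();
    Arc::make_mut(&mut check_fixes).insert(1, vec!["add `;`".to_string()]);

    // Reader side: a snapshot is just a cheap `Arc` clone.
    let snapshot = Arc::clone(&check_fixes);

    // Clearing on the writer now forces a copy, leaving the snapshot intact.
    Arc::make_mut(&mut check_fixes).clear();

    assert_eq!(snapshot.len(), 1);
    assert!(check_fixes.is_empty());
}
```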
diff --git a/crates/ra_lsp_server/src/lib.rs b/crates/ra_lsp_server/src/lib.rs deleted file mode 100644 index 0dae30e46..000000000 --- a/crates/ra_lsp_server/src/lib.rs +++ /dev/null | |||
@@ -1,54 +0,0 @@ | |||
1 | //! Implementation of the LSP for rust-analyzer. | ||
2 | //! | ||
3 | //! This crate takes Rust-specific analysis results from ra_ide and translates | ||
4 | //! them into LSP types. | ||
5 | //! | ||
6 | //! It is also the root of all state. The `world` module defines the bulk of the | ||
7 | //! state, and the `main_loop` module defines the rules for modifying it. | ||
8 | //! | ||
9 | //! The `cli` submodule implements some batch-processing analysis, primarily as | ||
10 | //! a debugging aid. | ||
11 | #![recursion_limit = "512"] | ||
12 | |||
13 | pub mod cli; | ||
14 | |||
15 | #[allow(unused)] | ||
16 | macro_rules! println { | ||
17 | ($($tt:tt)*) => { | ||
18 | compile_error!("stdout is locked, use eprintln") | ||
19 | }; | ||
20 | } | ||
21 | |||
22 | #[allow(unused)] | ||
23 | macro_rules! print { | ||
24 | ($($tt:tt)*) => { | ||
25 | compile_error!("stdout is locked, use eprint") | ||
26 | }; | ||
27 | } | ||
28 | |||
29 | mod vfs_glob; | ||
30 | mod caps; | ||
31 | mod cargo_target_spec; | ||
32 | mod conv; | ||
33 | mod main_loop; | ||
34 | mod markdown; | ||
35 | pub mod req; | ||
36 | mod config; | ||
37 | mod world; | ||
38 | mod diagnostics; | ||
39 | |||
40 | use serde::de::DeserializeOwned; | ||
41 | |||
42 | pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; | ||
43 | pub use crate::{ | ||
44 | caps::server_capabilities, | ||
45 | config::ServerConfig, | ||
46 | main_loop::LspError, | ||
47 | main_loop::{main_loop, show_message}, | ||
48 | }; | ||
49 | |||
50 | pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> { | ||
51 | let res = T::deserialize(&json) | ||
52 | .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?; | ||
53 | Ok(res) | ||
54 | } | ||
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs deleted file mode 100644 index 67d8a5f6f..000000000 --- a/crates/ra_lsp_server/src/main_loop.rs +++ /dev/null | |||
@@ -1,889 +0,0 @@ | |||
1 | //! The main loop of `ra_lsp_server`, responsible for dispatching LSP | ||
2 | //! requests/replies and notifications back to the client. | ||
3 | |||
4 | mod handlers; | ||
5 | mod subscriptions; | ||
6 | pub(crate) mod pending_requests; | ||
7 | |||
8 | use std::{ | ||
9 | env, | ||
10 | error::Error, | ||
11 | fmt, panic, | ||
12 | path::PathBuf, | ||
13 | sync::Arc, | ||
14 | time::{Duration, Instant}, | ||
15 | }; | ||
16 | |||
17 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; | ||
18 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | ||
19 | use lsp_types::{ClientCapabilities, NumberOrString}; | ||
20 | use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckTask}; | ||
21 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; | ||
22 | use ra_prof::profile; | ||
23 | use ra_vfs::{VfsFile, VfsTask, Watch}; | ||
24 | use relative_path::RelativePathBuf; | ||
25 | use rustc_hash::FxHashSet; | ||
26 | use serde::{de::DeserializeOwned, Serialize}; | ||
27 | use threadpool::ThreadPool; | ||
28 | |||
29 | use crate::{ | ||
30 | diagnostics::DiagnosticTask, | ||
31 | main_loop::{ | ||
32 | pending_requests::{PendingRequest, PendingRequests}, | ||
33 | subscriptions::Subscriptions, | ||
34 | }, | ||
35 | req, | ||
36 | world::{Options, WorldSnapshot, WorldState}, | ||
37 | Result, ServerConfig, | ||
38 | }; | ||
39 | |||
40 | #[derive(Debug)] | ||
41 | pub struct LspError { | ||
42 | pub code: i32, | ||
43 | pub message: String, | ||
44 | } | ||
45 | |||
46 | impl LspError { | ||
47 | pub fn new(code: i32, message: String) -> LspError { | ||
48 | LspError { code, message } | ||
49 | } | ||
50 | } | ||
51 | |||
52 | impl fmt::Display for LspError { | ||
53 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
54 | write!(f, "Language Server request failed with {}. ({})", self.code, self.message) | ||
55 | } | ||
56 | } | ||
57 | |||
58 | impl Error for LspError {} | ||
59 | |||
60 | pub fn main_loop( | ||
61 | ws_roots: Vec<PathBuf>, | ||
62 | client_caps: ClientCapabilities, | ||
63 | config: ServerConfig, | ||
64 | connection: Connection, | ||
65 | ) -> Result<()> { | ||
66 | log::info!("server_config: {:#?}", config); | ||
67 | |||
68 | // The Windows scheduler implements priority boosts: if a thread waits for an | ||
69 | // event (like a condvar) and the event fires, the thread's priority is | ||
70 | // temporarily bumped. This optimization backfires in our case: each time the | ||
71 | // `main_loop` schedules a task to run on a threadpool, the worker threads | ||
72 | // get a higher priority, and (on a machine with fewer cores) displace the | ||
73 | // main loop! We work around this by marking the main loop as a | ||
74 | // higher-priority thread. | ||
75 | // | ||
76 | // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities | ||
77 | // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts | ||
78 | // https://github.com/rust-analyzer/rust-analyzer/issues/2835 | ||
79 | #[cfg(windows)] | ||
80 | unsafe { | ||
81 | use winapi::um::processthreadsapi::*; | ||
82 | let thread = GetCurrentThread(); | ||
83 | let thread_priority_above_normal = 1; | ||
84 | SetThreadPriority(thread, thread_priority_above_normal); | ||
85 | } | ||
86 | |||
87 | let mut loop_state = LoopState::default(); | ||
88 | let mut world_state = { | ||
89 | let feature_flags = { | ||
90 | let mut ff = FeatureFlags::default(); | ||
91 | for (flag, value) in config.feature_flags { | ||
92 | if ff.set(flag.as_str(), value).is_err() { | ||
93 | log::error!("unknown feature flag: {:?}", flag); | ||
94 | show_message( | ||
95 | req::MessageType::Error, | ||
96 | format!("unknown feature flag: {:?}", flag), | ||
97 | &connection.sender, | ||
98 | ); | ||
99 | } | ||
100 | } | ||
101 | ff | ||
102 | }; | ||
103 | log::info!("feature_flags: {:#?}", feature_flags); | ||
104 | |||
105 | // FIXME: support dynamic workspace loading. | ||
106 | let workspaces = { | ||
107 | let mut loaded_workspaces = Vec::new(); | ||
108 | for ws_root in &ws_roots { | ||
109 | let workspace = ra_project_model::ProjectWorkspace::discover_with_sysroot( | ||
110 | ws_root.as_path(), | ||
111 | config.with_sysroot, | ||
112 | &config.cargo_features, | ||
113 | ); | ||
114 | match workspace { | ||
115 | Ok(workspace) => loaded_workspaces.push(workspace), | ||
116 | Err(e) => { | ||
117 | log::error!("loading workspace failed: {}", e); | ||
118 | if let Some(ra_project_model::CargoTomlNotFoundError(_)) = e.downcast_ref() | ||
119 | { | ||
120 | if !feature_flags.get("notifications.cargo-toml-not-found") { | ||
121 | continue; | ||
122 | } | ||
123 | } | ||
124 | show_message( | ||
125 | req::MessageType::Error, | ||
126 | format!("rust-analyzer failed to load workspace: {}", e), | ||
127 | &connection.sender, | ||
128 | ); | ||
129 | } | ||
130 | } | ||
131 | } | ||
132 | loaded_workspaces | ||
133 | }; | ||
134 | |||
135 | let globs = config | ||
136 | .exclude_globs | ||
137 | .iter() | ||
138 | .map(|glob| crate::vfs_glob::Glob::new(glob)) | ||
139 | .collect::<std::result::Result<Vec<_>, _>>()?; | ||
140 | |||
141 | if config.use_client_watching { | ||
142 | let registration_options = req::DidChangeWatchedFilesRegistrationOptions { | ||
143 | watchers: workspaces | ||
144 | .iter() | ||
145 | .flat_map(|ws| ws.to_roots()) | ||
146 | .filter(|root| root.is_member()) | ||
147 | .map(|root| format!("{}/**/*.rs", root.path().display())) | ||
148 | .map(|glob_pattern| req::FileSystemWatcher { glob_pattern, kind: None }) | ||
149 | .collect(), | ||
150 | }; | ||
151 | let registration = req::Registration { | ||
152 | id: "file-watcher".to_string(), | ||
153 | method: "workspace/didChangeWatchedFiles".to_string(), | ||
154 | register_options: Some(serde_json::to_value(registration_options).unwrap()), | ||
155 | }; | ||
156 | let params = req::RegistrationParams { registrations: vec![registration] }; | ||
157 | let request = | ||
158 | request_new::<req::RegisterCapability>(loop_state.next_request_id(), params); | ||
159 | connection.sender.send(request.into()).unwrap(); | ||
160 | } | ||
161 | |||
162 | let options = { | ||
163 | let text_document_caps = client_caps.text_document.as_ref(); | ||
164 | Options { | ||
165 | publish_decorations: config.publish_decorations, | ||
166 | supports_location_link: text_document_caps | ||
167 | .and_then(|it| it.definition) | ||
168 | .and_then(|it| it.link_support) | ||
169 | .unwrap_or(false), | ||
170 | line_folding_only: text_document_caps | ||
171 | .and_then(|it| it.folding_range.as_ref()) | ||
172 | .and_then(|it| it.line_folding_only) | ||
173 | .unwrap_or(false), | ||
174 | max_inlay_hint_length: config.max_inlay_hint_length, | ||
175 | cargo_watch: CheckOptions { | ||
176 | enable: config.cargo_watch_enable, | ||
177 | args: config.cargo_watch_args, | ||
178 | command: config.cargo_watch_command, | ||
179 | all_targets: config.cargo_watch_all_targets, | ||
180 | }, | ||
181 | rustfmt_args: config.rustfmt_args, | ||
182 | } | ||
183 | }; | ||
184 | |||
185 | WorldState::new( | ||
186 | ws_roots, | ||
187 | workspaces, | ||
188 | config.lru_capacity, | ||
189 | &globs, | ||
190 | Watch(!config.use_client_watching), | ||
191 | options, | ||
192 | feature_flags, | ||
193 | ) | ||
194 | }; | ||
195 | |||
196 | let pool = ThreadPool::default(); | ||
197 | let (task_sender, task_receiver) = unbounded::<Task>(); | ||
198 | let (libdata_sender, libdata_receiver) = unbounded::<LibraryData>(); | ||
199 | |||
200 | log::info!("server initialized, serving requests"); | ||
201 | { | ||
202 | let task_sender = task_sender; | ||
203 | let libdata_sender = libdata_sender; | ||
204 | loop { | ||
205 | log::trace!("selecting"); | ||
206 | let event = select! { | ||
207 | recv(&connection.receiver) -> msg => match msg { | ||
208 | Ok(msg) => Event::Msg(msg), | ||
209 | Err(RecvError) => Err("client exited without shutdown")?, | ||
210 | }, | ||
211 | recv(task_receiver) -> task => Event::Task(task.unwrap()), | ||
212 | recv(world_state.task_receiver) -> task => match task { | ||
213 | Ok(task) => Event::Vfs(task), | ||
214 | Err(RecvError) => Err("vfs died")?, | ||
215 | }, | ||
216 | recv(libdata_receiver) -> data => Event::Lib(data.unwrap()), | ||
217 | recv(world_state.check_watcher.task_recv) -> task => match task { | ||
218 | Ok(task) => Event::CheckWatcher(task), | ||
219 | Err(RecvError) => Err("check watcher died")?, | ||
220 | } | ||
221 | }; | ||
222 | if let Event::Msg(Message::Request(req)) = &event { | ||
223 | if connection.handle_shutdown(&req)? { | ||
224 | break; | ||
225 | }; | ||
226 | } | ||
227 | loop_turn( | ||
228 | &pool, | ||
229 | &task_sender, | ||
230 | &libdata_sender, | ||
231 | &connection, | ||
232 | &mut world_state, | ||
233 | &mut loop_state, | ||
234 | event, | ||
235 | )?; | ||
236 | } | ||
237 | } | ||
238 | world_state.analysis_host.request_cancellation(); | ||
239 | log::info!("waiting for tasks to finish..."); | ||
240 | task_receiver.into_iter().for_each(|task| { | ||
241 | on_task(task, &connection.sender, &mut loop_state.pending_requests, &mut world_state) | ||
242 | }); | ||
243 | libdata_receiver.into_iter().for_each(drop); | ||
244 | log::info!("...tasks have finished"); | ||
245 | log::info!("joining threadpool..."); | ||
246 | drop(pool); | ||
247 | log::info!("...threadpool has finished"); | ||
248 | |||
249 | let vfs = Arc::try_unwrap(world_state.vfs).expect("all snapshots should be dead"); | ||
250 | drop(vfs); | ||
251 | |||
252 | Ok(()) | ||
253 | } | ||
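The loop above multiplexes several event sources (client messages, finished tasks, VFS changes, library indexing, cargo check) with crossbeam's `select!`. A reduced, self-contained sketch of that selection pattern, using two invented channels of plain strings rather than the real ones:

```rust
use crossbeam_channel::{select, unbounded};

fn main() {
    // Two of the several sources the real loop listens on, reduced to strings.
    let (client_tx, client_rx) = unbounded::<String>();
    let (task_tx, task_rx) = unbounded::<String>();

    client_tx.send("textDocument/hover request".to_string()).unwrap();
    task_tx.send("diagnostics computed".to_string()).unwrap();

    for _ in 0..2 {
        // Whichever channel has a message ready is handled first.
        select! {
            recv(client_rx) -> msg => eprintln!("client message: {}", msg.unwrap()),
            recv(task_rx) -> msg => eprintln!("finished task: {}", msg.unwrap()),
        }
    }
}
```

The sketch prints to stderr, matching the crate's rule that stdout is reserved for the LSP wire protocol.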
254 | |||
255 | #[derive(Debug)] | ||
256 | enum Task { | ||
257 | Respond(Response), | ||
258 | Notify(Notification), | ||
259 | Diagnostic(DiagnosticTask), | ||
260 | } | ||
261 | |||
262 | enum Event { | ||
263 | Msg(Message), | ||
264 | Task(Task), | ||
265 | Vfs(VfsTask), | ||
266 | Lib(LibraryData), | ||
267 | CheckWatcher(CheckTask), | ||
268 | } | ||
269 | |||
270 | impl fmt::Debug for Event { | ||
271 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
272 | let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter| { | ||
273 | f.debug_struct("Notification").field("method", ¬.method).finish() | ||
274 | }; | ||
275 | |||
276 | match self { | ||
277 | Event::Msg(Message::Notification(not)) => { | ||
278 | if notification_is::<req::DidOpenTextDocument>(not) | ||
279 | || notification_is::<req::DidChangeTextDocument>(not) | ||
280 | { | ||
281 | return debug_verbose_not(not, f); | ||
282 | } | ||
283 | } | ||
284 | Event::Task(Task::Notify(not)) => { | ||
285 | if notification_is::<req::PublishDecorations>(not) | ||
286 | || notification_is::<req::PublishDiagnostics>(not) | ||
287 | { | ||
288 | return debug_verbose_not(not, f); | ||
289 | } | ||
290 | } | ||
291 | Event::Task(Task::Respond(resp)) => { | ||
292 | return f | ||
293 | .debug_struct("Response") | ||
294 | .field("id", &resp.id) | ||
295 | .field("error", &resp.error) | ||
296 | .finish(); | ||
297 | } | ||
298 | _ => (), | ||
299 | } | ||
300 | match self { | ||
301 | Event::Msg(it) => fmt::Debug::fmt(it, f), | ||
302 | Event::Task(it) => fmt::Debug::fmt(it, f), | ||
303 | Event::Vfs(it) => fmt::Debug::fmt(it, f), | ||
304 | Event::Lib(it) => fmt::Debug::fmt(it, f), | ||
305 | Event::CheckWatcher(it) => fmt::Debug::fmt(it, f), | ||
306 | } | ||
307 | } | ||
308 | } | ||
309 | |||
310 | #[derive(Debug, Default)] | ||
311 | struct LoopState { | ||
312 | next_request_id: u64, | ||
313 | pending_responses: FxHashSet<RequestId>, | ||
314 | pending_requests: PendingRequests, | ||
315 | subscriptions: Subscriptions, | ||
316 | // We try not to index more than MAX_IN_FLIGHT_LIBS libraries at the same | ||
317 | // time, so that a thread is always ready to react to input. | ||
318 | in_flight_libraries: usize, | ||
319 | pending_libraries: Vec<(SourceRootId, Vec<(FileId, RelativePathBuf, Arc<String>)>)>, | ||
320 | workspace_loaded: bool, | ||
321 | } | ||
322 | |||
323 | impl LoopState { | ||
324 | fn next_request_id(&mut self) -> RequestId { | ||
325 | self.next_request_id += 1; | ||
326 | let res: RequestId = self.next_request_id.into(); | ||
327 | let inserted = self.pending_responses.insert(res.clone()); | ||
328 | assert!(inserted); | ||
329 | res | ||
330 | } | ||
331 | } | ||
332 | |||
333 | fn loop_turn( | ||
334 | pool: &ThreadPool, | ||
335 | task_sender: &Sender<Task>, | ||
336 | libdata_sender: &Sender<LibraryData>, | ||
337 | connection: &Connection, | ||
338 | world_state: &mut WorldState, | ||
339 | loop_state: &mut LoopState, | ||
340 | event: Event, | ||
341 | ) -> Result<()> { | ||
342 | let loop_start = Instant::now(); | ||
343 | |||
344 | // NOTE: don't count the blocking select! call as part of the loop-turn time | ||
345 | let _p = profile("main_loop_inner/loop-turn"); | ||
346 | log::info!("loop turn = {:?}", event); | ||
347 | let queue_count = pool.queued_count(); | ||
348 | if queue_count > 0 { | ||
349 | log::info!("queued count = {}", queue_count); | ||
350 | } | ||
351 | |||
352 | match event { | ||
353 | Event::Task(task) => { | ||
354 | on_task(task, &connection.sender, &mut loop_state.pending_requests, world_state); | ||
355 | world_state.maybe_collect_garbage(); | ||
356 | } | ||
357 | Event::Vfs(task) => { | ||
358 | world_state.vfs.write().handle_task(task); | ||
359 | } | ||
360 | Event::Lib(lib) => { | ||
361 | world_state.add_lib(lib); | ||
362 | world_state.maybe_collect_garbage(); | ||
363 | loop_state.in_flight_libraries -= 1; | ||
364 | } | ||
365 | Event::CheckWatcher(task) => on_check_task(task, world_state, task_sender)?, | ||
366 | Event::Msg(msg) => match msg { | ||
367 | Message::Request(req) => on_request( | ||
368 | world_state, | ||
369 | &mut loop_state.pending_requests, | ||
370 | pool, | ||
371 | task_sender, | ||
372 | &connection.sender, | ||
373 | loop_start, | ||
374 | req, | ||
375 | )?, | ||
376 | Message::Notification(not) => { | ||
377 | on_notification( | ||
378 | &connection.sender, | ||
379 | world_state, | ||
380 | &mut loop_state.pending_requests, | ||
381 | &mut loop_state.subscriptions, | ||
382 | not, | ||
383 | )?; | ||
384 | } | ||
385 | Message::Response(resp) => { | ||
386 | let removed = loop_state.pending_responses.remove(&resp.id); | ||
387 | if !removed { | ||
388 | log::error!("unexpected response: {:?}", resp) | ||
389 | } | ||
390 | } | ||
391 | }, | ||
392 | }; | ||
393 | |||
394 | let mut state_changed = false; | ||
395 | if let Some(changes) = world_state.process_changes() { | ||
396 | state_changed = true; | ||
397 | loop_state.pending_libraries.extend(changes); | ||
398 | } | ||
399 | |||
400 | let max_in_flight_libs = pool.max_count().saturating_sub(2).max(1); | ||
401 | while loop_state.in_flight_libraries < max_in_flight_libs | ||
402 | && !loop_state.pending_libraries.is_empty() | ||
403 | { | ||
404 | let (root, files) = loop_state.pending_libraries.pop().unwrap(); | ||
405 | loop_state.in_flight_libraries += 1; | ||
406 | let sender = libdata_sender.clone(); | ||
407 | pool.execute(move || { | ||
408 | log::info!("indexing {:?} ... ", root); | ||
409 | let data = LibraryData::prepare(root, files); | ||
410 | sender.send(data).unwrap(); | ||
411 | }); | ||
412 | } | ||
413 | |||
414 | if !loop_state.workspace_loaded | ||
415 | && world_state.roots_to_scan == 0 | ||
416 | && loop_state.pending_libraries.is_empty() | ||
417 | && loop_state.in_flight_libraries == 0 | ||
418 | { | ||
419 | loop_state.workspace_loaded = true; | ||
420 | let n_packages: usize = world_state.workspaces.iter().map(|it| it.n_packages()).sum(); | ||
421 | if world_state.feature_flags().get("notifications.workspace-loaded") { | ||
422 | let msg = format!("workspace loaded, {} rust packages", n_packages); | ||
423 | show_message(req::MessageType::Info, msg, &connection.sender); | ||
424 | } | ||
425 | world_state.check_watcher.update(); | ||
426 | } | ||
427 | |||
428 | if state_changed { | ||
429 | update_file_notifications_on_threadpool( | ||
430 | pool, | ||
431 | world_state.snapshot(), | ||
432 | world_state.options.publish_decorations, | ||
433 | task_sender.clone(), | ||
434 | loop_state.subscriptions.subscriptions(), | ||
435 | ) | ||
436 | } | ||
437 | |||
438 | let loop_duration = loop_start.elapsed(); | ||
439 | if loop_duration > Duration::from_millis(100) { | ||
440 | log::error!("overly long loop turn: {:?}", loop_duration); | ||
441 | if env::var("RA_PROFILE").is_ok() { | ||
442 | show_message( | ||
443 | req::MessageType::Error, | ||
444 | format!("overly long loop turn: {:?}", loop_duration), | ||
445 | &connection.sender, | ||
446 | ); | ||
447 | } | ||
448 | } | ||
449 | |||
450 | Ok(()) | ||
451 | } | ||
452 | |||
453 | fn on_task( | ||
454 | task: Task, | ||
455 | msg_sender: &Sender<Message>, | ||
456 | pending_requests: &mut PendingRequests, | ||
457 | state: &mut WorldState, | ||
458 | ) { | ||
459 | match task { | ||
460 | Task::Respond(response) => { | ||
461 | if let Some(completed) = pending_requests.finish(&response.id) { | ||
462 | log::info!("handled req#{} in {:?}", completed.id, completed.duration); | ||
463 | state.complete_request(completed); | ||
464 | msg_sender.send(response.into()).unwrap(); | ||
465 | } | ||
466 | } | ||
467 | Task::Notify(n) => { | ||
468 | msg_sender.send(n.into()).unwrap(); | ||
469 | } | ||
470 | Task::Diagnostic(task) => on_diagnostic_task(task, msg_sender, state), | ||
471 | } | ||
472 | } | ||
473 | |||
474 | fn on_request( | ||
475 | world: &mut WorldState, | ||
476 | pending_requests: &mut PendingRequests, | ||
477 | pool: &ThreadPool, | ||
478 | task_sender: &Sender<Task>, | ||
479 | msg_sender: &Sender<Message>, | ||
480 | request_received: Instant, | ||
481 | req: Request, | ||
482 | ) -> Result<()> { | ||
483 | let mut pool_dispatcher = PoolDispatcher { | ||
484 | req: Some(req), | ||
485 | pool, | ||
486 | world, | ||
487 | task_sender, | ||
488 | msg_sender, | ||
489 | pending_requests, | ||
490 | request_received, | ||
491 | }; | ||
492 | pool_dispatcher | ||
493 | .on_sync::<req::CollectGarbage>(|s, ()| Ok(s.collect_garbage()))? | ||
494 | .on_sync::<req::JoinLines>(|s, p| handlers::handle_join_lines(s.snapshot(), p))? | ||
495 | .on_sync::<req::OnEnter>(|s, p| handlers::handle_on_enter(s.snapshot(), p))? | ||
496 | .on_sync::<req::SelectionRangeRequest>(|s, p| { | ||
497 | handlers::handle_selection_range(s.snapshot(), p) | ||
498 | })? | ||
499 | .on_sync::<req::FindMatchingBrace>(|s, p| { | ||
500 | handlers::handle_find_matching_brace(s.snapshot(), p) | ||
501 | })? | ||
502 | .on::<req::AnalyzerStatus>(handlers::handle_analyzer_status)? | ||
503 | .on::<req::SyntaxTree>(handlers::handle_syntax_tree)? | ||
504 | .on::<req::ExpandMacro>(handlers::handle_expand_macro)? | ||
505 | .on::<req::OnTypeFormatting>(handlers::handle_on_type_formatting)? | ||
506 | .on::<req::DocumentSymbolRequest>(handlers::handle_document_symbol)? | ||
507 | .on::<req::WorkspaceSymbol>(handlers::handle_workspace_symbol)? | ||
508 | .on::<req::GotoDefinition>(handlers::handle_goto_definition)? | ||
509 | .on::<req::GotoImplementation>(handlers::handle_goto_implementation)? | ||
510 | .on::<req::GotoTypeDefinition>(handlers::handle_goto_type_definition)? | ||
511 | .on::<req::ParentModule>(handlers::handle_parent_module)? | ||
512 | .on::<req::Runnables>(handlers::handle_runnables)? | ||
513 | .on::<req::DecorationsRequest>(handlers::handle_decorations)? | ||
514 | .on::<req::Completion>(handlers::handle_completion)? | ||
515 | .on::<req::CodeActionRequest>(handlers::handle_code_action)? | ||
516 | .on::<req::CodeLensRequest>(handlers::handle_code_lens)? | ||
517 | .on::<req::CodeLensResolve>(handlers::handle_code_lens_resolve)? | ||
518 | .on::<req::FoldingRangeRequest>(handlers::handle_folding_range)? | ||
519 | .on::<req::SignatureHelpRequest>(handlers::handle_signature_help)? | ||
520 | .on::<req::HoverRequest>(handlers::handle_hover)? | ||
521 | .on::<req::PrepareRenameRequest>(handlers::handle_prepare_rename)? | ||
522 | .on::<req::Rename>(handlers::handle_rename)? | ||
523 | .on::<req::References>(handlers::handle_references)? | ||
524 | .on::<req::Formatting>(handlers::handle_formatting)? | ||
525 | .on::<req::DocumentHighlightRequest>(handlers::handle_document_highlight)? | ||
526 | .on::<req::InlayHints>(handlers::handle_inlay_hints)? | ||
527 | .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)? | ||
528 | .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)? | ||
529 | .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)? | ||
530 | .on::<req::Ssr>(handlers::handle_ssr)? | ||
531 | .finish(); | ||
532 | Ok(()) | ||
533 | } | ||
534 | |||
535 | fn on_notification( | ||
536 | msg_sender: &Sender<Message>, | ||
537 | state: &mut WorldState, | ||
538 | pending_requests: &mut PendingRequests, | ||
539 | subs: &mut Subscriptions, | ||
540 | not: Notification, | ||
541 | ) -> Result<()> { | ||
542 | let not = match notification_cast::<req::Cancel>(not) { | ||
543 | Ok(params) => { | ||
544 | let id: RequestId = match params.id { | ||
545 | NumberOrString::Number(id) => id.into(), | ||
546 | NumberOrString::String(id) => id.into(), | ||
547 | }; | ||
548 | if pending_requests.cancel(&id) { | ||
549 | let response = Response::new_err( | ||
550 | id, | ||
551 | ErrorCode::RequestCanceled as i32, | ||
552 | "canceled by client".to_string(), | ||
553 | ); | ||
554 | msg_sender.send(response.into()).unwrap() | ||
555 | } | ||
556 | return Ok(()); | ||
557 | } | ||
558 | Err(not) => not, | ||
559 | }; | ||
560 | let not = match notification_cast::<req::DidOpenTextDocument>(not) { | ||
561 | Ok(params) => { | ||
562 | let uri = params.text_document.uri; | ||
563 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | ||
564 | if let Some(file_id) = | ||
565 | state.vfs.write().add_file_overlay(&path, params.text_document.text) | ||
566 | { | ||
567 | subs.add_sub(FileId(file_id.0)); | ||
568 | } | ||
569 | return Ok(()); | ||
570 | } | ||
571 | Err(not) => not, | ||
572 | }; | ||
573 | let not = match notification_cast::<req::DidChangeTextDocument>(not) { | ||
574 | Ok(mut params) => { | ||
575 | let uri = params.text_document.uri; | ||
576 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | ||
577 | let text = | ||
578 | params.content_changes.pop().ok_or_else(|| "empty changes".to_string())?.text; | ||
579 | state.vfs.write().change_file_overlay(path.as_path(), text); | ||
580 | return Ok(()); | ||
581 | } | ||
582 | Err(not) => not, | ||
583 | }; | ||
584 | let not = match notification_cast::<req::DidSaveTextDocument>(not) { | ||
585 | Ok(_params) => { | ||
586 | state.check_watcher.update(); | ||
587 | return Ok(()); | ||
588 | } | ||
589 | Err(not) => not, | ||
590 | }; | ||
591 | let not = match notification_cast::<req::DidCloseTextDocument>(not) { | ||
592 | Ok(params) => { | ||
593 | let uri = params.text_document.uri; | ||
594 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | ||
595 | if let Some(file_id) = state.vfs.write().remove_file_overlay(path.as_path()) { | ||
596 | subs.remove_sub(FileId(file_id.0)); | ||
597 | } | ||
598 | let params = | ||
599 | req::PublishDiagnosticsParams { uri, diagnostics: Vec::new(), version: None }; | ||
600 | let not = notification_new::<req::PublishDiagnostics>(params); | ||
601 | msg_sender.send(not.into()).unwrap(); | ||
602 | return Ok(()); | ||
603 | } | ||
604 | Err(not) => not, | ||
605 | }; | ||
606 | let not = match notification_cast::<req::DidChangeConfiguration>(not) { | ||
607 | Ok(_params) => { | ||
608 | return Ok(()); | ||
609 | } | ||
610 | Err(not) => not, | ||
611 | }; | ||
612 | let not = match notification_cast::<req::DidChangeWatchedFiles>(not) { | ||
613 | Ok(params) => { | ||
614 | let mut vfs = state.vfs.write(); | ||
615 | for change in params.changes { | ||
616 | let uri = change.uri; | ||
617 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | ||
618 | vfs.notify_changed(path) | ||
619 | } | ||
620 | return Ok(()); | ||
621 | } | ||
622 | Err(not) => not, | ||
623 | }; | ||
624 | log::error!("unhandled notification: {:?}", not); | ||
625 | Ok(()) | ||
626 | } | ||
627 | |||
628 | fn on_check_task( | ||
629 | task: CheckTask, | ||
630 | world_state: &mut WorldState, | ||
631 | task_sender: &Sender<Task>, | ||
632 | ) -> Result<()> { | ||
633 | match task { | ||
634 | CheckTask::ClearDiagnostics => { | ||
635 | task_sender.send(Task::Diagnostic(DiagnosticTask::ClearCheck))?; | ||
636 | } | ||
637 | |||
638 | CheckTask::AddDiagnostic { url, diagnostic, fixes } => { | ||
639 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; | ||
640 | let file_id = match world_state.vfs.read().path2file(&path) { | ||
641 | Some(file) => FileId(file.0), | ||
642 | None => { | ||
643 | log::error!("File with cargo diagnostic not found in VFS: {}", path.display()); | ||
644 | return Ok(()); | ||
645 | } | ||
646 | }; | ||
647 | |||
648 | task_sender | ||
649 | .send(Task::Diagnostic(DiagnosticTask::AddCheck(file_id, diagnostic, fixes)))?; | ||
650 | } | ||
651 | |||
652 | CheckTask::Status(progress) => { | ||
653 | let params = req::ProgressParams { | ||
654 | token: req::ProgressToken::String("rustAnalyzer/cargoWatcher".to_string()), | ||
655 | value: req::ProgressParamsValue::WorkDone(progress), | ||
656 | }; | ||
657 | let not = notification_new::<req::Progress>(params); | ||
658 | task_sender.send(Task::Notify(not)).unwrap(); | ||
659 | } | ||
660 | }; | ||
661 | |||
662 | Ok(()) | ||
663 | } | ||
664 | |||
665 | fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state: &mut WorldState) { | ||
666 | let subscriptions = state.diagnostics.handle_task(task); | ||
667 | |||
668 | for file_id in subscriptions { | ||
669 | let path = state.vfs.read().file2path(VfsFile(file_id.0)); | ||
670 | let uri = match url_from_path_with_drive_lowercasing(&path) { | ||
671 | Ok(uri) => uri, | ||
672 | Err(err) => { | ||
673 | log::error!("Couldn't convert path to url ({}): {:?}", err, path.to_string_lossy()); | ||
674 | continue; | ||
675 | } | ||
676 | }; | ||
677 | |||
678 | let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect(); | ||
679 | let params = req::PublishDiagnosticsParams { uri, diagnostics, version: None }; | ||
680 | let not = notification_new::<req::PublishDiagnostics>(params); | ||
681 | msg_sender.send(not.into()).unwrap(); | ||
682 | } | ||
683 | } | ||
684 | |||
685 | struct PoolDispatcher<'a> { | ||
686 | req: Option<Request>, | ||
687 | pool: &'a ThreadPool, | ||
688 | world: &'a mut WorldState, | ||
689 | pending_requests: &'a mut PendingRequests, | ||
690 | msg_sender: &'a Sender<Message>, | ||
691 | task_sender: &'a Sender<Task>, | ||
692 | request_received: Instant, | ||
693 | } | ||
694 | |||
695 | impl<'a> PoolDispatcher<'a> { | ||
696 | /// Dispatches the request onto the current thread | ||
697 | fn on_sync<R>( | ||
698 | &mut self, | ||
699 | f: fn(&mut WorldState, R::Params) -> Result<R::Result>, | ||
700 | ) -> Result<&mut Self> | ||
701 | where | ||
702 | R: req::Request + 'static, | ||
703 | R::Params: DeserializeOwned + panic::UnwindSafe + 'static, | ||
704 | R::Result: Serialize + 'static, | ||
705 | { | ||
706 | let (id, params) = match self.parse::<R>() { | ||
707 | Some(it) => it, | ||
708 | None => { | ||
709 | return Ok(self); | ||
710 | } | ||
711 | }; | ||
712 | let world = panic::AssertUnwindSafe(&mut *self.world); | ||
713 | let task = panic::catch_unwind(move || { | ||
714 | let result = f(world.0, params); | ||
715 | result_to_task::<R>(id, result) | ||
716 | }) | ||
717 | .map_err(|_| format!("sync task {:?} panicked", R::METHOD))?; | ||
718 | on_task(task, self.msg_sender, self.pending_requests, self.world); | ||
719 | Ok(self) | ||
720 | } | ||
721 | |||
722 | /// Dispatches the request onto the thread pool | ||
723 | fn on<R>(&mut self, f: fn(WorldSnapshot, R::Params) -> Result<R::Result>) -> Result<&mut Self> | ||
724 | where | ||
725 | R: req::Request + 'static, | ||
726 | R::Params: DeserializeOwned + Send + 'static, | ||
727 | R::Result: Serialize + 'static, | ||
728 | { | ||
729 | let (id, params) = match self.parse::<R>() { | ||
730 | Some(it) => it, | ||
731 | None => { | ||
732 | return Ok(self); | ||
733 | } | ||
734 | }; | ||
735 | |||
736 | self.pool.execute({ | ||
737 | let world = self.world.snapshot(); | ||
738 | let sender = self.task_sender.clone(); | ||
739 | move || { | ||
740 | let result = f(world, params); | ||
741 | let task = result_to_task::<R>(id, result); | ||
742 | sender.send(task).unwrap(); | ||
743 | } | ||
744 | }); | ||
745 | |||
746 | Ok(self) | ||
747 | } | ||
748 | |||
749 | fn parse<R>(&mut self) -> Option<(RequestId, R::Params)> | ||
750 | where | ||
751 | R: req::Request + 'static, | ||
752 | R::Params: DeserializeOwned + 'static, | ||
753 | { | ||
754 | let req = self.req.take()?; | ||
755 | let (id, params) = match req.extract::<R::Params>(R::METHOD) { | ||
756 | Ok(it) => it, | ||
757 | Err(req) => { | ||
758 | self.req = Some(req); | ||
759 | return None; | ||
760 | } | ||
761 | }; | ||
762 | self.pending_requests.start(PendingRequest { | ||
763 | id: id.clone(), | ||
764 | method: R::METHOD.to_string(), | ||
765 | received: self.request_received, | ||
766 | }); | ||
767 | Some((id, params)) | ||
768 | } | ||
769 | |||
770 | fn finish(&mut self) { | ||
771 | match self.req.take() { | ||
772 | None => (), | ||
773 | Some(req) => { | ||
774 | log::error!("unknown request: {:?}", req); | ||
775 | let resp = Response::new_err( | ||
776 | req.id, | ||
777 | ErrorCode::MethodNotFound as i32, | ||
778 | "unknown request".to_string(), | ||
779 | ); | ||
780 | self.msg_sender.send(resp.into()).unwrap(); | ||
781 | } | ||
782 | } | ||
783 | } | ||
784 | } | ||
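// Usage sketch (added for clarity; not part of the original file; `dispatcher` is a
// placeholder for a constructed `PoolDispatcher`): handlers are hooked up by chaining
// one `on`/`on_sync` call per request type and closing the chain with `finish()`,
// roughly like:
//
//     dispatcher
//         .on::<req::SyntaxTree>(handlers::handle_syntax_tree)?
//         .on::<req::Runnables>(handlers::handle_runnables)?
//         .finish();
//
// `on_sync` runs its handler on the main-loop thread against `&mut WorldState`, while
// `on` takes a `WorldSnapshot` and executes on the thread pool, sending the result
// back through `task_sender`.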
785 | |||
786 | fn result_to_task<R>(id: RequestId, result: Result<R::Result>) -> Task | ||
787 | where | ||
788 | R: req::Request + 'static, | ||
789 | R::Params: DeserializeOwned + 'static, | ||
790 | R::Result: Serialize + 'static, | ||
791 | { | ||
792 | let response = match result { | ||
793 | Ok(resp) => Response::new_ok(id, &resp), | ||
794 | Err(e) => match e.downcast::<LspError>() { | ||
795 | Ok(lsp_error) => Response::new_err(id, lsp_error.code, lsp_error.message), | ||
796 | Err(e) => { | ||
797 | if is_canceled(&e) { | ||
798 | Response::new_err( | ||
799 | id, | ||
800 | ErrorCode::ContentModified as i32, | ||
801 | "content modified".to_string(), | ||
802 | ) | ||
803 | } else { | ||
804 | Response::new_err(id, ErrorCode::InternalError as i32, e.to_string()) | ||
805 | } | ||
806 | } | ||
807 | }, | ||
808 | }; | ||
809 | Task::Respond(response) | ||
810 | } | ||
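// Note (added): mapping a cancelled computation (`Canceled`) to `ErrorCode::ContentModified`
// matters because clients typically treat that code as "the result is stale because the
// document changed" and drop it quietly, whereas `InternalError` would surface a visible
// error to the user.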
811 | |||
812 | fn update_file_notifications_on_threadpool( | ||
813 | pool: &ThreadPool, | ||
814 | world: WorldSnapshot, | ||
815 | publish_decorations: bool, | ||
816 | task_sender: Sender<Task>, | ||
817 | subscriptions: Vec<FileId>, | ||
818 | ) { | ||
819 | log::trace!("updating notifications for {:?}", subscriptions); | ||
820 | let publish_diagnostics = world.feature_flags().get("lsp.diagnostics"); | ||
821 | pool.execute(move || { | ||
822 | for file_id in subscriptions { | ||
823 | if publish_diagnostics { | ||
824 | match handlers::publish_diagnostics(&world, file_id) { | ||
825 | Err(e) => { | ||
826 | if !is_canceled(&e) { | ||
827 | log::error!("failed to compute diagnostics: {:?}", e); | ||
828 | } | ||
829 | } | ||
830 | Ok(task) => { | ||
831 | task_sender.send(Task::Diagnostic(task)).unwrap(); | ||
832 | } | ||
833 | } | ||
834 | } | ||
835 | if publish_decorations { | ||
836 | match handlers::publish_decorations(&world, file_id) { | ||
837 | Err(e) => { | ||
838 | if !is_canceled(&e) { | ||
839 | log::error!("failed to compute decorations: {:?}", e); | ||
840 | } | ||
841 | } | ||
842 | Ok(params) => { | ||
843 | let not = notification_new::<req::PublishDecorations>(params); | ||
844 | task_sender.send(Task::Notify(not)).unwrap(); | ||
845 | } | ||
846 | } | ||
847 | } | ||
848 | } | ||
849 | }); | ||
850 | } | ||
851 | |||
852 | pub fn show_message(typ: req::MessageType, message: impl Into<String>, sender: &Sender<Message>) { | ||
853 | let message = message.into(); | ||
854 | let params = req::ShowMessageParams { typ, message }; | ||
855 | let not = notification_new::<req::ShowMessage>(params); | ||
856 | sender.send(not.into()).unwrap(); | ||
857 | } | ||
858 | |||
859 | fn is_canceled(e: &Box<dyn std::error::Error + Send + Sync>) -> bool { | ||
860 | e.downcast_ref::<Canceled>().is_some() | ||
861 | } | ||
862 | |||
863 | fn notification_is<N: lsp_types::notification::Notification>(notification: &Notification) -> bool { | ||
864 | notification.method == N::METHOD | ||
865 | } | ||
866 | |||
867 | fn notification_cast<N>(notification: Notification) -> std::result::Result<N::Params, Notification> | ||
868 | where | ||
869 | N: lsp_types::notification::Notification, | ||
870 | N::Params: DeserializeOwned, | ||
871 | { | ||
872 | notification.extract(N::METHOD) | ||
873 | } | ||
874 | |||
875 | fn notification_new<N>(params: N::Params) -> Notification | ||
876 | where | ||
877 | N: lsp_types::notification::Notification, | ||
878 | N::Params: Serialize, | ||
879 | { | ||
880 | Notification::new(N::METHOD.to_string(), params) | ||
881 | } | ||
882 | |||
883 | fn request_new<R>(id: RequestId, params: R::Params) -> Request | ||
884 | where | ||
885 | R: lsp_types::request::Request, | ||
886 | R::Params: Serialize, | ||
887 | { | ||
888 | Request::new(id, R::METHOD.to_string(), params) | ||
889 | } | ||
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs deleted file mode 100644 index bb7bab372..000000000 --- a/crates/ra_lsp_server/src/main_loop/handlers.rs +++ /dev/null | |||
@@ -1,1070 +0,0 @@ | |||
1 | //! This module is responsible for implementing handlers for the Language Server | ||
2 | //! Protocol. The majority of requests are fulfilled by calling into the | ||
3 | //! `ra_ide` crate. | ||
4 | |||
5 | use std::{ | ||
6 | collections::hash_map::Entry, | ||
7 | fmt::Write as _, | ||
8 | io::Write as _, | ||
9 | process::{self, Stdio}, | ||
10 | }; | ||
11 | |||
12 | use lsp_server::ErrorCode; | ||
13 | use lsp_types::{ | ||
14 | CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, | ||
15 | CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams, | ||
16 | CodeAction, CodeActionOrCommand, CodeActionResponse, CodeLens, Command, CompletionItem, | ||
17 | Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange, | ||
18 | FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position, | ||
19 | PrepareRenameResponse, Range, RenameParams, SymbolInformation, TextDocumentIdentifier, | ||
20 | TextEdit, WorkspaceEdit, | ||
21 | }; | ||
22 | use ra_ide::{ | ||
23 | AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind, | ||
24 | SearchScope, | ||
25 | }; | ||
26 | use ra_prof::profile; | ||
27 | use ra_syntax::{AstNode, SyntaxKind, TextRange, TextUnit}; | ||
28 | use rustc_hash::FxHashMap; | ||
29 | use serde::{Deserialize, Serialize}; | ||
30 | use serde_json::to_value; | ||
31 | |||
32 | use crate::{ | ||
33 | cargo_target_spec::CargoTargetSpec, | ||
34 | conv::{ | ||
35 | to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith, | ||
36 | TryConvWithToVec, | ||
37 | }, | ||
38 | diagnostics::DiagnosticTask, | ||
39 | from_json, | ||
40 | req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind}, | ||
41 | world::WorldSnapshot, | ||
42 | LspError, Result, | ||
43 | }; | ||
44 | |||
45 | pub fn handle_analyzer_status(world: WorldSnapshot, _: ()) -> Result<String> { | ||
46 | let _p = profile("handle_analyzer_status"); | ||
47 | let mut buf = world.status(); | ||
48 | writeln!(buf, "\n\nrequests:").unwrap(); | ||
49 | let requests = world.latest_requests.read(); | ||
50 | for (is_last, r) in requests.iter() { | ||
51 | let mark = if is_last { "*" } else { " " }; | ||
52 | writeln!(buf, "{}{:4} {:<36}{}ms", mark, r.id, r.method, r.duration.as_millis()).unwrap(); | ||
53 | } | ||
54 | Ok(buf) | ||
55 | } | ||
56 | |||
57 | pub fn handle_syntax_tree(world: WorldSnapshot, params: req::SyntaxTreeParams) -> Result<String> { | ||
58 | let _p = profile("handle_syntax_tree"); | ||
59 | let id = params.text_document.try_conv_with(&world)?; | ||
60 | let line_index = world.analysis().file_line_index(id)?; | ||
61 | let text_range = params.range.map(|p| p.conv_with(&line_index)); | ||
62 | let res = world.analysis().syntax_tree(id, text_range)?; | ||
63 | Ok(res) | ||
64 | } | ||
65 | |||
66 | pub fn handle_expand_macro( | ||
67 | world: WorldSnapshot, | ||
68 | params: req::ExpandMacroParams, | ||
69 | ) -> Result<Option<req::ExpandedMacro>> { | ||
70 | let _p = profile("handle_expand_macro"); | ||
71 | let file_id = params.text_document.try_conv_with(&world)?; | ||
72 | let line_index = world.analysis().file_line_index(file_id)?; | ||
73 | let offset = params.position.map(|p| p.conv_with(&line_index)); | ||
74 | |||
75 | match offset { | ||
76 | None => Ok(None), | ||
77 | Some(offset) => { | ||
78 | let res = world.analysis().expand_macro(FilePosition { file_id, offset })?; | ||
79 | Ok(res.map(|it| req::ExpandedMacro { name: it.name, expansion: it.expansion })) | ||
80 | } | ||
81 | } | ||
82 | } | ||
83 | |||
84 | pub fn handle_selection_range( | ||
85 | world: WorldSnapshot, | ||
86 | params: req::SelectionRangeParams, | ||
87 | ) -> Result<Vec<req::SelectionRange>> { | ||
88 | let _p = profile("handle_selection_range"); | ||
89 | let file_id = params.text_document.try_conv_with(&world)?; | ||
90 | let line_index = world.analysis().file_line_index(file_id)?; | ||
91 | params | ||
92 | .positions | ||
93 | .into_iter() | ||
94 | .map_conv_with(&line_index) | ||
95 | .map(|position| { | ||
96 | let mut ranges = Vec::new(); | ||
97 | { | ||
98 | let mut range = TextRange::from_to(position, position); | ||
99 | loop { | ||
100 | ranges.push(range); | ||
101 | let frange = FileRange { file_id, range }; | ||
102 | let next = world.analysis().extend_selection(frange)?; | ||
103 | if next == range { | ||
104 | break; | ||
105 | } else { | ||
106 | range = next | ||
107 | } | ||
108 | } | ||
109 | } | ||
110 | let mut range = req::SelectionRange { | ||
111 | range: ranges.last().unwrap().conv_with(&line_index), | ||
112 | parent: None, | ||
113 | }; | ||
114 | for r in ranges.iter().rev().skip(1) { | ||
115 | range = req::SelectionRange { | ||
116 | range: r.conv_with(&line_index), | ||
117 | parent: Some(Box::new(range)), | ||
118 | } | ||
119 | } | ||
120 | Ok(range) | ||
121 | }) | ||
122 | .collect() | ||
123 | } | ||
124 | |||
125 | pub fn handle_find_matching_brace( | ||
126 | world: WorldSnapshot, | ||
127 | params: req::FindMatchingBraceParams, | ||
128 | ) -> Result<Vec<Position>> { | ||
129 | let _p = profile("handle_find_matching_brace"); | ||
130 | let file_id = params.text_document.try_conv_with(&world)?; | ||
131 | let line_index = world.analysis().file_line_index(file_id)?; | ||
132 | let res = params | ||
133 | .offsets | ||
134 | .into_iter() | ||
135 | .map_conv_with(&line_index) | ||
136 | .map(|offset| { | ||
137 | if let Ok(Some(matching_brace_offset)) = | ||
138 | world.analysis().matching_brace(FilePosition { file_id, offset }) | ||
139 | { | ||
140 | matching_brace_offset | ||
141 | } else { | ||
142 | offset | ||
143 | } | ||
144 | }) | ||
145 | .map_conv_with(&line_index) | ||
146 | .collect(); | ||
147 | Ok(res) | ||
148 | } | ||
149 | |||
150 | pub fn handle_join_lines( | ||
151 | world: WorldSnapshot, | ||
152 | params: req::JoinLinesParams, | ||
153 | ) -> Result<req::SourceChange> { | ||
154 | let _p = profile("handle_join_lines"); | ||
155 | let frange = (¶ms.text_document, params.range).try_conv_with(&world)?; | ||
156 | world.analysis().join_lines(frange)?.try_conv_with(&world) | ||
157 | } | ||
158 | |||
159 | pub fn handle_on_enter( | ||
160 | world: WorldSnapshot, | ||
161 | params: req::TextDocumentPositionParams, | ||
162 | ) -> Result<Option<req::SourceChange>> { | ||
163 | let _p = profile("handle_on_enter"); | ||
164 | let position = params.try_conv_with(&world)?; | ||
165 | match world.analysis().on_enter(position)? { | ||
166 | None => Ok(None), | ||
167 | Some(edit) => Ok(Some(edit.try_conv_with(&world)?)), | ||
168 | } | ||
169 | } | ||
170 | |||
171 | // Don't forget to add new trigger characters to `ServerCapabilities` in `caps.rs`. | ||
172 | pub fn handle_on_type_formatting( | ||
173 | world: WorldSnapshot, | ||
174 | params: req::DocumentOnTypeFormattingParams, | ||
175 | ) -> Result<Option<Vec<TextEdit>>> { | ||
176 | let _p = profile("handle_on_type_formatting"); | ||
177 | let mut position = params.text_document_position.try_conv_with(&world)?; | ||
178 | let line_index = world.analysis().file_line_index(position.file_id)?; | ||
179 | let line_endings = world.file_line_endings(position.file_id); | ||
180 | |||
181 | // in `ra_ide`, the `on_type` invariant is that | ||
182 | // `text.char_at(position) == typed_char`. | ||
183 | position.offset -= TextUnit::of_char('.'); | ||
184 | let char_typed = params.ch.chars().next().unwrap_or('\0'); | ||
185 | |||
186 | // We have an assist that inserts ` ` after typing `->` in `fn foo() ->{`, | ||
187 | // but it requires precise cursor positioning to work, and one can't | ||
188 | // position the cursor with on_type formatting. So, let's just toggle this | ||
189 | // feature off here, hoping that we'll enable it one day, 😿. | ||
190 | if char_typed == '>' { | ||
191 | return Ok(None); | ||
192 | } | ||
193 | |||
194 | let edit = world.analysis().on_char_typed(position, char_typed)?; | ||
195 | let mut edit = match edit { | ||
196 | Some(it) => it, | ||
197 | None => return Ok(None), | ||
198 | }; | ||
199 | |||
200 | // This should be a single-file edit | ||
201 | let edit = edit.source_file_edits.pop().unwrap(); | ||
202 | |||
203 | let change: Vec<TextEdit> = edit.edit.conv_with((&line_index, line_endings)); | ||
204 | Ok(Some(change)) | ||
205 | } | ||
206 | |||
207 | pub fn handle_document_symbol( | ||
208 | world: WorldSnapshot, | ||
209 | params: req::DocumentSymbolParams, | ||
210 | ) -> Result<Option<req::DocumentSymbolResponse>> { | ||
211 | let _p = profile("handle_document_symbol"); | ||
212 | let file_id = params.text_document.try_conv_with(&world)?; | ||
213 | let line_index = world.analysis().file_line_index(file_id)?; | ||
214 | |||
215 | let mut parents: Vec<(DocumentSymbol, Option<usize>)> = Vec::new(); | ||
216 | |||
217 | for symbol in world.analysis().file_structure(file_id)? { | ||
218 | let doc_symbol = DocumentSymbol { | ||
219 | name: symbol.label, | ||
220 | detail: symbol.detail, | ||
221 | kind: symbol.kind.conv(), | ||
222 | deprecated: Some(symbol.deprecated), | ||
223 | range: symbol.node_range.conv_with(&line_index), | ||
224 | selection_range: symbol.navigation_range.conv_with(&line_index), | ||
225 | children: None, | ||
226 | }; | ||
227 | parents.push((doc_symbol, symbol.parent)); | ||
228 | } | ||
229 | let mut res = Vec::new(); | ||
230 | while let Some((node, parent)) = parents.pop() { | ||
231 | match parent { | ||
232 | None => res.push(node), | ||
233 | Some(i) => { | ||
234 | let children = &mut parents[i].0.children; | ||
235 | if children.is_none() { | ||
236 | *children = Some(Vec::new()); | ||
237 | } | ||
238 | children.as_mut().unwrap().push(node); | ||
239 | } | ||
240 | } | ||
241 | } | ||
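// Note (added): this relies on `file_structure` listing parents before their children,
// so every `parent` index points at an earlier entry; popping from the back therefore
// attaches all children before their parent is itself moved into `res` or into its own
// parent.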
242 | |||
243 | Ok(Some(res.into())) | ||
244 | } | ||
245 | |||
246 | pub fn handle_workspace_symbol( | ||
247 | world: WorldSnapshot, | ||
248 | params: req::WorkspaceSymbolParams, | ||
249 | ) -> Result<Option<Vec<SymbolInformation>>> { | ||
250 | let _p = profile("handle_workspace_symbol"); | ||
251 | let all_symbols = params.query.contains('#'); | ||
252 | let libs = params.query.contains('*'); | ||
253 | let query = { | ||
254 | let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect(); | ||
255 | let mut q = Query::new(query); | ||
256 | if !all_symbols { | ||
257 | q.only_types(); | ||
258 | } | ||
259 | if libs { | ||
260 | q.libs(); | ||
261 | } | ||
262 | q.limit(128); | ||
263 | q | ||
264 | }; | ||
265 | let mut res = exec_query(&world, query)?; | ||
266 | if res.is_empty() && !all_symbols { | ||
267 | let mut query = Query::new(params.query); | ||
268 | query.limit(128); | ||
269 | res = exec_query(&world, query)?; | ||
270 | } | ||
271 | |||
272 | return Ok(Some(res)); | ||
273 | |||
274 | fn exec_query(world: &WorldSnapshot, query: Query) -> Result<Vec<SymbolInformation>> { | ||
275 | let mut res = Vec::new(); | ||
276 | for nav in world.analysis().symbol_search(query)? { | ||
277 | let info = SymbolInformation { | ||
278 | name: nav.name().to_string(), | ||
279 | kind: nav.kind().conv(), | ||
280 | location: nav.try_conv_with(world)?, | ||
281 | container_name: nav.container_name().map(|v| v.to_string()), | ||
282 | deprecated: None, | ||
283 | }; | ||
284 | res.push(info); | ||
285 | } | ||
286 | Ok(res) | ||
287 | } | ||
288 | } | ||
289 | |||
290 | pub fn handle_goto_definition( | ||
291 | world: WorldSnapshot, | ||
292 | params: req::TextDocumentPositionParams, | ||
293 | ) -> Result<Option<req::GotoDefinitionResponse>> { | ||
294 | let _p = profile("handle_goto_definition"); | ||
295 | let position = params.try_conv_with(&world)?; | ||
296 | let nav_info = match world.analysis().goto_definition(position)? { | ||
297 | None => return Ok(None), | ||
298 | Some(it) => it, | ||
299 | }; | ||
300 | let res = (position.file_id, nav_info).try_conv_with(&world)?; | ||
301 | Ok(Some(res)) | ||
302 | } | ||
303 | |||
304 | pub fn handle_goto_implementation( | ||
305 | world: WorldSnapshot, | ||
306 | params: req::TextDocumentPositionParams, | ||
307 | ) -> Result<Option<req::GotoImplementationResponse>> { | ||
308 | let _p = profile("handle_goto_implementation"); | ||
309 | let position = params.try_conv_with(&world)?; | ||
310 | let nav_info = match world.analysis().goto_implementation(position)? { | ||
311 | None => return Ok(None), | ||
312 | Some(it) => it, | ||
313 | }; | ||
314 | let res = (position.file_id, nav_info).try_conv_with(&world)?; | ||
315 | Ok(Some(res)) | ||
316 | } | ||
317 | |||
318 | pub fn handle_goto_type_definition( | ||
319 | world: WorldSnapshot, | ||
320 | params: req::TextDocumentPositionParams, | ||
321 | ) -> Result<Option<req::GotoTypeDefinitionResponse>> { | ||
322 | let _p = profile("handle_goto_type_definition"); | ||
323 | let position = params.try_conv_with(&world)?; | ||
324 | let nav_info = match world.analysis().goto_type_definition(position)? { | ||
325 | None => return Ok(None), | ||
326 | Some(it) => it, | ||
327 | }; | ||
328 | let res = (position.file_id, nav_info).try_conv_with(&world)?; | ||
329 | Ok(Some(res)) | ||
330 | } | ||
331 | |||
332 | pub fn handle_parent_module( | ||
333 | world: WorldSnapshot, | ||
334 | params: req::TextDocumentPositionParams, | ||
335 | ) -> Result<Vec<Location>> { | ||
336 | let _p = profile("handle_parent_module"); | ||
337 | let position = params.try_conv_with(&world)?; | ||
338 | world.analysis().parent_module(position)?.iter().try_conv_with_to_vec(&world) | ||
339 | } | ||
340 | |||
341 | pub fn handle_runnables( | ||
342 | world: WorldSnapshot, | ||
343 | params: req::RunnablesParams, | ||
344 | ) -> Result<Vec<req::Runnable>> { | ||
345 | let _p = profile("handle_runnables"); | ||
346 | let file_id = params.text_document.try_conv_with(&world)?; | ||
347 | let line_index = world.analysis().file_line_index(file_id)?; | ||
348 | let offset = params.position.map(|it| it.conv_with(&line_index)); | ||
349 | let mut res = Vec::new(); | ||
350 | let workspace_root = world.workspace_root_for(file_id); | ||
351 | for runnable in world.analysis().runnables(file_id)? { | ||
352 | if let Some(offset) = offset { | ||
353 | if !runnable.range.contains_inclusive(offset) { | ||
354 | continue; | ||
355 | } | ||
356 | } | ||
357 | res.push(to_lsp_runnable(&world, file_id, runnable)?); | ||
358 | } | ||
359 | let mut check_args = vec!["check".to_string()]; | ||
360 | let label; | ||
361 | match CargoTargetSpec::for_file(&world, file_id)? { | ||
362 | Some(spec) => { | ||
363 | label = format!("cargo check -p {}", spec.package); | ||
364 | spec.push_to(&mut check_args); | ||
365 | } | ||
366 | None => { | ||
367 | label = "cargo check --all".to_string(); | ||
368 | check_args.push("--all".to_string()) | ||
369 | } | ||
370 | } | ||
371 | // Always add `cargo check`. | ||
372 | res.push(req::Runnable { | ||
373 | range: Default::default(), | ||
374 | label, | ||
375 | bin: "cargo".to_string(), | ||
376 | args: check_args, | ||
377 | env: FxHashMap::default(), | ||
378 | cwd: workspace_root.map(|root| root.to_string_lossy().to_string()), | ||
379 | }); | ||
380 | Ok(res) | ||
381 | } | ||
382 | |||
383 | pub fn handle_decorations( | ||
384 | world: WorldSnapshot, | ||
385 | params: TextDocumentIdentifier, | ||
386 | ) -> Result<Vec<Decoration>> { | ||
387 | let _p = profile("handle_decorations"); | ||
388 | let file_id = params.try_conv_with(&world)?; | ||
389 | highlight(&world, file_id) | ||
390 | } | ||
391 | |||
392 | pub fn handle_completion( | ||
393 | world: WorldSnapshot, | ||
394 | params: req::CompletionParams, | ||
395 | ) -> Result<Option<req::CompletionResponse>> { | ||
396 | let _p = profile("handle_completion"); | ||
397 | let position = params.text_document_position.try_conv_with(&world)?; | ||
398 | let completion_triggered_after_single_colon = { | ||
399 | let mut res = false; | ||
400 | if let Some(ctx) = params.context { | ||
401 | if ctx.trigger_character.unwrap_or_default() == ":" { | ||
402 | let source_file = world.analysis().parse(position.file_id)?; | ||
403 | let syntax = source_file.syntax(); | ||
404 | let text = syntax.text(); | ||
405 | if let Some(next_char) = text.char_at(position.offset) { | ||
406 | let diff = TextUnit::of_char(next_char) + TextUnit::of_char(':'); | ||
407 | let prev_char = position.offset - diff; | ||
408 | if text.char_at(prev_char) != Some(':') { | ||
409 | res = true; | ||
410 | } | ||
411 | } | ||
412 | } | ||
413 | } | ||
414 | res | ||
415 | }; | ||
416 | if completion_triggered_after_single_colon { | ||
417 | return Ok(None); | ||
418 | } | ||
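// Illustrative example (added): as the flag name suggests, the check above bails out when
// the `:` that triggered completion is not the second half of `::` (for example, typing
// `foo:` inside a struct literal should not pop up path completions, while `foo::` still
// should).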
419 | |||
420 | let items = match world.analysis().completions(position)? { | ||
421 | None => return Ok(None), | ||
422 | Some(items) => items, | ||
423 | }; | ||
424 | let line_index = world.analysis().file_line_index(position.file_id)?; | ||
425 | let line_endings = world.file_line_endings(position.file_id); | ||
426 | let items: Vec<CompletionItem> = | ||
427 | items.into_iter().map(|item| item.conv_with((&line_index, line_endings))).collect(); | ||
428 | |||
429 | Ok(Some(items.into())) | ||
430 | } | ||
431 | |||
432 | pub fn handle_folding_range( | ||
433 | world: WorldSnapshot, | ||
434 | params: FoldingRangeParams, | ||
435 | ) -> Result<Option<Vec<FoldingRange>>> { | ||
436 | let _p = profile("handle_folding_range"); | ||
437 | let file_id = params.text_document.try_conv_with(&world)?; | ||
438 | let folds = world.analysis().folding_ranges(file_id)?; | ||
439 | let text = world.analysis().file_text(file_id)?; | ||
440 | let line_index = world.analysis().file_line_index(file_id)?; | ||
441 | let ctx = FoldConvCtx { | ||
442 | text: &text, | ||
443 | line_index: &line_index, | ||
444 | line_folding_only: world.options.line_folding_only, | ||
445 | }; | ||
446 | let res = Some(folds.into_iter().map_conv_with(&ctx).collect()); | ||
447 | Ok(res) | ||
448 | } | ||
449 | |||
450 | pub fn handle_signature_help( | ||
451 | world: WorldSnapshot, | ||
452 | params: req::TextDocumentPositionParams, | ||
453 | ) -> Result<Option<req::SignatureHelp>> { | ||
454 | let _p = profile("handle_signature_help"); | ||
455 | let position = params.try_conv_with(&world)?; | ||
456 | if let Some(call_info) = world.analysis().call_info(position)? { | ||
457 | let active_parameter = call_info.active_parameter.map(|it| it as i64); | ||
458 | let sig_info = call_info.signature.conv(); | ||
459 | |||
460 | Ok(Some(req::SignatureHelp { | ||
461 | signatures: vec![sig_info], | ||
462 | active_signature: Some(0), | ||
463 | active_parameter, | ||
464 | })) | ||
465 | } else { | ||
466 | Ok(None) | ||
467 | } | ||
468 | } | ||
469 | |||
470 | pub fn handle_hover( | ||
471 | world: WorldSnapshot, | ||
472 | params: req::TextDocumentPositionParams, | ||
473 | ) -> Result<Option<Hover>> { | ||
474 | let _p = profile("handle_hover"); | ||
475 | let position = params.try_conv_with(&world)?; | ||
476 | let info = match world.analysis().hover(position)? { | ||
477 | None => return Ok(None), | ||
478 | Some(info) => info, | ||
479 | }; | ||
480 | let line_index = world.analysis().file_line_index(position.file_id)?; | ||
481 | let range = info.range.conv_with(&line_index); | ||
482 | let res = Hover { | ||
483 | contents: HoverContents::Markup(MarkupContent { | ||
484 | kind: MarkupKind::Markdown, | ||
485 | value: crate::markdown::format_docs(&info.info.to_markup()), | ||
486 | }), | ||
487 | range: Some(range), | ||
488 | }; | ||
489 | Ok(Some(res)) | ||
490 | } | ||
491 | |||
492 | pub fn handle_prepare_rename( | ||
493 | world: WorldSnapshot, | ||
494 | params: req::TextDocumentPositionParams, | ||
495 | ) -> Result<Option<PrepareRenameResponse>> { | ||
496 | let _p = profile("handle_prepare_rename"); | ||
497 | let position = params.try_conv_with(&world)?; | ||
498 | |||
499 | let optional_change = world.analysis().rename(position, "dummy")?; | ||
500 | let range = match optional_change { | ||
501 | None => return Ok(None), | ||
502 | Some(it) => it.range, | ||
503 | }; | ||
504 | |||
505 | let file_id = params.text_document.try_conv_with(&world)?; | ||
506 | let line_index = world.analysis().file_line_index(file_id)?; | ||
507 | let range = range.conv_with(&line_index); | ||
508 | Ok(Some(PrepareRenameResponse::Range(range))) | ||
509 | } | ||
510 | |||
511 | pub fn handle_rename(world: WorldSnapshot, params: RenameParams) -> Result<Option<WorkspaceEdit>> { | ||
512 | let _p = profile("handle_rename"); | ||
513 | let position = params.text_document_position.try_conv_with(&world)?; | ||
514 | |||
515 | if params.new_name.is_empty() { | ||
516 | return Err(LspError::new( | ||
517 | ErrorCode::InvalidParams as i32, | ||
518 | "New Name cannot be empty".into(), | ||
519 | ) | ||
520 | .into()); | ||
521 | } | ||
522 | |||
523 | let optional_change = world.analysis().rename(position, &*params.new_name)?; | ||
524 | let change = match optional_change { | ||
525 | None => return Ok(None), | ||
526 | Some(it) => it.info, | ||
527 | }; | ||
528 | |||
529 | let source_change_req = change.try_conv_with(&world)?; | ||
530 | |||
531 | Ok(Some(source_change_req.workspace_edit)) | ||
532 | } | ||
533 | |||
534 | pub fn handle_references( | ||
535 | world: WorldSnapshot, | ||
536 | params: req::ReferenceParams, | ||
537 | ) -> Result<Option<Vec<Location>>> { | ||
538 | let _p = profile("handle_references"); | ||
539 | let position = params.text_document_position.try_conv_with(&world)?; | ||
540 | |||
541 | let refs = match world.analysis().find_all_refs(position, None)? { | ||
542 | None => return Ok(None), | ||
543 | Some(refs) => refs, | ||
544 | }; | ||
545 | |||
546 | let locations = if params.context.include_declaration { | ||
547 | refs.into_iter() | ||
548 | .filter_map(|reference| { | ||
549 | let line_index = | ||
550 | world.analysis().file_line_index(reference.file_range.file_id).ok()?; | ||
551 | to_location( | ||
552 | reference.file_range.file_id, | ||
553 | reference.file_range.range, | ||
554 | &world, | ||
555 | &line_index, | ||
556 | ) | ||
557 | .ok() | ||
558 | }) | ||
559 | .collect() | ||
560 | } else { | ||
561 | // Only iterate over the references if include_declaration was false | ||
562 | refs.references() | ||
563 | .iter() | ||
564 | .filter_map(|reference| { | ||
565 | let line_index = | ||
566 | world.analysis().file_line_index(reference.file_range.file_id).ok()?; | ||
567 | to_location( | ||
568 | reference.file_range.file_id, | ||
569 | reference.file_range.range, | ||
570 | &world, | ||
571 | &line_index, | ||
572 | ) | ||
573 | .ok() | ||
574 | }) | ||
575 | .collect() | ||
576 | }; | ||
577 | |||
578 | Ok(Some(locations)) | ||
579 | } | ||
580 | |||
581 | pub fn handle_formatting( | ||
582 | world: WorldSnapshot, | ||
583 | params: DocumentFormattingParams, | ||
584 | ) -> Result<Option<Vec<TextEdit>>> { | ||
585 | let _p = profile("handle_formatting"); | ||
586 | let file_id = params.text_document.try_conv_with(&world)?; | ||
587 | let file = world.analysis().file_text(file_id)?; | ||
588 | let crate_ids = world.analysis().crate_for(file_id)?; | ||
589 | |||
590 | let file_line_index = world.analysis().file_line_index(file_id)?; | ||
591 | let end_position = TextUnit::of_str(&file).conv_with(&file_line_index); | ||
592 | |||
593 | let mut rustfmt = process::Command::new("rustfmt"); | ||
594 | rustfmt.args(&world.options.rustfmt_args); | ||
595 | if let Some(&crate_id) = crate_ids.first() { | ||
596 | // Assume all crates are in the same edition | ||
597 | let edition = world.analysis().crate_edition(crate_id)?; | ||
598 | rustfmt.args(&["--edition", &edition.to_string()]); | ||
599 | } | ||
600 | |||
601 | if let Ok(path) = params.text_document.uri.to_file_path() { | ||
602 | if let Some(parent) = path.parent() { | ||
603 | rustfmt.current_dir(parent); | ||
604 | } | ||
605 | } | ||
606 | let mut rustfmt = rustfmt.stdin(Stdio::piped()).stdout(Stdio::piped()).spawn()?; | ||
607 | |||
608 | rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?; | ||
609 | |||
610 | let output = rustfmt.wait_with_output()?; | ||
611 | let captured_stdout = String::from_utf8(output.stdout)?; | ||
612 | |||
613 | if !output.status.success() { | ||
614 | match output.status.code() { | ||
615 | Some(1) => { | ||
616 | // While `rustfmt` doesn't have a specific exit code for parse errors, exiting with 1 | ||
617 | // is most likely caused by one. Most Language Servers swallow parse errors on | ||
618 | // formatting because otherwise an error is surfaced to the user on top of the | ||
619 | // syntax error diagnostics they're already receiving. This is especially jarring | ||
620 | // if they have format on save enabled. | ||
621 | log::info!("rustfmt exited with status 1, assuming parse error and ignoring"); | ||
622 | return Ok(None); | ||
623 | } | ||
624 | _ => { | ||
625 | // Something else happened - e.g. `rustfmt` is missing or caught a signal | ||
626 | return Err(LspError::new( | ||
627 | -32900, | ||
628 | format!( | ||
629 | r#"rustfmt exited with: | ||
630 | Status: {} | ||
631 | stdout: {}"#, | ||
632 | output.status, captured_stdout, | ||
633 | ), | ||
634 | ) | ||
635 | .into()); | ||
636 | } | ||
637 | } | ||
638 | } | ||
639 | |||
640 | Ok(Some(vec![TextEdit { | ||
641 | range: Range::new(Position::new(0, 0), end_position), | ||
642 | new_text: captured_stdout, | ||
643 | }])) | ||
644 | } | ||
645 | |||
646 | pub fn handle_code_action( | ||
647 | world: WorldSnapshot, | ||
648 | params: req::CodeActionParams, | ||
649 | ) -> Result<Option<CodeActionResponse>> { | ||
650 | let _p = profile("handle_code_action"); | ||
651 | let file_id = params.text_document.try_conv_with(&world)?; | ||
652 | let line_index = world.analysis().file_line_index(file_id)?; | ||
653 | let range = params.range.conv_with(&line_index); | ||
654 | |||
655 | let diagnostics = world.analysis().diagnostics(file_id)?; | ||
656 | let mut res = CodeActionResponse::default(); | ||
657 | |||
658 | let fixes_from_diagnostics = diagnostics | ||
659 | .into_iter() | ||
660 | .filter_map(|d| Some((d.range, d.fix?))) | ||
661 | .filter(|(diag_range, _fix)| diag_range.intersection(&range).is_some()) | ||
662 | .map(|(_range, fix)| fix); | ||
663 | |||
664 | for source_edit in fixes_from_diagnostics { | ||
665 | let title = source_edit.label.clone(); | ||
666 | let edit = source_edit.try_conv_with(&world)?; | ||
667 | |||
668 | let command = Command { | ||
669 | title, | ||
670 | command: "rust-analyzer.applySourceChange".to_string(), | ||
671 | arguments: Some(vec![to_value(edit).unwrap()]), | ||
672 | }; | ||
673 | let action = CodeAction { | ||
674 | title: command.title.clone(), | ||
675 | kind: None, | ||
676 | diagnostics: None, | ||
677 | edit: None, | ||
678 | command: Some(command), | ||
679 | is_preferred: None, | ||
680 | }; | ||
681 | res.push(action.into()); | ||
682 | } | ||
683 | |||
684 | for fix in world.check_fixes.get(&file_id).into_iter().flatten() { | ||
685 | let fix_range = fix.range.conv_with(&line_index); | ||
686 | if fix_range.intersection(&range).is_none() { | ||
687 | continue; | ||
688 | } | ||
689 | res.push(fix.action.clone()); | ||
690 | } | ||
691 | |||
692 | let mut groups = FxHashMap::default(); | ||
693 | for assist in world.analysis().assists(FileRange { file_id, range })?.into_iter() { | ||
694 | let arg = to_value(assist.source_change.try_conv_with(&world)?)?; | ||
695 | |||
696 | let (command, title, arg) = match assist.group_label { | ||
697 | None => ("rust-analyzer.applySourceChange", assist.label.clone(), arg), | ||
698 | |||
699 | // Group all assists with the same `group_label` into a single CodeAction. | ||
700 | Some(group_label) => { | ||
701 | match groups.entry(group_label.clone()) { | ||
702 | Entry::Occupied(entry) => { | ||
703 | let idx: usize = *entry.get(); | ||
704 | match &mut res[idx] { | ||
705 | CodeActionOrCommand::CodeAction(CodeAction { | ||
706 | command: Some(Command { arguments: Some(arguments), .. }), | ||
707 | .. | ||
708 | }) => match arguments.as_mut_slice() { | ||
709 | [serde_json::Value::Array(arguments)] => arguments.push(arg), | ||
710 | _ => panic!("invalid group"), | ||
711 | }, | ||
712 | _ => panic!("invalid group"), | ||
713 | } | ||
714 | continue; | ||
715 | } | ||
716 | Entry::Vacant(entry) => { | ||
717 | entry.insert(res.len()); | ||
718 | } | ||
719 | } | ||
720 | ("rust-analyzer.selectAndApplySourceChange", group_label, to_value(vec![arg])?) | ||
721 | } | ||
722 | }; | ||
723 | |||
724 | let command = Command { | ||
725 | title: assist.label.clone(), | ||
726 | command: command.to_string(), | ||
727 | arguments: Some(vec![arg]), | ||
728 | }; | ||
729 | |||
730 | let kind = match assist.id { | ||
731 | AssistId("introduce_variable") => Some("refactor.extract.variable".to_string()), | ||
732 | AssistId("add_custom_impl") => Some("refactor.rewrite.add_custom_impl".to_string()), | ||
733 | _ => None, | ||
734 | }; | ||
735 | |||
736 | let action = CodeAction { | ||
737 | title, | ||
738 | kind, | ||
739 | diagnostics: None, | ||
740 | edit: None, | ||
741 | command: Some(command), | ||
742 | is_preferred: None, | ||
743 | }; | ||
744 | res.push(action.into()); | ||
745 | } | ||
746 | |||
747 | Ok(Some(res)) | ||
748 | } | ||
749 | |||
750 | pub fn handle_code_lens( | ||
751 | world: WorldSnapshot, | ||
752 | params: req::CodeLensParams, | ||
753 | ) -> Result<Option<Vec<CodeLens>>> { | ||
754 | let _p = profile("handle_code_lens"); | ||
755 | let file_id = params.text_document.try_conv_with(&world)?; | ||
756 | let line_index = world.analysis().file_line_index(file_id)?; | ||
757 | |||
758 | let mut lenses: Vec<CodeLens> = Default::default(); | ||
759 | |||
760 | // Gather runnables | ||
761 | for runnable in world.analysis().runnables(file_id)? { | ||
762 | let title = match &runnable.kind { | ||
763 | RunnableKind::Test { .. } | RunnableKind::TestMod { .. } => "▶️\u{fe0e}Run Test", | ||
764 | RunnableKind::Bench { .. } => "Run Bench", | ||
765 | RunnableKind::Bin => "Run", | ||
766 | } | ||
767 | .to_string(); | ||
768 | let r = to_lsp_runnable(&world, file_id, runnable)?; | ||
769 | let lens = CodeLens { | ||
770 | range: r.range, | ||
771 | command: Some(Command { | ||
772 | title, | ||
773 | command: "rust-analyzer.runSingle".into(), | ||
774 | arguments: Some(vec![to_value(r).unwrap()]), | ||
775 | }), | ||
776 | data: None, | ||
777 | }; | ||
778 | |||
779 | lenses.push(lens); | ||
780 | } | ||
781 | |||
782 | // Handle impls | ||
783 | lenses.extend( | ||
784 | world | ||
785 | .analysis() | ||
786 | .file_structure(file_id)? | ||
787 | .into_iter() | ||
788 | .filter(|it| match it.kind { | ||
789 | SyntaxKind::TRAIT_DEF | SyntaxKind::STRUCT_DEF | SyntaxKind::ENUM_DEF => true, | ||
790 | _ => false, | ||
791 | }) | ||
792 | .map(|it| { | ||
793 | let range = it.node_range.conv_with(&line_index); | ||
794 | let pos = range.start; | ||
795 | let lens_params = | ||
796 | req::TextDocumentPositionParams::new(params.text_document.clone(), pos); | ||
797 | CodeLens { | ||
798 | range, | ||
799 | command: None, | ||
800 | data: Some(to_value(CodeLensResolveData::Impls(lens_params)).unwrap()), | ||
801 | } | ||
802 | }), | ||
803 | ); | ||
804 | |||
805 | Ok(Some(lenses)) | ||
806 | } | ||
807 | |||
808 | #[derive(Debug, Serialize, Deserialize)] | ||
809 | #[serde(rename_all = "camelCase")] | ||
810 | enum CodeLensResolveData { | ||
811 | Impls(req::TextDocumentPositionParams), | ||
812 | } | ||
813 | |||
814 | pub fn handle_code_lens_resolve(world: WorldSnapshot, code_lens: CodeLens) -> Result<CodeLens> { | ||
815 | let _p = profile("handle_code_lens_resolve"); | ||
816 | let data = code_lens.data.unwrap(); | ||
817 | let resolve = from_json::<Option<CodeLensResolveData>>("CodeLensResolveData", data)?; | ||
818 | match resolve { | ||
819 | Some(CodeLensResolveData::Impls(lens_params)) => { | ||
820 | let locations: Vec<Location> = | ||
821 | match handle_goto_implementation(world, lens_params.clone())? { | ||
822 | Some(req::GotoDefinitionResponse::Scalar(loc)) => vec![loc], | ||
823 | Some(req::GotoDefinitionResponse::Array(locs)) => locs, | ||
824 | Some(req::GotoDefinitionResponse::Link(links)) => links | ||
825 | .into_iter() | ||
826 | .map(|link| Location::new(link.target_uri, link.target_selection_range)) | ||
827 | .collect(), | ||
828 | _ => vec![], | ||
829 | }; | ||
830 | |||
831 | let title = if locations.len() == 1 { | ||
832 | "1 implementation".into() | ||
833 | } else { | ||
834 | format!("{} implementations", locations.len()) | ||
835 | }; | ||
836 | |||
837 | // We cannot use the 'editor.action.showReferences' command directly | ||
838 | // because that command requires vscode types which we convert in the handler | ||
839 | // on the client side. | ||
840 | let cmd = Command { | ||
841 | title, | ||
842 | command: "rust-analyzer.showReferences".into(), | ||
843 | arguments: Some(vec![ | ||
844 | to_value(&lens_params.text_document.uri).unwrap(), | ||
845 | to_value(code_lens.range.start).unwrap(), | ||
846 | to_value(locations).unwrap(), | ||
847 | ]), | ||
848 | }; | ||
849 | Ok(CodeLens { range: code_lens.range, command: Some(cmd), data: None }) | ||
850 | } | ||
851 | None => Ok(CodeLens { | ||
852 | range: code_lens.range, | ||
853 | command: Some(Command { title: "Error".into(), ..Default::default() }), | ||
854 | data: None, | ||
855 | }), | ||
856 | } | ||
857 | } | ||
858 | |||
859 | pub fn handle_document_highlight( | ||
860 | world: WorldSnapshot, | ||
861 | params: req::TextDocumentPositionParams, | ||
862 | ) -> Result<Option<Vec<DocumentHighlight>>> { | ||
863 | let _p = profile("handle_document_highlight"); | ||
864 | let file_id = params.text_document.try_conv_with(&world)?; | ||
865 | let line_index = world.analysis().file_line_index(file_id)?; | ||
866 | |||
867 | let refs = match world | ||
868 | .analysis() | ||
869 | .find_all_refs(params.try_conv_with(&world)?, Some(SearchScope::single_file(file_id)))? | ||
870 | { | ||
871 | None => return Ok(None), | ||
872 | Some(refs) => refs, | ||
873 | }; | ||
874 | |||
875 | Ok(Some( | ||
876 | refs.into_iter() | ||
877 | .filter(|reference| reference.file_range.file_id == file_id) | ||
878 | .map(|reference| DocumentHighlight { | ||
879 | range: reference.file_range.range.conv_with(&line_index), | ||
880 | kind: reference.access.map(|it| it.conv()), | ||
881 | }) | ||
882 | .collect(), | ||
883 | )) | ||
884 | } | ||
885 | |||
886 | pub fn handle_ssr(world: WorldSnapshot, params: req::SsrParams) -> Result<req::SourceChange> { | ||
887 | let _p = profile("handle_ssr"); | ||
888 | world.analysis().structural_search_replace(¶ms.arg)??.try_conv_with(&world) | ||
889 | } | ||
890 | |||
891 | pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> { | ||
892 | let _p = profile("publish_diagnostics"); | ||
893 | let line_index = world.analysis().file_line_index(file_id)?; | ||
894 | let diagnostics: Vec<Diagnostic> = world | ||
895 | .analysis() | ||
896 | .diagnostics(file_id)? | ||
897 | .into_iter() | ||
898 | .map(|d| Diagnostic { | ||
899 | range: d.range.conv_with(&line_index), | ||
900 | severity: Some(d.severity.conv()), | ||
901 | code: None, | ||
902 | source: Some("rust-analyzer".to_string()), | ||
903 | message: d.message, | ||
904 | related_information: None, | ||
905 | tags: None, | ||
906 | }) | ||
907 | .collect(); | ||
908 | Ok(DiagnosticTask::SetNative(file_id, diagnostics)) | ||
909 | } | ||
910 | |||
911 | pub fn publish_decorations( | ||
912 | world: &WorldSnapshot, | ||
913 | file_id: FileId, | ||
914 | ) -> Result<req::PublishDecorationsParams> { | ||
915 | let _p = profile("publish_decorations"); | ||
916 | let uri = world.file_id_to_uri(file_id)?; | ||
917 | Ok(req::PublishDecorationsParams { uri, decorations: highlight(&world, file_id)? }) | ||
918 | } | ||
919 | |||
920 | fn to_lsp_runnable( | ||
921 | world: &WorldSnapshot, | ||
922 | file_id: FileId, | ||
923 | runnable: Runnable, | ||
924 | ) -> Result<req::Runnable> { | ||
925 | let spec = CargoTargetSpec::for_file(world, file_id)?; | ||
926 | let args = CargoTargetSpec::runnable_args(spec, &runnable.kind)?; | ||
927 | let line_index = world.analysis().file_line_index(file_id)?; | ||
928 | let label = match &runnable.kind { | ||
929 | RunnableKind::Test { test_id } => format!("test {}", test_id), | ||
930 | RunnableKind::TestMod { path } => format!("test-mod {}", path), | ||
931 | RunnableKind::Bench { test_id } => format!("bench {}", test_id), | ||
932 | RunnableKind::Bin => "run binary".to_string(), | ||
933 | }; | ||
934 | Ok(req::Runnable { | ||
935 | range: runnable.range.conv_with(&line_index), | ||
936 | label, | ||
937 | bin: "cargo".to_string(), | ||
938 | args, | ||
939 | env: { | ||
940 | let mut m = FxHashMap::default(); | ||
941 | m.insert("RUST_BACKTRACE".to_string(), "short".to_string()); | ||
942 | m | ||
943 | }, | ||
944 | cwd: world.workspace_root_for(file_id).map(|root| root.to_string_lossy().to_string()), | ||
945 | }) | ||
946 | } | ||
947 | fn highlight(world: &WorldSnapshot, file_id: FileId) -> Result<Vec<Decoration>> { | ||
948 | let line_index = world.analysis().file_line_index(file_id)?; | ||
949 | let res = world | ||
950 | .analysis() | ||
951 | .highlight(file_id)? | ||
952 | .into_iter() | ||
953 | .map(|h| Decoration { | ||
954 | range: h.range.conv_with(&line_index), | ||
955 | tag: h.tag, | ||
956 | binding_hash: h.binding_hash.map(|x| x.to_string()), | ||
957 | }) | ||
958 | .collect(); | ||
959 | Ok(res) | ||
960 | } | ||
961 | |||
962 | pub fn handle_inlay_hints( | ||
963 | world: WorldSnapshot, | ||
964 | params: InlayHintsParams, | ||
965 | ) -> Result<Vec<InlayHint>> { | ||
966 | let _p = profile("handle_inlay_hints"); | ||
967 | let file_id = params.text_document.try_conv_with(&world)?; | ||
968 | let analysis = world.analysis(); | ||
969 | let line_index = analysis.file_line_index(file_id)?; | ||
970 | Ok(analysis | ||
971 | .inlay_hints(file_id, world.options.max_inlay_hint_length)? | ||
972 | .into_iter() | ||
973 | .map(|api_type| InlayHint { | ||
974 | label: api_type.label.to_string(), | ||
975 | range: api_type.range.conv_with(&line_index), | ||
976 | kind: match api_type.kind { | ||
977 | ra_ide::InlayKind::TypeHint => InlayKind::TypeHint, | ||
978 | ra_ide::InlayKind::ParameterHint => InlayKind::ParameterHint, | ||
979 | }, | ||
980 | }) | ||
981 | .collect()) | ||
982 | } | ||
983 | |||
984 | pub fn handle_call_hierarchy_prepare( | ||
985 | world: WorldSnapshot, | ||
986 | params: CallHierarchyPrepareParams, | ||
987 | ) -> Result<Option<Vec<CallHierarchyItem>>> { | ||
988 | let _p = profile("handle_call_hierarchy_prepare"); | ||
989 | let position = params.text_document_position_params.try_conv_with(&world)?; | ||
990 | let file_id = position.file_id; | ||
991 | |||
992 | let nav_info = match world.analysis().call_hierarchy(position)? { | ||
993 | None => return Ok(None), | ||
994 | Some(it) => it, | ||
995 | }; | ||
996 | |||
997 | let line_index = world.analysis().file_line_index(file_id)?; | ||
998 | let RangeInfo { range, info: navs } = nav_info; | ||
999 | let res = navs | ||
1000 | .into_iter() | ||
1001 | .filter(|it| it.kind() == SyntaxKind::FN_DEF) | ||
1002 | .filter_map(|it| to_call_hierarchy_item(file_id, range, &world, &line_index, it).ok()) | ||
1003 | .collect(); | ||
1004 | |||
1005 | Ok(Some(res)) | ||
1006 | } | ||
1007 | |||
1008 | pub fn handle_call_hierarchy_incoming( | ||
1009 | world: WorldSnapshot, | ||
1010 | params: CallHierarchyIncomingCallsParams, | ||
1011 | ) -> Result<Option<Vec<CallHierarchyIncomingCall>>> { | ||
1012 | let _p = profile("handle_call_hierarchy_incoming"); | ||
1013 | let item = params.item; | ||
1014 | |||
1015 | let doc = TextDocumentIdentifier::new(item.uri); | ||
1016 | let frange: FileRange = (&doc, item.range).try_conv_with(&world)?; | ||
1017 | let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; | ||
1018 | |||
1019 | let call_items = match world.analysis().incoming_calls(fpos)? { | ||
1020 | None => return Ok(None), | ||
1021 | Some(it) => it, | ||
1022 | }; | ||
1023 | |||
1024 | let mut res = vec![]; | ||
1025 | |||
1026 | for call_item in call_items.into_iter() { | ||
1027 | let file_id = call_item.target.file_id(); | ||
1028 | let line_index = world.analysis().file_line_index(file_id)?; | ||
1029 | let range = call_item.target.range(); | ||
1030 | let item = to_call_hierarchy_item(file_id, range, &world, &line_index, call_item.target)?; | ||
1031 | res.push(CallHierarchyIncomingCall { | ||
1032 | from: item, | ||
1033 | from_ranges: call_item.ranges.iter().map(|it| it.conv_with(&line_index)).collect(), | ||
1034 | }); | ||
1035 | } | ||
1036 | |||
1037 | Ok(Some(res)) | ||
1038 | } | ||
1039 | |||
1040 | pub fn handle_call_hierarchy_outgoing( | ||
1041 | world: WorldSnapshot, | ||
1042 | params: CallHierarchyOutgoingCallsParams, | ||
1043 | ) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> { | ||
1044 | let _p = profile("handle_call_hierarchy_outgoing"); | ||
1045 | let item = params.item; | ||
1046 | |||
1047 | let doc = TextDocumentIdentifier::new(item.uri); | ||
1048 | let frange: FileRange = (&doc, item.range).try_conv_with(&world)?; | ||
1049 | let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() }; | ||
1050 | |||
1051 | let call_items = match world.analysis().outgoing_calls(fpos)? { | ||
1052 | None => return Ok(None), | ||
1053 | Some(it) => it, | ||
1054 | }; | ||
1055 | |||
1056 | let mut res = vec![]; | ||
1057 | |||
1058 | for call_item in call_items.into_iter() { | ||
1059 | let file_id = call_item.target.file_id(); | ||
1060 | let line_index = world.analysis().file_line_index(file_id)?; | ||
1061 | let range = call_item.target.range(); | ||
1062 | let item = to_call_hierarchy_item(file_id, range, &world, &line_index, call_item.target)?; | ||
1063 | res.push(CallHierarchyOutgoingCall { | ||
1064 | to: item, | ||
1065 | from_ranges: call_item.ranges.iter().map(|it| it.conv_with(&line_index)).collect(), | ||
1066 | }); | ||
1067 | } | ||
1068 | |||
1069 | Ok(Some(res)) | ||
1070 | } | ||
diff --git a/crates/ra_lsp_server/src/main_loop/pending_requests.rs b/crates/ra_lsp_server/src/main_loop/pending_requests.rs deleted file mode 100644 index 73b33e419..000000000 --- a/crates/ra_lsp_server/src/main_loop/pending_requests.rs +++ /dev/null | |||
@@ -1,75 +0,0 @@ | |||
1 | //! Data structures that keep track of inflight requests. | ||
2 | |||
3 | use std::time::{Duration, Instant}; | ||
4 | |||
5 | use lsp_server::RequestId; | ||
6 | use rustc_hash::FxHashMap; | ||
7 | |||
8 | #[derive(Debug)] | ||
9 | pub struct CompletedRequest { | ||
10 | pub id: RequestId, | ||
11 | pub method: String, | ||
12 | pub duration: Duration, | ||
13 | } | ||
14 | |||
15 | #[derive(Debug)] | ||
16 | pub(crate) struct PendingRequest { | ||
17 | pub(crate) id: RequestId, | ||
18 | pub(crate) method: String, | ||
19 | pub(crate) received: Instant, | ||
20 | } | ||
21 | |||
22 | impl From<PendingRequest> for CompletedRequest { | ||
23 | fn from(pending: PendingRequest) -> CompletedRequest { | ||
24 | CompletedRequest { | ||
25 | id: pending.id, | ||
26 | method: pending.method, | ||
27 | duration: pending.received.elapsed(), | ||
28 | } | ||
29 | } | ||
30 | } | ||
31 | |||
32 | #[derive(Debug, Default)] | ||
33 | pub(crate) struct PendingRequests { | ||
34 | map: FxHashMap<RequestId, PendingRequest>, | ||
35 | } | ||
36 | |||
37 | impl PendingRequests { | ||
38 | pub(crate) fn start(&mut self, request: PendingRequest) { | ||
39 | let id = request.id.clone(); | ||
40 | let prev = self.map.insert(id.clone(), request); | ||
41 | assert!(prev.is_none(), "duplicate request with id {}", id); | ||
42 | } | ||
43 | pub(crate) fn cancel(&mut self, id: &RequestId) -> bool { | ||
44 | self.map.remove(id).is_some() | ||
45 | } | ||
46 | pub(crate) fn finish(&mut self, id: &RequestId) -> Option<CompletedRequest> { | ||
47 | self.map.remove(id).map(CompletedRequest::from) | ||
48 | } | ||
49 | } | ||
50 | |||
51 | const N_COMPLETED_REQUESTS: usize = 10; | ||
52 | |||
53 | #[derive(Debug, Default)] | ||
54 | pub struct LatestRequests { | ||
55 | // hand-rolling VecDeque here to print things in a nicer way | ||
56 | buf: [Option<CompletedRequest>; N_COMPLETED_REQUESTS], | ||
57 | idx: usize, | ||
58 | } | ||
59 | |||
60 | impl LatestRequests { | ||
61 | pub(crate) fn record(&mut self, request: CompletedRequest) { | ||
62 | // special case: don't track status request itself | ||
63 | if request.method == "rust-analyzer/analyzerStatus" { | ||
64 | return; | ||
65 | } | ||
66 | let idx = self.idx; | ||
67 | self.buf[idx] = Some(request); | ||
68 | self.idx = (idx + 1) % N_COMPLETED_REQUESTS; | ||
69 | } | ||
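// Note (added): the fixed-size array plus the wrapping `idx` make this a small ring
// buffer holding the N_COMPLETED_REQUESTS most recently completed requests; `idx` always
// points at the slot that will be overwritten next.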
70 | |||
71 | pub(crate) fn iter(&self) -> impl Iterator<Item = (bool, &CompletedRequest)> { | ||
72 | let idx = self.idx; | ||
73 | self.buf.iter().enumerate().filter_map(move |(i, req)| Some((i == idx, req.as_ref()?))) | ||
74 | } | ||
75 | } | ||
diff --git a/crates/ra_lsp_server/src/main_loop/subscriptions.rs b/crates/ra_lsp_server/src/main_loop/subscriptions.rs deleted file mode 100644 index bee6437cf..000000000 --- a/crates/ra_lsp_server/src/main_loop/subscriptions.rs +++ /dev/null | |||
@@ -1,22 +0,0 @@ | |||
1 | //! Keeps track of file subscriptions -- the set of currently opened files for | ||
2 | //! which we want to publish diagnostics, syntax highlighting, etc. | ||
3 | |||
4 | use ra_ide::FileId; | ||
5 | use rustc_hash::FxHashSet; | ||
6 | |||
7 | #[derive(Default, Debug)] | ||
8 | pub(crate) struct Subscriptions { | ||
9 | subs: FxHashSet<FileId>, | ||
10 | } | ||
11 | |||
12 | impl Subscriptions { | ||
13 | pub(crate) fn add_sub(&mut self, file_id: FileId) { | ||
14 | self.subs.insert(file_id); | ||
15 | } | ||
16 | pub(crate) fn remove_sub(&mut self, file_id: FileId) { | ||
17 | self.subs.remove(&file_id); | ||
18 | } | ||
19 | pub(crate) fn subscriptions(&self) -> Vec<FileId> { | ||
20 | self.subs.iter().cloned().collect() | ||
21 | } | ||
22 | } | ||
diff --git a/crates/ra_lsp_server/src/markdown.rs b/crates/ra_lsp_server/src/markdown.rs deleted file mode 100644 index 76bef45cc..000000000 --- a/crates/ra_lsp_server/src/markdown.rs +++ /dev/null | |||
@@ -1,75 +0,0 @@ | |||
1 | //! Transforms markdown | ||
2 | |||
3 | pub(crate) fn format_docs(src: &str) -> String { | ||
4 | let mut processed_lines = Vec::new(); | ||
5 | let mut in_code_block = false; | ||
6 | for line in src.lines() { | ||
7 | if in_code_block && code_line_ignored_by_rustdoc(line) { | ||
8 | continue; | ||
9 | } | ||
10 | |||
11 | if line.starts_with("```") { | ||
12 | in_code_block ^= true | ||
13 | } | ||
14 | |||
15 | let line = if in_code_block && line.starts_with("```") && !line.contains("rust") { | ||
16 | "```rust" | ||
17 | } else { | ||
18 | line | ||
19 | }; | ||
20 | |||
21 | processed_lines.push(line); | ||
22 | } | ||
23 | processed_lines.join("\n") | ||
24 | } | ||
25 | |||
26 | fn code_line_ignored_by_rustdoc(line: &str) -> bool { | ||
27 | let trimmed = line.trim(); | ||
28 | trimmed == "#" || trimmed.starts_with("# ") || trimmed.starts_with("#\t") | ||
29 | } | ||
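// Example (added): rustdoc hides such lines from rendered doc examples, so e.g.
// `# use std::io;` or a bare `#` is stripped from hover text as well, while
// `#[derive(Debug)]` is kept because there is no space or tab after the `#`.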
30 | |||
31 | #[cfg(test)] | ||
32 | mod tests { | ||
33 | use super::*; | ||
34 | |||
35 | #[test] | ||
36 | fn test_format_docs_adds_rust() { | ||
37 | let comment = "```\nfn some_rust() {}\n```"; | ||
38 | assert_eq!(format_docs(comment), "```rust\nfn some_rust() {}\n```"); | ||
39 | } | ||
40 | |||
41 | #[test] | ||
42 | fn test_format_docs_skips_comments_in_rust_block() { | ||
43 | let comment = | ||
44 | "```rust\n # skip1\n# skip2\n#stay1\nstay2\n#\n #\n # \n #\tskip3\n\t#\t\n```"; | ||
45 | assert_eq!(format_docs(comment), "```rust\n#stay1\nstay2\n```"); | ||
46 | } | ||
47 | |||
48 | #[test] | ||
49 | fn test_format_docs_keeps_comments_outside_of_rust_block() { | ||
50 | let comment = " # stay1\n# stay2\n#stay3\nstay4\n#\n #\n # \n #\tstay5\n\t#\t"; | ||
51 | assert_eq!(format_docs(comment), comment); | ||
52 | } | ||
53 | |||
54 | #[test] | ||
55 | fn test_format_docs_preserves_newlines() { | ||
56 | let comment = "this\nis\nmultiline"; | ||
57 | assert_eq!(format_docs(comment), comment); | ||
58 | } | ||
59 | |||
60 | #[test] | ||
61 | fn test_code_blocks_in_comments_marked_as_rust() { | ||
62 | let comment = r#"```rust | ||
63 | fn main(){} | ||
64 | ``` | ||
65 | Some comment. | ||
66 | ``` | ||
67 | let a = 1; | ||
68 | ```"#; | ||
69 | |||
70 | assert_eq!( | ||
71 | format_docs(comment), | ||
72 | "```rust\nfn main(){}\n```\nSome comment.\n```rust\nlet a = 1;\n```" | ||
73 | ); | ||
74 | } | ||
75 | } | ||
diff --git a/crates/ra_lsp_server/src/req.rs b/crates/ra_lsp_server/src/req.rs deleted file mode 100644 index 7ff7f60b3..000000000 --- a/crates/ra_lsp_server/src/req.rs +++ /dev/null | |||
@@ -1,221 +0,0 @@ | |||
1 | //! Defines `rust-analyzer` specific custom messages. | ||
2 | |||
3 | use lsp_types::{Location, Position, Range, TextDocumentIdentifier, Url}; | ||
4 | use rustc_hash::FxHashMap; | ||
5 | use serde::{Deserialize, Serialize}; | ||
6 | |||
7 | pub use lsp_types::{ | ||
8 | notification::*, request::*, ApplyWorkspaceEditParams, CodeActionParams, CodeLens, | ||
9 | CodeLensParams, CompletionParams, CompletionResponse, DiagnosticTag, | ||
10 | DidChangeConfigurationParams, DidChangeWatchedFilesParams, | ||
11 | DidChangeWatchedFilesRegistrationOptions, DocumentOnTypeFormattingParams, DocumentSymbolParams, | ||
12 | DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType, | ||
13 | PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken, | ||
14 | PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange, | ||
15 | SelectionRangeParams, ServerCapabilities, ShowMessageParams, SignatureHelp, SymbolKind, | ||
16 | TextDocumentEdit, TextDocumentPositionParams, TextEdit, WorkDoneProgressParams, WorkspaceEdit, | ||
17 | WorkspaceSymbolParams, | ||
18 | }; | ||
19 | |||
20 | pub enum AnalyzerStatus {} | ||
21 | |||
22 | impl Request for AnalyzerStatus { | ||
23 | type Params = (); | ||
24 | type Result = String; | ||
25 | const METHOD: &'static str = "rust-analyzer/analyzerStatus"; | ||
26 | } | ||
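// Note (added): every custom method below follows this same pattern: an uninhabited enum
// implementing `Request` (or `Notification`) with associated `Params`, `Result`, and a
// `METHOD` string, which lets the main loop dispatch it generically.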
27 | |||
28 | pub enum CollectGarbage {} | ||
29 | |||
30 | impl Request for CollectGarbage { | ||
31 | type Params = (); | ||
32 | type Result = (); | ||
33 | const METHOD: &'static str = "rust-analyzer/collectGarbage"; | ||
34 | } | ||
35 | |||
36 | pub enum SyntaxTree {} | ||
37 | |||
38 | impl Request for SyntaxTree { | ||
39 | type Params = SyntaxTreeParams; | ||
40 | type Result = String; | ||
41 | const METHOD: &'static str = "rust-analyzer/syntaxTree"; | ||
42 | } | ||
43 | |||
44 | #[derive(Deserialize, Debug)] | ||
45 | #[serde(rename_all = "camelCase")] | ||
46 | pub struct SyntaxTreeParams { | ||
47 | pub text_document: TextDocumentIdentifier, | ||
48 | pub range: Option<Range>, | ||
49 | } | ||
50 | |||
51 | #[derive(Serialize, Debug)] | ||
52 | #[serde(rename_all = "camelCase")] | ||
53 | pub struct ExpandedMacro { | ||
54 | pub name: String, | ||
55 | pub expansion: String, | ||
56 | } | ||
57 | |||
58 | pub enum ExpandMacro {} | ||
59 | |||
60 | impl Request for ExpandMacro { | ||
61 | type Params = ExpandMacroParams; | ||
62 | type Result = Option<ExpandedMacro>; | ||
63 | const METHOD: &'static str = "rust-analyzer/expandMacro"; | ||
64 | } | ||
65 | |||
66 | #[derive(Deserialize, Debug)] | ||
67 | #[serde(rename_all = "camelCase")] | ||
68 | pub struct ExpandMacroParams { | ||
69 | pub text_document: TextDocumentIdentifier, | ||
70 | pub position: Option<Position>, | ||
71 | } | ||
72 | |||
73 | pub enum FindMatchingBrace {} | ||
74 | |||
75 | impl Request for FindMatchingBrace { | ||
76 | type Params = FindMatchingBraceParams; | ||
77 | type Result = Vec<Position>; | ||
78 | const METHOD: &'static str = "rust-analyzer/findMatchingBrace"; | ||
79 | } | ||
80 | |||
81 | #[derive(Deserialize, Debug)] | ||
82 | #[serde(rename_all = "camelCase")] | ||
83 | pub struct FindMatchingBraceParams { | ||
84 | pub text_document: TextDocumentIdentifier, | ||
85 | pub offsets: Vec<Position>, | ||
86 | } | ||
87 | |||
88 | pub enum DecorationsRequest {} | ||
89 | |||
90 | impl Request for DecorationsRequest { | ||
91 | type Params = TextDocumentIdentifier; | ||
92 | type Result = Vec<Decoration>; | ||
93 | const METHOD: &'static str = "rust-analyzer/decorationsRequest"; | ||
94 | } | ||
95 | |||
96 | pub enum PublishDecorations {} | ||
97 | |||
98 | impl Notification for PublishDecorations { | ||
99 | type Params = PublishDecorationsParams; | ||
100 | const METHOD: &'static str = "rust-analyzer/publishDecorations"; | ||
101 | } | ||
102 | |||
103 | #[derive(Serialize, Debug)] | ||
104 | #[serde(rename_all = "camelCase")] | ||
105 | pub struct PublishDecorationsParams { | ||
106 | pub uri: Url, | ||
107 | pub decorations: Vec<Decoration>, | ||
108 | } | ||
109 | |||
110 | #[derive(Serialize, Debug)] | ||
111 | #[serde(rename_all = "camelCase")] | ||
112 | pub struct Decoration { | ||
113 | pub range: Range, | ||
114 | pub tag: &'static str, | ||
115 | pub binding_hash: Option<String>, | ||
116 | } | ||
117 | |||
118 | pub enum ParentModule {} | ||
119 | |||
120 | impl Request for ParentModule { | ||
121 | type Params = TextDocumentPositionParams; | ||
122 | type Result = Vec<Location>; | ||
123 | const METHOD: &'static str = "rust-analyzer/parentModule"; | ||
124 | } | ||
125 | |||
126 | pub enum JoinLines {} | ||
127 | |||
128 | impl Request for JoinLines { | ||
129 | type Params = JoinLinesParams; | ||
130 | type Result = SourceChange; | ||
131 | const METHOD: &'static str = "rust-analyzer/joinLines"; | ||
132 | } | ||
133 | |||
134 | #[derive(Deserialize, Debug)] | ||
135 | #[serde(rename_all = "camelCase")] | ||
136 | pub struct JoinLinesParams { | ||
137 | pub text_document: TextDocumentIdentifier, | ||
138 | pub range: Range, | ||
139 | } | ||
140 | |||
141 | pub enum OnEnter {} | ||
142 | |||
143 | impl Request for OnEnter { | ||
144 | type Params = TextDocumentPositionParams; | ||
145 | type Result = Option<SourceChange>; | ||
146 | const METHOD: &'static str = "rust-analyzer/onEnter"; | ||
147 | } | ||
148 | |||
149 | pub enum Runnables {} | ||
150 | |||
151 | impl Request for Runnables { | ||
152 | type Params = RunnablesParams; | ||
153 | type Result = Vec<Runnable>; | ||
154 | const METHOD: &'static str = "rust-analyzer/runnables"; | ||
155 | } | ||
156 | |||
157 | #[derive(Serialize, Deserialize, Debug)] | ||
158 | #[serde(rename_all = "camelCase")] | ||
159 | pub struct RunnablesParams { | ||
160 | pub text_document: TextDocumentIdentifier, | ||
161 | pub position: Option<Position>, | ||
162 | } | ||
163 | |||
164 | #[derive(Serialize, Debug)] | ||
165 | #[serde(rename_all = "camelCase")] | ||
166 | pub struct Runnable { | ||
167 | pub range: Range, | ||
168 | pub label: String, | ||
169 | pub bin: String, | ||
170 | pub args: Vec<String>, | ||
171 | pub env: FxHashMap<String, String>, | ||
172 | pub cwd: Option<String>, | ||
173 | } | ||
174 | |||
175 | #[derive(Serialize, Debug)] | ||
176 | #[serde(rename_all = "camelCase")] | ||
177 | pub struct SourceChange { | ||
178 | pub label: String, | ||
179 | pub workspace_edit: WorkspaceEdit, | ||
180 | pub cursor_position: Option<TextDocumentPositionParams>, | ||
181 | } | ||
182 | |||
183 | pub enum InlayHints {} | ||
184 | |||
185 | impl Request for InlayHints { | ||
186 | type Params = InlayHintsParams; | ||
187 | type Result = Vec<InlayHint>; | ||
188 | const METHOD: &'static str = "rust-analyzer/inlayHints"; | ||
189 | } | ||
190 | |||
191 | #[derive(Serialize, Deserialize, Debug)] | ||
192 | #[serde(rename_all = "camelCase")] | ||
193 | pub struct InlayHintsParams { | ||
194 | pub text_document: TextDocumentIdentifier, | ||
195 | } | ||
196 | |||
197 | #[derive(Debug, PartialEq, Eq, Deserialize, Serialize)] | ||
198 | pub enum InlayKind { | ||
199 | TypeHint, | ||
200 | ParameterHint, | ||
201 | } | ||
202 | |||
203 | #[derive(Debug, Deserialize, Serialize)] | ||
204 | pub struct InlayHint { | ||
205 | pub range: Range, | ||
206 | pub kind: InlayKind, | ||
207 | pub label: String, | ||
208 | } | ||
209 | |||
210 | pub enum Ssr {} | ||
211 | |||
212 | impl Request for Ssr { | ||
213 | type Params = SsrParams; | ||
214 | type Result = SourceChange; | ||
215 | const METHOD: &'static str = "rust-analyzer/ssr"; | ||
216 | } | ||
217 | |||
218 | #[derive(Debug, Deserialize, Serialize)] | ||
219 | pub struct SsrParams { | ||
220 | pub arg: String, | ||
221 | } | ||
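Every endpoint in the deleted `req.rs` follows one pattern: an uninhabited marker enum plus an `lsp_types::request::Request` impl that ties a method string to its params and result types. A minimal sketch of a hypothetical extra endpoint written the same way (the name, method string, and params below are invented for illustration and were never part of rust-analyzer):

```rust
use lsp_types::{request::Request, TextDocumentIdentifier};
use serde::{Deserialize, Serialize};

// Hypothetical request, shown only to illustrate the marker-enum pattern above.
pub enum FooBar {}

impl Request for FooBar {
    type Params = FooBarParams;
    type Result = Option<String>;
    const METHOD: &'static str = "rust-analyzer/fooBar"; // invented method name
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(rename_all = "camelCase")]
pub struct FooBarParams {
    pub text_document: TextDocumentIdentifier,
}
```

On the wire this is an ordinary JSON-RPC request whose `method` equals `METHOD`; the test harness in `support.rs` (further down in this diff) builds such messages generically via `lsp_server::Request::new(id.into(), R::METHOD.to_string(), params)`.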
diff --git a/crates/ra_lsp_server/src/vfs_glob.rs b/crates/ra_lsp_server/src/vfs_glob.rs deleted file mode 100644 index 91b33f94e..000000000 --- a/crates/ra_lsp_server/src/vfs_glob.rs +++ /dev/null | |||
@@ -1,94 +0,0 @@ | |||
1 | //! Exclusion rules for vfs. | ||
2 | //! | ||
3 | //! By default, we include only `.rs` files, and skip some known offenders like | ||
4 | //! `/target` or `/node_modules` altogether. | ||
5 | //! | ||
6 | //! It's also possible to add custom exclusion globs. | ||
7 | |||
8 | use globset::{GlobSet, GlobSetBuilder}; | ||
9 | use ra_vfs::{Filter, RelativePath}; | ||
10 | |||
11 | pub use globset::{Glob, GlobBuilder}; | ||
12 | |||
13 | const ALWAYS_IGNORED: &[&str] = &["target/**", "**/node_modules/**", "**/.git/**"]; | ||
14 | const IGNORED_FOR_NON_MEMBERS: &[&str] = &["examples/**", "tests/**", "benches/**"]; | ||
15 | |||
16 | pub struct RustPackageFilterBuilder { | ||
17 | is_member: bool, | ||
18 | exclude: GlobSetBuilder, | ||
19 | } | ||
20 | |||
21 | impl Default for RustPackageFilterBuilder { | ||
22 | fn default() -> RustPackageFilterBuilder { | ||
23 | RustPackageFilterBuilder { is_member: false, exclude: GlobSetBuilder::new() } | ||
24 | } | ||
25 | } | ||
26 | |||
27 | impl RustPackageFilterBuilder { | ||
28 | pub fn set_member(mut self, is_member: bool) -> RustPackageFilterBuilder { | ||
29 | self.is_member = is_member; | ||
30 | self | ||
31 | } | ||
32 | pub fn exclude(mut self, glob: Glob) -> RustPackageFilterBuilder { | ||
33 | self.exclude.add(glob); | ||
34 | self | ||
35 | } | ||
36 | pub fn into_vfs_filter(self) -> Box<dyn Filter> { | ||
37 | let RustPackageFilterBuilder { is_member, mut exclude } = self; | ||
38 | for &glob in ALWAYS_IGNORED { | ||
39 | exclude.add(Glob::new(glob).unwrap()); | ||
40 | } | ||
41 | if !is_member { | ||
42 | for &glob in IGNORED_FOR_NON_MEMBERS { | ||
43 | exclude.add(Glob::new(glob).unwrap()); | ||
44 | } | ||
45 | } | ||
46 | Box::new(RustPackageFilter { exclude: exclude.build().unwrap() }) | ||
47 | } | ||
48 | } | ||
49 | |||
50 | struct RustPackageFilter { | ||
51 | exclude: GlobSet, | ||
52 | } | ||
53 | |||
54 | impl Filter for RustPackageFilter { | ||
55 | fn include_dir(&self, dir_path: &RelativePath) -> bool { | ||
56 | !self.exclude.is_match(dir_path.as_str()) | ||
57 | } | ||
58 | |||
59 | fn include_file(&self, file_path: &RelativePath) -> bool { | ||
60 | file_path.extension() == Some("rs") | ||
61 | } | ||
62 | } | ||
63 | |||
64 | #[test] | ||
65 | fn test_globs() { | ||
66 | let filter = RustPackageFilterBuilder::default().set_member(true).into_vfs_filter(); | ||
67 | |||
68 | assert!(filter.include_dir(RelativePath::new("src/tests"))); | ||
69 | assert!(filter.include_dir(RelativePath::new("src/target"))); | ||
70 | assert!(filter.include_dir(RelativePath::new("tests"))); | ||
71 | assert!(filter.include_dir(RelativePath::new("benches"))); | ||
72 | |||
73 | assert!(!filter.include_dir(RelativePath::new("target"))); | ||
74 | assert!(!filter.include_dir(RelativePath::new("src/foo/.git"))); | ||
75 | assert!(!filter.include_dir(RelativePath::new("foo/node_modules"))); | ||
76 | |||
77 | let filter = RustPackageFilterBuilder::default().set_member(false).into_vfs_filter(); | ||
78 | |||
79 | assert!(filter.include_dir(RelativePath::new("src/tests"))); | ||
80 | assert!(filter.include_dir(RelativePath::new("src/target"))); | ||
81 | |||
82 | assert!(!filter.include_dir(RelativePath::new("target"))); | ||
83 | assert!(!filter.include_dir(RelativePath::new("src/foo/.git"))); | ||
84 | assert!(!filter.include_dir(RelativePath::new("foo/node_modules"))); | ||
85 | assert!(!filter.include_dir(RelativePath::new("tests"))); | ||
86 | assert!(!filter.include_dir(RelativePath::new("benches"))); | ||
87 | |||
88 | let filter = RustPackageFilterBuilder::default() | ||
89 | .set_member(true) | ||
90 | .exclude(Glob::new("src/llvm-project/**").unwrap()) | ||
91 | .into_vfs_filter(); | ||
92 | |||
93 | assert!(!filter.include_dir(RelativePath::new("src/llvm-project/clang"))); | ||
94 | } | ||
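For context, this builder was consumed when constructing VFS roots in `world.rs` (the next file in this diff). A condensed sketch of that wiring, assuming it sits inside this module so `Glob` and `RustPackageFilterBuilder` are in scope:

```rust
use std::path::PathBuf;

use ra_vfs::RootEntry;

// Condensed from the root-building loop in WorldState::new below: every root
// always drops target/, node_modules/ and .git/, plus any user-supplied globs.
fn package_root(path: PathBuf, is_member: bool, exclude_globs: &[Glob]) -> RootEntry {
    let mut filter = RustPackageFilterBuilder::default().set_member(is_member);
    for glob in exclude_globs {
        filter = filter.exclude(glob.clone());
    }
    RootEntry::new(path, filter.into_vfs_filter())
}
```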
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs deleted file mode 100644 index 96efab844..000000000 --- a/crates/ra_lsp_server/src/world.rs +++ /dev/null | |||
@@ -1,314 +0,0 @@ | |||
1 | //! The context or environment in which the language server functions. In our | ||
2 | //! server implementation this is known as the `WorldState`. | ||
3 | //! | ||
4 | //! Each tick provides an immutable snapshot of the state as `WorldSnapshot`. | ||
5 | |||
6 | use std::{ | ||
7 | path::{Path, PathBuf}, | ||
8 | sync::Arc, | ||
9 | }; | ||
10 | |||
11 | use crossbeam_channel::{unbounded, Receiver}; | ||
12 | use lsp_server::ErrorCode; | ||
13 | use lsp_types::Url; | ||
14 | use parking_lot::RwLock; | ||
15 | use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckWatcher}; | ||
16 | use ra_ide::{ | ||
17 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FeatureFlags, FileId, LibraryData, | ||
18 | SourceRootId, | ||
19 | }; | ||
20 | use ra_project_model::{get_rustc_cfg_options, ProjectWorkspace}; | ||
21 | use ra_vfs::{LineEndings, RootEntry, Vfs, VfsChange, VfsFile, VfsRoot, VfsTask, Watch}; | ||
22 | use relative_path::RelativePathBuf; | ||
23 | |||
24 | use crate::{ | ||
25 | diagnostics::{CheckFixes, DiagnosticCollection}, | ||
26 | main_loop::pending_requests::{CompletedRequest, LatestRequests}, | ||
27 | vfs_glob::{Glob, RustPackageFilterBuilder}, | ||
28 | LspError, Result, | ||
29 | }; | ||
30 | |||
31 | #[derive(Debug, Clone)] | ||
32 | pub struct Options { | ||
33 | pub publish_decorations: bool, | ||
34 | pub supports_location_link: bool, | ||
35 | pub line_folding_only: bool, | ||
36 | pub max_inlay_hint_length: Option<usize>, | ||
37 | pub rustfmt_args: Vec<String>, | ||
38 | pub cargo_watch: CheckOptions, | ||
39 | } | ||
40 | |||
41 | /// `WorldState` is the primary mutable state of the language server | ||
42 | /// | ||
43 | /// The most interesting components are `vfs`, which stores a consistent | ||
44 | /// snapshot of the file systems, and `analysis_host`, which stores our | ||
45 | /// incremental salsa database. | ||
46 | #[derive(Debug)] | ||
47 | pub struct WorldState { | ||
48 | pub options: Options, | ||
49 | //FIXME: this belongs to `LoopState` rather than to `WorldState` | ||
50 | pub roots_to_scan: usize, | ||
51 | pub roots: Vec<PathBuf>, | ||
52 | pub workspaces: Arc<Vec<ProjectWorkspace>>, | ||
53 | pub analysis_host: AnalysisHost, | ||
54 | pub vfs: Arc<RwLock<Vfs>>, | ||
55 | pub task_receiver: Receiver<VfsTask>, | ||
56 | pub latest_requests: Arc<RwLock<LatestRequests>>, | ||
57 | pub check_watcher: CheckWatcher, | ||
58 | pub diagnostics: DiagnosticCollection, | ||
59 | } | ||
60 | |||
61 | /// An immutable snapshot of the world's state at a point in time. | ||
62 | pub struct WorldSnapshot { | ||
63 | pub options: Options, | ||
64 | pub workspaces: Arc<Vec<ProjectWorkspace>>, | ||
65 | pub analysis: Analysis, | ||
66 | pub latest_requests: Arc<RwLock<LatestRequests>>, | ||
67 | pub check_fixes: CheckFixes, | ||
68 | vfs: Arc<RwLock<Vfs>>, | ||
69 | } | ||
70 | |||
71 | impl WorldState { | ||
72 | pub fn new( | ||
73 | folder_roots: Vec<PathBuf>, | ||
74 | workspaces: Vec<ProjectWorkspace>, | ||
75 | lru_capacity: Option<usize>, | ||
76 | exclude_globs: &[Glob], | ||
77 | watch: Watch, | ||
78 | options: Options, | ||
79 | feature_flags: FeatureFlags, | ||
80 | ) -> WorldState { | ||
81 | let mut change = AnalysisChange::new(); | ||
82 | |||
83 | let mut roots = Vec::new(); | ||
84 | roots.extend(folder_roots.iter().map(|path| { | ||
85 | let mut filter = RustPackageFilterBuilder::default().set_member(true); | ||
86 | for glob in exclude_globs.iter() { | ||
87 | filter = filter.exclude(glob.clone()); | ||
88 | } | ||
89 | RootEntry::new(path.clone(), filter.into_vfs_filter()) | ||
90 | })); | ||
91 | for ws in workspaces.iter() { | ||
92 | roots.extend(ws.to_roots().into_iter().map(|pkg_root| { | ||
93 | let mut filter = | ||
94 | RustPackageFilterBuilder::default().set_member(pkg_root.is_member()); | ||
95 | for glob in exclude_globs.iter() { | ||
96 | filter = filter.exclude(glob.clone()); | ||
97 | } | ||
98 | RootEntry::new(pkg_root.path().clone(), filter.into_vfs_filter()) | ||
99 | })); | ||
100 | } | ||
101 | let (task_sender, task_receiver) = unbounded(); | ||
102 | let task_sender = Box::new(move |t| task_sender.send(t).unwrap()); | ||
103 | let (mut vfs, vfs_roots) = Vfs::new(roots, task_sender, watch); | ||
104 | let roots_to_scan = vfs_roots.len(); | ||
105 | for r in vfs_roots { | ||
106 | let vfs_root_path = vfs.root2path(r); | ||
107 | let is_local = folder_roots.iter().any(|it| vfs_root_path.starts_with(it)); | ||
108 | change.add_root(SourceRootId(r.0), is_local); | ||
109 | change.set_debug_root_path(SourceRootId(r.0), vfs_root_path.display().to_string()); | ||
110 | } | ||
111 | |||
112 | // FIXME: Read default cfgs from config | ||
113 | let default_cfg_options = { | ||
114 | let mut opts = get_rustc_cfg_options(); | ||
115 | opts.insert_atom("test".into()); | ||
116 | opts.insert_atom("debug_assertion".into()); | ||
117 | opts | ||
118 | }; | ||
119 | |||
120 | // Create crate graph from all the workspaces | ||
121 | let mut crate_graph = CrateGraph::default(); | ||
122 | let mut load = |path: &std::path::Path| { | ||
123 | let vfs_file = vfs.load(path); | ||
124 | vfs_file.map(|f| FileId(f.0)) | ||
125 | }; | ||
126 | for ws in workspaces.iter() { | ||
127 | let (graph, crate_names) = ws.to_crate_graph(&default_cfg_options, &mut load); | ||
128 | let shift = crate_graph.extend(graph); | ||
129 | for (crate_id, name) in crate_names { | ||
130 | change.set_debug_crate_name(crate_id.shift(shift), name) | ||
131 | } | ||
132 | } | ||
133 | change.set_crate_graph(crate_graph); | ||
134 | |||
135 | // FIXME: Figure out the multi-workspace situation | ||
136 | let check_watcher = workspaces | ||
137 | .iter() | ||
138 | .find_map(|w| match w { | ||
139 | ProjectWorkspace::Cargo { cargo, .. } => Some(cargo), | ||
140 | ProjectWorkspace::Json { .. } => None, | ||
141 | }) | ||
142 | .map(|cargo| { | ||
143 | let cargo_project_root = cargo.workspace_root().to_path_buf(); | ||
144 | CheckWatcher::new(&options.cargo_watch, cargo_project_root) | ||
145 | }) | ||
146 | .unwrap_or_else(|| { | ||
147 | log::warn!("Cargo check watching only supported for cargo workspaces, disabling"); | ||
148 | CheckWatcher::dummy() | ||
149 | }); | ||
150 | |||
151 | let mut analysis_host = AnalysisHost::new(lru_capacity, feature_flags); | ||
152 | analysis_host.apply_change(change); | ||
153 | WorldState { | ||
154 | options, | ||
155 | roots_to_scan, | ||
156 | roots: folder_roots, | ||
157 | workspaces: Arc::new(workspaces), | ||
158 | analysis_host, | ||
159 | vfs: Arc::new(RwLock::new(vfs)), | ||
160 | task_receiver, | ||
161 | latest_requests: Default::default(), | ||
162 | check_watcher, | ||
163 | diagnostics: Default::default(), | ||
164 | } | ||
165 | } | ||
166 | |||
167 | /// Returns a vec of libraries | ||
168 | /// FIXME: better API here | ||
169 | pub fn process_changes( | ||
170 | &mut self, | ||
171 | ) -> Option<Vec<(SourceRootId, Vec<(FileId, RelativePathBuf, Arc<String>)>)>> { | ||
172 | let changes = self.vfs.write().commit_changes(); | ||
173 | if changes.is_empty() { | ||
174 | return None; | ||
175 | } | ||
176 | let mut libs = Vec::new(); | ||
177 | let mut change = AnalysisChange::new(); | ||
178 | for c in changes { | ||
179 | match c { | ||
180 | VfsChange::AddRoot { root, files } => { | ||
181 | let root_path = self.vfs.read().root2path(root); | ||
182 | let is_local = self.roots.iter().any(|r| root_path.starts_with(r)); | ||
183 | if is_local { | ||
184 | self.roots_to_scan -= 1; | ||
185 | for (file, path, text) in files { | ||
186 | change.add_file(SourceRootId(root.0), FileId(file.0), path, text); | ||
187 | } | ||
188 | } else { | ||
189 | let files = files | ||
190 | .into_iter() | ||
191 | .map(|(vfsfile, path, text)| (FileId(vfsfile.0), path, text)) | ||
192 | .collect(); | ||
193 | libs.push((SourceRootId(root.0), files)); | ||
194 | } | ||
195 | } | ||
196 | VfsChange::AddFile { root, file, path, text } => { | ||
197 | change.add_file(SourceRootId(root.0), FileId(file.0), path, text); | ||
198 | } | ||
199 | VfsChange::RemoveFile { root, file, path } => { | ||
200 | change.remove_file(SourceRootId(root.0), FileId(file.0), path) | ||
201 | } | ||
202 | VfsChange::ChangeFile { file, text } => { | ||
203 | change.change_file(FileId(file.0), text); | ||
204 | } | ||
205 | } | ||
206 | } | ||
207 | self.analysis_host.apply_change(change); | ||
208 | Some(libs) | ||
209 | } | ||
210 | |||
211 | pub fn add_lib(&mut self, data: LibraryData) { | ||
212 | self.roots_to_scan -= 1; | ||
213 | let mut change = AnalysisChange::new(); | ||
214 | change.add_library(data); | ||
215 | self.analysis_host.apply_change(change); | ||
216 | } | ||
217 | |||
218 | pub fn snapshot(&self) -> WorldSnapshot { | ||
219 | WorldSnapshot { | ||
220 | options: self.options.clone(), | ||
221 | workspaces: Arc::clone(&self.workspaces), | ||
222 | analysis: self.analysis_host.analysis(), | ||
223 | vfs: Arc::clone(&self.vfs), | ||
224 | latest_requests: Arc::clone(&self.latest_requests), | ||
225 | check_fixes: Arc::clone(&self.diagnostics.check_fixes), | ||
226 | } | ||
227 | } | ||
228 | |||
229 | pub fn maybe_collect_garbage(&mut self) { | ||
230 | self.analysis_host.maybe_collect_garbage() | ||
231 | } | ||
232 | |||
233 | pub fn collect_garbage(&mut self) { | ||
234 | self.analysis_host.collect_garbage() | ||
235 | } | ||
236 | |||
237 | pub fn complete_request(&mut self, request: CompletedRequest) { | ||
238 | self.latest_requests.write().record(request) | ||
239 | } | ||
240 | |||
241 | pub fn feature_flags(&self) -> &FeatureFlags { | ||
242 | self.analysis_host.feature_flags() | ||
243 | } | ||
244 | } | ||
245 | |||
246 | impl WorldSnapshot { | ||
247 | pub fn analysis(&self) -> &Analysis { | ||
248 | &self.analysis | ||
249 | } | ||
250 | |||
251 | pub fn uri_to_file_id(&self, uri: &Url) -> Result<FileId> { | ||
252 | let path = uri.to_file_path().map_err(|()| format!("invalid uri: {}", uri))?; | ||
253 | let file = self.vfs.read().path2file(&path).ok_or_else(|| { | ||
254 | // Show warning as this file is outside current workspace | ||
255 | LspError { | ||
256 | code: ErrorCode::InvalidRequest as i32, | ||
257 | message: "Rust file outside current workspace is not supported yet.".to_string(), | ||
258 | } | ||
259 | })?; | ||
260 | Ok(FileId(file.0)) | ||
261 | } | ||
262 | |||
263 | pub fn file_id_to_uri(&self, id: FileId) -> Result<Url> { | ||
264 | let path = self.vfs.read().file2path(VfsFile(id.0)); | ||
265 | let url = url_from_path_with_drive_lowercasing(path)?; | ||
266 | |||
267 | Ok(url) | ||
268 | } | ||
269 | |||
270 | pub fn file_id_to_path(&self, id: FileId) -> PathBuf { | ||
271 | self.vfs.read().file2path(VfsFile(id.0)) | ||
272 | } | ||
273 | |||
274 | pub fn file_line_endings(&self, id: FileId) -> LineEndings { | ||
275 | self.vfs.read().file_line_endings(VfsFile(id.0)) | ||
276 | } | ||
277 | |||
278 | pub fn path_to_uri(&self, root: SourceRootId, path: &RelativePathBuf) -> Result<Url> { | ||
279 | let base = self.vfs.read().root2path(VfsRoot(root.0)); | ||
280 | let path = path.to_path(base); | ||
281 | let url = Url::from_file_path(&path) | ||
282 | .map_err(|_| format!("can't convert path to url: {}", path.display()))?; | ||
283 | Ok(url) | ||
284 | } | ||
285 | |||
286 | pub fn status(&self) -> String { | ||
287 | let mut res = String::new(); | ||
288 | if self.workspaces.is_empty() { | ||
289 | res.push_str("no workspaces\n") | ||
290 | } else { | ||
291 | res.push_str("workspaces:\n"); | ||
292 | for w in self.workspaces.iter() { | ||
293 | res += &format!("{} packages loaded\n", w.n_packages()); | ||
294 | } | ||
295 | } | ||
296 | res.push_str("\nanalysis:\n"); | ||
297 | res.push_str( | ||
298 | &self | ||
299 | .analysis | ||
300 | .status() | ||
301 | .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()), | ||
302 | ); | ||
303 | res | ||
304 | } | ||
305 | |||
306 | pub fn workspace_root_for(&self, file_id: FileId) -> Option<&Path> { | ||
307 | let path = self.vfs.read().file2path(VfsFile(file_id.0)); | ||
308 | self.workspaces.iter().find_map(|ws| ws.workspace_root_for(&path)) | ||
309 | } | ||
310 | |||
311 | pub fn feature_flags(&self) -> &FeatureFlags { | ||
312 | self.analysis.feature_flags() | ||
313 | } | ||
314 | } | ||
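Taken together, the module is built around a mutate-then-snapshot cycle: the main loop owns the `WorldState`, folds VFS events into the salsa database, and hands each request handler an immutable `WorldSnapshot`. A rough sketch of that cycle, with the caveat that the real loop lived in `main_loop.rs` and the function below is invented for illustration:

```rust
// Rough sketch, not the real main loop: fold pending VFS changes into the
// analysis database, then serve a request from a cheap immutable snapshot.
fn one_turn(state: &mut WorldState, uri: &lsp_types::Url) -> Result<String> {
    // Library roots come back separately; the real server indexed them on a
    // thread pool and fed the results back through WorldState::add_lib.
    let _libraries = state.process_changes();

    // Request handlers get an immutable snapshot of the current state.
    let world: WorldSnapshot = state.snapshot();
    let _file_id = world.uri_to_file_id(uri)?;
    // A real handler would now query world.analysis() with that FileId.
    Ok(world.status())
}
```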
diff --git a/crates/ra_lsp_server/tests/heavy_tests/main.rs b/crates/ra_lsp_server/tests/heavy_tests/main.rs deleted file mode 100644 index 9ca31cbcc..000000000 --- a/crates/ra_lsp_server/tests/heavy_tests/main.rs +++ /dev/null | |||
@@ -1,582 +0,0 @@ | |||
1 | mod support; | ||
2 | |||
3 | use std::{collections::HashMap, time::Instant}; | ||
4 | |||
5 | use lsp_types::{ | ||
6 | CodeActionContext, DidOpenTextDocumentParams, DocumentFormattingParams, FormattingOptions, | ||
7 | PartialResultParams, Position, Range, TextDocumentItem, TextDocumentPositionParams, | ||
8 | WorkDoneProgressParams, | ||
9 | }; | ||
10 | use ra_lsp_server::req::{ | ||
11 | CodeActionParams, CodeActionRequest, Completion, CompletionParams, DidOpenTextDocument, | ||
12 | Formatting, OnEnter, Runnables, RunnablesParams, | ||
13 | }; | ||
14 | use serde_json::json; | ||
15 | use tempfile::TempDir; | ||
16 | use test_utils::skip_slow_tests; | ||
17 | |||
18 | use crate::support::{project, Project}; | ||
19 | |||
20 | const PROFILE: &'static str = ""; | ||
21 | // const PROFILE: &'static str = "*@3>100"; | ||
22 | |||
23 | #[test] | ||
24 | fn completes_items_from_standard_library() { | ||
25 | if skip_slow_tests() { | ||
26 | return; | ||
27 | } | ||
28 | |||
29 | let project_start = Instant::now(); | ||
30 | let server = Project::with_fixture( | ||
31 | r#" | ||
32 | //- Cargo.toml | ||
33 | [package] | ||
34 | name = "foo" | ||
35 | version = "0.0.0" | ||
36 | |||
37 | //- src/lib.rs | ||
38 | use std::collections::Spam; | ||
39 | "#, | ||
40 | ) | ||
41 | .with_sysroot(true) | ||
42 | .server(); | ||
43 | server.wait_until_workspace_is_loaded(); | ||
44 | eprintln!("loading took {:?}", project_start.elapsed()); | ||
45 | let completion_start = Instant::now(); | ||
46 | let res = server.send_request::<Completion>(CompletionParams { | ||
47 | text_document_position: TextDocumentPositionParams::new( | ||
48 | server.doc_id("src/lib.rs"), | ||
49 | Position::new(0, 23), | ||
50 | ), | ||
51 | context: None, | ||
52 | partial_result_params: PartialResultParams::default(), | ||
53 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
54 | }); | ||
55 | assert!(format!("{}", res).contains("HashMap")); | ||
56 | eprintln!("completion took {:?}", completion_start.elapsed()); | ||
57 | } | ||
58 | |||
59 | #[test] | ||
60 | fn test_runnables_no_project() { | ||
61 | if skip_slow_tests() { | ||
62 | return; | ||
63 | } | ||
64 | |||
65 | let server = project( | ||
66 | r" | ||
67 | //- lib.rs | ||
68 | #[test] | ||
69 | fn foo() { | ||
70 | } | ||
71 | ", | ||
72 | ); | ||
73 | server.wait_until_workspace_is_loaded(); | ||
74 | server.request::<Runnables>( | ||
75 | RunnablesParams { text_document: server.doc_id("lib.rs"), position: None }, | ||
76 | json!([ | ||
77 | { | ||
78 | "args": [ "test", "--", "foo", "--nocapture" ], | ||
79 | "bin": "cargo", | ||
80 | "env": { "RUST_BACKTRACE": "short" }, | ||
81 | "cwd": null, | ||
82 | "label": "test foo", | ||
83 | "range": { | ||
84 | "end": { "character": 1, "line": 2 }, | ||
85 | "start": { "character": 0, "line": 0 } | ||
86 | } | ||
87 | }, | ||
88 | { | ||
89 | "args": [ | ||
90 | "check", | ||
91 | "--all" | ||
92 | ], | ||
93 | "bin": "cargo", | ||
94 | "env": {}, | ||
95 | "cwd": null, | ||
96 | "label": "cargo check --all", | ||
97 | "range": { | ||
98 | "end": { | ||
99 | "character": 0, | ||
100 | "line": 0 | ||
101 | }, | ||
102 | "start": { | ||
103 | "character": 0, | ||
104 | "line": 0 | ||
105 | } | ||
106 | } | ||
107 | } | ||
108 | ]), | ||
109 | ); | ||
110 | } | ||
111 | |||
112 | #[test] | ||
113 | fn test_runnables_project() { | ||
114 | if skip_slow_tests() { | ||
115 | return; | ||
116 | } | ||
117 | |||
118 | let code = r#" | ||
119 | //- foo/Cargo.toml | ||
120 | [package] | ||
121 | name = "foo" | ||
122 | version = "0.0.0" | ||
123 | |||
124 | //- foo/src/lib.rs | ||
125 | pub fn foo() {} | ||
126 | |||
127 | //- foo/tests/spam.rs | ||
128 | #[test] | ||
129 | fn test_eggs() {} | ||
130 | |||
131 | //- bar/Cargo.toml | ||
132 | [package] | ||
133 | name = "bar" | ||
134 | version = "0.0.0" | ||
135 | |||
136 | //- bar/src/main.rs | ||
137 | fn main() {} | ||
138 | "#; | ||
139 | |||
140 | let server = Project::with_fixture(code).root("foo").root("bar").server(); | ||
141 | |||
142 | server.wait_until_workspace_is_loaded(); | ||
143 | server.request::<Runnables>( | ||
144 | RunnablesParams { | ||
145 | text_document: server.doc_id("foo/tests/spam.rs"), | ||
146 | position: None, | ||
147 | }, | ||
148 | json!([ | ||
149 | { | ||
150 | "args": [ "test", "--package", "foo", "--test", "spam", "--", "test_eggs", "--exact", "--nocapture" ], | ||
151 | "bin": "cargo", | ||
152 | "env": { "RUST_BACKTRACE": "short" }, | ||
153 | "label": "test test_eggs", | ||
154 | "range": { | ||
155 | "end": { "character": 17, "line": 1 }, | ||
156 | "start": { "character": 0, "line": 0 } | ||
157 | }, | ||
158 | "cwd": server.path().join("foo") | ||
159 | }, | ||
160 | { | ||
161 | "args": [ | ||
162 | "check", | ||
163 | "--package", | ||
164 | "foo", | ||
165 | "--test", | ||
166 | "spam" | ||
167 | ], | ||
168 | "bin": "cargo", | ||
169 | "env": {}, | ||
170 | "cwd": server.path().join("foo"), | ||
171 | "label": "cargo check -p foo", | ||
172 | "range": { | ||
173 | "end": { | ||
174 | "character": 0, | ||
175 | "line": 0 | ||
176 | }, | ||
177 | "start": { | ||
178 | "character": 0, | ||
179 | "line": 0 | ||
180 | } | ||
181 | } | ||
182 | } | ||
183 | ]) | ||
184 | ); | ||
185 | } | ||
186 | |||
187 | #[test] | ||
188 | fn test_format_document() { | ||
189 | if skip_slow_tests() { | ||
190 | return; | ||
191 | } | ||
192 | |||
193 | let server = project( | ||
194 | r#" | ||
195 | //- Cargo.toml | ||
196 | [package] | ||
197 | name = "foo" | ||
198 | version = "0.0.0" | ||
199 | |||
200 | //- src/lib.rs | ||
201 | mod bar; | ||
202 | |||
203 | fn main() { | ||
204 | } | ||
205 | |||
206 | pub use std::collections::HashMap; | ||
207 | "#, | ||
208 | ); | ||
209 | server.wait_until_workspace_is_loaded(); | ||
210 | |||
211 | server.request::<Formatting>( | ||
212 | DocumentFormattingParams { | ||
213 | text_document: server.doc_id("src/lib.rs"), | ||
214 | options: FormattingOptions { | ||
215 | tab_size: 4, | ||
216 | insert_spaces: false, | ||
217 | insert_final_newline: None, | ||
218 | trim_final_newlines: None, | ||
219 | trim_trailing_whitespace: None, | ||
220 | properties: HashMap::new(), | ||
221 | }, | ||
222 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
223 | }, | ||
224 | json!([ | ||
225 | { | ||
226 | "newText": r#"mod bar; | ||
227 | |||
228 | fn main() {} | ||
229 | |||
230 | pub use std::collections::HashMap; | ||
231 | "#, | ||
232 | "range": { | ||
233 | "end": { | ||
234 | "character": 0, | ||
235 | "line": 7 | ||
236 | }, | ||
237 | "start": { | ||
238 | "character": 0, | ||
239 | "line": 0 | ||
240 | } | ||
241 | } | ||
242 | } | ||
243 | ]), | ||
244 | ); | ||
245 | } | ||
246 | |||
247 | #[test] | ||
248 | fn test_format_document_2018() { | ||
249 | if skip_slow_tests() { | ||
250 | return; | ||
251 | } | ||
252 | |||
253 | let server = project( | ||
254 | r#" | ||
255 | //- Cargo.toml | ||
256 | [package] | ||
257 | name = "foo" | ||
258 | version = "0.0.0" | ||
259 | edition = "2018" | ||
260 | |||
261 | //- src/lib.rs | ||
262 | mod bar; | ||
263 | |||
264 | async fn test() { | ||
265 | } | ||
266 | |||
267 | fn main() { | ||
268 | } | ||
269 | |||
270 | pub use std::collections::HashMap; | ||
271 | "#, | ||
272 | ); | ||
273 | server.wait_until_workspace_is_loaded(); | ||
274 | |||
275 | server.request::<Formatting>( | ||
276 | DocumentFormattingParams { | ||
277 | text_document: server.doc_id("src/lib.rs"), | ||
278 | options: FormattingOptions { | ||
279 | tab_size: 4, | ||
280 | insert_spaces: false, | ||
281 | properties: HashMap::new(), | ||
282 | insert_final_newline: None, | ||
283 | trim_final_newlines: None, | ||
284 | trim_trailing_whitespace: None, | ||
285 | }, | ||
286 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
287 | }, | ||
288 | json!([ | ||
289 | { | ||
290 | "newText": r#"mod bar; | ||
291 | |||
292 | async fn test() {} | ||
293 | |||
294 | fn main() {} | ||
295 | |||
296 | pub use std::collections::HashMap; | ||
297 | "#, | ||
298 | "range": { | ||
299 | "end": { | ||
300 | "character": 0, | ||
301 | "line": 10 | ||
302 | }, | ||
303 | "start": { | ||
304 | "character": 0, | ||
305 | "line": 0 | ||
306 | } | ||
307 | } | ||
308 | } | ||
309 | ]), | ||
310 | ); | ||
311 | } | ||
312 | |||
313 | #[test] | ||
314 | fn test_missing_module_code_action() { | ||
315 | if skip_slow_tests() { | ||
316 | return; | ||
317 | } | ||
318 | |||
319 | let server = project( | ||
320 | r#" | ||
321 | //- Cargo.toml | ||
322 | [package] | ||
323 | name = "foo" | ||
324 | version = "0.0.0" | ||
325 | |||
326 | //- src/lib.rs | ||
327 | mod bar; | ||
328 | |||
329 | fn main() {} | ||
330 | "#, | ||
331 | ); | ||
332 | server.wait_until_workspace_is_loaded(); | ||
333 | let empty_context = || CodeActionContext { diagnostics: Vec::new(), only: None }; | ||
334 | server.request::<CodeActionRequest>( | ||
335 | CodeActionParams { | ||
336 | text_document: server.doc_id("src/lib.rs"), | ||
337 | range: Range::new(Position::new(0, 4), Position::new(0, 7)), | ||
338 | context: empty_context(), | ||
339 | partial_result_params: PartialResultParams::default(), | ||
340 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
341 | }, | ||
342 | json!([ | ||
343 | { | ||
344 | "command": { | ||
345 | "arguments": [ | ||
346 | { | ||
347 | "cursorPosition": null, | ||
348 | "label": "create module", | ||
349 | "workspaceEdit": { | ||
350 | "documentChanges": [ | ||
351 | { | ||
352 | "kind": "create", | ||
353 | "uri": "file:///[..]/src/bar.rs" | ||
354 | } | ||
355 | ] | ||
356 | } | ||
357 | } | ||
358 | ], | ||
359 | "command": "rust-analyzer.applySourceChange", | ||
360 | "title": "create module" | ||
361 | }, | ||
362 | "title": "create module" | ||
363 | } | ||
364 | ]), | ||
365 | ); | ||
366 | |||
367 | server.request::<CodeActionRequest>( | ||
368 | CodeActionParams { | ||
369 | text_document: server.doc_id("src/lib.rs"), | ||
370 | range: Range::new(Position::new(2, 4), Position::new(2, 7)), | ||
371 | context: empty_context(), | ||
372 | partial_result_params: PartialResultParams::default(), | ||
373 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
374 | }, | ||
375 | json!([]), | ||
376 | ); | ||
377 | } | ||
378 | |||
379 | #[test] | ||
380 | fn test_missing_module_code_action_in_json_project() { | ||
381 | if skip_slow_tests() { | ||
382 | return; | ||
383 | } | ||
384 | |||
385 | let tmp_dir = TempDir::new().unwrap(); | ||
386 | |||
387 | let path = tmp_dir.path(); | ||
388 | |||
389 | let project = json!({ | ||
390 | "roots": [path], | ||
391 | "crates": [ { | ||
392 | "root_module": path.join("src/lib.rs"), | ||
393 | "deps": [], | ||
394 | "edition": "2015", | ||
395 | "atom_cfgs": [], | ||
396 | "key_value_cfgs": {} | ||
397 | } ] | ||
398 | }); | ||
399 | |||
400 | let code = format!( | ||
401 | r#" | ||
402 | //- rust-project.json | ||
403 | {PROJECT} | ||
404 | |||
405 | //- src/lib.rs | ||
406 | mod bar; | ||
407 | |||
408 | fn main() {{}} | ||
409 | "#, | ||
410 | PROJECT = project.to_string(), | ||
411 | ); | ||
412 | |||
413 | let server = Project::with_fixture(&code).tmp_dir(tmp_dir).server(); | ||
414 | |||
415 | server.wait_until_workspace_is_loaded(); | ||
416 | let empty_context = || CodeActionContext { diagnostics: Vec::new(), only: None }; | ||
417 | server.request::<CodeActionRequest>( | ||
418 | CodeActionParams { | ||
419 | text_document: server.doc_id("src/lib.rs"), | ||
420 | range: Range::new(Position::new(0, 4), Position::new(0, 7)), | ||
421 | context: empty_context(), | ||
422 | partial_result_params: PartialResultParams::default(), | ||
423 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
424 | }, | ||
425 | json!([ | ||
426 | { | ||
427 | "command": { | ||
428 | "arguments": [ | ||
429 | { | ||
430 | "cursorPosition": null, | ||
431 | "label": "create module", | ||
432 | "workspaceEdit": { | ||
433 | "documentChanges": [ | ||
434 | { | ||
435 | "kind": "create", | ||
436 | "uri": "file:///[..]/src/bar.rs" | ||
437 | } | ||
438 | ] | ||
439 | } | ||
440 | } | ||
441 | ], | ||
442 | "command": "rust-analyzer.applySourceChange", | ||
443 | "title": "create module" | ||
444 | }, | ||
445 | "title": "create module" | ||
446 | } | ||
447 | ]), | ||
448 | ); | ||
449 | |||
450 | server.request::<CodeActionRequest>( | ||
451 | CodeActionParams { | ||
452 | text_document: server.doc_id("src/lib.rs"), | ||
453 | range: Range::new(Position::new(2, 4), Position::new(2, 7)), | ||
454 | context: empty_context(), | ||
455 | partial_result_params: PartialResultParams::default(), | ||
456 | work_done_progress_params: WorkDoneProgressParams::default(), | ||
457 | }, | ||
458 | json!([]), | ||
459 | ); | ||
460 | } | ||
461 | |||
462 | #[test] | ||
463 | fn diagnostics_dont_block_typing() { | ||
464 | if skip_slow_tests() { | ||
465 | return; | ||
466 | } | ||
467 | |||
468 | let librs: String = (0..10).map(|i| format!("mod m{};", i)).collect(); | ||
469 | let libs: String = (0..10).map(|i| format!("//- src/m{}.rs\nfn foo() {{}}\n\n", i)).collect(); | ||
470 | let server = Project::with_fixture(&format!( | ||
471 | r#" | ||
472 | //- Cargo.toml | ||
473 | [package] | ||
474 | name = "foo" | ||
475 | version = "0.0.0" | ||
476 | |||
477 | //- src/lib.rs | ||
478 | {} | ||
479 | |||
480 | {} | ||
481 | |||
482 | fn main() {{}} | ||
483 | "#, | ||
484 | librs, libs | ||
485 | )) | ||
486 | .with_sysroot(true) | ||
487 | .server(); | ||
488 | |||
489 | server.wait_until_workspace_is_loaded(); | ||
490 | for i in 0..10 { | ||
491 | server.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams { | ||
492 | text_document: TextDocumentItem { | ||
493 | uri: server.doc_id(&format!("src/m{}.rs", i)).uri, | ||
494 | language_id: "rust".to_string(), | ||
495 | version: 0, | ||
496 | text: "/// Docs\nfn foo() {}".to_string(), | ||
497 | }, | ||
498 | }); | ||
499 | } | ||
500 | let start = std::time::Instant::now(); | ||
501 | server.request::<OnEnter>( | ||
502 | TextDocumentPositionParams { | ||
503 | text_document: server.doc_id("src/m0.rs"), | ||
504 | position: Position { line: 0, character: 5 }, | ||
505 | }, | ||
506 | json!({ | ||
507 | "cursorPosition": { | ||
508 | "position": { "character": 4, "line": 1 }, | ||
509 | "textDocument": { "uri": "file:///[..]src/m0.rs" } | ||
510 | }, | ||
511 | "label": "on enter", | ||
512 | "workspaceEdit": { | ||
513 | "documentChanges": [ | ||
514 | { | ||
515 | "edits": [ | ||
516 | { | ||
517 | "newText": "\n/// ", | ||
518 | "range": { | ||
519 | "end": { "character": 5, "line": 0 }, | ||
520 | "start": { "character": 5, "line": 0 } | ||
521 | } | ||
522 | } | ||
523 | ], | ||
524 | "textDocument": { "uri": "file:///[..]src/m0.rs", "version": null } | ||
525 | } | ||
526 | ] | ||
527 | } | ||
528 | }), | ||
529 | ); | ||
530 | let elapsed = start.elapsed(); | ||
531 | assert!(elapsed.as_millis() < 2000, "typing enter took {:?}", elapsed); | ||
532 | } | ||
533 | |||
534 | #[test] | ||
535 | fn preserves_dos_line_endings() { | ||
536 | if skip_slow_tests() { | ||
537 | return; | ||
538 | } | ||
539 | |||
540 | let server = Project::with_fixture( | ||
541 | &" | ||
542 | //- Cargo.toml | ||
543 | [package] | ||
544 | name = \"foo\" | ||
545 | version = \"0.0.0\" | ||
546 | |||
547 | //- src/main.rs | ||
548 | /// Some Docs\r\nfn main() {} | ||
549 | ", | ||
550 | ) | ||
551 | .server(); | ||
552 | |||
553 | server.request::<OnEnter>( | ||
554 | TextDocumentPositionParams { | ||
555 | text_document: server.doc_id("src/main.rs"), | ||
556 | position: Position { line: 0, character: 8 }, | ||
557 | }, | ||
558 | json!({ | ||
559 | "cursorPosition": { | ||
560 | "position": { "line": 1, "character": 4 }, | ||
561 | "textDocument": { "uri": "file:///[..]src/main.rs" } | ||
562 | }, | ||
563 | "label": "on enter", | ||
564 | "workspaceEdit": { | ||
565 | "documentChanges": [ | ||
566 | { | ||
567 | "edits": [ | ||
568 | { | ||
569 | "newText": "\r\n/// ", | ||
570 | "range": { | ||
571 | "end": { "line": 0, "character": 8 }, | ||
572 | "start": { "line": 0, "character": 8 } | ||
573 | } | ||
574 | } | ||
575 | ], | ||
576 | "textDocument": { "uri": "file:///[..]src/main.rs", "version": null } | ||
577 | } | ||
578 | ] | ||
579 | } | ||
580 | }), | ||
581 | ); | ||
582 | } | ||
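All of these integration tests share one skeleton: bail out under `skip_slow_tests()`, build a fixture project, wait for the workspace to load, then fire a typed request at the server. A stripped-down sketch of that skeleton (a hypothetical test, reusing the imports already at the top of this file):

```rust
#[test]
fn heavy_test_skeleton() {
    if skip_slow_tests() {
        return;
    }
    // `//- <path>` headers split the fixture into files, as in the tests above.
    let server = project(
        r#"
//- Cargo.toml
[package]
name = "foo"
version = "0.0.0"

//- src/lib.rs
#[test]
fn foo() {}
"#,
    );
    server.wait_until_workspace_is_loaded();
    // send_request returns the raw serde_json::Value; request (used above)
    // additionally diffs the value against an expected JSON document.
    let res = server.send_request::<Runnables>(RunnablesParams {
        text_document: server.doc_id("src/lib.rs"),
        position: None,
    });
    assert!(res.is_array());
}
```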
diff --git a/crates/ra_lsp_server/tests/heavy_tests/support.rs b/crates/ra_lsp_server/tests/heavy_tests/support.rs deleted file mode 100644 index d5ea52fa9..000000000 --- a/crates/ra_lsp_server/tests/heavy_tests/support.rs +++ /dev/null | |||
@@ -1,254 +0,0 @@ | |||
1 | use std::{ | ||
2 | cell::{Cell, RefCell}, | ||
3 | fs, | ||
4 | path::{Path, PathBuf}, | ||
5 | sync::Once, | ||
6 | time::Duration, | ||
7 | }; | ||
8 | |||
9 | use crossbeam_channel::{after, select, Receiver}; | ||
10 | use lsp_server::{Connection, Message, Notification, Request}; | ||
11 | use lsp_types::{ | ||
12 | notification::{DidOpenTextDocument, Exit}, | ||
13 | request::Shutdown, | ||
14 | ClientCapabilities, DidOpenTextDocumentParams, GotoCapability, TextDocumentClientCapabilities, | ||
15 | TextDocumentIdentifier, TextDocumentItem, Url, | ||
16 | }; | ||
17 | use serde::Serialize; | ||
18 | use serde_json::{to_string_pretty, Value}; | ||
19 | use tempfile::TempDir; | ||
20 | use test_utils::{find_mismatch, parse_fixture}; | ||
21 | |||
22 | use ra_lsp_server::{main_loop, req, ServerConfig}; | ||
23 | |||
24 | pub struct Project<'a> { | ||
25 | fixture: &'a str, | ||
26 | with_sysroot: bool, | ||
27 | tmp_dir: Option<TempDir>, | ||
28 | roots: Vec<PathBuf>, | ||
29 | } | ||
30 | |||
31 | impl<'a> Project<'a> { | ||
32 | pub fn with_fixture(fixture: &str) -> Project { | ||
33 | Project { fixture, tmp_dir: None, roots: vec![], with_sysroot: false } | ||
34 | } | ||
35 | |||
36 | pub fn tmp_dir(mut self, tmp_dir: TempDir) -> Project<'a> { | ||
37 | self.tmp_dir = Some(tmp_dir); | ||
38 | self | ||
39 | } | ||
40 | |||
41 | pub fn root(mut self, path: &str) -> Project<'a> { | ||
42 | self.roots.push(path.into()); | ||
43 | self | ||
44 | } | ||
45 | |||
46 | pub fn with_sysroot(mut self, sysroot: bool) -> Project<'a> { | ||
47 | self.with_sysroot = sysroot; | ||
48 | self | ||
49 | } | ||
50 | |||
51 | pub fn server(self) -> Server { | ||
52 | let tmp_dir = self.tmp_dir.unwrap_or_else(|| TempDir::new().unwrap()); | ||
53 | static INIT: Once = Once::new(); | ||
54 | INIT.call_once(|| { | ||
55 | let _ = env_logger::builder().is_test(true).try_init().unwrap(); | ||
56 | ra_prof::set_filter(if crate::PROFILE.is_empty() { | ||
57 | ra_prof::Filter::disabled() | ||
58 | } else { | ||
59 | ra_prof::Filter::from_spec(&crate::PROFILE) | ||
60 | }); | ||
61 | }); | ||
62 | |||
63 | let mut paths = vec![]; | ||
64 | |||
65 | for entry in parse_fixture(self.fixture) { | ||
66 | let path = tmp_dir.path().join(entry.meta); | ||
67 | fs::create_dir_all(path.parent().unwrap()).unwrap(); | ||
68 | fs::write(path.as_path(), entry.text.as_bytes()).unwrap(); | ||
69 | paths.push((path, entry.text)); | ||
70 | } | ||
71 | |||
72 | let roots = self.roots.into_iter().map(|root| tmp_dir.path().join(root)).collect(); | ||
73 | |||
74 | Server::new(tmp_dir, self.with_sysroot, roots, paths) | ||
75 | } | ||
76 | } | ||
77 | |||
78 | pub fn project(fixture: &str) -> Server { | ||
79 | Project::with_fixture(fixture).server() | ||
80 | } | ||
81 | |||
82 | pub struct Server { | ||
83 | req_id: Cell<u64>, | ||
84 | messages: RefCell<Vec<Message>>, | ||
85 | dir: TempDir, | ||
86 | _thread: jod_thread::JoinHandle<()>, | ||
87 | client: Connection, | ||
88 | } | ||
89 | |||
90 | impl Server { | ||
91 | fn new( | ||
92 | dir: TempDir, | ||
93 | with_sysroot: bool, | ||
94 | roots: Vec<PathBuf>, | ||
95 | files: Vec<(PathBuf, String)>, | ||
96 | ) -> Server { | ||
97 | let path = dir.path().to_path_buf(); | ||
98 | |||
99 | let roots = if roots.is_empty() { vec![path] } else { roots }; | ||
100 | let (connection, client) = Connection::memory(); | ||
101 | |||
102 | let _thread = jod_thread::Builder::new() | ||
103 | .name("test server".to_string()) | ||
104 | .spawn(move || { | ||
105 | main_loop( | ||
106 | roots, | ||
107 | ClientCapabilities { | ||
108 | workspace: None, | ||
109 | text_document: Some(TextDocumentClientCapabilities { | ||
110 | definition: Some(GotoCapability { | ||
111 | dynamic_registration: None, | ||
112 | link_support: Some(true), | ||
113 | }), | ||
114 | ..Default::default() | ||
115 | }), | ||
116 | window: None, | ||
117 | experimental: None, | ||
118 | }, | ||
119 | ServerConfig { with_sysroot, ..ServerConfig::default() }, | ||
120 | connection, | ||
121 | ) | ||
122 | .unwrap() | ||
123 | }) | ||
124 | .expect("failed to spawn a thread"); | ||
125 | |||
126 | let res = | ||
127 | Server { req_id: Cell::new(1), dir, messages: Default::default(), client, _thread }; | ||
128 | |||
129 | for (path, text) in files { | ||
130 | res.notification::<DidOpenTextDocument>(DidOpenTextDocumentParams { | ||
131 | text_document: TextDocumentItem { | ||
132 | uri: Url::from_file_path(path).unwrap(), | ||
133 | language_id: "rust".to_string(), | ||
134 | version: 0, | ||
135 | text, | ||
136 | }, | ||
137 | }) | ||
138 | } | ||
139 | res | ||
140 | } | ||
141 | |||
142 | pub fn doc_id(&self, rel_path: &str) -> TextDocumentIdentifier { | ||
143 | let path = self.dir.path().join(rel_path); | ||
144 | TextDocumentIdentifier { uri: Url::from_file_path(path).unwrap() } | ||
145 | } | ||
146 | |||
147 | pub fn notification<N>(&self, params: N::Params) | ||
148 | where | ||
149 | N: lsp_types::notification::Notification, | ||
150 | N::Params: Serialize, | ||
151 | { | ||
152 | let r = Notification::new(N::METHOD.to_string(), params); | ||
153 | self.send_notification(r) | ||
154 | } | ||
155 | |||
156 | pub fn request<R>(&self, params: R::Params, expected_resp: Value) | ||
157 | where | ||
158 | R: lsp_types::request::Request, | ||
159 | R::Params: Serialize, | ||
160 | { | ||
161 | let actual = self.send_request::<R>(params); | ||
162 | if let Some((expected_part, actual_part)) = find_mismatch(&expected_resp, &actual) { | ||
163 | panic!( | ||
164 | "JSON mismatch\nExpected:\n{}\nWas:\n{}\nExpected part:\n{}\nActual part:\n{}\n", | ||
165 | to_string_pretty(&expected_resp).unwrap(), | ||
166 | to_string_pretty(&actual).unwrap(), | ||
167 | to_string_pretty(expected_part).unwrap(), | ||
168 | to_string_pretty(actual_part).unwrap(), | ||
169 | ); | ||
170 | } | ||
171 | } | ||
172 | |||
173 | pub fn send_request<R>(&self, params: R::Params) -> Value | ||
174 | where | ||
175 | R: lsp_types::request::Request, | ||
176 | R::Params: Serialize, | ||
177 | { | ||
178 | let id = self.req_id.get(); | ||
179 | self.req_id.set(id + 1); | ||
180 | |||
181 | let r = Request::new(id.into(), R::METHOD.to_string(), params); | ||
182 | self.send_request_(r) | ||
183 | } | ||
184 | fn send_request_(&self, r: Request) -> Value { | ||
185 | let id = r.id.clone(); | ||
186 | self.client.sender.send(r.into()).unwrap(); | ||
187 | while let Some(msg) = self.recv() { | ||
188 | match msg { | ||
189 | Message::Request(req) => panic!("unexpected request: {:?}", req), | ||
190 | Message::Notification(_) => (), | ||
191 | Message::Response(res) => { | ||
192 | assert_eq!(res.id, id); | ||
193 | if let Some(err) = res.error { | ||
194 | panic!("error response: {:#?}", err); | ||
195 | } | ||
196 | return res.result.unwrap(); | ||
197 | } | ||
198 | } | ||
199 | } | ||
200 | panic!("no response"); | ||
201 | } | ||
202 | pub fn wait_until_workspace_is_loaded(&self) { | ||
203 | self.wait_for_message_cond(1, &|msg: &Message| match msg { | ||
204 | Message::Notification(n) if n.method == "window/showMessage" => { | ||
205 | let msg = | ||
206 | n.clone().extract::<req::ShowMessageParams>("window/showMessage").unwrap(); | ||
207 | msg.message.starts_with("workspace loaded") | ||
208 | } | ||
209 | _ => false, | ||
210 | }) | ||
211 | } | ||
212 | fn wait_for_message_cond(&self, n: usize, cond: &dyn Fn(&Message) -> bool) { | ||
213 | let mut total = 0; | ||
214 | for msg in self.messages.borrow().iter() { | ||
215 | if cond(msg) { | ||
216 | total += 1 | ||
217 | } | ||
218 | } | ||
219 | while total < n { | ||
220 | let msg = self.recv().expect("no response"); | ||
221 | if cond(&msg) { | ||
222 | total += 1; | ||
223 | } | ||
224 | } | ||
225 | } | ||
226 | fn recv(&self) -> Option<Message> { | ||
227 | recv_timeout(&self.client.receiver).map(|msg| { | ||
228 | self.messages.borrow_mut().push(msg.clone()); | ||
229 | msg | ||
230 | }) | ||
231 | } | ||
232 | fn send_notification(&self, not: Notification) { | ||
233 | self.client.sender.send(Message::Notification(not)).unwrap(); | ||
234 | } | ||
235 | |||
236 | pub fn path(&self) -> &Path { | ||
237 | self.dir.path() | ||
238 | } | ||
239 | } | ||
240 | |||
241 | impl Drop for Server { | ||
242 | fn drop(&mut self) { | ||
243 | self.request::<Shutdown>((), Value::Null); | ||
244 | self.notification::<Exit>(()); | ||
245 | } | ||
246 | } | ||
247 | |||
248 | fn recv_timeout(receiver: &Receiver<Message>) -> Option<Message> { | ||
249 | let timeout = Duration::from_secs(120); | ||
250 | select! { | ||
251 | recv(receiver) -> msg => msg.ok(), | ||
252 | recv(after(timeout)) -> _ => panic!("timed out"), | ||
253 | } | ||
254 | } | ||
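The harness above talks to the server over `lsp_server::Connection::memory()`, which yields two linked in-process endpoints instead of stdio. A tiny standalone illustration of that transport (a toy round-trip, not a test from the deleted crate):

```rust
use lsp_server::{Connection, Message, Notification};
use serde_json::json;

fn memory_roundtrip() {
    // One endpoint plays the server, the other the client, as in Server::new above.
    let (server_side, client_side) = Connection::memory();

    let note = Notification::new(
        "window/showMessage".to_string(),
        json!({ "type": 3, "message": "workspace loaded" }),
    );
    server_side.sender.send(Message::Notification(note)).unwrap();

    match client_side.receiver.recv().unwrap() {
        Message::Notification(n) => assert_eq!(n.method, "window/showMessage"),
        msg => panic!("unexpected message: {:?}", msg),
    }
}
```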