author     Aleksey Kladov <[email protected]>   2021-05-17 16:37:06 +0100
committer  Aleksey Kladov <[email protected]>   2021-05-17 16:37:06 +0100
commit     41510f437e87e013f2015bed1a964163c6d3f1ff
tree       ead7add1ada64dfad8e819a32a5ea4ffa8c0cd60
parent     f9d4a9eaee6f86d9bd60cf6d18ec744b56696135
minor: adjust config name
Diffstat (limited to 'crates')

-rw-r--r--  crates/rust-analyzer/src/config.rs                | 20
-rw-r--r--  crates/rust-analyzer/src/handlers.rs              | 12
-rw-r--r--  crates/rust-analyzer/src/semantic_tokens.rs       |  4
-rw-r--r--  crates/rust-analyzer/src/to_proto.rs              |  8
-rw-r--r--  crates/rust-analyzer/tests/rust-analyzer/main.rs  | 34

5 files changed, 22 insertions, 56 deletions
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 905a6ee55..a3866c1ba 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -124,6 +124,13 @@ config_data! {
         /// These directories will be ignored by rust-analyzer.
         files_excludeDirs: Vec<PathBuf> = "[]",
 
+        /// Use semantic tokens for strings.
+        ///
+        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+        /// By disabling semantic tokens for strings, other grammars can be used to highlight
+        /// their contents.
+        highlighting_strings: bool = "true",
+
         /// Whether to show `Debug` action. Only applies when
         /// `#rust-analyzer.hoverActions.enable#` is set.
         hoverActions_debug: bool = "true",
@@ -208,13 +215,6 @@ config_data! {
         /// Advanced option, fully override the command rust-analyzer uses for
         /// formatting.
         rustfmt_overrideCommand: Option<Vec<String>> = "null",
-
-        /// Use semantic tokens for strings.
-        ///
-        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
-        /// By disabling semantic tokens for strings, other grammars can be used to highlight
-        /// their contents.
-        semanticStringTokens: bool = "true",
     }
 }
 
@@ -388,9 +388,6 @@ impl Config {
     pub fn line_folding_only(&self) -> bool {
         try_or!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?, false)
     }
-    pub fn semantic_strings(&self) -> bool {
-        self.data.semanticStringTokens
-    }
     pub fn hierarchical_symbols(&self) -> bool {
         try_or!(
             self.caps
@@ -665,6 +662,9 @@ impl Config {
             refs: self.data.lens_enable && self.data.lens_references,
         }
     }
+    pub fn highlighting_strings(&self) -> bool {
+        self.data.highlighting_strings
+    }
     pub fn hover(&self) -> HoverConfig {
         HoverConfig {
             implementations: self.data.hoverActions_enable
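The new `highlighting_strings` field follows rust-analyzer's usual `config_data!` naming convention, where an underscore in the field name becomes a dot in the client-facing setting, so the option should surface to editors as `rust-analyzer.highlighting.strings` (that mapping is not shown in this diff and is stated here as an assumption). A minimal sketch of how a client could build the corresponding settings payload, using serde_json in the same way the removed integration test further down does:

use serde_json::json;

fn main() {
    // Sketch only: assumes the `config_data!` key mapping described above.
    // Disabling semantic tokens for strings lets other grammars highlight
    // string contents, per the doc comment added in config.rs.
    let settings = json!({
        "rust-analyzer": { "highlighting": { "strings": false } }
    });
    println!("{}", settings);
}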
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 85e45337c..8fe97fd7c 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -1394,9 +1394,9 @@ pub(crate) fn handle_semantic_tokens_full(
     let line_index = snap.file_line_index(file_id)?;
 
     let highlights = snap.analysis.highlight(file_id)?;
-    let semantic_strings = snap.config.semantic_strings();
+    let highlight_strings = snap.config.highlighting_strings();
     let semantic_tokens =
-        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
+        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
 
     // Unconditionally cache the tokens
     snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1415,9 +1415,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
     let line_index = snap.file_line_index(file_id)?;
 
     let highlights = snap.analysis.highlight(file_id)?;
-    let semantic_strings = snap.config.semantic_strings();
+    let highlight_strings = snap.config.highlighting_strings();
     let semantic_tokens =
-        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
+        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
 
     let mut cache = snap.semantic_tokens_cache.lock();
     let cached_tokens = cache.entry(params.text_document.uri).or_default();
@@ -1446,9 +1446,9 @@ pub(crate) fn handle_semantic_tokens_range(
     let line_index = snap.file_line_index(frange.file_id)?;
 
     let highlights = snap.analysis.highlight_range(frange)?;
-    let semantic_strings = snap.config.semantic_strings();
+    let highlight_strings = snap.config.highlighting_strings();
     let semantic_tokens =
-        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
+        to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
     Ok(Some(semantic_tokens.into()))
 }
 
diff --git a/crates/rust-analyzer/src/semantic_tokens.rs b/crates/rust-analyzer/src/semantic_tokens.rs
index c9d38693e..4fd576adb 100644
--- a/crates/rust-analyzer/src/semantic_tokens.rs
+++ b/crates/rust-analyzer/src/semantic_tokens.rs
@@ -184,8 +184,8 @@ pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<S
     }
 }
 
-pub(crate) fn type_index(type_: SemanticTokenType) -> u32 {
-    SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
+pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
+    SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
 }
 
 #[cfg(test)]
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 6dc9f82ab..9dec46c78 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -381,7 +381,7 @@ pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HlRange>,
-    include_strings: bool,
+    highlight_strings: bool,
 ) -> lsp_types::SemanticTokens {
     let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@@ -390,11 +390,11 @@ pub(crate) fn semantic_tokens(
         if highlight_range.highlight.is_empty() {
             continue;
         }
-        let (typ, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        if !include_strings && typ == lsp_types::SemanticTokenType::STRING {
+        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+        if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
             continue;
         }
-        let token_index = semantic_tokens::type_index(typ);
+        let token_index = semantic_tokens::type_index(ty);
         let modifier_bitset = mods.0;
 
         for mut text_range in line_index.index.lines(highlight_range.range) {
diff --git a/crates/rust-analyzer/tests/rust-analyzer/main.rs b/crates/rust-analyzer/tests/rust-analyzer/main.rs
index 920c43f25..c940ef214 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/main.rs
+++ b/crates/rust-analyzer/tests/rust-analyzer/main.rs
@@ -39,40 +39,6 @@ const PROFILE: &str = "";
 // const PROFILE: &'static str = "*@3>100";
 
 #[test]
-fn can_disable_semantic_strings() {
-    if skip_slow_tests() {
-        return;
-    }
-
-    [true, false].iter().for_each(|semantic_strings| {
-        let server = Project::with_fixture(
-            r#"
-//- /Cargo.toml
-[package]
-name = "foo"
-version = "0.0.0"
-
-//- /src/lib.rs
-const foo: &'static str = "hi";
-"#,
-        )
-        .with_config(serde_json::json!({ "semanticStringTokens": semantic_strings }))
-        .server()
-        .wait_until_workspace_is_loaded();
-
-        let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
-            text_document: server.doc_id("src/lib.rs"),
-            partial_result_params: PartialResultParams::default(),
-            work_done_progress_params: WorkDoneProgressParams::default(),
-            range: Range::new(Position::new(0, 26), Position::new(0, 30)),
-        });
-
-        let tok_res: SemanticTokens = from_value(res).expect("invalid server response");
-        assert!(tok_res.data.len() == *semantic_strings as usize);
-    });
-}
-
-#[test]
 fn completes_items_from_standard_library() {
     if skip_slow_tests() {
         return;
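The `can_disable_semantic_strings` test is deleted outright rather than renamed. If equivalent coverage were reinstated under the new option, it would presumably mirror the removed test with only the config key swapped; a rough sketch under that assumption follows (the test name and the nested `highlighting`/`strings` settings key are guesses, everything else is lifted from the deleted test above):

#[test]
fn can_disable_string_highlighting() {
    if skip_slow_tests() {
        return;
    }

    [true, false].iter().for_each(|&highlight_strings| {
        let server = Project::with_fixture(
            r#"
//- /Cargo.toml
[package]
name = "foo"
version = "0.0.0"

//- /src/lib.rs
const foo: &'static str = "hi";
"#,
        )
        // Assumed key; the deleted test passed the flat `semanticStringTokens` field here.
        .with_config(serde_json::json!({ "highlighting": { "strings": highlight_strings } }))
        .server()
        .wait_until_workspace_is_loaded();

        // Request semantic tokens over the range of the string literal `"hi"`.
        let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
            text_document: server.doc_id("src/lib.rs"),
            partial_result_params: PartialResultParams::default(),
            work_done_progress_params: WorkDoneProgressParams::default(),
            range: Range::new(Position::new(0, 26), Position::new(0, 30)),
        });

        // Exactly one string token when string highlighting is on, none when it is off.
        let tok_res: SemanticTokens = from_value(res).expect("invalid server response");
        assert_eq!(tok_res.data.len(), highlight_strings as usize);
    });
}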