author    bors[bot] <26634292+bors[bot]@users.noreply.github.com>    2021-05-17 15:41:56 +0100
committer GitHub <[email protected]>    2021-05-17 15:41:56 +0100
commit    f9d4a9eaee6f86d9bd60cf6d18ec744b56696135 (patch)
tree      31c5c36a446ca3cd123309ac85dc39e0e23cdc74
parent    fa02911078cfa1b3d3b9fb2cbbed9d1de98cd88e (diff)
parent    7ae3967e5739b786fcd3f7b7b333c73f2c38e8dd (diff)
Merge #8795
8795: Allow semantic tokens for strings to be disabled r=matklad a=djrenren

Fixes https://github.com/rust-analyzer/rust-analyzer/issues/7111

Pretty straightforward change, but open to any suggestions if there's a more recommended testing strategy than what I went with.

Co-authored-by: John Renner <[email protected]>
-rw-r--r--  crates/rust-analyzer/src/config.rs              | 10
-rw-r--r--  crates/rust-analyzer/src/handlers.rs            | 13
-rw-r--r--  crates/rust-analyzer/src/to_proto.rs            |  8
-rw-r--r--  crates/rust-analyzer/tests/rust-analyzer/main.rs | 43
-rw-r--r--  docs/user/generated_config.adoc                 |  9
-rw-r--r--  editors/code/package.json                       |  5
6 files changed, 78 insertions(+), 10 deletions(-)
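
Taken together, the patch threads one boolean option from the client configuration down to the semantic-token conversion. As a rough, illustrative sketch (not part of this diff), a client opting out of string tokens sends the option in its configuration payload, the same shape the new integration test below uses:

    fn main() {
        // Illustrative only: this mirrors the `.with_config(...)` call in the
        // new test further down. Editor clients would normally expose this as
        // the `rust-analyzer.semanticStringTokens` setting rather than raw JSON.
        let config = serde_json::json!({ "semanticStringTokens": false });
        println!("{}", config);
    }
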
diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index d83670bda..905a6ee55 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -208,6 +208,13 @@ config_data! {
         /// Advanced option, fully override the command rust-analyzer uses for
         /// formatting.
         rustfmt_overrideCommand: Option<Vec<String>> = "null",
+
+        /// Use semantic tokens for strings.
+        ///
+        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+        /// By disabling semantic tokens for strings, other grammars can be used to highlight
+        /// their contents.
+        semanticStringTokens: bool = "true",
     }
 }
 
@@ -381,6 +388,9 @@ impl Config {
     pub fn line_folding_only(&self) -> bool {
         try_or!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?, false)
     }
+    pub fn semantic_strings(&self) -> bool {
+        self.data.semanticStringTokens
+    }
     pub fn hierarchical_symbols(&self) -> bool {
         try_or!(
             self.caps
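
One detail worth noting: in `config_data!`, defaults are written as JSON text rather than Rust literals, which is why the boolean default above reads `"true"` (and the `Option<Vec<String>>` default reads `"null"`). A hedged, standalone illustration of that parsing idea, not the macro's actual code:

    fn main() {
        // Assumption: the `= "true"` default is a JSON snippet that gets
        // deserialized into the field's Rust type, here a bool.
        let default: bool = serde_json::from_str("true").expect("valid JSON default");
        assert!(default);
    }
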
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 551013aa9..85e45337c 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -1394,7 +1394,9 @@ pub(crate) fn handle_semantic_tokens_full(
     let line_index = snap.file_line_index(file_id)?;
 
     let highlights = snap.analysis.highlight(file_id)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
 
     // Unconditionally cache the tokens
     snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1413,8 +1415,9 @@ pub(crate) fn handle_semantic_tokens_full_delta(
     let line_index = snap.file_line_index(file_id)?;
 
     let highlights = snap.analysis.highlight(file_id)?;
-
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
 
     let mut cache = snap.semantic_tokens_cache.lock();
     let cached_tokens = cache.entry(params.text_document.uri).or_default();
@@ -1443,7 +1446,9 @@ pub(crate) fn handle_semantic_tokens_range(
     let line_index = snap.file_line_index(frange.file_id)?;
 
     let highlights = snap.analysis.highlight_range(frange)?;
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+    let semantic_strings = snap.config.semantic_strings();
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
     Ok(Some(semantic_tokens.into()))
 }
 
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 73dcba694..6dc9f82ab 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -381,6 +381,7 @@ pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HlRange>,
+    include_strings: bool,
 ) -> lsp_types::SemanticTokens {
     let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@@ -389,8 +390,11 @@ pub(crate) fn semantic_tokens(
         if highlight_range.highlight.is_empty() {
             continue;
         }
-        let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        let token_index = semantic_tokens::type_index(type_);
+        let (typ, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+        if !include_strings && typ == lsp_types::SemanticTokenType::STRING {
+            continue;
+        }
+        let token_index = semantic_tokens::type_index(typ);
         let modifier_bitset = mods.0;
 
         for mut text_range in line_index.index.lines(highlight_range.range) {
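
The behavioural core of the patch is the early `continue` above: when `include_strings` is false, any highlight that maps to the LSP `string` token type is skipped before it reaches the token builder. A minimal standalone sketch of the same idea, using `lsp_types` directly; `filter_token_types` is a made-up helper for illustration, not rust-analyzer code:

    use lsp_types::SemanticTokenType;

    /// Keep every token type except STRING when the client has opted out.
    fn filter_token_types(
        types: Vec<SemanticTokenType>,
        include_strings: bool,
    ) -> Vec<SemanticTokenType> {
        types
            .into_iter()
            .filter(|t| include_strings || *t != SemanticTokenType::STRING)
            .collect()
    }

    fn main() {
        let highlights = vec![SemanticTokenType::FUNCTION, SemanticTokenType::STRING];
        // With strings disabled, only the non-string token survives.
        assert_eq!(filter_token_types(highlights, false).len(), 1);
    }
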
diff --git a/crates/rust-analyzer/tests/rust-analyzer/main.rs b/crates/rust-analyzer/tests/rust-analyzer/main.rs
index 9e89209ea..920c43f25 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/main.rs
+++ b/crates/rust-analyzer/tests/rust-analyzer/main.rs
@@ -18,15 +18,16 @@ use lsp_types::{
     notification::DidOpenTextDocument,
     request::{
         CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
-        WillRenameFiles,
+        SemanticTokensRangeRequest, WillRenameFiles,
     },
     CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
     DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
-    PartialResultParams, Position, Range, RenameFilesParams, TextDocumentItem,
-    TextDocumentPositionParams, WorkDoneProgressParams,
+    PartialResultParams, Position, Range, RenameFilesParams, SemanticTokens,
+    SemanticTokensRangeParams, TextDocumentItem, TextDocumentPositionParams,
+    WorkDoneProgressParams,
 };
 use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
-use serde_json::json;
+use serde_json::{from_value, json};
 use test_utils::skip_slow_tests;
 
 use crate::{
@@ -38,6 +39,40 @@ const PROFILE: &str = "";
 // const PROFILE: &'static str = "*@3>100";
 
 #[test]
+fn can_disable_semantic_strings() {
+    if skip_slow_tests() {
+        return;
+    }
+
+    [true, false].iter().for_each(|semantic_strings| {
+        let server = Project::with_fixture(
+            r#"
+//- /Cargo.toml
+[package]
+name = "foo"
+version = "0.0.0"
+
+//- /src/lib.rs
+const foo: &'static str = "hi";
+"#,
+        )
+        .with_config(serde_json::json!({ "semanticStringTokens": semantic_strings }))
+        .server()
+        .wait_until_workspace_is_loaded();
+
+        let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
+            text_document: server.doc_id("src/lib.rs"),
+            partial_result_params: PartialResultParams::default(),
+            work_done_progress_params: WorkDoneProgressParams::default(),
+            range: Range::new(Position::new(0, 26), Position::new(0, 30)),
+        });
+
+        let tok_res: SemanticTokens = from_value(res).expect("invalid server response");
+        assert!(tok_res.data.len() == *semantic_strings as usize);
+    });
+}
+
+#[test]
 fn completes_items_from_standard_library() {
     if skip_slow_tests() {
         return;
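
The assertion at the end of the new test is compact: the requested range (line 0, columns 26 to 30) covers exactly the `"hi"` literal in the fixture, so the server should return one semantic token when string tokens are enabled and none when they are disabled. Casting the flag to `usize` encodes those two expected counts:

    fn main() {
        // `true as usize` is 1 (one string token for the "hi" literal),
        // `false as usize` is 0 (the string token was filtered out).
        assert_eq!(true as usize, 1);
        assert_eq!(false as usize, 0);
    }
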
diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc
index f70558200..e2d74e164 100644
--- a/docs/user/generated_config.adoc
+++ b/docs/user/generated_config.adoc
@@ -332,3 +332,12 @@ Additional arguments to `rustfmt`.
 Advanced option, fully override the command rust-analyzer uses for
 formatting.
 --
+[[rust-analyzer.semanticStringTokens]]rust-analyzer.semanticStringTokens (default: `true`)::
++
+--
+Use semantic tokens for strings.
+
+In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+By disabling semantic tokens for strings, other grammars can be used to highlight
+their contents.
+--
diff --git a/editors/code/package.json b/editors/code/package.json
index 0f38a1673..14cffac06 100644
--- a/editors/code/package.json
+++ b/editors/code/package.json
@@ -778,6 +778,11 @@
                     "type": "string"
                 }
             },
+            "rust-analyzer.semanticStringTokens": {
+                "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
+                "default": true,
+                "type": "boolean"
+            },
             "$generated-end": false
         }
     },