From 7ae3967e5739b786fcd3f7b7b333c73f2c38e8dd Mon Sep 17 00:00:00 2001
From: John Renner
Date: Mon, 10 May 2021 13:34:09 -0700
Subject: Formatting and docs

---
 crates/rust-analyzer/src/config.rs               | 10 +++++++---
 crates/rust-analyzer/src/handlers.rs             |  9 ++++++---
 crates/rust-analyzer/src/to_proto.rs             |  2 +-
 crates/rust-analyzer/tests/rust-analyzer/main.rs | 16 +++++++---------
 4 files changed, 21 insertions(+), 16 deletions(-)

(limited to 'crates')

diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 3818160b7..123b63f53 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -44,9 +44,6 @@ config_data! {
         /// Show function name and docs in parameter hints.
         callInfo_full: bool = "true",
 
-        /// Use semantic tokens for strings. Disable to support injected grammars
-        semanticStringTokens: bool = "true",
-
         /// Automatically refresh project info via `cargo metadata` on
         /// `Cargo.toml` changes.
         cargo_autoreload: bool = "true",
@@ -211,6 +208,13 @@ config_data! {
         /// Advanced option, fully override the command rust-analyzer uses for
         /// formatting.
         rustfmt_overrideCommand: Option<Vec<String>> = "null",
+
+        /// Use semantic tokens for strings.
+        ///
+        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+        /// By disabling semantic tokens for strings, other grammars can be used to highlight
+        /// their contents.
+        semanticStringTokens: bool = "true",
     }
 }
 
diff --git a/crates/rust-analyzer/src/handlers.rs b/crates/rust-analyzer/src/handlers.rs
index 85dd73fca..78b558a21 100644
--- a/crates/rust-analyzer/src/handlers.rs
+++ b/crates/rust-analyzer/src/handlers.rs
@@ -1377,7 +1377,8 @@ pub(crate) fn handle_semantic_tokens_full(
 
     let highlights = snap.analysis.highlight(file_id)?;
     let semantic_strings = snap.config.semantic_strings();
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
 
     // Unconditionally cache the tokens
     snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
@@ -1397,7 +1398,8 @@ pub(crate) fn handle_semantic_tokens_full_delta(
 
     let highlights = snap.analysis.highlight(file_id)?;
     let semantic_strings = snap.config.semantic_strings();
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
 
     let mut cache = snap.semantic_tokens_cache.lock();
     let cached_tokens = cache.entry(params.text_document.uri).or_default();
@@ -1427,7 +1429,8 @@ pub(crate) fn handle_semantic_tokens_range(
 
     let highlights = snap.analysis.highlight_range(frange)?;
     let semantic_strings = snap.config.semantic_strings();
-    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
+    let semantic_tokens =
+        to_proto::semantic_tokens(&text, &line_index, highlights, semantic_strings);
     Ok(Some(semantic_tokens.into()))
 }
 
diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 01ffe8db1..5f2dd418f 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -381,7 +381,7 @@ pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HlRange>,
-    include_strings: bool
+    include_strings: bool,
 ) -> lsp_types::SemanticTokens {
     let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
diff --git a/crates/rust-analyzer/tests/rust-analyzer/main.rs b/crates/rust-analyzer/tests/rust-analyzer/main.rs
index 62f34b643..920c43f25 100644
--- a/crates/rust-analyzer/tests/rust-analyzer/main.rs
+++ b/crates/rust-analyzer/tests/rust-analyzer/main.rs
@@ -18,13 +18,13 @@ use lsp_types::{
     notification::DidOpenTextDocument,
     request::{
         CodeActionRequest, Completion, Formatting, GotoTypeDefinition, HoverRequest,
-        SemanticTokensRangeRequest, WillRenameFiles
+        SemanticTokensRangeRequest, WillRenameFiles,
     },
     CodeActionContext, CodeActionParams, CompletionParams, DidOpenTextDocumentParams,
     DocumentFormattingParams, FileRename, FormattingOptions, GotoDefinitionParams, HoverParams,
-    PartialResultParams, Position, Range, RenameFilesParams, SemanticTokensRangeParams, TextDocumentItem,
-    TextDocumentPositionParams, WorkDoneProgressParams,
-    SemanticTokens
+    PartialResultParams, Position, Range, RenameFilesParams, SemanticTokens,
+    SemanticTokensRangeParams, TextDocumentItem, TextDocumentPositionParams,
+    WorkDoneProgressParams,
 };
 use rust_analyzer::lsp_ext::{OnEnter, Runnables, RunnablesParams};
 use serde_json::{from_value, json};
@@ -56,10 +56,9 @@ version = "0.0.0"
 const foo: &'static str = "hi";
 "#,
     )
-    .with_config(serde_json::json!({
-        "semanticStringTokens": semantic_strings
-    }))
-    .server().wait_until_workspace_is_loaded();
+    .with_config(serde_json::json!({ "semanticStringTokens": semantic_strings }))
+    .server()
+    .wait_until_workspace_is_loaded();
 
     let res = server.send_request::<SemanticTokensRangeRequest>(SemanticTokensRangeParams {
         text_document: server.doc_id("src/lib.rs"),
@@ -73,7 +72,6 @@ const foo: &'static str = "hi";
     });
 }
 
-
 #[test]
 fn completes_items_from_standard_library() {
     if skip_slow_tests() {
--
cgit v1.2.3
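
How the flag takes effect: this commit documents the `semanticStringTokens` setting and threads it into `to_proto::semantic_tokens` as the new `include_strings` parameter; the filtering itself happens inside the body of `to_proto::semantic_tokens`, which the hunks above do not show beyond the signature change. The sketch below illustrates the intended behaviour under stated assumptions: `Token`, `TokenKind`, and `filter_tokens` are invented for this example and are not rust-analyzer's internal types.

// Standalone sketch, not rust-analyzer source. It shows the effect of an
// `include_strings` flag like the one added to `to_proto::semantic_tokens`:
// when the flag is false, string tokens are dropped so another grammar
// (e.g. a TextMate injection grammar in VS Code) can highlight string
// contents instead.

#[derive(Debug, PartialEq, Eq)]
enum TokenKind {
    Keyword,
    String,
    Comment,
}

#[derive(Debug)]
struct Token {
    kind: TokenKind,
    start: usize,
    len: usize,
}

/// Keep every token when `include_strings` is true; otherwise drop the
/// string tokens and leave everything else untouched.
fn filter_tokens(tokens: Vec<Token>, include_strings: bool) -> Vec<Token> {
    tokens
        .into_iter()
        .filter(|t| include_strings || t.kind != TokenKind::String)
        .collect()
}

fn main() {
    let tokens = vec![
        Token { kind: TokenKind::Keyword, start: 0, len: 5 },
        Token { kind: TokenKind::String, start: 12, len: 4 },
        Token { kind: TokenKind::Comment, start: 20, len: 10 },
    ];
    // With the setting disabled, only the string token is removed; the
    // keyword and comment tokens are still reported to the client.
    println!("{:?}", filter_tokens(tokens, false));
}

The test in main.rs exercises exactly this switch end to end: it starts the server with `"semanticStringTokens": semantic_strings` in the configuration and then checks the tokens returned for a fixture containing a string literal.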