 crates/ra_ide/src/lib.rs                       |  7
 crates/ra_ide/src/syntax_highlighting.rs       | 49
 crates/rust-analyzer/src/caps.rs               | 16
 crates/rust-analyzer/src/main_loop.rs          |  3
 crates/rust-analyzer/src/main_loop/handlers.rs | 26
 crates/rust-analyzer/src/req.rs                |  7
 6 files changed, 92 insertions, 16 deletions
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index 82e10bc7e..c02bb08a0 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -430,6 +430,13 @@ impl Analysis {
         self.with_db(|db| syntax_highlighting::highlight(db, file_id))
     }
 
+    /// Computes syntax highlighting for the given file range.
+    pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
+        self.with_db(|db| {
+            syntax_highlighting::highlight_range(db, frange.file_id, Some(frange.range))
+        })
+    }
+
     /// Computes syntax highlighting for the given file.
     pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancelable<String> {
         self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
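The new `Analysis::highlight_range` entry point follows the same `with_db`/`Cancelable` pattern as the other queries: the closure runs against a database snapshot and returns `Err(Canceled)` if the inputs change mid-query. A rough caller-side sketch (names are illustrative, not from this patch):

    use ra_ide::{Analysis, FileRange};

    // Illustrative caller: print the highlight tag for each range, ignoring a
    // cancelled query (a real caller would typically just re-issue the request).
    fn print_highlights(analysis: &Analysis, frange: FileRange) {
        match analysis.highlight_range(frange) {
            Ok(ranges) => {
                for r in ranges {
                    eprintln!("{:?}: {}", r.range, r.tag);
                }
            }
            Err(_canceled) => {}
        }
    }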
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 812229b4e..22c84561f 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -5,8 +5,8 @@ use ra_db::SourceDatabase;
 use ra_ide_db::{defs::NameDefinition, RootDatabase};
 use ra_prof::profile;
 use ra_syntax::{
-    ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange,
-    WalkEvent, T,
+    ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
+    TextRange, WalkEvent, T,
 };
 use rustc_hash::FxHashMap;
 
@@ -69,6 +69,16 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
 
 pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
     let _p = profile("highlight");
+    highlight_range(db, file_id, None)
+}
+
+pub(crate) fn highlight_range(
+    db: &RootDatabase,
+    file_id: FileId,
+    range: Option<TextRange>,
+) -> Vec<HighlightedRange> {
+    let _p = profile("highlight_range");
+
     let parse = db.parse(file_id);
     let root = parse.tree().syntax().clone();
 
@@ -79,6 +89,15 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
 
     let mut in_macro_call = None;
 
+    // Determine the root based on the range
+    let root = match range {
+        Some(range) => match root.covering_element(range) {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => token.parent(),
+        },
+        None => root,
+    };
+
     for event in root.preorder_with_tokens() {
         match event {
             WalkEvent::Enter(node) => match node.kind() {
@@ -374,7 +393,10 @@ mod tests {
 
     use test_utils::{assert_eq_text, project_dir, read_text};
 
-    use crate::mock_analysis::{single_file, MockAnalysis};
+    use crate::{
+        mock_analysis::{single_file, MockAnalysis},
+        FileRange, TextRange,
+    };
 
     #[test]
     fn test_highlighting() {
@@ -475,4 +497,25 @@ fn bar() {
         let _ = host.analysis().highlight(file_id).unwrap();
         // eprintln!("elapsed: {:?}", t.elapsed());
     }
+
+    #[test]
+    fn test_ranges() {
+        let (analysis, file_id) = single_file(
+            r#"
+            #[derive(Clone, Debug)]
+            struct Foo {
+                pub x: i32,
+                pub y: i32,
+            }"#,
+        );
+
+        let highlights = &analysis
+            .highlight_range(FileRange {
+                file_id,
+                range: TextRange::offset_len(82.into(), 1.into()), // "x"
+            })
+            .unwrap();
+
+        assert_eq!(highlights[0].tag, "field");
+    }
 }
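The range handling hinges on `covering_element`: the smallest syntax node that fully contains the requested range becomes the walk root, so `preorder_with_tokens` only visits that subtree instead of the whole file. Note the covering node can still be larger than the requested range, so some returned highlights may fall outside it. A standalone restatement of that narrowing step (a sketch mirroring the hunk above, not additional committed code):

    use ra_syntax::{NodeOrToken, SyntaxNode, TextRange};

    // Pick the smallest node covering `range` and walk only that subtree;
    // with no range, walk the whole tree.
    fn narrowed_root(root: SyntaxNode, range: Option<TextRange>) -> SyntaxNode {
        match range {
            Some(range) => match root.covering_element(range) {
                NodeOrToken::Node(node) => node,
                // A bare token has no subtree of its own; start from its parent node.
                NodeOrToken::Token(token) => token.parent(),
            },
            None => root,
        }
    }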
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index 638987ee8..db82eeb1c 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -7,9 +7,9 @@ use lsp_types::{
     CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
     ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
+    TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
+    WorkDoneProgressOptions,
 };
 
 pub fn server_capabilities() -> ServerCapabilities {
@@ -60,7 +60,7 @@ pub fn server_capabilities() -> ServerCapabilities {
         execute_command_provider: None,
         workspace: None,
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
-        semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
+        semantic_tokens_provider: Some(
             SemanticTokensOptions {
                 legend: SemanticTokensLegend {
                     token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
@@ -71,9 +71,11 @@ pub fn server_capabilities() -> ServerCapabilities {
                 },
 
                 document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
-                ..SemanticTokensOptions::default()
-            },
-        )),
+                range_provider: Some(true),
+                work_done_progress_options: Default::default(),
+            }
+            .into(),
+        ),
         experimental: Default::default(),
     }
 }
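With `range_provider: Some(true)` the server now advertises that it can answer `textDocument/semanticTokens/range` in addition to whole-document requests. A rough sketch of the check a client could perform against these capabilities (a hypothetical helper, using the enum variant and field names visible in the hunk above):

    use lsp_types::{SemanticTokensServerCapabilities, ServerCapabilities};

    // Hypothetical client-side helper: does this server accept range requests?
    fn supports_range_tokens(caps: &ServerCapabilities) -> bool {
        match &caps.semantic_tokens_provider {
            Some(SemanticTokensServerCapabilities::SemanticTokensOptions(opts)) => {
                opts.range_provider == Some(true)
            }
            _ => false,
        }
    }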
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 6e9e604a6..2b25f5443 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -527,8 +527,9 @@ fn on_request(
         .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
         .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
         .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
-        .on::<req::Ssr>(handlers::handle_ssr)?
         .on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+        .on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
+        .on::<req::Ssr>(handlers::handle_ssr)?
         .finish();
     Ok(())
 }
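Requests are dispatched by method name, so the payload that reaches the new route is a `textDocument/semanticTokens/range` request carrying a document identifier plus the range to highlight. An illustrative wire-level sketch (URI and positions are made up for the example):

    use serde_json::json;

    // Hypothetical JSON-RPC request a client would send to exercise the new handler.
    let request = json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "textDocument/semanticTokens/range",
        "params": {
            "textDocument": { "uri": "file:///tmp/demo/src/lib.rs" },
            "range": {
                "start": { "line": 5, "character": 0 },
                "end": { "line": 20, "character": 0 }
            }
        }
    });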
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs
index e13e7c95a..267edd578 100644
--- a/crates/rust-analyzer/src/main_loop/handlers.rs
+++ b/crates/rust-analyzer/src/main_loop/handlers.rs
@@ -17,8 +17,8 @@ use lsp_types::{
     Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
     FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
     PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
-    SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
-    TextDocumentIdentifier, TextEdit, WorkspaceEdit,
+    SemanticTokens, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
+    SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit,
 };
 use ra_ide::{
     AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -1092,3 +1092,25 @@ pub fn handle_semantic_tokens(
 
     Ok(Some(tokens.into()))
 }
+
+pub fn handle_semantic_tokens_range(
+    world: WorldSnapshot,
+    params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+    let _p = profile("handle_semantic_tokens_range");
+
+    let frange = (&params.text_document, params.range).try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(frange.file_id)?;
+
+    let mut builder = SemanticTokensBuilder::default();
+
+    for h in world.analysis().highlight_range(frange)?.into_iter() {
+        let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
+        let (token_type, token_modifiers) = type_and_modifiers.conv();
+        builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
+    }
+
+    let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
+
+    Ok(Some(tokens.into()))
+}
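The handler converts the LSP range to a `FileRange`, runs `highlight_range`, and feeds each highlight through `SemanticTokensBuilder` (from the crate's `semantic_tokens` module), which emits the flat relative encoding the protocol expects: five integers per token, with line and start column expressed as deltas from the previous token. A simplified, self-contained sketch of that encoding, assuming tokens arrive sorted by position (not the builder's actual code):

    /// (delta_line, delta_start, length, token_type, token_modifiers_bitset)
    type RawToken = [u32; 5];

    // Encode absolute (line, start_col, length, token_type) tuples into the
    // LSP relative form used by the semantic tokens payload.
    fn delta_encode(tokens: &[(u32, u32, u32, u32)]) -> Vec<RawToken> {
        let mut prev_line = 0;
        let mut prev_start = 0;
        let mut out = Vec::new();
        for &(line, start, length, token_type) in tokens {
            let delta_line = line - prev_line;
            // The start column is relative to the previous token only when the
            // token stays on the same line; otherwise it is absolute.
            let delta_start = if delta_line == 0 { start - prev_start } else { start };
            out.push([delta_line, delta_start, length, token_type, 0]);
            prev_line = line;
            prev_start = start;
        }
        out
    }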
diff --git a/crates/rust-analyzer/src/req.rs b/crates/rust-analyzer/src/req.rs
index 3734899bc..642ac41ac 100644
--- a/crates/rust-analyzer/src/req.rs
+++ b/crates/rust-analyzer/src/req.rs
@@ -12,9 +12,10 @@ pub use lsp_types::{
     DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
     PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
     PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
-    SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
-    ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
-    TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
+    SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
+    SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit,
+    WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
 };
 
 pub enum AnalyzerStatus {}