Diffstat (limited to 'crates')
 -rw-r--r--  crates/ra_ide/src/lib.rs                        |   9
 -rw-r--r--  crates/ra_ide/src/syntax_highlighting.rs        | 111
 -rw-r--r--  crates/rust-analyzer/src/caps.rs                |  16
 -rw-r--r--  crates/rust-analyzer/src/main_loop.rs           |   3
 -rw-r--r--  crates/rust-analyzer/src/main_loop/handlers.rs  |  26
 -rw-r--r--  crates/rust-analyzer/src/req.rs                 |   7
 6 files changed, 131 insertions, 41 deletions
diff --git a/crates/ra_ide/src/lib.rs b/crates/ra_ide/src/lib.rs
index 82e10bc7e..d22870669 100644
--- a/crates/ra_ide/src/lib.rs
+++ b/crates/ra_ide/src/lib.rs
@@ -425,9 +425,14 @@ impl Analysis {
         self.with_db(|db| runnables::runnables(db, file_id))
     }
 
-    /// Computes syntax highlighting for the given file.
+    /// Computes syntax highlighting for the given file
     pub fn highlight(&self, file_id: FileId) -> Cancelable<Vec<HighlightedRange>> {
-        self.with_db(|db| syntax_highlighting::highlight(db, file_id))
+        self.with_db(|db| syntax_highlighting::highlight(db, file_id, None))
+    }
+
+    /// Computes syntax highlighting for the given file range.
+    pub fn highlight_range(&self, frange: FileRange) -> Cancelable<Vec<HighlightedRange>> {
+        self.with_db(|db| syntax_highlighting::highlight(db, frange.file_id, Some(frange.range)))
     }
 
     /// Computes syntax highlighting for the given file.
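The net effect on the ra_ide API is that `Analysis::highlight_range` mirrors `Analysis::highlight` but restricts the work to a `FileRange`. A minimal sketch of a caller, assuming the `mock_analysis::single_file` test helper is reachable and using the `text_unit`-era `TextRange` constructors seen elsewhere in this crate (illustrative only, not part of this commit):

use ra_ide::{mock_analysis::single_file, FileRange, TextRange};

fn demo() {
    let (analysis, file_id) = single_file("fn main() { let answer = 42; }");

    // Whole-file highlighting, unchanged behaviour.
    let all = analysis.highlight(file_id).unwrap();

    // Highlighting restricted to the first 10 bytes of the file.
    let partial = analysis
        .highlight_range(FileRange { file_id, range: TextRange::from_to(0.into(), 10.into()) })
        .unwrap();

    // The ranged query can only ever report a subset of the full result.
    assert!(partial.len() <= all.len());
}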
diff --git a/crates/ra_ide/src/syntax_highlighting.rs b/crates/ra_ide/src/syntax_highlighting.rs
index 812229b4e..9bc3ad448 100644
--- a/crates/ra_ide/src/syntax_highlighting.rs
+++ b/crates/ra_ide/src/syntax_highlighting.rs
@@ -5,8 +5,8 @@ use ra_db::SourceDatabase;
 use ra_ide_db::{defs::NameDefinition, RootDatabase};
 use ra_prof::profile;
 use ra_syntax::{
-    ast, AstNode, Direction, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken, TextRange,
-    WalkEvent, T,
+    ast, AstNode, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxKind::*, SyntaxToken,
+    TextRange, WalkEvent, T,
 };
 use rustc_hash::FxHashMap;
 
@@ -67,8 +67,13 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
     }
 }
 
-pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> {
+pub(crate) fn highlight(
+    db: &RootDatabase,
+    file_id: FileId,
+    range: Option<TextRange>,
+) -> Vec<HighlightedRange> {
     let _p = profile("highlight");
+
     let parse = db.parse(file_id);
     let root = parse.tree().syntax().clone();
 
@@ -79,22 +84,56 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
 
     let mut in_macro_call = None;
 
+    // Determine the root based on the given range.
+    let (root, highlight_range) = if let Some(range) = range {
+        let root = match root.covering_element(range) {
+            NodeOrToken::Node(node) => node,
+            NodeOrToken::Token(token) => token.parent(),
+        };
+        (root, range)
+    } else {
+        (root.clone(), root.text_range())
+    };
+
     for event in root.preorder_with_tokens() {
         match event {
-            WalkEvent::Enter(node) => match node.kind() {
-                MACRO_CALL => {
-                    in_macro_call = Some(node.clone());
-                    if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
-                        res.push(HighlightedRange { range, tag: tags::MACRO, binding_hash: None });
-                    }
+            WalkEvent::Enter(node) => {
+                if node.text_range().intersection(&highlight_range).is_none() {
+                    continue;
                 }
-                _ if in_macro_call.is_some() => {
-                    if let Some(token) = node.as_token() {
-                        if let Some((tag, binding_hash)) = highlight_token_tree(
+
+                match node.kind() {
+                    MACRO_CALL => {
+                        in_macro_call = Some(node.clone());
+                        if let Some(range) = highlight_macro(InFile::new(file_id.into(), node)) {
+                            res.push(HighlightedRange {
+                                range,
+                                tag: tags::MACRO,
+                                binding_hash: None,
+                            });
+                        }
+                    }
+                    _ if in_macro_call.is_some() => {
+                        if let Some(token) = node.as_token() {
+                            if let Some((tag, binding_hash)) = highlight_token_tree(
+                                &mut sb,
+                                &analyzer,
+                                &mut bindings_shadow_count,
+                                InFile::new(file_id.into(), token.clone()),
+                            ) {
+                                res.push(HighlightedRange {
+                                    range: node.text_range(),
+                                    tag,
+                                    binding_hash,
+                                });
+                            }
+                        }
+                    }
+                    _ => {
+                        if let Some((tag, binding_hash)) = highlight_node(
                             &mut sb,
-                            &analyzer,
                             &mut bindings_shadow_count,
-                            InFile::new(file_id.into(), token.clone()),
+                            InFile::new(file_id.into(), node.clone()),
                         ) {
                             res.push(HighlightedRange {
                                 range: node.text_range(),
@@ -104,17 +143,12 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
                         }
                     }
                 }
-                _ => {
-                    if let Some((tag, binding_hash)) = highlight_node(
-                        &mut sb,
-                        &mut bindings_shadow_count,
-                        InFile::new(file_id.into(), node.clone()),
-                    ) {
-                        res.push(HighlightedRange { range: node.text_range(), tag, binding_hash });
-                    }
-                }
-            },
+            }
             WalkEvent::Leave(node) => {
+                if node.text_range().intersection(&highlight_range).is_none() {
+                    continue;
+                }
+
                 if let Some(m) = in_macro_call.as_ref() {
                     if *m == node {
                         in_macro_call = None;
@@ -265,7 +299,7 @@ pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: boo
         )
     }
 
-    let mut ranges = highlight(db, file_id);
+    let mut ranges = highlight(db, file_id, None);
     ranges.sort_by_key(|it| it.range.start());
     // quick non-optimal heuristic to intersect token ranges and highlighted ranges
     let mut frontier = 0;
@@ -374,7 +408,10 @@ mod tests {
 
     use test_utils::{assert_eq_text, project_dir, read_text};
 
-    use crate::mock_analysis::{single_file, MockAnalysis};
+    use crate::{
+        mock_analysis::{single_file, MockAnalysis},
+        FileRange, TextRange,
+    };
 
     #[test]
     fn test_highlighting() {
@@ -475,4 +512,26 @@ fn bar() {
         let _ = host.analysis().highlight(file_id).unwrap();
         // eprintln!("elapsed: {:?}", t.elapsed());
     }
+
+    #[test]
+    fn test_ranges() {
+        let (analysis, file_id) = single_file(
+            r#"
+            #[derive(Clone, Debug)]
+            struct Foo {
+                pub x: i32,
+                pub y: i32,
+            }"#,
+        );
+
+        // The "x"
+        let highlights = &analysis
+            .highlight_range(FileRange {
+                file_id,
+                range: TextRange::offset_len(82.into(), 1.into()),
+            })
+            .unwrap();
+
+        assert_eq!(highlights[0].tag, "field");
+    }
 }
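Conceptually, the range support added to `highlight` does two things: it narrows the traversal root to the smallest syntax element covering the requested range (`covering_element`), and it skips every node whose own text range does not intersect that range. A simplified, self-contained sketch of the same pruning idea, using plain structs instead of ra_syntax's rowan tree (all names below are illustrative, not from the codebase):

#[derive(Clone, Copy)]
struct Range {
    start: u32,
    end: u32,
}

impl Range {
    /// True if the two ranges overlap.
    fn intersects(self, other: Range) -> bool {
        self.start < other.end && other.start < self.end
    }
}

struct Node {
    range: Range,
    children: Vec<Node>,
}

/// Collect the ranges of all nodes that overlap `target`,
/// pruning whole subtrees that lie outside it.
fn collect_in_range(node: &Node, target: Range, out: &mut Vec<Range>) {
    if !node.range.intersects(target) {
        // Corresponds to the `continue` on non-intersecting nodes above.
        return;
    }
    out.push(node.range);
    for child in &node.children {
        collect_in_range(child, target, out);
    }
}

The real loop walks `preorder_with_tokens` events and `continue`s past non-intersecting nodes one event at a time rather than pruning recursively; since a child's range is always contained in its parent's, both formulations report the same set of nodes.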
diff --git a/crates/rust-analyzer/src/caps.rs b/crates/rust-analyzer/src/caps.rs
index 638987ee8..db82eeb1c 100644
--- a/crates/rust-analyzer/src/caps.rs
+++ b/crates/rust-analyzer/src/caps.rs
@@ -7,9 +7,9 @@ use lsp_types::{
     CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
     ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
     SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
-    SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
-    SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
-    TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+    SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
+    TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
+    WorkDoneProgressOptions,
 };
 
 pub fn server_capabilities() -> ServerCapabilities {
@@ -60,7 +60,7 @@ pub fn server_capabilities() -> ServerCapabilities {
         execute_command_provider: None,
         workspace: None,
         call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
-        semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
+        semantic_tokens_provider: Some(
             SemanticTokensOptions {
                 legend: SemanticTokensLegend {
                     token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
@@ -71,9 +71,11 @@ pub fn server_capabilities() -> ServerCapabilities {
                 },
 
                 document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
-                ..SemanticTokensOptions::default()
-            },
-        )),
+                range_provider: Some(true),
+                work_done_progress_options: Default::default(),
+            }
+            .into(),
+        ),
         experimental: Default::default(),
     }
 }
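Advertising `range_provider: Some(true)` is what lets clients send `textDocument/semanticTokens/range` at all. A hedged sketch of how one might assert the advertised capability, assuming `server_capabilities` is reachable from a test (the module path and the test itself are hypothetical, not part of this commit):

use lsp_types::SemanticTokensServerCapabilities;

#[test]
fn semantic_tokens_range_is_advertised() {
    // `server_capabilities()` is the function modified above; the path used
    // to reach it here is assumed for illustration.
    let caps = crate::caps::server_capabilities();
    match caps.semantic_tokens_provider {
        Some(SemanticTokensServerCapabilities::SemanticTokensOptions(opts)) => {
            assert_eq!(opts.range_provider, Some(true));
        }
        other => panic!("unexpected semantic tokens capability: {:?}", other),
    }
}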
diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs
index 6e9e604a6..2b25f5443 100644
--- a/crates/rust-analyzer/src/main_loop.rs
+++ b/crates/rust-analyzer/src/main_loop.rs
@@ -527,8 +527,9 @@ fn on_request(
         .on::<req::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)?
         .on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
         .on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
-        .on::<req::Ssr>(handlers::handle_ssr)?
         .on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
+        .on::<req::SemanticTokensRangeRequest>(handlers::handle_semantic_tokens_range)?
+        .on::<req::Ssr>(handlers::handle_ssr)?
         .finish();
     Ok(())
 }
diff --git a/crates/rust-analyzer/src/main_loop/handlers.rs b/crates/rust-analyzer/src/main_loop/handlers.rs
index e13e7c95a..267edd578 100644
--- a/crates/rust-analyzer/src/main_loop/handlers.rs
+++ b/crates/rust-analyzer/src/main_loop/handlers.rs
@@ -17,8 +17,8 @@ use lsp_types::{
     Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
     FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
     PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
-    SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
-    TextDocumentIdentifier, TextEdit, WorkspaceEdit,
+    SemanticTokens, SemanticTokensParams, SemanticTokensRangeParams, SemanticTokensRangeResult,
+    SemanticTokensResult, SymbolInformation, TextDocumentIdentifier, TextEdit, WorkspaceEdit,
 };
 use ra_ide::{
     AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -1092,3 +1092,25 @@ pub fn handle_semantic_tokens(
 
     Ok(Some(tokens.into()))
 }
+
+pub fn handle_semantic_tokens_range(
+    world: WorldSnapshot,
+    params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+    let _p = profile("handle_semantic_tokens_range");
+
+    let frange = (&params.text_document, params.range).try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(frange.file_id)?;
+
+    let mut builder = SemanticTokensBuilder::default();
+
+    for h in world.analysis().highlight_range(frange)?.into_iter() {
+        let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
+        let (token_type, token_modifiers) = type_and_modifiers.conv();
+        builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
+    }
+
+    let tokens = SemanticTokens { data: builder.build(), ..Default::default() };
+
+    Ok(Some(tokens.into()))
+}
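The new handler is the server-side endpoint for the LSP `textDocument/semanticTokens/range` request: the client names a document and a range, and the `(text_document, range)` pair is converted into a `FileRange` before being handed to `Analysis::highlight_range`. A rough sketch of the client-side parameters built with `lsp_types` (the URI and positions are made up, and the field layout is assumed for the lsp_types version of this era; treat this as illustrative only):

use lsp_types::{
    PartialResultParams, Position, Range, SemanticTokensRangeParams, TextDocumentIdentifier, Url,
    WorkDoneProgressParams,
};

fn example_params() -> SemanticTokensRangeParams {
    SemanticTokensRangeParams {
        text_document: TextDocumentIdentifier {
            uri: Url::parse("file:///tmp/example/src/lib.rs").unwrap(),
        },
        // Only tokens intersecting lines 10..20 are requested.
        range: Range::new(Position::new(10, 0), Position::new(20, 0)),
        work_done_progress_params: WorkDoneProgressParams::default(),
        partial_result_params: PartialResultParams::default(),
    }
}

On the server, `handle_semantic_tokens_range` then mirrors `handle_semantic_tokens`, differing only in that it calls `highlight_range(frange)` instead of `highlight(file_id)`.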
diff --git a/crates/rust-analyzer/src/req.rs b/crates/rust-analyzer/src/req.rs
index 3734899bc..642ac41ac 100644
--- a/crates/rust-analyzer/src/req.rs
+++ b/crates/rust-analyzer/src/req.rs
@@ -12,9 +12,10 @@ pub use lsp_types::{
     DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
     PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
     PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
-    SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
-    ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
-    TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
+    SelectionRangeParams, SemanticTokensParams, SemanticTokensRangeParams,
+    SemanticTokensRangeResult, SemanticTokensResult, ServerCapabilities, ShowMessageParams,
+    SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams, TextEdit,
+    WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
 };
 
 pub enum AnalyzerStatus {}