Diffstat (limited to 'crates')
182 files changed, 1120 insertions, 466 deletions
diff --git a/crates/ra_assists/src/assists/auto_import.rs b/crates/ra_assists/src/assists/auto_import.rs
index 2629f00e6..2068256b0 100644
--- a/crates/ra_assists/src/assists/auto_import.rs
+++ b/crates/ra_assists/src/assists/auto_import.rs
@@ -1,7 +1,6 @@ | |||
1 | use hir::{db::HirDatabase, ModPath}; | 1 | use hir::{db::HirDatabase, ModPath}; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | ast::{self, AstNode}, | 3 | ast::{self, AstNode}, |
4 | SyntaxKind::USE_ITEM, | ||
5 | SyntaxNode, | 4 | SyntaxNode, |
6 | }; | 5 | }; |
7 | 6 | ||
@@ -33,9 +32,11 @@ pub(crate) fn auto_import<F: ImportsLocator>( | |||
33 | ) -> Option<Assist> { | 32 | ) -> Option<Assist> { |
34 | let path_to_import: ast::Path = ctx.find_node_at_offset()?; | 33 | let path_to_import: ast::Path = ctx.find_node_at_offset()?; |
35 | let path_to_import_syntax = path_to_import.syntax(); | 34 | let path_to_import_syntax = path_to_import.syntax(); |
36 | if path_to_import_syntax.ancestors().find(|ancestor| ancestor.kind() == USE_ITEM).is_some() { | 35 | if path_to_import_syntax.ancestors().find_map(ast::UseItem::cast).is_some() { |
37 | return None; | 36 | return None; |
38 | } | 37 | } |
38 | let name_to_import = | ||
39 | path_to_import_syntax.descendants().find_map(ast::NameRef::cast)?.syntax().to_string(); | ||
39 | 40 | ||
40 | let module = path_to_import_syntax.ancestors().find_map(ast::Module::cast); | 41 | let module = path_to_import_syntax.ancestors().find_map(ast::Module::cast); |
41 | let position = match module.and_then(|it| it.item_list()) { | 42 | let position = match module.and_then(|it| it.item_list()) { |
@@ -52,7 +53,7 @@ pub(crate) fn auto_import<F: ImportsLocator>( | |||
52 | } | 53 | } |
53 | 54 | ||
54 | let proposed_imports = imports_locator | 55 | let proposed_imports = imports_locator |
55 | .find_imports(&path_to_import_syntax.to_string()) | 56 | .find_imports(&name_to_import) |
56 | .into_iter() | 57 | .into_iter() |
57 | .filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def)) | 58 | .filter_map(|module_def| module_with_name_to_import.find_use_path(ctx.db, module_def)) |
58 | .filter(|use_path| !use_path.segments.is_empty()) | 59 | .filter(|use_path| !use_path.segments.is_empty()) |
@@ -62,16 +63,12 @@ pub(crate) fn auto_import<F: ImportsLocator>( | |||
62 | return None; | 63 | return None; |
63 | } | 64 | } |
64 | 65 | ||
65 | ctx.add_assist_group( | 66 | ctx.add_assist_group(AssistId("auto_import"), format!("Import {}", name_to_import), || { |
66 | AssistId("auto_import"), | 67 | proposed_imports |
67 | format!("Import {}", path_to_import_syntax), | 68 | .into_iter() |
68 | || { | 69 | .map(|import| import_to_action(import, &position, &path_to_import_syntax)) |
69 | proposed_imports | 70 | .collect() |
70 | .into_iter() | 71 | }) |
71 | .map(|import| import_to_action(import, &position, &path_to_import_syntax)) | ||
72 | .collect() | ||
73 | }, | ||
74 | ) | ||
75 | } | 72 | } |
76 | 73 | ||
77 | fn import_to_action(import: ModPath, position: &SyntaxNode, anchor: &SyntaxNode) -> ActionBuilder { | 74 | fn import_to_action(import: ModPath, position: &SyntaxNode, anchor: &SyntaxNode) -> ActionBuilder { |
@@ -121,21 +118,29 @@ mod tests { | |||
121 | r" | 118 | r" |
122 | use PubMod::PubStruct1; | 119 | use PubMod::PubStruct1; |
123 | 120 | ||
124 | PubStruct2<|> | 121 | struct Test { |
122 | test: Pub<|>Struct2<u8>, | ||
123 | } | ||
125 | 124 | ||
126 | pub mod PubMod { | 125 | pub mod PubMod { |
127 | pub struct PubStruct1; | 126 | pub struct PubStruct1; |
128 | pub struct PubStruct2; | 127 | pub struct PubStruct2<T> { |
128 | _t: T, | ||
129 | } | ||
129 | } | 130 | } |
130 | ", | 131 | ", |
131 | r" | 132 | r" |
132 | use PubMod::{PubStruct2, PubStruct1}; | 133 | use PubMod::{PubStruct2, PubStruct1}; |
133 | 134 | ||
134 | PubStruct2<|> | 135 | struct Test { |
136 | test: Pub<|>Struct2<u8>, | ||
137 | } | ||
135 | 138 | ||
136 | pub mod PubMod { | 139 | pub mod PubMod { |
137 | pub struct PubStruct1; | 140 | pub struct PubStruct1; |
138 | pub struct PubStruct2; | 141 | pub struct PubStruct2<T> { |
142 | _t: T, | ||
143 | } | ||
139 | } | 144 | } |
140 | ", | 145 | ", |
141 | ); | 146 | ); |
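Note: the assist now queries the imports locator with the bare NameRef found inside the path node rather than the stringified path, so a reference carrying type arguments such as `PubStruct2<u8>` (see the updated test) is looked up as plain `PubStruct2`. A minimal sketch of that extraction, assuming the ra_syntax API used in this tree (the free function is illustrative; the real code runs inside the assist context):

use ra_syntax::ast::{self, AstNode};

// Picks the name to search for, e.g. "PubStruct2" for the path "PubStruct2<u8>";
// path.syntax().to_string() would include the type arguments and defeat the lookup.
fn name_to_import(path: &ast::Path) -> Option<String> {
    let name_ref = path.syntax().descendants().find_map(ast::NameRef::cast)?;
    Some(name_ref.syntax().to_string())
}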
diff --git a/crates/ra_cargo_watch/src/conv.rs b/crates/ra_cargo_watch/src/conv.rs
index 8fba400ae..506370535 100644
--- a/crates/ra_cargo_watch/src/conv.rs
+++ b/crates/ra_cargo_watch/src/conv.rs
@@ -1,12 +1,11 @@ | |||
1 | //! This module provides the functionality needed to convert diagnostics from | 1 | //! This module provides the functionality needed to convert diagnostics from |
2 | //! `cargo check` json format to the LSP diagnostic format. | 2 | //! `cargo check` json format to the LSP diagnostic format. |
3 | use cargo_metadata::diagnostic::{ | 3 | use cargo_metadata::diagnostic::{ |
4 | Applicability, Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, | 4 | Diagnostic as RustDiagnostic, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, |
5 | DiagnosticSpanMacroExpansion, | ||
6 | }; | 5 | }; |
7 | use lsp_types::{ | 6 | use lsp_types::{ |
8 | Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, Location, | 7 | CodeAction, Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, DiagnosticTag, |
9 | NumberOrString, Position, Range, Url, | 8 | Location, NumberOrString, Position, Range, TextEdit, Url, WorkspaceEdit, |
10 | }; | 9 | }; |
11 | use std::{ | 10 | use std::{ |
12 | fmt::Write, | 11 | fmt::Write, |
@@ -117,38 +116,9 @@ fn is_deprecated(rd: &RustDiagnostic) -> bool { | |||
117 | } | 116 | } |
118 | } | 117 | } |
119 | 118 | ||
120 | #[derive(Clone, Debug)] | ||
121 | pub struct SuggestedFix { | ||
122 | pub title: String, | ||
123 | pub location: Location, | ||
124 | pub replacement: String, | ||
125 | pub applicability: Applicability, | ||
126 | pub diagnostics: Vec<Diagnostic>, | ||
127 | } | ||
128 | |||
129 | impl std::cmp::PartialEq<SuggestedFix> for SuggestedFix { | ||
130 | fn eq(&self, other: &SuggestedFix) -> bool { | ||
131 | if self.title == other.title | ||
132 | && self.location == other.location | ||
133 | && self.replacement == other.replacement | ||
134 | { | ||
135 | // Applicability doesn't impl PartialEq... | ||
136 | match (&self.applicability, &other.applicability) { | ||
137 | (Applicability::MachineApplicable, Applicability::MachineApplicable) => true, | ||
138 | (Applicability::HasPlaceholders, Applicability::HasPlaceholders) => true, | ||
139 | (Applicability::MaybeIncorrect, Applicability::MaybeIncorrect) => true, | ||
140 | (Applicability::Unspecified, Applicability::Unspecified) => true, | ||
141 | _ => false, | ||
142 | } | ||
143 | } else { | ||
144 | false | ||
145 | } | ||
146 | } | ||
147 | } | ||
148 | |||
149 | enum MappedRustChildDiagnostic { | 119 | enum MappedRustChildDiagnostic { |
150 | Related(DiagnosticRelatedInformation), | 120 | Related(DiagnosticRelatedInformation), |
151 | SuggestedFix(SuggestedFix), | 121 | SuggestedFix(CodeAction), |
152 | MessageLine(String), | 122 | MessageLine(String), |
153 | } | 123 | } |
154 | 124 | ||
@@ -176,12 +146,20 @@ fn map_rust_child_diagnostic( | |||
176 | rd.message.clone() | 146 | rd.message.clone() |
177 | }; | 147 | }; |
178 | 148 | ||
179 | MappedRustChildDiagnostic::SuggestedFix(SuggestedFix { | 149 | let edit = { |
150 | let edits = vec![TextEdit::new(location.range, suggested_replacement.clone())]; | ||
151 | let mut edit_map = std::collections::HashMap::new(); | ||
152 | edit_map.insert(location.uri, edits); | ||
153 | WorkspaceEdit::new(edit_map) | ||
154 | }; | ||
155 | |||
156 | MappedRustChildDiagnostic::SuggestedFix(CodeAction { | ||
180 | title, | 157 | title, |
181 | location, | 158 | kind: Some("quickfix".to_string()), |
182 | replacement: suggested_replacement.clone(), | 159 | diagnostics: None, |
183 | applicability: span.suggestion_applicability.clone().unwrap_or(Applicability::Unknown), | 160 | edit: Some(edit), |
184 | diagnostics: vec![], | 161 | command: None, |
162 | is_preferred: None, | ||
185 | }) | 163 | }) |
186 | } else { | 164 | } else { |
187 | MappedRustChildDiagnostic::Related(DiagnosticRelatedInformation { | 165 | MappedRustChildDiagnostic::Related(DiagnosticRelatedInformation { |
@@ -195,7 +173,7 @@ fn map_rust_child_diagnostic( | |||
195 | pub(crate) struct MappedRustDiagnostic { | 173 | pub(crate) struct MappedRustDiagnostic { |
196 | pub location: Location, | 174 | pub location: Location, |
197 | pub diagnostic: Diagnostic, | 175 | pub diagnostic: Diagnostic, |
198 | pub suggested_fixes: Vec<SuggestedFix>, | 176 | pub fixes: Vec<CodeAction>, |
199 | } | 177 | } |
200 | 178 | ||
201 | /// Converts a Rust root diagnostic to LSP form | 179 | /// Converts a Rust root diagnostic to LSP form |
@@ -250,15 +228,13 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
250 | } | 228 | } |
251 | } | 229 | } |
252 | 230 | ||
253 | let mut suggested_fixes = vec![]; | 231 | let mut fixes = vec![]; |
254 | let mut message = rd.message.clone(); | 232 | let mut message = rd.message.clone(); |
255 | for child in &rd.children { | 233 | for child in &rd.children { |
256 | let child = map_rust_child_diagnostic(&child, workspace_root); | 234 | let child = map_rust_child_diagnostic(&child, workspace_root); |
257 | match child { | 235 | match child { |
258 | MappedRustChildDiagnostic::Related(related) => related_information.push(related), | 236 | MappedRustChildDiagnostic::Related(related) => related_information.push(related), |
259 | MappedRustChildDiagnostic::SuggestedFix(suggested_fix) => { | 237 | MappedRustChildDiagnostic::SuggestedFix(code_action) => fixes.push(code_action.into()), |
260 | suggested_fixes.push(suggested_fix) | ||
261 | } | ||
262 | MappedRustChildDiagnostic::MessageLine(message_line) => { | 238 | MappedRustChildDiagnostic::MessageLine(message_line) => { |
263 | write!(&mut message, "\n{}", message_line).unwrap(); | 239 | write!(&mut message, "\n{}", message_line).unwrap(); |
264 | 240 | ||
@@ -295,7 +271,7 @@ pub(crate) fn map_rust_diagnostic_to_lsp( | |||
295 | tags: if !tags.is_empty() { Some(tags) } else { None }, | 271 | tags: if !tags.is_empty() { Some(tags) } else { None }, |
296 | }; | 272 | }; |
297 | 273 | ||
298 | Some(MappedRustDiagnostic { location, diagnostic, suggested_fixes }) | 274 | Some(MappedRustDiagnostic { location, diagnostic, fixes }) |
299 | } | 275 | } |
300 | 276 | ||
301 | /// Returns a `Url` object from a given path, will lowercase drive letters if present. | 277 | /// Returns a `Url` object from a given path, will lowercase drive letters if present. |
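Note: the intermediate SuggestedFix type is gone; a cargo/clippy suggestion is now mapped directly to an lsp_types::CodeAction whose WorkspaceEdit carries the replacement. A condensed sketch of the construction used in map_rust_child_diagnostic above (field set as in the lsp_types version this tree pins, where kind is a plain Option<String>):

use lsp_types::{CodeAction, Location, TextEdit, WorkspaceEdit};
use std::collections::HashMap;

fn suggestion_to_code_action(title: String, location: Location, replacement: String) -> CodeAction {
    // One TextEdit on the file the span points at; the parent diagnostic is attached
    // later, in ra_cargo_watch::lib, before the action is sent to the main loop.
    let mut changes = HashMap::new();
    changes.insert(location.uri, vec![TextEdit::new(location.range, replacement)]);
    CodeAction {
        title,
        kind: Some("quickfix".to_string()),
        diagnostics: None,
        edit: Some(WorkspaceEdit::new(changes)),
        command: None,
        is_preferred: None,
    }
}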
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
index cb0920914..95ca163dc 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_clippy_pass_by_ref.snap
@@ -61,25 +61,39 @@ MappedRustDiagnostic { | |||
61 | ), | 61 | ), |
62 | tags: None, | 62 | tags: None, |
63 | }, | 63 | }, |
64 | suggested_fixes: [ | 64 | fixes: [ |
65 | SuggestedFix { | 65 | CodeAction { |
66 | title: "consider passing by value instead: \'self\'", | 66 | title: "consider passing by value instead: \'self\'", |
67 | location: Location { | 67 | kind: Some( |
68 | uri: "file:///test/compiler/mir/tagset.rs", | 68 | "quickfix", |
69 | range: Range { | 69 | ), |
70 | start: Position { | 70 | diagnostics: None, |
71 | line: 41, | 71 | edit: Some( |
72 | character: 23, | 72 | WorkspaceEdit { |
73 | }, | 73 | changes: Some( |
74 | end: Position { | 74 | { |
75 | line: 41, | 75 | "file:///test/compiler/mir/tagset.rs": [ |
76 | character: 28, | 76 | TextEdit { |
77 | }, | 77 | range: Range { |
78 | start: Position { | ||
79 | line: 41, | ||
80 | character: 23, | ||
81 | }, | ||
82 | end: Position { | ||
83 | line: 41, | ||
84 | character: 28, | ||
85 | }, | ||
86 | }, | ||
87 | new_text: "self", | ||
88 | }, | ||
89 | ], | ||
90 | }, | ||
91 | ), | ||
92 | document_changes: None, | ||
78 | }, | 93 | }, |
79 | }, | 94 | ), |
80 | replacement: "self", | 95 | command: None, |
81 | applicability: Unspecified, | 96 | is_preferred: None, |
82 | diagnostics: [], | ||
83 | }, | 97 | }, |
84 | ], | 98 | ], |
85 | } | 99 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap
index 19510ecc1..12eb32df4 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_handles_macro_location.snap
@@ -42,5 +42,5 @@ MappedRustDiagnostic { | |||
42 | related_information: None, | 42 | related_information: None, |
43 | tags: None, | 43 | tags: None, |
44 | }, | 44 | }, |
45 | suggested_fixes: [], | 45 | fixes: [], |
46 | } | 46 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap
index 92f7eec05..7b83a7cd0 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_macro_compiler_error.snap
@@ -57,5 +57,5 @@ MappedRustDiagnostic { | |||
57 | ), | 57 | ), |
58 | tags: None, | 58 | tags: None, |
59 | }, | 59 | }, |
60 | suggested_fixes: [], | 60 | fixes: [], |
61 | } | 61 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap
index cf683e4b6..54679c5db 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_incompatible_type_for_trait.snap
@@ -42,5 +42,5 @@ MappedRustDiagnostic { | |||
42 | related_information: None, | 42 | related_information: None, |
43 | tags: None, | 43 | tags: None, |
44 | }, | 44 | }, |
45 | suggested_fixes: [], | 45 | fixes: [], |
46 | } | 46 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap
index 8c1483c74..57df4ceaf 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_mismatched_type.snap
@@ -42,5 +42,5 @@ MappedRustDiagnostic { | |||
42 | related_information: None, | 42 | related_information: None, |
43 | tags: None, | 43 | tags: None, |
44 | }, | 44 | }, |
45 | suggested_fixes: [], | 45 | fixes: [], |
46 | } | 46 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap
index eb5a2247b..3e1fe736c 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_unused_variable.snap
@@ -46,25 +46,39 @@ MappedRustDiagnostic { | |||
46 | ], | 46 | ], |
47 | ), | 47 | ), |
48 | }, | 48 | }, |
49 | suggested_fixes: [ | 49 | fixes: [ |
50 | SuggestedFix { | 50 | CodeAction { |
51 | title: "consider prefixing with an underscore: \'_foo\'", | 51 | title: "consider prefixing with an underscore: \'_foo\'", |
52 | location: Location { | 52 | kind: Some( |
53 | uri: "file:///test/driver/subcommand/repl.rs", | 53 | "quickfix", |
54 | range: Range { | 54 | ), |
55 | start: Position { | 55 | diagnostics: None, |
56 | line: 290, | 56 | edit: Some( |
57 | character: 8, | 57 | WorkspaceEdit { |
58 | }, | 58 | changes: Some( |
59 | end: Position { | 59 | { |
60 | line: 290, | 60 | "file:///test/driver/subcommand/repl.rs": [ |
61 | character: 11, | 61 | TextEdit { |
62 | }, | 62 | range: Range { |
63 | start: Position { | ||
64 | line: 290, | ||
65 | character: 8, | ||
66 | }, | ||
67 | end: Position { | ||
68 | line: 290, | ||
69 | character: 11, | ||
70 | }, | ||
71 | }, | ||
72 | new_text: "_foo", | ||
73 | }, | ||
74 | ], | ||
75 | }, | ||
76 | ), | ||
77 | document_changes: None, | ||
63 | }, | 78 | }, |
64 | }, | 79 | ), |
65 | replacement: "_foo", | 80 | command: None, |
66 | applicability: MachineApplicable, | 81 | is_preferred: None, |
67 | diagnostics: [], | ||
68 | }, | 82 | }, |
69 | ], | 83 | ], |
70 | } | 84 | } |
diff --git a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap
index 2f4518931..69301078d 100644
--- a/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap
+++ b/crates/ra_cargo_watch/src/conv/snapshots/ra_cargo_watch__conv__test__snap_rustc_wrong_number_of_parameters.snap
@@ -61,5 +61,5 @@ MappedRustDiagnostic { | |||
61 | ), | 61 | ), |
62 | tags: None, | 62 | tags: None, |
63 | }, | 63 | }, |
64 | suggested_fixes: [], | 64 | fixes: [], |
65 | } | 65 | } |
diff --git a/crates/ra_cargo_watch/src/lib.rs b/crates/ra_cargo_watch/src/lib.rs
index a718a5e52..f07c34549 100644
--- a/crates/ra_cargo_watch/src/lib.rs
+++ b/crates/ra_cargo_watch/src/lib.rs
@@ -4,22 +4,20 @@ | |||
4 | use cargo_metadata::Message; | 4 | use cargo_metadata::Message; |
5 | use crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender}; | 5 | use crossbeam_channel::{never, select, unbounded, Receiver, RecvError, Sender}; |
6 | use lsp_types::{ | 6 | use lsp_types::{ |
7 | Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, WorkDoneProgressEnd, | 7 | CodeAction, CodeActionOrCommand, Diagnostic, Url, WorkDoneProgress, WorkDoneProgressBegin, |
8 | WorkDoneProgressReport, | 8 | WorkDoneProgressEnd, WorkDoneProgressReport, |
9 | }; | 9 | }; |
10 | use std::{ | 10 | use std::{ |
11 | collections::HashMap, | ||
12 | io::{BufRead, BufReader}, | 11 | io::{BufRead, BufReader}, |
13 | path::PathBuf, | 12 | path::PathBuf, |
14 | process::{Command, Stdio}, | 13 | process::{Command, Stdio}, |
15 | sync::Arc, | ||
16 | thread::JoinHandle, | 14 | thread::JoinHandle, |
17 | time::Instant, | 15 | time::Instant, |
18 | }; | 16 | }; |
19 | 17 | ||
20 | mod conv; | 18 | mod conv; |
21 | 19 | ||
22 | use crate::conv::{map_rust_diagnostic_to_lsp, MappedRustDiagnostic, SuggestedFix}; | 20 | use crate::conv::{map_rust_diagnostic_to_lsp, MappedRustDiagnostic}; |
23 | 21 | ||
24 | pub use crate::conv::url_from_path_with_drive_lowercasing; | 22 | pub use crate::conv::url_from_path_with_drive_lowercasing; |
25 | 23 | ||
@@ -38,7 +36,6 @@ pub struct CheckOptions { | |||
38 | #[derive(Debug)] | 36 | #[derive(Debug)] |
39 | pub struct CheckWatcher { | 37 | pub struct CheckWatcher { |
40 | pub task_recv: Receiver<CheckTask>, | 38 | pub task_recv: Receiver<CheckTask>, |
41 | pub state: Arc<CheckState>, | ||
42 | cmd_send: Option<Sender<CheckCommand>>, | 39 | cmd_send: Option<Sender<CheckCommand>>, |
43 | handle: Option<JoinHandle<()>>, | 40 | handle: Option<JoinHandle<()>>, |
44 | } | 41 | } |
@@ -46,7 +43,6 @@ pub struct CheckWatcher { | |||
46 | impl CheckWatcher { | 43 | impl CheckWatcher { |
47 | pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { | 44 | pub fn new(options: &CheckOptions, workspace_root: PathBuf) -> CheckWatcher { |
48 | let options = options.clone(); | 45 | let options = options.clone(); |
49 | let state = Arc::new(CheckState::new()); | ||
50 | 46 | ||
51 | let (task_send, task_recv) = unbounded::<CheckTask>(); | 47 | let (task_send, task_recv) = unbounded::<CheckTask>(); |
52 | let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); | 48 | let (cmd_send, cmd_recv) = unbounded::<CheckCommand>(); |
@@ -54,13 +50,12 @@ impl CheckWatcher { | |||
54 | let mut check = CheckWatcherThread::new(options, workspace_root); | 50 | let mut check = CheckWatcherThread::new(options, workspace_root); |
55 | check.run(&task_send, &cmd_recv); | 51 | check.run(&task_send, &cmd_recv); |
56 | }); | 52 | }); |
57 | CheckWatcher { task_recv, cmd_send: Some(cmd_send), handle: Some(handle), state } | 53 | CheckWatcher { task_recv, cmd_send: Some(cmd_send), handle: Some(handle) } |
58 | } | 54 | } |
59 | 55 | ||
60 | /// Returns a CheckWatcher that doesn't actually do anything | 56 | /// Returns a CheckWatcher that doesn't actually do anything |
61 | pub fn dummy() -> CheckWatcher { | 57 | pub fn dummy() -> CheckWatcher { |
62 | let state = Arc::new(CheckState::new()); | 58 | CheckWatcher { task_recv: never(), cmd_send: None, handle: None } |
63 | CheckWatcher { task_recv: never(), cmd_send: None, handle: None, state } | ||
64 | } | 59 | } |
65 | 60 | ||
66 | /// Schedule a re-start of the cargo check worker. | 61 | /// Schedule a re-start of the cargo check worker. |
@@ -87,84 +82,13 @@ impl std::ops::Drop for CheckWatcher { | |||
87 | } | 82 | } |
88 | } | 83 | } |
89 | 84 | ||
90 | #[derive(Clone, Debug)] | ||
91 | pub struct CheckState { | ||
92 | diagnostic_collection: HashMap<Url, Vec<Diagnostic>>, | ||
93 | suggested_fix_collection: HashMap<Url, Vec<SuggestedFix>>, | ||
94 | } | ||
95 | |||
96 | impl CheckState { | ||
97 | fn new() -> CheckState { | ||
98 | CheckState { | ||
99 | diagnostic_collection: HashMap::new(), | ||
100 | suggested_fix_collection: HashMap::new(), | ||
101 | } | ||
102 | } | ||
103 | |||
104 | /// Clear the cached diagnostics, and schedule updating diagnostics by the | ||
105 | /// server, to clear stale results. | ||
106 | pub fn clear(&mut self) -> Vec<Url> { | ||
107 | let cleared_files: Vec<Url> = self.diagnostic_collection.keys().cloned().collect(); | ||
108 | self.diagnostic_collection.clear(); | ||
109 | self.suggested_fix_collection.clear(); | ||
110 | cleared_files | ||
111 | } | ||
112 | |||
113 | pub fn diagnostics_for(&self, uri: &Url) -> Option<&[Diagnostic]> { | ||
114 | self.diagnostic_collection.get(uri).map(|d| d.as_slice()) | ||
115 | } | ||
116 | |||
117 | pub fn fixes_for(&self, uri: &Url) -> Option<&[SuggestedFix]> { | ||
118 | self.suggested_fix_collection.get(uri).map(|d| d.as_slice()) | ||
119 | } | ||
120 | |||
121 | pub fn add_diagnostic_with_fixes(&mut self, file_uri: Url, diagnostic: DiagnosticWithFixes) { | ||
122 | for fix in diagnostic.suggested_fixes { | ||
123 | self.add_suggested_fix_for_diagnostic(fix, &diagnostic.diagnostic); | ||
124 | } | ||
125 | self.add_diagnostic(file_uri, diagnostic.diagnostic); | ||
126 | } | ||
127 | |||
128 | fn add_diagnostic(&mut self, file_uri: Url, diagnostic: Diagnostic) { | ||
129 | let diagnostics = self.diagnostic_collection.entry(file_uri).or_default(); | ||
130 | |||
131 | // If we're building multiple targets it's possible we've already seen this diagnostic | ||
132 | let is_duplicate = diagnostics.iter().any(|d| are_diagnostics_equal(d, &diagnostic)); | ||
133 | if is_duplicate { | ||
134 | return; | ||
135 | } | ||
136 | |||
137 | diagnostics.push(diagnostic); | ||
138 | } | ||
139 | |||
140 | fn add_suggested_fix_for_diagnostic( | ||
141 | &mut self, | ||
142 | mut suggested_fix: SuggestedFix, | ||
143 | diagnostic: &Diagnostic, | ||
144 | ) { | ||
145 | let file_uri = suggested_fix.location.uri.clone(); | ||
146 | let file_suggestions = self.suggested_fix_collection.entry(file_uri).or_default(); | ||
147 | |||
148 | let existing_suggestion: Option<&mut SuggestedFix> = | ||
149 | file_suggestions.iter_mut().find(|s| s == &&suggested_fix); | ||
150 | if let Some(existing_suggestion) = existing_suggestion { | ||
151 | // The existing suggestion also applies to this new diagnostic | ||
152 | existing_suggestion.diagnostics.push(diagnostic.clone()); | ||
153 | } else { | ||
154 | // We haven't seen this suggestion before | ||
155 | suggested_fix.diagnostics.push(diagnostic.clone()); | ||
156 | file_suggestions.push(suggested_fix); | ||
157 | } | ||
158 | } | ||
159 | } | ||
160 | |||
161 | #[derive(Debug)] | 85 | #[derive(Debug)] |
162 | pub enum CheckTask { | 86 | pub enum CheckTask { |
163 | /// Request a clearing of all cached diagnostics from the check watcher | 87 | /// Request a clearing of all cached diagnostics from the check watcher |
164 | ClearDiagnostics, | 88 | ClearDiagnostics, |
165 | 89 | ||
166 | /// Request adding a diagnostic with fixes included to a file | 90 | /// Request adding a diagnostic with fixes included to a file |
167 | AddDiagnostic(Url, DiagnosticWithFixes), | 91 | AddDiagnostic { url: Url, diagnostic: Diagnostic, fixes: Vec<CodeActionOrCommand> }, |
168 | 92 | ||
169 | /// Request check progress notification to client | 93 | /// Request check progress notification to client |
170 | Status(WorkDoneProgress), | 94 | Status(WorkDoneProgress), |
@@ -279,10 +203,17 @@ impl CheckWatcherThread { | |||
279 | None => return, | 203 | None => return, |
280 | }; | 204 | }; |
281 | 205 | ||
282 | let MappedRustDiagnostic { location, diagnostic, suggested_fixes } = map_result; | 206 | let MappedRustDiagnostic { location, diagnostic, fixes } = map_result; |
207 | let fixes = fixes | ||
208 | .into_iter() | ||
209 | .map(|fix| { | ||
210 | CodeAction { diagnostics: Some(vec![diagnostic.clone()]), ..fix }.into() | ||
211 | }) | ||
212 | .collect(); | ||
283 | 213 | ||
284 | let diagnostic = DiagnosticWithFixes { diagnostic, suggested_fixes }; | 214 | task_send |
285 | task_send.send(CheckTask::AddDiagnostic(location.uri, diagnostic)).unwrap(); | 215 | .send(CheckTask::AddDiagnostic { url: location.uri, diagnostic, fixes }) |
216 | .unwrap(); | ||
286 | } | 217 | } |
287 | 218 | ||
288 | CheckEvent::Msg(Message::BuildScriptExecuted(_msg)) => {} | 219 | CheckEvent::Msg(Message::BuildScriptExecuted(_msg)) => {} |
@@ -294,7 +225,7 @@ impl CheckWatcherThread { | |||
294 | #[derive(Debug)] | 225 | #[derive(Debug)] |
295 | pub struct DiagnosticWithFixes { | 226 | pub struct DiagnosticWithFixes { |
296 | diagnostic: Diagnostic, | 227 | diagnostic: Diagnostic, |
297 | suggested_fixes: Vec<SuggestedFix>, | 228 | fixes: Vec<CodeAction>, |
298 | } | 229 | } |
299 | 230 | ||
300 | /// WatchThread exists to wrap around the communication needed to be able to | 231 | /// WatchThread exists to wrap around the communication needed to be able to |
@@ -429,10 +360,3 @@ impl std::ops::Drop for WatchThread { | |||
429 | } | 360 | } |
430 | } | 361 | } |
431 | } | 362 | } |
432 | |||
433 | fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool { | ||
434 | left.source == right.source | ||
435 | && left.severity == right.severity | ||
436 | && left.range == right.range | ||
437 | && left.message == right.message | ||
438 | } | ||
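Note: CheckTask::AddDiagnostic is now a struct variant carrying the url, the diagnostic, and its fixes as CodeActionOrCommand values; the watcher thread attaches the parent diagnostic to each fix with struct-update syntax before sending. Condensed from handle_message above (the helper is illustrative, the real code is inlined; MappedRustDiagnostic and CheckTask come from this crate):

use lsp_types::{CodeAction, CodeActionOrCommand};

fn to_check_task(mapped: MappedRustDiagnostic) -> CheckTask {
    let MappedRustDiagnostic { location, diagnostic, fixes } = mapped;
    let fixes: Vec<CodeActionOrCommand> = fixes
        .into_iter()
        // Each fix gets the diagnostic it belongs to, so the client can group them.
        .map(|fix| CodeAction { diagnostics: Some(vec![diagnostic.clone()]), ..fix }.into())
        .collect();
    CheckTask::AddDiagnostic { url: location.uri, diagnostic, fixes }
}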
diff --git a/crates/ra_hir_def/src/nameres/collector.rs b/crates/ra_hir_def/src/nameres/collector.rs
index 7499dff31..6352c71ef 100644
--- a/crates/ra_hir_def/src/nameres/collector.rs
+++ b/crates/ra_hir_def/src/nameres/collector.rs
@@ -22,7 +22,7 @@ use crate::{ | |||
22 | diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, | 22 | diagnostics::DefDiagnostic, mod_resolution::ModDir, path_resolution::ReachedFixedPoint, |
23 | raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, | 23 | raw, BuiltinShadowMode, CrateDefMap, ModuleData, ModuleOrigin, ResolveMode, |
24 | }, | 24 | }, |
25 | path::{ModPath, PathKind}, | 25 | path::{ImportAlias, ModPath, PathKind}, |
26 | per_ns::PerNs, | 26 | per_ns::PerNs, |
27 | visibility::Visibility, | 27 | visibility::Visibility, |
28 | AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, | 28 | AdtId, AstId, ConstLoc, ContainerId, EnumLoc, EnumVariantId, FunctionLoc, ImplLoc, Intern, |
@@ -438,7 +438,11 @@ where | |||
438 | } else { | 438 | } else { |
439 | match import.path.segments.last() { | 439 | match import.path.segments.last() { |
440 | Some(last_segment) => { | 440 | Some(last_segment) => { |
441 | let name = import.alias.clone().unwrap_or_else(|| last_segment.clone()); | 441 | let name = match &import.alias { |
442 | Some(ImportAlias::Alias(name)) => name.clone(), | ||
443 | Some(ImportAlias::Underscore) => last_segment.clone(), // FIXME rust-analyzer#2736 | ||
444 | None => last_segment.clone(), | ||
445 | }; | ||
442 | log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); | 446 | log::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def); |
443 | 447 | ||
444 | // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 | 448 | // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658 |
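Note: with the new ImportAlias the collector has to pick a binding name explicitly; an `as _` import currently falls back to the last path segment. Equivalent to the match above (names illustrative):

fn binding_name(alias: &Option<ImportAlias>, last_segment: &Name) -> Name {
    match alias {
        Some(ImportAlias::Alias(name)) => name.clone(),
        // FIXME rust-analyzer#2736: underscore aliases are treated like plain imports for now.
        Some(ImportAlias::Underscore) | None => last_segment.clone(),
    }
}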
diff --git a/crates/ra_hir_def/src/nameres/raw.rs b/crates/ra_hir_def/src/nameres/raw.rs
index fac1169ef..650cf1f98 100644
--- a/crates/ra_hir_def/src/nameres/raw.rs
+++ b/crates/ra_hir_def/src/nameres/raw.rs
@@ -22,8 +22,11 @@ use ra_syntax::{ | |||
22 | use test_utils::tested_by; | 22 | use test_utils::tested_by; |
23 | 23 | ||
24 | use crate::{ | 24 | use crate::{ |
25 | attr::Attrs, db::DefDatabase, path::ModPath, visibility::RawVisibility, FileAstId, HirFileId, | 25 | attr::Attrs, |
26 | InFile, | 26 | db::DefDatabase, |
27 | path::{ImportAlias, ModPath}, | ||
28 | visibility::RawVisibility, | ||
29 | FileAstId, HirFileId, InFile, | ||
27 | }; | 30 | }; |
28 | 31 | ||
29 | /// `RawItems` is a set of top-level items in a file (except for impls). | 32 | /// `RawItems` is a set of top-level items in a file (except for impls). |
@@ -145,7 +148,7 @@ impl_arena_id!(Import); | |||
145 | #[derive(Debug, Clone, PartialEq, Eq)] | 148 | #[derive(Debug, Clone, PartialEq, Eq)] |
146 | pub struct ImportData { | 149 | pub struct ImportData { |
147 | pub(super) path: ModPath, | 150 | pub(super) path: ModPath, |
148 | pub(super) alias: Option<Name>, | 151 | pub(super) alias: Option<ImportAlias>, |
149 | pub(super) is_glob: bool, | 152 | pub(super) is_glob: bool, |
150 | pub(super) is_prelude: bool, | 153 | pub(super) is_prelude: bool, |
151 | pub(super) is_extern_crate: bool, | 154 | pub(super) is_extern_crate: bool, |
@@ -353,7 +356,11 @@ impl RawItemsCollector { | |||
353 | let path = ModPath::from_name_ref(&name_ref); | 356 | let path = ModPath::from_name_ref(&name_ref); |
354 | let visibility = | 357 | let visibility = |
355 | RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); | 358 | RawVisibility::from_ast_with_hygiene(extern_crate.visibility(), &self.hygiene); |
356 | let alias = extern_crate.alias().and_then(|a| a.name()).map(|it| it.as_name()); | 359 | let alias = extern_crate.alias().map(|a| { |
360 | a.name() | ||
361 | .map(|it| it.as_name()) | ||
362 | .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a)) | ||
363 | }); | ||
357 | let attrs = self.parse_attrs(&extern_crate); | 364 | let attrs = self.parse_attrs(&extern_crate); |
358 | // FIXME: cfg_attr | 365 | // FIXME: cfg_attr |
359 | let is_macro_use = extern_crate.has_atom_attr("macro_use"); | 366 | let is_macro_use = extern_crate.has_atom_attr("macro_use"); |
diff --git a/crates/ra_hir_def/src/path.rs b/crates/ra_hir_def/src/path.rs
index a150b899f..246032c13 100644
--- a/crates/ra_hir_def/src/path.rs
+++ b/crates/ra_hir_def/src/path.rs
@@ -34,6 +34,14 @@ pub enum PathKind { | |||
34 | DollarCrate(CrateId), | 34 | DollarCrate(CrateId), |
35 | } | 35 | } |
36 | 36 | ||
37 | #[derive(Debug, Clone, PartialEq, Eq)] | ||
38 | pub enum ImportAlias { | ||
39 | /// Unnamed alias, as in `use Foo as _;` | ||
40 | Underscore, | ||
41 | /// Named alias | ||
42 | Alias(Name), | ||
43 | } | ||
44 | |||
37 | impl ModPath { | 45 | impl ModPath { |
38 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { | 46 | pub fn from_src(path: ast::Path, hygiene: &Hygiene) -> Option<ModPath> { |
39 | lower::lower_path(path, hygiene).map(|it| it.mod_path) | 47 | lower::lower_path(path, hygiene).map(|it| it.mod_path) |
@@ -57,7 +65,7 @@ impl ModPath { | |||
57 | pub(crate) fn expand_use_item( | 65 | pub(crate) fn expand_use_item( |
58 | item_src: InFile<ast::UseItem>, | 66 | item_src: InFile<ast::UseItem>, |
59 | hygiene: &Hygiene, | 67 | hygiene: &Hygiene, |
60 | mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<Name>), | 68 | mut cb: impl FnMut(ModPath, &ast::UseTree, /* is_glob */ bool, Option<ImportAlias>), |
61 | ) { | 69 | ) { |
62 | if let Some(tree) = item_src.value.use_tree() { | 70 | if let Some(tree) = item_src.value.use_tree() { |
63 | lower::lower_use_tree(None, tree, hygiene, &mut cb); | 71 | lower::lower_use_tree(None, tree, hygiene, &mut cb); |
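Note: ImportAlias distinguishes `use foo::Bar as Baz;` (Alias) from `use foo::Bar as _;` (Underscore), while a missing alias stays None. Both lowering sites (raw.rs above and lower_use.rs below) use the same pattern; a sketch, assuming ra_syntax's ast::Alias node with its NameOwner impl:

use hir_expand::name::AsName;
use ra_syntax::ast::{self, NameOwner};

fn lower_alias(alias: Option<ast::Alias>) -> Option<ImportAlias> {
    alias.map(|a| {
        // `as _` has no name node, so the absence of a name means Underscore.
        a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
    })
}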
diff --git a/crates/ra_hir_def/src/path/lower/lower_use.rs b/crates/ra_hir_def/src/path/lower/lower_use.rs
index 531878174..d2bc9d193 100644
--- a/crates/ra_hir_def/src/path/lower/lower_use.rs
+++ b/crates/ra_hir_def/src/path/lower/lower_use.rs
@@ -4,20 +4,17 @@ | |||
4 | use std::iter; | 4 | use std::iter; |
5 | 5 | ||
6 | use either::Either; | 6 | use either::Either; |
7 | use hir_expand::{ | 7 | use hir_expand::{hygiene::Hygiene, name::AsName}; |
8 | hygiene::Hygiene, | ||
9 | name::{AsName, Name}, | ||
10 | }; | ||
11 | use ra_syntax::ast::{self, NameOwner}; | 8 | use ra_syntax::ast::{self, NameOwner}; |
12 | use test_utils::tested_by; | 9 | use test_utils::tested_by; |
13 | 10 | ||
14 | use crate::path::{ModPath, PathKind}; | 11 | use crate::path::{ImportAlias, ModPath, PathKind}; |
15 | 12 | ||
16 | pub(crate) fn lower_use_tree( | 13 | pub(crate) fn lower_use_tree( |
17 | prefix: Option<ModPath>, | 14 | prefix: Option<ModPath>, |
18 | tree: ast::UseTree, | 15 | tree: ast::UseTree, |
19 | hygiene: &Hygiene, | 16 | hygiene: &Hygiene, |
20 | cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<Name>), | 17 | cb: &mut dyn FnMut(ModPath, &ast::UseTree, bool, Option<ImportAlias>), |
21 | ) { | 18 | ) { |
22 | if let Some(use_tree_list) = tree.use_tree_list() { | 19 | if let Some(use_tree_list) = tree.use_tree_list() { |
23 | let prefix = match tree.path() { | 20 | let prefix = match tree.path() { |
@@ -34,7 +31,11 @@ pub(crate) fn lower_use_tree( | |||
34 | lower_use_tree(prefix.clone(), child_tree, hygiene, cb); | 31 | lower_use_tree(prefix.clone(), child_tree, hygiene, cb); |
35 | } | 32 | } |
36 | } else { | 33 | } else { |
37 | let alias = tree.alias().and_then(|a| a.name()).map(|a| a.as_name()); | 34 | let alias = tree.alias().map(|a| { |
35 | a.name() | ||
36 | .map(|it| it.as_name()) | ||
37 | .map_or(ImportAlias::Underscore, |a| ImportAlias::Alias(a)) | ||
38 | }); | ||
38 | let is_glob = tree.has_star(); | 39 | let is_glob = tree.has_star(); |
39 | if let Some(ast_path) = tree.path() { | 40 | if let Some(ast_path) = tree.path() { |
40 | // Handle self in a path. | 41 | // Handle self in a path. |
diff --git a/crates/ra_ide/src/imports_locator.rs b/crates/ra_ide/src/imports_locator.rs
index 48b014c7d..9e1a1c1ec 100644
--- a/crates/ra_ide/src/imports_locator.rs
+++ b/crates/ra_ide/src/imports_locator.rs
@@ -64,12 +64,9 @@ impl ImportsLocator for ImportsLocatorIde<'_> { | |||
64 | .into_iter() | 64 | .into_iter() |
65 | .chain(lib_results.into_iter()) | 65 | .chain(lib_results.into_iter()) |
66 | .filter_map(|import_candidate| self.get_name_definition(db, &import_candidate)) | 66 | .filter_map(|import_candidate| self.get_name_definition(db, &import_candidate)) |
67 | .filter_map(|name_definition_to_import| { | 67 | .filter_map(|name_definition_to_import| match name_definition_to_import.kind { |
68 | if let NameKind::Def(module_def) = name_definition_to_import.kind { | 68 | NameKind::Def(module_def) => Some(module_def), |
69 | Some(module_def) | 69 | _ => None, |
70 | } else { | ||
71 | None | ||
72 | } | ||
73 | }) | 70 | }) |
74 | .collect() | 71 | .collect() |
75 | } | 72 | } |
diff --git a/crates/ra_ide/src/inlay_hints.rs b/crates/ra_ide/src/inlay_hints.rs
index 393ca9447..de447a5aa 100644
--- a/crates/ra_ide/src/inlay_hints.rs
+++ b/crates/ra_ide/src/inlay_hints.rs
@@ -376,7 +376,7 @@ fn main() { | |||
376 | let mut start = 0; | 376 | let mut start = 0; |
377 | (0..2).for_each(|increment| { | 377 | (0..2).for_each(|increment| { |
378 | start += increment; | 378 | start += increment; |
379 | }) | 379 | }); |
380 | 380 | ||
381 | let multiply = |a, b, c, d| a * b * c * d; | 381 | let multiply = |a, b, c, d| a * b * c * d; |
382 | let _: i32 = multiply(1, 2, 3, 4); | 382 | let _: i32 = multiply(1, 2, 3, 4); |
@@ -399,37 +399,37 @@ fn main() { | |||
399 | label: "i32", | 399 | label: "i32", |
400 | }, | 400 | }, |
401 | InlayHint { | 401 | InlayHint { |
402 | range: [114; 122), | 402 | range: [115; 123), |
403 | kind: TypeHint, | 403 | kind: TypeHint, |
404 | label: "|…| -> i32", | 404 | label: "|…| -> i32", |
405 | }, | 405 | }, |
406 | InlayHint { | 406 | InlayHint { |
407 | range: [126; 127), | 407 | range: [127; 128), |
408 | kind: TypeHint, | 408 | kind: TypeHint, |
409 | label: "i32", | 409 | label: "i32", |
410 | }, | 410 | }, |
411 | InlayHint { | 411 | InlayHint { |
412 | range: [129; 130), | 412 | range: [130; 131), |
413 | kind: TypeHint, | 413 | kind: TypeHint, |
414 | label: "i32", | 414 | label: "i32", |
415 | }, | 415 | }, |
416 | InlayHint { | 416 | InlayHint { |
417 | range: [132; 133), | 417 | range: [133; 134), |
418 | kind: TypeHint, | 418 | kind: TypeHint, |
419 | label: "i32", | 419 | label: "i32", |
420 | }, | 420 | }, |
421 | InlayHint { | 421 | InlayHint { |
422 | range: [135; 136), | 422 | range: [136; 137), |
423 | kind: TypeHint, | 423 | kind: TypeHint, |
424 | label: "i32", | 424 | label: "i32", |
425 | }, | 425 | }, |
426 | InlayHint { | 426 | InlayHint { |
427 | range: [200; 212), | 427 | range: [201; 213), |
428 | kind: TypeHint, | 428 | kind: TypeHint, |
429 | label: "&|…| -> i32", | 429 | label: "&|…| -> i32", |
430 | }, | 430 | }, |
431 | InlayHint { | 431 | InlayHint { |
432 | range: [235; 244), | 432 | range: [236; 245), |
433 | kind: TypeHint, | 433 | kind: TypeHint, |
434 | label: "|| -> i32", | 434 | label: "|| -> i32", |
435 | }, | 435 | }, |
diff --git a/crates/ra_ide/src/references/rename.rs b/crates/ra_ide/src/references/rename.rs
index 626efb603..9a84c1c88 100644
--- a/crates/ra_ide/src/references/rename.rs
+++ b/crates/ra_ide/src/references/rename.rs
@@ -2,7 +2,9 @@ | |||
2 | 2 | ||
3 | use hir::ModuleSource; | 3 | use hir::ModuleSource; |
4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; | 4 | use ra_db::{RelativePath, RelativePathBuf, SourceDatabase, SourceDatabaseExt}; |
5 | use ra_syntax::{algo::find_node_at_offset, ast, tokenize, AstNode, SyntaxKind, SyntaxNode}; | 5 | use ra_syntax::{ |
6 | algo::find_node_at_offset, ast, lex_single_valid_syntax_kind, AstNode, SyntaxKind, SyntaxNode, | ||
7 | }; | ||
6 | use ra_text_edit::TextEdit; | 8 | use ra_text_edit::TextEdit; |
7 | 9 | ||
8 | use crate::{ | 10 | use crate::{ |
@@ -17,11 +19,9 @@ pub(crate) fn rename( | |||
17 | position: FilePosition, | 19 | position: FilePosition, |
18 | new_name: &str, | 20 | new_name: &str, |
19 | ) -> Option<RangeInfo<SourceChange>> { | 21 | ) -> Option<RangeInfo<SourceChange>> { |
20 | let tokens = tokenize(new_name); | 22 | match lex_single_valid_syntax_kind(new_name)? { |
21 | if tokens.len() != 1 | 23 | SyntaxKind::IDENT | SyntaxKind::UNDERSCORE => (), |
22 | || (tokens[0].kind != SyntaxKind::IDENT && tokens[0].kind != SyntaxKind::UNDERSCORE) | 24 | _ => return None, |
23 | { | ||
24 | return None; | ||
25 | } | 25 | } |
26 | 26 | ||
27 | let parse = db.parse(position.file_id); | 27 | let parse = db.parse(position.file_id); |
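Note: the tokenize-and-count check is replaced by lex_single_valid_syntax_kind, which (per its use here) yields a kind only when the whole input lexes as exactly one valid token; the rename target must be an identifier or `_`. An equivalent standalone check (helper name illustrative):

use ra_syntax::{lex_single_valid_syntax_kind, SyntaxKind};

fn is_valid_new_name(new_name: &str) -> bool {
    match lex_single_valid_syntax_kind(new_name) {
        Some(SyntaxKind::IDENT) | Some(SyntaxKind::UNDERSCORE) => true,
        _ => false,
    }
}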
diff --git a/crates/ra_lsp_server/src/diagnostics.rs b/crates/ra_lsp_server/src/diagnostics.rs
new file mode 100644
index 000000000..ea08bce24
--- /dev/null
+++ b/crates/ra_lsp_server/src/diagnostics.rs
@@ -0,0 +1,85 @@ | |||
1 | //! Book keeping for keeping diagnostics easily in sync with the client. | ||
2 | use lsp_types::{CodeActionOrCommand, Diagnostic, Range}; | ||
3 | use ra_ide::FileId; | ||
4 | use std::{collections::HashMap, sync::Arc}; | ||
5 | |||
6 | pub type CheckFixes = Arc<HashMap<FileId, Vec<Fix>>>; | ||
7 | |||
8 | #[derive(Debug, Default, Clone)] | ||
9 | pub struct DiagnosticCollection { | ||
10 | pub native: HashMap<FileId, Vec<Diagnostic>>, | ||
11 | pub check: HashMap<FileId, Vec<Diagnostic>>, | ||
12 | pub check_fixes: CheckFixes, | ||
13 | } | ||
14 | |||
15 | #[derive(Debug, Clone)] | ||
16 | pub struct Fix { | ||
17 | pub range: Range, | ||
18 | pub action: CodeActionOrCommand, | ||
19 | } | ||
20 | |||
21 | #[derive(Debug)] | ||
22 | pub enum DiagnosticTask { | ||
23 | ClearCheck, | ||
24 | AddCheck(FileId, Diagnostic, Vec<CodeActionOrCommand>), | ||
25 | SetNative(FileId, Vec<Diagnostic>), | ||
26 | } | ||
27 | |||
28 | impl DiagnosticCollection { | ||
29 | pub fn clear_check(&mut self) -> Vec<FileId> { | ||
30 | Arc::make_mut(&mut self.check_fixes).clear(); | ||
31 | self.check.drain().map(|(key, _value)| key).collect() | ||
32 | } | ||
33 | |||
34 | pub fn add_check_diagnostic( | ||
35 | &mut self, | ||
36 | file_id: FileId, | ||
37 | diagnostic: Diagnostic, | ||
38 | fixes: Vec<CodeActionOrCommand>, | ||
39 | ) { | ||
40 | let diagnostics = self.check.entry(file_id).or_default(); | ||
41 | for existing_diagnostic in diagnostics.iter() { | ||
42 | if are_diagnostics_equal(&existing_diagnostic, &diagnostic) { | ||
43 | return; | ||
44 | } | ||
45 | } | ||
46 | |||
47 | let check_fixes = Arc::make_mut(&mut self.check_fixes); | ||
48 | check_fixes | ||
49 | .entry(file_id) | ||
50 | .or_default() | ||
51 | .extend(fixes.into_iter().map(|action| Fix { range: diagnostic.range, action })); | ||
52 | diagnostics.push(diagnostic); | ||
53 | } | ||
54 | |||
55 | pub fn set_native_diagnostics(&mut self, file_id: FileId, diagnostics: Vec<Diagnostic>) { | ||
56 | self.native.insert(file_id, diagnostics); | ||
57 | } | ||
58 | |||
59 | pub fn diagnostics_for(&self, file_id: FileId) -> impl Iterator<Item = &Diagnostic> { | ||
60 | let native = self.native.get(&file_id).into_iter().flatten(); | ||
61 | let check = self.check.get(&file_id).into_iter().flatten(); | ||
62 | native.chain(check) | ||
63 | } | ||
64 | |||
65 | pub fn handle_task(&mut self, task: DiagnosticTask) -> Vec<FileId> { | ||
66 | match task { | ||
67 | DiagnosticTask::ClearCheck => self.clear_check(), | ||
68 | DiagnosticTask::AddCheck(file_id, diagnostic, fixes) => { | ||
69 | self.add_check_diagnostic(file_id, diagnostic, fixes); | ||
70 | vec![file_id] | ||
71 | } | ||
72 | DiagnosticTask::SetNative(file_id, diagnostics) => { | ||
73 | self.set_native_diagnostics(file_id, diagnostics); | ||
74 | vec![file_id] | ||
75 | } | ||
76 | } | ||
77 | } | ||
78 | } | ||
79 | |||
80 | fn are_diagnostics_equal(left: &Diagnostic, right: &Diagnostic) -> bool { | ||
81 | left.source == right.source | ||
82 | && left.severity == right.severity | ||
83 | && left.range == right.range | ||
84 | && left.message == right.message | ||
85 | } | ||
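Note: DiagnosticCollection is now the single owner of both native (ra_ide) and check (cargo watch) diagnostics; handle_task reports which files changed, diagnostics_for merges the two sources for publishing, and check_fixes sits behind an Arc so snapshots can share it cheaply while the main loop mutates it copy-on-write via Arc::make_mut. A small usage sketch, with types as imported at the top of the file (the real publishing side lives in main_loop.rs further down):

fn apply(diagnostics: &mut DiagnosticCollection, task: DiagnosticTask) -> Vec<(FileId, Vec<Diagnostic>)> {
    // handle_task returns the FileIds whose published diagnostics are now stale.
    let changed = diagnostics.handle_task(task);
    let mut out = Vec::new();
    for file_id in changed {
        // Merge native (ra_ide) and check (cargo) diagnostics for this file.
        let merged = diagnostics.diagnostics_for(file_id).cloned().collect();
        out.push((file_id, merged));
    }
    out
}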
diff --git a/crates/ra_lsp_server/src/lib.rs b/crates/ra_lsp_server/src/lib.rs
index 2ca149fd5..1208c1343 100644
--- a/crates/ra_lsp_server/src/lib.rs
+++ b/crates/ra_lsp_server/src/lib.rs
@@ -29,6 +29,7 @@ mod markdown; | |||
29 | pub mod req; | 29 | pub mod req; |
30 | mod config; | 30 | mod config; |
31 | mod world; | 31 | mod world; |
32 | mod diagnostics; | ||
32 | 33 | ||
33 | pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; | 34 | pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>; |
34 | pub use crate::{ | 35 | pub use crate::{ |
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs
index 508fe08c0..12961ba37 100644
--- a/crates/ra_lsp_server/src/main_loop.rs
+++ b/crates/ra_lsp_server/src/main_loop.rs
@@ -17,16 +17,17 @@ use std::{ | |||
17 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; | 17 | use crossbeam_channel::{select, unbounded, RecvError, Sender}; |
18 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; | 18 | use lsp_server::{Connection, ErrorCode, Message, Notification, Request, RequestId, Response}; |
19 | use lsp_types::{ClientCapabilities, NumberOrString}; | 19 | use lsp_types::{ClientCapabilities, NumberOrString}; |
20 | use ra_cargo_watch::{CheckOptions, CheckTask}; | 20 | use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckTask}; |
21 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; | 21 | use ra_ide::{Canceled, FeatureFlags, FileId, LibraryData, SourceRootId}; |
22 | use ra_prof::profile; | 22 | use ra_prof::profile; |
23 | use ra_vfs::{VfsTask, Watch}; | 23 | use ra_vfs::{VfsFile, VfsTask, Watch}; |
24 | use relative_path::RelativePathBuf; | 24 | use relative_path::RelativePathBuf; |
25 | use rustc_hash::FxHashSet; | 25 | use rustc_hash::FxHashSet; |
26 | use serde::{de::DeserializeOwned, Serialize}; | 26 | use serde::{de::DeserializeOwned, Serialize}; |
27 | use threadpool::ThreadPool; | 27 | use threadpool::ThreadPool; |
28 | 28 | ||
29 | use crate::{ | 29 | use crate::{ |
30 | diagnostics::DiagnosticTask, | ||
30 | main_loop::{ | 31 | main_loop::{ |
31 | pending_requests::{PendingRequest, PendingRequests}, | 32 | pending_requests::{PendingRequest, PendingRequests}, |
32 | subscriptions::Subscriptions, | 33 | subscriptions::Subscriptions, |
@@ -254,6 +255,7 @@ pub fn main_loop( | |||
254 | enum Task { | 255 | enum Task { |
255 | Respond(Response), | 256 | Respond(Response), |
256 | Notify(Notification), | 257 | Notify(Notification), |
258 | Diagnostic(DiagnosticTask), | ||
257 | } | 259 | } |
258 | 260 | ||
259 | enum Event { | 261 | enum Event { |
@@ -359,7 +361,7 @@ fn loop_turn( | |||
359 | world_state.maybe_collect_garbage(); | 361 | world_state.maybe_collect_garbage(); |
360 | loop_state.in_flight_libraries -= 1; | 362 | loop_state.in_flight_libraries -= 1; |
361 | } | 363 | } |
362 | Event::CheckWatcher(task) => on_check_task(pool, task, world_state, task_sender)?, | 364 | Event::CheckWatcher(task) => on_check_task(task, world_state, task_sender)?, |
363 | Event::Msg(msg) => match msg { | 365 | Event::Msg(msg) => match msg { |
364 | Message::Request(req) => on_request( | 366 | Message::Request(req) => on_request( |
365 | world_state, | 367 | world_state, |
@@ -464,6 +466,7 @@ fn on_task( | |||
464 | Task::Notify(n) => { | 466 | Task::Notify(n) => { |
465 | msg_sender.send(n.into()).unwrap(); | 467 | msg_sender.send(n.into()).unwrap(); |
466 | } | 468 | } |
469 | Task::Diagnostic(task) => on_diagnostic_task(task, msg_sender, state), | ||
467 | } | 470 | } |
468 | } | 471 | } |
469 | 472 | ||
@@ -621,23 +624,26 @@ fn on_notification( | |||
621 | } | 624 | } |
622 | 625 | ||
623 | fn on_check_task( | 626 | fn on_check_task( |
624 | pool: &ThreadPool, | ||
625 | task: CheckTask, | 627 | task: CheckTask, |
626 | world_state: &mut WorldState, | 628 | world_state: &mut WorldState, |
627 | task_sender: &Sender<Task>, | 629 | task_sender: &Sender<Task>, |
628 | ) -> Result<()> { | 630 | ) -> Result<()> { |
629 | let urls = match task { | 631 | match task { |
630 | CheckTask::ClearDiagnostics => { | 632 | CheckTask::ClearDiagnostics => { |
631 | let state = Arc::get_mut(&mut world_state.check_watcher.state) | 633 | task_sender.send(Task::Diagnostic(DiagnosticTask::ClearCheck))?; |
632 | .expect("couldn't get check watcher state as mutable"); | ||
633 | state.clear() | ||
634 | } | 634 | } |
635 | 635 | ||
636 | CheckTask::AddDiagnostic(url, diagnostic) => { | 636 | CheckTask::AddDiagnostic { url, diagnostic, fixes } => { |
637 | let state = Arc::get_mut(&mut world_state.check_watcher.state) | 637 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; |
638 | .expect("couldn't get check watcher state as mutable"); | 638 | let file_id = world_state |
639 | state.add_diagnostic_with_fixes(url.clone(), diagnostic); | 639 | .vfs |
640 | vec![url] | 640 | .read() |
641 | .path2file(&path) | ||
642 | .map(|it| FileId(it.0)) | ||
643 | .ok_or_else(|| format!("unknown file: {}", path.to_string_lossy()))?; | ||
644 | |||
645 | task_sender | ||
646 | .send(Task::Diagnostic(DiagnosticTask::AddCheck(file_id, diagnostic, fixes)))?; | ||
641 | } | 647 | } |
642 | 648 | ||
643 | CheckTask::Status(progress) => { | 649 | CheckTask::Status(progress) => { |
@@ -647,31 +653,30 @@ fn on_check_task( | |||
647 | }; | 653 | }; |
648 | let not = notification_new::<req::Progress>(params); | 654 | let not = notification_new::<req::Progress>(params); |
649 | task_sender.send(Task::Notify(not)).unwrap(); | 655 | task_sender.send(Task::Notify(not)).unwrap(); |
650 | Vec::new() | ||
651 | } | 656 | } |
652 | }; | 657 | }; |
653 | 658 | ||
654 | let subscriptions = urls | 659 | Ok(()) |
655 | .into_iter() | 660 | } |
656 | .map(|url| { | ||
657 | let path = url.to_file_path().map_err(|()| format!("invalid uri: {}", url))?; | ||
658 | Ok(world_state.vfs.read().path2file(&path).map(|it| FileId(it.0))) | ||
659 | }) | ||
660 | .filter_map(|res| res.transpose()) | ||
661 | .collect::<Result<Vec<_>>>()?; | ||
662 | 661 | ||
663 | // We manually send a diagnostic update when the watcher asks | 662 | fn on_diagnostic_task(task: DiagnosticTask, msg_sender: &Sender<Message>, state: &mut WorldState) { |
664 | // us to, to avoid the issue of having to change the file to | 663 | let subscriptions = state.diagnostics.handle_task(task); |
665 | // receive updated diagnostics. | ||
666 | update_file_notifications_on_threadpool( | ||
667 | pool, | ||
668 | world_state.snapshot(), | ||
669 | false, | ||
670 | task_sender.clone(), | ||
671 | subscriptions, | ||
672 | ); | ||
673 | 664 | ||
674 | Ok(()) | 665 | for file_id in subscriptions { |
666 | let path = state.vfs.read().file2path(VfsFile(file_id.0)); | ||
667 | let uri = match url_from_path_with_drive_lowercasing(&path) { | ||
668 | Ok(uri) => uri, | ||
669 | Err(err) => { | ||
670 | log::error!("Couldn't convert path to url ({}): {:?}", err, path.to_string_lossy()); | ||
671 | continue; | ||
672 | } | ||
673 | }; | ||
674 | |||
675 | let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect(); | ||
676 | let params = req::PublishDiagnosticsParams { uri, diagnostics, version: None }; | ||
677 | let not = notification_new::<req::PublishDiagnostics>(params); | ||
678 | msg_sender.send(not.into()).unwrap(); | ||
679 | } | ||
675 | } | 680 | } |
676 | 681 | ||
677 | struct PoolDispatcher<'a> { | 682 | struct PoolDispatcher<'a> { |
@@ -819,9 +824,8 @@ fn update_file_notifications_on_threadpool( | |||
819 | log::error!("failed to compute diagnostics: {:?}", e); | 824 | log::error!("failed to compute diagnostics: {:?}", e); |
820 | } | 825 | } |
821 | } | 826 | } |
822 | Ok(params) => { | 827 | Ok(task) => { |
823 | let not = notification_new::<req::PublishDiagnostics>(params); | 828 | task_sender.send(Task::Diagnostic(task)).unwrap(); |
824 | task_sender.send(Task::Notify(not)).unwrap(); | ||
825 | } | 829 | } |
826 | } | 830 | } |
827 | } | 831 | } |
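Note: diagnostics now flow through a dedicated Task::Diagnostic variant instead of being read out of CheckWatcher state. on_check_task translates the check watcher's Url into a FileId via the VFS and forwards a DiagnosticTask; on_diagnostic_task applies it to WorldState::diagnostics and republishes for every changed file; the threadpool-computed native diagnostics take the same route as DiagnosticTask::SetNative. The per-file publish step, condensed from on_diagnostic_task above and reusing the same items (error handling for the path-to-Url conversion omitted here):

fn publish(state: &WorldState, msg_sender: &Sender<Message>, file_id: FileId) {
    let path = state.vfs.read().file2path(VfsFile(file_id.0));
    let uri = url_from_path_with_drive_lowercasing(&path).unwrap();
    // Merged native + check diagnostics for this file.
    let diagnostics = state.diagnostics.diagnostics_for(file_id).cloned().collect();
    let params = req::PublishDiagnosticsParams { uri, diagnostics, version: None };
    msg_sender.send(notification_new::<req::PublishDiagnostics>(params).into()).unwrap();
}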
diff --git a/crates/ra_lsp_server/src/main_loop/handlers.rs b/crates/ra_lsp_server/src/main_loop/handlers.rs
index 9aa1e7eea..282f6e8fc 100644
--- a/crates/ra_lsp_server/src/main_loop/handlers.rs
+++ b/crates/ra_lsp_server/src/main_loop/handlers.rs
@@ -33,6 +33,7 @@ use crate::{ | |||
33 | to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith, | 33 | to_call_hierarchy_item, to_location, Conv, ConvWith, FoldConvCtx, MapConvWith, TryConvWith, |
34 | TryConvWithToVec, | 34 | TryConvWithToVec, |
35 | }, | 35 | }, |
36 | diagnostics::DiagnosticTask, | ||
36 | req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind}, | 37 | req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind}, |
37 | world::WorldSnapshot, | 38 | world::WorldSnapshot, |
38 | LspError, Result, | 39 | LspError, Result, |
@@ -676,28 +677,12 @@ pub fn handle_code_action( | |||
676 | res.push(action.into()); | 677 | res.push(action.into()); |
677 | } | 678 | } |
678 | 679 | ||
679 | for fix in world.check_watcher.fixes_for(¶ms.text_document.uri).into_iter().flatten() { | 680 | for fix in world.check_fixes.get(&file_id).into_iter().flatten() { |
680 | let fix_range = fix.location.range.conv_with(&line_index); | 681 | let fix_range = fix.range.conv_with(&line_index); |
681 | if fix_range.intersection(&range).is_none() { | 682 | if fix_range.intersection(&range).is_none() { |
682 | continue; | 683 | continue; |
683 | } | 684 | } |
684 | 685 | res.push(fix.action.clone()); | |
685 | let edit = { | ||
686 | let edits = vec![TextEdit::new(fix.location.range, fix.replacement.clone())]; | ||
687 | let mut edit_map = std::collections::HashMap::new(); | ||
688 | edit_map.insert(fix.location.uri.clone(), edits); | ||
689 | WorkspaceEdit::new(edit_map) | ||
690 | }; | ||
691 | |||
692 | let action = CodeAction { | ||
693 | title: fix.title.clone(), | ||
694 | kind: Some("quickfix".to_string()), | ||
695 | diagnostics: Some(fix.diagnostics.clone()), | ||
696 | edit: Some(edit), | ||
697 | command: None, | ||
698 | is_preferred: None, | ||
699 | }; | ||
700 | res.push(action.into()); | ||
701 | } | 686 | } |
702 | 687 | ||
703 | for assist in world.analysis().assists(FileRange { file_id, range })?.into_iter() { | 688 | for assist in world.analysis().assists(FileRange { file_id, range })?.into_iter() { |
@@ -875,14 +860,10 @@ pub fn handle_document_highlight( | |||
875 | )) | 860 | )) |
876 | } | 861 | } |
877 | 862 | ||
878 | pub fn publish_diagnostics( | 863 | pub fn publish_diagnostics(world: &WorldSnapshot, file_id: FileId) -> Result<DiagnosticTask> { |
879 | world: &WorldSnapshot, | ||
880 | file_id: FileId, | ||
881 | ) -> Result<req::PublishDiagnosticsParams> { | ||
882 | let _p = profile("publish_diagnostics"); | 864 | let _p = profile("publish_diagnostics"); |
883 | let uri = world.file_id_to_uri(file_id)?; | ||
884 | let line_index = world.analysis().file_line_index(file_id)?; | 865 | let line_index = world.analysis().file_line_index(file_id)?; |
885 | let mut diagnostics: Vec<Diagnostic> = world | 866 | let diagnostics: Vec<Diagnostic> = world |
886 | .analysis() | 867 | .analysis() |
887 | .diagnostics(file_id)? | 868 | .diagnostics(file_id)? |
888 | .into_iter() | 869 | .into_iter() |
@@ -896,10 +877,7 @@ pub fn publish_diagnostics( | |||
896 | tags: None, | 877 | tags: None, |
897 | }) | 878 | }) |
898 | .collect(); | 879 | .collect(); |
899 | if let Some(check_diags) = world.check_watcher.diagnostics_for(&uri) { | 880 | Ok(DiagnosticTask::SetNative(file_id, diagnostics)) |
900 | diagnostics.extend(check_diags.iter().cloned()); | ||
901 | } | ||
902 | Ok(req::PublishDiagnosticsParams { uri, diagnostics, version: None }) | ||
903 | } | 881 | } |
904 | 882 | ||
905 | pub fn publish_decorations( | 883 | pub fn publish_decorations( |
diff --git a/crates/ra_lsp_server/src/world.rs b/crates/ra_lsp_server/src/world.rs
index 3059ef9ec..1ee02b47c 100644
--- a/crates/ra_lsp_server/src/world.rs
+++ b/crates/ra_lsp_server/src/world.rs
@@ -12,9 +12,7 @@ use crossbeam_channel::{unbounded, Receiver}; | |||
12 | use lsp_server::ErrorCode; | 12 | use lsp_server::ErrorCode; |
13 | use lsp_types::Url; | 13 | use lsp_types::Url; |
14 | use parking_lot::RwLock; | 14 | use parking_lot::RwLock; |
15 | use ra_cargo_watch::{ | 15 | use ra_cargo_watch::{url_from_path_with_drive_lowercasing, CheckOptions, CheckWatcher}; |
16 | url_from_path_with_drive_lowercasing, CheckOptions, CheckState, CheckWatcher, | ||
17 | }; | ||
18 | use ra_ide::{ | 16 | use ra_ide::{ |
19 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FeatureFlags, FileId, LibraryData, | 17 | Analysis, AnalysisChange, AnalysisHost, CrateGraph, FeatureFlags, FileId, LibraryData, |
20 | SourceRootId, | 18 | SourceRootId, |
@@ -25,6 +23,7 @@ use ra_vfs_glob::{Glob, RustPackageFilterBuilder}; | |||
25 | use relative_path::RelativePathBuf; | 23 | use relative_path::RelativePathBuf; |
26 | 24 | ||
27 | use crate::{ | 25 | use crate::{ |
26 | diagnostics::{CheckFixes, DiagnosticCollection}, | ||
28 | main_loop::pending_requests::{CompletedRequest, LatestRequests}, | 27 | main_loop::pending_requests::{CompletedRequest, LatestRequests}, |
29 | LspError, Result, | 28 | LspError, Result, |
30 | }; | 29 | }; |
@@ -55,6 +54,7 @@ pub struct WorldState { | |||
55 | pub task_receiver: Receiver<VfsTask>, | 54 | pub task_receiver: Receiver<VfsTask>, |
56 | pub latest_requests: Arc<RwLock<LatestRequests>>, | 55 | pub latest_requests: Arc<RwLock<LatestRequests>>, |
57 | pub check_watcher: CheckWatcher, | 56 | pub check_watcher: CheckWatcher, |
57 | pub diagnostics: DiagnosticCollection, | ||
58 | } | 58 | } |
59 | 59 | ||
60 | /// An immutable snapshot of the world's state at a point in time. | 60 | /// An immutable snapshot of the world's state at a point in time. |
@@ -63,7 +63,7 @@ pub struct WorldSnapshot { | |||
63 | pub workspaces: Arc<Vec<ProjectWorkspace>>, | 63 | pub workspaces: Arc<Vec<ProjectWorkspace>>, |
64 | pub analysis: Analysis, | 64 | pub analysis: Analysis, |
65 | pub latest_requests: Arc<RwLock<LatestRequests>>, | 65 | pub latest_requests: Arc<RwLock<LatestRequests>>, |
66 | pub check_watcher: CheckState, | 66 | pub check_fixes: CheckFixes, |
67 | vfs: Arc<RwLock<Vfs>>, | 67 | vfs: Arc<RwLock<Vfs>>, |
68 | } | 68 | } |
69 | 69 | ||
@@ -159,6 +159,7 @@ impl WorldState { | |||
159 | task_receiver, | 159 | task_receiver, |
160 | latest_requests: Default::default(), | 160 | latest_requests: Default::default(), |
161 | check_watcher, | 161 | check_watcher, |
162 | diagnostics: Default::default(), | ||
162 | } | 163 | } |
163 | } | 164 | } |
164 | 165 | ||
@@ -220,7 +221,7 @@ impl WorldState { | |||
220 | analysis: self.analysis_host.analysis(), | 221 | analysis: self.analysis_host.analysis(), |
221 | vfs: Arc::clone(&self.vfs), | 222 | vfs: Arc::clone(&self.vfs), |
222 | latest_requests: Arc::clone(&self.latest_requests), | 223 | latest_requests: Arc::clone(&self.latest_requests), |
223 | check_watcher: (*self.check_watcher.state).clone(), | 224 | check_fixes: Arc::clone(&self.diagnostics.check_fixes), |
224 | } | 225 | } |
225 | } | 226 | } |
226 | 227 | ||
diff --git a/crates/ra_mbe/src/subtree_source.rs b/crates/ra_mbe/src/subtree_source.rs index b841c39d3..c9f42b3dd 100644 --- a/crates/ra_mbe/src/subtree_source.rs +++ b/crates/ra_mbe/src/subtree_source.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! FIXME: write short doc here |
2 | 2 | ||
3 | use ra_parser::{Token, TokenSource}; | 3 | use ra_parser::{Token, TokenSource}; |
4 | use ra_syntax::{classify_literal, SmolStr, SyntaxKind, SyntaxKind::*, T}; | 4 | use ra_syntax::{lex_single_valid_syntax_kind, SmolStr, SyntaxKind, SyntaxKind::*, T}; |
5 | use std::cell::{Cell, Ref, RefCell}; | 5 | use std::cell::{Cell, Ref, RefCell}; |
6 | use tt::buffer::{Cursor, TokenBuffer}; | 6 | use tt::buffer::{Cursor, TokenBuffer}; |
7 | 7 | ||
@@ -129,8 +129,9 @@ fn convert_delim(d: Option<tt::DelimiterKind>, closing: bool) -> TtToken { | |||
129 | } | 129 | } |
130 | 130 | ||
131 | fn convert_literal(l: &tt::Literal) -> TtToken { | 131 | fn convert_literal(l: &tt::Literal) -> TtToken { |
132 | let kind = | 132 | let kind = lex_single_valid_syntax_kind(&l.text) |
133 | classify_literal(&l.text).map(|tkn| tkn.kind).unwrap_or_else(|| match l.text.as_ref() { | 133 | .filter(|kind| kind.is_literal()) |
134 | .unwrap_or_else(|| match l.text.as_ref() { | ||
134 | "true" => T![true], | 135 | "true" => T![true], |
135 | "false" => T![false], | 136 | "false" => T![false], |
136 | _ => panic!("Fail to convert given literal {:#?}", &l), | 137 | _ => panic!("Fail to convert given literal {:#?}", &l), |
diff --git a/crates/ra_syntax/src/algo.rs b/crates/ra_syntax/src/algo.rs index 30a479f01..acf677e7d 100644 --- a/crates/ra_syntax/src/algo.rs +++ b/crates/ra_syntax/src/algo.rs | |||
@@ -81,7 +81,7 @@ impl TreeDiff { | |||
81 | /// Specifically, returns a map whose keys are descendants of `from` and values | 81 | /// Specifically, returns a map whose keys are descendants of `from` and values |
82 | /// are descendants of `to`, such that `replace_descendants(from, map) == to`. | 82 | /// are descendants of `to`, such that `replace_descendants(from, map) == to`. |
83 | /// | 83 | /// |
84 | /// A trivial solution is a singletom map `{ from: to }`, but this function | 84 | /// A trivial solution is a singleton map `{ from: to }`, but this function |
85 | /// tries to find a more fine-grained diff. | 85 | /// tries to find a more fine-grained diff. |
86 | pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { | 86 | pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff { |
87 | let mut buf = FxHashMap::default(); | 87 | let mut buf = FxHashMap::default(); |
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs index 36e648180..38c0e9a66 100644 --- a/crates/ra_syntax/src/ast/make.rs +++ b/crates/ra_syntax/src/ast/make.rs | |||
@@ -2,7 +2,7 @@ | |||
2 | //! of smaller pieces. | 2 | //! of smaller pieces. |
3 | use itertools::Itertools; | 3 | use itertools::Itertools; |
4 | 4 | ||
5 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken}; | 5 | use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxNode, SyntaxToken}; |
6 | 6 | ||
7 | pub fn name(text: &str) -> ast::Name { | 7 | pub fn name(text: &str) -> ast::Name { |
8 | ast_from_text(&format!("mod {};", text)) | 8 | ast_from_text(&format!("mod {};", text)) |
@@ -179,7 +179,16 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken { | |||
179 | 179 | ||
180 | fn ast_from_text<N: AstNode>(text: &str) -> N { | 180 | fn ast_from_text<N: AstNode>(text: &str) -> N { |
181 | let parse = SourceFile::parse(text); | 181 | let parse = SourceFile::parse(text); |
182 | parse.tree().syntax().descendants().find_map(N::cast).unwrap() | 182 | let node = parse.tree().syntax().descendants().find_map(N::cast).unwrap(); |
183 | let node = node.syntax().clone(); | ||
184 | let node = unroot(node); | ||
185 | let node = N::cast(node).unwrap(); | ||
186 | assert_eq!(node.syntax().text_range().start(), 0.into()); | ||
187 | node | ||
188 | } | ||
189 | |||
190 | fn unroot(n: SyntaxNode) -> SyntaxNode { | ||
191 | SyntaxNode::new_root(n.green().clone()) | ||
183 | } | 192 | } |
184 | 193 | ||
185 | pub mod tokens { | 194 | pub mod tokens { |
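The `unroot` step re-roots the freshly parsed fragment, so nodes produced by `ast_from_text` no longer carry offsets from their throwaway scaffold file; the new `assert_eq!` guards exactly that. An illustrative sketch, assuming `ast::make` is publicly reachable:

    use ra_syntax::{ast::{self, make}, AstNode};

    fn main() {
        // `make::name` parses "mod foo;" internally, extracts the `Name` node,
        // and re-roots it, so the fragment's range is independent of the scaffold.
        let name: ast::Name = make::name("foo");
        assert_eq!(name.syntax().text_range().start(), 0.into());
        assert_eq!(name.syntax().text().to_string(), "foo");
    }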
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index 9931fec84..f8f4b64c1 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -41,7 +41,9 @@ use crate::syntax_node::GreenNode; | |||
41 | pub use crate::{ | 41 | pub use crate::{ |
42 | algo::InsertPosition, | 42 | algo::InsertPosition, |
43 | ast::{AstNode, AstToken}, | 43 | ast::{AstNode, AstToken}, |
44 | parsing::{classify_literal, tokenize, Token}, | 44 | parsing::{ |
45 | lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, Token, TokenizeError, | ||
46 | }, | ||
45 | ptr::{AstPtr, SyntaxNodePtr}, | 47 | ptr::{AstPtr, SyntaxNodePtr}, |
46 | syntax_error::{Location, SyntaxError, SyntaxErrorKind}, | 48 | syntax_error::{Location, SyntaxError, SyntaxErrorKind}, |
47 | syntax_node::{ | 49 | syntax_node::{ |
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs index 0387f0378..e5eb80850 100644 --- a/crates/ra_syntax/src/parsing.rs +++ b/crates/ra_syntax/src/parsing.rs | |||
@@ -7,15 +7,23 @@ mod text_tree_sink; | |||
7 | mod reparsing; | 7 | mod reparsing; |
8 | 8 | ||
9 | use crate::{syntax_node::GreenNode, SyntaxError}; | 9 | use crate::{syntax_node::GreenNode, SyntaxError}; |
10 | use text_token_source::TextTokenSource; | ||
11 | use text_tree_sink::TextTreeSink; | ||
10 | 12 | ||
11 | pub use self::lexer::{classify_literal, tokenize, Token}; | 13 | pub use lexer::*; |
12 | 14 | ||
13 | pub(crate) use self::reparsing::incremental_reparse; | 15 | pub(crate) use self::reparsing::incremental_reparse; |
14 | 16 | ||
15 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { | 17 | pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) { |
16 | let tokens = tokenize(&text); | 18 | let (tokens, lexer_errors) = tokenize(&text); |
17 | let mut token_source = text_token_source::TextTokenSource::new(text, &tokens); | 19 | |
18 | let mut tree_sink = text_tree_sink::TextTreeSink::new(text, &tokens); | 20 | let mut token_source = TextTokenSource::new(text, &tokens); |
21 | let mut tree_sink = TextTreeSink::new(text, &tokens); | ||
22 | |||
19 | ra_parser::parse(&mut token_source, &mut tree_sink); | 23 | ra_parser::parse(&mut token_source, &mut tree_sink); |
20 | tree_sink.finish() | 24 | |
25 | let (tree, mut parser_errors) = tree_sink.finish(); | ||
26 | parser_errors.extend(lexer_errors); | ||
27 | |||
28 | (tree, parser_errors) | ||
21 | } | 29 | } |
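`parse_text` now folds the lexer errors into the parser's error list, so malformed tokens surface through the ordinary parse API instead of being silently tokenized. A small sketch of the observable effect, assuming the unterminated-string lexing shown in the lexer hunks below:

    use ra_syntax::SourceFile;

    fn main() {
        // The unterminated string is a lexer error; it now shows up in the
        // same error list as ordinary parser errors.
        let parse = SourceFile::parse("fn main() { let _s = \"unterminated; }");
        assert!(!parse.errors().is_empty());
    }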
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index 6d839208d..f889e6a1d 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
@@ -1,8 +1,10 @@ | |||
1 | //! FIXME: write short doc here | 1 | //! Lexer analyzes raw input string and produces lexemes (tokens). |
2 | //! It is just a bridge to `rustc_lexer`. | ||
2 | 3 | ||
3 | use crate::{ | 4 | use crate::{ |
5 | SyntaxError, SyntaxErrorKind, | ||
4 | SyntaxKind::{self, *}, | 6 | SyntaxKind::{self, *}, |
5 | TextUnit, | 7 | TextRange, TextUnit, |
6 | }; | 8 | }; |
7 | 9 | ||
8 | /// A token of Rust source. | 10 | /// A token of Rust source. |
@@ -14,91 +16,261 @@ pub struct Token { | |||
14 | pub len: TextUnit, | 16 | pub len: TextUnit, |
15 | } | 17 | } |
16 | 18 | ||
17 | fn match_literal_kind(kind: rustc_lexer::LiteralKind) -> SyntaxKind { | 19 | /// Break a string up into its component tokens. |
18 | match kind { | 20 | /// Beware that it checks for a shebang first and its length contributes to the resulting |
19 | rustc_lexer::LiteralKind::Int { .. } => INT_NUMBER, | 21 | /// tokens' offsets. |
20 | rustc_lexer::LiteralKind::Float { .. } => FLOAT_NUMBER, | 22 | pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) { |
21 | rustc_lexer::LiteralKind::Char { .. } => CHAR, | 23 | // non-empty string is a precondition of `rustc_lexer::strip_shebang()`. |
22 | rustc_lexer::LiteralKind::Byte { .. } => BYTE, | 24 | if text.is_empty() { |
23 | rustc_lexer::LiteralKind::Str { .. } => STRING, | 25 | return Default::default(); |
24 | rustc_lexer::LiteralKind::ByteStr { .. } => BYTE_STRING, | 26 | } |
25 | rustc_lexer::LiteralKind::RawStr { .. } => RAW_STRING, | 27 | |
26 | rustc_lexer::LiteralKind::RawByteStr { .. } => RAW_BYTE_STRING, | 28 | let mut tokens = Vec::new(); |
29 | let mut errors = Vec::new(); | ||
30 | |||
31 | let mut offset: usize = rustc_lexer::strip_shebang(text) | ||
32 | .map(|shebang_len| { | ||
33 | tokens.push(Token { kind: SHEBANG, len: TextUnit::from_usize(shebang_len) }); | ||
34 | shebang_len | ||
35 | }) | ||
36 | .unwrap_or(0); | ||
37 | |||
38 | let text_without_shebang = &text[offset..]; | ||
39 | |||
40 | for rustc_token in rustc_lexer::tokenize(text_without_shebang) { | ||
41 | let token_len = TextUnit::from_usize(rustc_token.len); | ||
42 | let token_range = TextRange::offset_len(TextUnit::from_usize(offset), token_len); | ||
43 | |||
44 | let (syntax_kind, error) = | ||
45 | rustc_token_kind_to_syntax_kind(&rustc_token.kind, &text[token_range]); | ||
46 | |||
47 | tokens.push(Token { kind: syntax_kind, len: token_len }); | ||
48 | |||
49 | if let Some(error) = error { | ||
50 | errors.push(SyntaxError::new(SyntaxErrorKind::TokenizeError(error), token_range)); | ||
51 | } | ||
52 | |||
53 | offset += rustc_token.len; | ||
27 | } | 54 | } |
55 | |||
56 | (tokens, errors) | ||
57 | } | ||
58 | |||
59 | /// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token | ||
60 | /// encountered at the beginning of the string. | ||
61 | /// | ||
62 | /// Returns `None` if the string contains zero *or two or more* tokens. | ||
63 | /// The token is malformed if the returned error is not `None`. | ||
64 | /// | ||
65 | /// Beware that unescape errors are not checked at tokenization time. | ||
66 | pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> { | ||
67 | lex_first_token(text) | ||
68 | .filter(|(token, _)| token.len.to_usize() == text.len()) | ||
69 | .map(|(token, error)| (token.kind, error)) | ||
70 | } | ||
71 | |||
72 | /// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and | ||
73 | /// returns `None` if any tokenization error occurred. | ||
74 | /// | ||
75 | /// Beware that unescape errors are not checked at tokenization time. | ||
76 | pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { | ||
77 | lex_first_token(text) | ||
78 | .filter(|(token, error)| !error.is_some() && token.len.to_usize() == text.len()) | ||
79 | .map(|(token, _error)| token.kind) | ||
28 | } | 80 | } |
29 | 81 | ||
30 | /// Break a string up into its component tokens | 82 | /// Returns `SyntaxKind` and `Option<SyntaxError>` of the first token |
31 | pub fn tokenize(text: &str) -> Vec<Token> { | 83 | /// encountered at the beginning of the string. |
84 | /// | ||
85 | /// Returns `None` only if the string contains zero tokens, i.e. if `text` | ||
86 | /// is empty. | ||
87 | /// The token is malformed if the returned error is not `None`. | ||
88 | /// | ||
89 | /// Beware that unescape errors are not checked at tokenization time. | ||
90 | fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> { | ||
91 | // non-empty string is a precondition of `rustc_lexer::first_token()`. | ||
32 | if text.is_empty() { | 92 | if text.is_empty() { |
33 | return vec![]; | 93 | return None; |
34 | } | ||
35 | let mut text = text; | ||
36 | let mut acc = Vec::new(); | ||
37 | if let Some(len) = rustc_lexer::strip_shebang(text) { | ||
38 | acc.push(Token { kind: SHEBANG, len: TextUnit::from_usize(len) }); | ||
39 | text = &text[len..]; | ||
40 | } | 94 | } |
41 | while !text.is_empty() { | 95 | |
42 | let rustc_token = rustc_lexer::first_token(text); | 96 | let rustc_token = rustc_lexer::first_token(text); |
43 | let kind = match rustc_token.kind { | 97 | let (syntax_kind, error) = rustc_token_kind_to_syntax_kind(&rustc_token.kind, text); |
44 | rustc_lexer::TokenKind::LineComment => COMMENT, | 98 | |
45 | rustc_lexer::TokenKind::BlockComment { .. } => COMMENT, | 99 | let token = Token { kind: syntax_kind, len: TextUnit::from_usize(rustc_token.len) }; |
46 | rustc_lexer::TokenKind::Whitespace => WHITESPACE, | 100 | let error = error.map(|error| { |
47 | rustc_lexer::TokenKind::Ident => { | 101 | SyntaxError::new( |
48 | let token_text = &text[..rustc_token.len]; | 102 | SyntaxErrorKind::TokenizeError(error), |
103 | TextRange::from_to(TextUnit::from(0), TextUnit::of_str(text)), | ||
104 | ) | ||
105 | }); | ||
106 | |||
107 | Some((token, error)) | ||
108 | } | ||
109 | |||
110 | // FIXME: simplify TokenizeError to `SyntaxError(String, TextRange)` as per @matklad advice: | ||
111 | // https://github.com/rust-analyzer/rust-analyzer/pull/2911/files#r371175067 | ||
112 | |||
113 | /// Describes the values of the `SyntaxErrorKind::TokenizeError` enum variant, | ||
114 | /// i.e. all the kinds of errors that may happen during the tokenization | ||
115 | /// of Rust source. | ||
116 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | ||
117 | pub enum TokenizeError { | ||
118 | /// Base prefix was provided, but there were no digits | ||
119 | /// after it, e.g. `0x`, `0b`. | ||
120 | EmptyInt, | ||
121 | /// Float exponent lacks digits e.g. `12.34e+`, `12.3E+`, `12e-`, `1_E-`, | ||
122 | EmptyExponent, | ||
123 | |||
124 | /// Block comment lacks trailing delimiter `*/` | ||
125 | UnterminatedBlockComment, | ||
126 | /// Character literal lacks trailing delimiter `'` | ||
127 | UnterminatedChar, | ||
128 | /// Characterish byte literal lacks trailing delimiter `'` | ||
129 | UnterminatedByte, | ||
130 | /// String literal lacks trailing delimiter `"` | ||
131 | UnterminatedString, | ||
132 | /// Byte string literal lacks trailing delimiter `"` | ||
133 | UnterminatedByteString, | ||
134 | /// Raw literal lacks trailing delimiter e.g. `"##` | ||
135 | UnterminatedRawString, | ||
136 | /// Raw byte string literal lacks trailing delimiter e.g. `"##` | ||
137 | UnterminatedRawByteString, | ||
138 | |||
139 | /// Raw string lacks a quote after the pound characters e.g. `r###` | ||
140 | UnstartedRawString, | ||
141 | /// Raw byte string lacks a quote after the pound characters e.g. `br###` | ||
142 | UnstartedRawByteString, | ||
143 | |||
144 | /// Lifetime starts with a number e.g. `'4ever` | ||
145 | LifetimeStartsWithNumber, | ||
146 | } | ||
147 | |||
148 | fn rustc_token_kind_to_syntax_kind( | ||
149 | rustc_token_kind: &rustc_lexer::TokenKind, | ||
150 | token_text: &str, | ||
151 | ) -> (SyntaxKind, Option<TokenizeError>) { | ||
152 | // A note on an intended tradeoff: | ||
153 | // We drop some useful information here (see patterns with double dots `..`). | ||
154 | // Storing that info in `SyntaxKind` is not possible due to its layout requirement of | ||
155 | // being a plain `u16`, which comes from `rowan::SyntaxKind`. | ||
156 | |||
157 | let syntax_kind = { | ||
158 | use rustc_lexer::TokenKind as TK; | ||
159 | use TokenizeError as TE; | ||
160 | |||
161 | match rustc_token_kind { | ||
162 | TK::LineComment => COMMENT, | ||
163 | |||
164 | TK::BlockComment { terminated: true } => COMMENT, | ||
165 | TK::BlockComment { terminated: false } => { | ||
166 | return (COMMENT, Some(TE::UnterminatedBlockComment)); | ||
167 | } | ||
168 | |||
169 | TK::Whitespace => WHITESPACE, | ||
170 | |||
171 | TK::Ident => { | ||
49 | if token_text == "_" { | 172 | if token_text == "_" { |
50 | UNDERSCORE | 173 | UNDERSCORE |
51 | } else { | 174 | } else { |
52 | SyntaxKind::from_keyword(&text[..rustc_token.len]).unwrap_or(IDENT) | 175 | SyntaxKind::from_keyword(token_text).unwrap_or(IDENT) |
53 | } | 176 | } |
54 | } | 177 | } |
55 | rustc_lexer::TokenKind::RawIdent => IDENT, | 178 | |
56 | rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind), | 179 | TK::RawIdent => IDENT, |
57 | rustc_lexer::TokenKind::Lifetime { .. } => LIFETIME, | 180 | TK::Literal { kind, .. } => return match_literal_kind(&kind), |
58 | rustc_lexer::TokenKind::Semi => SEMI, | 181 | |
59 | rustc_lexer::TokenKind::Comma => COMMA, | 182 | TK::Lifetime { starts_with_number: false } => LIFETIME, |
60 | rustc_lexer::TokenKind::Dot => DOT, | 183 | TK::Lifetime { starts_with_number: true } => { |
61 | rustc_lexer::TokenKind::OpenParen => L_PAREN, | 184 | return (LIFETIME, Some(TE::LifetimeStartsWithNumber)) |
62 | rustc_lexer::TokenKind::CloseParen => R_PAREN, | 185 | } |
63 | rustc_lexer::TokenKind::OpenBrace => L_CURLY, | 186 | |
64 | rustc_lexer::TokenKind::CloseBrace => R_CURLY, | 187 | TK::Semi => SEMI, |
65 | rustc_lexer::TokenKind::OpenBracket => L_BRACK, | 188 | TK::Comma => COMMA, |
66 | rustc_lexer::TokenKind::CloseBracket => R_BRACK, | 189 | TK::Dot => DOT, |
67 | rustc_lexer::TokenKind::At => AT, | 190 | TK::OpenParen => L_PAREN, |
68 | rustc_lexer::TokenKind::Pound => POUND, | 191 | TK::CloseParen => R_PAREN, |
69 | rustc_lexer::TokenKind::Tilde => TILDE, | 192 | TK::OpenBrace => L_CURLY, |
70 | rustc_lexer::TokenKind::Question => QUESTION, | 193 | TK::CloseBrace => R_CURLY, |
71 | rustc_lexer::TokenKind::Colon => COLON, | 194 | TK::OpenBracket => L_BRACK, |
72 | rustc_lexer::TokenKind::Dollar => DOLLAR, | 195 | TK::CloseBracket => R_BRACK, |
73 | rustc_lexer::TokenKind::Eq => EQ, | 196 | TK::At => AT, |
74 | rustc_lexer::TokenKind::Not => EXCL, | 197 | TK::Pound => POUND, |
75 | rustc_lexer::TokenKind::Lt => L_ANGLE, | 198 | TK::Tilde => TILDE, |
76 | rustc_lexer::TokenKind::Gt => R_ANGLE, | 199 | TK::Question => QUESTION, |
77 | rustc_lexer::TokenKind::Minus => MINUS, | 200 | TK::Colon => COLON, |
78 | rustc_lexer::TokenKind::And => AMP, | 201 | TK::Dollar => DOLLAR, |
79 | rustc_lexer::TokenKind::Or => PIPE, | 202 | TK::Eq => EQ, |
80 | rustc_lexer::TokenKind::Plus => PLUS, | 203 | TK::Not => EXCL, |
81 | rustc_lexer::TokenKind::Star => STAR, | 204 | TK::Lt => L_ANGLE, |
82 | rustc_lexer::TokenKind::Slash => SLASH, | 205 | TK::Gt => R_ANGLE, |
83 | rustc_lexer::TokenKind::Caret => CARET, | 206 | TK::Minus => MINUS, |
84 | rustc_lexer::TokenKind::Percent => PERCENT, | 207 | TK::And => AMP, |
85 | rustc_lexer::TokenKind::Unknown => ERROR, | 208 | TK::Or => PIPE, |
209 | TK::Plus => PLUS, | ||
210 | TK::Star => STAR, | ||
211 | TK::Slash => SLASH, | ||
212 | TK::Caret => CARET, | ||
213 | TK::Percent => PERCENT, | ||
214 | TK::Unknown => ERROR, | ||
215 | } | ||
216 | }; | ||
217 | |||
218 | return (syntax_kind, None); | ||
219 | |||
220 | fn match_literal_kind(kind: &rustc_lexer::LiteralKind) -> (SyntaxKind, Option<TokenizeError>) { | ||
221 | use rustc_lexer::LiteralKind as LK; | ||
222 | use TokenizeError as TE; | ||
223 | |||
224 | #[rustfmt::skip] | ||
225 | let syntax_kind = match *kind { | ||
226 | LK::Int { empty_int: false, .. } => INT_NUMBER, | ||
227 | LK::Int { empty_int: true, .. } => { | ||
228 | return (INT_NUMBER, Some(TE::EmptyInt)) | ||
229 | } | ||
230 | |||
231 | LK::Float { empty_exponent: false, .. } => FLOAT_NUMBER, | ||
232 | LK::Float { empty_exponent: true, .. } => { | ||
233 | return (FLOAT_NUMBER, Some(TE::EmptyExponent)) | ||
234 | } | ||
235 | |||
236 | LK::Char { terminated: true } => CHAR, | ||
237 | LK::Char { terminated: false } => { | ||
238 | return (CHAR, Some(TE::UnterminatedChar)) | ||
239 | } | ||
240 | |||
241 | LK::Byte { terminated: true } => BYTE, | ||
242 | LK::Byte { terminated: false } => { | ||
243 | return (BYTE, Some(TE::UnterminatedByte)) | ||
244 | } | ||
245 | |||
246 | LK::Str { terminated: true } => STRING, | ||
247 | LK::Str { terminated: false } => { | ||
248 | return (STRING, Some(TE::UnterminatedString)) | ||
249 | } | ||
250 | |||
251 | |||
252 | LK::ByteStr { terminated: true } => BYTE_STRING, | ||
253 | LK::ByteStr { terminated: false } => { | ||
254 | return (BYTE_STRING, Some(TE::UnterminatedByteString)) | ||
255 | } | ||
256 | |||
257 | LK::RawStr { started: true, terminated: true, .. } => RAW_STRING, | ||
258 | LK::RawStr { started: true, terminated: false, .. } => { | ||
259 | return (RAW_STRING, Some(TE::UnterminatedRawString)) | ||
260 | } | ||
261 | LK::RawStr { started: false, .. } => { | ||
262 | return (RAW_STRING, Some(TE::UnstartedRawString)) | ||
263 | } | ||
264 | |||
265 | LK::RawByteStr { started: true, terminated: true, .. } => RAW_BYTE_STRING, | ||
266 | LK::RawByteStr { started: true, terminated: false, .. } => { | ||
267 | return (RAW_BYTE_STRING, Some(TE::UnterminatedRawByteString)) | ||
268 | } | ||
269 | LK::RawByteStr { started: false, .. } => { | ||
270 | return (RAW_BYTE_STRING, Some(TE::UnstartedRawByteString)) | ||
271 | } | ||
86 | }; | 272 | }; |
87 | let token = Token { kind, len: TextUnit::from_usize(rustc_token.len) }; | ||
88 | acc.push(token); | ||
89 | text = &text[rustc_token.len..]; | ||
90 | } | ||
91 | acc | ||
92 | } | ||
93 | 273 | ||
94 | pub fn classify_literal(text: &str) -> Option<Token> { | 274 | (syntax_kind, None) |
95 | let t = rustc_lexer::first_token(text); | ||
96 | if t.len != text.len() { | ||
97 | return None; | ||
98 | } | 275 | } |
99 | let kind = match t.kind { | ||
100 | rustc_lexer::TokenKind::Literal { kind, .. } => match_literal_kind(kind), | ||
101 | _ => return None, | ||
102 | }; | ||
103 | Some(Token { kind, len: TextUnit::from_usize(t.len) }) | ||
104 | } | 276 | } |
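The reworked lexer reports errors as data alongside the tokens instead of dropping them. A minimal usage sketch of the new entry points (`tokenize`, `lex_single_syntax_kind`, `lex_single_valid_syntax_kind`), matching the `ra_syntax` re-exports above; the expected kinds follow the lexer test data added below:

    use ra_syntax::{lex_single_syntax_kind, lex_single_valid_syntax_kind, tokenize, SyntaxKind};

    fn main() {
        // An unterminated string still produces a STRING token, plus a SyntaxError.
        let (tokens, errors) = tokenize("\"hello");
        assert_eq!(tokens[0].kind, SyntaxKind::STRING);
        assert!(!errors.is_empty());

        // Exactly one token is required; a lexer error is reported but tolerated here...
        assert!(lex_single_syntax_kind("\"hello").is_some());
        // ...whereas the `valid` variant rejects both errors and multi-token input.
        assert_eq!(lex_single_valid_syntax_kind("\"hello"), None);
        assert_eq!(lex_single_valid_syntax_kind("92"), Some(SyntaxKind::INT_NUMBER));
    }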
diff --git a/crates/ra_syntax/src/parsing/reparsing.rs b/crates/ra_syntax/src/parsing/reparsing.rs index 06bdda11d..a86da0675 100644 --- a/crates/ra_syntax/src/parsing/reparsing.rs +++ b/crates/ra_syntax/src/parsing/reparsing.rs | |||
@@ -12,7 +12,7 @@ use ra_text_edit::AtomTextEdit; | |||
12 | use crate::{ | 12 | use crate::{ |
13 | algo, | 13 | algo, |
14 | parsing::{ | 14 | parsing::{ |
15 | lexer::{tokenize, Token}, | 15 | lexer::{lex_single_syntax_kind, tokenize, Token}, |
16 | text_token_source::TextTokenSource, | 16 | text_token_source::TextTokenSource, |
17 | text_tree_sink::TextTreeSink, | 17 | text_tree_sink::TextTreeSink, |
18 | }, | 18 | }, |
@@ -41,37 +41,42 @@ fn reparse_token<'node>( | |||
41 | root: &'node SyntaxNode, | 41 | root: &'node SyntaxNode, |
42 | edit: &AtomTextEdit, | 42 | edit: &AtomTextEdit, |
43 | ) -> Option<(GreenNode, TextRange)> { | 43 | ) -> Option<(GreenNode, TextRange)> { |
44 | let token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); | 44 | let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); |
45 | match token.kind() { | 45 | let prev_token_kind = prev_token.kind(); |
46 | match prev_token_kind { | ||
46 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { | 47 | WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => { |
47 | if token.kind() == WHITESPACE || token.kind() == COMMENT { | 48 | if prev_token_kind == WHITESPACE || prev_token_kind == COMMENT { |
48 | // removing a new line may extend the previous token | 49 | // removing a new line may extend the previous token |
49 | if token.text().to_string()[edit.delete - token.text_range().start()].contains('\n') | 50 | let deleted_range = edit.delete - prev_token.text_range().start(); |
50 | { | 51 | if prev_token.text()[deleted_range].contains('\n') { |
51 | return None; | 52 | return None; |
52 | } | 53 | } |
53 | } | 54 | } |
54 | 55 | ||
55 | let text = get_text_after_edit(token.clone().into(), &edit); | 56 | let mut new_text = get_text_after_edit(prev_token.clone().into(), &edit); |
56 | let lex_tokens = tokenize(&text); | 57 | let (new_token_kind, _error) = lex_single_syntax_kind(&new_text)?; |
57 | let lex_token = match lex_tokens[..] { | ||
58 | [lex_token] if lex_token.kind == token.kind() => lex_token, | ||
59 | _ => return None, | ||
60 | }; | ||
61 | 58 | ||
62 | if lex_token.kind == IDENT && is_contextual_kw(&text) { | 59 | if new_token_kind != prev_token_kind |
60 | || (new_token_kind == IDENT && is_contextual_kw(&new_text)) | ||
61 | { | ||
63 | return None; | 62 | return None; |
64 | } | 63 | } |
65 | 64 | ||
66 | if let Some(next_char) = root.text().char_at(token.text_range().end()) { | 65 | // Check that the edited token has not become part of a bigger token. |
67 | let tokens_with_next_char = tokenize(&format!("{}{}", text, next_char)); | 66 | // E.g. if the user removes `ruh` from the source code `bruh"str"`, then |
68 | if tokens_with_next_char.len() == 1 { | 67 | // `b` no longer remains an identifier but becomes part of a byte string literal. |
68 | if let Some(next_char) = root.text().char_at(prev_token.text_range().end()) { | ||
69 | new_text.push(next_char); | ||
70 | let token_with_next_char = lex_single_syntax_kind(&new_text); | ||
71 | if let Some((_kind, _error)) = token_with_next_char { | ||
69 | return None; | 72 | return None; |
70 | } | 73 | } |
74 | new_text.pop(); | ||
71 | } | 75 | } |
72 | 76 | ||
73 | let new_token = GreenToken::new(rowan::SyntaxKind(token.kind().into()), text.into()); | 77 | let new_token = |
74 | Some((token.replace_with(new_token), token.text_range())) | 78 | GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into()); |
79 | Some((prev_token.replace_with(new_token), prev_token.text_range())) | ||
75 | } | 80 | } |
76 | _ => None, | 81 | _ => None, |
77 | } | 82 | } |
@@ -83,20 +88,26 @@ fn reparse_block<'node>( | |||
83 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { | 88 | ) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { |
84 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; | 89 | let (node, reparser) = find_reparsable_node(root, edit.delete)?; |
85 | let text = get_text_after_edit(node.clone().into(), &edit); | 90 | let text = get_text_after_edit(node.clone().into(), &edit); |
86 | let tokens = tokenize(&text); | 91 | |
92 | let (tokens, new_lexer_errors) = tokenize(&text); | ||
87 | if !is_balanced(&tokens) { | 93 | if !is_balanced(&tokens) { |
88 | return None; | 94 | return None; |
89 | } | 95 | } |
96 | |||
90 | let mut token_source = TextTokenSource::new(&text, &tokens); | 97 | let mut token_source = TextTokenSource::new(&text, &tokens); |
91 | let mut tree_sink = TextTreeSink::new(&text, &tokens); | 98 | let mut tree_sink = TextTreeSink::new(&text, &tokens); |
92 | reparser.parse(&mut token_source, &mut tree_sink); | 99 | reparser.parse(&mut token_source, &mut tree_sink); |
93 | let (green, new_errors) = tree_sink.finish(); | 100 | |
94 | Some((node.replace_with(green), new_errors, node.text_range())) | 101 | let (green, mut new_parser_errors) = tree_sink.finish(); |
102 | new_parser_errors.extend(new_lexer_errors); | ||
103 | |||
104 | Some((node.replace_with(green), new_parser_errors, node.text_range())) | ||
95 | } | 105 | } |
96 | 106 | ||
97 | fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { | 107 | fn get_text_after_edit(element: SyntaxElement, edit: &AtomTextEdit) -> String { |
98 | let edit = | 108 | let edit = |
99 | AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); | 109 | AtomTextEdit::replace(edit.delete - element.text_range().start(), edit.insert.clone()); |
110 | |||
100 | let text = match element { | 111 | let text = match element { |
101 | NodeOrToken::Token(token) => token.text().to_string(), | 112 | NodeOrToken::Token(token) => token.text().to_string(), |
102 | NodeOrToken::Node(node) => node.text().to_string(), | 113 | NodeOrToken::Node(node) => node.text().to_string(), |
@@ -113,6 +124,7 @@ fn is_contextual_kw(text: &str) -> bool { | |||
113 | 124 | ||
114 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { | 125 | fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { |
115 | let node = algo::find_covering_element(node, range); | 126 | let node = algo::find_covering_element(node, range); |
127 | |||
116 | let mut ancestors = match node { | 128 | let mut ancestors = match node { |
117 | NodeOrToken::Token(it) => it.parent().ancestors(), | 129 | NodeOrToken::Token(it) => it.parent().ancestors(), |
118 | NodeOrToken::Node(it) => it.ancestors(), | 130 | NodeOrToken::Node(it) => it.ancestors(), |
@@ -182,7 +194,6 @@ mod tests { | |||
182 | let fully_reparsed = SourceFile::parse(&after); | 194 | let fully_reparsed = SourceFile::parse(&after); |
183 | let incrementally_reparsed: Parse<SourceFile> = { | 195 | let incrementally_reparsed: Parse<SourceFile> = { |
184 | let f = SourceFile::parse(&before); | 196 | let f = SourceFile::parse(&before); |
185 | let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() }; | ||
186 | let (green, new_errors, range) = | 197 | let (green, new_errors, range) = |
187 | incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap(); | 198 | incremental_reparse(f.tree().syntax(), &edit, f.errors.to_vec()).unwrap(); |
188 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); | 199 | assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length"); |
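The single-token reparse path calls `lex_single_syntax_kind` twice: once to confirm that the edited text is still one token of the same kind, and once to detect whether appending the next source character would merge it into a bigger token (the `bruh"str"` case from the comment above). A tiny sketch of that second guard, assuming the lexer behaviour shown in the hunks above:

    use ra_syntax::lex_single_syntax_kind;

    fn main() {
        // `b` on its own lexes as a single IDENT token.
        let (ident_kind, _) = lex_single_syntax_kind("b").unwrap();
        // Appending the next source character `"` still yields a *single* token
        // (an unterminated byte string), which is exactly the case where
        // `reparse_token` bails out and falls back to a block reparse.
        let (merged_kind, _) = lex_single_syntax_kind("b\"").unwrap();
        assert_ne!(ident_kind, merged_kind);
    }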
diff --git a/crates/ra_syntax/src/parsing/text_tree_sink.rs b/crates/ra_syntax/src/parsing/text_tree_sink.rs index c36756d6c..dd202601d 100644 --- a/crates/ra_syntax/src/parsing/text_tree_sink.rs +++ b/crates/ra_syntax/src/parsing/text_tree_sink.rs | |||
@@ -92,8 +92,8 @@ impl<'a> TreeSink for TextTreeSink<'a> { | |||
92 | } | 92 | } |
93 | 93 | ||
94 | impl<'a> TextTreeSink<'a> { | 94 | impl<'a> TextTreeSink<'a> { |
95 | pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> TextTreeSink<'a> { | 95 | pub(super) fn new(text: &'a str, tokens: &'a [Token]) -> Self { |
96 | TextTreeSink { | 96 | Self { |
97 | text, | 97 | text, |
98 | tokens, | 98 | tokens, |
99 | text_pos: 0.into(), | 99 | text_pos: 0.into(), |
diff --git a/crates/ra_syntax/src/syntax_error.rs b/crates/ra_syntax/src/syntax_error.rs index 6c171df8d..7f9d36618 100644 --- a/crates/ra_syntax/src/syntax_error.rs +++ b/crates/ra_syntax/src/syntax_error.rs | |||
@@ -4,7 +4,7 @@ use std::fmt; | |||
4 | 4 | ||
5 | use ra_parser::ParseError; | 5 | use ra_parser::ParseError; |
6 | 6 | ||
7 | use crate::{validation::EscapeError, TextRange, TextUnit}; | 7 | use crate::{validation::EscapeError, TextRange, TextUnit, TokenizeError}; |
8 | 8 | ||
9 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] | 9 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] |
10 | pub struct SyntaxError { | 10 | pub struct SyntaxError { |
@@ -12,6 +12,10 @@ pub struct SyntaxError { | |||
12 | location: Location, | 12 | location: Location, |
13 | } | 13 | } |
14 | 14 | ||
15 | // FIXME: Location should be just `Location(TextRange)` | ||
16 | // The TextUnit enum member just unnecessarily complicates things, | ||
17 | // we shouldn't treat it specially; it is just a `TextRange { start: x, end: x + 1 }`, | ||
18 | // see `location_to_range()` in ra_ide/src/diagnostics | ||
15 | #[derive(Clone, PartialEq, Eq, Hash)] | 19 | #[derive(Clone, PartialEq, Eq, Hash)] |
16 | pub enum Location { | 20 | pub enum Location { |
17 | Offset(TextUnit), | 21 | Offset(TextUnit), |
@@ -67,6 +71,10 @@ impl SyntaxError { | |||
67 | 71 | ||
68 | self | 72 | self |
69 | } | 73 | } |
74 | |||
75 | pub fn debug_dump(&self, acc: &mut impl fmt::Write) { | ||
76 | writeln!(acc, "error {:?}: {}", self.location(), self.kind()).unwrap(); | ||
77 | } | ||
70 | } | 78 | } |
71 | 79 | ||
72 | impl fmt::Display for SyntaxError { | 80 | impl fmt::Display for SyntaxError { |
@@ -79,6 +87,10 @@ impl fmt::Display for SyntaxError { | |||
79 | pub enum SyntaxErrorKind { | 87 | pub enum SyntaxErrorKind { |
80 | ParseError(ParseError), | 88 | ParseError(ParseError), |
81 | EscapeError(EscapeError), | 89 | EscapeError(EscapeError), |
90 | TokenizeError(TokenizeError), | ||
91 | // FIXME: the obvious pattern of this enum dictates that the following enum variants | ||
92 | // should be wrapped into something like `SemanticError(SemanticError)` | ||
93 | // or `ValidateError(ValidateError)` or `SemanticValidateError(...)` | ||
82 | InvalidBlockAttr, | 94 | InvalidBlockAttr, |
83 | InvalidMatchInnerAttr, | 95 | InvalidMatchInnerAttr, |
84 | InvalidTupleIndexFormat, | 96 | InvalidTupleIndexFormat, |
@@ -101,6 +113,7 @@ impl fmt::Display for SyntaxErrorKind { | |||
101 | } | 113 | } |
102 | ParseError(msg) => write!(f, "{}", msg.0), | 114 | ParseError(msg) => write!(f, "{}", msg.0), |
103 | EscapeError(err) => write!(f, "{}", err), | 115 | EscapeError(err) => write!(f, "{}", err), |
116 | TokenizeError(err) => write!(f, "{}", err), | ||
104 | VisibilityNotAllowed => { | 117 | VisibilityNotAllowed => { |
105 | write!(f, "unnecessary visibility qualifier") | 118 | write!(f, "unnecessary visibility qualifier") |
106 | } | 119 | } |
@@ -111,6 +124,51 @@ impl fmt::Display for SyntaxErrorKind { | |||
111 | } | 124 | } |
112 | } | 125 | } |
113 | 126 | ||
127 | impl fmt::Display for TokenizeError { | ||
128 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | ||
129 | #[rustfmt::skip] | ||
130 | let msg = match self { | ||
131 | TokenizeError::EmptyInt => { | ||
132 | "Missing digits after the integer base prefix" | ||
133 | } | ||
134 | TokenizeError::EmptyExponent => { | ||
135 | "Missing digits after the exponent symbol" | ||
136 | } | ||
137 | TokenizeError::UnterminatedBlockComment => { | ||
138 | "Missing trailing `*/` symbols to terminate the block comment" | ||
139 | } | ||
140 | TokenizeError::UnterminatedChar => { | ||
141 | "Missing trailing `'` symbol to terminate the character literal" | ||
142 | } | ||
143 | TokenizeError::UnterminatedByte => { | ||
144 | "Missing trailing `'` symbol to terminate the byte literal" | ||
145 | } | ||
146 | TokenizeError::UnterminatedString => { | ||
147 | "Missing trailing `\"` symbol to terminate the string literal" | ||
148 | } | ||
149 | TokenizeError::UnterminatedByteString => { | ||
150 | "Missing trailing `\"` symbol to terminate the byte string literal" | ||
151 | } | ||
152 | TokenizeError::UnterminatedRawString => { | ||
153 | "Missing trailing `\"` with `#` symbols to terminate the raw string literal" | ||
154 | } | ||
155 | TokenizeError::UnterminatedRawByteString => { | ||
156 | "Missing trailing `\"` with `#` symbols to terminate the raw byte string literal" | ||
157 | } | ||
158 | TokenizeError::UnstartedRawString => { | ||
159 | "Missing `\"` symbol after `#` symbols to begin the raw string literal" | ||
160 | } | ||
161 | TokenizeError::UnstartedRawByteString => { | ||
162 | "Missing `\"` symbol after `#` symbols to begin the raw byte string literal" | ||
163 | } | ||
164 | TokenizeError::LifetimeStartsWithNumber => { | ||
165 | "Lifetime name cannot start with a number" | ||
166 | } | ||
167 | }; | ||
168 | write!(f, "{}", msg) | ||
169 | } | ||
170 | } | ||
171 | |||
114 | impl fmt::Display for EscapeError { | 172 | impl fmt::Display for EscapeError { |
115 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { | 173 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { |
116 | let msg = match self { | 174 | let msg = match self { |
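The new `Display` impl turns each `TokenizeError` variant into the human-readable message that appears in the lexer error dumps below. A quick sketch, assuming the `TokenizeError` re-export from `ra_syntax`:

    use ra_syntax::TokenizeError;

    fn main() {
        // Each variant renders as a plain message via `Display`.
        let msg = TokenizeError::UnterminatedString.to_string();
        assert_eq!(msg, "Missing trailing `\"` symbol to terminate the string literal");
    }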
diff --git a/crates/ra_syntax/src/syntax_node.rs b/crates/ra_syntax/src/syntax_node.rs index b3eb5da63..7c2b18af3 100644 --- a/crates/ra_syntax/src/syntax_node.rs +++ b/crates/ra_syntax/src/syntax_node.rs | |||
@@ -4,7 +4,7 @@ | |||
4 | //! `SyntaxNode`, and a basic traversal API (parent, children, siblings). | 4 | //! `SyntaxNode`, and a basic traversal API (parent, children, siblings). |
5 | //! | 5 | //! |
6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate, this | 6 | //! The *real* implementation is in the (language-agnostic) `rowan` crate, this |
7 | //! modules just wraps its API. | 7 | //! module just wraps its API. |
8 | 8 | ||
9 | use ra_parser::ParseError; | 9 | use ra_parser::ParseError; |
10 | use rowan::{GreenNodeBuilder, Language}; | 10 | use rowan::{GreenNodeBuilder, Language}; |
@@ -38,17 +38,12 @@ pub type SyntaxElementChildren = rowan::SyntaxElementChildren<RustLanguage>; | |||
38 | 38 | ||
39 | pub use rowan::{Direction, NodeOrToken}; | 39 | pub use rowan::{Direction, NodeOrToken}; |
40 | 40 | ||
41 | #[derive(Default)] | ||
41 | pub struct SyntaxTreeBuilder { | 42 | pub struct SyntaxTreeBuilder { |
42 | errors: Vec<SyntaxError>, | 43 | errors: Vec<SyntaxError>, |
43 | inner: GreenNodeBuilder<'static>, | 44 | inner: GreenNodeBuilder<'static>, |
44 | } | 45 | } |
45 | 46 | ||
46 | impl Default for SyntaxTreeBuilder { | ||
47 | fn default() -> SyntaxTreeBuilder { | ||
48 | SyntaxTreeBuilder { errors: Vec::new(), inner: GreenNodeBuilder::new() } | ||
49 | } | ||
50 | } | ||
51 | |||
52 | impl SyntaxTreeBuilder { | 47 | impl SyntaxTreeBuilder { |
53 | pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) { | 48 | pub(crate) fn finish_raw(self) -> (GreenNode, Vec<SyntaxError>) { |
54 | let green = self.inner.finish(); | 49 | let green = self.inner.finish(); |
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs index 458920607..fb22b9e54 100644 --- a/crates/ra_syntax/src/tests.rs +++ b/crates/ra_syntax/src/tests.rs | |||
@@ -1,18 +1,28 @@ | |||
1 | use std::{ | 1 | use std::{ |
2 | fmt::Write, | 2 | fmt::Write, |
3 | path::{Component, PathBuf}, | 3 | path::{Component, Path, PathBuf}, |
4 | }; | 4 | }; |
5 | 5 | ||
6 | use test_utils::{collect_tests, dir_tests, project_dir, read_text}; | 6 | use test_utils::{collect_tests, dir_tests, project_dir, read_text}; |
7 | 7 | ||
8 | use crate::{fuzz, SourceFile}; | 8 | use crate::{fuzz, tokenize, Location, SourceFile, SyntaxError, TextRange, Token}; |
9 | 9 | ||
10 | #[test] | 10 | #[test] |
11 | fn lexer_tests() { | 11 | fn lexer_tests() { |
12 | dir_tests(&test_data_dir(), &["lexer"], |text, _| { | 12 | // FIXME: |
13 | let tokens = crate::tokenize(text); | 13 | // * Add tests for unicode escapes in byte-character and [raw]-byte-string literals |
14 | dump_tokens(&tokens, text) | 14 | // * Add tests for unescape errors |
15 | }) | 15 | |
16 | dir_tests(&test_data_dir(), &["lexer/ok"], |text, path| { | ||
17 | let (tokens, errors) = tokenize(text); | ||
18 | assert_errors_are_absent(&errors, path); | ||
19 | dump_tokens_and_errors(&tokens, &errors, text) | ||
20 | }); | ||
21 | dir_tests(&test_data_dir(), &["lexer/err"], |text, path| { | ||
22 | let (tokens, errors) = tokenize(text); | ||
23 | assert_errors_are_present(&errors, path); | ||
24 | dump_tokens_and_errors(&tokens, &errors, text) | ||
25 | }); | ||
16 | } | 26 | } |
17 | 27 | ||
18 | #[test] | 28 | #[test] |
@@ -32,18 +42,13 @@ fn parser_tests() { | |||
32 | dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { | 42 | dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| { |
33 | let parse = SourceFile::parse(text); | 43 | let parse = SourceFile::parse(text); |
34 | let errors = parse.errors(); | 44 | let errors = parse.errors(); |
35 | assert_eq!( | 45 | assert_errors_are_absent(&errors, path); |
36 | errors, | ||
37 | &[] as &[crate::SyntaxError], | ||
38 | "There should be no errors in the file {:?}", | ||
39 | path.display(), | ||
40 | ); | ||
41 | parse.debug_dump() | 46 | parse.debug_dump() |
42 | }); | 47 | }); |
43 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { | 48 | dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| { |
44 | let parse = SourceFile::parse(text); | 49 | let parse = SourceFile::parse(text); |
45 | let errors = parse.errors(); | 50 | let errors = parse.errors(); |
46 | assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); | 51 | assert_errors_are_present(&errors, path); |
47 | parse.debug_dump() | 52 | parse.debug_dump() |
48 | }); | 53 | }); |
49 | } | 54 | } |
@@ -75,7 +80,7 @@ fn self_hosting_parsing() { | |||
75 | .into_iter() | 80 | .into_iter() |
76 | .filter_entry(|entry| { | 81 | .filter_entry(|entry| { |
77 | !entry.path().components().any(|component| { | 82 | !entry.path().components().any(|component| { |
78 | // Get all files which are not in the crates/ra_syntax/tests/data folder | 83 | // Get all files which are not in the crates/ra_syntax/test_data folder |
79 | component == Component::Normal(OsStr::new("test_data")) | 84 | component == Component::Normal(OsStr::new("test_data")) |
80 | }) | 85 | }) |
81 | }) | 86 | }) |
@@ -101,15 +106,47 @@ fn test_data_dir() -> PathBuf { | |||
101 | project_dir().join("crates/ra_syntax/test_data") | 106 | project_dir().join("crates/ra_syntax/test_data") |
102 | } | 107 | } |
103 | 108 | ||
104 | fn dump_tokens(tokens: &[crate::Token], text: &str) -> String { | 109 | fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) { |
110 | assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display()); | ||
111 | } | ||
112 | fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) { | ||
113 | assert_eq!( | ||
114 | errors, | ||
115 | &[] as &[SyntaxError], | ||
116 | "There should be no errors in the file {:?}", | ||
117 | path.display(), | ||
118 | ); | ||
119 | } | ||
120 | |||
121 | fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String { | ||
105 | let mut acc = String::new(); | 122 | let mut acc = String::new(); |
106 | let mut offset = 0; | 123 | let mut offset = 0; |
107 | for token in tokens { | 124 | for token in tokens { |
108 | let len: u32 = token.len.into(); | 125 | let token_len = token.len.to_usize(); |
109 | let len = len as usize; | 126 | let token_text = &text[offset..offset + token_len]; |
110 | let token_text = &text[offset..offset + len]; | 127 | offset += token_len; |
111 | offset += len; | 128 | writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap(); |
112 | write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap() | 129 | } |
130 | for err in errors { | ||
131 | let err_range = location_to_range(err.location()); | ||
132 | writeln!( | ||
133 | acc, | ||
134 | "> error{:?} token({:?}) msg({})", | ||
135 | err.location(), | ||
136 | &text[err_range], | ||
137 | err.kind() | ||
138 | ) | ||
139 | .unwrap(); | ||
140 | } | ||
141 | return acc; | ||
142 | |||
143 | // FIXME: copy-pasted this from `ra_ide/src/diagnostics.rs` | ||
144 | // `Location` will be refactored soon in new PR, see todos here: | ||
145 | // https://github.com/rust-analyzer/rust-analyzer/issues/223 | ||
146 | fn location_to_range(location: Location) -> TextRange { | ||
147 | match location { | ||
148 | Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), | ||
149 | Location::Range(range) => range, | ||
150 | } | ||
113 | } | 151 | } |
114 | acc | ||
115 | } | 152 | } |
diff --git a/crates/ra_syntax/src/validation.rs b/crates/ra_syntax/src/validation.rs index 445e3b3e4..8a5f0e4b7 100644 --- a/crates/ra_syntax/src/validation.rs +++ b/crates/ra_syntax/src/validation.rs | |||
@@ -94,6 +94,12 @@ impl From<rustc_lexer::unescape::EscapeError> for SyntaxErrorKind { | |||
94 | } | 94 | } |
95 | 95 | ||
96 | pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> { | 96 | pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> { |
97 | // FIXME: | ||
98 | // * Add validation of character literal containing only a single char | ||
99 | // * Add validation of `crate` keyword not appearing in the middle of the symbol path | ||
100 | // * Add validation that doc comments are attached to nodes | ||
101 | // * Remove validation of unterminated literals (it is already implemented in `tokenize()`) | ||
102 | |||
97 | let mut errors = Vec::new(); | 103 | let mut errors = Vec::new(); |
98 | for node in root.descendants() { | 104 | for node in root.descendants() { |
99 | match_ast! { | 105 | match_ast! { |
diff --git a/crates/ra_syntax/test_data/lexer/0010_comments.rs b/crates/ra_syntax/test_data/lexer/0010_comments.rs deleted file mode 100644 index 71bdd1f9c..000000000 --- a/crates/ra_syntax/test_data/lexer/0010_comments.rs +++ /dev/null | |||
@@ -1,3 +0,0 @@ | |||
1 | #!/usr/bin/env bash | ||
2 | // hello | ||
3 | //! World | ||
diff --git a/crates/ra_syntax/test_data/lexer/0010_comments.txt b/crates/ra_syntax/test_data/lexer/0010_comments.txt deleted file mode 100644 index 3c997de3f..000000000 --- a/crates/ra_syntax/test_data/lexer/0010_comments.txt +++ /dev/null | |||
@@ -1,6 +0,0 @@ | |||
1 | SHEBANG 19 "#!/usr/bin/env bash" | ||
2 | WHITESPACE 1 "\n" | ||
3 | COMMENT 8 "// hello" | ||
4 | WHITESPACE 1 "\n" | ||
5 | COMMENT 9 "//! World" | ||
6 | WHITESPACE 1 "\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.rs b/crates/ra_syntax/test_data/lexer/0014_unclosed_char.rs deleted file mode 100644 index 9c0007077..000000000 --- a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.rs +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | '1 \ No newline at end of file | ||
diff --git a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.txt b/crates/ra_syntax/test_data/lexer/0014_unclosed_char.txt deleted file mode 100644 index 737a300ee..000000000 --- a/crates/ra_syntax/test_data/lexer/0014_unclosed_char.txt +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | LIFETIME 2 "\'1" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.rs b/crates/ra_syntax/test_data/lexer/0015_unclosed_string.rs deleted file mode 100644 index d771a26d4..000000000 --- a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.rs +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | "hello | ||
diff --git a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.txt b/crates/ra_syntax/test_data/lexer/0015_unclosed_string.txt deleted file mode 100644 index 728c40b66..000000000 --- a/crates/ra_syntax/test_data/lexer/0015_unclosed_string.txt +++ /dev/null | |||
@@ -1 +0,0 @@ | |||
1 | STRING 7 "\"hello\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs new file mode 100644 index 000000000..ad2823b48 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt new file mode 100644 index 000000000..f24e1fd32 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0001_unclosed_char_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 1 "\'" | ||
2 | > error[0; 1) token("\'") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs new file mode 100644 index 000000000..e264a4152 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
'🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt new file mode 100644 index 000000000..bd08cfc44 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0002_unclosed_char_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 5 "\'🦀" | ||
2 | > error[0; 5) token("\'🦀") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs new file mode 100644 index 000000000..cf74b4dad --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
'\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt new file mode 100644 index 000000000..0ee22912d --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0003_unclosed_char_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 5 "\'\\x7f" | ||
2 | > error[0; 5) token("\'\\x7f") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs new file mode 100644 index 000000000..50be91f68 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
'\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt new file mode 100644 index 000000000..96fac42ce --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0004_unclosed_char_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 9 "\'\\u{20AA}" | ||
2 | > error[0; 9) token("\'\\u{20AA}") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs new file mode 100644 index 000000000..309ecfe47 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.rs | |||
@@ -0,0 +1 @@ | |||
' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt new file mode 100644 index 000000000..2059f3f81 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0005_unclosed_char_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 2 "\' " | ||
2 | > error[0; 2) token("\' ") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs new file mode 100644 index 000000000..6ba258b10 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
'\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt new file mode 100644 index 000000000..7dd376e59 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0006_unclosed_char_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 2 "\'\\" | ||
2 | > error[0; 2) token("\'\\") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs new file mode 100644 index 000000000..78bef7e3e --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
'\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt new file mode 100644 index 000000000..ef7a0a147 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0007_unclosed_char_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 3 "\'\\n" | ||
2 | > error[0; 3) token("\'\\n") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs new file mode 100644 index 000000000..a0e722065 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.rs | |||
@@ -0,0 +1 @@ | |||
'\' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt new file mode 100644 index 000000000..13fc5ea9a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0008_unclosed_char_with_slash_single_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | CHAR 3 "\'\\\'" | ||
2 | > error[0; 3) token("\'\\\'") msg(Missing trailing `'` symbol to terminate the character literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs new file mode 100644 index 000000000..795dc7e25 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
b' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt new file mode 100644 index 000000000..269d68c74 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0009_unclosed_byte_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 2 "b\'" | ||
2 | > error[0; 2) token("b\'") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs new file mode 100644 index 000000000..c9230dc24 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
b'🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt new file mode 100644 index 000000000..91a76e479 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0010_unclosed_byte_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 6 "b\'🦀" | ||
2 | > error[0; 6) token("b\'🦀") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs new file mode 100644 index 000000000..d146a8090 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
b'\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt new file mode 100644 index 000000000..b8c804a18 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0011_unclosed_byte_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 6 "b\'\\x7f" | ||
2 | > error[0; 6) token("b\'\\x7f") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs new file mode 100644 index 000000000..a3dec7c25 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
b'\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt new file mode 100644 index 000000000..dfca22a59 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0012_unclosed_byte_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 10 "b\'\\u{20AA}" | ||
2 | > error[0; 10) token("b\'\\u{20AA}") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs new file mode 100644 index 000000000..93b7f9c87 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.rs | |||
@@ -0,0 +1 @@ | |||
b' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt new file mode 100644 index 000000000..51a1cceab --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0013_unclosed_byte_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 3 "b\' " | ||
2 | > error[0; 3) token("b\' ") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs new file mode 100644 index 000000000..abffa5037 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
b'\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt new file mode 100644 index 000000000..24e835c27 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0014_unclosed_byte_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 3 "b\'\\" | ||
2 | > error[0; 3) token("b\'\\") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs new file mode 100644 index 000000000..4f46836a9 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
b'\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt new file mode 100644 index 000000000..f1e39a41b --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0015_unclosed_byte_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 4 "b\'\\n" | ||
2 | > error[0; 4) token("b\'\\n") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs new file mode 100644 index 000000000..645b641ee --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.rs | |||
@@ -0,0 +1 @@ | |||
b'\' \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt new file mode 100644 index 000000000..f8ffe815d --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0016_unclosed_byte_with_slash_single_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE 4 "b\'\\\'" | ||
2 | > error[0; 4) token("b\'\\\'") msg(Missing trailing `'` symbol to terminate the byte literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs new file mode 100644 index 000000000..9d68933c4 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt new file mode 100644 index 000000000..823daaf6f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0017_unclosed_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 1 "\"" | ||
2 | > error[0; 1) token("\"") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs new file mode 100644 index 000000000..d439b8d2a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt new file mode 100644 index 000000000..164580eb3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0018_unclosed_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 5 "\"🦀" | ||
2 | > error[0; 5) token("\"🦀") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs new file mode 100644 index 000000000..56186a344 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt new file mode 100644 index 000000000..4453827c3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0019_unclosed_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 5 "\"\\x7f" | ||
2 | > error[0; 5) token("\"\\x7f") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs new file mode 100644 index 000000000..ed24095c3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt new file mode 100644 index 000000000..aa614f304 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0020_unclosed_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 9 "\"\\u{20AA}" | ||
2 | > error[0; 9) token("\"\\u{20AA}") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs new file mode 100644 index 000000000..72cdc841f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt new file mode 100644 index 000000000..b7db1236f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0021_unclosed_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 2 "\" " | ||
2 | > error[0; 2) token("\" ") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs new file mode 100644 index 000000000..00a258400 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt new file mode 100644 index 000000000..9d3df3799 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0022_unclosed_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 2 "\"\\" | ||
2 | > error[0; 2) token("\"\\") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs new file mode 100644 index 000000000..a0c29b8cf --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt new file mode 100644 index 000000000..e3eb672b6 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0023_unclosed_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 3 "\"\\n" | ||
2 | > error[0; 3) token("\"\\n") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs new file mode 100644 index 000000000..403c2d6dd --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.rs | |||
@@ -0,0 +1 @@ | |||
"\" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt new file mode 100644 index 000000000..041d7fb6e --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0024_unclosed_string_with_slash_double_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | STRING 3 "\"\\\"" | ||
2 | > error[0; 3) token("\"\\\"") msg(Missing trailing `"` symbol to terminate the string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs new file mode 100644 index 000000000..36f4f4321 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
b" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt new file mode 100644 index 000000000..be7970a83 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0025_unclosed_byte_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 2 "b\"" | ||
2 | > error[0; 2) token("b\"") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs new file mode 100644 index 000000000..3c23a0372 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
b"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt new file mode 100644 index 000000000..bf9aab132 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0026_unclosed_byte_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 6 "b\"🦀" | ||
2 | > error[0; 6) token("b\"🦀") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs new file mode 100644 index 000000000..836c112c1 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
b"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt new file mode 100644 index 000000000..76e16d7d3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0027_unclosed_byte_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 6 "b\"\\x7f" | ||
2 | > error[0; 6) token("b\"\\x7f") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs new file mode 100644 index 000000000..1c6df1d00 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
b"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt new file mode 100644 index 000000000..09adffa16 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0028_unclosed_byte_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 10 "b\"\\u{20AA}" | ||
2 | > error[0; 10) token("b\"\\u{20AA}") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs new file mode 100644 index 000000000..d6898541e --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
b" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt new file mode 100644 index 000000000..fcb7253c8 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0029_unclosed_byte_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 3 "b\" " | ||
2 | > error[0; 3) token("b\" ") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs new file mode 100644 index 000000000..cce661538 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
b"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt new file mode 100644 index 000000000..0a1b3e269 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0030_unclosed_byte_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 3 "b\"\\" | ||
2 | > error[0; 3) token("b\"\\") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs new file mode 100644 index 000000000..5e680aabb --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
b"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt new file mode 100644 index 000000000..1fb89d2b6 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0031_unclosed_byte_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 4 "b\"\\n" | ||
2 | > error[0; 4) token("b\"\\n") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs new file mode 100644 index 000000000..f2ff58ba9 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.rs | |||
@@ -0,0 +1 @@ | |||
b"\" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt new file mode 100644 index 000000000..718d36992 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0032_unclosed_byte_string_with_slash_double_quote.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | BYTE_STRING 4 "b\"\\\"" | ||
2 | > error[0; 4) token("b\"\\\"") msg(Missing trailing `"` symbol to terminate the byte string literal) | ||
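Fixtures 0008 through 0032 above all encode one rule: a char, byte, string, or byte string literal that reaches end of input before an unescaped closing quote still becomes a single token, with an error spanning the whole token. Below is a minimal standalone sketch of that termination check; is_unterminated is a hypothetical helper written for illustration only, not the ra_syntax lexer itself.

// Hypothetical helper, not the ra_syntax lexer: given the token text starting
// at its opening quote, decide whether the fixtures above should expect a
// "missing trailing quote" error. An escaped quote (\' or \") must not count
// as a terminator, which is why '\' and "\" are still unterminated.
fn is_unterminated(text: &str, quote: char) -> bool {
    let mut chars = text.chars();
    if chars.next() != Some(quote) {
        return true; // does not even start with the expected quote
    }
    let mut escaped = false;
    for c in chars {
        match (escaped, c) {
            (true, _) => escaped = false,             // this char was escaped
            (false, '\\') => escaped = true,          // next char is escaped
            (false, c) if c == quote => return false, // unescaped closing quote found
            _ => {}
        }
    }
    true // ran out of input before the closing quote
}

fn main() {
    assert!(is_unterminated(r"'\'", '\''));  // 0008: '\'
    assert!(is_unterminated("'🦀", '\''));    // the ferris cases, byte prefix stripped
    assert!(is_unterminated(r#""\""#, '"')); // 0024: "\"
    assert!(!is_unterminated("'a'", '\''));  // a closed char literal is fine
}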
diff --git a/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs new file mode 100644 index 000000000..557c59b62 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
r##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt new file mode 100644 index 000000000..93348f548 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0033_unclosed_raw_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 4 "r##\"" | ||
2 | > error[0; 4) token("r##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs new file mode 100644 index 000000000..bd046e4bb --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
r##"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt new file mode 100644 index 000000000..42c70dfe8 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0034_unclosed_raw_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 8 "r##\"🦀" | ||
2 | > error[0; 8) token("r##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs new file mode 100644 index 000000000..5bec883dc --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
r##"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt new file mode 100644 index 000000000..2bdeea0ff --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0035_unclosed_raw_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 8 "r##\"\\x7f" | ||
2 | > error[0; 8) token("r##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs new file mode 100644 index 000000000..bf05c3913 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
r##"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt new file mode 100644 index 000000000..667d4d79f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0036_unclosed_raw_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 12 "r##\"\\u{20AA}" | ||
2 | > error[0; 12) token("r##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs new file mode 100644 index 000000000..f104bae4f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
r##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt new file mode 100644 index 000000000..dd9597a1a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0037_unclosed_raw_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 5 "r##\" " | ||
2 | > error[0; 5) token("r##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs new file mode 100644 index 000000000..9242077b8 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
r##"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt new file mode 100644 index 000000000..6ac6e3d62 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0038_unclosed_raw_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 5 "r##\"\\" | ||
2 | > error[0; 5) token("r##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs new file mode 100644 index 000000000..db1c16f2b --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
r##"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt new file mode 100644 index 000000000..9d35443f5 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0039_unclosed_raw_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 6 "r##\"\\n" | ||
2 | > error[0; 6) token("r##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs new file mode 100644 index 000000000..ae5bae622 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
br##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt new file mode 100644 index 000000000..81fa39ea5 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0040_unclosed_raw_byte_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 5 "br##\"" | ||
2 | > error[0; 5) token("br##\"") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs new file mode 100644 index 000000000..9ef01207a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.rs | |||
@@ -0,0 +1 @@ | |||
br##"🦀 \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt new file mode 100644 index 000000000..c2503a4d0 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0041_unclosed_raw_byte_string_with_ferris.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 9 "br##\"🦀" | ||
2 | > error[0; 9) token("br##\"🦀") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs new file mode 100644 index 000000000..d50270afe --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.rs | |||
@@ -0,0 +1 @@ | |||
br##"\x7f \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt new file mode 100644 index 000000000..3bd3d8152 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0042_unclosed_raw_byte_string_with_ascii_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 9 "br##\"\\x7f" | ||
2 | > error[0; 9) token("br##\"\\x7f") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs new file mode 100644 index 000000000..90e299a1a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.rs | |||
@@ -0,0 +1 @@ | |||
br##"\u{20AA} \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt new file mode 100644 index 000000000..a512f0428 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0043_unclosed_raw_byte_string_with_unicode_escape.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 13 "br##\"\\u{20AA}" | ||
2 | > error[0; 13) token("br##\"\\u{20AA}") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs new file mode 100644 index 000000000..14c602fd2 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.rs | |||
@@ -0,0 +1 @@ | |||
br##" \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt new file mode 100644 index 000000000..dc616a623 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0044_unclosed_raw_byte_string_with_space.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 6 "br##\" " | ||
2 | > error[0; 6) token("br##\" ") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs new file mode 100644 index 000000000..0b3c015d7 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.rs | |||
@@ -0,0 +1 @@ | |||
br##"\ \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt new file mode 100644 index 000000000..debafe380 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0045_unclosed_raw_byte_string_with_slash.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 6 "br##\"\\" | ||
2 | > error[0; 6) token("br##\"\\") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs new file mode 100644 index 000000000..0d8b0e7ab --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.rs | |||
@@ -0,0 +1 @@ | |||
br##"\n \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt new file mode 100644 index 000000000..524e617b7 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0046_unclosed_raw_byte_string_with_slash_n.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 7 "br##\"\\n" | ||
2 | > error[0; 7) token("br##\"\\n") msg(Missing trailing `"` with `#` symbols to terminate the raw byte string literal) | ||
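Fixtures 0033 through 0046 switch to raw and raw byte strings, where backslash escapes are irrelevant and termination instead requires a `"` followed by as many `#` symbols as opened the literal. A hedged standalone sketch of that rule follows; raw_string_is_terminated is an illustrative name, not the real lexer API.

// Hypothetical check mirroring the raw-string fixtures above: a raw (byte)
// string opened with r/br and N `#` symbols is only terminated by `"` followed
// by N `#` symbols; backslash escapes play no role.
fn raw_string_is_terminated(text: &str) -> bool {
    let body = text.strip_prefix("br").or_else(|| text.strip_prefix('r'));
    let body = match body {
        Some(b) => b,
        None => return false,
    };
    let hashes = body.chars().take_while(|&c| c == '#').count();
    let rest = match body[hashes..].strip_prefix('"') {
        Some(r) => r,
        None => return false, // no opening `"` at all (the "unstarted" cases that follow)
    };
    // Look for `"` followed by `hashes` many `#` symbols.
    let closer: String =
        std::iter::once('"').chain(std::iter::repeat('#').take(hashes)).collect();
    rest.contains(closer.as_str())
}

fn main() {
    assert!(!raw_string_is_terminated(r####"r##"🦀"####));  // 0034
    assert!(!raw_string_is_terminated(r####"br##"\n"####)); // 0046 (literal backslash-n)
    assert!(raw_string_is_terminated(r####"r##"closed"##"####));
}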
diff --git a/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs new file mode 100644 index 000000000..eddf8d080 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
r## \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt new file mode 100644 index 000000000..00b046840 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0047_unstarted_raw_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_STRING 3 "r##" | ||
2 | > error[0; 3) token("r##") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs new file mode 100644 index 000000000..7e8cadf4f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
br## \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt new file mode 100644 index 000000000..33b25e60f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0048_unstarted_raw_byte_string_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | RAW_BYTE_STRING 4 "br##" | ||
2 | > error[0; 4) token("br##") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs new file mode 100644 index 000000000..534668a9b --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.rs | |||
@@ -0,0 +1 @@ | |||
r## I lack a quote! \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt new file mode 100644 index 000000000..782dfd974 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0049_unstarted_raw_string_with_ascii.txt | |||
@@ -0,0 +1,10 @@ | |||
1 | RAW_STRING 4 "r## " | ||
2 | IDENT 1 "I" | ||
3 | WHITESPACE 1 " " | ||
4 | IDENT 4 "lack" | ||
5 | WHITESPACE 1 " " | ||
6 | IDENT 1 "a" | ||
7 | WHITESPACE 1 " " | ||
8 | IDENT 5 "quote" | ||
9 | EXCL 1 "!" | ||
10 | > error[0; 4) token("r## ") msg(Missing `"` symbol after `#` symbols to begin the raw string literal) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs new file mode 100644 index 000000000..d9b55455a --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.rs | |||
@@ -0,0 +1 @@ | |||
br## I lack a quote! \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt new file mode 100644 index 000000000..59c40cd65 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0050_unstarted_raw_byte_string_with_ascii.txt | |||
@@ -0,0 +1,10 @@ | |||
1 | RAW_BYTE_STRING 5 "br## " | ||
2 | IDENT 1 "I" | ||
3 | WHITESPACE 1 " " | ||
4 | IDENT 4 "lack" | ||
5 | WHITESPACE 1 " " | ||
6 | IDENT 1 "a" | ||
7 | WHITESPACE 1 " " | ||
8 | IDENT 5 "quote" | ||
9 | EXCL 1 "!" | ||
10 | > error[0; 5) token("br## ") msg(Missing `"` symbol after `#` symbols to begin the raw byte string literal) | ||
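All of the expected-output .txt fixtures in this commit share one dump shape: a `KIND LEN "TEXT"` line per token, then a `> error[start; end) token("TEXT") msg(MESSAGE)` line per error, with LEN counted in bytes. The standalone formatter below reproduces that shape; the Token and LexError structs and the dump function are assumptions made for the example, not ra_syntax types.

struct Token {
    kind: &'static str,
    text: String,
}
struct LexError {
    start: usize,
    end: usize,
    text: String,
    msg: String,
}

// Render tokens and errors in the fixture format shown above.
fn dump(tokens: &[Token], errors: &[LexError]) -> String {
    let mut out = String::new();
    for t in tokens {
        out.push_str(&format!("{} {} {:?}\n", t.kind, t.text.len(), t.text));
    }
    for e in errors {
        out.push_str(&format!(
            "> error[{}; {}) token({:?}) msg({})\n",
            e.start, e.end, e.text, e.msg
        ));
    }
    out
}

fn main() {
    let tokens = vec![Token { kind: "RAW_STRING", text: "r##".to_string() }];
    let errors = vec![LexError {
        start: 0,
        end: 3,
        text: "r##".to_string(),
        msg: "Missing `\"` symbol after `#` symbols to begin the raw string literal".to_string(),
    }];
    // Prints the same two lines as 0047_unstarted_raw_string_at_eof.txt above.
    print!("{}", dump(&tokens, &errors));
}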
diff --git a/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs new file mode 100644 index 000000000..22e83649f --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.rs | |||
@@ -0,0 +1 @@ | |||
/* \ No newline at end of file | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt new file mode 100644 index 000000000..5d04cdaa4 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0051_unclosed_block_comment_at_eof.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 2 "/*" | ||
2 | > error[0; 2) token("/*") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs new file mode 100644 index 000000000..c45c2844d --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.rs | |||
@@ -0,0 +1 @@ | |||
/* comment | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt new file mode 100644 index 000000000..8c6b678e3 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0052_unclosed_block_comment_with_content.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 11 "/* comment\n" | ||
2 | > error[0; 11) token("/* comment\n") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs new file mode 100644 index 000000000..3fcfc9660 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.rs | |||
@@ -0,0 +1 @@ | |||
/* /* /* | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt new file mode 100644 index 000000000..250de34d9 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0053_unclosed_nested_block_comment_entirely.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 9 "/* /* /*\n" | ||
2 | > error[0; 9) token("/* /* /*\n") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs new file mode 100644 index 000000000..26c898f01 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.rs | |||
@@ -0,0 +1 @@ | |||
/** /*! /* comment */ */ | |||
diff --git a/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt new file mode 100644 index 000000000..f97f2a8c7 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0054_unclosed_nested_block_comment_partially.txt | |||
@@ -0,0 +1,2 @@ | |||
1 | COMMENT 25 "/** /*! /* comment */ */\n" | ||
2 | > error[0; 25) token("/** /*! /* comment */ */\n") msg(Missing trailing `*/` symbols to terminate the block comment) | ||
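Fixtures 0051 through 0054, together with the trimmed ok/0012_block_comment fixture further down, show that `/* ... */` comments nest: 0054 stays unterminated because it opens three comments and closes only two. A standalone depth-counting sketch of that rule, with a hypothetical helper name:

// Hypothetical helper: a block comment is terminated only once its `/*`/`*/`
// nesting depth returns to zero before end of input.
fn block_comment_is_terminated(text: &str) -> bool {
    let bytes = text.as_bytes();
    debug_assert!(bytes.starts_with(b"/*"));
    let mut depth = 0usize;
    let mut i = 0;
    while i + 1 < bytes.len() {
        match (bytes[i], bytes[i + 1]) {
            (b'/', b'*') => {
                depth += 1;
                i += 2;
            }
            (b'*', b'/') => {
                depth -= 1;
                i += 2;
                if depth == 0 {
                    return true;
                }
            }
            _ => i += 1,
        }
    }
    false
}

fn main() {
    assert!(!block_comment_is_terminated("/*"));                         // 0051
    assert!(!block_comment_is_terminated("/* /* /*\n"));                 // 0053
    assert!(!block_comment_is_terminated("/** /*! /* comment */ */\n")); // 0054: one closer short
    assert!(block_comment_is_terminated("/* /* */ */"));                 // nested but balanced (ok/0012)
}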
diff --git a/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs new file mode 100644 index 000000000..aa2a9fdca --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.rs | |||
@@ -0,0 +1,17 @@ | |||
1 | 0b | ||
2 | 0o | ||
3 | 0x | ||
4 | |||
5 | 0b_ | ||
6 | 0o_ | ||
7 | 0x_ | ||
8 | |||
9 | 0bnoDigit | ||
10 | 0onoDigit | ||
11 | 0xnoDigit | ||
12 | |||
13 | 0xG | ||
14 | 0xg | ||
15 | |||
16 | 0x_g | ||
17 | 0x_G | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt new file mode 100644 index 000000000..2fe5bd950 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0055_empty_int.txt | |||
@@ -0,0 +1,39 @@ | |||
1 | INT_NUMBER 2 "0b" | ||
2 | WHITESPACE 1 "\n" | ||
3 | INT_NUMBER 2 "0o" | ||
4 | WHITESPACE 1 "\n" | ||
5 | INT_NUMBER 2 "0x" | ||
6 | WHITESPACE 2 "\n\n" | ||
7 | INT_NUMBER 3 "0b_" | ||
8 | WHITESPACE 1 "\n" | ||
9 | INT_NUMBER 3 "0o_" | ||
10 | WHITESPACE 1 "\n" | ||
11 | INT_NUMBER 3 "0x_" | ||
12 | WHITESPACE 2 "\n\n" | ||
13 | INT_NUMBER 9 "0bnoDigit" | ||
14 | WHITESPACE 1 "\n" | ||
15 | INT_NUMBER 9 "0onoDigit" | ||
16 | WHITESPACE 1 "\n" | ||
17 | INT_NUMBER 9 "0xnoDigit" | ||
18 | WHITESPACE 2 "\n\n" | ||
19 | INT_NUMBER 3 "0xG" | ||
20 | WHITESPACE 1 "\n" | ||
21 | INT_NUMBER 3 "0xg" | ||
22 | WHITESPACE 2 "\n\n" | ||
23 | INT_NUMBER 4 "0x_g" | ||
24 | WHITESPACE 1 "\n" | ||
25 | INT_NUMBER 4 "0x_G" | ||
26 | WHITESPACE 1 "\n" | ||
27 | > error[0; 2) token("0b") msg(Missing digits after the integer base prefix) | ||
28 | > error[3; 5) token("0o") msg(Missing digits after the integer base prefix) | ||
29 | > error[6; 8) token("0x") msg(Missing digits after the integer base prefix) | ||
30 | > error[10; 13) token("0b_") msg(Missing digits after the integer base prefix) | ||
31 | > error[14; 17) token("0o_") msg(Missing digits after the integer base prefix) | ||
32 | > error[18; 21) token("0x_") msg(Missing digits after the integer base prefix) | ||
33 | > error[23; 32) token("0bnoDigit") msg(Missing digits after the integer base prefix) | ||
34 | > error[33; 42) token("0onoDigit") msg(Missing digits after the integer base prefix) | ||
35 | > error[43; 52) token("0xnoDigit") msg(Missing digits after the integer base prefix) | ||
36 | > error[54; 57) token("0xG") msg(Missing digits after the integer base prefix) | ||
37 | > error[58; 61) token("0xg") msg(Missing digits after the integer base prefix) | ||
38 | > error[63; 67) token("0x_g") msg(Missing digits after the integer base prefix) | ||
39 | > error[68; 72) token("0x_G") msg(Missing digits after the integer base prefix) | ||
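0055_empty_int pins down the "Missing digits after the integer base prefix" diagnostic. As far as the fixture shows, the rule is: after 0b/0o/0x, underscores may follow, but the next character must be a digit of that base; anything else (noDigit, G, g) is treated as a suffix and the whole literal is flagged. The sketch below implements that inferred rule and is explicitly an assumption drawn from the fixture, not the real implementation.

// Hypothetical check derived from 0055_empty_int.
fn has_digits_after_prefix(literal: &str) -> bool {
    let (radix, rest) = if let Some(r) = literal.strip_prefix("0b") {
        (2, r)
    } else if let Some(r) = literal.strip_prefix("0o") {
        (8, r)
    } else if let Some(r) = literal.strip_prefix("0x") {
        (16, r)
    } else {
        return true; // no base prefix, nothing to check
    };
    // Skip separating underscores, then require one digit valid in the base.
    rest.chars().find(|&c| c != '_').map_or(false, |c| c.is_digit(radix))
}

fn main() {
    for bad in ["0b", "0o_", "0x_", "0bnoDigit", "0xG", "0x_g"] {
        assert!(!has_digits_after_prefix(bad), "fixture flags {}", bad);
    }
    for good in ["0b1790", "0o1790", "0x1790aAbBcCdDeEfF"] {
        assert!(has_digits_after_prefix(good), "fixture accepts {}", good);
    }
}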
diff --git a/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs new file mode 100644 index 000000000..286584c88 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.rs | |||
@@ -0,0 +1,22 @@ | |||
1 | 0e | ||
2 | 0E | ||
3 | |||
4 | 42e+ | ||
5 | 42e- | ||
6 | 42E+ | ||
7 | 42E- | ||
8 | |||
9 | 42.e+ | ||
10 | 42.e- | ||
11 | 42.E+ | ||
12 | 42.E- | ||
13 | |||
14 | 42.2e+ | ||
15 | 42.2e- | ||
16 | 42.2E+ | ||
17 | 42.2E- | ||
18 | |||
19 | 42.2e+f32 | ||
20 | 42.2e-f32 | ||
21 | 42.2E+f32 | ||
22 | 42.2E-f32 | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt new file mode 100644 index 000000000..ab35e20a5 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0056_empty_exponent.txt | |||
@@ -0,0 +1,62 @@ | |||
1 | FLOAT_NUMBER 2 "0e" | ||
2 | WHITESPACE 1 "\n" | ||
3 | FLOAT_NUMBER 2 "0E" | ||
4 | WHITESPACE 2 "\n\n" | ||
5 | FLOAT_NUMBER 4 "42e+" | ||
6 | WHITESPACE 1 "\n" | ||
7 | FLOAT_NUMBER 4 "42e-" | ||
8 | WHITESPACE 1 "\n" | ||
9 | FLOAT_NUMBER 4 "42E+" | ||
10 | WHITESPACE 1 "\n" | ||
11 | FLOAT_NUMBER 4 "42E-" | ||
12 | WHITESPACE 2 "\n\n" | ||
13 | INT_NUMBER 2 "42" | ||
14 | DOT 1 "." | ||
15 | IDENT 1 "e" | ||
16 | PLUS 1 "+" | ||
17 | WHITESPACE 1 "\n" | ||
18 | INT_NUMBER 2 "42" | ||
19 | DOT 1 "." | ||
20 | IDENT 1 "e" | ||
21 | MINUS 1 "-" | ||
22 | WHITESPACE 1 "\n" | ||
23 | INT_NUMBER 2 "42" | ||
24 | DOT 1 "." | ||
25 | IDENT 1 "E" | ||
26 | PLUS 1 "+" | ||
27 | WHITESPACE 1 "\n" | ||
28 | INT_NUMBER 2 "42" | ||
29 | DOT 1 "." | ||
30 | IDENT 1 "E" | ||
31 | MINUS 1 "-" | ||
32 | WHITESPACE 2 "\n\n" | ||
33 | FLOAT_NUMBER 6 "42.2e+" | ||
34 | WHITESPACE 1 "\n" | ||
35 | FLOAT_NUMBER 6 "42.2e-" | ||
36 | WHITESPACE 1 "\n" | ||
37 | FLOAT_NUMBER 6 "42.2E+" | ||
38 | WHITESPACE 1 "\n" | ||
39 | FLOAT_NUMBER 6 "42.2E-" | ||
40 | WHITESPACE 2 "\n\n" | ||
41 | FLOAT_NUMBER 9 "42.2e+f32" | ||
42 | WHITESPACE 1 "\n" | ||
43 | FLOAT_NUMBER 9 "42.2e-f32" | ||
44 | WHITESPACE 1 "\n" | ||
45 | FLOAT_NUMBER 9 "42.2E+f32" | ||
46 | WHITESPACE 1 "\n" | ||
47 | FLOAT_NUMBER 9 "42.2E-f32" | ||
48 | WHITESPACE 1 "\n" | ||
49 | > error[0; 2) token("0e") msg(Missing digits after the exponent symbol) | ||
50 | > error[3; 5) token("0E") msg(Missing digits after the exponent symbol) | ||
51 | > error[7; 11) token("42e+") msg(Missing digits after the exponent symbol) | ||
52 | > error[12; 16) token("42e-") msg(Missing digits after the exponent symbol) | ||
53 | > error[17; 21) token("42E+") msg(Missing digits after the exponent symbol) | ||
54 | > error[22; 26) token("42E-") msg(Missing digits after the exponent symbol) | ||
55 | > error[53; 59) token("42.2e+") msg(Missing digits after the exponent symbol) | ||
56 | > error[60; 66) token("42.2e-") msg(Missing digits after the exponent symbol) | ||
57 | > error[67; 73) token("42.2E+") msg(Missing digits after the exponent symbol) | ||
58 | > error[74; 80) token("42.2E-") msg(Missing digits after the exponent symbol) | ||
59 | > error[82; 91) token("42.2e+f32") msg(Missing digits after the exponent symbol) | ||
60 | > error[92; 101) token("42.2e-f32") msg(Missing digits after the exponent symbol) | ||
61 | > error[102; 111) token("42.2E+f32") msg(Missing digits after the exponent symbol) | ||
62 | > error[112; 121) token("42.2E-f32") msg(Missing digits after the exponent symbol) | ||
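0056_empty_exponent covers the "Missing digits after the exponent symbol" diagnostic: once e/E starts an exponent, an optional sign may follow, but at least one digit must appear before any suffix such as f32. (Note also that 42.e+ is not lexed as a float at all; it splits into INT_NUMBER, DOT, IDENT, PLUS.) A hypothetical sketch of the digit check:

// Hypothetical helper for float literals only, mirroring 0056.
fn exponent_has_digits(float_literal: &str) -> bool {
    let idx = match float_literal.find(|c| c == 'e' || c == 'E') {
        Some(i) => i,
        None => return true, // no exponent part at all
    };
    let after = &float_literal[idx + 1..];
    // Drop an optional sign, then require at least one decimal digit.
    let digits = after
        .strip_prefix('+')
        .or_else(|| after.strip_prefix('-'))
        .unwrap_or(after);
    digits.starts_with(|c: char| c.is_ascii_digit())
}

fn main() {
    for bad in ["0e", "42e+", "42.2E-", "42.2e+f32"] {
        assert!(!exponent_has_digits(bad), "fixture flags {}", bad);
    }
    for good in ["0e1279", "0.0E-2", "0___0.10000____0000e+111__"] {
        assert!(exponent_has_digits(good), "fixture accepts {}", good);
    }
}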
diff --git a/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs new file mode 100644 index 000000000..a7698a404 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.rs | |||
@@ -0,0 +1,2 @@ | |||
1 | '1 | ||
2 | '1lifetime | ||
diff --git a/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt new file mode 100644 index 000000000..89b38bfac --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/err/0057_lifetime_strarts_with_a_number.txt | |||
@@ -0,0 +1,6 @@ | |||
1 | LIFETIME 2 "\'1" | ||
2 | WHITESPACE 1 "\n" | ||
3 | LIFETIME 10 "\'1lifetime" | ||
4 | WHITESPACE 1 "\n" | ||
5 | > error[0; 2) token("\'1") msg(Lifetime name cannot start with a number) | ||
6 | > error[3; 13) token("\'1lifetime") msg(Lifetime name cannot start with a number) | ||
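0057 covers lifetimes whose name begins with a digit: the lexer still produces a LIFETIME token but flags it with "Lifetime name cannot start with a number". A one-function sketch of that check, using an illustrative helper name rather than the real API:

// Hypothetical helper: a lifetime token is `'` followed by a name, and that
// name must not start with a decimal digit.
fn lifetime_starts_with_number(lifetime_token: &str) -> bool {
    lifetime_token
        .strip_prefix('\'')
        .and_then(|name| name.chars().next())
        .map_or(false, |c| c.is_ascii_digit())
}

fn main() {
    assert!(lifetime_starts_with_number("'1"));         // flagged in the fixture
    assert!(lifetime_starts_with_number("'1lifetime")); // flagged in the fixture
    assert!(!lifetime_starts_with_number("'static"));
}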
diff --git a/crates/ra_syntax/test_data/lexer/0001_hello.rs b/crates/ra_syntax/test_data/lexer/ok/0001_hello.rs index 95d09f2b1..95d09f2b1 100644 --- a/crates/ra_syntax/test_data/lexer/0001_hello.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0001_hello.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0001_hello.txt b/crates/ra_syntax/test_data/lexer/ok/0001_hello.txt index 27a5940a9..27a5940a9 100644 --- a/crates/ra_syntax/test_data/lexer/0001_hello.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0001_hello.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0002_whitespace.rs b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.rs index 08fce1418..08fce1418 100644 --- a/crates/ra_syntax/test_data/lexer/0002_whitespace.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0002_whitespace.txt b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.txt index 01d260918..01d260918 100644 --- a/crates/ra_syntax/test_data/lexer/0002_whitespace.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0002_whitespace.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0003_ident.rs b/crates/ra_syntax/test_data/lexer/ok/0003_ident.rs index c05c9c009..c05c9c009 100644 --- a/crates/ra_syntax/test_data/lexer/0003_ident.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0003_ident.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0003_ident.txt b/crates/ra_syntax/test_data/lexer/ok/0003_ident.txt index 4a0d5c053..4a0d5c053 100644 --- a/crates/ra_syntax/test_data/lexer/0003_ident.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0003_ident.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0004_numbers.rs b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.rs index dc974b553..bc761c235 100644 --- a/crates/ra_syntax/test_data/lexer/0004_numbers.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.rs | |||
@@ -1,4 +1,4 @@ | |||
1 | 0 0b 0o 0x 00 0_ 0. 0e 0E 0z | 1 | 0 00 0_ 0. 0z |
2 | 01790 0b1790 0o1790 0x1790aAbBcCdDeEfF 001279 0_1279 0.1279 0e1279 0E1279 | 2 | 01790 0b1790 0o1790 0x1790aAbBcCdDeEfF 001279 0_1279 0.1279 0e1279 0E1279 |
3 | 0..2 | 3 | 0..2 |
4 | 0.foo() | 4 | 0.foo() |
@@ -6,4 +6,4 @@ | |||
6 | 0.e+1 | 6 | 0.e+1 |
7 | 0.0E-2 | 7 | 0.0E-2 |
8 | 0___0.10000____0000e+111__ | 8 | 0___0.10000____0000e+111__ |
9 | 1i64 92.0f32 11__s \ No newline at end of file | 9 | 1i64 92.0f32 11__s |
diff --git a/crates/ra_syntax/test_data/lexer/0004_numbers.txt b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.txt index 7bb89b8ae..e19fc5789 100644 --- a/crates/ra_syntax/test_data/lexer/0004_numbers.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0004_numbers.txt | |||
@@ -1,21 +1,11 @@ | |||
1 | INT_NUMBER 1 "0" | 1 | INT_NUMBER 1 "0" |
2 | WHITESPACE 1 " " | 2 | WHITESPACE 1 " " |
3 | INT_NUMBER 2 "0b" | ||
4 | WHITESPACE 1 " " | ||
5 | INT_NUMBER 2 "0o" | ||
6 | WHITESPACE 1 " " | ||
7 | INT_NUMBER 2 "0x" | ||
8 | WHITESPACE 1 " " | ||
9 | INT_NUMBER 2 "00" | 3 | INT_NUMBER 2 "00" |
10 | WHITESPACE 1 " " | 4 | WHITESPACE 1 " " |
11 | INT_NUMBER 2 "0_" | 5 | INT_NUMBER 2 "0_" |
12 | WHITESPACE 1 " " | 6 | WHITESPACE 1 " " |
13 | FLOAT_NUMBER 2 "0." | 7 | FLOAT_NUMBER 2 "0." |
14 | WHITESPACE 1 " " | 8 | WHITESPACE 1 " " |
15 | FLOAT_NUMBER 2 "0e" | ||
16 | WHITESPACE 1 " " | ||
17 | FLOAT_NUMBER 2 "0E" | ||
18 | WHITESPACE 1 " " | ||
19 | INT_NUMBER 2 "0z" | 9 | INT_NUMBER 2 "0z" |
20 | WHITESPACE 1 "\n" | 10 | WHITESPACE 1 "\n" |
21 | INT_NUMBER 5 "01790" | 11 | INT_NUMBER 5 "01790" |
@@ -64,3 +54,4 @@ WHITESPACE 1 " " | |||
64 | FLOAT_NUMBER 7 "92.0f32" | 54 | FLOAT_NUMBER 7 "92.0f32" |
65 | WHITESPACE 1 " " | 55 | WHITESPACE 1 " " |
66 | INT_NUMBER 5 "11__s" | 56 | INT_NUMBER 5 "11__s" |
57 | WHITESPACE 1 "\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0005_symbols.rs b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.rs index 487569b5a..487569b5a 100644 --- a/crates/ra_syntax/test_data/lexer/0005_symbols.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0005_symbols.txt b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.txt index 469a90e42..469a90e42 100644 --- a/crates/ra_syntax/test_data/lexer/0005_symbols.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0005_symbols.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0006_chars.rs b/crates/ra_syntax/test_data/lexer/ok/0006_chars.rs index 454ee0a5f..454ee0a5f 100644 --- a/crates/ra_syntax/test_data/lexer/0006_chars.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0006_chars.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0006_chars.txt b/crates/ra_syntax/test_data/lexer/ok/0006_chars.txt index 950954fbc..950954fbc 100644 --- a/crates/ra_syntax/test_data/lexer/0006_chars.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0006_chars.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0007_lifetimes.rs b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.rs index b764f1dce..b764f1dce 100644 --- a/crates/ra_syntax/test_data/lexer/0007_lifetimes.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0007_lifetimes.txt b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.txt index 005c29100..005c29100 100644 --- a/crates/ra_syntax/test_data/lexer/0007_lifetimes.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0007_lifetimes.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0008_byte_strings.rs b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.rs index b54930f5e..b54930f5e 100644 --- a/crates/ra_syntax/test_data/lexer/0008_byte_strings.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0008_byte_strings.txt b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.txt index bc03b51a8..bc03b51a8 100644 --- a/crates/ra_syntax/test_data/lexer/0008_byte_strings.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0008_byte_strings.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0009_strings.rs b/crates/ra_syntax/test_data/lexer/ok/0009_strings.rs index 4ddb5bffc..4ddb5bffc 100644 --- a/crates/ra_syntax/test_data/lexer/0009_strings.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0009_strings.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0009_strings.txt b/crates/ra_syntax/test_data/lexer/ok/0009_strings.txt index 4cb4d711d..4cb4d711d 100644 --- a/crates/ra_syntax/test_data/lexer/0009_strings.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0009_strings.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs new file mode 100644 index 000000000..4b6653f9c --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.rs | |||
@@ -0,0 +1,12 @@ | |||
1 | #!/usr/bin/env bash | ||
2 | // hello | ||
3 | //! World | ||
4 | //!! Inner line doc | ||
5 | /// Outer line doc | ||
6 | //// Just a comment | ||
7 | |||
8 | // | ||
9 | //! | ||
10 | //!! | ||
11 | /// | ||
12 | //// | ||
diff --git a/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt new file mode 100644 index 000000000..98a3818c0 --- /dev/null +++ b/crates/ra_syntax/test_data/lexer/ok/0010_single_line_comments.txt | |||
@@ -0,0 +1,22 @@ | |||
1 | SHEBANG 19 "#!/usr/bin/env bash" | ||
2 | WHITESPACE 1 "\n" | ||
3 | COMMENT 8 "// hello" | ||
4 | WHITESPACE 1 "\n" | ||
5 | COMMENT 9 "//! World" | ||
6 | WHITESPACE 1 "\n" | ||
7 | COMMENT 19 "//!! Inner line doc" | ||
8 | WHITESPACE 1 "\n" | ||
9 | COMMENT 18 "/// Outer line doc" | ||
10 | WHITESPACE 1 "\n" | ||
11 | COMMENT 19 "//// Just a comment" | ||
12 | WHITESPACE 2 "\n\n" | ||
13 | COMMENT 2 "//" | ||
14 | WHITESPACE 1 "\n" | ||
15 | COMMENT 3 "//!" | ||
16 | WHITESPACE 1 "\n" | ||
17 | COMMENT 4 "//!!" | ||
18 | WHITESPACE 1 "\n" | ||
19 | COMMENT 3 "///" | ||
20 | WHITESPACE 1 "\n" | ||
21 | COMMENT 4 "////" | ||
22 | WHITESPACE 1 "\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0011_keywords.rs b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.rs index 1e91bff4e..1e91bff4e 100644 --- a/crates/ra_syntax/test_data/lexer/0011_keywords.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0011_keywords.txt b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.txt index 22c00eefb..22c00eefb 100644 --- a/crates/ra_syntax/test_data/lexer/0011_keywords.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0011_keywords.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/00012_block_comment.rs b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.rs index 708aac197..b880a59d9 100644 --- a/crates/ra_syntax/test_data/lexer/00012_block_comment.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.rs | |||
@@ -1,4 +1,3 @@ | |||
1 | /* */ | 1 | /* */ |
2 | /**/ | 2 | /**/ |
3 | /* /* */ */ | 3 | /* /* */ */ |
4 | /* | ||
diff --git a/crates/ra_syntax/test_data/lexer/00012_block_comment.txt b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.txt index 9958b2518..2618e287e 100644 --- a/crates/ra_syntax/test_data/lexer/00012_block_comment.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0012_block_comment.txt | |||
@@ -4,4 +4,3 @@ COMMENT 4 "/**/" | |||
4 | WHITESPACE 1 "\n" | 4 | WHITESPACE 1 "\n" |
5 | COMMENT 11 "/* /* */ */" | 5 | COMMENT 11 "/* /* */ */" |
6 | WHITESPACE 1 "\n" | 6 | WHITESPACE 1 "\n" |
7 | COMMENT 3 "/*\n" | ||
diff --git a/crates/ra_syntax/test_data/lexer/0013_raw_strings.rs b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.rs index e5ed0b693..e5ed0b693 100644 --- a/crates/ra_syntax/test_data/lexer/0013_raw_strings.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0013_raw_strings.txt b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.txt index 9cf0957d1..9cf0957d1 100644 --- a/crates/ra_syntax/test_data/lexer/0013_raw_strings.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0013_raw_strings.txt | |||
diff --git a/crates/ra_syntax/test_data/lexer/0016_raw_ident.rs b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.rs index b40a1b6a2..b40a1b6a2 100644 --- a/crates/ra_syntax/test_data/lexer/0016_raw_ident.rs +++ b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.rs | |||
diff --git a/crates/ra_syntax/test_data/lexer/0016_raw_ident.txt b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.txt index 484689693..484689693 100644 --- a/crates/ra_syntax/test_data/lexer/0016_raw_ident.txt +++ b/crates/ra_syntax/test_data/lexer/ok/0014_raw_ident.txt | |||