author     Aleksey Kladov <[email protected]>    2019-05-28 16:46:11 +0100
committer  Aleksey Kladov <[email protected]>    2019-05-28 16:46:11 +0100
commit     61e1474ab35deb7d54cc2f5d710b901f200b6467 (patch)
tree       158afa2f165a70fdcb5870c940cc9653854e59fa
parent     c8bcfe6a05d82e151d459bcd2bd8a7b2742f7a66 (diff)
fix typos in mbe tests
29 files changed, 92 insertions(+), 91 deletions(-)
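Every hunk below follows the same pattern: call sites that previously treated `db.parse(file_id)` or `SourceFile::parse(text)` as a bare `TreeArc<SourceFile>` now receive a `Parse` value and pick out the tree (`.tree`), the syntax errors (`.errors`), or an error-checked tree (`.ok()`). As orientation only, here is a minimal sketch of the shape such a `Parse` type would need for these call sites to compile; it is inferred from the diff, not the actual `ra_syntax` definition, and the stand-in types and the `ok()` signature are assumptions.

```rust
use std::sync::Arc;

// Stand-ins for the real ra_syntax types (hypothetical, for illustration only).
type SourceFile = ();
type SyntaxError = String;

/// Assumed shape of the `Parse` value returned by `db.parse(..)` / `SourceFile::parse(..)`.
/// The real type also appears to expose `reparse(&AtomTextEdit) -> Parse`
/// (see completion_context.rs below); that is not modeled here.
pub struct Parse {
    pub tree: Arc<SourceFile>,    // used as `db.parse(file_id).tree`
    pub errors: Vec<SyntaxError>, // used as `parse.errors` in diagnostics.rs
}

impl Parse {
    // Used as `SourceFile::parse(text).ok().unwrap()`: yields the tree only if
    // parsing produced no syntax errors (assumed signature).
    pub fn ok(self) -> Result<Arc<SourceFile>, Vec<SyntaxError>> {
        if self.errors.is_empty() {
            Ok(self.tree)
        } else {
            Err(self.errors)
        }
    }
}
```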
diff --git a/crates/ra_assists/src/assist_ctx.rs b/crates/ra_assists/src/assist_ctx.rs
index e744e82d0..1d58d9e71 100644
--- a/crates/ra_assists/src/assist_ctx.rs
+++ b/crates/ra_assists/src/assist_ctx.rs
@@ -71,7 +71,7 @@ impl<'a, DB: HirDatabase> AssistCtx<'a, DB> {
71 | where | 71 | where |
72 | F: FnOnce(AssistCtx<DB>) -> T, | 72 | F: FnOnce(AssistCtx<DB>) -> T, |
73 | { | 73 | { |
74 | let source_file = &db.parse(frange.file_id); | 74 | let source_file = &db.parse(frange.file_id).tree; |
75 | let assist = | 75 | let assist = |
76 | if should_compute_edit { Assist::Resolved(vec![]) } else { Assist::Unresolved(vec![]) }; | 76 | if should_compute_edit { Assist::Resolved(vec![]) } else { Assist::Unresolved(vec![]) }; |
77 | 77 | ||
diff --git a/crates/ra_assists/src/ast_editor.rs b/crates/ra_assists/src/ast_editor.rs
index 9afcac01a..cabb3d862 100644
--- a/crates/ra_assists/src/ast_editor.rs
+++ b/crates/ra_assists/src/ast_editor.rs
@@ -283,7 +283,7 @@ impl AstBuilder<ast::NameRef> {
283 | } | 283 | } |
284 | 284 | ||
285 | fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> { | 285 | fn ast_node_from_file_text<N: AstNode>(text: &str) -> TreeArc<N> { |
286 | let file = SourceFile::parse(text); | 286 | let file = SourceFile::parse(text).tree; |
287 | let res = file.syntax().descendants().find_map(N::cast).unwrap().to_owned(); | 287 | let res = file.syntax().descendants().find_map(N::cast).unwrap().to_owned(); |
288 | res | 288 | res |
289 | } | 289 | } |
@@ -292,7 +292,7 @@ mod tokens {
292 | use once_cell::sync::Lazy; | 292 | use once_cell::sync::Lazy; |
293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T}; | 293 | use ra_syntax::{AstNode, SourceFile, TreeArc, SyntaxToken, SyntaxKind::*, T}; |
294 | 294 | ||
295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;")); | 295 | static SOURCE_FILE: Lazy<TreeArc<SourceFile>> = Lazy::new(|| SourceFile::parse(",\n; ;").tree); |
296 | 296 | ||
297 | pub(crate) fn comma() -> SyntaxToken<'static> { | 297 | pub(crate) fn comma() -> SyntaxToken<'static> { |
298 | SOURCE_FILE | 298 | SOURCE_FILE |
@@ -326,7 +326,7 @@ mod tokens {
326 | 326 | ||
327 | impl WsBuilder { | 327 | impl WsBuilder { |
328 | pub(crate) fn new(text: &str) -> WsBuilder { | 328 | pub(crate) fn new(text: &str) -> WsBuilder { |
329 | WsBuilder(SourceFile::parse(text)) | 329 | WsBuilder(SourceFile::parse(text).ok().unwrap()) |
330 | } | 330 | } |
331 | pub(crate) fn ws(&self) -> SyntaxToken<'_> { | 331 | pub(crate) fn ws(&self) -> SyntaxToken<'_> { |
332 | self.0.syntax().first_child_or_token().unwrap().as_token().unwrap() | 332 | self.0.syntax().first_child_or_token().unwrap().as_token().unwrap() |
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs
index 84a1564ce..c9ca13bbc 100644
--- a/crates/ra_cli/src/main.rs
+++ b/crates/ra_cli/src/main.rs
@@ -34,7 +34,7 @@ fn main() -> Result<()> {
34 | if !matches.is_present("no-dump") { | 34 | if !matches.is_present("no-dump") { |
35 | println!("{}", file.syntax().debug_dump()); | 35 | println!("{}", file.syntax().debug_dump()); |
36 | } | 36 | } |
37 | ::std::mem::forget(file); | 37 | std::mem::forget(file); |
38 | } | 38 | } |
39 | ("symbols", _) => { | 39 | ("symbols", _) => { |
40 | let file = file()?; | 40 | let file = file()?; |
@@ -60,11 +60,11 @@ fn main() -> Result<()> {
60 | 60 | ||
61 | fn file() -> Result<TreeArc<SourceFile>> { | 61 | fn file() -> Result<TreeArc<SourceFile>> { |
62 | let text = read_stdin()?; | 62 | let text = read_stdin()?; |
63 | Ok(SourceFile::parse(&text)) | 63 | Ok(SourceFile::parse(&text).tree) |
64 | } | 64 | } |
65 | 65 | ||
66 | fn read_stdin() -> Result<String> { | 66 | fn read_stdin() -> Result<String> { |
67 | let mut buff = String::new(); | 67 | let mut buff = String::new(); |
68 | ::std::io::stdin().read_to_string(&mut buff)?; | 68 | std::io::stdin().read_to_string(&mut buff)?; |
69 | Ok(buff) | 69 | Ok(buff) |
70 | } | 70 | } |
diff --git a/crates/ra_ide_api/src/call_info.rs b/crates/ra_ide_api/src/call_info.rs
index 4413aec73..5d43282fd 100644
--- a/crates/ra_ide_api/src/call_info.rs
+++ b/crates/ra_ide_api/src/call_info.rs
@@ -10,7 +10,7 @@ use crate::{FilePosition, CallInfo, FunctionSignature, db::RootDatabase};
10 | 10 | ||
11 | /// Computes parameter information for the given call expression. | 11 | /// Computes parameter information for the given call expression. |
12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { | 12 | pub(crate) fn call_info(db: &RootDatabase, position: FilePosition) -> Option<CallInfo> { |
13 | let file = db.parse(position.file_id); | 13 | let file = db.parse(position.file_id).tree; |
14 | let syntax = file.syntax(); | 14 | let syntax = file.syntax(); |
15 | 15 | ||
16 | // Find the calling expression and it's NameRef | 16 | // Find the calling expression and it's NameRef |
diff --git a/crates/ra_ide_api/src/change.rs b/crates/ra_ide_api/src/change.rs
index 0e64abdbd..4b597afc0 100644
--- a/crates/ra_ide_api/src/change.rs
+++ b/crates/ra_ide_api/src/change.rs
@@ -138,7 +138,7 @@ impl LibraryData {
138 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, | 138 | files: Vec<(FileId, RelativePathBuf, Arc<String>)>, |
139 | ) -> LibraryData { | 139 | ) -> LibraryData { |
140 | let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { | 140 | let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, _, text)| { |
141 | let file = SourceFile::parse(text); | 141 | let file = SourceFile::parse(text).tree; |
142 | (*file_id, file) | 142 | (*file_id, file) |
143 | })); | 143 | })); |
144 | let mut root_change = RootChange::default(); | 144 | let mut root_change = RootChange::default(); |
diff --git a/crates/ra_ide_api/src/completion.rs b/crates/ra_ide_api/src/completion.rs
index deff59cd3..3a75bbf92 100644
--- a/crates/ra_ide_api/src/completion.rs
+++ b/crates/ra_ide_api/src/completion.rs
@@ -51,8 +51,8 @@ pub use crate::completion::completion_item::{CompletionItem, CompletionItemKind,
51 | /// identifier prefix/fuzzy match should be done higher in the stack, together | 51 | /// identifier prefix/fuzzy match should be done higher in the stack, together |
52 | /// with ordering of completions (currently this is done by the client). | 52 | /// with ordering of completions (currently this is done by the client). |
53 | pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> { | 53 | pub(crate) fn completions(db: &db::RootDatabase, position: FilePosition) -> Option<Completions> { |
54 | let original_file = db.parse(position.file_id); | 54 | let original_parse = db.parse(position.file_id); |
55 | let ctx = CompletionContext::new(db, &original_file, position)?; | 55 | let ctx = CompletionContext::new(db, &original_parse, position)?; |
56 | 56 | ||
57 | let mut acc = Completions::default(); | 57 | let mut acc = Completions::default(); |
58 | 58 | ||
diff --git a/crates/ra_ide_api/src/completion/completion_context.rs b/crates/ra_ide_api/src/completion/completion_context.rs
index a8c8cc7b0..bda7d9bb2 100644
--- a/crates/ra_ide_api/src/completion/completion_context.rs
+++ b/crates/ra_ide_api/src/completion/completion_context.rs
@@ -1,6 +1,6 @@
1 | use ra_text_edit::AtomTextEdit; | 1 | use ra_text_edit::AtomTextEdit; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken, | 3 | AstNode, SyntaxNode, SourceFile, TextUnit, TextRange, SyntaxToken, Parse, |
4 | ast, | 4 | ast, |
5 | algo::{find_token_at_offset, find_covering_element, find_node_at_offset}, | 5 | algo::{find_token_at_offset, find_covering_element, find_node_at_offset}, |
6 | SyntaxKind::*, | 6 | SyntaxKind::*, |
@@ -43,11 +43,12 @@ pub(crate) struct CompletionContext<'a> {
43 | impl<'a> CompletionContext<'a> { | 43 | impl<'a> CompletionContext<'a> { |
44 | pub(super) fn new( | 44 | pub(super) fn new( |
45 | db: &'a db::RootDatabase, | 45 | db: &'a db::RootDatabase, |
46 | original_file: &'a SourceFile, | 46 | original_parse: &'a Parse, |
47 | position: FilePosition, | 47 | position: FilePosition, |
48 | ) -> Option<CompletionContext<'a>> { | 48 | ) -> Option<CompletionContext<'a>> { |
49 | let module = source_binder::module_from_position(db, position); | 49 | let module = source_binder::module_from_position(db, position); |
50 | let token = find_token_at_offset(original_file.syntax(), position.offset).left_biased()?; | 50 | let token = |
51 | find_token_at_offset(original_parse.tree.syntax(), position.offset).left_biased()?; | ||
51 | let analyzer = | 52 | let analyzer = |
52 | hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset)); | 53 | hir::SourceAnalyzer::new(db, position.file_id, token.parent(), Some(position.offset)); |
53 | let mut ctx = CompletionContext { | 54 | let mut ctx = CompletionContext { |
@@ -69,7 +70,7 @@ impl<'a> CompletionContext<'a> {
69 | dot_receiver: None, | 70 | dot_receiver: None, |
70 | is_call: false, | 71 | is_call: false, |
71 | }; | 72 | }; |
72 | ctx.fill(original_file, position.offset); | 73 | ctx.fill(&original_parse, position.offset); |
73 | Some(ctx) | 74 | Some(ctx) |
74 | } | 75 | } |
75 | 76 | ||
@@ -82,13 +83,13 @@ impl<'a> CompletionContext<'a> {
82 | } | 83 | } |
83 | } | 84 | } |
84 | 85 | ||
85 | fn fill(&mut self, original_file: &'a SourceFile, offset: TextUnit) { | 86 | fn fill(&mut self, original_parse: &'a Parse, offset: TextUnit) { |
86 | // Insert a fake ident to get a valid parse tree. We will use this file | 87 | // Insert a fake ident to get a valid parse tree. We will use this file |
87 | // to determine context, though the original_file will be used for | 88 | // to determine context, though the original_file will be used for |
88 | // actual completion. | 89 | // actual completion. |
89 | let file = { | 90 | let file = { |
90 | let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); | 91 | let edit = AtomTextEdit::insert(offset, "intellijRulezz".to_string()); |
91 | original_file.reparse(&edit) | 92 | original_parse.reparse(&edit).tree |
92 | }; | 93 | }; |
93 | 94 | ||
94 | // First, let's try to complete a reference to some declaration. | 95 | // First, let's try to complete a reference to some declaration. |
@@ -99,7 +100,7 @@ impl<'a> CompletionContext<'a> {
99 | self.is_param = true; | 100 | self.is_param = true; |
100 | return; | 101 | return; |
101 | } | 102 | } |
102 | self.classify_name_ref(original_file, name_ref); | 103 | self.classify_name_ref(&original_parse.tree, name_ref); |
103 | } | 104 | } |
104 | 105 | ||
105 | // Otherwise, see if this is a declaration. We can use heuristics to | 106 | // Otherwise, see if this is a declaration. We can use heuristics to |
diff --git a/crates/ra_ide_api/src/diagnostics.rs b/crates/ra_ide_api/src/diagnostics.rs
index 923008708..4cf2a0b70 100644
--- a/crates/ra_ide_api/src/diagnostics.rs
+++ b/crates/ra_ide_api/src/diagnostics.rs
@@ -4,7 +4,7 @@ use itertools::Itertools;
4 | use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; | 4 | use hir::{source_binder, diagnostics::{Diagnostic as _, DiagnosticSink}}; |
5 | use ra_db::SourceDatabase; | 5 | use ra_db::SourceDatabase; |
6 | use ra_syntax::{ | 6 | use ra_syntax::{ |
7 | T, Location, SourceFile, TextRange, SyntaxNode, | 7 | T, Location, TextRange, SyntaxNode, |
8 | ast::{self, AstNode, NamedFieldList, NamedField}, | 8 | ast::{self, AstNode, NamedFieldList, NamedField}, |
9 | }; | 9 | }; |
10 | use ra_assists::ast_editor::{AstEditor, AstBuilder}; | 10 | use ra_assists::ast_editor::{AstEditor, AstBuilder}; |
@@ -21,10 +21,17 @@ pub enum Severity {
21 | 21 | ||
22 | pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> { | 22 | pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic> { |
23 | let _p = profile("diagnostics"); | 23 | let _p = profile("diagnostics"); |
24 | let source_file = db.parse(file_id); | 24 | let parse = db.parse(file_id); |
25 | let mut res = Vec::new(); | 25 | let mut res = Vec::new(); |
26 | 26 | ||
27 | syntax_errors(&mut res, &source_file); | 27 | res.extend(parse.errors.iter().map(|err| Diagnostic { |
28 | range: location_to_range(err.location()), | ||
29 | message: format!("Syntax Error: {}", err), | ||
30 | severity: Severity::Error, | ||
31 | fix: None, | ||
32 | })); | ||
33 | |||
34 | let source_file = parse.tree; | ||
28 | 35 | ||
29 | for node in source_file.syntax().descendants() { | 36 | for node in source_file.syntax().descendants() { |
30 | check_unnecessary_braces_in_use_statement(&mut res, file_id, node); | 37 | check_unnecessary_braces_in_use_statement(&mut res, file_id, node); |
@@ -51,8 +58,9 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
51 | }) | 58 | }) |
52 | }) | 59 | }) |
53 | .on::<hir::diagnostics::MissingFields, _>(|d| { | 60 | .on::<hir::diagnostics::MissingFields, _>(|d| { |
61 | //TODO: commment | ||
54 | let file_id = d.file().original_file(db); | 62 | let file_id = d.file().original_file(db); |
55 | let source_file = db.parse(file_id); | 63 | let source_file = db.parse(file_id).tree; |
56 | let syntax_node = d.syntax_node_ptr(); | 64 | let syntax_node = d.syntax_node_ptr(); |
57 | let node = NamedFieldList::cast(syntax_node.to_node(source_file.syntax())).unwrap(); | 65 | let node = NamedFieldList::cast(syntax_node.to_node(source_file.syntax())).unwrap(); |
58 | let mut ast_editor = AstEditor::new(node); | 66 | let mut ast_editor = AstEditor::new(node); |
@@ -77,21 +85,11 @@ pub(crate) fn diagnostics(db: &RootDatabase, file_id: FileId) -> Vec<Diagnostic>
77 | drop(sink); | 85 | drop(sink); |
78 | res.into_inner() | 86 | res.into_inner() |
79 | } | 87 | } |
80 | 88 | fn location_to_range(location: Location) -> TextRange { | |
81 | fn syntax_errors(acc: &mut Vec<Diagnostic>, source_file: &SourceFile) { | 89 | match location { |
82 | fn location_to_range(location: Location) -> TextRange { | 90 | Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), |
83 | match location { | 91 | Location::Range(range) => range, |
84 | Location::Offset(offset) => TextRange::offset_len(offset, 1.into()), | ||
85 | Location::Range(range) => range, | ||
86 | } | ||
87 | } | 92 | } |
88 | |||
89 | acc.extend(source_file.errors().into_iter().map(|err| Diagnostic { | ||
90 | range: location_to_range(err.location()), | ||
91 | message: format!("Syntax Error: {}", err), | ||
92 | severity: Severity::Error, | ||
93 | fix: None, | ||
94 | })); | ||
95 | } | 93 | } |
96 | 94 | ||
97 | fn check_unnecessary_braces_in_use_statement( | 95 | fn check_unnecessary_braces_in_use_statement( |
@@ -177,6 +175,7 @@ fn check_struct_shorthand_initialization(
177 | mod tests { | 175 | mod tests { |
178 | use test_utils::assert_eq_text; | 176 | use test_utils::assert_eq_text; |
179 | use insta::assert_debug_snapshot_matches; | 177 | use insta::assert_debug_snapshot_matches; |
178 | use ra_syntax::SourceFile; | ||
180 | 179 | ||
181 | use crate::mock_analysis::single_file; | 180 | use crate::mock_analysis::single_file; |
182 | 181 | ||
@@ -185,7 +184,7 @@ mod tests {
185 | type DiagnosticChecker = fn(&mut Vec<Diagnostic>, FileId, &SyntaxNode) -> Option<()>; | 184 | type DiagnosticChecker = fn(&mut Vec<Diagnostic>, FileId, &SyntaxNode) -> Option<()>; |
186 | 185 | ||
187 | fn check_not_applicable(code: &str, func: DiagnosticChecker) { | 186 | fn check_not_applicable(code: &str, func: DiagnosticChecker) { |
188 | let file = SourceFile::parse(code); | 187 | let file = SourceFile::parse(code).tree; |
189 | let mut diagnostics = Vec::new(); | 188 | let mut diagnostics = Vec::new(); |
190 | for node in file.syntax().descendants() { | 189 | for node in file.syntax().descendants() { |
191 | func(&mut diagnostics, FileId(0), node); | 190 | func(&mut diagnostics, FileId(0), node); |
@@ -194,7 +193,7 @@ mod tests {
194 | } | 193 | } |
195 | 194 | ||
196 | fn check_apply(before: &str, after: &str, func: DiagnosticChecker) { | 195 | fn check_apply(before: &str, after: &str, func: DiagnosticChecker) { |
197 | let file = SourceFile::parse(before); | 196 | let file = SourceFile::parse(before).tree; |
198 | let mut diagnostics = Vec::new(); | 197 | let mut diagnostics = Vec::new(); |
199 | for node in file.syntax().descendants() { | 198 | for node in file.syntax().descendants() { |
200 | func(&mut diagnostics, FileId(0), node); | 199 | func(&mut diagnostics, FileId(0), node); |
diff --git a/crates/ra_ide_api/src/display/navigation_target.rs b/crates/ra_ide_api/src/display/navigation_target.rs
index 7f81483f7..ae729614f 100644
--- a/crates/ra_ide_api/src/display/navigation_target.rs
+++ b/crates/ra_ide_api/src/display/navigation_target.rs
@@ -79,7 +79,7 @@ impl NavigationTarget {
79 | file_id: FileId, | 79 | file_id: FileId, |
80 | pat: AstPtr<ast::Pat>, | 80 | pat: AstPtr<ast::Pat>, |
81 | ) -> NavigationTarget { | 81 | ) -> NavigationTarget { |
82 | let file = db.parse(file_id); | 82 | let file = db.parse(file_id).tree; |
83 | let (name, full_range) = match pat.to_node(file.syntax()).kind() { | 83 | let (name, full_range) = match pat.to_node(file.syntax()).kind() { |
84 | ast::PatKind::BindPat(pat) => return NavigationTarget::from_bind_pat(file_id, &pat), | 84 | ast::PatKind::BindPat(pat) => return NavigationTarget::from_bind_pat(file_id, &pat), |
85 | _ => ("_".into(), pat.syntax_node_ptr().range()), | 85 | _ => ("_".into(), pat.syntax_node_ptr().range()), |
@@ -290,7 +290,7 @@ impl NavigationTarget {
290 | } | 290 | } |
291 | 291 | ||
292 | pub(crate) fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> { | 292 | pub(crate) fn node(&self, db: &RootDatabase) -> Option<TreeArc<SyntaxNode>> { |
293 | let source_file = db.parse(self.file_id()); | 293 | let source_file = db.parse(self.file_id()).tree; |
294 | let source_file = source_file.syntax(); | 294 | let source_file = source_file.syntax(); |
295 | let node = source_file | 295 | let node = source_file |
296 | .descendants() | 296 | .descendants() |
diff --git a/crates/ra_ide_api/src/display/structure.rs b/crates/ra_ide_api/src/display/structure.rs
index ec2c9bbc6..24ab7b59c 100644
--- a/crates/ra_ide_api/src/display/structure.rs
+++ b/crates/ra_ide_api/src/display/structure.rs
@@ -183,7 +183,9 @@ fn obsolete() {}
183 | #[deprecated(note = "for awhile")] | 183 | #[deprecated(note = "for awhile")] |
184 | fn very_obsolete() {} | 184 | fn very_obsolete() {} |
185 | "#, | 185 | "#, |
186 | ); | 186 | ) |
187 | .ok() | ||
188 | .unwrap(); | ||
187 | let structure = file_structure(&file); | 189 | let structure = file_structure(&file); |
188 | assert_debug_snapshot_matches!("file_structure", structure); | 190 | assert_debug_snapshot_matches!("file_structure", structure); |
189 | } | 191 | } |
diff --git a/crates/ra_ide_api/src/extend_selection.rs b/crates/ra_ide_api/src/extend_selection.rs
index 4553faad0..00c445310 100644
--- a/crates/ra_ide_api/src/extend_selection.rs
+++ b/crates/ra_ide_api/src/extend_selection.rs
@@ -11,7 +11,7 @@ use crate::{FileRange, db::RootDatabase};
11 | 11 | ||
12 | // FIXME: restore macro support | 12 | // FIXME: restore macro support |
13 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { | 13 | pub(crate) fn extend_selection(db: &RootDatabase, frange: FileRange) -> TextRange { |
14 | let source_file = db.parse(frange.file_id); | 14 | let source_file = db.parse(frange.file_id).tree; |
15 | try_extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) | 15 | try_extend_selection(source_file.syntax(), frange.range).unwrap_or(frange.range) |
16 | } | 16 | } |
17 | 17 | ||
@@ -212,7 +212,7 @@ mod tests {
212 | 212 | ||
213 | fn do_check(before: &str, afters: &[&str]) { | 213 | fn do_check(before: &str, afters: &[&str]) { |
214 | let (cursor, before) = extract_offset(before); | 214 | let (cursor, before) = extract_offset(before); |
215 | let file = SourceFile::parse(&before); | 215 | let file = SourceFile::parse(&before).tree; |
216 | let mut range = TextRange::offset_len(cursor, 0.into()); | 216 | let mut range = TextRange::offset_len(cursor, 0.into()); |
217 | for &after in afters { | 217 | for &after in afters { |
218 | range = try_extend_selection(file.syntax(), range).unwrap(); | 218 | range = try_extend_selection(file.syntax(), range).unwrap(); |
diff --git a/crates/ra_ide_api/src/folding_ranges.rs b/crates/ra_ide_api/src/folding_ranges.rs
index 6987fcc9e..b50bbee38 100644
--- a/crates/ra_ide_api/src/folding_ranges.rs
+++ b/crates/ra_ide_api/src/folding_ranges.rs
@@ -191,7 +191,7 @@ mod tests {
191 | 191 | ||
192 | fn do_check(text: &str, fold_kinds: &[FoldKind]) { | 192 | fn do_check(text: &str, fold_kinds: &[FoldKind]) { |
193 | let (ranges, text) = extract_ranges(text, "fold"); | 193 | let (ranges, text) = extract_ranges(text, "fold"); |
194 | let file = SourceFile::parse(&text); | 194 | let file = SourceFile::parse(&text).tree; |
195 | let folds = folding_ranges(&file); | 195 | let folds = folding_ranges(&file); |
196 | 196 | ||
197 | assert_eq!( | 197 | assert_eq!( |
diff --git a/crates/ra_ide_api/src/goto_definition.rs b/crates/ra_ide_api/src/goto_definition.rs
index 9c56f17f2..4f8554625 100644
--- a/crates/ra_ide_api/src/goto_definition.rs
+++ b/crates/ra_ide_api/src/goto_definition.rs
@@ -19,7 +19,7 @@ pub(crate) fn goto_definition(
19 | db: &RootDatabase, | 19 | db: &RootDatabase, |
20 | position: FilePosition, | 20 | position: FilePosition, |
21 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 21 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
22 | let file = db.parse(position.file_id); | 22 | let file = db.parse(position.file_id).tree; |
23 | let syntax = file.syntax(); | 23 | let syntax = file.syntax(); |
24 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { | 24 | if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, position.offset) { |
25 | let navs = reference_definition(db, position.file_id, name_ref).to_vec(); | 25 | let navs = reference_definition(db, position.file_id, name_ref).to_vec(); |
diff --git a/crates/ra_ide_api/src/goto_type_definition.rs b/crates/ra_ide_api/src/goto_type_definition.rs
index e456ec5d6..0f638b170 100644
--- a/crates/ra_ide_api/src/goto_type_definition.rs
+++ b/crates/ra_ide_api/src/goto_type_definition.rs
@@ -10,7 +10,7 @@ pub(crate) fn goto_type_definition(
10 | db: &RootDatabase, | 10 | db: &RootDatabase, |
11 | position: FilePosition, | 11 | position: FilePosition, |
12 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 12 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
13 | let file = db.parse(position.file_id); | 13 | let file = db.parse(position.file_id).tree; |
14 | 14 | ||
15 | let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| { | 15 | let node = find_token_at_offset(file.syntax(), position.offset).find_map(|token| { |
16 | token | 16 | token |
diff --git a/crates/ra_ide_api/src/hover.rs b/crates/ra_ide_api/src/hover.rs
index 6545a2581..a390dab65 100644
--- a/crates/ra_ide_api/src/hover.rs
+++ b/crates/ra_ide_api/src/hover.rs
@@ -68,7 +68,7 @@ impl HoverResult {
68 | } | 68 | } |
69 | 69 | ||
70 | pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { | 70 | pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeInfo<HoverResult>> { |
71 | let file = db.parse(position.file_id); | 71 | let file = db.parse(position.file_id).tree; |
72 | let mut res = HoverResult::new(); | 72 | let mut res = HoverResult::new(); |
73 | 73 | ||
74 | let mut range = None; | 74 | let mut range = None; |
@@ -120,7 +120,7 @@ pub(crate) fn hover(db: &RootDatabase, position: FilePosition) -> Option<RangeIn
120 | } | 120 | } |
121 | 121 | ||
122 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { | 122 | pub(crate) fn type_of(db: &RootDatabase, frange: FileRange) -> Option<String> { |
123 | let file = db.parse(frange.file_id); | 123 | let file = db.parse(frange.file_id).tree; |
124 | let syntax = file.syntax(); | 124 | let syntax = file.syntax(); |
125 | let leaf_node = find_covering_element(syntax, frange.range); | 125 | let leaf_node = find_covering_element(syntax, frange.range); |
126 | // if we picked identifier, expand to pattern/expression | 126 | // if we picked identifier, expand to pattern/expression |
diff --git a/crates/ra_ide_api/src/impls.rs b/crates/ra_ide_api/src/impls.rs
index ee9220a15..b80238d9e 100644
--- a/crates/ra_ide_api/src/impls.rs
+++ b/crates/ra_ide_api/src/impls.rs
@@ -11,7 +11,7 @@ pub(crate) fn goto_implementation(
11 | db: &RootDatabase, | 11 | db: &RootDatabase, |
12 | position: FilePosition, | 12 | position: FilePosition, |
13 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { | 13 | ) -> Option<RangeInfo<Vec<NavigationTarget>>> { |
14 | let file = db.parse(position.file_id); | 14 | let file = db.parse(position.file_id).tree; |
15 | let syntax = file.syntax(); | 15 | let syntax = file.syntax(); |
16 | 16 | ||
17 | let module = source_binder::module_from_position(db, position)?; | 17 | let module = source_binder::module_from_position(db, position)?; |
diff --git a/crates/ra_ide_api/src/join_lines.rs b/crates/ra_ide_api/src/join_lines.rs
index 4ca005466..3978e9635 100644
--- a/crates/ra_ide_api/src/join_lines.rs
+++ b/crates/ra_ide_api/src/join_lines.rs
@@ -506,7 +506,7 @@ fn foo() {
506 | 506 | ||
507 | fn check_join_lines_sel(before: &str, after: &str) { | 507 | fn check_join_lines_sel(before: &str, after: &str) { |
508 | let (sel, before) = extract_range(before); | 508 | let (sel, before) = extract_range(before); |
509 | let file = SourceFile::parse(&before); | 509 | let file = SourceFile::parse(&before).tree; |
510 | let result = join_lines(&file, sel); | 510 | let result = join_lines(&file, sel); |
511 | let actual = result.apply(&before); | 511 | let actual = result.apply(&before); |
512 | assert_eq_text!(after, &actual); | 512 | assert_eq_text!(after, &actual); |
diff --git a/crates/ra_ide_api/src/lib.rs b/crates/ra_ide_api/src/lib.rs
index 452407e8e..2fe46cd13 100644
--- a/crates/ra_ide_api/src/lib.rs
+++ b/crates/ra_ide_api/src/lib.rs
@@ -314,7 +314,7 @@ impl Analysis {
314 | 314 | ||
315 | /// Gets the syntax tree of the file. | 315 | /// Gets the syntax tree of the file. |
316 | pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> { | 316 | pub fn parse(&self, file_id: FileId) -> TreeArc<SourceFile> { |
317 | self.db.parse(file_id).clone() | 317 | self.db.parse(file_id).tree |
318 | } | 318 | } |
319 | 319 | ||
320 | /// Gets the file's `LineIndex`: data structure to convert between absolute | 320 | /// Gets the file's `LineIndex`: data structure to convert between absolute |
@@ -331,7 +331,7 @@ impl Analysis {
331 | /// Returns position of the matching brace (all types of braces are | 331 | /// Returns position of the matching brace (all types of braces are |
332 | /// supported). | 332 | /// supported). |
333 | pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> { | 333 | pub fn matching_brace(&self, position: FilePosition) -> Option<TextUnit> { |
334 | let file = self.db.parse(position.file_id); | 334 | let file = self.db.parse(position.file_id).tree; |
335 | matching_brace::matching_brace(&file, position.offset) | 335 | matching_brace::matching_brace(&file, position.offset) |
336 | } | 336 | } |
337 | 337 | ||
@@ -344,7 +344,7 @@ impl Analysis {
344 | /// Returns an edit to remove all newlines in the range, cleaning up minor | 344 | /// Returns an edit to remove all newlines in the range, cleaning up minor |
345 | /// stuff like trailing commas. | 345 | /// stuff like trailing commas. |
346 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { | 346 | pub fn join_lines(&self, frange: FileRange) -> SourceChange { |
347 | let file = self.db.parse(frange.file_id); | 347 | let file = self.db.parse(frange.file_id).tree; |
348 | let file_edit = SourceFileEdit { | 348 | let file_edit = SourceFileEdit { |
349 | file_id: frange.file_id, | 349 | file_id: frange.file_id, |
350 | edit: join_lines::join_lines(&file, frange.range), | 350 | edit: join_lines::join_lines(&file, frange.range), |
@@ -362,7 +362,7 @@ impl Analysis {
362 | /// this works when adding `let =`. | 362 | /// this works when adding `let =`. |
363 | // FIXME: use a snippet completion instead of this hack here. | 363 | // FIXME: use a snippet completion instead of this hack here. |
364 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { | 364 | pub fn on_eq_typed(&self, position: FilePosition) -> Option<SourceChange> { |
365 | let file = self.db.parse(position.file_id); | 365 | let file = self.db.parse(position.file_id).tree; |
366 | let edit = typing::on_eq_typed(&file, position.offset)?; | 366 | let edit = typing::on_eq_typed(&file, position.offset)?; |
367 | Some(SourceChange::source_file_edit( | 367 | Some(SourceChange::source_file_edit( |
368 | "add semicolon", | 368 | "add semicolon", |
@@ -378,13 +378,13 @@ impl Analysis {
378 | /// Returns a tree representation of symbols in the file. Useful to draw a | 378 | /// Returns a tree representation of symbols in the file. Useful to draw a |
379 | /// file outline. | 379 | /// file outline. |
380 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { | 380 | pub fn file_structure(&self, file_id: FileId) -> Vec<StructureNode> { |
381 | let file = self.db.parse(file_id); | 381 | let file = self.db.parse(file_id).tree; |
382 | file_structure(&file) | 382 | file_structure(&file) |
383 | } | 383 | } |
384 | 384 | ||
385 | /// Returns the set of folding ranges. | 385 | /// Returns the set of folding ranges. |
386 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { | 386 | pub fn folding_ranges(&self, file_id: FileId) -> Vec<Fold> { |
387 | let file = self.db.parse(file_id); | 387 | let file = self.db.parse(file_id).tree; |
388 | folding_ranges::folding_ranges(&file) | 388 | folding_ranges::folding_ranges(&file) |
389 | } | 389 | } |
390 | 390 | ||
diff --git a/crates/ra_ide_api/src/matching_brace.rs b/crates/ra_ide_api/src/matching_brace.rs
index eaa4b620c..7f3e65b46 100644
--- a/crates/ra_ide_api/src/matching_brace.rs
+++ b/crates/ra_ide_api/src/matching_brace.rs
@@ -31,7 +31,7 @@ mod tests {
31 | fn test_matching_brace() { | 31 | fn test_matching_brace() { |
32 | fn do_check(before: &str, after: &str) { | 32 | fn do_check(before: &str, after: &str) { |
33 | let (pos, before) = extract_offset(before); | 33 | let (pos, before) = extract_offset(before); |
34 | let file = SourceFile::parse(&before); | 34 | let file = SourceFile::parse(&before).tree; |
35 | let new_pos = match matching_brace(&file, pos) { | 35 | let new_pos = match matching_brace(&file, pos) { |
36 | None => pos, | 36 | None => pos, |
37 | Some(pos) => pos, | 37 | Some(pos) => pos, |
diff --git a/crates/ra_ide_api/src/references.rs b/crates/ra_ide_api/src/references.rs
index d5c2b08ca..a75042b76 100644
--- a/crates/ra_ide_api/src/references.rs
+++ b/crates/ra_ide_api/src/references.rs
@@ -60,7 +60,7 @@ pub(crate) fn find_all_refs(
60 | db: &RootDatabase, | 60 | db: &RootDatabase, |
61 | position: FilePosition, | 61 | position: FilePosition, |
62 | ) -> Option<ReferenceSearchResult> { | 62 | ) -> Option<ReferenceSearchResult> { |
63 | let file = db.parse(position.file_id); | 63 | let file = db.parse(position.file_id).tree; |
64 | let (binding, analyzer) = find_binding(db, &file, position)?; | 64 | let (binding, analyzer) = find_binding(db, &file, position)?; |
65 | let declaration = NavigationTarget::from_bind_pat(position.file_id, binding); | 65 | let declaration = NavigationTarget::from_bind_pat(position.file_id, binding); |
66 | 66 | ||
@@ -99,7 +99,7 @@ pub(crate) fn rename(
99 | position: FilePosition, | 99 | position: FilePosition, |
100 | new_name: &str, | 100 | new_name: &str, |
101 | ) -> Option<SourceChange> { | 101 | ) -> Option<SourceChange> { |
102 | let source_file = db.parse(position.file_id); | 102 | let source_file = db.parse(position.file_id).tree; |
103 | let syntax = source_file.syntax(); | 103 | let syntax = source_file.syntax(); |
104 | 104 | ||
105 | if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { | 105 | if let Some((ast_name, ast_module)) = find_name_and_module_at_offset(syntax, position) { |
diff --git a/crates/ra_ide_api/src/runnables.rs b/crates/ra_ide_api/src/runnables.rs
index 3969076a8..afe629d50 100644
--- a/crates/ra_ide_api/src/runnables.rs
+++ b/crates/ra_ide_api/src/runnables.rs
@@ -22,7 +22,7 @@ pub enum RunnableKind {
22 | } | 22 | } |
23 | 23 | ||
24 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { | 24 | pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> { |
25 | let source_file = db.parse(file_id); | 25 | let source_file = db.parse(file_id).tree; |
26 | source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() | 26 | source_file.syntax().descendants().filter_map(|i| runnable(db, file_id, i)).collect() |
27 | } | 27 | } |
28 | 28 | ||
diff --git a/crates/ra_ide_api/src/status.rs b/crates/ra_ide_api/src/status.rs
index d99a4e750..821106fea 100644
--- a/crates/ra_ide_api/src/status.rs
+++ b/crates/ra_ide_api/src/status.rs
@@ -4,7 +4,7 @@ use std::{
4 | sync::Arc, | 4 | sync::Arc, |
5 | }; | 5 | }; |
6 | 6 | ||
7 | use ra_syntax::{AstNode, TreeArc, SourceFile}; | 7 | use ra_syntax::{AstNode, Parse}; |
8 | use ra_db::{ | 8 | use ra_db::{ |
9 | ParseQuery, FileTextQuery, SourceRootId, | 9 | ParseQuery, FileTextQuery, SourceRootId, |
10 | salsa::{Database, debug::{DebugQueryTable, TableEntry}}, | 10 | salsa::{Database, debug::{DebugQueryTable, TableEntry}}, |
@@ -72,17 +72,17 @@ impl fmt::Display for SyntaxTreeStats {
72 | } | 72 | } |
73 | } | 73 | } |
74 | 74 | ||
75 | impl FromIterator<TableEntry<FileId, TreeArc<SourceFile>>> for SyntaxTreeStats { | 75 | impl FromIterator<TableEntry<FileId, Parse>> for SyntaxTreeStats { |
76 | fn from_iter<T>(iter: T) -> SyntaxTreeStats | 76 | fn from_iter<T>(iter: T) -> SyntaxTreeStats |
77 | where | 77 | where |
78 | T: IntoIterator<Item = TableEntry<FileId, TreeArc<SourceFile>>>, | 78 | T: IntoIterator<Item = TableEntry<FileId, Parse>>, |
79 | { | 79 | { |
80 | let mut res = SyntaxTreeStats::default(); | 80 | let mut res = SyntaxTreeStats::default(); |
81 | for entry in iter { | 81 | for entry in iter { |
82 | res.total += 1; | 82 | res.total += 1; |
83 | if let Some(value) = entry.value { | 83 | if let Some(value) = entry.value { |
84 | res.retained += 1; | 84 | res.retained += 1; |
85 | res.retained_size += value.syntax().memory_size_of_subtree(); | 85 | res.retained_size += value.tree.syntax().memory_size_of_subtree(); |
86 | } | 86 | } |
87 | } | 87 | } |
88 | res | 88 | res |
diff --git a/crates/ra_ide_api/src/symbol_index.rs b/crates/ra_ide_api/src/symbol_index.rs
index 1dcff8beb..a6cd7bf61 100644
--- a/crates/ra_ide_api/src/symbol_index.rs
+++ b/crates/ra_ide_api/src/symbol_index.rs
@@ -63,7 +63,7 @@ pub(crate) trait SymbolsDatabase: hir::db::HirDatabase {
63 | 63 | ||
64 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { | 64 | fn file_symbols(db: &impl SymbolsDatabase, file_id: FileId) -> Arc<SymbolIndex> { |
65 | db.check_canceled(); | 65 | db.check_canceled(); |
66 | let source_file = db.parse(file_id); | 66 | let source_file = db.parse(file_id).tree; |
67 | 67 | ||
68 | let symbols = source_file_to_file_symbols(&source_file, file_id); | 68 | let symbols = source_file_to_file_symbols(&source_file, file_id); |
69 | 69 | ||
diff --git a/crates/ra_ide_api/src/syntax_highlighting.rs b/crates/ra_ide_api/src/syntax_highlighting.rs
index dcefb0513..416e11334 100644
--- a/crates/ra_ide_api/src/syntax_highlighting.rs
+++ b/crates/ra_ide_api/src/syntax_highlighting.rs
@@ -30,7 +30,7 @@ fn is_control_keyword(kind: SyntaxKind) -> bool {
30 | 30 | ||
31 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { | 31 | pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRange> { |
32 | let _p = profile("highlight"); | 32 | let _p = profile("highlight"); |
33 | let source_file = db.parse(file_id); | 33 | let source_file = db.parse(file_id).tree; |
34 | 34 | ||
35 | fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 { | 35 | fn calc_binding_hash(file_id: FileId, text: &SmolStr, shadow_count: u32) -> u64 { |
36 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { | 36 | fn hash<T: std::hash::Hash + std::fmt::Debug>(x: T) -> u64 { |
@@ -162,7 +162,7 @@ pub(crate) fn highlight(db: &RootDatabase, file_id: FileId) -> Vec<HighlightedRa
162 | } | 162 | } |
163 | 163 | ||
164 | pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { | 164 | pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String { |
165 | let source_file = db.parse(file_id); | 165 | let source_file = db.parse(file_id).tree; |
166 | 166 | ||
167 | fn rainbowify(seed: u64) -> String { | 167 | fn rainbowify(seed: u64) -> String { |
168 | use rand::prelude::*; | 168 | use rand::prelude::*; |
diff --git a/crates/ra_ide_api/src/syntax_tree.rs b/crates/ra_ide_api/src/syntax_tree.rs
index c7288220c..7165fa97a 100644
--- a/crates/ra_ide_api/src/syntax_tree.rs
+++ b/crates/ra_ide_api/src/syntax_tree.rs
@@ -14,7 +14,7 @@ pub(crate) fn syntax_tree(
14 | text_range: Option<TextRange>, | 14 | text_range: Option<TextRange>, |
15 | ) -> String { | 15 | ) -> String { |
16 | if let Some(text_range) = text_range { | 16 | if let Some(text_range) = text_range { |
17 | let file = db.parse(file_id); | 17 | let file = db.parse(file_id).tree; |
18 | let node = match algo::find_covering_element(file.syntax(), text_range) { | 18 | let node = match algo::find_covering_element(file.syntax(), text_range) { |
19 | SyntaxElement::Node(node) => node, | 19 | SyntaxElement::Node(node) => node, |
20 | SyntaxElement::Token(token) => { | 20 | SyntaxElement::Token(token) => { |
@@ -27,7 +27,7 @@ pub(crate) fn syntax_tree(
27 | 27 | ||
28 | node.debug_dump() | 28 | node.debug_dump() |
29 | } else { | 29 | } else { |
30 | db.parse(file_id).syntax().debug_dump() | 30 | db.parse(file_id).tree.syntax().debug_dump() |
31 | } | 31 | } |
32 | } | 32 | } |
33 | 33 | ||
@@ -84,8 +84,8 @@ fn syntax_tree_for_token(node: SyntaxToken, text_range: TextRange) -> Option<Str
84 | 84 | ||
85 | // If the "file" parsed without errors, | 85 | // If the "file" parsed without errors, |
86 | // return its syntax | 86 | // return its syntax |
87 | if parsed.errors().is_empty() { | 87 | if parsed.errors.is_empty() { |
88 | return Some(parsed.syntax().debug_dump()); | 88 | return Some(parsed.tree.syntax().debug_dump()); |
89 | } | 89 | } |
90 | 90 | ||
91 | None | 91 | None |
diff --git a/crates/ra_ide_api/src/test_utils.rs b/crates/ra_ide_api/src/test_utils.rs
index d0bd3a1e4..6e0d883b4 100644
--- a/crates/ra_ide_api/src/test_utils.rs
+++ b/crates/ra_ide_api/src/test_utils.rs
@@ -9,7 +9,7 @@ pub fn check_action<F: Fn(&SourceFile, TextUnit) -> Option<TextEdit>>(
9 | f: F, | 9 | f: F, |
10 | ) { | 10 | ) { |
11 | let (before_cursor_pos, before) = extract_offset(before); | 11 | let (before_cursor_pos, before) = extract_offset(before); |
12 | let file = SourceFile::parse(&before); | 12 | let file = SourceFile::parse(&before).ok().unwrap(); |
13 | let result = f(&file, before_cursor_pos).expect("code action is not applicable"); | 13 | let result = f(&file, before_cursor_pos).expect("code action is not applicable"); |
14 | let actual = result.apply(&before); | 14 | let actual = result.apply(&before); |
15 | let actual_cursor_pos = | 15 | let actual_cursor_pos = |
diff --git a/crates/ra_ide_api/src/typing.rs b/crates/ra_ide_api/src/typing.rs
index ae53bca77..63bc0cf88 100644
--- a/crates/ra_ide_api/src/typing.rs
+++ b/crates/ra_ide_api/src/typing.rs
@@ -10,7 +10,7 @@ use ra_db::{FilePosition, SourceDatabase};
10 | use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; | 10 | use crate::{db::RootDatabase, SourceChange, SourceFileEdit}; |
11 | 11 | ||
12 | pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { | 12 | pub(crate) fn on_enter(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { |
13 | let file = db.parse(position.file_id); | 13 | let file = db.parse(position.file_id).tree; |
14 | let comment = find_token_at_offset(file.syntax(), position.offset) | 14 | let comment = find_token_at_offset(file.syntax(), position.offset) |
15 | .left_biased() | 15 | .left_biased() |
16 | .and_then(ast::Comment::cast)?; | 16 | .and_then(ast::Comment::cast)?; |
@@ -85,7 +85,7 @@ pub fn on_eq_typed(file: &SourceFile, eq_offset: TextUnit) -> Option<TextEdit> {
85 | } | 85 | } |
86 | 86 | ||
87 | pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { | 87 | pub(crate) fn on_dot_typed(db: &RootDatabase, position: FilePosition) -> Option<SourceChange> { |
88 | let file = db.parse(position.file_id); | 88 | let file = db.parse(position.file_id).tree; |
89 | assert_eq!(file.syntax().text().char_at(position.offset), Some('.')); | 89 | assert_eq!(file.syntax().text().char_at(position.offset), Some('.')); |
90 | 90 | ||
91 | let whitespace = find_token_at_offset(file.syntax(), position.offset) | 91 | let whitespace = find_token_at_offset(file.syntax(), position.offset) |
@@ -138,7 +138,7 @@ mod tests {
138 | let mut edit = TextEditBuilder::default(); | 138 | let mut edit = TextEditBuilder::default(); |
139 | edit.insert(offset, "=".to_string()); | 139 | edit.insert(offset, "=".to_string()); |
140 | let before = edit.finish().apply(&before); | 140 | let before = edit.finish().apply(&before); |
141 | let file = SourceFile::parse(&before); | 141 | let file = SourceFile::parse(&before).tree; |
142 | if let Some(result) = on_eq_typed(&file, offset) { | 142 | if let Some(result) = on_eq_typed(&file, offset) { |
143 | let actual = result.apply(&before); | 143 | let actual = result.apply(&before); |
144 | assert_eq_text!(after, &actual); | 144 | assert_eq_text!(after, &actual); |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs
index 4639baa38..dce82f33d 100644
--- a/crates/ra_mbe/src/syntax_bridge.rs
+++ b/crates/ra_mbe/src/syntax_bridge.rs
@@ -384,7 +384,7 @@ mod tests {
384 | } | 384 | } |
385 | "#, | 385 | "#, |
386 | ); | 386 | ); |
387 | let expansion = expand(&rules, "literals!(foo)"); | 387 | let expansion = expand(&rules, "literals!(foo);"); |
388 | let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]); | 388 | let buffer = tt::buffer::TokenBuffer::new(&[expansion.clone().into()]); |
389 | let mut tt_src = SubtreeTokenSource::new(&buffer); | 389 | let mut tt_src = SubtreeTokenSource::new(&buffer); |
390 | let mut tokens = vec![]; | 390 | let mut tokens = vec![]; |
@@ -423,7 +423,7 @@ mod tests {
423 | } | 423 | } |
424 | "#, | 424 | "#, |
425 | ); | 425 | ); |
426 | let expansion = expand(&rules, "stmts!()"); | 426 | let expansion = expand(&rules, "stmts!();"); |
427 | assert!(token_tree_to_expr(&expansion).is_err()); | 427 | assert!(token_tree_to_expr(&expansion).is_err()); |
428 | } | 428 | } |
429 | } | 429 | } |
diff --git a/crates/ra_mbe/src/tests.rs b/crates/ra_mbe/src/tests.rs
index 5e4017f77..1db35cd8d 100644
--- a/crates/ra_mbe/src/tests.rs
+++ b/crates/ra_mbe/src/tests.rs
@@ -95,7 +95,7 @@ pub(crate) fn expand_to_expr(
95 | pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree { | 95 | pub(crate) fn text_to_tokentree(text: &str) -> tt::Subtree { |
96 | // wrap the given text to a macro call | 96 | // wrap the given text to a macro call |
97 | let wrapped = format!("wrap_macro!( {} )", text); | 97 | let wrapped = format!("wrap_macro!( {} )", text); |
98 | let wrapped = ast::SourceFile::parse(&wrapped).ok().unwrap(); | 98 | let wrapped = ast::SourceFile::parse(&wrapped).tree; |
99 | let wrapped = wrapped.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); | 99 | let wrapped = wrapped.syntax().descendants().find_map(ast::TokenTree::cast).unwrap(); |
100 | let mut wrapped = ast_to_token_tree(wrapped).unwrap().0; | 100 | let mut wrapped = ast_to_token_tree(wrapped).unwrap().0; |
101 | wrapped.delimiter = tt::Delimiter::None; | 101 | wrapped.delimiter = tt::Delimiter::None; |
@@ -378,7 +378,7 @@ fn test_match_group_with_multichar_sep() {
378 | assert_expansion( | 378 | assert_expansion( |
379 | MacroKind::Items, | 379 | MacroKind::Items, |
380 | &rules, | 380 | &rules, |
381 | "foo! (fn baz {true true} )", | 381 | "foo! (fn baz {true true} );", |
382 | "fn baz () -> bool {true &&true}", | 382 | "fn baz () -> bool {true &&true}", |
383 | ); | 383 | ); |
384 | } | 384 | } |
@@ -392,7 +392,7 @@ fn test_match_group_zero_match() {
392 | }"#, | 392 | }"#, |
393 | ); | 393 | ); |
394 | 394 | ||
395 | assert_expansion(MacroKind::Items, &rules, "foo! ()", ""); | 395 | assert_expansion(MacroKind::Items, &rules, "foo! ();", ""); |
396 | } | 396 | } |
397 | 397 | ||
398 | #[test] | 398 | #[test] |
@@ -404,7 +404,7 @@ fn test_match_group_in_group() {
404 | }"#, | 404 | }"#, |
405 | ); | 405 | ); |
406 | 406 | ||
407 | assert_expansion(MacroKind::Items, &rules, "foo! ( (a b) )", "(a b)"); | 407 | assert_expansion(MacroKind::Items, &rules, "foo! ( (a b) );", "(a b)"); |
408 | } | 408 | } |
409 | 409 | ||
410 | #[test] | 410 | #[test] |
@@ -418,7 +418,7 @@ fn test_expand_to_item_list() {
418 | } | 418 | } |
419 | ", | 419 | ", |
420 | ); | 420 | ); |
421 | let expansion = expand(&rules, "structs!(Foo, Bar)"); | 421 | let expansion = expand(&rules, "structs!(Foo, Bar);"); |
422 | let tree = token_tree_to_macro_items(&expansion); | 422 | let tree = token_tree_to_macro_items(&expansion); |
423 | assert_eq!( | 423 | assert_eq!( |
424 | tree.unwrap().syntax().debug_dump().trim(), | 424 | tree.unwrap().syntax().debug_dump().trim(), |
@@ -490,7 +490,7 @@ fn test_expand_literals_to_token_tree() {
490 | } | 490 | } |
491 | "#, | 491 | "#, |
492 | ); | 492 | ); |
493 | let expansion = expand(&rules, "literals!(foo)"); | 493 | let expansion = expand(&rules, "literals!(foo);"); |
494 | let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; | 494 | let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; |
495 | 495 | ||
496 | // [let] [a] [=] ['c'] [;] | 496 | // [let] [a] [=] ['c'] [;] |
@@ -586,7 +586,7 @@ fn test_match_literal() {
586 | } | 586 | } |
587 | "#, | 587 | "#, |
588 | ); | 588 | ); |
589 | assert_expansion(MacroKind::Items, &rules, "foo! ['(']", "fn foo () {}"); | 589 | assert_expansion(MacroKind::Items, &rules, "foo! ['('];", "fn foo () {}"); |
590 | } | 590 | } |
591 | 591 | ||
592 | // The following tests are port from intellij-rust directly | 592 | // The following tests are port from intellij-rust directly |
@@ -725,7 +725,7 @@ fn test_last_expr() {
725 | assert_expansion( | 725 | assert_expansion( |
726 | MacroKind::Items, | 726 | MacroKind::Items, |
727 | &rules, | 727 | &rules, |
728 | "vec!(1,2,3)", | 728 | "vec!(1,2,3);", |
729 | "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}", | 729 | "{let mut v = Vec :: new () ; v . push (1) ; v . push (2) ; v . push (3) ; v}", |
730 | ); | 730 | ); |
731 | } | 731 | } |
@@ -902,7 +902,7 @@ fn test_meta_doc_comments() {
902 | MultiLines Doc | 902 | MultiLines Doc |
903 | */ | 903 | */ |
904 | }"#, | 904 | }"#, |
905 | "# [doc = \" Single Line Doc 1\"] # [doc = \" \\\\n MultiLines Doc\\\\n \"] fn bar () {}", | 905 | "# [doc = \" Single Line Doc 1\"] # [doc = \"\\\\n MultiLines Doc\\\\n \"] fn bar () {}", |
906 | ); | 906 | ); |
907 | } | 907 | } |
908 | 908 | ||
@@ -950,7 +950,7 @@ fn test_literal() {
950 | } | 950 | } |
951 | "#, | 951 | "#, |
952 | ); | 952 | ); |
953 | assert_expansion(MacroKind::Items, &rules, r#"foo!(u8 0)"#, r#"const VALUE : u8 = 0 ;"#); | 953 | assert_expansion(MacroKind::Items, &rules, r#"foo!(u8 0);"#, r#"const VALUE : u8 = 0 ;"#); |
954 | } | 954 | } |
955 | 955 | ||
956 | #[test] | 956 | #[test] |
@@ -1017,12 +1017,12 @@ fn test_vec() {
1017 | assert_expansion( | 1017 | assert_expansion( |
1018 | MacroKind::Items, | 1018 | MacroKind::Items, |
1019 | &rules, | 1019 | &rules, |
1020 | r#"vec![1u32,2]"#, | 1020 | r#"vec![1u32,2];"#, |
1021 | r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#, | 1021 | r#"{let mut v = Vec :: new () ; v . push (1u32) ; v . push (2) ; v}"#, |
1022 | ); | 1022 | ); |
1023 | 1023 | ||
1024 | assert_eq!( | 1024 | assert_eq!( |
1025 | expand_to_expr(&rules, r#"vec![1u32,2]"#).syntax().debug_dump().trim(), | 1025 | expand_to_expr(&rules, r#"vec![1u32,2];"#).syntax().debug_dump().trim(), |
1026 | r#"BLOCK_EXPR@[0; 45) | 1026 | r#"BLOCK_EXPR@[0; 45) |
1027 | BLOCK@[0; 45) | 1027 | BLOCK@[0; 45) |
1028 | L_CURLY@[0; 1) "{" | 1028 | L_CURLY@[0; 1) "{" |
@@ -1161,7 +1161,7 @@ macro_rules! generate_pattern_iterators {
1161 | "#, | 1161 | "#, |
1162 | ); | 1162 | ); |
1163 | 1163 | ||
1164 | assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str )"#, | 1164 | assert_expansion(MacroKind::Items, &rules, r#"generate_pattern_iterators ! ( double ended ; with # [ stable ( feature = "rust1" , since = "1.0.0" ) ] , Split , RSplit , & 'a str );"#, |
1165 | "fn foo () {}"); | 1165 | "fn foo () {}"); |
1166 | } | 1166 | } |
1167 | 1167 | ||
@@ -1208,7 +1208,6 @@ $body: block; )+
1208 | )+ | 1208 | )+ |
1209 | } | 1209 | } |
1210 | } | 1210 | } |
1211 | } | ||
1212 | "#, | 1211 | "#, |
1213 | ); | 1212 | ); |
1214 | 1213 | ||