Diffstat (limited to 'crates')
22 files changed, 564 insertions, 48 deletions
diff --git a/crates/ra_batch/src/lib.rs b/crates/ra_batch/src/lib.rs index 3bbcdb0b8..5bb47afb2 100644 --- a/crates/ra_batch/src/lib.rs +++ b/crates/ra_batch/src/lib.rs | |||
@@ -126,10 +126,7 @@ mod tests { | |||
126 | 126 | ||
127 | #[test] | 127 | #[test] |
128 | fn test_loading_rust_analyzer() { | 128 | fn test_loading_rust_analyzer() { |
129 | let mut path = std::env::current_exe().unwrap(); | 129 | let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap(); |
130 | while !path.join("Cargo.toml").is_file() { | ||
131 | path = path.parent().unwrap().to_owned(); | ||
132 | } | ||
133 | let (db, roots) = BatchDatabase::load_cargo(path).unwrap(); | 130 | let (db, roots) = BatchDatabase::load_cargo(path).unwrap(); |
134 | let mut n_crates = 0; | 131 | let mut n_crates = 0; |
135 | for root in roots { | 132 | for root in roots { |
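Note: the rewritten test resolves the workspace root at compile time via CARGO_MANIFEST_DIR instead of walking up from the test binary's path. A minimal sketch of the pattern, assuming the crate sits two levels below the workspace root (<root>/crates/ra_batch):

    use std::path::Path;

    fn workspace_root() -> &'static Path {
        // CARGO_MANIFEST_DIR expands to .../crates/ra_batch at compile time
        Path::new(env!("CARGO_MANIFEST_DIR"))
            .parent() // .../crates
            .unwrap()
            .parent() // workspace root
            .unwrap()
    }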
diff --git a/crates/ra_cli/Cargo.toml b/crates/ra_cli/Cargo.toml index 467628236..328b2436f 100644 --- a/crates/ra_cli/Cargo.toml +++ b/crates/ra_cli/Cargo.toml | |||
@@ -18,3 +18,4 @@ tools = { path = "../tools" } | |||
18 | ra_batch = { path = "../ra_batch" } | 18 | ra_batch = { path = "../ra_batch" } |
19 | ra_hir = { path = "../ra_hir" } | 19 | ra_hir = { path = "../ra_hir" } |
20 | ra_db = { path = "../ra_db" } | 20 | ra_db = { path = "../ra_db" } |
21 | ra_prof = { path = "../ra_prof" } | ||
diff --git a/crates/ra_cli/src/main.rs b/crates/ra_cli/src/main.rs index 1f2750d89..45555be6e 100644 --- a/crates/ra_cli/src/main.rs +++ b/crates/ra_cli/src/main.rs | |||
@@ -1,12 +1,13 @@ | |||
1 | mod analysis_stats; | 1 | mod analysis_stats; |
2 | 2 | ||
3 | use std::{fs, io::Read, path::Path, time::Instant}; | 3 | use std::{fs, io::Read, path::Path}; |
4 | 4 | ||
5 | use clap::{App, Arg, SubCommand}; | 5 | use clap::{App, Arg, SubCommand}; |
6 | use ra_ide_api::file_structure; | 6 | use ra_ide_api::file_structure; |
7 | use ra_syntax::{SourceFile, TreeArc, AstNode}; | 7 | use ra_syntax::{SourceFile, TreeArc, AstNode}; |
8 | use tools::collect_tests; | 8 | use tools::collect_tests; |
9 | use flexi_logger::Logger; | 9 | use flexi_logger::Logger; |
10 | use ra_prof::profile; | ||
10 | 11 | ||
11 | type Result<T> = ::std::result::Result<T, failure::Error>; | 12 | type Result<T> = ::std::result::Result<T, failure::Error>; |
12 | 13 | ||
@@ -27,13 +28,11 @@ fn main() -> Result<()> { | |||
27 | .get_matches(); | 28 | .get_matches(); |
28 | match matches.subcommand() { | 29 | match matches.subcommand() { |
29 | ("parse", Some(matches)) => { | 30 | ("parse", Some(matches)) => { |
30 | let start = Instant::now(); | 31 | let _p = profile("parsing"); |
31 | let file = file()?; | 32 | let file = file()?; |
32 | let elapsed = start.elapsed(); | ||
33 | if !matches.is_present("no-dump") { | 33 | if !matches.is_present("no-dump") { |
34 | println!("{}", file.syntax().debug_dump()); | 34 | println!("{}", file.syntax().debug_dump()); |
35 | } | 35 | } |
36 | eprintln!("parsing: {:?}", elapsed); | ||
37 | ::std::mem::forget(file); | 36 | ::std::mem::forget(file); |
38 | } | 37 | } |
39 | ("symbols", _) => { | 38 | ("symbols", _) => { |
diff --git a/crates/ra_db/Cargo.toml b/crates/ra_db/Cargo.toml index 581cd32fd..08aef9bf5 100644 --- a/crates/ra_db/Cargo.toml +++ b/crates/ra_db/Cargo.toml | |||
@@ -5,7 +5,7 @@ version = "0.1.0" | |||
5 | authors = ["rust-analyzer developers"] | 5 | authors = ["rust-analyzer developers"] |
6 | 6 | ||
7 | [dependencies] | 7 | [dependencies] |
8 | salsa = "0.11.1" | 8 | salsa = "0.12.0" |
9 | relative-path = "0.4.0" | 9 | relative-path = "0.4.0" |
10 | rustc-hash = "1.0" | 10 | rustc-hash = "1.0" |
11 | parking_lot = "0.7.0" | 11 | parking_lot = "0.7.0" |
diff --git a/crates/ra_hir/Cargo.toml b/crates/ra_hir/Cargo.toml index 501308acc..a2858dad9 100644 --- a/crates/ra_hir/Cargo.toml +++ b/crates/ra_hir/Cargo.toml | |||
@@ -19,6 +19,7 @@ ra_db = { path = "../ra_db" } | |||
19 | mbe = { path = "../ra_mbe", package = "ra_mbe" } | 19 | mbe = { path = "../ra_mbe", package = "ra_mbe" } |
20 | tt = { path = "../ra_tt", package = "ra_tt" } | 20 | tt = { path = "../ra_tt", package = "ra_tt" } |
21 | test_utils = { path = "../test_utils" } | 21 | test_utils = { path = "../test_utils" } |
22 | ra_prof = {path = "../ra_prof" } | ||
22 | 23 | ||
23 | [dev-dependencies] | 24 | [dev-dependencies] |
24 | flexi_logger = "0.11.0" | 25 | flexi_logger = "0.11.0" |
diff --git a/crates/ra_hir/src/nameres.rs b/crates/ra_hir/src/nameres.rs index 6f049acfc..4ae04514a 100644 --- a/crates/ra_hir/src/nameres.rs +++ b/crates/ra_hir/src/nameres.rs | |||
@@ -60,6 +60,7 @@ use ra_arena::{Arena, RawId, impl_arena_id}; | |||
60 | use ra_db::{FileId, Edition}; | 60 | use ra_db::{FileId, Edition}; |
61 | use test_utils::tested_by; | 61 | use test_utils::tested_by; |
62 | use ra_syntax::ast; | 62 | use ra_syntax::ast; |
63 | use ra_prof::profile; | ||
63 | 64 | ||
64 | use crate::{ | 65 | use crate::{ |
65 | ModuleDef, Name, Crate, Module, | 66 | ModuleDef, Name, Crate, Module, |
@@ -181,7 +182,7 @@ enum ReachedFixedPoint { | |||
181 | 182 | ||
182 | impl CrateDefMap { | 183 | impl CrateDefMap { |
183 | pub(crate) fn crate_def_map_query(db: &impl DefDatabase, krate: Crate) -> Arc<CrateDefMap> { | 184 | pub(crate) fn crate_def_map_query(db: &impl DefDatabase, krate: Crate) -> Arc<CrateDefMap> { |
184 | let start = std::time::Instant::now(); | 185 | let _p = profile("crate_def_map_query"); |
185 | let def_map = { | 186 | let def_map = { |
186 | let edition = krate.edition(db); | 187 | let edition = krate.edition(db); |
187 | let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default(); | 188 | let mut modules: Arena<CrateModuleId, ModuleData> = Arena::default(); |
@@ -198,7 +199,6 @@ impl CrateDefMap { | |||
198 | } | 199 | } |
199 | }; | 200 | }; |
200 | let def_map = collector::collect_defs(db, def_map); | 201 | let def_map = collector::collect_defs(db, def_map); |
201 | log::info!("crate_def_map_query: {:?}", start.elapsed()); | ||
202 | Arc::new(def_map) | 202 | Arc::new(def_map) |
203 | } | 203 | } |
204 | 204 | ||
diff --git a/crates/ra_lsp_server/Cargo.toml b/crates/ra_lsp_server/Cargo.toml index d82410700..bc181e4eb 100644 --- a/crates/ra_lsp_server/Cargo.toml +++ b/crates/ra_lsp_server/Cargo.toml | |||
@@ -27,6 +27,7 @@ ra_ide_api = { path = "../ra_ide_api" } | |||
27 | ra_arena = { path = "../ra_arena" } | 27 | ra_arena = { path = "../ra_arena" } |
28 | gen_lsp_server = { path = "../gen_lsp_server" } | 28 | gen_lsp_server = { path = "../gen_lsp_server" } |
29 | ra_project_model = { path = "../ra_project_model" } | 29 | ra_project_model = { path = "../ra_project_model" } |
30 | ra_prof = { path = "../ra_prof" } | ||
30 | 31 | ||
31 | [dev-dependencies] | 32 | [dev-dependencies] |
32 | tempfile = "3" | 33 | tempfile = "3" |
diff --git a/crates/ra_lsp_server/src/main.rs b/crates/ra_lsp_server/src/main.rs index 5a2905207..eb4091a3d 100644 --- a/crates/ra_lsp_server/src/main.rs +++ b/crates/ra_lsp_server/src/main.rs | |||
@@ -3,6 +3,7 @@ use flexi_logger::{Duplicate, Logger}; | |||
3 | use gen_lsp_server::{run_server, stdio_transport}; | 3 | use gen_lsp_server::{run_server, stdio_transport}; |
4 | 4 | ||
5 | use ra_lsp_server::{Result, InitializationOptions}; | 5 | use ra_lsp_server::{Result, InitializationOptions}; |
6 | use ra_prof; | ||
6 | 7 | ||
7 | fn main() -> Result<()> { | 8 | fn main() -> Result<()> { |
8 | ::std::env::set_var("RUST_BACKTRACE", "short"); | 9 | ::std::env::set_var("RUST_BACKTRACE", "short"); |
@@ -11,6 +12,15 @@ fn main() -> Result<()> { | |||
11 | Ok(ref v) if v == "1" => logger.log_to_file().directory("log").start()?, | 12 | Ok(ref v) if v == "1" => logger.log_to_file().directory("log").start()?, |
12 | _ => logger.start()?, | 13 | _ => logger.start()?, |
13 | }; | 14 | }; |
15 | let prof_depth = match ::std::env::var("RA_PROFILE_DEPTH") { | ||
16 | Ok(ref d) => d.parse()?, | ||
17 | _ => 0, | ||
18 | }; | ||
19 | let profile_allowed = match ::std::env::var("RA_PROFILE") { | ||
20 | Ok(ref p) => p.split(";").map(String::from).collect(), | ||
21 | _ => Vec::new(), | ||
22 | }; | ||
23 | ra_prof::set_filter(ra_prof::Filter::new(prof_depth, profile_allowed)); | ||
14 | log::info!("lifecycle: server started"); | 24 | log::info!("lifecycle: server started"); |
15 | match ::std::panic::catch_unwind(main_inner) { | 25 | match ::std::panic::catch_unwind(main_inner) { |
16 | Ok(res) => { | 26 | Ok(res) => { |
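Note: with this wiring, launching the server with, say, RA_PROFILE_DEPTH=2 and RA_PROFILE="loop_turn;parsing" (illustrative values, not prescribed names) is equivalent to calling:

    ra_prof::set_filter(ra_prof::Filter::new(
        2,
        vec!["loop_turn".to_string(), "parsing".to_string()],
    ));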
diff --git a/crates/ra_lsp_server/src/main_loop.rs b/crates/ra_lsp_server/src/main_loop.rs index eecf278a8..82410bee3 100644 --- a/crates/ra_lsp_server/src/main_loop.rs +++ b/crates/ra_lsp_server/src/main_loop.rs | |||
@@ -24,6 +24,7 @@ use crate::{ | |||
24 | Result, | 24 | Result, |
25 | InitializationOptions, | 25 | InitializationOptions, |
26 | }; | 26 | }; |
27 | use ra_prof::profile; | ||
27 | 28 | ||
28 | #[derive(Debug, Fail)] | 29 | #[derive(Debug, Fail)] |
29 | #[fail(display = "Language Server request failed with {}. ({})", code, message)] | 30 | #[fail(display = "Language Server request failed with {}. ({})", code, message)] |
@@ -181,7 +182,7 @@ fn main_loop_inner( | |||
181 | recv(libdata_receiver) -> data => Event::Lib(data.unwrap()) | 182 | recv(libdata_receiver) -> data => Event::Lib(data.unwrap()) |
182 | }; | 183 | }; |
183 | log::info!("loop_turn = {:?}", event); | 184 | log::info!("loop_turn = {:?}", event); |
184 | let start = std::time::Instant::now(); | 185 | let _p = profile("loop_turn"); |
185 | let mut state_changed = false; | 186 | let mut state_changed = false; |
186 | match event { | 187 | match event { |
187 | Event::Task(task) => on_task(task, msg_sender, pending_requests), | 188 | Event::Task(task) => on_task(task, msg_sender, pending_requests), |
@@ -235,10 +236,9 @@ fn main_loop_inner( | |||
235 | in_flight_libraries += 1; | 236 | in_flight_libraries += 1; |
236 | let sender = libdata_sender.clone(); | 237 | let sender = libdata_sender.clone(); |
237 | pool.execute(move || { | 238 | pool.execute(move || { |
238 | let start = ::std::time::Instant::now(); | ||
239 | log::info!("indexing {:?} ... ", root); | 239 | log::info!("indexing {:?} ... ", root); |
240 | let _p = profile(&format!("indexed {:?}", root)); | ||
240 | let data = LibraryData::prepare(root, files); | 241 | let data = LibraryData::prepare(root, files); |
241 | log::info!("indexed {:?} {:?}", start.elapsed(), root); | ||
242 | sender.send(data).unwrap(); | 242 | sender.send(data).unwrap(); |
243 | }); | 243 | }); |
244 | } | 244 | } |
@@ -266,7 +266,6 @@ fn main_loop_inner( | |||
266 | subs.subscriptions(), | 266 | subs.subscriptions(), |
267 | ) | 267 | ) |
268 | } | 268 | } |
269 | log::info!("loop_turn = {:?}", start.elapsed()); | ||
270 | } | 269 | } |
271 | } | 270 | } |
272 | 271 | ||
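Note: ra_prof's filter (see crates/ra_prof/src/lib.rs below) matches descriptions by exact string, so the dynamically formatted `indexed {:?}` description only fires when the full formatted string appears in the allow-list; an illustration with a hypothetical root:

    // Only this exact formatted description would be profiled:
    let allowed = vec!["indexed \"/path/to/lib\"".to_string()]; // hypothetical
    ra_prof::set_filter(ra_prof::Filter::new(1, allowed));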
diff --git a/crates/ra_mbe/Cargo.toml b/crates/ra_mbe/Cargo.toml index 6e785f570..1d0c2a340 100644 --- a/crates/ra_mbe/Cargo.toml +++ b/crates/ra_mbe/Cargo.toml | |||
@@ -8,5 +8,5 @@ authors = ["rust-analyzer developers"] | |||
8 | ra_syntax = { path = "../ra_syntax" } | 8 | ra_syntax = { path = "../ra_syntax" } |
9 | ra_parser = { path = "../ra_parser" } | 9 | ra_parser = { path = "../ra_parser" } |
10 | tt = { path = "../ra_tt", package = "ra_tt" } | 10 | tt = { path = "../ra_tt", package = "ra_tt" } |
11 | 11 | itertools = "0.8.0" | |
12 | rustc-hash = "1.0.0" | 12 | rustc-hash = "1.0.0" |
diff --git a/crates/ra_mbe/src/lib.rs b/crates/ra_mbe/src/lib.rs index 93246f54a..4203929d4 100644 --- a/crates/ra_mbe/src/lib.rs +++ b/crates/ra_mbe/src/lib.rs | |||
@@ -167,7 +167,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
167 | ) | 167 | ) |
168 | } | 168 | } |
169 | 169 | ||
170 | fn create_rules(macro_definition: &str) -> MacroRules { | 170 | pub(crate) fn create_rules(macro_definition: &str) -> MacroRules { |
171 | let source_file = ast::SourceFile::parse(macro_definition); | 171 | let source_file = ast::SourceFile::parse(macro_definition); |
172 | let macro_definition = | 172 | let macro_definition = |
173 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 173 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
@@ -176,7 +176,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
176 | crate::MacroRules::parse(&definition_tt).unwrap() | 176 | crate::MacroRules::parse(&definition_tt).unwrap() |
177 | } | 177 | } |
178 | 178 | ||
179 | fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree { | 179 | pub(crate) fn expand(rules: &MacroRules, invocation: &str) -> tt::Subtree { |
180 | let source_file = ast::SourceFile::parse(invocation); | 180 | let source_file = ast::SourceFile::parse(invocation); |
181 | let macro_invocation = | 181 | let macro_invocation = |
182 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); | 182 | source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap(); |
@@ -186,7 +186,7 @@ impl_froms!(TokenTree: Leaf, Subtree); | |||
186 | rules.expand(&invocation_tt).unwrap() | 186 | rules.expand(&invocation_tt).unwrap() |
187 | } | 187 | } |
188 | 188 | ||
189 | fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { | 189 | pub(crate) fn assert_expansion(rules: &MacroRules, invocation: &str, expansion: &str) { |
190 | let expanded = expand(rules, invocation); | 190 | let expanded = expand(rules, invocation); |
191 | assert_eq!(expanded.to_string(), expansion); | 191 | assert_eq!(expanded.to_string(), expansion); |
192 | } | 192 | } |
@@ -337,4 +337,46 @@ SOURCE_FILE@[0; 40) | |||
337 | ); | 337 | ); |
338 | } | 338 | } |
339 | 339 | ||
340 | #[test] | ||
341 | fn expand_literals_to_token_tree() { | ||
342 | fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree { | ||
343 | if let tt::TokenTree::Subtree(subtree) = tt { | ||
344 | return &subtree; | ||
345 | } | ||
346 | unreachable!("It is not a subtree"); | ||
347 | } | ||
348 | |||
349 | fn to_literal(tt: &tt::TokenTree) -> &tt::Literal { | ||
350 | if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt { | ||
351 | return lit; | ||
352 | } | ||
353 | unreachable!("It is not a literal"); | ||
354 | } | ||
355 | |||
356 | let rules = create_rules( | ||
357 | r#" | ||
358 | macro_rules! literals { | ||
359 | ($i:ident) => { | ||
360 | { | ||
361 | let a = 'c'; | ||
362 | let c = 1000; | ||
363 | let f = 12E+99_f64; | ||
364 | let s = "rust1"; | ||
365 | } | ||
366 | } | ||
367 | } | ||
368 | "#, | ||
369 | ); | ||
370 | let expansion = expand(&rules, "literals!(foo)"); | ||
371 | let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees; | ||
372 | |||
373 | // [let] [a] [=] ['c'] [;] | ||
374 | assert_eq!(to_literal(&stm_tokens[3]).text, "'c'"); | ||
375 | // [let] [c] [=] [1000] [;] | ||
376 | assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000"); | ||
377 | // [let] [f] [=] [12E+99_f64] [;] | ||
378 | assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64"); | ||
379 | // [let] [s] [=] ["rust1"] [;] | ||
380 | assert_eq!(to_literal(&stm_tokens[15 + 3]).text, "\"rust1\""); | ||
381 | } | ||
340 | } | 382 | } |
diff --git a/crates/ra_mbe/src/syntax_bridge.rs b/crates/ra_mbe/src/syntax_bridge.rs index 05f9817da..139a0fd33 100644 --- a/crates/ra_mbe/src/syntax_bridge.rs +++ b/crates/ra_mbe/src/syntax_bridge.rs | |||
@@ -1,7 +1,7 @@ | |||
1 | use ra_parser::{TokenSource, TreeSink, ParseError}; | 1 | use ra_parser::{TokenSource, TreeSink, ParseError}; |
2 | use ra_syntax::{ | 2 | use ra_syntax::{ |
3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, | 3 | AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement, |
4 | ast, SyntaxKind::*, TextUnit | 4 | ast, SyntaxKind::*, TextUnit, classify_literal |
5 | }; | 5 | }; |
6 | 6 | ||
7 | /// Maps `tt::TokenId` to the relative range of the original token. | 7 | /// Maps `tt::TokenId` to the relative range of the original token. |
@@ -103,16 +103,63 @@ fn convert_tt( | |||
103 | Some(res) | 103 | Some(res) |
104 | } | 104 | } |
105 | 105 | ||
106 | #[derive(Debug)] | ||
106 | struct TtTokenSource { | 107 | struct TtTokenSource { |
107 | tokens: Vec<TtToken>, | 108 | tokens: Vec<TtToken>, |
108 | } | 109 | } |
109 | 110 | ||
111 | #[derive(Debug)] | ||
110 | struct TtToken { | 112 | struct TtToken { |
111 | kind: SyntaxKind, | 113 | kind: SyntaxKind, |
112 | is_joint_to_next: bool, | 114 | is_joint_to_next: bool, |
113 | text: SmolStr, | 115 | text: SmolStr, |
114 | } | 116 | } |
115 | 117 | ||
118 | // Helper: extract a punct leaf from a token tree, if any. | ||
119 | fn to_punct(tt: &tt::TokenTree) -> Option<&tt::Punct> { | ||
120 | if let tt::TokenTree::Leaf(tt::Leaf::Punct(pp)) = tt { | ||
121 | return Some(pp); | ||
122 | } | ||
123 | None | ||
124 | } | ||
125 | |||
126 | struct TokenPeek<'a, I> | ||
127 | where | ||
128 | I: Iterator<Item = &'a tt::TokenTree>, | ||
129 | { | ||
130 | iter: itertools::MultiPeek<I>, | ||
131 | } | ||
132 | |||
133 | impl<'a, I> TokenPeek<'a, I> | ||
134 | where | ||
135 | I: Iterator<Item = &'a tt::TokenTree>, | ||
136 | { | ||
137 | fn next(&mut self) -> Option<&tt::TokenTree> { | ||
138 | self.iter.next() | ||
139 | } | ||
140 | |||
141 | fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> { | ||
142 | if p.spacing != tt::Spacing::Joint { | ||
143 | return None; | ||
144 | } | ||
145 | |||
146 | self.iter.reset_peek(); | ||
147 | let p1 = to_punct(self.iter.peek()?)?; | ||
148 | Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint)) | ||
149 | } | ||
150 | |||
151 | fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> { | ||
152 | self.current_punct2(p).and_then(|((p0, p1), last_joint)| { | ||
153 | if !last_joint { | ||
154 | None | ||
155 | } else { | ||
156 | let p2 = to_punct(*self.iter.peek()?)?; | ||
157 | Some(((p0, p1, p2.char), p2.spacing == tt::Spacing::Joint)) | ||
158 | } | ||
159 | }) | ||
160 | } | ||
161 | } | ||
162 | |||
116 | impl TtTokenSource { | 163 | impl TtTokenSource { |
117 | fn new(tt: &tt::Subtree) -> TtTokenSource { | 164 | fn new(tt: &tt::Subtree) -> TtTokenSource { |
118 | let mut res = TtTokenSource { tokens: Vec::new() }; | 165 | let mut res = TtTokenSource { tokens: Vec::new() }; |
@@ -121,38 +168,53 @@ impl TtTokenSource { | |||
121 | } | 168 | } |
122 | fn convert_subtree(&mut self, sub: &tt::Subtree) { | 169 | fn convert_subtree(&mut self, sub: &tt::Subtree) { |
123 | self.push_delim(sub.delimiter, false); | 170 | self.push_delim(sub.delimiter, false); |
124 | sub.token_trees.iter().for_each(|tt| self.convert_tt(tt)); | 171 | let mut peek = TokenPeek { iter: itertools::multipeek(sub.token_trees.iter()) }; |
172 | while let Some(tt) = peek.iter.next() { | ||
173 | self.convert_tt(tt, &mut peek); | ||
174 | } | ||
125 | self.push_delim(sub.delimiter, true) | 175 | self.push_delim(sub.delimiter, true) |
126 | } | 176 | } |
127 | fn convert_tt(&mut self, tt: &tt::TokenTree) { | 177 | |
178 | fn convert_tt<'a, I>(&mut self, tt: &tt::TokenTree, iter: &mut TokenPeek<'a, I>) | ||
179 | where | ||
180 | I: Iterator<Item = &'a tt::TokenTree>, | ||
181 | { | ||
128 | match tt { | 182 | match tt { |
129 | tt::TokenTree::Leaf(token) => self.convert_token(token), | 183 | tt::TokenTree::Leaf(token) => self.convert_token(token, iter), |
130 | tt::TokenTree::Subtree(sub) => self.convert_subtree(sub), | 184 | tt::TokenTree::Subtree(sub) => self.convert_subtree(sub), |
131 | } | 185 | } |
132 | } | 186 | } |
133 | fn convert_token(&mut self, token: &tt::Leaf) { | 187 | |
188 | fn convert_token<'a, I>(&mut self, token: &tt::Leaf, iter: &mut TokenPeek<'a, I>) | ||
189 | where | ||
190 | I: Iterator<Item = &'a tt::TokenTree>, | ||
191 | { | ||
134 | let tok = match token { | 192 | let tok = match token { |
135 | tt::Leaf::Literal(l) => TtToken { | 193 | tt::Leaf::Literal(l) => TtToken { |
136 | kind: SyntaxKind::INT_NUMBER, // FIXME | 194 | kind: classify_literal(&l.text).unwrap().kind, |
137 | is_joint_to_next: false, | 195 | is_joint_to_next: false, |
138 | text: l.text.clone(), | 196 | text: l.text.clone(), |
139 | }, | 197 | }, |
140 | tt::Leaf::Punct(p) => { | 198 | tt::Leaf::Punct(p) => { |
141 | let kind = match p.char { | 199 | if let Some(tt) = Self::convert_multi_char_punct(p, iter) { |
142 | // lexer may produce compound tokens for these ones | 200 | tt
143 | '.' => DOT, | 201 | } else { |
144 | ':' => COLON, | 202 | let kind = match p.char { |
145 | '=' => EQ, | 203 | // lexer may produce combpund tokens for these ones |
146 | '!' => EXCL, | 204 | '.' => DOT, |
147 | '-' => MINUS, | 205 | ':' => COLON, |
148 | c => SyntaxKind::from_char(c).unwrap(), | 206 | '=' => EQ, |
149 | }; | 207 | '!' => EXCL, |
150 | let text = { | 208 | '-' => MINUS, |
151 | let mut buf = [0u8; 4]; | 209 | c => SyntaxKind::from_char(c).unwrap(), |
152 | let s: &str = p.char.encode_utf8(&mut buf); | 210 | }; |
153 | SmolStr::new(s) | 211 | let text = { |
154 | }; | 212 | let mut buf = [0u8; 4]; |
155 | TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } | 213 | let s: &str = p.char.encode_utf8(&mut buf); |
214 | SmolStr::new(s) | ||
215 | }; | ||
216 | TtToken { kind, is_joint_to_next: p.spacing == tt::Spacing::Joint, text } | ||
217 | } | ||
156 | } | 218 | } |
157 | tt::Leaf::Ident(ident) => { | 219 | tt::Leaf::Ident(ident) => { |
158 | let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT); | 220 | let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT); |
@@ -161,6 +223,64 @@ impl TtTokenSource { | |||
161 | }; | 223 | }; |
162 | self.tokens.push(tok) | 224 | self.tokens.push(tok) |
163 | } | 225 | } |
226 | |||
227 | fn convert_multi_char_punct<'a, I>( | ||
228 | p: &tt::Punct, | ||
229 | iter: &mut TokenPeek<'a, I>, | ||
230 | ) -> Option<TtToken> | ||
231 | where | ||
232 | I: Iterator<Item = &'a tt::TokenTree>, | ||
233 | { | ||
234 | if let Some((m, is_joint_to_next)) = iter.current_punct3(p) { | ||
235 | if let Some((kind, text)) = match m { | ||
236 | ('<', '<', '=') => Some((SHLEQ, "<<=")), | ||
237 | ('>', '>', '=') => Some((SHREQ, ">>=")), | ||
238 | ('.', '.', '.') => Some((DOTDOTDOT, "...")), | ||
239 | ('.', '.', '=') => Some((DOTDOTEQ, "..=")), | ||
240 | _ => None, | ||
241 | } { | ||
242 | iter.next(); | ||
243 | iter.next(); | ||
244 | return Some(TtToken { kind, is_joint_to_next, text: text.into() }); | ||
245 | } | ||
246 | } | ||
247 | |||
248 | if let Some((m, is_joint_to_next)) = iter.current_punct2(p) { | ||
249 | if let Some((kind, text)) = match m { | ||
250 | ('<', '<') => Some((SHL, "<<")), | ||
251 | ('>', '>') => Some((SHR, ">>")), | ||
252 | |||
253 | ('|', '|') => Some((PIPEPIPE, "||")), | ||
254 | ('&', '&') => Some((AMPAMP, "&&")), | ||
255 | ('%', '=') => Some((PERCENTEQ, "%=")), | ||
256 | ('*', '=') => Some((STAREQ, "*=")), | ||
257 | ('/', '=') => Some((SLASHEQ, "/=")), | ||
258 | ('^', '=') => Some((CARETEQ, "^=")), | ||
259 | |||
260 | ('&', '=') => Some((AMPEQ, "&=")), | ||
261 | ('|', '=') => Some((PIPEEQ, "|=")), | ||
262 | ('-', '=') => Some((MINUSEQ, "-=")), | ||
263 | ('+', '=') => Some((PLUSEQ, "+=")), | ||
264 | ('>', '=') => Some((GTEQ, ">=")), | ||
265 | ('<', '=') => Some((LTEQ, "<=")), | ||
266 | |||
267 | ('-', '>') => Some((THIN_ARROW, "->")), | ||
268 | ('!', '=') => Some((NEQ, "!=")), | ||
269 | ('=', '>') => Some((FAT_ARROW, "=>")), | ||
270 | ('=', '=') => Some((EQEQ, "==")), | ||
271 | ('.', '.') => Some((DOTDOT, "..")), | ||
272 | (':', ':') => Some((COLONCOLON, "::")), | ||
273 | |||
274 | _ => None, | ||
275 | } { | ||
276 | iter.next(); | ||
277 | return Some(TtToken { kind, is_joint_to_next, text: text.into() }); | ||
278 | } | ||
279 | } | ||
280 | |||
281 | None | ||
282 | } | ||
283 | |||
164 | fn push_delim(&mut self, d: tt::Delimiter, closing: bool) { | 284 | fn push_delim(&mut self, d: tt::Delimiter, closing: bool) { |
165 | let (kinds, texts) = match d { | 285 | let (kinds, texts) = match d { |
166 | tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), | 286 | tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"), |
@@ -237,3 +357,44 @@ impl<'a> TreeSink for TtTreeSink<'a> { | |||
237 | self.inner.error(error, self.text_pos) | 357 | self.inner.error(error, self.text_pos) |
238 | } | 358 | } |
239 | } | 359 | } |
360 | |||
361 | #[cfg(test)] | ||
362 | mod tests { | ||
363 | use super::*; | ||
364 | use crate::tests::{expand, create_rules}; | ||
365 | |||
366 | #[test] | ||
367 | fn convert_tt_token_source() { | ||
368 | let rules = create_rules( | ||
369 | r#" | ||
370 | macro_rules! literals { | ||
371 | ($i:ident) => { | ||
372 | { | ||
373 | let a = 'c'; | ||
374 | let c = 1000; | ||
375 | let f = 12E+99_f64; | ||
376 | let s = "rust1"; | ||
377 | } | ||
378 | } | ||
379 | } | ||
380 | "#, | ||
381 | ); | ||
382 | let expansion = expand(&rules, "literals!(foo)"); | ||
383 | let tt_src = TtTokenSource::new(&expansion); | ||
384 | |||
385 | // [{] | ||
386 | // [let] [a] [=] ['c'] [;] | ||
387 | assert_eq!(tt_src.tokens[1 + 3].text, "'c'"); | ||
388 | assert_eq!(tt_src.tokens[1 + 3].kind, CHAR); | ||
389 | // [let] [c] [=] [1000] [;] | ||
390 | assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000"); | ||
391 | assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER); | ||
392 | // [let] [f] [=] [12E+99_f64] [;] | ||
393 | assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64"); | ||
394 | assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER); | ||
395 | |||
396 | // [let] [s] [=] ["rust1"] [;] | ||
397 | assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\""); | ||
398 | assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING); | ||
399 | } | ||
400 | } | ||
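Note on the punct coalescing above: token trees carry spacing on each punct, so an operator like `>>=` arrives as three single-char puncts whose first two are Joint. An illustration of the input shape (struct fields per the ra_tt crate):

    // current_punct3 sees ('>', '>', '=') with the first two Joint,
    // merges them into one SHREQ token ">>=", and the two extra
    // iter.next() calls consume the leaves that were folded in.
    let puncts = [
        tt::Punct { char: '>', spacing: tt::Spacing::Joint },
        tt::Punct { char: '>', spacing: tt::Spacing::Joint },
        tt::Punct { char: '=', spacing: tt::Spacing::Alone },
    ];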
diff --git a/crates/ra_parser/src/grammar/items.rs b/crates/ra_parser/src/grammar/items.rs index c4b8ef3c7..318fd69a1 100644 --- a/crates/ra_parser/src/grammar/items.rs +++ b/crates/ra_parser/src/grammar/items.rs | |||
@@ -79,19 +79,22 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
79 | let mut has_mods = false; | 79 | let mut has_mods = false; |
80 | 80 | ||
81 | // modifiers | 81 | // modifiers |
82 | // test_err async_without_semicolon | ||
83 | // fn foo() { let _ = async {} } | ||
84 | has_mods |= p.eat(CONST_KW); | 82 | has_mods |= p.eat(CONST_KW); |
85 | if p.at(ASYNC_KW) && p.nth(1) != L_CURLY && p.nth(1) != MOVE_KW && p.nth(1) != PIPE { | 83 | |
86 | p.eat(ASYNC_KW); | ||
87 | has_mods = true; | ||
88 | } | ||
89 | // test_err unsafe_block_in_mod | 84 | // test_err unsafe_block_in_mod |
90 | // fn foo(){} unsafe { } fn bar(){} | 85 | // fn foo(){} unsafe { } fn bar(){} |
91 | if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY { | 86 | if p.at(UNSAFE_KW) && p.nth(1) != L_CURLY { |
92 | p.eat(UNSAFE_KW); | 87 | p.eat(UNSAFE_KW); |
93 | has_mods = true; | 88 | has_mods = true; |
94 | } | 89 | } |
90 | |||
91 | // test_err async_without_semicolon | ||
92 | // fn foo() { let _ = async {} } | ||
93 | if p.at(ASYNC_KW) && p.nth(1) != L_CURLY && p.nth(1) != MOVE_KW && p.nth(1) != PIPE { | ||
94 | p.eat(ASYNC_KW); | ||
95 | has_mods = true; | ||
96 | } | ||
97 | |||
95 | if p.at(EXTERN_KW) { | 98 | if p.at(EXTERN_KW) { |
96 | has_mods = true; | 99 | has_mods = true; |
97 | abi(p); | 100 | abi(p); |
@@ -124,6 +127,14 @@ pub(super) fn maybe_item(p: &mut Parser, m: Marker, flavor: ItemFlavor) -> Resul | |||
124 | 127 | ||
125 | // test unsafe_fn | 128 | // test unsafe_fn |
126 | // unsafe fn foo() {} | 129 | // unsafe fn foo() {} |
130 | |||
131 | // test combined_fns | ||
132 | // unsafe async fn foo() {} | ||
133 | // const unsafe fn bar() {} | ||
134 | |||
135 | // test_err wrong_order_fns | ||
136 | // async unsafe fn foo() {} | ||
137 | // unsafe const fn bar() {} | ||
127 | FN_KW => { | 138 | FN_KW => { |
128 | fn_def(p, flavor); | 139 | fn_def(p, flavor); |
129 | m.complete(p, FN_DEF); | 140 | m.complete(p, FN_DEF); |
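Note: the modifier checks now run in the fixed order const, unsafe, async, so a modifier written ahead of one the parser has already passed is left in place and reported as an error (see the wrong_order_fns test below). A distilled, standalone sketch of the pattern, not the real parser:

    // Eats each modifier at most once, in declaration order;
    // anything out of order is left for the caller to flag.
    fn eat_modifiers(words: &[&str]) -> (usize, bool) {
        let mut consumed = 0;
        let mut has_mods = false;
        for expected in &["const", "unsafe", "async"] {
            if words.get(consumed) == Some(expected) {
                consumed += 1;
                has_mods = true;
            }
        }
        (consumed, has_mods)
    }

    // eat_modifiers(&["unsafe", "async", "fn"]) consumes both modifiers;
    // eat_modifiers(&["async", "unsafe", "fn"]) stops after "async",
    // leaving "unsafe" to be reported as unexpected.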
diff --git a/crates/ra_prof/Cargo.toml b/crates/ra_prof/Cargo.toml new file mode 100644 index 000000000..19ce21783 --- /dev/null +++ b/crates/ra_prof/Cargo.toml | |||
@@ -0,0 +1,9 @@ | |||
1 | [package] | ||
2 | edition = "2018" | ||
3 | name = "ra_prof" | ||
4 | version = "0.1.0" | ||
5 | authors = ["rust-analyzer developers"] | ||
6 | publish = false | ||
7 | |||
8 | [dependencies] | ||
9 | lazy_static = "1.3.0" \ No newline at end of file | ||
diff --git a/crates/ra_prof/src/lib.rs b/crates/ra_prof/src/lib.rs new file mode 100644 index 000000000..1cc8e361d --- /dev/null +++ b/crates/ra_prof/src/lib.rs | |||
@@ -0,0 +1,198 @@ | |||
1 | use std::cell::RefCell; | ||
2 | use std::time::{Duration, Instant}; | ||
3 | use std::mem; | ||
4 | use std::io::{stderr, Write}; | ||
5 | use std::iter::repeat; | ||
6 | use std::collections::{HashSet}; | ||
7 | use std::default::Default; | ||
8 | use std::iter::FromIterator; | ||
9 | use std::sync::RwLock; | ||
10 | use lazy_static::lazy_static; | ||
11 | |||
12 | /// Sets the profiling filter, which specifies the descriptions that are allowed to be profiled. | ||
13 | /// This is helpful when the call stack has too many nested profiling scopes. | ||
14 | /// Additionally, the filter can cap the maximum nesting depth of profiling scopes. | ||
15 | /// | ||
16 | /// # Example | ||
17 | /// ``` | ||
18 | /// use ra_prof::set_filter; | ||
19 | /// use ra_prof::Filter; | ||
20 | /// let max_depth = 2; | ||
21 | /// let allowed = vec!["profile1".to_string(), "profile2".to_string()]; | ||
22 | /// let f = Filter::new( max_depth, allowed ); | ||
23 | /// set_filter(f); | ||
24 | /// ``` | ||
25 | /// | ||
26 | pub fn set_filter(f: Filter) { | ||
27 | let set = HashSet::from_iter(f.allowed.iter().cloned()); | ||
28 | let mut old = FILTER.write().unwrap(); | ||
29 | let filter_data = FilterData { depth: f.depth, allowed: set, version: old.version + 1 }; | ||
30 | *old = filter_data; | ||
31 | } | ||
32 | |||
33 | /// This function starts a profiling scope in the current execution stack with a given description. | ||
34 | /// It returns a `Profiler` structure that measures the elapsed time between this function's invocation and the structure's drop. | ||
35 | /// It supports nested profiling scopes, created when this function is invoked multiple times on the execution stack; in that case the profiling information is nested in the output. | ||
36 | /// Profiling information is printed to stderr. | ||
37 | /// | ||
38 | /// # Example | ||
39 | /// ``` | ||
40 | /// use ra_prof::profile; | ||
41 | /// use ra_prof::set_filter; | ||
42 | /// use ra_prof::Filter; | ||
43 | /// | ||
44 | /// let allowed = vec!["profile1".to_string(), "profile2".to_string()]; | ||
45 | /// let f = Filter::new(2, allowed); | ||
46 | /// set_filter(f); | ||
47 | /// profiling_function1(); | ||
48 | /// | ||
49 | /// fn profiling_function1() { | ||
50 | /// let _p = profile("profile1"); | ||
51 | /// profiling_function2(); | ||
52 | /// } | ||
53 | /// | ||
54 | /// fn profiling_function2() { | ||
55 | /// let _p = profile("profile2"); | ||
56 | /// } | ||
57 | /// ``` | ||
58 | /// This will print the following to stderr: | ||
59 | /// ```text | ||
60 | /// 0ms - profile1 | ||
61 | ///     0ms - profile2 | ||
62 | /// ``` | ||
63 | /// | ||
64 | pub fn profile(desc: &str) -> Profiler { | ||
65 | PROFILE_STACK.with(|stack| { | ||
66 | let mut stack = stack.borrow_mut(); | ||
67 | if stack.starts.is_empty() { | ||
68 | match FILTER.try_read() { | ||
69 | Ok(f) => { | ||
70 | if f.version > stack.filter_data.version { | ||
71 | stack.filter_data = f.clone(); | ||
72 | } | ||
73 | } | ||
74 | Err(_) => (), | ||
75 | }; | ||
76 | } | ||
77 | let desc_str = desc.to_string(); | ||
78 | if desc_str.is_empty() { | ||
79 | Profiler { desc: None } | ||
80 | } else if stack.starts.len() < stack.filter_data.depth | ||
81 | && stack.filter_data.allowed.contains(&desc_str) | ||
82 | { | ||
83 | stack.starts.push(Instant::now()); | ||
84 | Profiler { desc: Some(desc_str) } | ||
85 | } else { | ||
86 | Profiler { desc: None } | ||
87 | } | ||
88 | }) | ||
89 | } | ||
90 | |||
91 | pub struct Profiler { | ||
92 | desc: Option<String>, | ||
93 | } | ||
94 | |||
95 | pub struct Filter { | ||
96 | depth: usize, | ||
97 | allowed: Vec<String>, | ||
98 | } | ||
99 | |||
100 | impl Filter { | ||
101 | pub fn new(depth: usize, allowed: Vec<String>) -> Filter { | ||
102 | Filter { depth, allowed } | ||
103 | } | ||
104 | } | ||
105 | |||
106 | struct ProfileStack { | ||
107 | starts: Vec<Instant>, | ||
108 | messages: Vec<Message>, | ||
109 | filter_data: FilterData, | ||
110 | } | ||
111 | |||
112 | struct Message { | ||
113 | level: usize, | ||
114 | duration: Duration, | ||
115 | message: String, | ||
116 | } | ||
117 | |||
118 | impl ProfileStack { | ||
119 | fn new() -> ProfileStack { | ||
120 | ProfileStack { starts: Vec::new(), messages: Vec::new(), filter_data: Default::default() } | ||
121 | } | ||
122 | } | ||
123 | |||
124 | #[derive(Default, Clone)] | ||
125 | struct FilterData { | ||
126 | depth: usize, | ||
127 | version: usize, | ||
128 | allowed: HashSet<String>, | ||
129 | } | ||
130 | |||
131 | lazy_static! { | ||
132 | static ref FILTER: RwLock<FilterData> = RwLock::new(Default::default()); | ||
133 | } | ||
134 | |||
135 | thread_local!(static PROFILE_STACK: RefCell<ProfileStack> = RefCell::new(ProfileStack::new())); | ||
136 | |||
137 | impl Drop for Profiler { | ||
138 | fn drop(&mut self) { | ||
139 | match self { | ||
140 | Profiler { desc: Some(desc) } => { | ||
141 | PROFILE_STACK.with(|stack| { | ||
142 | let mut stack = stack.borrow_mut(); | ||
143 | let start = stack.starts.pop().unwrap(); | ||
144 | let duration = start.elapsed(); | ||
145 | let level = stack.starts.len(); | ||
146 | let message = mem::replace(desc, String::new()); | ||
147 | stack.messages.push(Message { level, duration, message }); | ||
148 | if level == 0 { | ||
149 | let out = stderr(); | ||
150 | print(0, &stack.messages, &mut out.lock()); | ||
151 | stack.messages.clear(); | ||
152 | } | ||
153 | }); | ||
154 | } | ||
155 | Profiler { desc: None } => (), | ||
156 | } | ||
157 | } | ||
158 | } | ||
159 | |||
160 | fn print(lvl: usize, msgs: &[Message], out: &mut impl Write) { | ||
161 | let mut last = 0; | ||
162 | let indent = repeat(" ").take(lvl + 1).collect::<String>(); | ||
163 | for (i, &Message { level: l, duration: dur, message: ref msg }) in msgs.iter().enumerate() { | ||
164 | if l != lvl { | ||
165 | continue; | ||
166 | } | ||
167 | writeln!(out, "{} {:6}ms - {}", indent, dur.as_millis(), msg) | ||
168 | .expect("printing profiling info to stdout"); | ||
169 | |||
170 | print(lvl + 1, &msgs[last..i], out); | ||
171 | last = i; | ||
172 | } | ||
173 | } | ||
174 | |||
175 | #[cfg(test)] | ||
176 | mod tests { | ||
177 | |||
178 | use super::profile; | ||
179 | use super::set_filter; | ||
180 | use super::Filter; | ||
181 | |||
182 | #[test] | ||
183 | fn test_basic_profile() { | ||
184 | let s = vec!["profile1".to_string(), "profile2".to_string()]; | ||
185 | let f = Filter::new(2, s); | ||
186 | set_filter(f); | ||
187 | profiling_function1(); | ||
188 | } | ||
189 | |||
190 | fn profiling_function1() { | ||
191 | let _p = profile("profile1"); | ||
192 | profiling_function2(); | ||
193 | } | ||
194 | |||
195 | fn profiling_function2() { | ||
196 | let _p = profile("profile2"); | ||
197 | } | ||
198 | } | ||
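Note: combined with the RA_PROFILE/RA_PROFILE_DEPTH wiring in ra_lsp_server above, a filtered run prints nested timings to stderr via print()'s "{} {:6}ms - {}" format, along these lines (durations illustrative):

         2ms - loop_turn
          0ms - crate_def_map_query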
diff --git a/crates/ra_syntax/src/lib.rs b/crates/ra_syntax/src/lib.rs index e1088e296..c56bc9f16 100644 --- a/crates/ra_syntax/src/lib.rs +++ b/crates/ra_syntax/src/lib.rs | |||
@@ -40,7 +40,7 @@ pub use crate::{ | |||
40 | syntax_text::SyntaxText, | 40 | syntax_text::SyntaxText, |
41 | syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken}, | 41 | syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken}, |
42 | ptr::{SyntaxNodePtr, AstPtr}, | 42 | ptr::{SyntaxNodePtr, AstPtr}, |
43 | parsing::{tokenize, Token}, | 43 | parsing::{tokenize, classify_literal, Token}, |
44 | }; | 44 | }; |
45 | 45 | ||
46 | use ra_text_edit::AtomTextEdit; | 46 | use ra_text_edit::AtomTextEdit; |
diff --git a/crates/ra_syntax/src/parsing.rs b/crates/ra_syntax/src/parsing.rs index ad5668a65..15d69c5ab 100644 --- a/crates/ra_syntax/src/parsing.rs +++ b/crates/ra_syntax/src/parsing.rs | |||
@@ -11,7 +11,7 @@ use crate::{ | |||
11 | syntax_node::GreenNode, | 11 | syntax_node::GreenNode, |
12 | }; | 12 | }; |
13 | 13 | ||
14 | pub use self::lexer::{tokenize, Token}; | 14 | pub use self::lexer::{tokenize, classify_literal, Token}; |
15 | 15 | ||
16 | pub(crate) use self::reparsing::incremental_reparse; | 16 | pub(crate) use self::reparsing::incremental_reparse; |
17 | 17 | ||
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs index 36e841609..3ae42912c 100644 --- a/crates/ra_syntax/src/parsing/lexer.rs +++ b/crates/ra_syntax/src/parsing/lexer.rs | |||
@@ -214,3 +214,12 @@ fn scan_literal_suffix(ptr: &mut Ptr) { | |||
214 | } | 214 | } |
215 | ptr.bump_while(is_ident_continue); | 215 | ptr.bump_while(is_ident_continue); |
216 | } | 216 | } |
217 | |||
218 | pub fn classify_literal(text: &str) -> Option<Token> { | ||
219 | let tkn = next_token(text); | ||
220 | if !tkn.kind.is_literal() || tkn.len.to_usize() != text.len() { | ||
221 | return None; | ||
222 | } | ||
223 | |||
224 | Some(tkn) | ||
225 | } | ||
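Note: classify_literal re-lexes its input and returns the token only when the entire string is a single literal, otherwise None. Hedged usage examples (kinds are ra_syntax::SyntaxKind values):

    assert_eq!(classify_literal("1000").unwrap().kind, INT_NUMBER);
    assert_eq!(classify_literal("12E+99_f64").unwrap().kind, FLOAT_NUMBER);
    assert!(classify_literal("1000 ").is_none()); // trailing space: not a pure literal
    assert!(classify_literal("foo").is_none());   // identifier, not a literal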
diff --git a/crates/ra_syntax/tests/data/parser/inline/err/0010_wrong_order_fns.rs b/crates/ra_syntax/tests/data/parser/inline/err/0010_wrong_order_fns.rs new file mode 100644 index 000000000..16edee95d --- /dev/null +++ b/crates/ra_syntax/tests/data/parser/inline/err/0010_wrong_order_fns.rs | |||
@@ -0,0 +1,2 @@ | |||
1 | async unsafe fn foo() {} | ||
2 | unsafe const fn bar() {} | ||
diff --git a/crates/ra_syntax/tests/data/parser/inline/err/0010_wrong_order_fns.txt b/crates/ra_syntax/tests/data/parser/inline/err/0010_wrong_order_fns.txt new file mode 100644 index 000000000..220191ffa --- /dev/null +++ b/crates/ra_syntax/tests/data/parser/inline/err/0010_wrong_order_fns.txt | |||
@@ -0,0 +1,39 @@ | |||
1 | SOURCE_FILE@[0; 50) | ||
2 | ERROR@[0; 5) | ||
3 | ASYNC_KW@[0; 5) "async" | ||
4 | err: `expected fn, trait or impl` | ||
5 | WHITESPACE@[5; 6) " " | ||
6 | FN_DEF@[6; 24) | ||
7 | UNSAFE_KW@[6; 12) "unsafe" | ||
8 | WHITESPACE@[12; 13) " " | ||
9 | FN_KW@[13; 15) "fn" | ||
10 | WHITESPACE@[15; 16) " " | ||
11 | NAME@[16; 19) | ||
12 | IDENT@[16; 19) "foo" | ||
13 | PARAM_LIST@[19; 21) | ||
14 | L_PAREN@[19; 20) "(" | ||
15 | R_PAREN@[20; 21) ")" | ||
16 | WHITESPACE@[21; 22) " " | ||
17 | BLOCK@[22; 24) | ||
18 | L_CURLY@[22; 23) "{" | ||
19 | R_CURLY@[23; 24) "}" | ||
20 | WHITESPACE@[24; 25) "\n" | ||
21 | ERROR@[25; 31) | ||
22 | UNSAFE_KW@[25; 31) "unsafe" | ||
23 | err: `expected fn, trait or impl` | ||
24 | WHITESPACE@[31; 32) " " | ||
25 | FN_DEF@[32; 49) | ||
26 | CONST_KW@[32; 37) "const" | ||
27 | WHITESPACE@[37; 38) " " | ||
28 | FN_KW@[38; 40) "fn" | ||
29 | WHITESPACE@[40; 41) " " | ||
30 | NAME@[41; 44) | ||
31 | IDENT@[41; 44) "bar" | ||
32 | PARAM_LIST@[44; 46) | ||
33 | L_PAREN@[44; 45) "(" | ||
34 | R_PAREN@[45; 46) ")" | ||
35 | WHITESPACE@[46; 47) " " | ||
36 | BLOCK@[47; 49) | ||
37 | L_CURLY@[47; 48) "{" | ||
38 | R_CURLY@[48; 49) "}" | ||
39 | WHITESPACE@[49; 50) "\n" | ||
diff --git a/crates/ra_syntax/tests/data/parser/inline/ok/0128_combined_fns.rs b/crates/ra_syntax/tests/data/parser/inline/ok/0128_combined_fns.rs new file mode 100644 index 000000000..46af91b82 --- /dev/null +++ b/crates/ra_syntax/tests/data/parser/inline/ok/0128_combined_fns.rs | |||
@@ -0,0 +1,2 @@ | |||
1 | unsafe async fn foo() {} | ||
2 | const unsafe fn bar() {} | ||
diff --git a/crates/ra_syntax/tests/data/parser/inline/ok/0128_combined_fns.txt b/crates/ra_syntax/tests/data/parser/inline/ok/0128_combined_fns.txt new file mode 100644 index 000000000..2a16aeb61 --- /dev/null +++ b/crates/ra_syntax/tests/data/parser/inline/ok/0128_combined_fns.txt | |||
@@ -0,0 +1,35 @@ | |||
1 | SOURCE_FILE@[0; 50) | ||
2 | FN_DEF@[0; 24) | ||
3 | UNSAFE_KW@[0; 6) "unsafe" | ||
4 | WHITESPACE@[6; 7) " " | ||
5 | ASYNC_KW@[7; 12) "async" | ||
6 | WHITESPACE@[12; 13) " " | ||
7 | FN_KW@[13; 15) "fn" | ||
8 | WHITESPACE@[15; 16) " " | ||
9 | NAME@[16; 19) | ||
10 | IDENT@[16; 19) "foo" | ||
11 | PARAM_LIST@[19; 21) | ||
12 | L_PAREN@[19; 20) "(" | ||
13 | R_PAREN@[20; 21) ")" | ||
14 | WHITESPACE@[21; 22) " " | ||
15 | BLOCK@[22; 24) | ||
16 | L_CURLY@[22; 23) "{" | ||
17 | R_CURLY@[23; 24) "}" | ||
18 | WHITESPACE@[24; 25) "\n" | ||
19 | FN_DEF@[25; 49) | ||
20 | CONST_KW@[25; 30) "const" | ||
21 | WHITESPACE@[30; 31) " " | ||
22 | UNSAFE_KW@[31; 37) "unsafe" | ||
23 | WHITESPACE@[37; 38) " " | ||
24 | FN_KW@[38; 40) "fn" | ||
25 | WHITESPACE@[40; 41) " " | ||
26 | NAME@[41; 44) | ||
27 | IDENT@[41; 44) "bar" | ||
28 | PARAM_LIST@[44; 46) | ||
29 | L_PAREN@[44; 45) "(" | ||
30 | R_PAREN@[45; 46) ")" | ||
31 | WHITESPACE@[46; 47) " " | ||
32 | BLOCK@[47; 49) | ||
33 | L_CURLY@[47; 48) "{" | ||
34 | R_CURLY@[48; 49) "}" | ||
35 | WHITESPACE@[49; 50) "\n" | ||