about | summary | refs | log | tree | commit | diff
path: root/crates/syntax/src/parsing
diff options
context:
space:
mode:
Diffstat (limited to 'crates/syntax/src/parsing')
-rw-r--r--  crates/syntax/src/parsing/lexer.rs      8
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs  61
2 files changed, 34 insertions(+), 35 deletions(-)
diff --git a/crates/syntax/src/parsing/lexer.rs b/crates/syntax/src/parsing/lexer.rs
index 0cbba73c5..7c8d0a4c4 100644
--- a/crates/syntax/src/parsing/lexer.rs
+++ b/crates/syntax/src/parsing/lexer.rs
@@ -24,7 +24,7 @@ pub struct Token {
24/// Beware that it checks for shebang first and its length contributes to resulting 24/// Beware that it checks for shebang first and its length contributes to resulting
25/// tokens offsets. 25/// tokens offsets.
26pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) { 26pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
27 // non-empty string is a precondtion of `rustc_lexer::strip_shebang()`. 27 // non-empty string is a precondition of `rustc_lexer::strip_shebang()`.
28 if text.is_empty() { 28 if text.is_empty() {
29 return Default::default(); 29 return Default::default();
30 } 30 }
@@ -76,7 +76,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
76} 76}
77 77
78/// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and 78/// The same as `lex_single_syntax_kind()` but returns only `SyntaxKind` and
79/// returns `None` if any tokenization error occured. 79/// returns `None` if any tokenization error occurred.
80/// 80///
81/// Beware that unescape errors are not checked at tokenization time. 81/// Beware that unescape errors are not checked at tokenization time.
82pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> { 82pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
@@ -96,7 +96,7 @@ pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
96/// 96///
97/// Beware that unescape errors are not checked at tokenization time. 97/// Beware that unescape errors are not checked at tokenization time.
98fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> { 98fn lex_first_token(text: &str) -> Option<(Token, Option<SyntaxError>)> {
99 // non-empty string is a precondtion of `rustc_lexer::first_token()`. 99 // non-empty string is a precondition of `rustc_lexer::first_token()`.
100 if text.is_empty() { 100 if text.is_empty() {
101 return None; 101 return None;
102 } 102 }
@@ -117,7 +117,7 @@ fn rustc_token_kind_to_syntax_kind(
117 token_text: &str, 117 token_text: &str,
118) -> (SyntaxKind, Option<&'static str>) { 118) -> (SyntaxKind, Option<&'static str>) {
119 // A note on an intended tradeoff: 119 // A note on an intended tradeoff:
120 // We drop some useful infromation here (see patterns with double dots `..`) 120 // We drop some useful information here (see patterns with double dots `..`)
121 // Storing that info in `SyntaxKind` is not possible due to its layout requirements of 121 // Storing that info in `SyntaxKind` is not possible due to its layout requirements of
122 // being `u16` that come from `rowan::SyntaxKind`. 122 // being `u16` that come from `rowan::SyntaxKind`.
123 123
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 190f5f67a..76f01084c 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -10,7 +10,6 @@ use parser::Reparser;
10use text_edit::Indel; 10use text_edit::Indel;
11 11
12use crate::{ 12use crate::{
13 algo,
14 parsing::{ 13 parsing::{
15 lexer::{lex_single_syntax_kind, tokenize, Token}, 14 lexer::{lex_single_syntax_kind, tokenize, Token},
16 text_token_source::TextTokenSource, 15 text_token_source::TextTokenSource,
@@ -41,7 +40,7 @@ fn reparse_token<'node>(
41 root: &'node SyntaxNode, 40 root: &'node SyntaxNode,
42 edit: &Indel, 41 edit: &Indel,
43) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> { 42) -> Option<(GreenNode, Vec<SyntaxError>, TextRange)> {
44 let prev_token = algo::find_covering_element(root, edit.delete).as_token()?.clone(); 43 let prev_token = root.covering_element(edit.delete).as_token()?.clone();
45 let prev_token_kind = prev_token.kind(); 44 let prev_token_kind = prev_token.kind();
46 match prev_token_kind { 45 match prev_token_kind {
47 WHITESPACE | COMMENT | IDENT | STRING => { 46 WHITESPACE | COMMENT | IDENT | STRING => {
@@ -124,7 +123,7 @@ fn is_contextual_kw(text: &str) -> bool {
124} 123}
125 124
126fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> { 125fn find_reparsable_node(node: &SyntaxNode, range: TextRange) -> Option<(SyntaxNode, Reparser)> {
127 let node = algo::find_covering_element(node, range); 126 let node = node.covering_element(range);
128 127
129 let mut ancestors = match node { 128 let mut ancestors = match node {
130 NodeOrToken::Token(it) => it.parent().ancestors(), 129 NodeOrToken::Token(it) => it.parent().ancestors(),
@@ -223,7 +222,7 @@ mod tests {
223 do_check( 222 do_check(
224 r" 223 r"
225fn foo() { 224fn foo() {
226 let x = foo + <|>bar<|> 225 let x = foo + $0bar$0
227} 226}
228", 227",
229 "baz", 228 "baz",
@@ -232,7 +231,7 @@ fn foo() {
232 do_check( 231 do_check(
233 r" 232 r"
234fn foo() { 233fn foo() {
235 let x = foo<|> + bar<|> 234 let x = foo$0 + bar$0
236} 235}
237", 236",
238 "baz", 237 "baz",
@@ -241,7 +240,7 @@ fn foo() {
241 do_check( 240 do_check(
242 r" 241 r"
243struct Foo { 242struct Foo {
244 f: foo<|><|> 243 f: foo$0$0
245} 244}
246", 245",
247 ",\n g: (),", 246 ",\n g: (),",
@@ -252,7 +251,7 @@ struct Foo {
252fn foo { 251fn foo {
253 let; 252 let;
254 1 + 1; 253 1 + 1;
255 <|>92<|>; 254 $092$0;
256} 255}
257", 256",
258 "62", 257 "62",
@@ -261,7 +260,7 @@ fn foo {
261 do_check( 260 do_check(
262 r" 261 r"
263mod foo { 262mod foo {
264 fn <|><|> 263 fn $0$0
265} 264}
266", 265",
267 "bar", 266 "bar",
@@ -271,7 +270,7 @@ mod foo {
271 do_check( 270 do_check(
272 r" 271 r"
273trait Foo { 272trait Foo {
274 type <|>Foo<|>; 273 type $0Foo$0;
275} 274}
276", 275",
277 "Output", 276 "Output",
@@ -280,17 +279,17 @@ trait Foo {
280 do_check( 279 do_check(
281 r" 280 r"
282impl IntoIterator<Item=i32> for Foo { 281impl IntoIterator<Item=i32> for Foo {
283 f<|><|> 282 f$0$0
284} 283}
285", 284",
286 "n next(", 285 "n next(",
287 9, 286 9,
288 ); 287 );
289 do_check(r"use a::b::{foo,<|>,bar<|>};", "baz", 10); 288 do_check(r"use a::b::{foo,$0,bar$0};", "baz", 10);
290 do_check( 289 do_check(
291 r" 290 r"
292pub enum A { 291pub enum A {
293 Foo<|><|> 292 Foo$0$0
294} 293}
295", 294",
296 "\nBar;\n", 295 "\nBar;\n",
@@ -298,7 +297,7 @@ pub enum A {
298 ); 297 );
299 do_check( 298 do_check(
300 r" 299 r"
301foo!{a, b<|><|> d} 300foo!{a, b$0$0 d}
302", 301",
303 ", c[3]", 302 ", c[3]",
304 8, 303 8,
@@ -306,7 +305,7 @@ foo!{a, b<|><|> d}
306 do_check( 305 do_check(
307 r" 306 r"
308fn foo() { 307fn foo() {
309 vec![<|><|>] 308 vec![$0$0]
310} 309}
311", 310",
312 "123", 311 "123",
@@ -315,7 +314,7 @@ fn foo() {
315 do_check( 314 do_check(
316 r" 315 r"
317extern { 316extern {
318 fn<|>;<|> 317 fn$0;$0
319} 318}
320", 319",
321 " exit(code: c_int)", 320 " exit(code: c_int)",
@@ -326,7 +325,7 @@ extern {
326 #[test] 325 #[test]
327 fn reparse_token_tests() { 326 fn reparse_token_tests() {
328 do_check( 327 do_check(
329 r"<|><|> 328 r"$0$0
330fn foo() -> i32 { 1 } 329fn foo() -> i32 { 1 }
331", 330",
332 "\n\n\n \n", 331 "\n\n\n \n",
@@ -334,49 +333,49 @@ fn foo() -> i32 { 1 }
334 ); 333 );
335 do_check( 334 do_check(
336 r" 335 r"
337fn foo() -> <|><|> {} 336fn foo() -> $0$0 {}
338", 337",
339 " \n", 338 " \n",
340 2, 339 2,
341 ); 340 );
342 do_check( 341 do_check(
343 r" 342 r"
344fn <|>foo<|>() -> i32 { 1 } 343fn $0foo$0() -> i32 { 1 }
345", 344",
346 "bar", 345 "bar",
347 3, 346 3,
348 ); 347 );
349 do_check( 348 do_check(
350 r" 349 r"
351fn foo<|><|>foo() { } 350fn foo$0$0foo() { }
352", 351",
353 "bar", 352 "bar",
354 6, 353 6,
355 ); 354 );
356 do_check( 355 do_check(
357 r" 356 r"
358fn foo /* <|><|> */ () {} 357fn foo /* $0$0 */ () {}
359", 358",
360 "some comment", 359 "some comment",
361 6, 360 6,
362 ); 361 );
363 do_check( 362 do_check(
364 r" 363 r"
365fn baz <|><|> () {} 364fn baz $0$0 () {}
366", 365",
367 " \t\t\n\n", 366 " \t\t\n\n",
368 2, 367 2,
369 ); 368 );
370 do_check( 369 do_check(
371 r" 370 r"
372fn baz <|><|> () {} 371fn baz $0$0 () {}
373", 372",
374 " \t\t\n\n", 373 " \t\t\n\n",
375 2, 374 2,
376 ); 375 );
377 do_check( 376 do_check(
378 r" 377 r"
379/// foo <|><|>omment 378/// foo $0$0omment
380mod { } 379mod { }
381", 380",
382 "c", 381 "c",
@@ -384,28 +383,28 @@ mod { }
384 ); 383 );
385 do_check( 384 do_check(
386 r#" 385 r#"
387fn -> &str { "Hello<|><|>" } 386fn -> &str { "Hello$0$0" }
388"#, 387"#,
389 ", world", 388 ", world",
390 7, 389 7,
391 ); 390 );
392 do_check( 391 do_check(
393 r#" 392 r#"
394fn -> &str { // "Hello<|><|>" 393fn -> &str { // "Hello$0$0"
395"#, 394"#,
396 ", world", 395 ", world",
397 10, 396 10,
398 ); 397 );
399 do_check( 398 do_check(
400 r##" 399 r##"
401fn -> &str { r#"Hello<|><|>"# 400fn -> &str { r#"Hello$0$0"#
402"##, 401"##,
403 ", world", 402 ", world",
404 10, 403 10,
405 ); 404 );
406 do_check( 405 do_check(
407 r" 406 r"
408#[derive(<|>Copy<|>)] 407#[derive($0Copy$0)]
409enum Foo { 408enum Foo {
410 409
411} 410}
@@ -417,12 +416,12 @@ enum Foo {
417 416
418 #[test] 417 #[test]
419 fn reparse_str_token_with_error_unchanged() { 418 fn reparse_str_token_with_error_unchanged() {
420 do_check(r#""<|>Unclosed<|> string literal"#, "Still unclosed", 24); 419 do_check(r#""$0Unclosed$0 string literal"#, "Still unclosed", 24);
421 } 420 }
422 421
423 #[test] 422 #[test]
424 fn reparse_str_token_with_error_fixed() { 423 fn reparse_str_token_with_error_fixed() {
425 do_check(r#""unterinated<|><|>"#, "\"", 12); 424 do_check(r#""unterinated$0$0"#, "\"", 12);
426 } 425 }
427 426
428 #[test] 427 #[test]
@@ -430,7 +429,7 @@ enum Foo {
430 do_check( 429 do_check(
431 r#"fn main() { 430 r#"fn main() {
432 if {} 431 if {}
433 32 + 4<|><|> 432 32 + 4$0$0
434 return 433 return
435 if {} 434 if {}
436 }"#, 435 }"#,
@@ -444,7 +443,7 @@ enum Foo {
444 do_check( 443 do_check(
445 r#"fn main() { 444 r#"fn main() {
446 if {} 445 if {}
447 32 + 4<|><|> 446 32 + 4$0$0
448 return 447 return
449 if {} 448 if {}
450 }"#, 449 }"#,