Diffstat (limited to 'crates/syntax/src')
-rw-r--r--  crates/syntax/src/algo.rs                   |   7
-rw-r--r--  crates/syntax/src/ast.rs                    |   6
-rw-r--r--  crates/syntax/src/ast/edit.rs               |  11
-rw-r--r--  crates/syntax/src/ast/generated/nodes.rs    |  38
-rw-r--r--  crates/syntax/src/ast/make.rs               |  74
-rw-r--r--  crates/syntax/src/ast/node_ext.rs           | 106
-rw-r--r--  crates/syntax/src/ast/token_ext.rs          |  36
-rw-r--r--  crates/syntax/src/lib.rs                    |   4
-rw-r--r--  crates/syntax/src/parsing/reparsing.rs      |   3
-rw-r--r--  crates/syntax/src/parsing/text_tree_sink.rs |   4
-rw-r--r--  crates/syntax/src/syntax_node.rs            |   4
-rw-r--r--  crates/syntax/src/tests.rs                  |  28
-rw-r--r--  crates/syntax/src/validation.rs             |  18
13 files changed, 258 insertions, 81 deletions
diff --git a/crates/syntax/src/algo.rs b/crates/syntax/src/algo.rs
index 827ae78f9..2ff92f9f6 100644
--- a/crates/syntax/src/algo.rs
+++ b/crates/syntax/src/algo.rs
@@ -4,6 +4,7 @@ use std::{
     fmt,
     hash::BuildHasherDefault,
     ops::{self, RangeInclusive},
+    ptr,
 };
 
 use indexmap::IndexMap;
@@ -171,7 +172,7 @@ pub fn diff(from: &SyntaxNode, to: &SyntaxNode) -> TreeDiff {
             && lhs.text_range().len() == rhs.text_range().len()
             && match (&lhs, &rhs) {
                 (NodeOrToken::Node(lhs), NodeOrToken::Node(rhs)) => {
-                    lhs.green() == rhs.green() || lhs.text() == rhs.text()
+                    ptr::eq(lhs.green(), rhs.green()) || lhs.text() == rhs.text()
                 }
                 (NodeOrToken::Token(lhs), NodeOrToken::Token(rhs)) => lhs.text() == rhs.text(),
                 _ => false,
@@ -566,7 +567,7 @@ impl<'a> SyntaxRewriter<'a> {
 
 fn element_to_green(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
     match element {
-        NodeOrToken::Node(it) => NodeOrToken::Node(it.green().clone()),
+        NodeOrToken::Node(it) => NodeOrToken::Node(it.green().to_owned()),
         NodeOrToken::Token(it) => NodeOrToken::Token(it.green().clone()),
     }
 }
@@ -624,7 +625,7 @@ fn position_of_child(parent: &SyntaxNode, child: SyntaxElement) -> usize {
 
 fn to_green_element(element: SyntaxElement) -> NodeOrToken<rowan::GreenNode, rowan::GreenToken> {
     match element {
-        NodeOrToken::Node(it) => it.green().clone().into(),
+        NodeOrToken::Node(it) => it.green().to_owned().into(),
         NodeOrToken::Token(it) => it.green().clone().into(),
     }
 }
diff --git a/crates/syntax/src/ast.rs b/crates/syntax/src/ast.rs
index 83de067d9..b3a24d39d 100644
--- a/crates/syntax/src/ast.rs
+++ b/crates/syntax/src/ast.rs
@@ -12,14 +12,14 @@ use std::marker::PhantomData;
 
 use crate::{
     syntax_node::{SyntaxNode, SyntaxNodeChildren, SyntaxToken},
-    SmolStr, SyntaxKind,
+    SyntaxKind,
 };
 
 pub use self::{
     expr_ext::{ArrayExprKind, BinOp, Effect, ElseBranch, LiteralKind, PrefixOp, RangeOp},
     generated::{nodes::*, tokens::*},
     node_ext::{
-        AttrKind, FieldKind, Macro, NameOrNameRef, PathSegmentKind, SelfParamKind,
+        AttrKind, FieldKind, Macro, NameLike, NameOrNameRef, PathSegmentKind, SelfParamKind,
         SlicePatComponents, StructKind, TypeBoundKind, VisibilityKind,
     },
     token_ext::*,
@@ -54,7 +54,7 @@ pub trait AstToken {
 
     fn syntax(&self) -> &SyntaxToken;
 
-    fn text(&self) -> &SmolStr {
+    fn text(&self) -> &str {
         self.syntax().text()
     }
 }
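Usage sketch (not part of the patch): with `AstToken::text` returning a plain `&str`, call sites no longer need `.as_str()` conversions. A minimal example, assuming the `syntax` crate from this repository as a dependency:

    use syntax::{ast::{self, AstToken}, AstNode, SourceFile};

    fn main() {
        let file = SourceFile::parse("fn main() { }").tree();
        let ws = file
            .syntax()
            .descendants_with_tokens()
            .filter_map(|it| it.into_token())
            .find_map(ast::Whitespace::cast)
            .unwrap();
        // `text()` now hands back `&str` directly.
        let text: &str = ws.text();
        assert_eq!(text, " ");
    }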
diff --git a/crates/syntax/src/ast/edit.rs b/crates/syntax/src/ast/edit.rs
index 824ebf41c..0b3b76d4a 100644
--- a/crates/syntax/src/ast/edit.rs
+++ b/crates/syntax/src/ast/edit.rs
@@ -595,11 +595,14 @@ impl ops::Add<u8> for IndentLevel {
 
 impl IndentLevel {
     pub fn from_node(node: &SyntaxNode) -> IndentLevel {
-        let first_token = match node.first_token() {
-            Some(it) => it,
+        match node.first_token() {
+            Some(it) => Self::from_token(&it),
             None => return IndentLevel(0),
-        };
-        for ws in prev_tokens(first_token).filter_map(ast::Whitespace::cast) {
+        }
+    }
+
+    pub fn from_token(token: &SyntaxToken) -> IndentLevel {
+        for ws in prev_tokens(token.clone()).filter_map(ast::Whitespace::cast) {
             let text = ws.syntax().text();
             if let Some(pos) = text.rfind('\n') {
                 let level = text[pos + 1..].chars().count() / 4;
diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs
index 5baa54a3f..064931aec 100644
--- a/crates/syntax/src/ast/generated/nodes.rs
+++ b/crates/syntax/src/ast/generated/nodes.rs
@@ -1401,15 +1401,15 @@ pub enum FieldList {
     TupleFieldList(TupleFieldList),
 }
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
-pub enum AdtDef {
+pub enum Adt {
     Enum(Enum),
     Struct(Struct),
     Union(Union),
 }
-impl ast::AttrsOwner for AdtDef {}
-impl ast::GenericParamsOwner for AdtDef {}
-impl ast::NameOwner for AdtDef {}
-impl ast::VisibilityOwner for AdtDef {}
+impl ast::AttrsOwner for Adt {}
+impl ast::GenericParamsOwner for Adt {}
+impl ast::NameOwner for Adt {}
+impl ast::VisibilityOwner for Adt {}
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum AssocItem {
     Const(Const),
@@ -3394,16 +3394,16 @@ impl AstNode for FieldList {
         }
     }
 }
-impl From<Enum> for AdtDef {
-    fn from(node: Enum) -> AdtDef { AdtDef::Enum(node) }
+impl From<Enum> for Adt {
+    fn from(node: Enum) -> Adt { Adt::Enum(node) }
 }
-impl From<Struct> for AdtDef {
-    fn from(node: Struct) -> AdtDef { AdtDef::Struct(node) }
+impl From<Struct> for Adt {
+    fn from(node: Struct) -> Adt { Adt::Struct(node) }
 }
-impl From<Union> for AdtDef {
-    fn from(node: Union) -> AdtDef { AdtDef::Union(node) }
+impl From<Union> for Adt {
+    fn from(node: Union) -> Adt { Adt::Union(node) }
 }
-impl AstNode for AdtDef {
+impl AstNode for Adt {
     fn can_cast(kind: SyntaxKind) -> bool {
         match kind {
             ENUM | STRUCT | UNION => true,
@@ -3412,18 +3412,18 @@ impl AstNode for AdtDef {
     }
     fn cast(syntax: SyntaxNode) -> Option<Self> {
         let res = match syntax.kind() {
-            ENUM => AdtDef::Enum(Enum { syntax }),
-            STRUCT => AdtDef::Struct(Struct { syntax }),
-            UNION => AdtDef::Union(Union { syntax }),
+            ENUM => Adt::Enum(Enum { syntax }),
+            STRUCT => Adt::Struct(Struct { syntax }),
+            UNION => Adt::Union(Union { syntax }),
             _ => return None,
         };
         Some(res)
     }
     fn syntax(&self) -> &SyntaxNode {
         match self {
-            AdtDef::Enum(it) => &it.syntax,
-            AdtDef::Struct(it) => &it.syntax,
-            AdtDef::Union(it) => &it.syntax,
+            Adt::Enum(it) => &it.syntax,
+            Adt::Struct(it) => &it.syntax,
+            Adt::Union(it) => &it.syntax,
         }
     }
 }
@@ -3571,7 +3571,7 @@ impl std::fmt::Display for FieldList {
         std::fmt::Display::fmt(self.syntax(), f)
     }
 }
-impl std::fmt::Display for AdtDef {
+impl std::fmt::Display for Adt {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         std::fmt::Display::fmt(self.syntax(), f)
     }
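Usage sketch (not part of the patch): the renamed `ast::Adt` enum still groups structs, enums and unions under one node type. A minimal example, assuming the `syntax` crate as a dependency:

    use syntax::{ast::{self, NameOwner}, AstNode, SourceFile};

    fn main() {
        let file = SourceFile::parse("struct S { f: u32 }").tree();
        // `Adt::cast` accepts any STRUCT, ENUM or UNION node.
        let adt = file.syntax().descendants().find_map(ast::Adt::cast).unwrap();
        match adt {
            ast::Adt::Struct(s) => assert_eq!(s.name().unwrap().text(), "S"),
            _ => unreachable!("the input declares a struct"),
        }
    }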
diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs
index 9ffc3ae11..b6c5de658 100644
--- a/crates/syntax/src/ast/make.rs
+++ b/crates/syntax/src/ast/make.rs
@@ -24,11 +24,24 @@ pub fn name_ref(text: &str) -> ast::NameRef {
 // FIXME: replace stringly-typed constructor with a family of typed ctors, a-la
 // `expr_xxx`.
 pub fn ty(text: &str) -> ast::Type {
-    ast_from_text(&format!("impl {} for D {{}};", text))
+    ast_from_text(&format!("fn f() -> {} {{}}", text))
 }
 pub fn ty_unit() -> ast::Type {
     ty("()")
 }
+// FIXME: handle types of length == 1
+pub fn ty_tuple(types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
+    let contents = types.into_iter().join(", ");
+    ty(&format!("({})", contents))
+}
+// FIXME: handle path to type
+pub fn ty_generic(name: ast::NameRef, types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
+    let contents = types.into_iter().join(", ");
+    ty(&format!("{}<{}>", name, contents))
+}
+pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type {
+    ty(&if exclusive { format!("&mut {}", target) } else { format!("&{}", target) })
+}
 
 pub fn assoc_item_list() -> ast::AssocItemList {
     ast_from_text("impl C for D {};")
@@ -175,11 +188,20 @@ pub fn expr_path(path: ast::Path) -> ast::Expr {
 pub fn expr_continue() -> ast::Expr {
     expr_from_text("continue")
 }
-pub fn expr_break() -> ast::Expr {
-    expr_from_text("break")
+pub fn expr_break(expr: Option<ast::Expr>) -> ast::Expr {
+    match expr {
+        Some(expr) => expr_from_text(&format!("break {}", expr)),
+        None => expr_from_text("break"),
+    }
 }
-pub fn expr_return() -> ast::Expr {
-    expr_from_text("return")
+pub fn expr_return(expr: Option<ast::Expr>) -> ast::Expr {
+    match expr {
+        Some(expr) => expr_from_text(&format!("return {}", expr)),
+        None => expr_from_text("return"),
+    }
+}
+pub fn expr_try(expr: ast::Expr) -> ast::Expr {
+    expr_from_text(&format!("{}?", expr))
 }
 pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
     expr_from_text(&format!("match {} {}", expr, match_arm_list))
@@ -212,6 +234,10 @@ pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
 pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
     expr_from_text(&format!("({})", expr))
 }
+pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::Expr {
+    let expr = elements.into_iter().format(", ");
+    expr_from_text(&format!("({})", expr))
+}
 fn expr_from_text(text: &str) -> ast::Expr {
     ast_from_text(&format!("const C: () = {};", text))
 }
@@ -236,6 +262,13 @@ pub fn ident_pat(name: ast::Name) -> ast::IdentPat {
         ast_from_text(&format!("fn f({}: ())", text))
     }
 }
+pub fn ident_mut_pat(name: ast::Name) -> ast::IdentPat {
+    return from_text(name.text());
+
+    fn from_text(text: &str) -> ast::IdentPat {
+        ast_from_text(&format!("fn f(mut {}: ())", text))
+    }
+}
 
 pub fn wildcard_pat() -> ast::WildcardPat {
     return from_text("_");
@@ -356,17 +389,25 @@ pub fn token(kind: SyntaxKind) -> SyntaxToken {
         .unwrap_or_else(|| panic!("unhandled token: {:?}", kind))
 }
 
-pub fn param(name: String, ty: String) -> ast::Param {
-    ast_from_text(&format!("fn f({}: {}) {{ }}", name, ty))
+pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
+    ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty))
 }
 
 pub fn ret_type(ty: ast::Type) -> ast::RetType {
     ast_from_text(&format!("fn f() -> {} {{ }}", ty))
 }
 
-pub fn param_list(pats: impl IntoIterator<Item = ast::Param>) -> ast::ParamList {
+pub fn param_list(
+    self_param: Option<ast::SelfParam>,
+    pats: impl IntoIterator<Item = ast::Param>,
+) -> ast::ParamList {
     let args = pats.into_iter().join(", ");
-    ast_from_text(&format!("fn f({}) {{ }}", args))
+    let list = match self_param {
+        Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param),
+        Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args),
+        None => format!("fn f({}) {{ }}", args),
+    };
+    ast_from_text(&list)
 }
 
 pub fn generic_param(name: String, ty: Option<ast::TypeBoundList>) -> ast::GenericParam {
@@ -478,7 +519,7 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
 }
 
 fn unroot(n: SyntaxNode) -> SyntaxNode {
-    SyntaxNode::new_root(n.green().clone())
+    SyntaxNode::new_root(n.green().to_owned())
 }
 
 pub mod tokens {
@@ -486,8 +527,11 @@ pub mod tokens {
 
     use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
 
-    pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> =
-        Lazy::new(|| SourceFile::parse("const C: <()>::Item = (1 != 1, 2 == 2, !true)\n;\n\n"));
+    pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
+        SourceFile::parse(
+            "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
+        )
+    });
 
     pub fn single_space() -> SyntaxToken {
         SOURCE_FILE
@@ -495,7 +539,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == " ")
+            .find(|it| it.kind() == WHITESPACE && it.text() == " ")
             .unwrap()
     }
 
@@ -523,7 +567,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
             .unwrap()
     }
 
@@ -533,7 +577,7 @@ pub mod tokens {
             .syntax()
             .descendants_with_tokens()
             .filter_map(|it| it.into_token())
-            .find(|it| it.kind() == WHITESPACE && it.text().as_str() == "\n\n")
+            .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
            .unwrap()
     }
 
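Usage sketch (not part of the patch): the new `make` constructors compose by formatting source text and re-parsing it. A minimal example assuming the `syntax` crate as a dependency, using only constructors visible in this diff plus the generated `Display` impls:

    use syntax::ast::make;

    fn main() {
        // `Option<(i32, &mut String)>` assembled from the new `ty_*` helpers.
        let inner = make::ty_tuple(vec![make::ty("i32"), make::ty_ref(make::ty("String"), true)]);
        let ty = make::ty_generic(make::name_ref("Option"), vec![inner]);
        assert_eq!(ty.to_string(), "Option<(i32, &mut String)>");

        // `expr_break` now carries an optional value; an empty `expr_tuple` is a unit expression.
        let brk = make::expr_break(Some(make::expr_tuple(vec![])));
        assert_eq!(brk.to_string(), "break ()");

        // `param_list` threads an optional self parameter in front of the positional ones.
        let params =
            make::param_list(None, vec![make::param(make::wildcard_pat().into(), make::ty("u32"))]);
        assert_eq!(params.to_string(), "(_: u32)");
    }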
diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs
index 738c92a5b..52ac97c84 100644
--- a/crates/syntax/src/ast/node_ext.rs
+++ b/crates/syntax/src/ast/node_ext.rs
@@ -3,29 +3,28 @@
 
 use std::fmt;
 
-use ast::AttrsOwner;
 use itertools::Itertools;
 use parser::SyntaxKind;
 
 use crate::{
-    ast::{self, support, AstNode, AstToken, NameOwner, SyntaxNode},
+    ast::{self, support, AstNode, AstToken, AttrsOwner, NameOwner, SyntaxNode},
     SmolStr, SyntaxElement, SyntaxToken, T,
 };
 
 impl ast::Lifetime {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::Name {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 }
 
 impl ast::NameRef {
-    pub fn text(&self) -> &SmolStr {
+    pub fn text(&self) -> &str {
         text_of_first_token(self.syntax())
     }
 
@@ -34,7 +33,7 @@ impl ast::NameRef {
     }
 }
 
-fn text_of_first_token(node: &SyntaxNode) -> &SmolStr {
+fn text_of_first_token(node: &SyntaxNode) -> &str {
     node.green().children().next().and_then(|it| it.into_token()).unwrap().text()
 }
 
@@ -121,7 +120,7 @@ impl ast::Attr {
     pub fn simple_name(&self) -> Option<SmolStr> {
         let path = self.path()?;
         match (path.segment(), path.qualifier()) {
-            (Some(segment), None) => Some(segment.syntax().first_token()?.text().clone()),
+            (Some(segment), None) => Some(segment.syntax().first_token()?.text().into()),
             _ => None,
         }
     }
@@ -274,10 +273,7 @@ impl ast::Struct {
 
 impl ast::RecordExprField {
     pub fn for_field_name(field_name: &ast::NameRef) -> Option<ast::RecordExprField> {
-        let candidate =
-            field_name.syntax().parent().and_then(ast::RecordExprField::cast).or_else(|| {
-                field_name.syntax().ancestors().nth(4).and_then(ast::RecordExprField::cast)
-            })?;
+        let candidate = Self::for_name_ref(field_name)?;
         if candidate.field_name().as_ref() == Some(field_name) {
             Some(candidate)
         } else {
@@ -285,6 +281,13 @@ impl ast::RecordExprField {
         }
     }
 
+    pub fn for_name_ref(name_ref: &ast::NameRef) -> Option<ast::RecordExprField> {
+        let syn = name_ref.syntax();
+        syn.parent()
+            .and_then(ast::RecordExprField::cast)
+            .or_else(|| syn.ancestors().nth(4).and_then(ast::RecordExprField::cast))
+    }
+
     /// Deals with field init shorthand
     pub fn field_name(&self) -> Option<ast::NameRef> {
         if let Some(name_ref) = self.name_ref() {
@@ -294,6 +297,53 @@ impl ast::RecordExprField {
     }
 }
 
+#[derive(Debug, Clone)]
+pub enum NameLike {
+    NameRef(ast::NameRef),
+    Name(ast::Name),
+    Lifetime(ast::Lifetime),
+}
+
+impl NameLike {
+    pub fn as_name_ref(&self) -> Option<&ast::NameRef> {
+        match self {
+            NameLike::NameRef(name_ref) => Some(name_ref),
+            _ => None,
+        }
+    }
+}
+
+impl ast::AstNode for NameLike {
+    fn can_cast(kind: SyntaxKind) -> bool {
+        matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF | SyntaxKind::LIFETIME)
+    }
+    fn cast(syntax: SyntaxNode) -> Option<Self> {
+        let res = match syntax.kind() {
+            SyntaxKind::NAME => NameLike::Name(ast::Name { syntax }),
+            SyntaxKind::NAME_REF => NameLike::NameRef(ast::NameRef { syntax }),
+            SyntaxKind::LIFETIME => NameLike::Lifetime(ast::Lifetime { syntax }),
+            _ => return None,
+        };
+        Some(res)
+    }
+    fn syntax(&self) -> &SyntaxNode {
+        match self {
+            NameLike::NameRef(it) => it.syntax(),
+            NameLike::Name(it) => it.syntax(),
+            NameLike::Lifetime(it) => it.syntax(),
+        }
+    }
+}
+
+mod __ {
+    use super::{
+        ast::{Lifetime, Name, NameRef},
+        NameLike,
+    };
+    stdx::impl_from!(NameRef, Name, Lifetime for NameLike);
+}
+
+#[derive(Debug, Clone, PartialEq)]
 pub enum NameOrNameRef {
     Name(ast::Name),
     NameRef(ast::NameRef),
@@ -309,16 +359,42 @@ impl fmt::Display for NameOrNameRef {
 }
 
 impl ast::RecordPatField {
+    pub fn for_field_name_ref(field_name: &ast::NameRef) -> Option<ast::RecordPatField> {
+        let candidate = field_name.syntax().parent().and_then(ast::RecordPatField::cast)?;
+        match candidate.field_name()? {
+            NameOrNameRef::NameRef(name_ref) if name_ref == *field_name => Some(candidate),
+            _ => None,
+        }
+    }
+
+    pub fn for_field_name(field_name: &ast::Name) -> Option<ast::RecordPatField> {
+        let candidate =
+            field_name.syntax().ancestors().nth(2).and_then(ast::RecordPatField::cast)?;
+        match candidate.field_name()? {
+            NameOrNameRef::Name(name) if name == *field_name => Some(candidate),
+            _ => None,
+        }
+    }
+
     /// Deals with field init shorthand
     pub fn field_name(&self) -> Option<NameOrNameRef> {
         if let Some(name_ref) = self.name_ref() {
             return Some(NameOrNameRef::NameRef(name_ref));
         }
-        if let Some(ast::Pat::IdentPat(pat)) = self.pat() {
-            let name = pat.name()?;
-            return Some(NameOrNameRef::Name(name));
+        match self.pat() {
+            Some(ast::Pat::IdentPat(pat)) => {
+                let name = pat.name()?;
+                Some(NameOrNameRef::Name(name))
+            }
+            Some(ast::Pat::BoxPat(pat)) => match pat.pat() {
+                Some(ast::Pat::IdentPat(pat)) => {
+                    let name = pat.name()?;
+                    Some(NameOrNameRef::Name(name))
+                }
+                _ => None,
+            },
+            _ => None,
         }
-        None
     }
 }
 
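Usage sketch (not part of the patch): `NameLike` gives a single `AstNode` wrapper over NAME, NAME_REF and LIFETIME nodes. A minimal example assuming the `syntax` crate as a dependency:

    use syntax::{ast::NameLike, AstNode, SourceFile};

    fn main() {
        let file = SourceFile::parse("fn foo() { bar(); }").tree();
        let names: Vec<NameLike> =
            file.syntax().descendants().filter_map(NameLike::cast).collect();
        // `foo` is a Name definition, `bar` a NameRef use.
        assert_eq!(names.len(), 2);
        assert!(names[0].as_name_ref().is_none());
        assert!(names[1].as_name_ref().is_some());
    }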
diff --git a/crates/syntax/src/ast/token_ext.rs b/crates/syntax/src/ast/token_ext.rs
index 5e9620a40..977eb8181 100644
--- a/crates/syntax/src/ast/token_ext.rs
+++ b/crates/syntax/src/ast/token_ext.rs
@@ -41,7 +41,7 @@ impl ast::Comment {
         match kind {
             CommentKind { shape, doc: Some(_) } => {
                 let prefix = kind.prefix();
-                let text = &self.text().as_str()[prefix.len()..];
+                let text = &self.text()[prefix.len()..];
                 let ws = text.chars().next().filter(|c| c.is_whitespace());
                 let text = ws.map_or(text, |ws| &text[ws.len_utf8()..]);
                 match shape {
@@ -85,8 +85,9 @@ pub enum CommentPlacement {
 }
 
 impl CommentKind {
-    const BY_PREFIX: [(&'static str, CommentKind); 8] = [
+    const BY_PREFIX: [(&'static str, CommentKind); 9] = [
         ("/**/", CommentKind { shape: CommentShape::Block, doc: None }),
+        ("/***", CommentKind { shape: CommentShape::Block, doc: None }),
         ("////", CommentKind { shape: CommentShape::Line, doc: None }),
         ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }),
         ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }),
@@ -156,13 +157,13 @@ impl ast::String {
 
     pub fn value(&self) -> Option<Cow<'_, str>> {
         if self.is_raw() {
-            let text = self.text().as_str();
+            let text = self.text();
             let text =
                 &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
             return Some(Cow::Borrowed(text));
         }
 
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
 
         let mut buf = String::new();
@@ -173,7 +174,7 @@ impl ast::String {
                 buf.capacity() == 0,
             ) {
                 (Ok(c), false) => buf.push(c),
-                (Ok(c), true) if Some(c) == text_iter.next() => (),
+                (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
                 (Ok(c), true) => {
                     buf.reserve_exact(text.len());
                     buf.push_str(&text[..char_range.start]);
@@ -190,7 +191,7 @@ impl ast::String {
     }
 
     pub fn quote_offsets(&self) -> Option<QuoteOffsets> {
-        let text = self.text().as_str();
+        let text = self.text();
         let offsets = QuoteOffsets::new(text)?;
         let o = self.syntax().text_range().start();
         let offsets = QuoteOffsets {
@@ -560,7 +561,7 @@ impl HasFormatSpecifier for ast::String {
     fn char_ranges(
         &self,
     ) -> Option<Vec<(TextRange, Result<char, rustc_lexer::unescape::EscapeError>)>> {
-        let text = self.text().as_str();
+        let text = self.text();
         let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
         let offset = self.text_range_between_quotes()?.start() - self.syntax().text_range().start();
 
@@ -590,7 +591,7 @@ impl ast::IntNumber {
     pub fn value(&self) -> Option<u128> {
         let token = self.syntax();
 
-        let mut text = token.text().as_str();
+        let mut text = token.text();
         if let Some(suffix) = self.suffix() {
             text = &text[..text.len() - suffix.len()]
         }
@@ -659,7 +660,7 @@ impl Radix {
 
 #[cfg(test)]
 mod tests {
-    use crate::ast::{make, FloatNumber, IntNumber};
+    use crate::ast::{self, make, FloatNumber, IntNumber};
 
     fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
         assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
@@ -692,4 +693,21 @@ mod tests {
         check_int_suffix("0o11u32", "u32");
         check_int_suffix("0xffu32", "u32");
     }
+
+    fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+        assert_eq!(
+            ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
+                .value()
+                .as_deref(),
+            expected.into()
+        );
+    }
+
+    #[test]
+    fn test_string_escape() {
+        check_string_value(r"foobar", "foobar");
+        check_string_value(r"\foobar", None);
+        check_string_value(r"\nfoobar", "\nfoobar");
+        check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\");
+    }
 }
diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs
index ea7482bb1..11294c5b2 100644
--- a/crates/syntax/src/lib.rs
+++ b/crates/syntax/src/lib.rs
@@ -56,9 +56,9 @@ pub use crate::{
 };
 pub use parser::{SyntaxKind, T};
 pub use rowan::{
-    Direction, GreenNode, NodeOrToken, SmolStr, SyntaxText, TextRange, TextSize, TokenAtOffset,
-    WalkEvent,
+    Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize, TokenAtOffset, WalkEvent,
 };
+pub use smol_str::SmolStr;
 
 /// `Parse` is the result of the parsing: a syntax tree and a collection of
 /// errors.
diff --git a/crates/syntax/src/parsing/reparsing.rs b/crates/syntax/src/parsing/reparsing.rs
index 76f01084c..3d637bf91 100644
--- a/crates/syntax/src/parsing/reparsing.rs
+++ b/crates/syntax/src/parsing/reparsing.rs
@@ -73,8 +73,7 @@ fn reparse_token<'node>(
                 new_text.pop();
             }
 
-            let new_token =
-                GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), new_text.into());
+            let new_token = GreenToken::new(rowan::SyntaxKind(prev_token_kind.into()), &new_text);
             Some((
                 prev_token.replace_with(new_token),
                 new_err.into_iter().collect(),
diff --git a/crates/syntax/src/parsing/text_tree_sink.rs b/crates/syntax/src/parsing/text_tree_sink.rs
index ce27c3dd9..d5ddc076f 100644
--- a/crates/syntax/src/parsing/text_tree_sink.rs
+++ b/crates/syntax/src/parsing/text_tree_sink.rs
@@ -8,7 +8,7 @@ use crate::{
     ast,
     parsing::Token,
     syntax_node::GreenNode,
-    SmolStr, SyntaxError,
+    SyntaxError,
     SyntaxKind::{self, *},
     SyntaxTreeBuilder, TextRange, TextSize,
 };
@@ -135,7 +135,7 @@ impl<'a> TextTreeSink<'a> {
 
     fn do_token(&mut self, kind: SyntaxKind, len: TextSize, n_tokens: usize) {
         let range = TextRange::at(self.text_pos, len);
-        let text: SmolStr = self.text[range].into();
+        let text = &self.text[range];
         self.text_pos += len;
         self.token_pos += n_tokens;
         self.inner.token(kind, text);
diff --git a/crates/syntax/src/syntax_node.rs b/crates/syntax/src/syntax_node.rs
index cc30138fa..8f643b228 100644
--- a/crates/syntax/src/syntax_node.rs
+++ b/crates/syntax/src/syntax_node.rs
@@ -8,7 +8,7 @@
 
 use rowan::{GreenNodeBuilder, Language};
 
-use crate::{Parse, SmolStr, SyntaxError, SyntaxKind, TextSize};
+use crate::{Parse, SyntaxError, SyntaxKind, TextSize};
 
 pub(crate) use rowan::{GreenNode, GreenToken, NodeOrToken};
 
@@ -53,7 +53,7 @@ impl SyntaxTreeBuilder {
         Parse::new(green, errors)
     }
 
-    pub fn token(&mut self, kind: SyntaxKind, text: SmolStr) {
+    pub fn token(&mut self, kind: SyntaxKind, text: &str) {
         let kind = RustLanguage::kind_to_raw(kind);
         self.inner.token(kind, text)
     }
diff --git a/crates/syntax/src/tests.rs b/crates/syntax/src/tests.rs
index 9d3433c9d..b2c06e24f 100644
--- a/crates/syntax/src/tests.rs
+++ b/crates/syntax/src/tests.rs
@@ -4,11 +4,12 @@ use std::{
     path::{Path, PathBuf},
 };
 
+use ast::NameOwner;
 use expect_test::expect_file;
 use rayon::prelude::*;
-use test_utils::project_dir;
+use test_utils::{bench, bench_fixture, project_dir, skip_slow_tests};
 
-use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextSize, Token};
+use crate::{ast, fuzz, tokenize, AstNode, SourceFile, SyntaxError, TextRange, TextSize, Token};
 
 #[test]
 fn lexer_tests() {
@@ -42,6 +43,28 @@ fn main() {
 }
 
 #[test]
+fn benchmark_parser() {
+    if skip_slow_tests() {
+        return;
+    }
+    let data = bench_fixture::glorious_old_parser();
+    let tree = {
+        let _b = bench("parsing");
+        let p = SourceFile::parse(&data);
+        assert!(p.errors.is_empty());
+        assert_eq!(p.tree().syntax.text_range().len(), 352474.into());
+        p.tree()
+    };
+
+    {
+        let _b = bench("tree traversal");
+        let fn_names =
+            tree.syntax().descendants().filter_map(ast::Fn::cast).filter_map(|f| f.name()).count();
+        assert_eq!(fn_names, 268);
+    }
+}
+
+#[test]
 fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], "rast", |text, path| {
         let parse = SourceFile::parse(text);
@@ -128,7 +151,6 @@ fn reparse_fuzz_tests() {
 }
 
 /// Test that Rust-analyzer can parse and validate the rust-analyzer
-/// FIXME: Use this as a benchmark
 #[test]
 fn self_hosting_parsing() {
     let dir = project_dir().join("crates");
diff --git a/crates/syntax/src/validation.rs b/crates/syntax/src/validation.rs
index 7901580ee..3e216fb70 100644
--- a/crates/syntax/src/validation.rs
+++ b/crates/syntax/src/validation.rs
@@ -1,4 +1,6 @@
-//! FIXME: write short doc here
+//! This module implements syntax validation that the parser doesn't handle.
+//!
+//! A failed validation emits a diagnostic.
 
 mod block;
 
@@ -92,6 +94,7 @@ pub(crate) fn validate(root: &SyntaxNode) -> Vec<SyntaxError> {
         match_ast! {
             match node {
                 ast::Literal(it) => validate_literal(it, &mut errors),
+                ast::Const(it) => validate_const(it, &mut errors),
                 ast::BlockExpr(it) => block::validate_block_expr(it, &mut errors),
                 ast::FieldExpr(it) => validate_numeric_name(it.name_ref(), &mut errors),
                 ast::RecordExprField(it) => validate_numeric_name(it.name_ref(), &mut errors),
@@ -116,7 +119,7 @@ fn validate_literal(literal: ast::Literal, acc: &mut Vec<SyntaxError>) {
     }
 
     let token = literal.token();
-    let text = token.text().as_str();
+    let text = token.text();
 
     // FIXME: lift this lambda refactor to `fn` (https://github.com/rust-analyzer/rust-analyzer/pull/2834#discussion_r366199205)
     let mut push_err = |prefix_len, (off, err): (usize, unescape::EscapeError)| {
@@ -362,3 +365,14 @@ fn validate_macro_rules(mac: ast::MacroRules, errors: &mut Vec<SyntaxError>) {
         ));
     }
 }
+
+fn validate_const(const_: ast::Const, errors: &mut Vec<SyntaxError>) {
+    if let Some(mut_token) = const_
+        .const_token()
+        .and_then(|t| t.next_token())
+        .and_then(|t| algo::skip_trivia_token(t, Direction::Next))
+        .filter(|t| t.kind() == T![mut])
+    {
+        errors.push(SyntaxError::new("const globals cannot be mutable", mut_token.text_range()));
+    }
+}
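Usage sketch (not part of the patch): the new `validate_const` check surfaces through the ordinary parse API. A minimal example assuming the `syntax` crate as a dependency and its public `Parse::errors` accessor (an assumption, not shown in this diff):

    use syntax::SourceFile;

    fn main() {
        let parse = SourceFile::parse("const mut FOO: i32 = 0;");
        // Validation errors are assumed to be appended to the parse result's error list.
        assert!(parse
            .errors()
            .iter()
            .any(|err| err.to_string().contains("const globals cannot be mutable")));
    }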