Diffstat (limited to 'crates/ra_syntax/src'):

 crates/ra_syntax/src/ast/make.rs      | 16 +++++++++++++++-
 crates/ra_syntax/src/parsing/lexer.rs |  4 ++--
 crates/ra_syntax/src/tests.rs         | 11 ++++++-----
 3 files changed, 23 insertions(+), 8 deletions(-)
diff --git a/crates/ra_syntax/src/ast/make.rs b/crates/ra_syntax/src/ast/make.rs
index ae8829807..9f6f1cc53 100644
--- a/crates/ra_syntax/src/ast/make.rs
+++ b/crates/ra_syntax/src/ast/make.rs
@@ -112,10 +112,14 @@ pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
     let token = token(op);
     expr_from_text(&format!("{}{}", token, expr.syntax()))
 }
-pub fn expr_from_text(text: &str) -> ast::Expr {
+fn expr_from_text(text: &str) -> ast::Expr {
     ast_from_text(&format!("const C: () = {};", text))
 }
 
+pub fn try_expr_from_text(text: &str) -> Option<ast::Expr> {
+    try_ast_from_text(&format!("const C: () = {};", text))
+}
+
 pub fn bind_pat(name: ast::Name) -> ast::BindPat {
     return from_text(name.text());
 
@@ -239,6 +243,16 @@ fn ast_from_text<N: AstNode>(text: &str) -> N {
     node
 }
 
+fn try_ast_from_text<N: AstNode>(text: &str) -> Option<N> {
+    let parse = SourceFile::parse(text);
+    let node = parse.tree().syntax().descendants().find_map(N::cast)?;
+    let node = node.syntax().clone();
+    let node = unroot(node);
+    let node = N::cast(node).unwrap();
+    assert_eq!(node.syntax().text_range().start(), 0.into());
+    Some(node)
+}
+
 fn unroot(n: SyntaxNode) -> SyntaxNode {
     SyntaxNode::new_root(n.green().clone())
 }
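
A minimal usage sketch of the new fallible constructor (the expr_from_user_input helper and its snippet parameter are hypothetical, for illustration only): make::try_expr_from_text returns None where the panicking expr_from_text would abort on unparsable input.

    use ra_syntax::ast::{self, make};

    // Hypothetical caller: forwards user-provided text to the new
    // fallible constructor, which yields None instead of panicking.
    fn expr_from_user_input(snippet: &str) -> Option<ast::Expr> {
        make::try_expr_from_text(snippet)
    }

Note that expr_from_text is demoted from pub fn to a private fn in the same hunk, steering external callers toward the checked variant.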
diff --git a/crates/ra_syntax/src/parsing/lexer.rs b/crates/ra_syntax/src/parsing/lexer.rs
index f2684c852..d1baaa607 100644
--- a/crates/ra_syntax/src/parsing/lexer.rs
+++ b/crates/ra_syntax/src/parsing/lexer.rs
@@ -65,7 +65,7 @@ pub fn tokenize(text: &str) -> (Vec<Token>, Vec<SyntaxError>) {
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxError>)> {
     lex_first_token(text)
-        .filter(|(token, _)| token.len.to_usize() == text.len())
+        .filter(|(token, _)| token.len == TextUnit::of_str(text))
         .map(|(token, error)| (token.kind, error))
 }
 
@@ -75,7 +75,7 @@ pub fn lex_single_syntax_kind(text: &str) -> Option<(SyntaxKind, Option<SyntaxEr
 /// Beware that unescape errors are not checked at tokenization time.
 pub fn lex_single_valid_syntax_kind(text: &str) -> Option<SyntaxKind> {
     lex_first_token(text)
-        .filter(|(token, error)| !error.is_some() && token.len.to_usize() == text.len())
+        .filter(|(token, error)| !error.is_some() && token.len == TextUnit::of_str(text))
         .map(|(token, _error)| token.kind)
 }
 
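
For context on the rewritten filters: TextUnit::of_str measures a &str in the same units as Token::len, so the predicate keeps only tokens that span the entire input, without the round-trip through usize that token.len.to_usize() == text.len() performed. A small illustrative sketch (covers_whole_input is not part of the crate):

    use ra_syntax::TextUnit;

    // Illustrative helper: the first token lexed from `text` is the only
    // token exactly when its length equals the length of the whole input.
    fn covers_whole_input(token_len: TextUnit, text: &str) -> bool {
        token_len == TextUnit::of_str(text)
    }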
diff --git a/crates/ra_syntax/src/tests.rs b/crates/ra_syntax/src/tests.rs
index 912e6aec0..6a8cb6bb5 100644
--- a/crates/ra_syntax/src/tests.rs
+++ b/crates/ra_syntax/src/tests.rs
@@ -5,7 +5,7 @@ use std::{
 
 use test_utils::{collect_tests, dir_tests, project_dir, read_text};
 
-use crate::{fuzz, tokenize, SourceFile, SyntaxError, Token};
+use crate::{fuzz, tokenize, SourceFile, SyntaxError, TextRange, TextUnit, Token};
 
 #[test]
 fn lexer_tests() {
@@ -34,6 +34,7 @@ fn main() {
34 "##; 34 "##;
35 35
36 let parse = SourceFile::parse(code); 36 let parse = SourceFile::parse(code);
37 // eprintln!("{:#?}", parse.syntax_node());
37 assert!(parse.ok().is_ok()); 38 assert!(parse.ok().is_ok());
38} 39}
39 40
@@ -120,11 +121,11 @@ fn assert_errors_are_absent(errors: &[SyntaxError], path: &Path) {
 
 fn dump_tokens_and_errors(tokens: &[Token], errors: &[SyntaxError], text: &str) -> String {
     let mut acc = String::new();
-    let mut offset = 0;
+    let mut offset = TextUnit::from_usize(0);
     for token in tokens {
-        let token_len = token.len.to_usize();
-        let token_text = &text[offset..offset + token_len];
-        offset += token_len;
+        let token_len = token.len;
+        let token_text = &text[TextRange::offset_len(offset, token.len)];
+        offset += token.len;
         writeln!(acc, "{:?} {} {:?}", token.kind, token_len, token_text).unwrap();
     }
     for err in errors {
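
The dump helper now slices the source with a TextRange instead of raw usize arithmetic. A minimal sketch of that idiom with made-up values: TextRange::offset_len(start, len) builds the half-open range [start; start + len), and &str is indexable by it directly.

    use ra_syntax::{TextRange, TextUnit};

    let text = "fn main() {}";
    let start = TextUnit::from_usize(3);                             // offset of "main"
    let range = TextRange::offset_len(start, TextUnit::of_str("main"));
    assert_eq!(&text[range], "main");                                // str indexed by TextRange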