aboutsummaryrefslogtreecommitdiff
path: root/xtask
diff options
context:
space:
mode:
authorAleksey Kladov <[email protected]>2019-10-23 16:13:40 +0100
committerAleksey Kladov <[email protected]>2019-10-23 16:57:18 +0100
commitb5f13d8d51ef9107363a60b894a741ab596921ce (patch)
tree32fa43c640821d11d283e6f13ed41057d18dd27a /xtask
parentedf4d8e555c6847fb9e6e61d727c4def11789bfc (diff)
xtask: move codegen to a module
Diffstat (limited to 'xtask')
-rw-r--r--xtask/src/bin/pre-commit.rs4
-rw-r--r--xtask/src/boilerplate_gen.rs348
-rw-r--r--xtask/src/codegen.rs46
-rw-r--r--xtask/src/codegen/gen_parser_tests.rs150
-rw-r--r--xtask/src/codegen/gen_syntax.rs354
-rw-r--r--xtask/src/lib.rs175
-rw-r--r--xtask/src/main.rs10
-rw-r--r--xtask/tests/tidy-tests/cli.rs11
8 files changed, 567 insertions, 531 deletions
diff --git a/xtask/src/bin/pre-commit.rs b/xtask/src/bin/pre-commit.rs
index 4ee864756..cc6ccb25e 100644
--- a/xtask/src/bin/pre-commit.rs
+++ b/xtask/src/bin/pre-commit.rs
@@ -2,10 +2,10 @@
2 2
3use std::process::Command; 3use std::process::Command;
4 4
5use xtask::{project_root, run, run_rustfmt, Overwrite, Result}; 5use xtask::{codegen::Mode, project_root, run, run_rustfmt, Result};
6 6
7fn main() -> Result<()> { 7fn main() -> Result<()> {
8 run_rustfmt(Overwrite)?; 8 run_rustfmt(Mode::Overwrite)?;
9 update_staged() 9 update_staged()
10} 10}
11 11
diff --git a/xtask/src/boilerplate_gen.rs b/xtask/src/boilerplate_gen.rs
index 39f1cae66..e69de29bb 100644
--- a/xtask/src/boilerplate_gen.rs
+++ b/xtask/src/boilerplate_gen.rs
@@ -1,348 +0,0 @@
1//! FIXME: write short doc here
2
3use std::{
4 collections::BTreeMap,
5 fs,
6 io::Write,
7 process::{Command, Stdio},
8};
9
10use proc_macro2::{Punct, Spacing};
11use quote::{format_ident, quote};
12use ron;
13use serde::Deserialize;
14
15use crate::{project_root, update, Mode, Result, AST, GRAMMAR, SYNTAX_KINDS};
16
17pub fn generate_boilerplate(mode: Mode) -> Result<()> {
18 let grammar = project_root().join(GRAMMAR);
19 let grammar: Grammar = {
20 let text = fs::read_to_string(grammar)?;
21 ron::de::from_str(&text)?
22 };
23
24 let syntax_kinds_file = project_root().join(SYNTAX_KINDS);
25 let syntax_kinds = generate_syntax_kinds(&grammar)?;
26 update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;
27
28 let ast_file = project_root().join(AST);
29 let ast = generate_ast(&grammar)?;
30 update(ast_file.as_path(), &ast, mode)?;
31
32 Ok(())
33}
34
35fn generate_ast(grammar: &Grammar) -> Result<String> {
36 let nodes = grammar.ast.iter().map(|(name, ast_node)| {
37 let variants =
38 ast_node.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>();
39 let name = format_ident!("{}", name);
40
41 let adt = if variants.is_empty() {
42 let kind = format_ident!("{}", to_upper_snake_case(&name.to_string()));
43 quote! {
44 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
45 pub struct #name {
46 pub(crate) syntax: SyntaxNode,
47 }
48
49 impl AstNode for #name {
50 fn can_cast(kind: SyntaxKind) -> bool {
51 match kind {
52 #kind => true,
53 _ => false,
54 }
55 }
56 fn cast(syntax: SyntaxNode) -> Option<Self> {
57 if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
58 }
59 fn syntax(&self) -> &SyntaxNode { &self.syntax }
60 }
61 }
62 } else {
63 let kinds = variants
64 .iter()
65 .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
66 .collect::<Vec<_>>();
67
68 quote! {
69 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
70 pub enum #name {
71 #(#variants(#variants),)*
72 }
73
74 #(
75 impl From<#variants> for #name {
76 fn from(node: #variants) -> #name {
77 #name::#variants(node)
78 }
79 }
80 )*
81
82 impl AstNode for #name {
83 fn can_cast(kind: SyntaxKind) -> bool {
84 match kind {
85 #(#kinds)|* => true,
86 _ => false,
87 }
88 }
89 fn cast(syntax: SyntaxNode) -> Option<Self> {
90 let res = match syntax.kind() {
91 #(
92 #kinds => #name::#variants(#variants { syntax }),
93 )*
94 _ => return None,
95 };
96 Some(res)
97 }
98 fn syntax(&self) -> &SyntaxNode {
99 match self {
100 #(
101 #name::#variants(it) => &it.syntax,
102 )*
103 }
104 }
105 }
106 }
107 };
108
109 let traits = ast_node.traits.iter().map(|trait_name| {
110 let trait_name = format_ident!("{}", trait_name);
111 quote!(impl ast::#trait_name for #name {})
112 });
113
114 let collections = ast_node.collections.iter().map(|(name, kind)| {
115 let method_name = format_ident!("{}", name);
116 let kind = format_ident!("{}", kind);
117 quote! {
118 pub fn #method_name(&self) -> AstChildren<#kind> {
119 AstChildren::new(&self.syntax)
120 }
121 }
122 });
123
124 let options = ast_node.options.iter().map(|attr| {
125 let method_name = match attr {
126 Attr::Type(t) => format_ident!("{}", to_lower_snake_case(&t)),
127 Attr::NameType(n, _) => format_ident!("{}", n),
128 };
129 let ty = match attr {
130 Attr::Type(t) | Attr::NameType(_, t) => format_ident!("{}", t),
131 };
132 quote! {
133 pub fn #method_name(&self) -> Option<#ty> {
134 AstChildren::new(&self.syntax).next()
135 }
136 }
137 });
138
139 quote! {
140 #adt
141
142 #(#traits)*
143
144 impl #name {
145 #(#collections)*
146 #(#options)*
147 }
148 }
149 });
150
151 let ast = quote! {
152 use crate::{
153 SyntaxNode, SyntaxKind::{self, *},
154 ast::{self, AstNode, AstChildren},
155 };
156
157 #(#nodes)*
158 };
159
160 let pretty = reformat(ast)?;
161 Ok(pretty)
162}
163
164fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> {
165 let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
166 .punct
167 .iter()
168 .filter(|(token, _name)| token.len() == 1)
169 .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
170 .unzip();
171
172 let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
173 if "{}[]()".contains(token) {
174 let c = token.chars().next().unwrap();
175 quote! { #c }
176 } else {
177 let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
178 quote! { #(#cs)* }
179 }
180 });
181 let punctuation =
182 grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
183
184 let full_keywords_values = &grammar.keywords;
185 let full_keywords =
186 full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
187
188 let all_keywords_values =
189 grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
190 let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
191 let all_keywords = all_keywords_values
192 .iter()
193 .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
194 .collect::<Vec<_>>();
195
196 let literals =
197 grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
198
199 let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
200
201 let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
202
203 let ast = quote! {
204 #![allow(bad_style, missing_docs, unreachable_pub)]
205 /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
206 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
207 #[repr(u16)]
208 pub enum SyntaxKind {
209 // Technical SyntaxKinds: they appear temporally during parsing,
210 // but never end up in the final tree
211 #[doc(hidden)]
212 TOMBSTONE,
213 #[doc(hidden)]
214 EOF,
215 #(#punctuation,)*
216 #(#all_keywords,)*
217 #(#literals,)*
218 #(#tokens,)*
219 #(#nodes,)*
220
221 // Technical kind so that we can cast from u16 safely
222 #[doc(hidden)]
223 __LAST,
224 }
225 use self::SyntaxKind::*;
226
227 impl SyntaxKind {
228 pub fn is_keyword(self) -> bool {
229 match self {
230 #(#all_keywords)|* => true,
231 _ => false,
232 }
233 }
234
235 pub fn is_punct(self) -> bool {
236 match self {
237 #(#punctuation)|* => true,
238 _ => false,
239 }
240 }
241
242 pub fn is_literal(self) -> bool {
243 match self {
244 #(#literals)|* => true,
245 _ => false,
246 }
247 }
248
249 pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
250 let kw = match ident {
251 #(#full_keywords_values => #full_keywords,)*
252 _ => return None,
253 };
254 Some(kw)
255 }
256
257 pub fn from_char(c: char) -> Option<SyntaxKind> {
258 let tok = match c {
259 #(#single_byte_tokens_values => #single_byte_tokens,)*
260 _ => return None,
261 };
262 Some(tok)
263 }
264 }
265
266 #[macro_export]
267 macro_rules! T {
268 #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)*
269 #((#all_keywords_idents) => { $crate::SyntaxKind::#all_keywords };)*
270 }
271 };
272
273 reformat(ast)
274}
275
276fn reformat(text: impl std::fmt::Display) -> Result<String> {
277 let mut rustfmt = Command::new("rustfmt")
278 .arg("--config-path")
279 .arg(project_root().join("rustfmt.toml"))
280 .stdin(Stdio::piped())
281 .stdout(Stdio::piped())
282 .spawn()?;
283 write!(rustfmt.stdin.take().unwrap(), "{}", text)?;
284 let output = rustfmt.wait_with_output()?;
285 let stdout = String::from_utf8(output.stdout)?;
286 let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`";
287 Ok(format!("//! {}\n\n{}", preamble, stdout))
288}
289
290#[derive(Deserialize, Debug)]
291struct Grammar {
292 punct: Vec<(String, String)>,
293 keywords: Vec<String>,
294 contextual_keywords: Vec<String>,
295 literals: Vec<String>,
296 tokens: Vec<String>,
297 nodes: Vec<String>,
298 ast: BTreeMap<String, AstNode>,
299}
300
301#[derive(Deserialize, Debug)]
302struct AstNode {
303 #[serde(default)]
304 #[serde(rename = "enum")]
305 variants: Vec<String>,
306
307 #[serde(default)]
308 traits: Vec<String>,
309 #[serde(default)]
310 collections: Vec<(String, String)>,
311 #[serde(default)]
312 options: Vec<Attr>,
313}
314
315#[derive(Deserialize, Debug)]
316#[serde(untagged)]
317enum Attr {
318 Type(String),
319 NameType(String, String),
320}
321
322fn to_upper_snake_case(s: &str) -> String {
323 let mut buf = String::with_capacity(s.len());
324 let mut prev_is_upper = None;
325 for c in s.chars() {
326 if c.is_ascii_uppercase() && prev_is_upper == Some(false) {
327 buf.push('_')
328 }
329 prev_is_upper = Some(c.is_ascii_uppercase());
330
331 buf.push(c.to_ascii_uppercase());
332 }
333 buf
334}
335
336fn to_lower_snake_case(s: &str) -> String {
337 let mut buf = String::with_capacity(s.len());
338 let mut prev_is_upper = None;
339 for c in s.chars() {
340 if c.is_ascii_uppercase() && prev_is_upper == Some(false) {
341 buf.push('_')
342 }
343 prev_is_upper = Some(c.is_ascii_uppercase());
344
345 buf.push(c.to_ascii_lowercase());
346 }
347 buf
348}
diff --git a/xtask/src/codegen.rs b/xtask/src/codegen.rs
new file mode 100644
index 000000000..948b86719
--- /dev/null
+++ b/xtask/src/codegen.rs
@@ -0,0 +1,46 @@
1//! We use code generation heavily in rust-analyzer.
2//!
3//! Rather then doing it via proc-macros, we use old-school way of just dumping
4//! the source code.
5//!
6//! This module's submodules define specific bits that we generate.
7
8mod gen_syntax;
9mod gen_parser_tests;
10
11use std::{fs, path::Path};
12
13use crate::Result;
14
15pub use self::{gen_parser_tests::generate_parser_tests, gen_syntax::generate_syntax};
16
17pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron";
18const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar";
19const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok";
20const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err";
21
22pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs";
23pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs";
24
25#[derive(Debug, PartialEq, Eq, Clone, Copy)]
26pub enum Mode {
27 Overwrite,
28 Verify,
29}
30
31/// A helper to update file on disk if it has changed.
32/// With verify = false,
33pub fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
34 match fs::read_to_string(path) {
35 Ok(ref old_contents) if old_contents == contents => {
36 return Ok(());
37 }
38 _ => (),
39 }
40 if mode == Mode::Verify {
41 Err(format!("`{}` is not up-to-date", path.display()))?;
42 }
43 eprintln!("updating {}", path.display());
44 fs::write(path, contents)?;
45 Ok(())
46}
diff --git a/xtask/src/codegen/gen_parser_tests.rs b/xtask/src/codegen/gen_parser_tests.rs
new file mode 100644
index 000000000..e09b6fcfe
--- /dev/null
+++ b/xtask/src/codegen/gen_parser_tests.rs
@@ -0,0 +1,150 @@
1//! This module greps parser's code for specially formatted comments and turnes
2//! them into tests.
3
4use std::{
5 collections::HashMap,
6 fs,
7 path::{Path, PathBuf},
8};
9
10use itertools::Itertools;
11
12use crate::{
13 codegen::{self, update, Mode},
14 project_root, Result,
15};
16
17pub fn generate_parser_tests(mode: Mode) -> Result<()> {
18 let tests = tests_from_dir(&project_root().join(Path::new(codegen::GRAMMAR_DIR)))?;
19 fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
20 let tests_dir = project_root().join(into);
21 if !tests_dir.is_dir() {
22 fs::create_dir_all(&tests_dir)?;
23 }
24 // ok is never actually read, but it needs to be specified to create a Test in existing_tests
25 let existing = existing_tests(&tests_dir, true)?;
26 for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
27 panic!("Test is deleted: {}", t);
28 }
29
30 let mut new_idx = existing.len() + 1;
31 for (name, test) in tests {
32 let path = match existing.get(name) {
33 Some((path, _test)) => path.clone(),
34 None => {
35 let file_name = format!("{:04}_{}.rs", new_idx, name);
36 new_idx += 1;
37 tests_dir.join(file_name)
38 }
39 };
40 update(&path, &test.text, mode)?;
41 }
42 Ok(())
43 }
44 install_tests(&tests.ok, codegen::OK_INLINE_TESTS_DIR, mode)?;
45 install_tests(&tests.err, codegen::ERR_INLINE_TESTS_DIR, mode)
46}
47
48#[derive(Debug)]
49struct Test {
50 pub name: String,
51 pub text: String,
52 pub ok: bool,
53}
54
55#[derive(Default, Debug)]
56struct Tests {
57 pub ok: HashMap<String, Test>,
58 pub err: HashMap<String, Test>,
59}
60
61fn collect_tests(s: &str) -> Vec<(usize, Test)> {
62 let mut res = vec![];
63 let prefix = "// ";
64 let comment_blocks = s
65 .lines()
66 .map(str::trim_start)
67 .enumerate()
68 .group_by(|(_idx, line)| line.starts_with(prefix));
69
70 'outer: for (is_comment, block) in comment_blocks.into_iter() {
71 if !is_comment {
72 continue;
73 }
74 let mut block = block.map(|(idx, line)| (idx, &line[prefix.len()..]));
75
76 let mut ok = true;
77 let (start_line, name) = loop {
78 match block.next() {
79 Some((idx, line)) if line.starts_with("test ") => {
80 break (idx, line["test ".len()..].to_string());
81 }
82 Some((idx, line)) if line.starts_with("test_err ") => {
83 ok = false;
84 break (idx, line["test_err ".len()..].to_string());
85 }
86 Some(_) => (),
87 None => continue 'outer,
88 }
89 };
90 let text: String =
91 itertools::join(block.map(|(_, line)| line).chain(::std::iter::once("")), "\n");
92 assert!(!text.trim().is_empty() && text.ends_with('\n'));
93 res.push((start_line, Test { name, text, ok }))
94 }
95 res
96}
97
98fn tests_from_dir(dir: &Path) -> Result<Tests> {
99 let mut res = Tests::default();
100 for entry in ::walkdir::WalkDir::new(dir) {
101 let entry = entry.unwrap();
102 if !entry.file_type().is_file() {
103 continue;
104 }
105 if entry.path().extension().unwrap_or_default() != "rs" {
106 continue;
107 }
108 process_file(&mut res, entry.path())?;
109 }
110 let grammar_rs = dir.parent().unwrap().join("grammar.rs");
111 process_file(&mut res, &grammar_rs)?;
112 return Ok(res);
113 fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
114 let text = fs::read_to_string(path)?;
115
116 for (_, test) in collect_tests(&text) {
117 if test.ok {
118 if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
119 Err(format!("Duplicate test: {}", old_test.name))?
120 }
121 } else {
122 if let Some(old_test) = res.err.insert(test.name.clone(), test) {
123 Err(format!("Duplicate test: {}", old_test.name))?
124 }
125 }
126 }
127 Ok(())
128 }
129}
130
131fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
132 let mut res = HashMap::new();
133 for file in fs::read_dir(dir)? {
134 let file = file?;
135 let path = file.path();
136 if path.extension().unwrap_or_default() != "rs" {
137 continue;
138 }
139 let name = {
140 let file_name = path.file_name().unwrap().to_str().unwrap();
141 file_name[5..file_name.len() - 3].to_string()
142 };
143 let text = fs::read_to_string(&path)?;
144 let test = Test { name: name.clone(), text, ok };
145 if let Some(old) = res.insert(name, (path, test)) {
146 println!("Duplicate test: {:?}", old);
147 }
148 }
149 Ok(res)
150}
diff --git a/xtask/src/codegen/gen_syntax.rs b/xtask/src/codegen/gen_syntax.rs
new file mode 100644
index 000000000..6a81c0e4d
--- /dev/null
+++ b/xtask/src/codegen/gen_syntax.rs
@@ -0,0 +1,354 @@
1//! This module generate AST datatype used by rust-analyzer.
2//!
3//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
4//! wrappers around `SyntaxNode` which implement `ra_syntax::AstNode`.
5
6use std::{
7 collections::BTreeMap,
8 fs,
9 io::Write,
10 process::{Command, Stdio},
11};
12
13use proc_macro2::{Punct, Spacing};
14use quote::{format_ident, quote};
15use ron;
16use serde::Deserialize;
17
18use crate::{
19 codegen::{self, update, Mode},
20 project_root, Result,
21};
22
23pub fn generate_syntax(mode: Mode) -> Result<()> {
24 let grammar = project_root().join(codegen::GRAMMAR);
25 let grammar: Grammar = {
26 let text = fs::read_to_string(grammar)?;
27 ron::de::from_str(&text)?
28 };
29
30 let syntax_kinds_file = project_root().join(codegen::SYNTAX_KINDS);
31 let syntax_kinds = generate_syntax_kinds(&grammar)?;
32 update(syntax_kinds_file.as_path(), &syntax_kinds, mode)?;
33
34 let ast_file = project_root().join(codegen::AST);
35 let ast = generate_ast(&grammar)?;
36 update(ast_file.as_path(), &ast, mode)?;
37
38 Ok(())
39}
40
41fn generate_ast(grammar: &Grammar) -> Result<String> {
42 let nodes = grammar.ast.iter().map(|(name, ast_node)| {
43 let variants =
44 ast_node.variants.iter().map(|var| format_ident!("{}", var)).collect::<Vec<_>>();
45 let name = format_ident!("{}", name);
46
47 let adt = if variants.is_empty() {
48 let kind = format_ident!("{}", to_upper_snake_case(&name.to_string()));
49 quote! {
50 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
51 pub struct #name {
52 pub(crate) syntax: SyntaxNode,
53 }
54
55 impl AstNode for #name {
56 fn can_cast(kind: SyntaxKind) -> bool {
57 match kind {
58 #kind => true,
59 _ => false,
60 }
61 }
62 fn cast(syntax: SyntaxNode) -> Option<Self> {
63 if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
64 }
65 fn syntax(&self) -> &SyntaxNode { &self.syntax }
66 }
67 }
68 } else {
69 let kinds = variants
70 .iter()
71 .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
72 .collect::<Vec<_>>();
73
74 quote! {
75 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
76 pub enum #name {
77 #(#variants(#variants),)*
78 }
79
80 #(
81 impl From<#variants> for #name {
82 fn from(node: #variants) -> #name {
83 #name::#variants(node)
84 }
85 }
86 )*
87
88 impl AstNode for #name {
89 fn can_cast(kind: SyntaxKind) -> bool {
90 match kind {
91 #(#kinds)|* => true,
92 _ => false,
93 }
94 }
95 fn cast(syntax: SyntaxNode) -> Option<Self> {
96 let res = match syntax.kind() {
97 #(
98 #kinds => #name::#variants(#variants { syntax }),
99 )*
100 _ => return None,
101 };
102 Some(res)
103 }
104 fn syntax(&self) -> &SyntaxNode {
105 match self {
106 #(
107 #name::#variants(it) => &it.syntax,
108 )*
109 }
110 }
111 }
112 }
113 };
114
115 let traits = ast_node.traits.iter().map(|trait_name| {
116 let trait_name = format_ident!("{}", trait_name);
117 quote!(impl ast::#trait_name for #name {})
118 });
119
120 let collections = ast_node.collections.iter().map(|(name, kind)| {
121 let method_name = format_ident!("{}", name);
122 let kind = format_ident!("{}", kind);
123 quote! {
124 pub fn #method_name(&self) -> AstChildren<#kind> {
125 AstChildren::new(&self.syntax)
126 }
127 }
128 });
129
130 let options = ast_node.options.iter().map(|attr| {
131 let method_name = match attr {
132 Attr::Type(t) => format_ident!("{}", to_lower_snake_case(&t)),
133 Attr::NameType(n, _) => format_ident!("{}", n),
134 };
135 let ty = match attr {
136 Attr::Type(t) | Attr::NameType(_, t) => format_ident!("{}", t),
137 };
138 quote! {
139 pub fn #method_name(&self) -> Option<#ty> {
140 AstChildren::new(&self.syntax).next()
141 }
142 }
143 });
144
145 quote! {
146 #adt
147
148 #(#traits)*
149
150 impl #name {
151 #(#collections)*
152 #(#options)*
153 }
154 }
155 });
156
157 let ast = quote! {
158 use crate::{
159 SyntaxNode, SyntaxKind::{self, *},
160 ast::{self, AstNode, AstChildren},
161 };
162
163 #(#nodes)*
164 };
165
166 let pretty = reformat(ast)?;
167 Ok(pretty)
168}
169
170fn generate_syntax_kinds(grammar: &Grammar) -> Result<String> {
171 let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
172 .punct
173 .iter()
174 .filter(|(token, _name)| token.len() == 1)
175 .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
176 .unzip();
177
178 let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
179 if "{}[]()".contains(token) {
180 let c = token.chars().next().unwrap();
181 quote! { #c }
182 } else {
183 let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
184 quote! { #(#cs)* }
185 }
186 });
187 let punctuation =
188 grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
189
190 let full_keywords_values = &grammar.keywords;
191 let full_keywords =
192 full_keywords_values.iter().map(|kw| format_ident!("{}_KW", to_upper_snake_case(&kw)));
193
194 let all_keywords_values =
195 grammar.keywords.iter().chain(grammar.contextual_keywords.iter()).collect::<Vec<_>>();
196 let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
197 let all_keywords = all_keywords_values
198 .iter()
199 .map(|name| format_ident!("{}_KW", to_upper_snake_case(&name)))
200 .collect::<Vec<_>>();
201
202 let literals =
203 grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
204
205 let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
206
207 let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
208
209 let ast = quote! {
210 #![allow(bad_style, missing_docs, unreachable_pub)]
211 /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT_DEF`.
212 #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
213 #[repr(u16)]
214 pub enum SyntaxKind {
215 // Technical SyntaxKinds: they appear temporally during parsing,
216 // but never end up in the final tree
217 #[doc(hidden)]
218 TOMBSTONE,
219 #[doc(hidden)]
220 EOF,
221 #(#punctuation,)*
222 #(#all_keywords,)*
223 #(#literals,)*
224 #(#tokens,)*
225 #(#nodes,)*
226
227 // Technical kind so that we can cast from u16 safely
228 #[doc(hidden)]
229 __LAST,
230 }
231 use self::SyntaxKind::*;
232
233 impl SyntaxKind {
234 pub fn is_keyword(self) -> bool {
235 match self {
236 #(#all_keywords)|* => true,
237 _ => false,
238 }
239 }
240
241 pub fn is_punct(self) -> bool {
242 match self {
243 #(#punctuation)|* => true,
244 _ => false,
245 }
246 }
247
248 pub fn is_literal(self) -> bool {
249 match self {
250 #(#literals)|* => true,
251 _ => false,
252 }
253 }
254
255 pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
256 let kw = match ident {
257 #(#full_keywords_values => #full_keywords,)*
258 _ => return None,
259 };
260 Some(kw)
261 }
262
263 pub fn from_char(c: char) -> Option<SyntaxKind> {
264 let tok = match c {
265 #(#single_byte_tokens_values => #single_byte_tokens,)*
266 _ => return None,
267 };
268 Some(tok)
269 }
270 }
271
272 #[macro_export]
273 macro_rules! T {
274 #((#punctuation_values) => { $crate::SyntaxKind::#punctuation };)*
275 #((#all_keywords_idents) => { $crate::SyntaxKind::#all_keywords };)*
276 }
277 };
278
279 reformat(ast)
280}
281
282fn reformat(text: impl std::fmt::Display) -> Result<String> {
283 let mut rustfmt = Command::new("rustfmt")
284 .arg("--config-path")
285 .arg(project_root().join("rustfmt.toml"))
286 .stdin(Stdio::piped())
287 .stdout(Stdio::piped())
288 .spawn()?;
289 write!(rustfmt.stdin.take().unwrap(), "{}", text)?;
290 let output = rustfmt.wait_with_output()?;
291 let stdout = String::from_utf8(output.stdout)?;
292 let preamble = "Generated file, do not edit by hand, see `crate/ra_tools/src/codegen`";
293 Ok(format!("//! {}\n\n{}", preamble, stdout))
294}
295
296#[derive(Deserialize, Debug)]
297struct Grammar {
298 punct: Vec<(String, String)>,
299 keywords: Vec<String>,
300 contextual_keywords: Vec<String>,
301 literals: Vec<String>,
302 tokens: Vec<String>,
303 nodes: Vec<String>,
304 ast: BTreeMap<String, AstNode>,
305}
306
307#[derive(Deserialize, Debug)]
308struct AstNode {
309 #[serde(default)]
310 #[serde(rename = "enum")]
311 variants: Vec<String>,
312
313 #[serde(default)]
314 traits: Vec<String>,
315 #[serde(default)]
316 collections: Vec<(String, String)>,
317 #[serde(default)]
318 options: Vec<Attr>,
319}
320
321#[derive(Deserialize, Debug)]
322#[serde(untagged)]
323enum Attr {
324 Type(String),
325 NameType(String, String),
326}
327
328fn to_upper_snake_case(s: &str) -> String {
329 let mut buf = String::with_capacity(s.len());
330 let mut prev_is_upper = None;
331 for c in s.chars() {
332 if c.is_ascii_uppercase() && prev_is_upper == Some(false) {
333 buf.push('_')
334 }
335 prev_is_upper = Some(c.is_ascii_uppercase());
336
337 buf.push(c.to_ascii_uppercase());
338 }
339 buf
340}
341
342fn to_lower_snake_case(s: &str) -> String {
343 let mut buf = String::with_capacity(s.len());
344 let mut prev_is_upper = None;
345 for c in s.chars() {
346 if c.is_ascii_uppercase() && prev_is_upper == Some(false) {
347 buf.push('_')
348 }
349 prev_is_upper = Some(c.is_ascii_uppercase());
350
351 buf.push(c.to_ascii_lowercase());
352 }
353 buf
354}
diff --git a/xtask/src/lib.rs b/xtask/src/lib.rs
index a8685f567..cc69463a9 100644
--- a/xtask/src/lib.rs
+++ b/xtask/src/lib.rs
@@ -1,9 +1,8 @@
1//! FIXME: write short doc here 1//! FIXME: write short doc here
2 2
3mod boilerplate_gen; 3pub mod codegen;
4 4
5use std::{ 5use std::{
6 collections::HashMap,
7 error::Error, 6 error::Error,
8 fs, 7 fs,
9 io::{Error as IoError, ErrorKind}, 8 io::{Error as IoError, ErrorKind},
@@ -11,72 +10,12 @@ use std::{
11 process::{Command, Output, Stdio}, 10 process::{Command, Output, Stdio},
12}; 11};
13 12
14use itertools::Itertools; 13use crate::codegen::Mode;
15
16pub use self::boilerplate_gen::generate_boilerplate;
17 14
18pub type Result<T> = std::result::Result<T, Box<dyn Error>>; 15pub type Result<T> = std::result::Result<T, Box<dyn Error>>;
19 16
20pub const GRAMMAR: &str = "crates/ra_syntax/src/grammar.ron";
21const GRAMMAR_DIR: &str = "crates/ra_parser/src/grammar";
22const OK_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/ok";
23const ERR_INLINE_TESTS_DIR: &str = "crates/ra_syntax/test_data/parser/inline/err";
24
25pub const SYNTAX_KINDS: &str = "crates/ra_parser/src/syntax_kind/generated.rs";
26pub const AST: &str = "crates/ra_syntax/src/ast/generated.rs";
27const TOOLCHAIN: &str = "stable"; 17const TOOLCHAIN: &str = "stable";
28 18
29#[derive(Debug, PartialEq, Eq, Clone, Copy)]
30pub enum Mode {
31 Overwrite,
32 Verify,
33}
34pub use Mode::*;
35
36#[derive(Debug)]
37pub struct Test {
38 pub name: String,
39 pub text: String,
40 pub ok: bool,
41}
42
43pub fn collect_tests(s: &str) -> Vec<(usize, Test)> {
44 let mut res = vec![];
45 let prefix = "// ";
46 let comment_blocks = s
47 .lines()
48 .map(str::trim_start)
49 .enumerate()
50 .group_by(|(_idx, line)| line.starts_with(prefix));
51
52 'outer: for (is_comment, block) in comment_blocks.into_iter() {
53 if !is_comment {
54 continue;
55 }
56 let mut block = block.map(|(idx, line)| (idx, &line[prefix.len()..]));
57
58 let mut ok = true;
59 let (start_line, name) = loop {
60 match block.next() {
61 Some((idx, line)) if line.starts_with("test ") => {
62 break (idx, line["test ".len()..].to_string());
63 }
64 Some((idx, line)) if line.starts_with("test_err ") => {
65 ok = false;
66 break (idx, line["test_err ".len()..].to_string());
67 }
68 Some(_) => (),
69 None => continue 'outer,
70 }
71 };
72 let text: String =
73 itertools::join(block.map(|(_, line)| line).chain(::std::iter::once("")), "\n");
74 assert!(!text.trim().is_empty() && text.ends_with('\n'));
75 res.push((start_line, Test { name, text, ok }))
76 }
77 res
78}
79
80pub fn project_root() -> PathBuf { 19pub fn project_root() -> PathBuf {
81 Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(1).unwrap().to_path_buf() 20 Path::new(&env!("CARGO_MANIFEST_DIR")).ancestors().nth(1).unwrap().to_path_buf()
82} 21}
@@ -126,7 +65,7 @@ pub fn run_rustfmt(mode: Mode) -> Result<()> {
126 _ => install_rustfmt()?, 65 _ => install_rustfmt()?,
127 }; 66 };
128 67
129 if mode == Verify { 68 if mode == Mode::Verify {
130 run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?; 69 run(&format!("rustup run {} -- cargo fmt -- --check", TOOLCHAIN), ".")?;
131 } else { 70 } else {
132 run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?; 71 run(&format!("rustup run {} -- cargo fmt", TOOLCHAIN), ".")?;
@@ -206,37 +145,6 @@ pub fn run_fuzzer() -> Result<()> {
206 run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax") 145 run("rustup run nightly -- cargo fuzz run parser", "./crates/ra_syntax")
207} 146}
208 147
209pub fn gen_tests(mode: Mode) -> Result<()> {
210 let tests = tests_from_dir(&project_root().join(Path::new(GRAMMAR_DIR)))?;
211 fn install_tests(tests: &HashMap<String, Test>, into: &str, mode: Mode) -> Result<()> {
212 let tests_dir = project_root().join(into);
213 if !tests_dir.is_dir() {
214 fs::create_dir_all(&tests_dir)?;
215 }
216 // ok is never actually read, but it needs to be specified to create a Test in existing_tests
217 let existing = existing_tests(&tests_dir, true)?;
218 for t in existing.keys().filter(|&t| !tests.contains_key(t)) {
219 panic!("Test is deleted: {}", t);
220 }
221
222 let mut new_idx = existing.len() + 1;
223 for (name, test) in tests {
224 let path = match existing.get(name) {
225 Some((path, _test)) => path.clone(),
226 None => {
227 let file_name = format!("{:04}_{}.rs", new_idx, name);
228 new_idx += 1;
229 tests_dir.join(file_name)
230 }
231 };
232 update(&path, &test.text, mode)?;
233 }
234 Ok(())
235 }
236 install_tests(&tests.ok, OK_INLINE_TESTS_DIR, mode)?;
237 install_tests(&tests.err, ERR_INLINE_TESTS_DIR, mode)
238}
239
240fn do_run<F>(cmdline: &str, dir: &str, mut f: F) -> Result<Output> 148fn do_run<F>(cmdline: &str, dir: &str, mut f: F) -> Result<Output>
241where 149where
242 F: FnMut(&mut Command), 150 F: FnMut(&mut Command),
@@ -253,80 +161,3 @@ where
253 } 161 }
254 Ok(output) 162 Ok(output)
255} 163}
256
/// Inline tests harvested from the grammar sources, bucketed by whether the
/// snippet is expected to parse cleanly (`ok`) or to produce errors (`err`),
/// keyed by test name.
#[derive(Default, Debug)]
struct Tests {
    pub ok: HashMap<String, Test>,
    pub err: HashMap<String, Test>,
}
262
263fn tests_from_dir(dir: &Path) -> Result<Tests> {
264 let mut res = Tests::default();
265 for entry in ::walkdir::WalkDir::new(dir) {
266 let entry = entry.unwrap();
267 if !entry.file_type().is_file() {
268 continue;
269 }
270 if entry.path().extension().unwrap_or_default() != "rs" {
271 continue;
272 }
273 process_file(&mut res, entry.path())?;
274 }
275 let grammar_rs = dir.parent().unwrap().join("grammar.rs");
276 process_file(&mut res, &grammar_rs)?;
277 return Ok(res);
278 fn process_file(res: &mut Tests, path: &Path) -> Result<()> {
279 let text = fs::read_to_string(path)?;
280
281 for (_, test) in collect_tests(&text) {
282 if test.ok {
283 if let Some(old_test) = res.ok.insert(test.name.clone(), test) {
284 Err(format!("Duplicate test: {}", old_test.name))?
285 }
286 } else {
287 if let Some(old_test) = res.err.insert(test.name.clone(), test) {
288 Err(format!("Duplicate test: {}", old_test.name))?
289 }
290 }
291 }
292 Ok(())
293 }
294}
295
296fn existing_tests(dir: &Path, ok: bool) -> Result<HashMap<String, (PathBuf, Test)>> {
297 let mut res = HashMap::new();
298 for file in fs::read_dir(dir)? {
299 let file = file?;
300 let path = file.path();
301 if path.extension().unwrap_or_default() != "rs" {
302 continue;
303 }
304 let name = {
305 let file_name = path.file_name().unwrap().to_str().unwrap();
306 file_name[5..file_name.len() - 3].to_string()
307 };
308 let text = fs::read_to_string(&path)?;
309 let test = Test { name: name.clone(), text, ok };
310 if let Some(old) = res.insert(name, (path, test)) {
311 println!("Duplicate test: {:?}", old);
312 }
313 }
314 Ok(res)
315}
316
317/// A helper to update file on disk if it has changed.
318/// With verify = false,
319pub fn update(path: &Path, contents: &str, mode: Mode) -> Result<()> {
320 match fs::read_to_string(path) {
321 Ok(ref old_contents) if old_contents == contents => {
322 return Ok(());
323 }
324 _ => (),
325 }
326 if mode == Verify {
327 Err(format!("`{}` is not up-to-date", path.display()))?;
328 }
329 eprintln!("updating {}", path.display());
330 fs::write(path, contents)?;
331 Ok(())
332}
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index c08915aac..0b19c34f4 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -7,8 +7,8 @@ use core::str;
7use pico_args::Arguments; 7use pico_args::Arguments;
8use std::{env, path::PathBuf}; 8use std::{env, path::PathBuf};
9use xtask::{ 9use xtask::{
10 gen_tests, generate_boilerplate, install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, 10 codegen::{self, Mode},
11 run_with_output, Cmd, Overwrite, Result, 11 install_format_hook, run, run_clippy, run_fuzzer, run_rustfmt, run_with_output, Cmd, Result,
12}; 12};
13 13
14// Latest stable, feel free to send a PR if this lags behind. 14// Latest stable, feel free to send a PR if this lags behind.
@@ -62,21 +62,21 @@ fn main() -> Result<()> {
62 help::print_no_param_subcommand_help(&subcommand); 62 help::print_no_param_subcommand_help(&subcommand);
63 return Ok(()); 63 return Ok(());
64 } 64 }
65 gen_tests(Overwrite)? 65 codegen::generate_parser_tests(Mode::Overwrite)?
66 } 66 }
67 "codegen" => { 67 "codegen" => {
68 if matches.contains(["-h", "--help"]) { 68 if matches.contains(["-h", "--help"]) {
69 help::print_no_param_subcommand_help(&subcommand); 69 help::print_no_param_subcommand_help(&subcommand);
70 return Ok(()); 70 return Ok(());
71 } 71 }
72 generate_boilerplate(Overwrite)? 72 codegen::generate_syntax(Mode::Overwrite)?
73 } 73 }
74 "format" => { 74 "format" => {
75 if matches.contains(["-h", "--help"]) { 75 if matches.contains(["-h", "--help"]) {
76 help::print_no_param_subcommand_help(&subcommand); 76 help::print_no_param_subcommand_help(&subcommand);
77 return Ok(()); 77 return Ok(());
78 } 78 }
79 run_rustfmt(Overwrite)? 79 run_rustfmt(Mode::Overwrite)?
80 } 80 }
81 "format-hook" => { 81 "format-hook" => {
82 if matches.contains(["-h", "--help"]) { 82 if matches.contains(["-h", "--help"]) {
diff --git a/xtask/tests/tidy-tests/cli.rs b/xtask/tests/tidy-tests/cli.rs
index 5d8ddea83..304d77d89 100644
--- a/xtask/tests/tidy-tests/cli.rs
+++ b/xtask/tests/tidy-tests/cli.rs
@@ -1,23 +1,26 @@
1use walkdir::WalkDir; 1use walkdir::WalkDir;
2use xtask::{gen_tests, generate_boilerplate, project_root, run_rustfmt, Verify}; 2use xtask::{
3 codegen::{self, Mode},
4 project_root, run_rustfmt,
5};
3 6
4#[test] 7#[test]
5fn generated_grammar_is_fresh() { 8fn generated_grammar_is_fresh() {
6 if let Err(error) = generate_boilerplate(Verify) { 9 if let Err(error) = codegen::generate_syntax(Mode::Verify) {
7 panic!("{}. Please update it by running `cargo xtask codegen`", error); 10 panic!("{}. Please update it by running `cargo xtask codegen`", error);
8 } 11 }
9} 12}
10 13
11#[test] 14#[test]
12fn generated_tests_are_fresh() { 15fn generated_tests_are_fresh() {
13 if let Err(error) = gen_tests(Verify) { 16 if let Err(error) = codegen::generate_parser_tests(Mode::Verify) {
14 panic!("{}. Please update tests by running `cargo xtask gen-tests`", error); 17 panic!("{}. Please update tests by running `cargo xtask gen-tests`", error);
15 } 18 }
16} 19}
17 20
18#[test] 21#[test]
19fn check_code_formatting() { 22fn check_code_formatting() {
20 if let Err(error) = run_rustfmt(Verify) { 23 if let Err(error) = run_rustfmt(Mode::Verify) {
21 panic!("{}. Please format the code by running `cargo format`", error); 24 panic!("{}. Please format the code by running `cargo format`", error);
22 } 25 }
23} 26}